Diffstat (limited to 'src')
-rw-r--r--src/Alpha_complex/concept/SimplicialComplexForAlpha.h89
-rw-r--r--src/Alpha_complex/doc/COPYRIGHT19
-rw-r--r--src/Alpha_complex/doc/Intro_alpha_complex.h36
-rw-r--r--src/Alpha_complex/doc/alpha_complex_doc.ipe315
-rw-r--r--src/Alpha_complex/doc/alpha_complex_doc.pngbin25554 -> 18720 bytes
-rw-r--r--src/Alpha_complex/example/Alpha_complex_from_off.cpp47
-rw-r--r--src/Alpha_complex/example/Alpha_complex_from_points.cpp46
-rw-r--r--src/Alpha_complex/example/CMakeLists.txt50
-rw-r--r--src/Alpha_complex/include/gudhi/Alpha_complex.h233
-rw-r--r--src/Alpha_complex/test/Alpha_complex_unit_test.cpp201
-rw-r--r--src/Alpha_complex/test/CMakeLists.txt34
-rw-r--r--src/Bitmap_cubical_complex/doc/COPYRIGHT19
-rw-r--r--src/Bottleneck/concept/Persistence_diagram.h7
-rw-r--r--src/Bottleneck/example/CMakeLists.txt5
-rw-r--r--src/Bottleneck/include/gudhi/Graph_matching.h197
-rw-r--r--src/Bottleneck/include/gudhi/Layered_neighbors_finder.h74
-rw-r--r--src/Bottleneck/include/gudhi/Neighbors_finder.h96
-rw-r--r--src/Bottleneck/include/gudhi/Persistence_diagrams_graph.h147
-rw-r--r--src/Bottleneck/include/gudhi/Planar_neighbors_finder.h119
-rw-r--r--src/Bottleneck/test/CMakeLists.txt21
-rw-r--r--src/Bottleneck/test/bottleneck_unit_test.cpp26
-rw-r--r--src/Bottleneck_distance/benchmark/CMakeLists.txt9
-rw-r--r--src/Bottleneck_distance/benchmark/bottleneck_chrono.cpp62
-rw-r--r--src/Bottleneck_distance/concept/Persistence_diagram.h50
-rw-r--r--src/Bottleneck_distance/doc/COPYRIGHT19
-rw-r--r--src/Bottleneck_distance/doc/Intro_bottleneck_distance.h51
-rw-r--r--src/Bottleneck_distance/doc/perturb_pd.pngbin0 -> 20864 bytes
-rw-r--r--src/Bottleneck_distance/example/CMakeLists.txt20
-rw-r--r--src/Bottleneck_distance/example/alpha_rips_persistence_bottleneck_distance.cpp190
-rw-r--r--src/Bottleneck_distance/example/bottleneck_basic_example.cpp50
-rw-r--r--src/Bottleneck_distance/example/bottleneck_read_file_example.cpp72
-rw-r--r--src/Bottleneck_distance/include/gudhi/Bottleneck.h115
-rw-r--r--src/Bottleneck_distance/include/gudhi/Graph_matching.h182
-rw-r--r--src/Bottleneck_distance/include/gudhi/Internal_point.h91
-rw-r--r--src/Bottleneck_distance/include/gudhi/Neighbors_finder.h172
-rw-r--r--src/Bottleneck_distance/include/gudhi/Persistence_graph.h188
-rw-r--r--src/Bottleneck_distance/test/CMakeLists.txt25
-rw-r--r--src/Bottleneck_distance/test/README (renamed from src/Bottleneck/test/README)0
-rw-r--r--src/Bottleneck_distance/test/bottleneck_unit_test.cpp167
-rw-r--r--src/CMakeLists.txt215
-rw-r--r--src/Contraction/doc/COPYRIGHT18
-rw-r--r--src/Contraction/example/Garland_heckbert.cpp25
-rw-r--r--src/Contraction/example/Garland_heckbert/Error_quadric.h6
-rw-r--r--src/Contraction/example/Rips_contraction.cpp24
-rw-r--r--src/Doxyfile24
-rw-r--r--src/GudhUI/CMakeLists.txt69
-rw-r--r--src/GudhUI/gui/MainWindow.h4
-rw-r--r--src/GudhUI/gui/Menu_edge_contraction.h5
-rw-r--r--src/GudhUI/gui/Menu_k_nearest_neighbors.h2
-rw-r--r--src/GudhUI/gui/Menu_persistence.h2
-rw-r--r--src/GudhUI/gui/Menu_uniform_neighbors.h2
-rw-r--r--src/GudhUI/model/Model.h6
-rw-r--r--src/GudhUI/utils/Critical_points.h1
-rw-r--r--src/GudhUI/utils/Is_manifold.h1
-rw-r--r--src/GudhUI/utils/Persistence_compute.h17
-rw-r--r--src/GudhUI/utils/Vertex_collapsor.h1
-rw-r--r--src/GudhUI/view/Viewer.h3
-rw-r--r--src/GudhUI/view/Viewer_instructor.h3
-rw-r--r--src/Gudhi_stat/utilities/Landscape_bootstrap.cpp34
-rw-r--r--src/Persistent_cohomology/benchmark/CMakeLists.txt14
-rw-r--r--src/Persistent_cohomology/benchmark/performance_rips_persistence.cpp (renamed from src/Persistent_cohomology/example/performance_rips_persistence.cpp)36
-rw-r--r--src/Persistent_cohomology/doc/COPYRIGHT19
-rw-r--r--src/Persistent_cohomology/doc/Intro_persistent_cohomology.h42
-rw-r--r--src/Persistent_cohomology/example/CMakeLists.txt98
-rw-r--r--src/Persistent_cohomology/example/README50
-rw-r--r--src/Persistent_cohomology/example/alpha_complex_3d_helper.h76
-rw-r--r--src/Persistent_cohomology/example/alpha_complex_3d_persistence.cpp140
-rw-r--r--src/Persistent_cohomology/example/alpha_complex_persistence.cpp66
-rw-r--r--src/Persistent_cohomology/example/custom_persistence_sort.cpp89
-rw-r--r--src/Persistent_cohomology/example/exact_alpha_complex_3d_persistence.cpp245
-rw-r--r--src/Persistent_cohomology/example/periodic_alpha_complex_3d_persistence.cpp79
-rw-r--r--src/Persistent_cohomology/example/persistence_from_simple_simplex_tree.cpp19
-rw-r--r--src/Persistent_cohomology/example/plain_homology.cpp11
-rw-r--r--src/Persistent_cohomology/example/rips_distance_matrix_persistence.cpp144
-rw-r--r--src/Persistent_cohomology/example/rips_multifield_persistence.cpp58
-rw-r--r--src/Persistent_cohomology/example/rips_persistence.cpp60
-rw-r--r--src/Persistent_cohomology/example/rips_persistence_step_by_step.cpp217
-rw-r--r--src/Persistent_cohomology/example/rips_persistence_via_boundary_matrix.cpp52
-rw-r--r--src/Persistent_cohomology/example/weighted_alpha_complex_3d_persistence.cpp263
-rw-r--r--src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h26
-rw-r--r--src/Persistent_cohomology/test/betti_numbers_unit_test.cpp57
-rw-r--r--src/Persistent_cohomology/test/persistent_cohomology_unit_test_multi_field.cpp4
-rw-r--r--src/Rips_complex/concept/SimplicialComplexForRips.h54
-rw-r--r--src/Rips_complex/doc/COPYRIGHT19
-rw-r--r--src/Rips_complex/doc/Intro_rips_complex.h158
-rw-r--r--src/Rips_complex/doc/rips_complex_representation.ipe326
-rw-r--r--src/Rips_complex/doc/rips_complex_representation.pngbin0 -> 15677 bytes
-rw-r--r--src/Rips_complex/doc/rips_one_skeleton.ipe326
-rw-r--r--src/Rips_complex/doc/rips_one_skeleton.pngbin0 -> 47651 bytes
-rw-r--r--src/Rips_complex/example/CMakeLists.txt47
-rw-r--r--src/Rips_complex/example/example_one_skeleton_rips_from_distance_matrix.cpp58
-rw-r--r--src/Rips_complex/example/example_one_skeleton_rips_from_points.cpp52
-rw-r--r--src/Rips_complex/example/example_rips_complex_from_csv_distance_matrix_file.cpp72
-rw-r--r--src/Rips_complex/example/example_rips_complex_from_off_file.cpp71
-rw-r--r--src/Rips_complex/example/full_skeleton_rips_for_doc.txt26
-rw-r--r--src/Rips_complex/example/one_skeleton_rips_for_doc.txt20
-rw-r--r--src/Rips_complex/include/gudhi/Rips_complex.h185
-rw-r--r--src/Rips_complex/test/CMakeLists.txt25
-rw-r--r--src/Rips_complex/test/README12
-rw-r--r--src/Rips_complex/test/test_rips_complex.cpp353
-rw-r--r--src/Simplex_tree/doc/COPYRIGHT19
-rw-r--r--src/Simplex_tree/doc/Intro_simplex_tree.h4
-rw-r--r--src/Simplex_tree/example/CMakeLists.txt16
-rw-r--r--src/Simplex_tree/example/example_alpha_shapes_3_simplex_tree_from_off_file.cpp (renamed from src/Simplex_tree/example/simplex_tree_from_alpha_shapes_3.cpp)26
-rw-r--r--src/Simplex_tree/example/mini_simplex_tree.cpp9
-rw-r--r--src/Simplex_tree/example/simple_simplex_tree.cpp15
-rw-r--r--src/Simplex_tree/example/simplex_tree_from_cliques_of_graph.cpp11
-rw-r--r--src/Simplex_tree/include/gudhi/Simplex_tree.h2
-rw-r--r--src/Simplex_tree/test/simplex_tree_unit_test.cpp69
-rw-r--r--src/Skeleton_blocker/doc/COPYRIGHT18
-rw-r--r--src/Spatial_searching/doc/Intro_spatial_searching.h62
-rw-r--r--src/Spatial_searching/example/CMakeLists.txt13
-rw-r--r--src/Spatial_searching/example/example_spatial_searching.cpp52
-rw-r--r--src/Spatial_searching/include/gudhi/Kd_tree_search.h264
-rw-r--r--src/Spatial_searching/test/CMakeLists.txt21
-rw-r--r--src/Spatial_searching/test/test_Kd_tree_search.cpp112
-rw-r--r--src/Subsampling/doc/Intro_subsampling.h70
-rw-r--r--src/Subsampling/example/CMakeLists.txt17
-rw-r--r--src/Subsampling/example/example_choose_n_farthest_points.cpp27
-rw-r--r--src/Subsampling/example/example_custom_kernel.cpp63
-rw-r--r--src/Subsampling/example/example_pick_n_random_points.cpp27
-rw-r--r--src/Subsampling/example/example_sparsify_point_set.cpp27
-rw-r--r--src/Subsampling/include/gudhi/choose_n_farthest_points.h157
-rw-r--r--src/Subsampling/include/gudhi/pick_n_random_points.h86
-rw-r--r--src/Subsampling/include/gudhi/sparsify_point_set.h113
-rw-r--r--src/Subsampling/test/CMakeLists.txt34
-rw-r--r--src/Subsampling/test/test_choose_n_farthest_points.cpp103
-rw-r--r--src/Subsampling/test/test_pick_n_random_points.cpp69
-rw-r--r--src/Subsampling/test/test_sparsify_point_set.cpp55
-rw-r--r--src/Tangential_complex/benchmark/CMakeLists.txt11
-rw-r--r--src/Tangential_complex/benchmark/RIB_exporter.h269
-rw-r--r--src/Tangential_complex/benchmark/XML_exporter.h207
-rw-r--r--src/Tangential_complex/benchmark/benchmark_script.txt221
-rw-r--r--src/Tangential_complex/benchmark/benchmark_tc.cpp785
-rw-r--r--src/Tangential_complex/doc/COPYRIGHT19
-rw-r--r--src/Tangential_complex/doc/Intro_tangential_complex.h119
-rw-r--r--src/Tangential_complex/doc/tc_example_01.pngbin0 -> 20323 bytes
-rw-r--r--src/Tangential_complex/doc/tc_example_02.pngbin0 -> 36017 bytes
-rw-r--r--src/Tangential_complex/doc/tc_example_03.pngbin0 -> 62990 bytes
-rw-r--r--src/Tangential_complex/doc/tc_example_05.pngbin0 -> 36032 bytes
-rw-r--r--src/Tangential_complex/doc/tc_example_06.pngbin0 -> 37195 bytes
-rw-r--r--src/Tangential_complex/doc/tc_example_07.pngbin0 -> 49399 bytes
-rw-r--r--src/Tangential_complex/doc/tc_example_07_after.pngbin0 -> 50132 bytes
-rw-r--r--src/Tangential_complex/doc/tc_example_07_before.pngbin0 -> 48898 bytes
-rw-r--r--src/Tangential_complex/doc/tc_example_08.pngbin0 -> 63636 bytes
-rw-r--r--src/Tangential_complex/doc/tc_example_09.pngbin0 -> 35453 bytes
-rw-r--r--src/Tangential_complex/doc/tc_examples.pngbin0 -> 150776 bytes
-rw-r--r--src/Tangential_complex/example/CMakeLists.txt19
-rw-r--r--src/Tangential_complex/example/example_basic.cpp46
-rw-r--r--src/Tangential_complex/example/example_with_perturb.cpp53
-rw-r--r--src/Tangential_complex/include/gudhi/Tangential_complex.h2276
-rw-r--r--src/Tangential_complex/include/gudhi/Tangential_complex/Simplicial_complex.h539
-rw-r--r--src/Tangential_complex/include/gudhi/Tangential_complex/config.h43
-rw-r--r--src/Tangential_complex/include/gudhi/Tangential_complex/utilities.h195
-rw-r--r--src/Tangential_complex/test/CMakeLists.txt23
-rw-r--r--src/Tangential_complex/test/test_tangential_complex.cpp128
-rw-r--r--src/Witness_complex/concept/SimplicialComplexForWitness.h (renamed from src/Witness_complex/concept/Simplicial_complex_for_witness.h)65
-rw-r--r--src/Witness_complex/doc/COPYRIGHT19
-rw-r--r--src/Witness_complex/doc/Witness_complex_doc.h105
-rw-r--r--src/Witness_complex/doc/Witness_complex_representation.ipe280
-rw-r--r--src/Witness_complex/doc/Witness_complex_representation.pngbin48899 -> 21202 bytes
-rw-r--r--src/Witness_complex/doc/swit.svg1303
-rw-r--r--src/Witness_complex/example/CMakeLists.txt52
-rw-r--r--src/Witness_complex/example/example_nearest_landmark_table.cpp69
-rw-r--r--src/Witness_complex/example/example_strong_witness_complex_off.cpp79
-rw-r--r--src/Witness_complex/example/example_strong_witness_persistence.cpp171
-rw-r--r--src/Witness_complex/example/example_witness_complex_off.cpp60
-rw-r--r--src/Witness_complex/example/example_witness_complex_persistence.cpp171
-rw-r--r--src/Witness_complex/example/example_witness_complex_sphere.cpp (renamed from src/Witness_complex/example/witness_complex_sphere.cpp)34
-rw-r--r--src/Witness_complex/example/generators.h31
-rw-r--r--src/Witness_complex/example/witness_complex_from_file.cpp100
-rw-r--r--src/Witness_complex/include/gudhi/Active_witness/Active_witness.h67
-rw-r--r--src/Witness_complex/include/gudhi/Active_witness/Active_witness_iterator.h108
-rw-r--r--src/Witness_complex/include/gudhi/Euclidean_strong_witness_complex.h104
-rw-r--r--src/Witness_complex/include/gudhi/Euclidean_witness_complex.h106
-rw-r--r--src/Witness_complex/include/gudhi/Landmark_choice_by_furthest_point.h105
-rw-r--r--src/Witness_complex/include/gudhi/Landmark_choice_by_random_point.h96
-rw-r--r--src/Witness_complex/include/gudhi/Strong_witness_complex.h185
-rw-r--r--src/Witness_complex/include/gudhi/Witness_complex.h333
-rw-r--r--src/Witness_complex/include/gudhi/Witness_complex/all_faces_in.h55
-rw-r--r--src/Witness_complex/test/CMakeLists.txt34
-rw-r--r--src/Witness_complex/test/simple_witness_complex.cpp59
-rw-r--r--src/Witness_complex/test/test_euclidean_simple_witness_complex.cpp135
-rw-r--r--src/Witness_complex/test/test_simple_witness_complex.cpp53
-rw-r--r--src/Witness_complex/test/witness_complex_points.cpp64
-rw-r--r--src/cmake/modules/FindGMPXX.cmake4
-rw-r--r--src/cmake/modules/FindQGLViewer.cmake2
-rw-r--r--src/cmake/modules/GUDHI_third_party_libraries.txt107
-rw-r--r--src/cmake/modules/GUDHI_user_version_target.txt110
-rw-r--r--src/common/doc/header.html24
-rw-r--r--src/common/doc/main_page.h186
-rw-r--r--src/common/example/CMakeLists.txt14
-rw-r--r--src/common/example/example_CGAL_3D_points_off_reader.cpp2
-rw-r--r--src/common/example/example_CGAL_points_off_reader.cpp2
-rw-r--r--src/common/include/gudhi/Clock.h48
-rw-r--r--src/common/include/gudhi/Debug_utils.h2
-rw-r--r--src/common/include/gudhi/console_color.h97
-rw-r--r--src/common/include/gudhi/distance_functions.h36
-rw-r--r--src/common/include/gudhi/graph_simplicial_complex.h59
-rw-r--r--src/common/include/gudhi/random_point_generators.h474
-rw-r--r--src/common/include/gudhi/reader_utils.h166
-rw-r--r--src/common/include/gudhi_patches/Bottleneck_distance_CGAL_patches.txt3
-rw-r--r--src/common/include/gudhi_patches/CGAL/Convex_hull.h56
-rw-r--r--src/common/include/gudhi_patches/CGAL/Delaunay_triangulation.h933
-rw-r--r--src/common/include/gudhi_patches/CGAL/Epeck_d.h53
-rw-r--r--src/common/include/gudhi_patches/CGAL/Epick_d.h71
-rw-r--r--src/common/include/gudhi_patches/CGAL/IO/Triangulation_off_ostream.h320
-rw-r--r--src/common/include/gudhi_patches/CGAL/Kd_tree.h582
-rw-r--r--src/common/include/gudhi_patches/CGAL/Kd_tree_node.h586
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_LA_base.h177
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_LA_functors.h344
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_base.h40
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_change_FT.h117
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_complete.h33
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_filter_K.h79
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_filter_NT.h93
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_per_dimension.h33
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_static_filters.h95
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Coaffine.h330
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Define_kernel_types.h50
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Dimension_base.h49
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Filtered_predicate2.h137
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/KernelD_converter.h199
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_2_interface.h104
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_3_interface.h102
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_d_interface.h298
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_object_converter.h134
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/LA_eigen/LA.h175
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/LA_eigen/constructors.h162
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Lazy_cartesian.h188
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Aff_transformation.h59
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Hyperplane.h159
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Iso_box.h88
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Line.h66
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Ray.h66
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Segment.h121
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Sphere.h132
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Weighted_point.h205
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/array.h165
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/avx4.h213
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_iterator_to_points_from_iterator_to_vectors.h76
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_iterator_to_points_from_points.h211
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_iterator_to_vectors_from_vectors.h201
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_points_from_vectors.h164
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_vectors_small_dim.h58
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_vectors_small_dim_internal.h164
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/mix.h46
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/sse2.h145
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/v2int.h181
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/vector.h167
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Cartesian_wrap.h305
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Hyperplane_d.h131
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Point_d.h284
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Ref_count_obj.h120
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Segment_d.h133
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Sphere_d.h130
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Vector_d.h266
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Weighted_point_d.h129
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/function_objects_cartesian.h1355
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/functor_properties.h40
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/functor_tags.h363
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/static_int.h61
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/store_kernel.h104
-rw-r--r--src/common/include/gudhi_patches/CGAL/NewKernel_d/utils.h306
-rw-r--r--src/common/include/gudhi_patches/CGAL/Orthogonal_incremental_neighbor_search.h620
-rw-r--r--src/common/include/gudhi_patches/CGAL/Regular_triangulation.h1169
-rw-r--r--src/common/include/gudhi_patches/CGAL/Regular_triangulation_traits_adapter.h288
-rw-r--r--src/common/include/gudhi_patches/CGAL/TDS_full_cell_default_storage_policy.h99
-rw-r--r--src/common/include/gudhi_patches/CGAL/TDS_full_cell_mirror_storage_policy.h71
-rw-r--r--src/common/include/gudhi_patches/CGAL/Triangulation.h1424
-rw-r--r--src/common/include/gudhi_patches/CGAL/Triangulation_data_structure.h1603
-rw-r--r--src/common/include/gudhi_patches/CGAL/Triangulation_ds_full_cell.h311
-rw-r--r--src/common/include/gudhi_patches/CGAL/Triangulation_ds_vertex.h154
-rw-r--r--src/common/include/gudhi_patches/CGAL/Triangulation_face.h111
-rw-r--r--src/common/include/gudhi_patches/CGAL/Triangulation_full_cell.h148
-rw-r--r--src/common/include/gudhi_patches/CGAL/Triangulation_vertex.h128
-rw-r--r--src/common/include/gudhi_patches/CGAL/argument_swaps.h88
-rw-r--r--src/common/include/gudhi_patches/CGAL/determinant_of_vectors.h117
-rw-r--r--src/common/include/gudhi_patches/CGAL/internal/Combination_enumerator.h148
-rw-r--r--src/common/include/gudhi_patches/CGAL/internal/Static_or_dynamic_array.h116
-rw-r--r--src/common/include/gudhi_patches/CGAL/internal/Triangulation/Dummy_TDS.h49
-rw-r--r--src/common/include/gudhi_patches/CGAL/internal/Triangulation/Triangulation_ds_iterators.h154
-rw-r--r--src/common/include/gudhi_patches/CGAL/internal/Triangulation/utilities.h154
-rw-r--r--src/common/include/gudhi_patches/CGAL/iterator_from_indices.h75
-rw-r--r--src/common/include/gudhi_patches/CGAL/transforming_iterator.h123
-rw-r--r--src/common/include/gudhi_patches/CGAL/transforming_pair_iterator.h127
-rw-r--r--src/common/include/gudhi_patches/CGAL/typeset.h117
-rw-r--r--src/common/include/gudhi_patches/Tangential_complex_CGAL_patches.txt82
-rw-r--r--src/common/test/CMakeLists.txt9
-rw-r--r--src/common/test/test_distance_matrix_reader.cpp85
-rw-r--r--src/common/test/test_points_off_reader.cpp2
-rw-r--r--src/cython/CMakeLists.txt243
-rw-r--r--src/cython/CONVENTIONS9
-rw-r--r--src/cython/README3
-rw-r--r--src/cython/cython/alpha_complex.pyx121
-rw-r--r--src/cython/cython/bottleneck_distance.pyx59
-rw-r--r--src/cython/cython/cubical_complex.pyx197
-rw-r--r--src/cython/cython/euclidean_strong_witness_complex.pyx97
-rw-r--r--src/cython/cython/euclidean_witness_complex.pyx97
-rw-r--r--src/cython/cython/off_reader.pyx49
-rw-r--r--src/cython/cython/periodic_cubical_complex.pyx197
-rwxr-xr-xsrc/cython/cython/persistence_graphical_tools.py152
-rw-r--r--src/cython/cython/rips_complex.pyx125
-rw-r--r--src/cython/cython/simplex_tree.pyx427
-rw-r--r--src/cython/cython/strong_witness_complex.pyx81
-rw-r--r--src/cython/cython/subsampling.pyx140
-rw-r--r--src/cython/cython/tangential_complex.pyx151
-rw-r--r--src/cython/cython/witness_complex.pyx81
-rw-r--r--src/cython/cythonize_gudhi.py.in48
-rw-r--r--src/cython/doc/Makefile.in44
-rw-r--r--src/cython/doc/_templates/layout.html270
-rw-r--r--src/cython/doc/alpha_complex_ref.rst10
-rw-r--r--src/cython/doc/alpha_complex_sum.rst22
-rw-r--r--src/cython/doc/alpha_complex_user.rst205
-rw-r--r--src/cython/doc/bottleneck_distance_sum.rst15
-rw-r--r--src/cython/doc/bottleneck_distance_user.rst37
-rw-r--r--src/cython/doc/citation.rst15
-rwxr-xr-xsrc/cython/doc/conf.py278
-rw-r--r--src/cython/doc/cubical_complex_ref.rst9
-rw-r--r--src/cython/doc/cubical_complex_sum.rst15
-rw-r--r--src/cython/doc/cubical_complex_user.rst161
-rw-r--r--src/cython/doc/euclidean_strong_witness_complex_ref.rst10
-rw-r--r--src/cython/doc/euclidean_witness_complex_ref.rst10
-rw-r--r--src/cython/doc/examples.rst4
-rwxr-xr-xsrc/cython/doc/generate_examples.py43
-rw-r--r--src/cython/doc/img/graphical_tools_representation.pngbin0 -> 10846 bytes
-rw-r--r--src/cython/doc/index.rst87
-rw-r--r--src/cython/doc/installation.rst171
-rw-r--r--src/cython/doc/make.bat.in67
-rw-r--r--src/cython/doc/periodic_cubical_complex_ref.rst9
-rw-r--r--src/cython/doc/persistence_graphical_tools_ref.rst8
-rw-r--r--src/cython/doc/persistence_graphical_tools_sum.rst12
-rw-r--r--src/cython/doc/persistence_graphical_tools_user.rst67
-rw-r--r--src/cython/doc/persistent_cohomology_sum.rst27
-rw-r--r--src/cython/doc/persistent_cohomology_user.rst115
-rwxr-xr-xsrc/cython/doc/pyplots/barcode_persistence.py5
-rwxr-xr-xsrc/cython/doc/pyplots/diagram_persistence.py5
-rwxr-xr-xsrc/cython/doc/pyplots/show_palette_values.py2
-rwxr-xr-xsrc/cython/doc/python3-sphinx-build11
-rw-r--r--src/cython/doc/rips_complex_ref.rst10
-rw-r--r--src/cython/doc/rips_complex_sum.rst17
-rw-r--r--src/cython/doc/rips_complex_user.rst237
-rw-r--r--src/cython/doc/simplex_tree_ref.rst10
-rw-r--r--src/cython/doc/simplex_tree_sum.rst14
-rw-r--r--src/cython/doc/simplex_tree_user.rst69
-rw-r--r--src/cython/doc/strong_witness_complex_ref.rst10
-rw-r--r--src/cython/doc/tangential_complex_ref.rst10
-rw-r--r--src/cython/doc/tangential_complex_sum.rst15
-rw-r--r--src/cython/doc/tangential_complex_user.rst195
-rw-r--r--src/cython/doc/todos.rst5
-rw-r--r--src/cython/doc/witness_complex_ref.rst10
-rw-r--r--src/cython/doc/witness_complex_sum.rst17
-rw-r--r--src/cython/doc/witness_complex_user.rst135
-rwxr-xr-xsrc/cython/example/alpha_complex_diagram_persistence_from_off_file_example.py70
-rwxr-xr-xsrc/cython/example/alpha_complex_from_points_example.py67
-rwxr-xr-xsrc/cython/example/alpha_rips_persistence_bottleneck_distance.py100
-rwxr-xr-xsrc/cython/example/bottleneck_basic_example.py48
-rwxr-xr-xsrc/cython/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py78
-rwxr-xr-xsrc/cython/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py78
-rwxr-xr-xsrc/cython/example/gudhi_graphical_tools_example.py47
-rwxr-xr-xsrc/cython/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py76
-rwxr-xr-xsrc/cython/example/random_cubical_complex_persistence_example.py57
-rwxr-xr-xsrc/cython/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py71
-rwxr-xr-xsrc/cython/example/rips_complex_diagram_persistence_from_off_file_example.py71
-rwxr-xr-xsrc/cython/example/rips_complex_from_points_example.py40
-rwxr-xr-xsrc/cython/example/rips_persistence_diagram.py42
-rwxr-xr-xsrc/cython/example/simplex_tree_example.py66
-rwxr-xr-xsrc/cython/example/tangential_complex_plain_homology_from_off_file_example.py66
-rwxr-xr-xsrc/cython/example/witness_complex_from_nearest_landmark_table.py46
-rw-r--r--src/cython/gudhi.pyx.in39
-rw-r--r--src/cython/include/Alpha_complex_interface.h82
-rw-r--r--src/cython/include/Bottleneck_distance_interface.h53
-rw-r--r--src/cython/include/Cubical_complex_interface.h56
-rw-r--r--src/cython/include/Euclidean_strong_witness_complex_interface.h93
-rw-r--r--src/cython/include/Euclidean_witness_complex_interface.h92
-rw-r--r--src/cython/include/Off_reader_interface.h (renamed from src/Bottleneck/example/random_diagrams.cpp)36
-rw-r--r--src/cython/include/Persistent_cohomology_interface.h95
-rw-r--r--src/cython/include/Rips_complex_interface.h86
-rw-r--r--src/cython/include/Simplex_tree_interface.h149
-rw-r--r--src/cython/include/Strong_witness_complex_interface.h73
-rw-r--r--src/cython/include/Subsampling_interface.h119
-rw-r--r--src/cython/include/Tangential_complex_interface.h123
-rw-r--r--src/cython/include/Witness_complex_interface.h74
-rwxr-xr-xsrc/cython/test/test_alpha_complex.py86
-rwxr-xr-xsrc/cython/test/test_bottleneck_distance.py35
-rwxr-xr-xsrc/cython/test/test_cubical_complex.py86
-rwxr-xr-xsrc/cython/test/test_euclidean_witness_complex.py71
-rwxr-xr-xsrc/cython/test/test_rips_complex.py111
-rwxr-xr-xsrc/cython/test/test_simplex_tree.py133
-rwxr-xr-xsrc/cython/test/test_subsampling.py133
-rwxr-xr-xsrc/cython/test/test_tangential_complex.py52
-rwxr-xr-xsrc/cython/test/test_witness_complex.py62
-rw-r--r--src/debian/changelog5
-rw-r--r--src/debian/compat1
-rw-r--r--src/debian/control26
-rw-r--r--src/debian/copyright28
-rw-r--r--src/debian/docs2
-rwxr-xr-xsrc/debian/rules28
-rw-r--r--src/debian/source/format1
399 files changed, 43531 insertions, 3081 deletions
diff --git a/src/Alpha_complex/concept/SimplicialComplexForAlpha.h b/src/Alpha_complex/concept/SimplicialComplexForAlpha.h
new file mode 100644
index 00000000..2b8bff94
--- /dev/null
+++ b/src/Alpha_complex/concept/SimplicialComplexForAlpha.h
@@ -0,0 +1,89 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef CONCEPT_ALPHA_COMPLEX_SIMPLICIAL_COMPLEX_FOR_ALPHA_H_
+#define CONCEPT_ALPHA_COMPLEX_SIMPLICIAL_COMPLEX_FOR_ALPHA_H_
+
+namespace Gudhi {
+
+namespace alpha_complex {
+
+/** \brief The concept SimplicialComplexForAlpha describes the requirements for a type to implement a simplicial
+ * complex that can be created from an `Alpha_complex`.
+ */
+struct SimplicialComplexForAlpha {
+ /** Handle to specify a simplex. */
+ typedef unspecified Simplex_handle;
+ /** Handle to specify a vertex. Must be a non-negative integer. */
+ typedef unspecified Vertex_handle;
+ /** Handle to specify the simplex filtration value. */
+ typedef unspecified Filtration_value;
+
+ /** Returns the number of vertices in the simplicial complex. */
+ std::size_t num_vertices();
+
+ /** Sets the simplicial complex dimension. */
+ void set_dimension(int dimension);
+
+ /** Returns the dimension of the 'simplex'. */
+ int dimension(Simplex_handle simplex);
+
+ /** Assigns the given 'filtration' value to the 'simplex'. */
+ int assign_filtration(Simplex_handle simplex, Filtration_value filtration);
+
+ /** \brief Inserts a simplex (represented by a vector of Vertex_handle), and all of its subfaces, in the
+ * simplicial complex with the given 'filtration' value. */
+ void insert_simplex_and_subfaces(std::vector<Vertex_handle> const & vertex_range, Filtration_value filtration);
+
+ /** Browses the simplicial complex to make the filtration non-decreasing. */
+ void make_filtration_non_decreasing();
+
+ /** Prunes the simplicial complex above the 'filtration' value given as parameter. */
+ void prune_above_filtration(Filtration_value filtration);
+
+ /** \brief Range over the vertices of a simplex.
+ *
+ * 'value_type' must be 'Vertex_handle'.*/
+ typedef unspecified Simplex_vertex_range;
+
+ /** \brief Returns a range over vertices of a given
+ * simplex. */
+ Simplex_vertex_range simplex_vertex_range(Simplex_handle const & simplex);
+
+ /** \brief Range over the boundary simplices of a given simplex, in an arbitrary order.
+ *
+ * 'value_type' must be 'Simplex_handle'.*/
+ typedef unspecified Boundary_simplex_range;
+
+ /** \brief Returns a range over boundaries of a given simplex. */
+ Boundary_simplex_range boundary_simplex_range(Simplex_handle const & simplex);
+
+ /** \brief Return type of an insertion of a simplex
+ */
+ typedef unspecified Insertion_result_type;
+};
+
+} // namespace alpha_complex
+
+} // namespace Gudhi
+
+#endif // CONCEPT_ALPHA_COMPLEX_SIMPLICIAL_COMPLEX_FOR_ALPHA_H_
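
The concept above lists exactly what `Alpha_complex::create_complex` needs from the user-supplied complex. The snippet below is a minimal sketch, not library code: it exercises those operations on `Gudhi::Simplex_tree<>` (the model used by the examples further down in this diff), with a made-up simplex and placeholder filtration values.

#include <gudhi/Simplex_tree.h>

#include <iostream>
#include <limits>
#include <vector>

int main() {
  // Gudhi::Simplex_tree<> models SimplicialComplexForAlpha: it provides the
  // types and member functions required by the concept above.
  Gudhi::Simplex_tree<> complex;
  using Vertex_handle = Gudhi::Simplex_tree<>::Vertex_handle;

  // Cells coming from the Delaunay triangulation are inserted with an
  // unknown (NaN) filtration value.
  std::vector<Vertex_handle> triangle {0, 1, 2};
  complex.insert_simplex_and_subfaces(triangle, std::numeric_limits<double>::quiet_NaN());

  // Each simplex then receives its alpha value (placeholder values here).
  for (auto simplex_handle : complex.complex_simplex_range())
    complex.assign_filtration(simplex_handle, 0.5 * complex.dimension(simplex_handle));

  // Fix possible inconsistencies due to approximated circumradii, then remove
  // everything above the maximum alpha squared value.
  complex.make_filtration_non_decreasing();
  complex.prune_above_filtration(0.75);

  std::cout << complex.num_simplices() << " simplices remain after pruning" << std::endl;
  return 0;
}
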
diff --git a/src/Alpha_complex/doc/COPYRIGHT b/src/Alpha_complex/doc/COPYRIGHT
new file mode 100644
index 00000000..dbad2380
--- /dev/null
+++ b/src/Alpha_complex/doc/COPYRIGHT
@@ -0,0 +1,19 @@
+The files of this directory are part of the Gudhi Library. The Gudhi library
+(Geometric Understanding in Higher Dimensions) is a generic C++ library for
+computational topology.
+
+Author(s): Vincent Rouvreau
+
+Copyright (C) 2015 INRIA
+
+This program is free software: you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free Software
+Foundation, either version 3 of the License, or (at your option) any later
+version.
+
+This program is distributed in the hope that it will be useful, but WITHOUT
+ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License along with
+this program. If not, see <http://www.gnu.org/licenses/>.
diff --git a/src/Alpha_complex/doc/Intro_alpha_complex.h b/src/Alpha_complex/doc/Intro_alpha_complex.h
index f3126169..3ffdae7f 100644
--- a/src/Alpha_complex/doc/Intro_alpha_complex.h
+++ b/src/Alpha_complex/doc/Intro_alpha_complex.h
@@ -4,7 +4,7 @@
*
* Author(s): Vincent Rouvreau
*
- * Copyright (C) 2015 INRIA Saclay (France)
+ * Copyright (C) 2015 INRIA
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -39,7 +39,8 @@ namespace alpha_complex {
* Alpha_complex is a <a target="_blank" href="https://en.wikipedia.org/wiki/Simplicial_complex">simplicial complex</a>
* constructed from the finite cells of a Delaunay Triangulation.
*
- * The filtration value of each simplex is computed as the square of the circumradius of the simplex if the circumsphere is empty (the simplex is then said to be Gabriel), and as the minimum of the filtration
+ * The filtration value of each simplex is computed as the square of the circumradius of the simplex if the
+ * circumsphere is empty (the simplex is then said to be Gabriel), and as the minimum of the filtration
* values of the codimension 1 cofaces that make it not Gabriel otherwise.
*
* All simplices that have a filtration value strictly greater than a given alpha squared value are not inserted into
@@ -47,23 +48,24 @@ namespace alpha_complex {
*
* \image html "alpha_complex_representation.png" "Alpha-complex representation"
*
- * Alpha_complex is constructing a `Simplex_tree` using <a target="_blank"
+ * Alpha_complex constructs a <a target="_blank"
* href="http://doc.cgal.org/latest/Triangulation/index.html#Chapter_Triangulations">Delaunay Triangulation</a>
* \cite cgal:hdj-t-15b from <a target="_blank" href="http://www.cgal.org/">CGAL</a> (the Computational Geometry
- * Algorithms Library \cite cgal:eb-15b).
+ * Algorithms Library \cite cgal:eb-15b) and is able to create a `SimplicialComplexForAlpha`.
*
* The complex is a template class requiring an Epick_d <a target="_blank"
* href="http://doc.cgal.org/latest/Kernel_d/index.html#Chapter_dD_Geometry_Kernel">dD Geometry Kernel</a>
* \cite cgal:s-gkd-15b from CGAL as template parameter.
*
- * \remark When Alpha_complex is constructed with an infinite value of alpha, the complex is a Delaunay complex.
+ * \remark When the simplicial complex is constructed with an infinite value of alpha, the complex is a Delaunay
+ * complex.
*
* \section pointsexample Example from points
*
- * This example builds the Delaunay triangulation from the given points in a 2D static kernel, and initializes the
- * alpha complex with it.
+ * This example builds the Delaunay triangulation from the given points in a 2D static kernel, and creates a
+ * `Simplex_tree` with it.
*
- * Then, it is asked to display information about the alpha complex.
+ * Then, it is asked to display information about the simplicial complex.
*
* \include Alpha_complex/Alpha_complex_from_points.cpp
*
@@ -76,13 +78,15 @@ namespace alpha_complex {
*
* \include Alpha_complex/alphaoffreader_for_doc_60.txt
*
- * \section algorithm Algorithm
+ * \section createcomplexalgorithm Create complex algorithm
*
* \subsection datastructure Data structure
*
- * In order to build the alpha complex, first, a Simplex tree is built from the cells of a Delaunay Triangulation.
- * (The filtration value is set to NaN, which stands for unknown value):
- * \image html "alpha_complex_doc.png" "Simplex tree structure construction example"
+ * In order to create the simplicial complex, it is first built from the cells of the Delaunay Triangulation.
+ * The filtration values are set to NaN, which stands for unknown value.
+ *
+ * For example:
+ * \image html "alpha_complex_doc.png" "Simplicial complex structure construction example"
*
* \subsection filtrationcomputation Filtration value computation algorithm
*
@@ -129,12 +133,14 @@ namespace alpha_complex {
*
* \subsubsection nondecreasing Non decreasing filtration values
*
- * As the squared radii computed by CGAL are an approximation, it might happen that these alpha squared values do not quite define a proper filtration (i.e. non-decreasing with respect to inclusion).
- * We fix that up by calling `Simplex_tree::make_filtration_non_decreasing()`.
+ * As the squared radii computed by CGAL are an approximation, it might happen that these alpha squared values do not
+ * quite define a proper filtration (i.e. non-decreasing with respect to inclusion).
+ * We fix that up by calling `SimplicialComplexForAlpha::make_filtration_non_decreasing()`.
*
* \subsubsection pruneabove Prune above given filtration value
*
- * The simplex tree is pruned from the given maximum alpha squared value (cf. `Simplex_tree::prune_above_filtration()`).
+ * The simplex tree is pruned from the given maximum alpha squared value (cf.
+ * `SimplicialComplexForAlpha::prune_above_filtration()`).
* In the following example, the value is given by the user as argument of the program.
*
*
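
The Gabriel rule described above can be checked by hand on a tiny configuration. The snippet below is a self-contained sketch in plain C++, independent of GUDHI and CGAL, with arbitrarily chosen points: the edge [p, q] has a non-empty smallest circumsphere, so it is not Gabriel and takes the squared circumradius of its triangle coface (18.0625) instead of its own squared half-length (4).

#include <cmath>
#include <iostream>

struct Point2 { double x, y; };

double squared_distance(Point2 a, Point2 b) {
  return (a.x - b.x) * (a.x - b.x) + (a.y - b.y) * (a.y - b.y);
}

// Squared circumradius of the triangle (a, b, c), from R = abc / (4 * area).
double squared_circumradius(Point2 a, Point2 b, Point2 c) {
  double sq_a = squared_distance(b, c);
  double sq_b = squared_distance(a, c);
  double sq_c = squared_distance(a, b);
  double area = 0.5 * std::fabs((b.x - a.x) * (c.y - a.y) - (c.x - a.x) * (b.y - a.y));
  return sq_a * sq_b * sq_c / (16.0 * area * area);
}

int main() {
  Point2 p {0.0, 0.0}, q {4.0, 0.0}, r {2.0, 0.5};

  // Smallest circumsphere of the edge [p, q]: the ball of diameter pq,
  // centered at (2, 0) with squared radius |pq|^2 / 4 = 4.
  Point2 center {(p.x + q.x) / 2.0, (p.y + q.y) / 2.0};
  double edge_value = squared_distance(p, q) / 4.0;

  // r lies inside that ball, so [p, q] is not Gabriel...
  bool gabriel = squared_distance(center, r) >= edge_value;

  // ...and it inherits the filtration value of its codimension 1 coface [p, q, r].
  double triangle_value = squared_circumradius(p, q, r);
  double alpha_pq = gabriel ? edge_value : triangle_value;

  std::cout << "alpha([p,q]) = " << alpha_pq << std::endl;  // 18.0625 instead of 4
  return 0;
}
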
diff --git a/src/Alpha_complex/doc/alpha_complex_doc.ipe b/src/Alpha_complex/doc/alpha_complex_doc.ipe
index baf0d26a..71e5ce6c 100644
--- a/src/Alpha_complex/doc/alpha_complex_doc.ipe
+++ b/src/Alpha_complex/doc/alpha_complex_doc.ipe
@@ -1,7 +1,7 @@
<?xml version="1.0"?>
<!DOCTYPE ipe SYSTEM "ipe.dtd">
<ipe version="70107" creator="Ipe 7.1.10">
-<info created="D:20150603143945" modified="D:20160406112209"/>
+<info created="D:20150603143945" modified="D:20160921180211"/>
<ipestyle name="basic">
<symbol name="arrow/arc(spx)">
<path stroke="sym-stroke" fill="sym-stroke" pen="sym-pen">
@@ -278,35 +278,7 @@ h
320 580 l
280 660 l
</path>
-<path matrix="1 0 0 1 104.05 -60.1773" stroke="black">
-4 0 0 4 320 704 e
-</path>
-<path matrix="1 0 0 1 104.05 -60.1773" stroke="black">
-322.919 706.788 m
-317.189 701.058 l
-317.189 701.203 l
-</path>
-<path matrix="1 0 0 1 104.05 -60.1773" stroke="black">
-317.551 706.934 m
-322.629 701.058 l
-</path>
-<path matrix="1 0 0 1 50 0" stroke="black">
-240 620 m
-220 600 l
-</path>
-<path matrix="1 0 0 1 50 0" stroke="black">
-240 620 m
-220 640 l
-</path>
-<text transformations="translations" pos="180 620" stroke="black" type="label" width="97.274" height="6.926" depth="1.93" valign="baseline">Simplex tree structure</text>
-<path stroke="black">
-280 630 m
-170 630 l
-</path>
-<path stroke="black">
-280 610 m
-170 610 l
-</path>
+<text matrix="1 0 0 1 76 36" transformations="translations" pos="180 620" stroke="black" type="label" width="153.148" height="6.926" depth="1.93" valign="baseline">Simplicial complex data structure :</text>
<use matrix="1 0 0 1 -239.3 -10.1537" name="mark/fdisk(sfx)" pos="300 720" size="normal" stroke="black" fill="white"/>
<use matrix="1 0 0 1 -240 0" name="mark/fdisk(sfx)" pos="370 690" size="normal" stroke="black" fill="white"/>
<use matrix="1 0 0 1 -240 0" name="mark/fdisk(sfx)" pos="280 660" size="normal" stroke="black" fill="white"/>
@@ -314,282 +286,11 @@ h
<use matrix="1 0 0 1 -240 0" name="mark/fdisk(sfx)" pos="370 580" size="normal" stroke="black" fill="white"/>
<use matrix="1 0 0 1 -240 0" name="mark/fdisk(sfx)" pos="350 520" size="normal" stroke="black" fill="white"/>
<use matrix="1 0 0 1 -240 0" name="mark/fdisk(sfx)" pos="290 530" size="normal" stroke="black" fill="white"/>
-<text matrix="1 0 0 1 4 -96" transformations="translations" pos="304 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">2</text>
-<path matrix="1 0 0 1 4 -96" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<text matrix="1 0 0 1 24 -76" transformations="translations" pos="304 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">2</text>
-<path matrix="1 0 0 1 36 -76" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<path matrix="1 0 0 1 24 -76" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<text matrix="1 0 0 1 24 -76" transformations="translations" pos="316 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">4</text>
-<text matrix="1 0 0 1 12 -76" transformations="translations" pos="304 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">1</text>
-<path matrix="1 0 0 1 12 -76" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<path matrix="1 0 0 1 24 -96" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<text matrix="1 0 0 1 12 -96" transformations="translations" pos="316 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">4</text>
-<text matrix="1 0 0 1 64 -76" transformations="translations" pos="304 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">3</text>
-<path matrix="1 0 0 1 64 -76" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<text matrix="1 0 0 1 52 -76" transformations="translations" pos="304 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">2</text>
-<path matrix="1 0 0 1 52 -76" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<path matrix="1 0 0 1 48 -96" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<text matrix="1 0 0 1 36 -96" transformations="translations" pos="316 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">3</text>
-<text matrix="1 0 0 1 104 -76" transformations="translations" pos="304 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
-<path matrix="1 0 0 1 104 -76" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<text matrix="1 0 0 1 92 -76" transformations="translations" pos="304 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">4</text>
-<path matrix="1 0 0 1 92 -76" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<path matrix="1 0 0 1 96 -96" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<text matrix="1 0 0 1 84 -96" transformations="translations" pos="316 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
-<text matrix="1 0 0 1 148 -76" transformations="translations" pos="304 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
-<path matrix="1 0 0 1 148 -76" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<text matrix="1 0 0 1 136 -76" transformations="translations" pos="304 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">5</text>
-<path matrix="1 0 0 1 136 -76" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<path matrix="1 0 0 1 120 -76" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<text matrix="1 0 0 1 108 -76" transformations="translations" pos="316 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
-<path matrix="1 0 0 1 120 -76" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<text matrix="1 0 0 1 108 -76" transformations="translations" pos="316 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
-<path matrix="1 0 0 1 48 -96" stroke="black">
-292 716 m
-292 728 l
-316 728 l
-316 716 l
-h
-</path>
-<path matrix="1 0 0 1 48 -96" stroke="black">
-316 716 m
-316 728 l
-340 728 l
-340 716 l
-h
-</path>
-<path matrix="1 0 0 1 48 -96" stroke="black">
-340 716 m
-340 728 l
-364 728 l
-364 716 l
-h
-</path>
-<path matrix="1 0 0 1 48 -96" stroke="black">
-364 716 m
-364 728 l
-388 728 l
-388 716 l
-h
-</path>
-<path matrix="1 0 0 1 48 -96" stroke="black">
-388 716 m
-388 728 l
-412 728 l
-412 716 l
-h
-</path>
-<path matrix="1 0 0 1 48 -96" stroke="black">
-412 716 m
-412 728 l
-436 728 l
-436 716 l
-h
-</path>
-<path matrix="1 0 0 1 48 -96" stroke="black">
-436 716 m
-436 728 l
-460 728 l
-460 716 l
-h
-</path>
-<text matrix="1 0 0 1 48 -96" transformations="translations" pos="304 720" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">0</text>
-<text matrix="1 0 0 1 48 -96" transformations="translations" pos="328 720" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">1</text>
-<text matrix="1 0 0 1 48 -96" transformations="translations" pos="352 720" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">2</text>
-<text matrix="1 0 0 1 48 -96" transformations="translations" pos="376 720" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">3</text>
-<text matrix="1 0 0 1 48 -96" transformations="translations" pos="400 720" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">4</text>
-<text matrix="1 0 0 1 48 -96" transformations="translations" pos="424 720" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">5</text>
-<text matrix="1 0 0 1 48 -96" transformations="translations" pos="448 720" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
-<path matrix="1 0 0 1 -12 -96" stroke="black">
-436 708 m
-436 716 l
-</path>
-<path matrix="1 0 0 1 28 -96" stroke="black">
-364 708 m
-364 716 l
-</path>
-<path matrix="1 0 0 1 36 -96" stroke="black">
-364 688 m
-364 696 l
-</path>
-<path matrix="1 0 0 1 36 -96" stroke="black">
-320 688 m
-320 696 l
-</path>
-<path matrix="1 0 0 1 36 -96" stroke="black">
-296 708 m
-308 716 l
-308 716 l
-</path>
-<path matrix="1 0 0 1 48 -96" stroke="black">
-264 688 m
-268 696 l
-</path>
-<path matrix="1 0 0 1 40 -96" stroke="black">
-292 688 m
-292 696 l
-</path>
-<path matrix="1 0 0 1 36 -96" stroke="black">
-388 736 m
-388 728 l
-</path>
-<path stroke="black">
-372 612 m
-376 620 l
-</path>
-<path stroke="black">
-448 612 m
-448 620 l
-</path>
-<text matrix="1 0 0 1 80 -76" transformations="translations" pos="304 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">3</text>
-<path matrix="1 0 0 1 80 -76" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<path matrix="1 0 0 1 80 -96" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<text matrix="1 0 0 1 68 -96" transformations="translations" pos="316 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
-<path matrix="1 0 0 1 24 -96" stroke="black">
-364 688 m
-364 696 l
-</path>
-<path matrix="1 0 0 1 136 -96" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<text matrix="1 0 0 1 124 -96" transformations="translations" pos="316 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
-<path matrix="1 0 0 1 136 -96" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<text matrix="1 0 0 1 124 -96" transformations="translations" pos="316 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
-<path matrix="1 0 0 1 4 -116" stroke="black">
-436 708 m
-436 716 l
-</path>
-<path matrix="1 0 0 1 168 -76" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<text matrix="1 0 0 1 156 -76" transformations="translations" pos="316 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
-<path matrix="1 0 0 1 168 -76" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<text matrix="1 0 0 1 156 -76" transformations="translations" pos="316 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
-<path matrix="1 0 0 1 36 -96" stroke="black">
-436 708 m
-436 716 l
-</path>
+<text matrix="1 0 0 1 -20 -32" transformations="translations" pos="288 672" stroke="black" type="label" width="148.582" height="7.473" depth="2.49" valign="baseline">insert simplex and subfaces [0,1,2]</text>
+<text matrix="1 0 0 1 -20 -56" transformations="translations" pos="288 672" stroke="black" type="label" width="148.582" height="7.473" depth="2.49" valign="baseline">insert simplex and subfaces [1,2,3]</text>
+<text matrix="1 0 0 1 -20 -44" transformations="translations" pos="288 672" stroke="black" type="label" width="148.582" height="7.473" depth="2.49" valign="baseline">insert simplex and subfaces [0,2,4]</text>
+<text matrix="1 0 0 1 -20 -68" transformations="translations" pos="288 672" stroke="black" type="label" width="148.582" height="7.473" depth="2.49" valign="baseline">insert simplex and subfaces [2,3,6]</text>
+<text matrix="1 0 0 1 -20 -80" transformations="translations" pos="288 672" stroke="black" type="label" width="148.582" height="7.473" depth="2.49" valign="baseline">insert simplex and subfaces [2,4,6]</text>
+<text matrix="1 0 0 1 -20 -92" transformations="translations" pos="288 672" stroke="black" type="label" width="148.582" height="7.473" depth="2.49" valign="baseline">insert simplex and subfaces [4,5,6]</text>
</page>
</ipe>
diff --git a/src/Alpha_complex/doc/alpha_complex_doc.png b/src/Alpha_complex/doc/alpha_complex_doc.png
index 0b6201da..170bae80 100644
--- a/src/Alpha_complex/doc/alpha_complex_doc.png
+++ b/src/Alpha_complex/doc/alpha_complex_doc.png
Binary files differ
diff --git a/src/Alpha_complex/example/Alpha_complex_from_off.cpp b/src/Alpha_complex/example/Alpha_complex_from_off.cpp
index 7836d59a..d411e90a 100644
--- a/src/Alpha_complex/example/Alpha_complex_from_off.cpp
+++ b/src/Alpha_complex/example/Alpha_complex_from_off.cpp
@@ -1,4 +1,7 @@
#include <gudhi/Alpha_complex.h>
+// to construct a Simplex_tree from the alpha complex
+#include <gudhi/Simplex_tree.h>
+
#include <CGAL/Epick_d.h>
#include <iostream>
@@ -14,14 +17,14 @@ void usage(int nbArgs, char * const progName) {
int main(int argc, char **argv) {
if ((argc != 3) && (argc != 4)) usage(argc, (argv[0] - 1));
- std::string off_file_name(argv[1]);
- double alpha_square_max_value = atof(argv[2]);
+ std::string off_file_name {argv[1]};
+ double alpha_square_max_value {atof(argv[2])};
// ----------------------------------------------------------------------------
// Init of an alpha complex from an OFF file
// ----------------------------------------------------------------------------
- typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Kernel;
- Gudhi::alpha_complex::Alpha_complex<Kernel> alpha_complex_from_file(off_file_name, alpha_square_max_value);
+ using Kernel = CGAL::Epick_d< CGAL::Dynamic_dimension_tag >;
+ Gudhi::alpha_complex::Alpha_complex<Kernel> alpha_complex_from_file(off_file_name);
std::streambuf* streambufffer;
std::ofstream ouput_file_stream;
@@ -33,23 +36,27 @@ int main(int argc, char **argv) {
streambufffer = std::cout.rdbuf();
}
- std::ostream output_stream(streambufffer);
-
- // ----------------------------------------------------------------------------
- // Display information about the alpha complex
- // ----------------------------------------------------------------------------
- output_stream << "Alpha complex is of dimension " << alpha_complex_from_file.dimension() <<
- " - " << alpha_complex_from_file.num_simplices() << " simplices - " <<
- alpha_complex_from_file.num_vertices() << " vertices." << std::endl;
-
- output_stream << "Iterator on alpha complex simplices in the filtration order, with [filtration value]:" << std::endl;
- for (auto f_simplex : alpha_complex_from_file.filtration_simplex_range()) {
- output_stream << " ( ";
- for (auto vertex : alpha_complex_from_file.simplex_vertex_range(f_simplex)) {
- output_stream << vertex << " ";
+ Gudhi::Simplex_tree<> simplex;
+ if (alpha_complex_from_file.create_complex(simplex, alpha_square_max_value)) {
+ std::ostream output_stream(streambufffer);
+
+ // ----------------------------------------------------------------------------
+ // Display information about the alpha complex
+ // ----------------------------------------------------------------------------
+ output_stream << "Alpha complex is of dimension " << simplex.dimension() <<
+ " - " << simplex.num_simplices() << " simplices - " <<
+ simplex.num_vertices() << " vertices." << std::endl;
+
+ output_stream << "Iterator on alpha complex simplices in the filtration order, with [filtration value]:" <<
+ std::endl;
+ for (auto f_simplex : simplex.filtration_simplex_range()) {
+ output_stream << " ( ";
+ for (auto vertex : simplex.simplex_vertex_range(f_simplex)) {
+ output_stream << vertex << " ";
+ }
+ output_stream << ") -> " << "[" << simplex.filtration(f_simplex) << "] ";
+ output_stream << std::endl;
}
- output_stream << ") -> " << "[" << alpha_complex_from_file.filtration(f_simplex) << "] ";
- output_stream << std::endl;
}
ouput_file_stream.close();
return 0;
diff --git a/src/Alpha_complex/example/Alpha_complex_from_points.cpp b/src/Alpha_complex/example/Alpha_complex_from_points.cpp
index 49f77276..c19f7cc8 100644
--- a/src/Alpha_complex/example/Alpha_complex_from_points.cpp
+++ b/src/Alpha_complex/example/Alpha_complex_from_points.cpp
@@ -1,14 +1,17 @@
-#include <CGAL/Epick_d.h>
#include <gudhi/Alpha_complex.h>
+// to construct a Simplex_tree from the alpha complex
+#include <gudhi/Simplex_tree.h>
+
+#include <CGAL/Epick_d.h>
#include <iostream>
#include <string>
#include <vector>
#include <limits> // for numeric limits
-typedef CGAL::Epick_d< CGAL::Dimension_tag<2> > Kernel;
-typedef Kernel::Point_d Point;
-typedef std::vector<Point> Vector_of_points;
+using Kernel = CGAL::Epick_d< CGAL::Dimension_tag<2> >;
+using Point = Kernel::Point_d;
+using Vector_of_points = std::vector<Point>;
void usage(int nbArgs, char * const progName) {
std::cerr << "Error: Number of arguments (" << nbArgs << ") is not correct\n";
@@ -21,7 +24,7 @@ int main(int argc, char **argv) {
if ((argc != 1) && (argc != 2)) usage(argc, (argv[0] - 1));
// Delaunay complex if alpha_square_max_value is not given by the user.
- double alpha_square_max_value = std::numeric_limits<double>::infinity();
+ double alpha_square_max_value {std::numeric_limits<double>::infinity()};
if (argc == 2)
alpha_square_max_value = atof(argv[1]);
@@ -40,23 +43,26 @@ int main(int argc, char **argv) {
// ----------------------------------------------------------------------------
// Init of an alpha complex from the list of points
// ----------------------------------------------------------------------------
- Gudhi::alpha_complex::Alpha_complex<Kernel> alpha_complex_from_points(points, alpha_square_max_value);
+ Gudhi::alpha_complex::Alpha_complex<Kernel> alpha_complex_from_points(points);
- // ----------------------------------------------------------------------------
- // Display information about the alpha complex
- // ----------------------------------------------------------------------------
- std::cout << "Alpha complex is of dimension " << alpha_complex_from_points.dimension() <<
- " - " << alpha_complex_from_points.num_simplices() << " simplices - " <<
- alpha_complex_from_points.num_vertices() << " vertices." << std::endl;
-
- std::cout << "Iterator on alpha complex simplices in the filtration order, with [filtration value]:" << std::endl;
- for (auto f_simplex : alpha_complex_from_points.filtration_simplex_range()) {
- std::cout << " ( ";
- for (auto vertex : alpha_complex_from_points.simplex_vertex_range(f_simplex)) {
- std::cout << vertex << " ";
+ Gudhi::Simplex_tree<> simplex;
+ if (alpha_complex_from_points.create_complex(simplex, alpha_square_max_value)) {
+ // ----------------------------------------------------------------------------
+ // Display information about the alpha complex
+ // ----------------------------------------------------------------------------
+ std::cout << "Alpha complex is of dimension " << simplex.dimension() <<
+ " - " << simplex.num_simplices() << " simplices - " <<
+ simplex.num_vertices() << " vertices." << std::endl;
+
+ std::cout << "Iterator on alpha complex simplices in the filtration order, with [filtration value]:" << std::endl;
+ for (auto f_simplex : simplex.filtration_simplex_range()) {
+ std::cout << " ( ";
+ for (auto vertex : simplex.simplex_vertex_range(f_simplex)) {
+ std::cout << vertex << " ";
+ }
+ std::cout << ") -> " << "[" << simplex.filtration(f_simplex) << "] ";
+ std::cout << std::endl;
}
- std::cout << ") -> " << "[" << alpha_complex_from_points.filtration(f_simplex) << "] ";
- std::cout << std::endl;
}
return 0;
}
diff --git a/src/Alpha_complex/example/CMakeLists.txt b/src/Alpha_complex/example/CMakeLists.txt
index 71a95d61..b9bcdb55 100644
--- a/src/Alpha_complex/example/CMakeLists.txt
+++ b/src/Alpha_complex/example/CMakeLists.txt
@@ -2,32 +2,28 @@ cmake_minimum_required(VERSION 2.6)
project(Alpha_complex_examples)
# need CGAL 4.7
-# cmake -DCGAL_DIR=~/workspace/CGAL-4.7-Ic-41 ../../..
-if(CGAL_FOUND)
- if (NOT CGAL_VERSION VERSION_LESS 4.7.0)
- if (EIGEN3_FOUND)
- add_executable ( alphapoints Alpha_complex_from_points.cpp )
- target_link_libraries(alphapoints ${Boost_SYSTEM_LIBRARY} ${Boost_THREAD_LIBRARY} ${CGAL_LIBRARY})
- add_executable ( alphaoffreader Alpha_complex_from_off.cpp )
- target_link_libraries(alphaoffreader ${Boost_SYSTEM_LIBRARY} ${Boost_THREAD_LIBRARY} ${CGAL_LIBRARY})
- if (TBB_FOUND)
- target_link_libraries(alphapoints ${TBB_LIBRARIES})
- target_link_libraries(alphaoffreader ${TBB_LIBRARIES})
- endif()
+# cmake -DCGAL_DIR=~/workspace/CGAL-4.7 ..
+if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
+ add_executable ( alphapoints Alpha_complex_from_points.cpp )
+ target_link_libraries(alphapoints ${Boost_SYSTEM_LIBRARY} ${Boost_THREAD_LIBRARY} ${CGAL_LIBRARY})
+ add_executable ( alphaoffreader Alpha_complex_from_off.cpp )
+ target_link_libraries(alphaoffreader ${Boost_SYSTEM_LIBRARY} ${Boost_THREAD_LIBRARY} ${CGAL_LIBRARY})
+ if (TBB_FOUND)
+ target_link_libraries(alphapoints ${TBB_LIBRARIES})
+ target_link_libraries(alphaoffreader ${TBB_LIBRARIES})
+ endif()
- add_test(alphapoints ${CMAKE_CURRENT_BINARY_DIR}/alphapoints)
- # Do not forget to copy test files in current binary dir
- file(COPY "${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
- add_test(alphaoffreader_doc_60 ${CMAKE_CURRENT_BINARY_DIR}/alphaoffreader alphacomplexdoc.off 60.0 ${CMAKE_CURRENT_BINARY_DIR}/alphaoffreader_result_60.txt)
- add_test(alphaoffreader_doc_32 ${CMAKE_CURRENT_BINARY_DIR}/alphaoffreader alphacomplexdoc.off 32.0 ${CMAKE_CURRENT_BINARY_DIR}/alphaoffreader_result_32.txt)
- if (DIFF_PATH)
- # Do not forget to copy test results files in current binary dir
- file(COPY "alphaoffreader_for_doc_32.txt" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
- file(COPY "alphaoffreader_for_doc_60.txt" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+ add_test(alphapoints ${CMAKE_CURRENT_BINARY_DIR}/alphapoints)
+ # Do not forget to copy test files in current binary dir
+ file(COPY "${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+ add_test(alphaoffreader_doc_60 ${CMAKE_CURRENT_BINARY_DIR}/alphaoffreader alphacomplexdoc.off 60.0 ${CMAKE_CURRENT_BINARY_DIR}/alphaoffreader_result_60.txt)
+ add_test(alphaoffreader_doc_32 ${CMAKE_CURRENT_BINARY_DIR}/alphaoffreader alphacomplexdoc.off 32.0 ${CMAKE_CURRENT_BINARY_DIR}/alphaoffreader_result_32.txt)
+ if (DIFF_PATH)
+ # Do not forget to copy test results files in current binary dir
+ file(COPY "alphaoffreader_for_doc_32.txt" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+ file(COPY "alphaoffreader_for_doc_60.txt" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
- add_test(alphaoffreader_doc_60_diff_files ${DIFF_PATH} ${CMAKE_CURRENT_BINARY_DIR}/alphaoffreader_result_60.txt ${CMAKE_CURRENT_BINARY_DIR}/alphaoffreader_for_doc_60.txt)
- add_test(alphaoffreader_doc_32_diff_files ${DIFF_PATH} ${CMAKE_CURRENT_BINARY_DIR}/alphaoffreader_result_32.txt ${CMAKE_CURRENT_BINARY_DIR}/alphaoffreader_for_doc_32.txt)
- endif()
- endif(EIGEN3_FOUND)
- endif(NOT CGAL_VERSION VERSION_LESS 4.7.0)
-endif(CGAL_FOUND)
+ add_test(alphaoffreader_doc_60_diff_files ${DIFF_PATH} ${CMAKE_CURRENT_BINARY_DIR}/alphaoffreader_result_60.txt ${CMAKE_CURRENT_BINARY_DIR}/alphaoffreader_for_doc_60.txt)
+ add_test(alphaoffreader_doc_32_diff_files ${DIFF_PATH} ${CMAKE_CURRENT_BINARY_DIR}/alphaoffreader_result_32.txt ${CMAKE_CURRENT_BINARY_DIR}/alphaoffreader_for_doc_32.txt)
+ endif()
+endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex.h b/src/Alpha_complex/include/gudhi/Alpha_complex.h
index 2c95ceb4..1ff95c3d 100644
--- a/src/Alpha_complex/include/gudhi/Alpha_complex.h
+++ b/src/Alpha_complex/include/gudhi/Alpha_complex.h
@@ -4,7 +4,7 @@
*
* Author(s): Vincent Rouvreau
*
- * Copyright (C) 2015 INRIA Saclay (France)
+ * Copyright (C) 2015 INRIA
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -23,9 +23,6 @@
#ifndef ALPHA_COMPLEX_H_
#define ALPHA_COMPLEX_H_
-// to construct a simplex_tree from Delaunay_triangulation
-#include <gudhi/graph_simplicial_complex.h>
-#include <gudhi/Simplex_tree.h>
#include <gudhi/Debug_utils.h>
// to construct Alpha_complex from a OFF file of points
#include <gudhi/Points_off_io.h>
@@ -36,6 +33,7 @@
#include <CGAL/Delaunay_triangulation.h>
#include <CGAL/Epick_d.h>
#include <CGAL/Spatial_sort_traits_adapter_d.h>
+#include <CGAL/property_map.h> // for CGAL::Identity_property_map
#include <iostream>
#include <vector>
@@ -57,9 +55,9 @@ namespace alpha_complex {
* \ingroup alpha_complex
*
* \details
- * The data structure can be constructed from a CGAL Delaunay triangulation (for more informations on CGAL Delaunay
- * triangulation, please refer to the corresponding chapter in page http://doc.cgal.org/latest/Triangulation/) or from
- * an OFF file (cf. Points_off_reader).
+ * The data structure constructs a CGAL Delaunay triangulation (for more information on CGAL Delaunay
+ * triangulation, please refer to the corresponding chapter at http://doc.cgal.org/latest/Triangulation/) from a
+ * range of points or from an OFF file (cf. Points_off_reader).
*
* Please refer to \ref alpha_complex for examples.
*
@@ -74,7 +72,7 @@ namespace alpha_complex {
*
*/
template<class Kernel = CGAL::Epick_d<CGAL::Dynamic_dimension_tag>>
-class Alpha_complex : public Simplex_tree<> {
+class Alpha_complex {
public:
// Add an int in TDS to save point index in the structure
typedef CGAL::Triangulation_data_structure<typename Kernel::Dimension,
@@ -90,13 +88,6 @@ class Alpha_complex : public Simplex_tree<> {
typedef Kernel Geom_traits;
private:
- // From Simplex_tree
- // Type required to insert into a simplex_tree (with or without subfaces).
- typedef std::vector<Vertex_handle> Vector_vertex;
-
- // Simplex_result is the type returned from simplex_tree insert function.
- typedef typename std::pair<Simplex_handle, bool> Simplex_result;
-
typedef typename Kernel::Compute_squared_radius_d Squared_Radius;
typedef typename Kernel::Side_of_bounded_sphere_d Is_Gabriel;
typedef typename Kernel::Point_dimension_d Point_Dimension;
@@ -111,7 +102,7 @@ class Alpha_complex : public Simplex_tree<> {
typedef typename Delaunay_triangulation::size_type size_type;
// Map type to switch from simplex tree vertex handle to CGAL vertex iterator.
- typedef typename std::map< Vertex_handle, CGAL_vertex_iterator > Vector_vertex_iterator;
+ typedef typename std::map< std::size_t, CGAL_vertex_iterator > Vector_vertex_iterator;
private:
/** \brief Vertex iterator vector to switch from simplex tree vertex handle to CGAL vertex iterator.
@@ -124,16 +115,15 @@ class Alpha_complex : public Simplex_tree<> {
public:
/** \brief Alpha_complex constructor from an OFF file name.
- * Uses the Delaunay_triangulation_off_reader to construct the Delaunay triangulation required to initialize
+ *
+ * Uses the Points_off_reader to construct the Delaunay triangulation required to initialize
* the Alpha_complex.
*
* Duplicate points are inserted once in the Alpha_complex. This is the reason why the vertices may be not contiguous.
*
* @param[in] off_file_name OFF file [path and] name.
- * @param[in] max_alpha_square maximum for alpha square value. Default value is +\f$\infty\f$.
*/
- Alpha_complex(const std::string& off_file_name,
- Filtration_value max_alpha_square = std::numeric_limits<Filtration_value>::infinity())
+ Alpha_complex(const std::string& off_file_name)
: triangulation_(nullptr) {
Gudhi::Points_off_reader<Point_d> off_reader(off_file_name);
if (!off_reader.is_valid()) {
@@ -141,7 +131,7 @@ class Alpha_complex : public Simplex_tree<> {
exit(-1); // ----- >>
}
- init_from_range(off_reader.get_point_cloud(), max_alpha_square);
+ init_from_range(off_reader.get_point_cloud());
}
/** \brief Alpha_complex constructor from a list of points.
@@ -149,23 +139,17 @@ class Alpha_complex : public Simplex_tree<> {
* Duplicate points are inserted once in the Alpha_complex. This is the reason why the vertices may be not contiguous.
*
* @param[in] points Range of points to triangulate. Points must be in Kernel::Point_d
- * @param[in] max_alpha_square maximum for alpha square value. Default value is +\f$\infty\f$.
*
* The type InputPointRange must be a range for which std::begin and
* std::end return input iterators on a Kernel::Point_d.
- *
- * @post Compare num_simplices with InputPointRange points number (not the same in case of duplicate points).
*/
template<typename InputPointRange >
- Alpha_complex(const InputPointRange& points,
- Filtration_value max_alpha_square = std::numeric_limits<Filtration_value>::infinity())
+ Alpha_complex(const InputPointRange& points)
: triangulation_(nullptr) {
- init_from_range(points, max_alpha_square);
+ init_from_range(points);
}
- /** \brief Alpha_complex destructor.
- *
- * @warning Deletes the Delaunay triangulation.
+ /** \brief Alpha_complex destructor deletes the Delaunay triangulation.
*/
~Alpha_complex() {
delete triangulation_;
@@ -183,15 +167,24 @@ class Alpha_complex : public Simplex_tree<> {
* @return The point found.
* @exception std::out_of_range In case vertex is not found (cf. std::vector::at).
*/
- Point_d get_point(Vertex_handle vertex) const {
+ const Point_d& get_point(std::size_t vertex) const {
return vertex_handle_to_iterator_.at(vertex)->point();
}
+ /** \brief number_of_vertices returns the number of vertices (same as the number of points).
+ *
+ * @return The number of vertices.
+ */
+ std::size_t number_of_vertices() const {
+ return vertex_handle_to_iterator_.size();
+ }
+
private:
template<typename InputPointRange >
- void init_from_range(const InputPointRange& points, Filtration_value max_alpha_square) {
+ void init_from_range(const InputPointRange& points) {
auto first = std::begin(points);
auto last = std::end(points);
+
if (first != last) {
// point_dimension function initialization
Point_Dimension point_dimension = kernel_.point_dimension_d_object();
@@ -199,90 +192,107 @@ class Alpha_complex : public Simplex_tree<> {
// Delaunay triangulation is point dimension.
triangulation_ = new Delaunay_triangulation(point_dimension(*first));
- std::vector<Point_d> points(first, last);
+ std::vector<Point_d> point_cloud(first, last);
// Creates a vector {0, 1, ..., N-1}
std::vector<std::ptrdiff_t> indices(boost::counting_iterator<std::ptrdiff_t>(0),
- boost::counting_iterator<std::ptrdiff_t>(points.size()));
+ boost::counting_iterator<std::ptrdiff_t>(point_cloud.size()));
+
+ typedef boost::iterator_property_map<typename std::vector<Point_d>::iterator,
+ CGAL::Identity_property_map<std::ptrdiff_t>> Point_property_map;
+ typedef CGAL::Spatial_sort_traits_adapter_d<Kernel, Point_property_map> Search_traits_d;
- // Sort indices considering CGAL spatial sort
- typedef CGAL::Spatial_sort_traits_adapter_d<Kernel, Point_d*> Search_traits_d;
- spatial_sort(indices.begin(), indices.end(), Search_traits_d(&(points[0])));
+ CGAL::spatial_sort(indices.begin(), indices.end(), Search_traits_d(std::begin(point_cloud)));
typename Delaunay_triangulation::Full_cell_handle hint;
for (auto index : indices) {
- typename Delaunay_triangulation::Vertex_handle pos = triangulation_->insert(points[index], hint);
+ typename Delaunay_triangulation::Vertex_handle pos = triangulation_->insert(point_cloud[index], hint);
// Save index value as data to retrieve it after insertion
pos->data() = index;
hint = pos->full_cell();
}
- init(max_alpha_square);
+ // --------------------------------------------------------------------------------------------
+ // double map to retrieve simplex tree vertex handles from CGAL vertex iterator and vice versa
+ // Loop on triangulation vertices list
+ for (CGAL_vertex_iterator vit = triangulation_->vertices_begin(); vit != triangulation_->vertices_end(); ++vit) {
+ if (!triangulation_->is_infinite(*vit)) {
+#ifdef DEBUG_TRACES
+ std::cout << "Vertex insertion - " << vit->data() << " -> " << vit->point() << std::endl;
+#endif // DEBUG_TRACES
+ vertex_handle_to_iterator_.emplace(vit->data(), vit);
+ }
+ }
+ // --------------------------------------------------------------------------------------------
}
}
- /** \brief Initialize the Alpha_complex from the Delaunay triangulation.
+ public:
+ template <typename SimplicialComplexForAlpha>
+ bool create_complex(SimplicialComplexForAlpha& complex) {
+ typedef typename SimplicialComplexForAlpha::Filtration_value Filtration_value;
+ return create_complex(complex, std::numeric_limits<Filtration_value>::infinity());
+ }
+
+ /** \brief Inserts the Delaunay triangulation into the simplicial complex.
+ * It also computes the filtration values according to the \ref createcomplexalgorithm.
*
- * @param[in] max_alpha_square maximum for alpha square value.
+ * \tparam SimplicialComplexForAlpha must meet the `SimplicialComplexForAlpha` concept.
*
- * @warning Delaunay triangulation must be already constructed with at least one vertex and dimension must be more
- * than 0.
+ * @param[in] complex SimplicialComplexForAlpha to be created.
+ * @param[in] max_alpha_square maximum for alpha square value. Default value is +\f$\infty\f$.
+ *
+ * @return true if creation succeeds, false otherwise.
+ *
+ * @pre Delaunay triangulation must be already constructed with dimension strictly greater than 0.
+ * @pre The simplicial complex must be empty (no vertices).
*
* Initialization can be launched once.
*/
- void init(Filtration_value max_alpha_square) {
+ template <typename SimplicialComplexForAlpha, typename Filtration_value>
+ bool create_complex(SimplicialComplexForAlpha& complex, Filtration_value max_alpha_square) {
+ // From SimplicialComplexForAlpha type required to insert into a simplicial complex (with or without subfaces).
+ typedef typename SimplicialComplexForAlpha::Vertex_handle Vertex_handle;
+ typedef typename SimplicialComplexForAlpha::Simplex_handle Simplex_handle;
+ typedef std::vector<Vertex_handle> Vector_vertex;
+
if (triangulation_ == nullptr) {
- std::cerr << "Alpha_complex init - Cannot init from a NULL triangulation\n";
- return; // ----- >>
- }
- if (triangulation_->number_of_vertices() < 1) {
- std::cerr << "Alpha_complex init - Cannot init from a triangulation without vertices\n";
- return; // ----- >>
+ std::cerr << "Alpha_complex cannot create_complex from a NULL triangulation\n";
+ return false; // ----- >>
}
if (triangulation_->maximal_dimension() < 1) {
- std::cerr << "Alpha_complex init - Cannot init from a zero-dimension triangulation\n";
- return; // ----- >>
+ std::cerr << "Alpha_complex cannot create_complex from a zero-dimension triangulation\n";
+ return false; // ----- >>
}
- if (num_vertices() > 0) {
- std::cerr << "Alpha_complex init - Cannot init twice\n";
- return; // ----- >>
+ if (complex.num_vertices() > 0) {
+ std::cerr << "Alpha_complex create_complex - complex is not empty\n";
+ return false; // ----- >>
}
- set_dimension(triangulation_->maximal_dimension());
-
- // --------------------------------------------------------------------------------------------
- // double map to retrieve simplex tree vertex handles from CGAL vertex iterator and vice versa
- // Loop on triangulation vertices list
- for (CGAL_vertex_iterator vit = triangulation_->vertices_begin(); vit != triangulation_->vertices_end(); ++vit) {
- if (!triangulation_->is_infinite(*vit)) {
-#ifdef DEBUG_TRACES
- std::cout << "Vertex insertion - " << vit->data() << " -> " << vit->point() << std::endl;
-#endif // DEBUG_TRACES
- vertex_handle_to_iterator_.emplace(vit->data(), vit);
- }
- }
- // --------------------------------------------------------------------------------------------
+ complex.set_dimension(triangulation_->maximal_dimension());
// --------------------------------------------------------------------------------------------
// Simplex_tree construction from loop on triangulation finite full cells list
- for (auto cit = triangulation_->finite_full_cells_begin(); cit != triangulation_->finite_full_cells_end(); ++cit) {
- Vector_vertex vertexVector;
+ if (triangulation_->number_of_vertices() > 0) {
+ for (auto cit = triangulation_->finite_full_cells_begin(); cit != triangulation_->finite_full_cells_end(); ++cit) {
+ Vector_vertex vertexVector;
#ifdef DEBUG_TRACES
- std::cout << "Simplex_tree insertion ";
+ std::cout << "Simplex_tree insertion ";
#endif // DEBUG_TRACES
- for (auto vit = cit->vertices_begin(); vit != cit->vertices_end(); ++vit) {
- if (*vit != nullptr) {
+ for (auto vit = cit->vertices_begin(); vit != cit->vertices_end(); ++vit) {
+ if (*vit != nullptr) {
#ifdef DEBUG_TRACES
- std::cout << " " << (*vit)->data();
+ std::cout << " " << (*vit)->data();
#endif // DEBUG_TRACES
- // Vector of vertex construction for simplex_tree structure
- vertexVector.push_back((*vit)->data());
+ // Vector of vertex construction for simplex_tree structure
+ vertexVector.push_back((*vit)->data());
+ }
}
- }
#ifdef DEBUG_TRACES
- std::cout << std::endl;
+ std::cout << std::endl;
#endif // DEBUG_TRACES
- // Insert each simplex and its subfaces in the simplex tree - filtration is NaN
- insert_simplex_and_subfaces(vertexVector, std::numeric_limits<double>::quiet_NaN());
+ // Insert each simplex and its subfaces in the simplex tree - filtration is NaN
+ complex.insert_simplex_and_subfaces(vertexVector, std::numeric_limits<double>::quiet_NaN());
+ }
}
// --------------------------------------------------------------------------------------------
@@ -290,16 +300,16 @@ class Alpha_complex : public Simplex_tree<> {
// Will be re-used many times
Vector_of_CGAL_points pointVector;
// ### For i : d -> 0
- for (int decr_dim = dimension(); decr_dim >= 0; decr_dim--) {
+ for (int decr_dim = triangulation_->maximal_dimension(); decr_dim >= 0; decr_dim--) {
// ### Foreach Sigma of dim i
- for (auto f_simplex : skeleton_simplex_range(decr_dim)) {
- int f_simplex_dim = dimension(f_simplex);
+ for (Simplex_handle f_simplex : complex.skeleton_simplex_range(decr_dim)) {
+ int f_simplex_dim = complex.dimension(f_simplex);
if (decr_dim == f_simplex_dim) {
pointVector.clear();
#ifdef DEBUG_TRACES
std::cout << "Sigma of dim " << decr_dim << " is";
#endif // DEBUG_TRACES
- for (auto vertex : simplex_vertex_range(f_simplex)) {
+ for (auto vertex : complex.simplex_vertex_range(f_simplex)) {
pointVector.push_back(get_point(vertex));
#ifdef DEBUG_TRACES
std::cout << " " << vertex;
@@ -309,7 +319,7 @@ class Alpha_complex : public Simplex_tree<> {
std::cout << std::endl;
#endif // DEBUG_TRACES
// ### If filt(Sigma) is NaN : filt(Sigma) = alpha(Sigma)
- if (std::isnan(filtration(f_simplex))) {
+ if (std::isnan(complex.filtration(f_simplex))) {
Filtration_value alpha_complex_filtration = 0.0;
// No need to compute squared_radius on a single point - alpha is 0.0
if (f_simplex_dim > 0) {
@@ -318,12 +328,12 @@ class Alpha_complex : public Simplex_tree<> {
alpha_complex_filtration = squared_radius(pointVector.begin(), pointVector.end());
}
- assign_filtration(f_simplex, alpha_complex_filtration);
+ complex.assign_filtration(f_simplex, alpha_complex_filtration);
#ifdef DEBUG_TRACES
- std::cout << "filt(Sigma) is NaN : filt(Sigma) =" << filtration(f_simplex) << std::endl;
+ std::cout << "filt(Sigma) is NaN : filt(Sigma) =" << complex.filtration(f_simplex) << std::endl;
#endif // DEBUG_TRACES
}
- propagate_alpha_filtration(f_simplex, decr_dim);
+ propagate_alpha_filtration(complex, f_simplex, decr_dim);
}
}
}
@@ -331,36 +341,41 @@ class Alpha_complex : public Simplex_tree<> {
// --------------------------------------------------------------------------------------------
// As Alpha value is an approximation, we have to make filtration non decreasing while increasing the dimension
- bool modified_filt = make_filtration_non_decreasing();
+ complex.make_filtration_non_decreasing();
// Remove all simplices that have a filtration value greater than max_alpha_square
- // Remark: prune_above_filtration does not require initialize_filtration to be done before.
- modified_filt |= prune_above_filtration(max_alpha_square);
- if (modified_filt) {
- initialize_filtration();
- }
+ complex.prune_above_filtration(max_alpha_square);
// --------------------------------------------------------------------------------------------
+ return true;
}
- template<typename Simplex_handle>
- void propagate_alpha_filtration(Simplex_handle f_simplex, int decr_dim) {
+ private:
+ template <typename SimplicialComplexForAlpha, typename Simplex_handle>
+ void propagate_alpha_filtration(SimplicialComplexForAlpha& complex, Simplex_handle f_simplex, int decr_dim) {
+ // From SimplicialComplexForAlpha type required to assign filtration values.
+ typedef typename SimplicialComplexForAlpha::Filtration_value Filtration_value;
+#ifdef DEBUG_TRACES
+ typedef typename SimplicialComplexForAlpha::Vertex_handle Vertex_handle;
+#endif // DEBUG_TRACES
+
// ### Foreach Tau face of Sigma
- for (auto f_boundary : boundary_simplex_range(f_simplex)) {
+ for (auto f_boundary : complex.boundary_simplex_range(f_simplex)) {
#ifdef DEBUG_TRACES
std::cout << " | --------------------------------------------------\n";
std::cout << " | Tau ";
- for (auto vertex : simplex_vertex_range(f_boundary)) {
+ for (auto vertex : complex.simplex_vertex_range(f_boundary)) {
std::cout << vertex << " ";
}
std::cout << "is a face of Sigma\n";
- std::cout << " | isnan(filtration(Tau)=" << std::isnan(filtration(f_boundary)) << std::endl;
+ std::cout << " | isnan(complex.filtration(Tau)=" << std::isnan(complex.filtration(f_boundary)) << std::endl;
#endif // DEBUG_TRACES
// ### If filt(Tau) is not NaN
- if (!std::isnan(filtration(f_boundary))) {
+ if (!std::isnan(complex.filtration(f_boundary))) {
// ### filt(Tau) = fmin(filt(Tau), filt(Sigma))
- Filtration_value alpha_complex_filtration = fmin(filtration(f_boundary), filtration(f_simplex));
- assign_filtration(f_boundary, alpha_complex_filtration);
+ Filtration_value alpha_complex_filtration = fmin(complex.filtration(f_boundary),
+ complex.filtration(f_simplex));
+ complex.assign_filtration(f_boundary, alpha_complex_filtration);
#ifdef DEBUG_TRACES
- std::cout << " | filt(Tau) = fmin(filt(Tau), filt(Sigma)) = " << filtration(f_boundary) << std::endl;
+ std::cout << " | filt(Tau) = fmin(filt(Tau), filt(Sigma)) = " << complex.filtration(f_boundary) << std::endl;
#endif // DEBUG_TRACES
// ### Else
} else {
@@ -372,12 +387,12 @@ class Alpha_complex : public Simplex_tree<> {
#ifdef DEBUG_TRACES
Vertex_handle vertexForGabriel = Vertex_handle();
#endif // DEBUG_TRACES
- for (auto vertex : simplex_vertex_range(f_boundary)) {
+ for (auto vertex : complex.simplex_vertex_range(f_boundary)) {
pointVector.push_back(get_point(vertex));
}
// Retrieve the Sigma point that is not part of Tau - parameter for is_gabriel function
Point_d point_for_gabriel;
- for (auto vertex : simplex_vertex_range(f_simplex)) {
+ for (auto vertex : complex.simplex_vertex_range(f_simplex)) {
point_for_gabriel = get_point(vertex);
if (std::find(pointVector.begin(), pointVector.end(), point_for_gabriel) == pointVector.end()) {
#ifdef DEBUG_TRACES
@@ -398,10 +413,10 @@ class Alpha_complex : public Simplex_tree<> {
// ### If Tau is not Gabriel of Sigma
if (false == is_gab) {
// ### filt(Tau) = filt(Sigma)
- Filtration_value alpha_complex_filtration = filtration(f_simplex);
- assign_filtration(f_boundary, alpha_complex_filtration);
+ Filtration_value alpha_complex_filtration = complex.filtration(f_simplex);
+ complex.assign_filtration(f_boundary, alpha_complex_filtration);
#ifdef DEBUG_TRACES
- std::cout << " | filt(Tau) = filt(Sigma) = " << filtration(f_boundary) << std::endl;
+ std::cout << " | filt(Tau) = filt(Sigma) = " << complex.filtration(f_boundary) << std::endl;
#endif // DEBUG_TRACES
}
}
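
As a compile-time sketch (not part of the patch), the interface that create_complex() and propagate_alpha_filtration() above require from their SimplicialComplexForAlpha argument can be summarized as follows; the member names are exactly the ones called in the new code, the placeholder typedefs are illustrative only, and Gudhi::Simplex_tree<> is one model of this interface.

    #include <cstddef>
    #include <vector>

    struct Simplicial_complex_for_alpha_sketch {
      typedef double Filtration_value;   // placeholder types, for illustration only
      typedef int    Vertex_handle;
      typedef int    Simplex_handle;
      typedef std::vector<Simplex_handle> Simplex_range;
      typedef std::vector<Vertex_handle>  Vertex_range;

      std::size_t num_vertices();        // must be 0 when create_complex() is called
      void set_dimension(int dimension);
      void insert_simplex_and_subfaces(const std::vector<Vertex_handle>& simplex,
                                       Filtration_value filtration);
      Simplex_range skeleton_simplex_range(int dimension);
      Simplex_range boundary_simplex_range(Simplex_handle simplex);
      Vertex_range  simplex_vertex_range(Simplex_handle simplex);
      int dimension(Simplex_handle simplex);
      Filtration_value filtration(Simplex_handle simplex);
      void assign_filtration(Simplex_handle simplex, Filtration_value filtration);
      bool make_filtration_non_decreasing();
      bool prune_above_filtration(Filtration_value max_filtration);
    };
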
diff --git a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp
index 4d7bf622..7380547f 100644
--- a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp
+++ b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp
@@ -4,7 +4,7 @@
*
* Author(s): Vincent Rouvreau
*
- * Copyright (C) 2015 INRIA Saclay (France)
+ * Copyright (C) 2015 INRIA
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -33,12 +33,20 @@
#include <vector>
#include <gudhi/Alpha_complex.h>
+// to construct a simplex_tree from Delaunay_triangulation
+#include <gudhi/graph_simplicial_complex.h>
+#include <gudhi/Simplex_tree.h>
+#include <boost/mpl/list.hpp>
// Use dynamic_dimension_tag for the user to be able to set dimension
typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Kernel_d;
+// Use static dimension_tag so the user cannot set the dimension
+typedef CGAL::Epick_d< CGAL::Dimension_tag<2> > Kernel_s;
// The triangulation uses the default instantiation of the TriangulationDataStructure template parameter
-BOOST_AUTO_TEST_CASE(ALPHA_DOC_OFF_file) {
+typedef boost::mpl::list<Kernel_d, Kernel_s> list_of_kernel_variants;
+
+BOOST_AUTO_TEST_CASE_TEMPLATE(Alpha_complex_from_OFF_file, TestedKernel, list_of_kernel_variants) {
// ----------------------------------------------------------------------------
//
// Init of an alpha-complex from a OFF file
@@ -49,60 +57,55 @@ BOOST_AUTO_TEST_CASE(ALPHA_DOC_OFF_file) {
std::cout << "========== OFF FILE NAME = " << off_file_name << " - alpha²=" <<
max_alpha_square_value << "==========" << std::endl;
- Gudhi::alpha_complex::Alpha_complex<Kernel_d> alpha_complex_from_file(off_file_name, max_alpha_square_value);
+ Gudhi::alpha_complex::Alpha_complex<TestedKernel> alpha_complex_from_file(off_file_name);
- const int DIMENSION = 2;
- std::cout << "alpha_complex_from_file.dimension()=" << alpha_complex_from_file.dimension() << std::endl;
- BOOST_CHECK(alpha_complex_from_file.dimension() == DIMENSION);
+ std::cout << "alpha_complex_from_points.number_of_vertices()=" << alpha_complex_from_file.number_of_vertices()
+ << std::endl;
+ BOOST_CHECK(alpha_complex_from_file.number_of_vertices() == 7);
- const int NUMBER_OF_VERTICES = 7;
- std::cout << "alpha_complex_from_file.num_vertices()=" << alpha_complex_from_file.num_vertices() << std::endl;
- BOOST_CHECK(alpha_complex_from_file.num_vertices() == NUMBER_OF_VERTICES);
+ Gudhi::Simplex_tree<> simplex_tree_60;
+ BOOST_CHECK(alpha_complex_from_file.create_complex(simplex_tree_60, max_alpha_square_value));
- const int NUMBER_OF_SIMPLICES = 25;
- std::cout << "alpha_complex_from_file.num_simplices()=" << alpha_complex_from_file.num_simplices() << std::endl;
- BOOST_CHECK(alpha_complex_from_file.num_simplices() == NUMBER_OF_SIMPLICES);
+ std::cout << "simplex_tree_60.dimension()=" << simplex_tree_60.dimension() << std::endl;
+ BOOST_CHECK(simplex_tree_60.dimension() == 2);
-}
+ std::cout << "alpha_complex_from_points.number_of_vertices()=" << alpha_complex_from_file.number_of_vertices()
+ << std::endl;
+ BOOST_CHECK(alpha_complex_from_file.number_of_vertices() == 7);
-BOOST_AUTO_TEST_CASE(ALPHA_DOC_OFF_file_filtered) {
- // ----------------------------------------------------------------------------
- //
- // Init of an alpha-complex from a OFF file
- //
- // ----------------------------------------------------------------------------
- std::string off_file_name("alphacomplexdoc.off");
- double max_alpha_square_value = 59.0;
+ std::cout << "simplex_tree_60.num_vertices()=" << simplex_tree_60.num_vertices() << std::endl;
+ BOOST_CHECK(simplex_tree_60.num_vertices() == 7);
+
+ std::cout << "simplex_tree_60.num_simplices()=" << simplex_tree_60.num_simplices() << std::endl;
+ BOOST_CHECK(simplex_tree_60.num_simplices() == 25);
+
+ max_alpha_square_value = 59.0;
std::cout << "========== OFF FILE NAME = " << off_file_name << " - alpha²=" <<
max_alpha_square_value << "==========" << std::endl;
- // Use of the default dynamic kernel
- Gudhi::alpha_complex::Alpha_complex<> alpha_complex_from_file(off_file_name, max_alpha_square_value);
-
- const int DIMENSION = 2;
- std::cout << "alpha_complex_from_file.dimension()=" << alpha_complex_from_file.dimension() << std::endl;
- BOOST_CHECK(alpha_complex_from_file.dimension() == DIMENSION);
+ Gudhi::Simplex_tree<> simplex_tree_59;
+ BOOST_CHECK(alpha_complex_from_file.create_complex(simplex_tree_59, max_alpha_square_value));
+
+ std::cout << "simplex_tree_59.dimension()=" << simplex_tree_59.dimension() << std::endl;
+ BOOST_CHECK(simplex_tree_59.dimension() == 2);
- const int NUMBER_OF_VERTICES = 7;
- std::cout << "alpha_complex_from_file.num_vertices()=" << alpha_complex_from_file.num_vertices() << std::endl;
- BOOST_CHECK(alpha_complex_from_file.num_vertices() == NUMBER_OF_VERTICES);
+ std::cout << "simplex_tree_59.num_vertices()=" << simplex_tree_59.num_vertices() << std::endl;
+ BOOST_CHECK(simplex_tree_59.num_vertices() == 7);
- const int NUMBER_OF_SIMPLICES = 23;
- std::cout << "alpha_complex_from_file.num_simplices()=" << alpha_complex_from_file.num_simplices() << std::endl;
- BOOST_CHECK(alpha_complex_from_file.num_simplices() == NUMBER_OF_SIMPLICES);
+ std::cout << "simplex_tree_59.num_simplices()=" << simplex_tree_59.num_simplices() << std::endl;
+ BOOST_CHECK(simplex_tree_59.num_simplices() == 23);
}
bool are_almost_the_same(float a, float b) {
return std::fabs(a - b) < std::numeric_limits<float>::epsilon();
}
-// Use dynamic_dimension_tag for the user to be able to set dimension
-typedef CGAL::Epick_d< CGAL::Dimension_tag<4> > Kernel_s;
-typedef Kernel_s::Point_d Point;
-typedef std::vector<Point> Vector_of_points;
-
+// Use static dimension_tag so the user cannot set the dimension
+typedef CGAL::Epick_d< CGAL::Dimension_tag<4> > Kernel_4;
+typedef Kernel_4::Point_d Point_4;
+typedef std::vector<Point_4> Vector_4_Points;
-bool is_point_in_list(Vector_of_points points_list, Point point) {
+bool is_point_in_list(Vector_4_Points points_list, Point_4 point) {
for (auto& point_in_list : points_list) {
if (point_in_list == point) {
return true; // point found
@@ -115,57 +118,64 @@ BOOST_AUTO_TEST_CASE(Alpha_complex_from_points) {
// ----------------------------------------------------------------------------
// Init of a list of points
// ----------------------------------------------------------------------------
- Vector_of_points points;
+ Vector_4_Points points;
std::vector<double> coords = { 0.0, 0.0, 0.0, 1.0 };
- points.push_back(Point(coords.begin(), coords.end()));
+ points.push_back(Point_4(coords.begin(), coords.end()));
coords = { 0.0, 0.0, 1.0, 0.0 };
- points.push_back(Point(coords.begin(), coords.end()));
+ points.push_back(Point_4(coords.begin(), coords.end()));
coords = { 0.0, 1.0, 0.0, 0.0 };
- points.push_back(Point(coords.begin(), coords.end()));
+ points.push_back(Point_4(coords.begin(), coords.end()));
coords = { 1.0, 0.0, 0.0, 0.0 };
- points.push_back(Point(coords.begin(), coords.end()));
+ points.push_back(Point_4(coords.begin(), coords.end()));
// ----------------------------------------------------------------------------
// Init of an alpha complex from the list of points
// ----------------------------------------------------------------------------
- Gudhi::alpha_complex::Alpha_complex<Kernel_s> alpha_complex_from_points(points);
+ Gudhi::alpha_complex::Alpha_complex<Kernel_4> alpha_complex_from_points(points);
std::cout << "========== Alpha_complex_from_points ==========" << std::endl;
+ Gudhi::Simplex_tree<> simplex_tree;
+ BOOST_CHECK(alpha_complex_from_points.create_complex(simplex_tree));
+
+ std::cout << "alpha_complex_from_points.number_of_vertices()=" << alpha_complex_from_points.number_of_vertices()
+ << std::endl;
+ BOOST_CHECK(alpha_complex_from_points.number_of_vertices() == points.size());
+
// Another way to check num_simplices
std::cout << "Iterator on alpha complex simplices in the filtration order, with [filtration value]:" << std::endl;
int num_simplices = 0;
- for (auto f_simplex : alpha_complex_from_points.filtration_simplex_range()) {
+ for (auto f_simplex : simplex_tree.filtration_simplex_range()) {
num_simplices++;
std::cout << " ( ";
- for (auto vertex : alpha_complex_from_points.simplex_vertex_range(f_simplex)) {
+ for (auto vertex : simplex_tree.simplex_vertex_range(f_simplex)) {
std::cout << vertex << " ";
}
- std::cout << ") -> " << "[" << alpha_complex_from_points.filtration(f_simplex) << "] ";
+ std::cout << ") -> " << "[" << simplex_tree.filtration(f_simplex) << "] ";
std::cout << std::endl;
}
BOOST_CHECK(num_simplices == 15);
- std::cout << "alpha_complex_from_points.num_simplices()=" << alpha_complex_from_points.num_simplices() << std::endl;
- BOOST_CHECK(alpha_complex_from_points.num_simplices() == 15);
+ std::cout << "simplex_tree.num_simplices()=" << simplex_tree.num_simplices() << std::endl;
+ BOOST_CHECK(simplex_tree.num_simplices() == 15);
- std::cout << "alpha_complex_from_points.dimension()=" << alpha_complex_from_points.dimension() << std::endl;
- BOOST_CHECK(alpha_complex_from_points.dimension() == 4);
- std::cout << "alpha_complex_from_points.num_vertices()=" << alpha_complex_from_points.num_vertices() << std::endl;
- BOOST_CHECK(alpha_complex_from_points.num_vertices() == 4);
+ std::cout << "simplex_tree.dimension()=" << simplex_tree.dimension() << std::endl;
+ BOOST_CHECK(simplex_tree.dimension() == 4);
+ std::cout << "simplex_tree.num_vertices()=" << simplex_tree.num_vertices() << std::endl;
+ BOOST_CHECK(simplex_tree.num_vertices() == 4);
- for (auto f_simplex : alpha_complex_from_points.filtration_simplex_range()) {
- switch (alpha_complex_from_points.dimension(f_simplex)) {
+ for (auto f_simplex : simplex_tree.filtration_simplex_range()) {
+ switch (simplex_tree.dimension(f_simplex)) {
case 0:
- BOOST_CHECK(are_almost_the_same(alpha_complex_from_points.filtration(f_simplex), 0.0));
+ BOOST_CHECK(are_almost_the_same(simplex_tree.filtration(f_simplex), 0.0));
break;
case 1:
- BOOST_CHECK(are_almost_the_same(alpha_complex_from_points.filtration(f_simplex), 1.0/2.0));
+ BOOST_CHECK(are_almost_the_same(simplex_tree.filtration(f_simplex), 1.0/2.0));
break;
case 2:
- BOOST_CHECK(are_almost_the_same(alpha_complex_from_points.filtration(f_simplex), 2.0/3.0));
+ BOOST_CHECK(are_almost_the_same(simplex_tree.filtration(f_simplex), 2.0/3.0));
break;
case 3:
- BOOST_CHECK(are_almost_the_same(alpha_complex_from_points.filtration(f_simplex), 3.0/4.0));
+ BOOST_CHECK(are_almost_the_same(simplex_tree.filtration(f_simplex), 3.0/4.0));
break;
default:
BOOST_CHECK(false); // Shall not happen
@@ -173,22 +183,22 @@ BOOST_AUTO_TEST_CASE(Alpha_complex_from_points) {
}
}
- Point p0 = alpha_complex_from_points.get_point(0);
+ Point_4 p0 = alpha_complex_from_points.get_point(0);
std::cout << "alpha_complex_from_points.get_point(0)=" << p0 << std::endl;
BOOST_CHECK(4 == p0.dimension());
BOOST_CHECK(is_point_in_list(points, p0));
- Point p1 = alpha_complex_from_points.get_point(1);
+ Point_4 p1 = alpha_complex_from_points.get_point(1);
std::cout << "alpha_complex_from_points.get_point(1)=" << p1 << std::endl;
BOOST_CHECK(4 == p1.dimension());
BOOST_CHECK(is_point_in_list(points, p1));
- Point p2 = alpha_complex_from_points.get_point(2);
+ Point_4 p2 = alpha_complex_from_points.get_point(2);
std::cout << "alpha_complex_from_points.get_point(2)=" << p2 << std::endl;
BOOST_CHECK(4 == p2.dimension());
BOOST_CHECK(is_point_in_list(points, p2));
- Point p3 = alpha_complex_from_points.get_point(3);
+ Point_4 p3 = alpha_complex_from_points.get_point(3);
std::cout << "alpha_complex_from_points.get_point(3)=" << p3 << std::endl;
BOOST_CHECK(4 == p3.dimension());
BOOST_CHECK(is_point_in_list(points, p3));
@@ -199,40 +209,40 @@ BOOST_AUTO_TEST_CASE(Alpha_complex_from_points) {
BOOST_CHECK_THROW (alpha_complex_from_points.get_point(1234), std::out_of_range);
// Test after prune_above_filtration
- bool modified = alpha_complex_from_points.prune_above_filtration(0.6);
+ bool modified = simplex_tree.prune_above_filtration(0.6);
if (modified) {
- alpha_complex_from_points.initialize_filtration();
+ simplex_tree.initialize_filtration();
}
BOOST_CHECK(modified);
// Another way to check num_simplices
std::cout << "Iterator on alpha complex simplices in the filtration order, with [filtration value]:" << std::endl;
num_simplices = 0;
- for (auto f_simplex : alpha_complex_from_points.filtration_simplex_range()) {
+ for (auto f_simplex : simplex_tree.filtration_simplex_range()) {
num_simplices++;
std::cout << " ( ";
- for (auto vertex : alpha_complex_from_points.simplex_vertex_range(f_simplex)) {
+ for (auto vertex : simplex_tree.simplex_vertex_range(f_simplex)) {
std::cout << vertex << " ";
}
- std::cout << ") -> " << "[" << alpha_complex_from_points.filtration(f_simplex) << "] ";
+ std::cout << ") -> " << "[" << simplex_tree.filtration(f_simplex) << "] ";
std::cout << std::endl;
}
BOOST_CHECK(num_simplices == 10);
- std::cout << "alpha_complex_from_points.num_simplices()=" << alpha_complex_from_points.num_simplices() << std::endl;
- BOOST_CHECK(alpha_complex_from_points.num_simplices() == 10);
+ std::cout << "simplex_tree.num_simplices()=" << simplex_tree.num_simplices() << std::endl;
+ BOOST_CHECK(simplex_tree.num_simplices() == 10);
- std::cout << "alpha_complex_from_points.dimension()=" << alpha_complex_from_points.dimension() << std::endl;
- BOOST_CHECK(alpha_complex_from_points.dimension() == 4);
- std::cout << "alpha_complex_from_points.num_vertices()=" << alpha_complex_from_points.num_vertices() << std::endl;
- BOOST_CHECK(alpha_complex_from_points.num_vertices() == 4);
+ std::cout << "simplex_tree.dimension()=" << simplex_tree.dimension() << std::endl;
+ BOOST_CHECK(simplex_tree.dimension() == 4);
+ std::cout << "simplex_tree.num_vertices()=" << simplex_tree.num_vertices() << std::endl;
+ BOOST_CHECK(simplex_tree.num_vertices() == 4);
- for (auto f_simplex : alpha_complex_from_points.filtration_simplex_range()) {
- switch (alpha_complex_from_points.dimension(f_simplex)) {
+ for (auto f_simplex : simplex_tree.filtration_simplex_range()) {
+ switch (simplex_tree.dimension(f_simplex)) {
case 0:
- BOOST_CHECK(are_almost_the_same(alpha_complex_from_points.filtration(f_simplex), 0.0));
+ BOOST_CHECK(are_almost_the_same(simplex_tree.filtration(f_simplex), 0.0));
break;
case 1:
- BOOST_CHECK(are_almost_the_same(alpha_complex_from_points.filtration(f_simplex), 1.0/2.0));
+ BOOST_CHECK(are_almost_the_same(simplex_tree.filtration(f_simplex), 1.0/2.0));
break;
default:
BOOST_CHECK(false); // Shall not happen
@@ -241,3 +251,36 @@ BOOST_AUTO_TEST_CASE(Alpha_complex_from_points) {
}
}
+
+BOOST_AUTO_TEST_CASE_TEMPLATE(Alpha_complex_from_empty_points, TestedKernel, list_of_kernel_variants) {
+ std::cout << "========== Alpha_complex_from_empty_points ==========" << std::endl;
+
+ // ----------------------------------------------------------------------------
+ // Init of an empty list of points
+ // ----------------------------------------------------------------------------
+ std::vector<typename TestedKernel::Point_d> points;
+
+ // ----------------------------------------------------------------------------
+ // Init of an alpha complex from the list of points
+ // ----------------------------------------------------------------------------
+ Gudhi::alpha_complex::Alpha_complex<TestedKernel> alpha_complex_from_points(points);
+
+ // Test to the limit
+ BOOST_CHECK_THROW (alpha_complex_from_points.get_point(0), std::out_of_range);
+
+ Gudhi::Simplex_tree<> simplex_tree;
+ BOOST_CHECK(!alpha_complex_from_points.create_complex(simplex_tree));
+
+ std::cout << "alpha_complex_from_points.number_of_vertices()=" << alpha_complex_from_points.number_of_vertices()
+ << std::endl;
+ BOOST_CHECK(alpha_complex_from_points.number_of_vertices() == points.size());
+
+ std::cout << "simplex_tree.num_simplices()=" << simplex_tree.num_simplices() << std::endl;
+ BOOST_CHECK(simplex_tree.num_simplices() == 0);
+
+ std::cout << "simplex_tree.dimension()=" << simplex_tree.dimension() << std::endl;
+ BOOST_CHECK(simplex_tree.dimension() == -1);
+
+ std::cout << "simplex_tree.num_vertices()=" << simplex_tree.num_vertices() << std::endl;
+ BOOST_CHECK(simplex_tree.num_vertices() == 0);
+}
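
A quick sanity check (not part of the patch) of the filtration values asserted in Alpha_complex_from_points above: the four points are the unit basis vectors of R^4, so every pairwise squared distance is 2, and the squared circumradius of the regular k-simplex they span is

    R_k^2 = \frac{a^2}{2} \cdot \frac{k}{k+1} = \frac{k}{k+1} \quad (a^2 = 2),

which gives 0, 1/2, 2/3 and 3/4 for dimensions 0 to 3, matching the BOOST_CHECKs on the filtration values.
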
diff --git a/src/Alpha_complex/test/CMakeLists.txt b/src/Alpha_complex/test/CMakeLists.txt
index b0723a41..32091196 100644
--- a/src/Alpha_complex/test/CMakeLists.txt
+++ b/src/Alpha_complex/test/CMakeLists.txt
@@ -10,29 +10,19 @@ if (GPROF_PATH)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -pg")
endif()
-# need CGAL 4.7
-# cmake -DCGAL_DIR=~/workspace/CGAL-4.7-Ic-41 ../../..
-if(CGAL_FOUND)
- if (NOT CGAL_VERSION VERSION_LESS 4.7.0)
- if (EIGEN3_FOUND)
- add_executable ( AlphaComplexUT Alpha_complex_unit_test.cpp )
- target_link_libraries(AlphaComplexUT ${Boost_SYSTEM_LIBRARY} ${Boost_THREAD_LIBRARY} ${CGAL_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
- if (TBB_FOUND)
- target_link_libraries(AlphaComplexUT ${TBB_LIBRARIES})
- endif()
+if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
+ add_executable ( AlphaComplexUT Alpha_complex_unit_test.cpp )
+ target_link_libraries(AlphaComplexUT ${Boost_SYSTEM_LIBRARY} ${Boost_THREAD_LIBRARY} ${CGAL_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
+ if (TBB_FOUND)
+ target_link_libraries(AlphaComplexUT ${TBB_LIBRARIES})
+ endif()
- # Do not forget to copy test files in current binary dir
- file(COPY "${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+ # Do not forget to copy test files in current binary dir
+ file(COPY "${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
- add_test(AlphaComplexUT ${CMAKE_CURRENT_BINARY_DIR}/AlphaComplexUT
- # XML format for Jenkins xUnit plugin
- --log_format=XML --log_sink=${CMAKE_SOURCE_DIR}/AlphaComplexUT.xml --log_level=test_suite --report_level=no)
+ add_test(AlphaComplexUT ${CMAKE_CURRENT_BINARY_DIR}/AlphaComplexUT
+ # XML format for Jenkins xUnit plugin
+ --log_format=XML --log_sink=${CMAKE_SOURCE_DIR}/AlphaComplexUT.xml --log_level=test_suite --report_level=no)
- else()
- message(WARNING "Eigen3 not found. Version 3.1.0 is required for Alpha complex unitary tests.")
- endif()
- else()
- message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile Alpha complex unitary tests. Version 4.7.0 is required.")
- endif ()
-endif()
+endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
diff --git a/src/Bitmap_cubical_complex/doc/COPYRIGHT b/src/Bitmap_cubical_complex/doc/COPYRIGHT
new file mode 100644
index 00000000..bcd46b23
--- /dev/null
+++ b/src/Bitmap_cubical_complex/doc/COPYRIGHT
@@ -0,0 +1,19 @@
+The files of this directory are part of the Gudhi Library. The Gudhi library
+(Geometric Understanding in Higher Dimensions) is a generic C++ library for
+computational topology.
+
+Author(s): Pawel Dlotko
+
+Copyright (C) 2015 INRIA
+
+This program is free software: you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free Software
+Foundation, either version 3 of the License, or (at your option) any later
+version.
+
+This program is distributed in the hope that it will be useful, but WITHOUT
+ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License along with
+this program. If not, see <http://www.gnu.org/licenses/>.
diff --git a/src/Bottleneck/concept/Persistence_diagram.h b/src/Bottleneck/concept/Persistence_diagram.h
deleted file mode 100644
index eaaf8bc5..00000000
--- a/src/Bottleneck/concept/Persistence_diagram.h
+++ /dev/null
@@ -1,7 +0,0 @@
-typedef typename std::pair<double,double> Diagram_point;
-
-struct Persistence_Diagram
-{
- const_iterator<Diagram_point> cbegin() const;
- const_iterator<Diagram_point> cend() const;
-};
diff --git a/src/Bottleneck/example/CMakeLists.txt b/src/Bottleneck/example/CMakeLists.txt
deleted file mode 100644
index 77797202..00000000
--- a/src/Bottleneck/example/CMakeLists.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-cmake_minimum_required(VERSION 2.6)
-project(Bottleneck_examples)
-
-add_executable ( RandomDiagrams random_diagrams.cpp )
-add_test(RandomDiagrams ${CMAKE_CURRENT_BINARY_DIR}/RandomDiagrams)
diff --git a/src/Bottleneck/include/gudhi/Graph_matching.h b/src/Bottleneck/include/gudhi/Graph_matching.h
deleted file mode 100644
index ea47e1d5..00000000
--- a/src/Bottleneck/include/gudhi/Graph_matching.h
+++ /dev/null
@@ -1,197 +0,0 @@
-/* This file is part of the Gudhi Library. The Gudhi library
- * (Geometric Understanding in Higher Dimensions) is a generic C++
- * library for computational topology.
- *
- * Author(s): Francois Godi
- *
- * Copyright (C) 2015 INRIA Saclay (France)
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#ifndef SRC_BOTTLENECK_INCLUDE_GUDHI_GRAPH_MATCHING_H_
-#define SRC_BOTTLENECK_INCLUDE_GUDHI_GRAPH_MATCHING_H_
-
-#include <deque>
-#include <list>
-#include <vector>
-
-#include "gudhi/Layered_neighbors_finder.h"
-
-namespace Gudhi {
-
-namespace bottleneck {
-
-template<typename Persistence_diagram1, typename Persistence_diagram2>
-double bottleneck_distance(Persistence_diagram1& diag1, Persistence_diagram2& diag2, double e = 0.);
-
-class Graph_matching {
- public:
- Graph_matching(const Persistence_diagrams_graph& g);
- Graph_matching& operator=(const Graph_matching& m);
- bool perfect() const;
- bool multi_augment();
- void set_r(double r);
-
- private:
- const Persistence_diagrams_graph& g;
- double r;
- std::vector<int> v_to_u;
- std::list<int> unmatched_in_u;
-
- Layered_neighbors_finder* layering() const;
- bool augment(Layered_neighbors_finder* layered_nf, int u_start_index, int max_depth);
- void update(std::deque<int>& path);
-};
-
-Graph_matching::Graph_matching(const Persistence_diagrams_graph& g)
- : g(g), r(0), v_to_u(g.size()), unmatched_in_u() {
- for (int u_point_index = 0; u_point_index < g.size(); ++u_point_index)
- unmatched_in_u.emplace_back(u_point_index);
-}
-
-Graph_matching& Graph_matching::operator=(const Graph_matching& m) {
- r = m.r;
- v_to_u = m.v_to_u;
- unmatched_in_u = m.unmatched_in_u;
- return *this;
-}
-
-inline bool Graph_matching::perfect() const {
- return unmatched_in_u.empty();
-}
-
-inline bool Graph_matching::multi_augment() {
- if (perfect())
- return false;
- Layered_neighbors_finder* layered_nf = layering();
- double rn = sqrt(g.size());
- int nblmax = layered_nf->vlayers_number()*2 + 1;
- // verification of a necessary criterion
- if ((unmatched_in_u.size() > rn && nblmax > rn) || nblmax == 0)
- return false;
- bool successful = false;
- std::list<int>* tries = new std::list<int>(unmatched_in_u);
- for (auto it = tries->cbegin(); it != tries->cend(); it++)
- successful = successful || augment(layered_nf, *it, nblmax);
- delete tries;
- delete layered_nf;
- return successful;
-}
-
-inline void Graph_matching::set_r(double r) {
- this->r = r;
-}
-
-Layered_neighbors_finder* Graph_matching::layering() const {
- bool end = false;
- int layer = 0;
- std::list<int> u_vertices(unmatched_in_u);
- std::list<int> v_vertices;
- Neighbors_finder nf(g, r);
- Layered_neighbors_finder* layered_nf = new Layered_neighbors_finder(g, r);
- for (int v_point_index = 0; v_point_index < g.size(); ++v_point_index)
- nf.add(v_point_index);
- while (!u_vertices.empty()) {
- for (auto it = u_vertices.cbegin(); it != u_vertices.cend(); ++it) {
- std::list<int>* u_succ = nf.pull_all_near(*it);
- for (auto it = u_succ->cbegin(); it != u_succ->cend(); ++it) {
- layered_nf->add(*it, layer);
- v_vertices.emplace_back(*it);
- }
- delete u_succ;
- }
- u_vertices.clear();
- for (auto it = v_vertices.cbegin(); it != v_vertices.cend(); it++) {
- if (v_to_u.at(*it) == null_point_index())
- end = true;
- else
- u_vertices.emplace_back(v_to_u.at(*it));
- }
- if (end)
- return layered_nf;
- v_vertices.clear();
- layer++;
- }
- return layered_nf;
-}
-
-bool Graph_matching::augment(Layered_neighbors_finder *layered_nf, int u_start_index, int max_depth) {
- std::deque<int> path;
- path.emplace_back(u_start_index);
- // start is a point from U
- do {
- if (static_cast<int>(path.size()) > max_depth) {
- path.pop_back();
- path.pop_back();
- }
- if (path.empty())
- return false;
- int w = path.back();
- path.emplace_back(layered_nf->pull_near(w, path.size() / 2));
- while (path.back() == null_point_index()) {
- path.pop_back();
- path.pop_back();
- if (path.empty())
- return false;
- path.pop_back();
- path.emplace_back(layered_nf->pull_near(path.back(), path.size() / 2));
- }
- path.emplace_back(v_to_u.at(path.back()));
- } while (path.back() != null_point_index());
- path.pop_back();
- update(path);
- return true;
-}
-
-void Graph_matching::update(std::deque<int>& path) {
- unmatched_in_u.remove(path.front());
- for (auto it = path.cbegin(); it != path.cend(); ++it) {
- int tmp = *it;
- ++it;
- v_to_u[*it] = tmp;
- }
-}
-
-template<typename Persistence_diagram1, typename Persistence_diagram2>
-double bottleneck_distance(Persistence_diagram1& diag1, Persistence_diagram2& diag2, double e) {
- Persistence_diagrams_graph g(diag1, diag2, e);
- std::vector<double>* sd = g.sorted_distances();
- int idmin = 0;
- int idmax = sd->size() - 1;
- double alpha = pow(sd->size(), 0.25);
- Graph_matching m(g);
- Graph_matching biggest_unperfect = m;
- while (idmin != idmax) {
- int pas = static_cast<int>((idmax - idmin) / alpha);
- m.set_r(sd->at(idmin + pas));
- while (m.multi_augment()) {}
- if (m.perfect()) {
- idmax = idmin + pas;
- m = biggest_unperfect;
- } else {
- biggest_unperfect = m;
- idmin = idmin + pas + 1;
- }
- }
- double b = sd->at(idmin);
- delete sd;
- return b;
-}
-
-} // namespace bottleneck
-
-} // namespace Gudhi
-
-#endif // SRC_BOTTLENECK_INCLUDE_GUDHI_GRAPH_MATCHING_H_
diff --git a/src/Bottleneck/include/gudhi/Layered_neighbors_finder.h b/src/Bottleneck/include/gudhi/Layered_neighbors_finder.h
deleted file mode 100644
index de36e00b..00000000
--- a/src/Bottleneck/include/gudhi/Layered_neighbors_finder.h
+++ /dev/null
@@ -1,74 +0,0 @@
-/* This file is part of the Gudhi Library. The Gudhi library
- * (Geometric Understanding in Higher Dimensions) is a generic C++
- * library for computational topology.
- *
- * Author(s): Francois Godi
- *
- * Copyright (C) 2015 INRIA Saclay (France)
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#ifndef SRC_BOTTLENECK_INCLUDE_GUDHI_LAYERED_NEIGHBORS_FINDER_H_
-#define SRC_BOTTLENECK_INCLUDE_GUDHI_LAYERED_NEIGHBORS_FINDER_H_
-
-#include <vector>
-
-#include "Neighbors_finder.h"
-
-// Layered_neighbors_finder is a data structure used to find if a query point from U has neighbors in V in a given
-// vlayer of the vlayered persistence diagrams graph. V's points have to be added manually using their index.
-// A neighbor returned is automatically removed.
-
-namespace Gudhi {
-
-namespace bottleneck {
-
-class Layered_neighbors_finder {
- public:
- Layered_neighbors_finder(const Persistence_diagrams_graph& g, double r);
- void add(int v_point_index, int vlayer);
- int pull_near(int u_point_index, int vlayer);
- int vlayers_number() const;
-
- private:
- const Persistence_diagrams_graph& g;
- const double r;
- std::vector<Neighbors_finder> neighbors_finder;
-};
-
-Layered_neighbors_finder::Layered_neighbors_finder(const Persistence_diagrams_graph& g, double r) :
- g(g), r(r), neighbors_finder() { }
-
-inline void Layered_neighbors_finder::add(int v_point_index, int vlayer) {
- for (int l = neighbors_finder.size(); l <= vlayer; l++)
- neighbors_finder.emplace_back(Neighbors_finder(g, r));
- neighbors_finder.at(vlayer).add(v_point_index);
-}
-
-inline int Layered_neighbors_finder::pull_near(int u_point_index, int vlayer) {
- if (static_cast<int> (neighbors_finder.size()) <= vlayer)
- return null_point_index();
- return neighbors_finder.at(vlayer).pull_near(u_point_index);
-}
-
-inline int Layered_neighbors_finder::vlayers_number() const {
- return neighbors_finder.size();
-}
-
-} // namespace bottleneck
-
-} // namespace Gudhi
-
-#endif // SRC_BOTTLENECK_INCLUDE_GUDHI_LAYERED_NEIGHBORS_FINDER_H_
diff --git a/src/Bottleneck/include/gudhi/Neighbors_finder.h b/src/Bottleneck/include/gudhi/Neighbors_finder.h
deleted file mode 100644
index 98256571..00000000
--- a/src/Bottleneck/include/gudhi/Neighbors_finder.h
+++ /dev/null
@@ -1,96 +0,0 @@
-/* This file is part of the Gudhi Library. The Gudhi library
- * (Geometric Understanding in Higher Dimensions) is a generic C++
- * library for computational topology.
- *
- * Author(s): Francois Godi
- *
- * Copyright (C) 2015 INRIA Saclay (France)
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#ifndef SRC_BOTTLENECK_INCLUDE_GUDHI_NEIGHBORS_FINDER_H_
-#define SRC_BOTTLENECK_INCLUDE_GUDHI_NEIGHBORS_FINDER_H_
-
-#include <unordered_set>
-#include <list>
-
-#include "gudhi/Planar_neighbors_finder.h"
-
-namespace Gudhi {
-
-namespace bottleneck {
-
-// Neighbors_finder is a data structure used to find if a query point from U has neighbors in V
-// in the persistence diagrams graph.
-// V's points have to be added manually using their index. A neighbor returned is automatically removed.
-
-class Neighbors_finder {
- public:
- Neighbors_finder(const Persistence_diagrams_graph& g, double r);
- void add(int v_point_index);
- int pull_near(int u_point_index);
- std::list<int>* pull_all_near(int u_point_index);
-
- private:
- const Persistence_diagrams_graph& g;
- const double r;
- Planar_neighbors_finder planar_neighbors_f;
- std::unordered_set<int> projections_f;
-};
-
-Neighbors_finder::Neighbors_finder(const Persistence_diagrams_graph& g, double r) :
- g(g), r(r), planar_neighbors_f(g, r), projections_f() { }
-
-inline void Neighbors_finder::add(int v_point_index) {
- if (g.on_the_v_diagonal(v_point_index))
- projections_f.emplace(v_point_index);
- else
- planar_neighbors_f.add(v_point_index);
-}
-
-inline int Neighbors_finder::pull_near(int u_point_index) {
- int v_challenger = g.corresponding_point_in_v(u_point_index);
- if (planar_neighbors_f.contains(v_challenger) && g.distance(u_point_index, v_challenger) < r) {
- planar_neighbors_f.remove(v_challenger);
- return v_challenger;
- }
- if (g.on_the_u_diagonal(u_point_index)) {
- auto it = projections_f.cbegin();
- if (it != projections_f.cend()) {
- int tmp = *it;
- projections_f.erase(it);
- return tmp;
- }
- } else {
- return planar_neighbors_f.pull_near(u_point_index);
- }
- return null_point_index();
-}
-
-inline std::list<int>* Neighbors_finder::pull_all_near(int u_point_index) {
- std::list<int>* all_pull = planar_neighbors_f.pull_all_near(u_point_index);
- int last_pull = pull_near(u_point_index);
- while (last_pull != null_point_index()) {
- all_pull->emplace_back(last_pull);
- last_pull = pull_near(u_point_index);
- }
- return all_pull;
-}
-
-} // namespace bottleneck
-
-} // namespace Gudhi
-
-#endif // SRC_BOTTLENECK_INCLUDE_GUDHI_NEIGHBORS_FINDER_H_
diff --git a/src/Bottleneck/include/gudhi/Persistence_diagrams_graph.h b/src/Bottleneck/include/gudhi/Persistence_diagrams_graph.h
deleted file mode 100644
index 73ad940b..00000000
--- a/src/Bottleneck/include/gudhi/Persistence_diagrams_graph.h
+++ /dev/null
@@ -1,147 +0,0 @@
-/* This file is part of the Gudhi Library. The Gudhi library
- * (Geometric Understanding in Higher Dimensions) is a generic C++
- * library for computational topology.
- *
- * Author(s): Francois Godi
- *
- * Copyright (C) 2015 INRIA Saclay (France)
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#ifndef SRC_BOTTLENECK_INCLUDE_GUDHI_PERSISTENCE_DIAGRAMS_GRAPH_H_
-#define SRC_BOTTLENECK_INCLUDE_GUDHI_PERSISTENCE_DIAGRAMS_GRAPH_H_
-
-#include <vector>
-#include <set>
-#include <cmath>
-#include <utility> // for pair<>
-#include <algorithm> // for max
-
-namespace Gudhi {
-
-namespace bottleneck {
-
-// Diagram_point is the type of the persistence diagram's points
-typedef std::pair<double, double> Diagram_point;
-
-// Return the used index for encoding none of the points
-int null_point_index();
-
-// Persistence_diagrams_graph is the interface between any external representation of the two persistence diagrams and
-// the bottleneck distance computation. An interface is necessary to guarantee the complexity of the basic functions.
-
-class Persistence_diagrams_graph {
- public:
- // Persistence_diagram1 and 2 are the types of any externals representations of persistence diagrams.
- // They have to have an iterator over points, which have to have fields first (for birth) and second (for death).
- template<typename Persistence_diagram1, typename Persistence_diagram2>
- Persistence_diagrams_graph(Persistence_diagram1& diag1, Persistence_diagram2& diag2, double e = 0.);
- Persistence_diagrams_graph();
- bool on_the_u_diagonal(int u_point_index) const;
- bool on_the_v_diagonal(int v_point_index) const;
- int corresponding_point_in_u(int v_point_index) const;
- int corresponding_point_in_v(int u_point_index) const;
- double distance(int u_point_index, int v_point_index) const;
- int size() const;
- std::vector<double>* sorted_distances();
-
- private:
- std::vector<Diagram_point> u;
- std::vector<Diagram_point> v;
- Diagram_point get_u_point(int u_point_index) const;
- Diagram_point get_v_point(int v_point_index) const;
-};
-
-inline int null_point_index() {
- return -1;
-}
-
-template<typename Persistence_diagram1, typename Persistence_diagram2>
-Persistence_diagrams_graph::Persistence_diagrams_graph(Persistence_diagram1& diag1, Persistence_diagram2& diag2, double e)
- : u(), v() {
- for (auto it = diag1.cbegin(); it != diag1.cend(); ++it)
- if (it->second - it->first > e)
- u.emplace_back(*it);
- for (auto it = diag2.cbegin(); it != diag2.cend(); ++it)
- if (it->second - it->first > e)
- v.emplace_back(*it);
- if (u.size() < v.size())
- swap(u, v);
-}
-
-Persistence_diagrams_graph::Persistence_diagrams_graph()
- : u(), v() { }
-
-inline bool Persistence_diagrams_graph::on_the_u_diagonal(int u_point_index) const {
- return u_point_index >= static_cast<int> (u.size());
-}
-
-inline bool Persistence_diagrams_graph::on_the_v_diagonal(int v_point_index) const {
- return v_point_index >= static_cast<int> (v.size());
-}
-
-inline int Persistence_diagrams_graph::corresponding_point_in_u(int v_point_index) const {
- return on_the_v_diagonal(v_point_index) ?
- v_point_index - static_cast<int> (v.size()) : v_point_index + static_cast<int> (u.size());
-}
-
-inline int Persistence_diagrams_graph::corresponding_point_in_v(int u_point_index) const {
- return on_the_u_diagonal(u_point_index) ?
- u_point_index - static_cast<int> (u.size()) : u_point_index + static_cast<int> (v.size());
-}
-
-inline double Persistence_diagrams_graph::distance(int u_point_index, int v_point_index) const {
- // could be optimized for the case where one point is the projection of the other
- if (on_the_u_diagonal(u_point_index) && on_the_v_diagonal(v_point_index))
- return 0;
- Diagram_point p_u = get_u_point(u_point_index);
- Diagram_point p_v = get_v_point(v_point_index);
- return (std::max)(std::fabs(p_u.first - p_v.first), std::fabs(p_u.second - p_v.second));
-}
-
-inline int Persistence_diagrams_graph::size() const {
- return static_cast<int> (u.size() + v.size());
-}
-
-inline std::vector<double>* Persistence_diagrams_graph::sorted_distances() {
- // could be optimized
- std::set<double> sorted_distances;
- for (int u_point_index = 0; u_point_index < size(); ++u_point_index)
- for (int v_point_index = 0; v_point_index < size(); ++v_point_index)
- sorted_distances.emplace(distance(u_point_index, v_point_index));
- return new std::vector<double>(sorted_distances.cbegin(), sorted_distances.cend());
-}
-
-inline Diagram_point Persistence_diagrams_graph::get_u_point(int u_point_index) const {
- if (!on_the_u_diagonal(u_point_index))
- return u.at(u_point_index);
- Diagram_point projector = v.at(corresponding_point_in_v(u_point_index));
- double x = (projector.first + projector.second) / 2;
- return Diagram_point(x, x);
-}
-
-inline Diagram_point Persistence_diagrams_graph::get_v_point(int v_point_index) const {
- if (!on_the_v_diagonal(v_point_index))
- return v.at(v_point_index);
- Diagram_point projector = u.at(corresponding_point_in_u(v_point_index));
- double x = (projector.first + projector.second) / 2;
- return Diagram_point(x, x);
-}
-
-} // namespace bottleneck
-
-} // namespace Gudhi
-
-#endif // SRC_BOTTLENECK_INCLUDE_GUDHI_PERSISTENCE_DIAGRAMS_GRAPH_H_
diff --git a/src/Bottleneck/include/gudhi/Planar_neighbors_finder.h b/src/Bottleneck/include/gudhi/Planar_neighbors_finder.h
deleted file mode 100644
index 4af672e4..00000000
--- a/src/Bottleneck/include/gudhi/Planar_neighbors_finder.h
+++ /dev/null
@@ -1,119 +0,0 @@
-/* This file is part of the Gudhi Library. The Gudhi library
- * (Geometric Understanding in Higher Dimensions) is a generic C++
- * library for computational topology.
- *
- * Author(s): Francois Godi
- *
- * Copyright (C) 2015 INRIA Saclay (France)
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#ifndef SRC_BOTTLENECK_INCLUDE_GUDHI_PLANAR_NEIGHBORS_FINDER_H_
-#define SRC_BOTTLENECK_INCLUDE_GUDHI_PLANAR_NEIGHBORS_FINDER_H_
-
-#include <list>
-#include <iostream>
-#include <set>
-
-#include "Persistence_diagrams_graph.h"
-
-namespace Gudhi {
-
-namespace bottleneck {
-
-// Planar_neighbors_finder is a data structure used to find if a query point from U has planar neighbors in V with the
-// planar distance.
-// V's points have to be added manually using their index. A neighbor returned is automatically removed but we can also
-// remove points manually using their index.
-
-class Abstract_planar_neighbors_finder {
- public:
- Abstract_planar_neighbors_finder(const Persistence_diagrams_graph& g, double r);
- virtual ~Abstract_planar_neighbors_finder() = 0;
- virtual void add(int v_point_index) = 0;
- virtual void remove(int v_point_index) = 0;
- virtual bool contains(int v_point_index) const = 0;
- virtual int pull_near(int u_point_index) = 0;
- virtual std::list<int>* pull_all_near(int u_point_index);
-
- protected:
- const Persistence_diagrams_graph& g;
- const double r;
-};
-
-
-// Naive_pnf is a naive implementation of Abstract_planar_neighbors_finder
-
-class Naive_pnf : public Abstract_planar_neighbors_finder {
- public:
- Naive_pnf(const Persistence_diagrams_graph& g, double r);
- void add(int v_point_index);
- void remove(int v_point_index);
- bool contains(int v_point_index) const;
- int pull_near(int u_point_index);
-
- private:
- std::set<int> candidates;
-};
-
-
-// Planar_neighbors_finder is the used Abstract_planar_neighbors_finder's implementation
-typedef Naive_pnf Planar_neighbors_finder;
-
-Abstract_planar_neighbors_finder::Abstract_planar_neighbors_finder(const Persistence_diagrams_graph& g, double r) :
- g(g), r(r) { }
-
-inline Abstract_planar_neighbors_finder::~Abstract_planar_neighbors_finder() { }
-
-inline std::list<int>* Abstract_planar_neighbors_finder::pull_all_near(int u_point_index) {
- std::list<int>* all_pull = new std::list<int>();
- int last_pull = pull_near(u_point_index);
- while (last_pull != null_point_index()) {
- all_pull->emplace_back(last_pull);
- last_pull = pull_near(u_point_index);
- }
- return all_pull;
-}
-
-Naive_pnf::Naive_pnf(const Persistence_diagrams_graph& g, double r) :
- Abstract_planar_neighbors_finder(g, r), candidates() { }
-
-inline void Naive_pnf::add(int v_point_index) {
- candidates.emplace(v_point_index);
-}
-
-inline void Naive_pnf::remove(int v_point_index) {
- candidates.erase(v_point_index);
-}
-
-inline bool Naive_pnf::contains(int v_point_index) const {
- return (candidates.count(v_point_index) > 0);
-}
-
-inline int Naive_pnf::pull_near(int u_point_index) {
- for (auto it = candidates.begin(); it != candidates.end(); ++it)
- if (g.distance(u_point_index, *it) <= r) {
- int tmp = *it;
- candidates.erase(it);
- return tmp;
- }
- return null_point_index();
-}
-
-} // namespace bottleneck
-
-} // namespace Gudhi
-
-#endif // SRC_BOTTLENECK_INCLUDE_GUDHI_PLANAR_NEIGHBORS_FINDER_H_
diff --git a/src/Bottleneck/test/CMakeLists.txt b/src/Bottleneck/test/CMakeLists.txt
deleted file mode 100644
index 9d88ab25..00000000
--- a/src/Bottleneck/test/CMakeLists.txt
+++ /dev/null
@@ -1,21 +0,0 @@
-cmake_minimum_required(VERSION 2.6)
-project(Bottleneck_tests)
-
-if (GCOVR_PATH)
- # for gcovr to make coverage reports - Cobertura Jenkins plugin
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fprofile-arcs -ftest-coverage")
-endif()
-if (GPROF_PATH)
- # for gprof to make coverage reports - Jenkins
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -pg")
-endif()
-
-add_executable ( BottleneckUT bottleneck_unit_test.cpp )
-target_link_libraries(BottleneckUT ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
-
-# Unitary tests
-add_test(NAME BottleneckUT
- COMMAND ${CMAKE_CURRENT_BINARY_DIR}/BottleneckUT
- # XML format for Jenkins xUnit plugin
- --log_format=XML --log_sink=${CMAKE_SOURCE_DIR}/BottleneckUT.xml --log_level=test_suite --report_level=no)
-
diff --git a/src/Bottleneck/test/bottleneck_unit_test.cpp b/src/Bottleneck/test/bottleneck_unit_test.cpp
deleted file mode 100644
index c60f5d8a..00000000
--- a/src/Bottleneck/test/bottleneck_unit_test.cpp
+++ /dev/null
@@ -1,26 +0,0 @@
-#define BOOST_TEST_DYN_LINK
-#define BOOST_TEST_MODULE "bottleneck"
-#include <boost/test/unit_test.hpp>
-
-#include "gudhi/Graph_matching.h"
-#include <iostream>
-
-using namespace Gudhi::bottleneck;
-
-BOOST_AUTO_TEST_CASE(random_diagrams) {
- int n = 100;
- // Random construction
- std::vector< std::pair<double, double> > v1, v2;
- for (int i = 0; i < n; i++) {
- int a = rand() % n;
- v1.emplace_back(a, a + rand() % (n - a));
- int b = rand() % n;
- v2.emplace_back(b, b + rand() % (n - b));
- }
- // v1 and v2 are persistence diagrams, each containing 100 random points.
- double b = bottleneck_distance(v1, v2, 0);
- //
- std::cout << b << std::endl;
- const double EXPECTED_DISTANCE = 98.5;
- BOOST_CHECK(b == EXPECTED_DISTANCE);
-}
diff --git a/src/Bottleneck_distance/benchmark/CMakeLists.txt b/src/Bottleneck_distance/benchmark/CMakeLists.txt
new file mode 100644
index 00000000..170081ce
--- /dev/null
+++ b/src/Bottleneck_distance/benchmark/CMakeLists.txt
@@ -0,0 +1,9 @@
+cmake_minimum_required(VERSION 2.6)
+project(Bottleneck_distance_benchmark)
+
+if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
+ add_executable ( bottleneck_chrono bottleneck_chrono.cpp )
+ if (TBB_FOUND)
+ target_link_libraries(bottleneck_chrono ${TBB_LIBRARIES})
+ endif(TBB_FOUND)
+endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
diff --git a/src/Bottleneck_distance/benchmark/bottleneck_chrono.cpp b/src/Bottleneck_distance/benchmark/bottleneck_chrono.cpp
new file mode 100644
index 00000000..456c570b
--- /dev/null
+++ b/src/Bottleneck_distance/benchmark/bottleneck_chrono.cpp
@@ -0,0 +1,62 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author: Francois Godi
+ *
+ * Copyright (C) 2015 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <gudhi/Bottleneck.h>
+#include <chrono>
+#include <fstream>
+#include <random>
+
+using namespace Gudhi::persistence_diagram;
+
+
+double upper_bound = 400.; // any real > 0
+
+int main() {
+ std::ofstream result_file;
+ result_file.open("results.csv", std::ios::out);
+
+ for (int n = 1000; n <= 10000; n += 1000) {
+ std::uniform_real_distribution<double> unif1(0., upper_bound);
+ std::uniform_real_distribution<double> unif2(upper_bound / 1000., upper_bound / 100.);
+ std::default_random_engine re;
+ std::vector< std::pair<double, double> > v1, v2;
+ for (int i = 0; i < n; i++) {
+ double a = unif1(re);
+ double b = unif1(re);
+ double x = unif2(re);
+ double y = unif2(re);
+ v1.emplace_back(std::min(a, b), std::max(a, b));
+ v2.emplace_back(std::min(a, b) + std::min(x, y), std::max(a, b) + std::max(x, y));
+ if (i % 5 == 0)
+ v1.emplace_back(std::min(a, b), std::min(a, b) + x);
+ if (i % 3 == 0)
+ v2.emplace_back(std::max(a, b), std::max(a, b) + y);
+ }
+ std::chrono::steady_clock::time_point start = std::chrono::steady_clock::now();
+ double b = bottleneck_distance(v1, v2);
+ std::chrono::steady_clock::time_point end = std::chrono::steady_clock::now();
+ typedef std::chrono::duration<int, std::milli> millisecs_t;
+ millisecs_t duration(std::chrono::duration_cast<millisecs_t>(end - start));
+ result_file << n << ";" << duration.count() << ";" << b << std::endl;
+ }
+ result_file.close();
+}
diff --git a/src/Bottleneck_distance/concept/Persistence_diagram.h b/src/Bottleneck_distance/concept/Persistence_diagram.h
new file mode 100644
index 00000000..b157f22a
--- /dev/null
+++ b/src/Bottleneck_distance/concept/Persistence_diagram.h
@@ -0,0 +1,50 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author: François Godi
+ *
+ * Copyright (C) 2015 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef CONCEPT_BOTTLENECK_DISTANCE_PERSISTENCE_DIAGRAM_H_
+#define CONCEPT_BOTTLENECK_DISTANCE_PERSISTENCE_DIAGRAM_H_
+
+namespace Gudhi {
+
+namespace persistence_diagram {
+
+/** \brief Concept of point in a persistence diagram. std::get<0>(point) must return the birth of the corresponding component and std::get<1>(point) its death.
+ * Both should be convertible to `double`.
+ * A valid implementation of this concept is std::pair<double,double>.
+ * Death should be larger than birth, and death can be std::numeric_limits<double>::infinity() for components which stay alive.
+ *
+ * \ingroup bottleneck_distance
+ */
+struct DiagramPoint{};
+
+/** \brief Concept of persistence diagram. It is a range of `DiagramPoint`.
+ * std::begin(diagram) and std::end(diagram) must return corresponding iterators.
+ *
+ * \ingroup bottleneck_distance
+ */
+struct PersistenceDiagram{};
+
+} // namespace persistence_diagram
+
+} // namespace Gudhi
+
+#endif // CONCEPT_BOTTLENECK_DISTANCE_PERSISTENCE_DIAGRAM_H_
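A minimal sketch, assuming only the standard library, of a diagram modelling the concepts above (std::vector<std::pair<double, double>>, which is also what the examples in this patch pass to bottleneck_distance):

    #include <limits>
    #include <utility>
    #include <vector>

    int main() {
      // Each pair is a DiagramPoint: std::get<0> is the birth, std::get<1> the death.
      std::vector<std::pair<double, double>> diagram;
      diagram.emplace_back(0.2, 1.7);
      // A component that stays alive: death is +infinity.
      diagram.emplace_back(0.5, std::numeric_limits<double>::infinity());
      return 0;
    }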
diff --git a/src/Bottleneck_distance/doc/COPYRIGHT b/src/Bottleneck_distance/doc/COPYRIGHT
new file mode 100644
index 00000000..179740a6
--- /dev/null
+++ b/src/Bottleneck_distance/doc/COPYRIGHT
@@ -0,0 +1,19 @@
+The files of this directory are part of the Gudhi Library. The Gudhi library
+(Geometric Understanding in Higher Dimensions) is a generic C++ library for
+computational topology.
+
+Author(s): François Godi
+
+Copyright (C) 2015 INRIA
+
+This program is free software: you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free Software
+Foundation, either version 3 of the License, or (at your option) any later
+version.
+
+This program is distributed in the hope that it will be useful, but WITHOUT
+ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License along with
+this program. If not, see <http://www.gnu.org/licenses/>.
diff --git a/src/Bottleneck_distance/doc/Intro_bottleneck_distance.h b/src/Bottleneck_distance/doc/Intro_bottleneck_distance.h
new file mode 100644
index 00000000..3998fe8d
--- /dev/null
+++ b/src/Bottleneck_distance/doc/Intro_bottleneck_distance.h
@@ -0,0 +1,51 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author: François Godi
+ *
+ * Copyright (C) 2015 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef DOC_BOTTLENECK_DISTANCE_INTRO_BOTTLENECK_DISTANCE_H_
+#define DOC_BOTTLENECK_DISTANCE_INTRO_BOTTLENECK_DISTANCE_H_
+
+// needs namespace for Doxygen to link on classes
+namespace Gudhi {
+// needs namespace for Doxygen to link on classes
+namespace persistence_diagram {
+
+/** \defgroup bottleneck_distance Bottleneck distance
+ *
+ * \author Fran&ccedil;ois Godi
+ * @{
+ *
+ * \section bottleneckdefinition Definition
+ *
+ * The bottleneck distance measures the similarity between two persistence diagrams. It is the shortest distance b for
+ * which there exists a perfect matching between the points of the two diagrams (completed with all the points on the
+ * diagonal in order to ignore cardinality mismatches) such that any pair of matched points is at distance at most b.
+ *
+ * \image html perturb_pd.png In this picture, the red edges represent the matching. The bottleneck distance is the length of the longest edge.
+ *
+ */
+/** @} */ // end defgroup bottleneck_distance
+
+} // namespace persistence_diagram
+
+} // namespace Gudhi
+
+#endif // DOC_BOTTLENECK_DISTANCE_INTRO_BOTTLENECK_DISTANCE_H_
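As a worked illustration of this definition (the numbers here are chosen purely for illustration): for the one-point diagrams D1 = {(0, 10)} and D2 = {(0, 13)}, matching the two points costs max(|0 - 0|, |10 - 13|) = 3, while matching each point to its diagonal projection costs (10 - 0)/2 = 5 and (13 - 0)/2 = 6.5 respectively, so the bottleneck distance is 3.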
diff --git a/src/Bottleneck_distance/doc/perturb_pd.png b/src/Bottleneck_distance/doc/perturb_pd.png
new file mode 100644
index 00000000..be638de0
--- /dev/null
+++ b/src/Bottleneck_distance/doc/perturb_pd.png
Binary files differ
diff --git a/src/Bottleneck_distance/example/CMakeLists.txt b/src/Bottleneck_distance/example/CMakeLists.txt
new file mode 100644
index 00000000..b0a19f8b
--- /dev/null
+++ b/src/Bottleneck_distance/example/CMakeLists.txt
@@ -0,0 +1,20 @@
+cmake_minimum_required(VERSION 2.6)
+project(Bottleneck_distance_examples)
+
+if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
+ add_executable (bottleneck_read_file_example bottleneck_read_file_example.cpp)
+ add_executable (bottleneck_basic_example bottleneck_basic_example.cpp)
+
+ add_test(bottleneck_basic_example ${CMAKE_CURRENT_BINARY_DIR}/bottleneck_basic_example)
+
+ add_executable (alpha_rips_persistence_bottleneck_distance alpha_rips_persistence_bottleneck_distance.cpp)
+ target_link_libraries(alpha_rips_persistence_bottleneck_distance ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY})
+ if (TBB_FOUND)
+ target_link_libraries(bottleneck_read_file_example ${TBB_LIBRARIES})
+ target_link_libraries(bottleneck_basic_example ${TBB_LIBRARIES})
+ target_link_libraries(alpha_rips_persistence_bottleneck_distance ${TBB_LIBRARIES})
+ endif(TBB_FOUND)
+ add_test(alpha_rips_persistence_bottleneck_distance ${CMAKE_CURRENT_BINARY_DIR}/alpha_rips_persistence_bottleneck_distance
+ ${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off -r 0.15 -m 0.12 -d 3 -p 3)
+
+endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
diff --git a/src/Bottleneck_distance/example/alpha_rips_persistence_bottleneck_distance.cpp b/src/Bottleneck_distance/example/alpha_rips_persistence_bottleneck_distance.cpp
new file mode 100644
index 00000000..fd9f0858
--- /dev/null
+++ b/src/Bottleneck_distance/example/alpha_rips_persistence_bottleneck_distance.cpp
@@ -0,0 +1,190 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2017 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <gudhi/Alpha_complex.h>
+#include <gudhi/Rips_complex.h>
+#include <gudhi/distance_functions.h>
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Persistent_cohomology.h>
+#include <gudhi/Points_off_io.h>
+#include <gudhi/Bottleneck.h>
+
+#include <CGAL/Epick_d.h>
+
+#include <boost/program_options.hpp>
+
+#include <string>
+#include <vector>
+#include <limits> // infinity
+#include <utility> // for pair
+#include <algorithm> // for transform
+
+
+// Types definition
+using Simplex_tree = Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_fast_persistence>;
+using Filtration_value = Simplex_tree::Filtration_value;
+using Rips_complex = Gudhi::rips_complex::Rips_complex<Filtration_value>;
+using Field_Zp = Gudhi::persistent_cohomology::Field_Zp;
+using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology<Simplex_tree, Field_Zp >;
+using Kernel = CGAL::Epick_d< CGAL::Dynamic_dimension_tag >;
+using Point_d = Kernel::Point_d;
+using Points_off_reader = Gudhi::Points_off_reader<Point_d>;
+
+void program_options(int argc, char * argv[]
+ , std::string & off_file_points
+ , Filtration_value & threshold
+ , int & dim_max
+ , int & p
+ , Filtration_value & min_persistence);
+
+static inline std::pair<double, double> compute_root_square(std::pair<double, double> input) {
+ return std::make_pair(std::sqrt(input.first), std::sqrt(input.second));
+}
+
+int main(int argc, char * argv[]) {
+ std::string off_file_points;
+ Filtration_value threshold;
+ int dim_max;
+ int p;
+ Filtration_value min_persistence;
+
+ program_options(argc, argv, off_file_points, threshold, dim_max, p, min_persistence);
+
+ Points_off_reader off_reader(off_file_points);
+
+ // --------------------------------------------
+ // Rips persistence
+ // --------------------------------------------
+ Rips_complex rips_complex(off_reader.get_point_cloud(), threshold, Euclidean_distance());
+
+ // Construct the Rips complex in a Simplex Tree
+ Simplex_tree rips_stree;
+
+ rips_complex.create_complex(rips_stree, dim_max);
+ std::cout << "The Rips complex contains " << rips_stree.num_simplices() << " simplices and has dimension "
+ << rips_stree.dimension() << " \n";
+
+ // Sort the simplices in the order of the filtration
+ rips_stree.initialize_filtration();
+
+ // Compute the persistence diagram of the complex
+ Persistent_cohomology rips_pcoh(rips_stree);
+ // initializes the coefficient field for homology
+ rips_pcoh.init_coefficients(p);
+ rips_pcoh.compute_persistent_cohomology(min_persistence);
+
+ // rips_pcoh.output_diagram();
+
+ // --------------------------------------------
+ // Alpha persistence
+ // --------------------------------------------
+ Gudhi::alpha_complex::Alpha_complex<Kernel> alpha_complex(off_reader.get_point_cloud());
+
+ Simplex_tree alpha_stree;
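+  // Alpha complex filtration values are squared circumradii, so the Rips edge-length threshold is squared here.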
+ alpha_complex.create_complex(alpha_stree, threshold * threshold);
+ std::cout << "The Alpha complex contains " << alpha_stree.num_simplices() << " simplices and has dimension "
+ << alpha_stree.dimension() << " \n";
+
+ // Sort the simplices in the order of the filtration
+ alpha_stree.initialize_filtration();
+
+ // Compute the persistence diagram of the complex
+ Persistent_cohomology alpha_pcoh(alpha_stree);
+ // initializes the coefficient field for homology
+ alpha_pcoh.init_coefficients(p);
+ alpha_pcoh.compute_persistent_cohomology(min_persistence * min_persistence);
+
+ // alpha_pcoh.output_diagram();
+
+ // --------------------------------------------
+ // Bottleneck distance between both persistence
+ // --------------------------------------------
+ double max_b_distance {};
+ for (int dim = 0; dim < dim_max; dim ++) {
+ std::vector< std::pair< Filtration_value , Filtration_value > > rips_intervals;
+ std::vector< std::pair< Filtration_value , Filtration_value > > alpha_intervals;
+ rips_intervals = rips_pcoh.intervals_in_dimension(dim);
+ alpha_intervals = alpha_pcoh.intervals_in_dimension(dim);
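+    // Alpha filtration values are squared radii; take square roots so both diagrams use the Rips length scale.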
+ std::transform(alpha_intervals.begin(), alpha_intervals.end(), alpha_intervals.begin(), compute_root_square);
+
+ double bottleneck_distance = Gudhi::persistence_diagram::bottleneck_distance(rips_intervals, alpha_intervals);
+ std::cout << "In dimension " << dim << ", bottleneck distance = " << bottleneck_distance << std::endl;
+ if (bottleneck_distance > max_b_distance)
+ max_b_distance = bottleneck_distance;
+ }
+ std::cout << "================================================================================" << std::endl;
+ std::cout << "Bottleneck distance is " << max_b_distance << std::endl;
+
+ return 0;
+}
+
+void program_options(int argc, char * argv[]
+ , std::string & off_file_points
+ , Filtration_value & threshold
+ , int & dim_max
+ , int & p
+ , Filtration_value & min_persistence) {
+ namespace po = boost::program_options;
+ po::options_description hidden("Hidden options");
+ hidden.add_options()
+ ("input-file", po::value<std::string>(&off_file_points),
+ "Name of an OFF file containing a point set.\n");
+
+ po::options_description visible("Allowed options", 100);
+ visible.add_options()
+ ("help,h", "produce help message")
+ ("max-edge-length,r",
+ po::value<Filtration_value>(&threshold)->default_value(std::numeric_limits<Filtration_value>::infinity()),
+ "Maximal length of an edge for the Rips complex construction.")
+ ("cpx-dimension,d", po::value<int>(&dim_max)->default_value(1),
+ "Maximal dimension of the Rips complex we want to compute.")
+ ("field-charac,p", po::value<int>(&p)->default_value(11),
+ "Characteristic p of the coefficient field Z/pZ for computing homology.")
+ ("min-persistence,m", po::value<Filtration_value>(&min_persistence),
+ "Minimal lifetime of homology feature to be recorded. Default is 0. Enter a negative value to see zero length intervals");
+
+ po::positional_options_description pos;
+ pos.add("input-file", 1);
+
+ po::options_description all;
+ all.add(visible).add(hidden);
+
+ po::variables_map vm;
+ po::store(po::command_line_parser(argc, argv).
+ options(all).positional(pos).run(), vm);
+ po::notify(vm);
+
+ if (vm.count("help") || !vm.count("input-file")) {
+ std::cout << std::endl;
+ std::cout << "Compute the persistent homology with coefficient field Z/pZ \n";
+ std::cout << "of a Rips complex defined on a set of input points.\n \n";
+ std::cout << "The output diagram contains one bar per line, written with the convention: \n";
+ std::cout << " p dim b d \n";
+ std::cout << "where dim is the dimension of the homological feature,\n";
+ std::cout << "b and d are respectively the birth and death of the feature and \n";
+ std::cout << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
+
+ std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
+ std::cout << visible << std::endl;
+ std::abort();
+ }
+}
diff --git a/src/Bottleneck_distance/example/bottleneck_basic_example.cpp b/src/Bottleneck_distance/example/bottleneck_basic_example.cpp
new file mode 100644
index 00000000..d0ca4e20
--- /dev/null
+++ b/src/Bottleneck_distance/example/bottleneck_basic_example.cpp
@@ -0,0 +1,50 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Authors: Francois Godi, small modifications by Pawel Dlotko
+ *
+ * Copyright (C) 2015 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <gudhi/Bottleneck.h>
+
+#include <iostream>
+#include <vector>
+#include <utility> // for pair
+#include <limits> // for numeric_limits
+
+int main() {
+ std::vector< std::pair<double, double> > v1, v2;
+
+ v1.emplace_back(2.7, 3.7);
+ v1.emplace_back(9.6, 14.);
+ v1.emplace_back(34.2, 34.974);
+ v1.emplace_back(3., std::numeric_limits<double>::infinity());
+
+ v2.emplace_back(2.8, 4.45);
+ v2.emplace_back(9.5, 14.1);
+ v2.emplace_back(3.2, std::numeric_limits<double>::infinity());
+
+
+ double b = Gudhi::persistence_diagram::bottleneck_distance(v1, v2);
+
+ std::cout << "Bottleneck distance = " << b << std::endl;
+
+ b = Gudhi::persistence_diagram::bottleneck_distance(v1, v2, 0.1);
+
+ std::cout << "Approx bottleneck distance = " << b << std::endl;
+}
diff --git a/src/Bottleneck_distance/example/bottleneck_read_file_example.cpp b/src/Bottleneck_distance/example/bottleneck_read_file_example.cpp
new file mode 100644
index 00000000..bde05825
--- /dev/null
+++ b/src/Bottleneck_distance/example/bottleneck_read_file_example.cpp
@@ -0,0 +1,72 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Authors: Francois Godi, small modifications by Pawel Dlotko
+ *
+ * Copyright (C) 2015 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#define CGAL_HAS_THREADS
+
+#include <gudhi/Bottleneck.h>
+#include <iostream>
+#include <vector>
+#include <utility> // for pair
+#include <fstream>
+#include <sstream>
+#include <string>
+
+std::vector< std::pair<double, double> > read_diagram_from_file(const char* filename) {
+ std::ifstream in;
+ in.open(filename);
+ std::vector< std::pair<double, double> > result;
+ if (!in.is_open()) {
+ std::cerr << "File : " << filename << " do not exist. The program will now terminate \n";
+ throw "File do not exist \n";
+ }
+
+ std::string line;
+ while (!in.eof()) {
+ getline(in, line);
+ if (line.length() != 0) {
+ std::stringstream lineSS;
+ lineSS << line;
+ double beginn, endd;
+ lineSS >> beginn;
+ lineSS >> endd;
+ result.push_back(std::make_pair(beginn, endd));
+ }
+ }
+ in.close();
+ return result;
+} // read_diagram_from_file
+
+int main(int argc, char** argv) {
+  if (argc < 3) {
+    std::cout << "To run this program please provide as an input two files with persistence diagrams. Each file " <<
+        "should contain a birth-death pair per line. A third, optional parameter is an error bound on the bottleneck" <<
+        " distance (set by default to zero). The program will now terminate.\n";
+    return 1;  // exit here: reading argv[1] and argv[2] below would otherwise be out of bounds
+  }
+ std::vector< std::pair< double, double > > diag1 = read_diagram_from_file(argv[1]);
+ std::vector< std::pair< double, double > > diag2 = read_diagram_from_file(argv[2]);
+ double tolerance = 0.;
+ if (argc == 4) {
+ tolerance = atof(argv[3]);
+ }
+ double b = Gudhi::persistence_diagram::bottleneck_distance(diag1, diag2, tolerance);
+ std::cout << "The distance between the diagrams is : " << b << ". The tolerance is : " << tolerance << std::endl;
+}
diff --git a/src/Bottleneck_distance/include/gudhi/Bottleneck.h b/src/Bottleneck_distance/include/gudhi/Bottleneck.h
new file mode 100644
index 00000000..b90a0ee0
--- /dev/null
+++ b/src/Bottleneck_distance/include/gudhi/Bottleneck.h
@@ -0,0 +1,115 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author: Francois Godi
+ *
+ * Copyright (C) 2015 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef BOTTLENECK_H_
+#define BOTTLENECK_H_
+
+#include <gudhi/Graph_matching.h>
+
+#include <vector>
+#include <algorithm> // for max
+#include <limits> // for numeric_limits
+
+#include <cmath>
+
+namespace Gudhi {
+
+namespace persistence_diagram {
+
+double bottleneck_distance_approx(Persistence_graph& g, double e) {
+ double b_lower_bound = 0.;
+ double b_upper_bound = g.diameter_bound();
+ const double alpha = std::pow(g.size(), 1. / 5.);
+ Graph_matching m(g);
+ Graph_matching biggest_unperfect(g);
+ while (b_upper_bound - b_lower_bound > 2 * e) {
+ double step = b_lower_bound + (b_upper_bound - b_lower_bound) / alpha;
+ if (step <= b_lower_bound || step >= b_upper_bound) // Avoid precision problem
+ break;
+ m.set_r(step);
+ while (m.multi_augment()) {}; // compute a maximum matching (in the graph corresponding to the current r)
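+    // A perfect matching means 'step' is an upper bound on the distance; restart the search from the largest
+    // non-perfect matching seen so far. Otherwise 'step' is a lower bound and this matching becomes that restart point.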
+ if (m.perfect()) {
+ m = biggest_unperfect;
+ b_upper_bound = step;
+ } else {
+ biggest_unperfect = m;
+ b_lower_bound = step;
+ }
+ }
+ return (b_lower_bound + b_upper_bound) / 2.;
+}
+
+double bottleneck_distance_exact(Persistence_graph& g) {
+ std::vector<double> sd = g.sorted_distances();
+ long lower_bound_i = 0;
+ long upper_bound_i = sd.size() - 1;
+ const double alpha = std::pow(g.size(), 1. / 5.);
+ Graph_matching m(g);
+ Graph_matching biggest_unperfect(g);
+ while (lower_bound_i != upper_bound_i) {
+ long step = lower_bound_i + static_cast<long> ((upper_bound_i - lower_bound_i - 1) / alpha);
+ m.set_r(sd.at(step));
+ while (m.multi_augment()) {}; // compute a maximum matching (in the graph corresponding to the current r)
+ if (m.perfect()) {
+ m = biggest_unperfect;
+ upper_bound_i = step;
+ } else {
+ biggest_unperfect = m;
+ lower_bound_i = step + 1;
+ }
+ }
+ return sd.at(lower_bound_i);
+}
+
+/** \brief Function to compute the Bottleneck distance between two persistence diagrams.
+ *
+ * \tparam Persistence_diagram1,Persistence_diagram2
+ * models of the concept `PersistenceDiagram`.
+ * \param[in] e
+ * \parblock
+ * If `e` is 0, this uses an expensive algorithm to compute the exact distance.
+ *
+ * If `e` is not 0, it asks for an additive `e`-approximation, and currently
+ * also allows a small multiplicative error (the last 2 or 3 bits of the
+ * mantissa may be wrong). This version of the algorithm takes advantage of the
+ * limited precision of `double` and is usually a lot faster to compute,
+ * whatever the value of `e`.
+ *
+ * Thus, by default, `e` is the smallest positive double.
+ * \endparblock
+ *
+ * \ingroup bottleneck_distance
+ */
+template<typename Persistence_diagram1, typename Persistence_diagram2>
+double bottleneck_distance(const Persistence_diagram1 &diag1, const Persistence_diagram2 &diag2,
+ double e = std::numeric_limits<double>::min()) {
+ Persistence_graph g(diag1, diag2, e);
+ if (g.bottleneck_alive() == std::numeric_limits<double>::infinity())
+ return std::numeric_limits<double>::infinity();
+ return std::max(g.bottleneck_alive(), e == 0. ? bottleneck_distance_exact(g) : bottleneck_distance_approx(g, e));
+}
+
+} // namespace persistence_diagram
+
+} // namespace Gudhi
+
+#endif // BOTTLENECK_H_
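A minimal usage sketch of the function above (mirroring bottleneck_basic_example.cpp earlier in this patch): passing e = 0. requests the exact algorithm, while a positive e trades a little accuracy for speed.

    #include <gudhi/Bottleneck.h>
    #include <iostream>
    #include <utility>
    #include <vector>

    int main() {
      std::vector<std::pair<double, double>> d1{{2.7, 3.7}, {9.6, 14.}};
      std::vector<std::pair<double, double>> d2{{2.8, 4.45}, {9.5, 14.1}};
      std::cout << Gudhi::persistence_diagram::bottleneck_distance(d1, d2, 0.)   // exact
                << " "
                << Gudhi::persistence_diagram::bottleneck_distance(d1, d2, 0.1)  // 0.1-approximation
                << std::endl;
    }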
diff --git a/src/Bottleneck_distance/include/gudhi/Graph_matching.h b/src/Bottleneck_distance/include/gudhi/Graph_matching.h
new file mode 100644
index 00000000..e1708c5b
--- /dev/null
+++ b/src/Bottleneck_distance/include/gudhi/Graph_matching.h
@@ -0,0 +1,182 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author: Francois Godi
+ *
+ * Copyright (C) 2015 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef GRAPH_MATCHING_H_
+#define GRAPH_MATCHING_H_
+
+#include <gudhi/Neighbors_finder.h>
+
+#include <vector>
+#include <list>
+
+namespace Gudhi {
+
+namespace persistence_diagram {
+
+/** \internal \brief Structure representing a graph matching. The graph is a Persistence_diagrams_graph.
+ *
+ * \ingroup bottleneck_distance
+ */
+class Graph_matching {
+ public:
+ /** \internal \brief Constructor constructing an empty matching. */
+ explicit Graph_matching(Persistence_graph &g);
+ /** \internal \brief Copy operator. */
+ Graph_matching& operator=(const Graph_matching& m);
+ /** \internal \brief Is the matching perfect ? */
+ bool perfect() const;
+ /** \internal \brief Augments the matching with a maximal set of edge-disjoint shortest augmenting paths. */
+ bool multi_augment();
+ /** \internal \brief Sets the maximum length of the edges allowed to be added in the matching, 0 initially. */
+ void set_r(double r);
+
+ private:
+ Persistence_graph& g;
+ double r;
+  /** \internal \brief Given a point from V, provides its matched point in U, or null_point_index() if there is none. */
+ std::vector<int> v_to_u;
+ /** \internal \brief All the unmatched points in U. */
+ std::list<int> unmatched_in_u;
+
+ /** \internal \brief Provides a Layered_neighbors_finder dividing the graph in layers. Basically a BFS. */
+ Layered_neighbors_finder layering() const;
+ /** \internal \brief Augments the matching with a simple path no longer than max_depth. Basically a DFS. */
+ bool augment(Layered_neighbors_finder & layered_nf, int u_start_index, int max_depth);
+ /** \internal \brief Update the matching with the simple augmenting path given as parameter. */
+ void update(std::vector<int> & path);
+};
+
+inline Graph_matching::Graph_matching(Persistence_graph& g)
+ : g(g), r(0.), v_to_u(g.size(), null_point_index()), unmatched_in_u() {
+ for (int u_point_index = 0; u_point_index < g.size(); ++u_point_index)
+ unmatched_in_u.emplace_back(u_point_index);
+}
+
+inline Graph_matching& Graph_matching::operator=(const Graph_matching& m) {
+ g = m.g;
+ r = m.r;
+ v_to_u = m.v_to_u;
+ unmatched_in_u = m.unmatched_in_u;
+ return *this;
+}
+
+inline bool Graph_matching::perfect() const {
+ return unmatched_in_u.empty();
+}
+
+inline bool Graph_matching::multi_augment() {
+ if (perfect())
+ return false;
+ Layered_neighbors_finder layered_nf(layering());
+ int max_depth = layered_nf.vlayers_number()*2 - 1;
+ double rn = sqrt(g.size());
+ // verification of a necessary criterion in order to shortcut if possible
+ if (max_depth < 0 || (unmatched_in_u.size() > rn && max_depth >= rn))
+ return false;
+ bool successful = false;
+ std::list<int> tries(unmatched_in_u);
+ for (auto it = tries.cbegin(); it != tries.cend(); it++)
+ // 'augment' has side-effects which have to be always executed, don't change order
+ successful = augment(layered_nf, *it, max_depth) || successful;
+ return successful;
+}
+
+inline void Graph_matching::set_r(double r) {
+ this->r = r;
+}
+
+inline bool Graph_matching::augment(Layered_neighbors_finder & layered_nf, int u_start_index, int max_depth) {
+ // V vertices have at most one successor, thus when we backtrack from U we can directly pop_back 2 vertices.
+ std::vector<int> path;
+ path.emplace_back(u_start_index);
+ do {
+ if (static_cast<int> (path.size()) > max_depth) {
+ path.pop_back();
+ path.pop_back();
+ }
+ if (path.empty())
+ return false;
+ path.emplace_back(layered_nf.pull_near(path.back(), static_cast<int> (path.size()) / 2));
+ while (path.back() == null_point_index()) {
+ path.pop_back();
+ path.pop_back();
+ if (path.empty())
+ return false;
+ path.pop_back();
+ path.emplace_back(layered_nf.pull_near(path.back(), path.size() / 2));
+ }
+ path.emplace_back(v_to_u.at(path.back()));
+ } while (path.back() != null_point_index());
+ // if v_to_u.at(path.back()) has no successor, path.back() is an exposed vertex
+ path.pop_back();
+ update(path);
+ return true;
+}
+
+inline Layered_neighbors_finder Graph_matching::layering() const {
+ std::list<int> u_vertices(unmatched_in_u);
+ std::list<int> v_vertices;
+ Neighbors_finder nf(g, r);
+ for (int v_point_index = 0; v_point_index < g.size(); ++v_point_index)
+ nf.add(v_point_index);
+ Layered_neighbors_finder layered_nf(g, r);
+ for (int layer = 0; !u_vertices.empty(); layer++) {
+ // one layer is one step in the BFS
+ for (auto it1 = u_vertices.cbegin(); it1 != u_vertices.cend(); ++it1) {
+ std::vector<int> u_succ(nf.pull_all_near(*it1));
+ for (auto it2 = u_succ.begin(); it2 != u_succ.end(); ++it2) {
+ layered_nf.add(*it2, layer);
+ v_vertices.emplace_back(*it2);
+ }
+ }
+ // When the above for finishes, we have progress of one half-step (from U to V) in the BFS
+ u_vertices.clear();
+ bool end = false;
+ for (auto it = v_vertices.cbegin(); it != v_vertices.cend(); it++)
+ if (v_to_u.at(*it) == null_point_index())
+ // we stop when a nearest exposed V vertex (from U exposed vertices) has been found
+ end = true;
+ else
+ u_vertices.emplace_back(v_to_u.at(*it));
+ // When the above for finishes, we have progress of one half-step (from V to U) in the BFS
+ if (end)
+ return layered_nf;
+ v_vertices.clear();
+ }
+ return layered_nf;
+}
+
+inline void Graph_matching::update(std::vector<int>& path) {
+ unmatched_in_u.remove(path.front());
+ for (auto it = path.cbegin(); it != path.cend(); ++it) {
+ // Be careful, the iterator is incremented twice each time
+ int tmp = *it;
+ v_to_u[*(++it)] = tmp;
+ }
+}
+
+
+} // namespace persistence_diagram
+
+} // namespace Gudhi
+
+#endif // GRAPH_MATCHING_H_
diff --git a/src/Bottleneck_distance/include/gudhi/Internal_point.h b/src/Bottleneck_distance/include/gudhi/Internal_point.h
new file mode 100644
index 00000000..0b2d26fe
--- /dev/null
+++ b/src/Bottleneck_distance/include/gudhi/Internal_point.h
@@ -0,0 +1,91 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author: Francois Godi
+ *
+ * Copyright (C) 2015 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef INTERNAL_POINT_H_
+#define INTERNAL_POINT_H_
+
+namespace Gudhi {
+
+namespace persistence_diagram {
+
+/** \internal \brief Returns the used index for encoding none of the points */
+int null_point_index();
+
+/** \internal \typedef \brief Internal_point is the internal points representation, indexes used outside. */
+struct Internal_point {
+ double vec[2];
+ int point_index;
+
+ Internal_point() { }
+
+ Internal_point(double x, double y, int p_i) {
+ vec[0] = x;
+ vec[1] = y;
+ point_index = p_i;
+ }
+
+ double x() const {
+ return vec[ 0 ];
+ }
+
+ double y() const {
+ return vec[ 1 ];
+ }
+
+ double& x() {
+ return vec[ 0 ];
+ }
+
+ double& y() {
+ return vec[ 1 ];
+ }
+
+ bool operator==(const Internal_point& p) const {
+ return point_index == p.point_index;
+ }
+
+ bool operator!=(const Internal_point& p) const {
+ return !(*this == p);
+ }
+};
+
+inline int null_point_index() {
+ return -1;
+}
+
+struct Construct_coord_iterator {
+ typedef const double* result_type;
+
+ const double* operator()(const Internal_point& p) const {
+ return p.vec;
+ }
+
+ const double* operator()(const Internal_point& p, int) const {
+ return p.vec + 2;
+ }
+};
+
+} // namespace persistence_diagram
+
+} // namespace Gudhi
+
+#endif // INTERNAL_POINT_H_
diff --git a/src/Bottleneck_distance/include/gudhi/Neighbors_finder.h b/src/Bottleneck_distance/include/gudhi/Neighbors_finder.h
new file mode 100644
index 00000000..cd5486f8
--- /dev/null
+++ b/src/Bottleneck_distance/include/gudhi/Neighbors_finder.h
@@ -0,0 +1,172 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author: Francois Godi
+ *
+ * Copyright (C) 2015 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef NEIGHBORS_FINDER_H_
+#define NEIGHBORS_FINDER_H_
+
+// Inclusion order is important for CGAL patch
+#include <CGAL/Kd_tree.h>
+#include <CGAL/Kd_tree_node.h>
+#include <CGAL/Orthogonal_k_neighbor_search.h>
+#include <CGAL/Weighted_Minkowski_distance.h>
+#include <CGAL/Search_traits.h>
+
+#include <gudhi/Persistence_graph.h>
+#include <gudhi/Internal_point.h>
+
+#include <unordered_set>
+#include <vector>
+
+namespace Gudhi {
+
+namespace persistence_diagram {
+
+/** \internal \brief data structure used to find any point (including projections) in V near to a query point from U
+ * (which can be a projection).
+ *
+ * V points have to be added manually using their index and before the first pull. A neighbor pulled is automatically
+ * removed.
+ *
+ * \ingroup bottleneck_distance
+ */
+class Neighbors_finder {
+ typedef CGAL::Dimension_tag<2> D;
+ typedef CGAL::Search_traits<double, Internal_point, const double*, Construct_coord_iterator, D> Traits;
+ typedef CGAL::Weighted_Minkowski_distance<Traits> Distance;
+ typedef CGAL::Orthogonal_k_neighbor_search<Traits, Distance> K_neighbor_search;
+ typedef K_neighbor_search::Tree Kd_tree;
+
+ public:
+ /** \internal \brief Constructor taking the near distance definition as parameter. */
+ Neighbors_finder(const Persistence_graph& g, double r);
+ /** \internal \brief A point added will be possibly pulled. */
+ void add(int v_point_index);
+  /** \internal \brief Returns and removes a V point near to the U point given as parameter, or null_point_index() if
+   * there is no such point. */
+ int pull_near(int u_point_index);
+  /** \internal \brief Returns and removes all the V points near to the U point given as parameter. */
+ std::vector<int> pull_all_near(int u_point_index);
+
+ private:
+ const Persistence_graph& g;
+ const double r;
+ Kd_tree kd_t;
+ std::unordered_set<int> projections_f;
+};
+
+/** \internal \brief data structure used to find any point (including projections) in V near to a query point from U
+ * (which can be a projection) in the layer of the layered graph given as parameter.
+ *
+ * V points have to be added manually using their index and before the first pull. A neighbor pulled is automatically
+ * removed.
+ *
+ * \ingroup bottleneck_distance
+ */
+class Layered_neighbors_finder {
+ public:
+ /** \internal \brief Constructor taking the near distance definition as parameter. */
+ Layered_neighbors_finder(const Persistence_graph& g, double r);
+ /** \internal \brief A point added will be possibly pulled. */
+ void add(int v_point_index, int vlayer);
+  /** \internal \brief Returns and removes a V point near to the U point given as parameter, or null_point_index() if
+   * there is no such point. */
+ int pull_near(int u_point_index, int vlayer);
+ /** \internal \brief Returns the number of layers. */
+ int vlayers_number() const;
+
+ private:
+ const Persistence_graph& g;
+ const double r;
+ std::vector<std::unique_ptr<Neighbors_finder>> neighbors_finder;
+};
+
+inline Neighbors_finder::Neighbors_finder(const Persistence_graph& g, double r) :
+ g(g), r(r), kd_t(), projections_f() { }
+
+inline void Neighbors_finder::add(int v_point_index) {
+ if (g.on_the_v_diagonal(v_point_index))
+ projections_f.emplace(v_point_index);
+ else
+ kd_t.insert(g.get_v_point(v_point_index));
+}
+
+inline int Neighbors_finder::pull_near(int u_point_index) {
+ int tmp;
+ int c = g.corresponding_point_in_v(u_point_index);
+ if (g.on_the_u_diagonal(u_point_index) && !projections_f.empty()) {
+ // Any pair of projection is at distance 0
+ tmp = *projections_f.cbegin();
+ projections_f.erase(tmp);
+ } else if (projections_f.count(c) && (g.distance(u_point_index, c) <= r)) {
+ // Is the query point near to its projection ?
+ tmp = c;
+ projections_f.erase(tmp);
+ } else {
+ // Is the query point near to a V point in the plane ?
+ Internal_point u_point = g.get_u_point(u_point_index);
+ std::array<double, 2> w = {
+ {1., 1.}
+ };
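+    // Power parameter 0 in CGAL::Weighted_Minkowski_distance selects the (weighted) L-infinity distance,
+    // which matches the distance used by Persistence_graph::distance.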
+ K_neighbor_search search(kd_t, u_point, 1, 0., true, Distance(0, 2, w.begin(), w.end()));
+ auto it = search.begin();
+ if (it == search.end() || g.distance(u_point_index, it->first.point_index) > r)
+ return null_point_index();
+ tmp = it->first.point_index;
+ kd_t.remove(g.get_v_point(tmp));
+ }
+ return tmp;
+}
+
+inline std::vector<int> Neighbors_finder::pull_all_near(int u_point_index) {
+ std::vector<int> all_pull;
+ int last_pull = pull_near(u_point_index);
+ while (last_pull != null_point_index()) {
+ all_pull.emplace_back(last_pull);
+ last_pull = pull_near(u_point_index);
+ }
+ return all_pull;
+}
+
+inline Layered_neighbors_finder::Layered_neighbors_finder(const Persistence_graph& g, double r) :
+ g(g), r(r), neighbors_finder() { }
+
+inline void Layered_neighbors_finder::add(int v_point_index, int vlayer) {
+ for (int l = neighbors_finder.size(); l <= vlayer; l++)
+ neighbors_finder.emplace_back(std::unique_ptr<Neighbors_finder>(new Neighbors_finder(g, r)));
+ neighbors_finder.at(vlayer)->add(v_point_index);
+}
+
+inline int Layered_neighbors_finder::pull_near(int u_point_index, int vlayer) {
+ if (static_cast<int> (neighbors_finder.size()) <= vlayer)
+ return null_point_index();
+ return neighbors_finder.at(vlayer)->pull_near(u_point_index);
+}
+
+inline int Layered_neighbors_finder::vlayers_number() const {
+ return static_cast<int> (neighbors_finder.size());
+}
+
+} // namespace persistence_diagram
+
+} // namespace Gudhi
+
+#endif // NEIGHBORS_FINDER_H_
diff --git a/src/Bottleneck_distance/include/gudhi/Persistence_graph.h b/src/Bottleneck_distance/include/gudhi/Persistence_graph.h
new file mode 100644
index 00000000..44f4b827
--- /dev/null
+++ b/src/Bottleneck_distance/include/gudhi/Persistence_graph.h
@@ -0,0 +1,188 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author: Francois Godi
+ *
+ * Copyright (C) 2015 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef PERSISTENCE_GRAPH_H_
+#define PERSISTENCE_GRAPH_H_
+
+#include <gudhi/Internal_point.h>
+
+#ifdef GUDHI_USE_TBB
+#include <tbb/parallel_sort.h>
+#endif
+
+#include <vector>
+#include <algorithm>
+#include <limits> // for numeric_limits
+
+namespace Gudhi {
+
+namespace persistence_diagram {
+
+/** \internal \brief Structure representing a Euclidean bipartite graph containing
+ * the points from the two persistence diagrams (including the projections).
+ *
+ * \ingroup bottleneck_distance
+ */
+class Persistence_graph {
+ public:
+ /** \internal \brief Constructor taking 2 PersistenceDiagrams (concept) as parameters. */
+ template<typename Persistence_diagram1, typename Persistence_diagram2>
+ Persistence_graph(const Persistence_diagram1& diag1, const Persistence_diagram2& diag2, double e);
+ /** \internal \brief Is the given point from U the projection of a point in V ? */
+ bool on_the_u_diagonal(int u_point_index) const;
+  /** \internal \brief Is the given point from V the projection of a point in U? */
+ bool on_the_v_diagonal(int v_point_index) const;
+ /** \internal \brief Given a point from V, returns the corresponding (projection or projector) point in U. */
+ int corresponding_point_in_u(int v_point_index) const;
+ /** \internal \brief Given a point from U, returns the corresponding (projection or projector) point in V. */
+ int corresponding_point_in_v(int u_point_index) const;
+ /** \internal \brief Given a point from U and a point from V, returns the distance between those points. */
+ double distance(int u_point_index, int v_point_index) const;
+ /** \internal \brief Returns size = |U| = |V|. */
+ int size() const;
+  /** \internal \brief Bottleneck distance restricted to the infinite (alive) points; infinity if their numbers differ. */
+ double bottleneck_alive() const;
+ /** \internal \brief Returns the O(n^2) sorted distances between the points. */
+ std::vector<double> sorted_distances() const;
+  /** \internal \brief Returns an upper bound on the diameter of the convex hull of all non-infinite points. */
+ double diameter_bound() const;
+  /** \internal \brief Returns the internal point corresponding to the given index in U. */
+ Internal_point get_u_point(int u_point_index) const;
+  /** \internal \brief Returns the internal point corresponding to the given index in V. */
+ Internal_point get_v_point(int v_point_index) const;
+
+ private:
+ std::vector<Internal_point> u;
+ std::vector<Internal_point> v;
+ double b_alive;
+};
+
+template<typename Persistence_diagram1, typename Persistence_diagram2>
+Persistence_graph::Persistence_graph(const Persistence_diagram1 &diag1,
+ const Persistence_diagram2 &diag2, double e)
+ : u(), v(), b_alive(0.) {
+ std::vector<double> u_alive;
+ std::vector<double> v_alive;
+ for (auto it = std::begin(diag1); it != std::end(diag1); ++it) {
+ if (std::get<1>(*it) == std::numeric_limits<double>::infinity())
+ u_alive.push_back(std::get<0>(*it));
+ else if (std::get<1>(*it) - std::get<0>(*it) > e)
+ u.push_back(Internal_point(std::get<0>(*it), std::get<1>(*it), u.size()));
+ }
+ for (auto it = std::begin(diag2); it != std::end(diag2); ++it) {
+ if (std::get<1>(*it) == std::numeric_limits<double>::infinity())
+ v_alive.push_back(std::get<0>(*it));
+ else if (std::get<1>(*it) - std::get<0>(*it) > e)
+ v.push_back(Internal_point(std::get<0>(*it), std::get<1>(*it), v.size()));
+ }
+ if (u.size() < v.size())
+ swap(u, v);
+ std::sort(u_alive.begin(), u_alive.end());
+ std::sort(v_alive.begin(), v_alive.end());
+ if (u_alive.size() != v_alive.size()) {
+ b_alive = std::numeric_limits<double>::infinity();
+ } else {
+ for (auto it_u = u_alive.cbegin(), it_v = v_alive.cbegin(); it_u != u_alive.cend(); ++it_u, ++it_v)
+ b_alive = std::max(b_alive, std::fabs(*it_u - *it_v));
+ }
+}
+
+inline bool Persistence_graph::on_the_u_diagonal(int u_point_index) const {
+ return u_point_index >= static_cast<int> (u.size());
+}
+
+inline bool Persistence_graph::on_the_v_diagonal(int v_point_index) const {
+ return v_point_index >= static_cast<int> (v.size());
+}
+
+inline int Persistence_graph::corresponding_point_in_u(int v_point_index) const {
+ return on_the_v_diagonal(v_point_index) ?
+ v_point_index - static_cast<int> (v.size()) : v_point_index + static_cast<int> (u.size());
+}
+
+inline int Persistence_graph::corresponding_point_in_v(int u_point_index) const {
+ return on_the_u_diagonal(u_point_index) ?
+ u_point_index - static_cast<int> (u.size()) : u_point_index + static_cast<int> (v.size());
+}
+
+inline double Persistence_graph::distance(int u_point_index, int v_point_index) const {
+ if (on_the_u_diagonal(u_point_index) && on_the_v_diagonal(v_point_index))
+ return 0.;
+ Internal_point p_u = get_u_point(u_point_index);
+ Internal_point p_v = get_v_point(v_point_index);
+ return std::max(std::fabs(p_u.x() - p_v.x()), std::fabs(p_u.y() - p_v.y()));
+}
+
+inline int Persistence_graph::size() const {
+ return static_cast<int> (u.size() + v.size());
+}
+
+inline double Persistence_graph::bottleneck_alive() const {
+ return b_alive;
+}
+
+inline std::vector<double> Persistence_graph::sorted_distances() const {
+ std::vector<double> distances;
+ distances.push_back(0.); // for empty diagrams
+ for (int u_point_index = 0; u_point_index < size(); ++u_point_index) {
+ distances.push_back(distance(u_point_index, corresponding_point_in_v(u_point_index)));
+ for (int v_point_index = 0; v_point_index < size(); ++v_point_index)
+ distances.push_back(distance(u_point_index, v_point_index));
+ }
+#ifdef GUDHI_USE_TBB
+ tbb::parallel_sort(distances.begin(), distances.end());
+#else
+ std::sort(distances.begin(), distances.end());
+#endif
+ return distances;
+}
+
+inline Internal_point Persistence_graph::get_u_point(int u_point_index) const {
+ if (!on_the_u_diagonal(u_point_index))
+ return u.at(u_point_index);
+ Internal_point projector = v.at(corresponding_point_in_v(u_point_index));
+ double m = (projector.x() + projector.y()) / 2.;
+ return Internal_point(m, m, u_point_index);
+}
+
+inline Internal_point Persistence_graph::get_v_point(int v_point_index) const {
+ if (!on_the_v_diagonal(v_point_index))
+ return v.at(v_point_index);
+ Internal_point projector = u.at(corresponding_point_in_u(v_point_index));
+ double m = (projector.x() + projector.y()) / 2.;
+ return Internal_point(m, m, v_point_index);
+}
+
+inline double Persistence_graph::diameter_bound() const {
+ double max = 0.;
+ for (auto it = u.cbegin(); it != u.cend(); it++)
+ max = std::max(max, it->y());
+ for (auto it = v.cbegin(); it != v.cend(); it++)
+ max = std::max(max, it->y());
+ return max;
+}
+
+} // namespace persistence_diagram
+
+} // namespace Gudhi
+
+#endif // PERSISTENCE_GRAPH_H_
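To make the indexing convention of Persistence_graph concrete (off-diagonal points first, then the diagonal projections), a small sketch follows; the two diagrams are illustrative and the snippet assumes the GUDHI headers are on the include path.

    // Sketch: Persistence_graph indexing of points and their diagonal projections.
    #include <gudhi/Persistence_graph.h>
    #include <iostream>
    #include <utility>
    #include <vector>

    int main() {
      using Gudhi::persistence_diagram::Persistence_graph;
      std::vector<std::pair<double, double>> diag1 = {{0., 3.}, {2., 5.}, {1., 2.}};
      std::vector<std::pair<double, double>> diag2 = {{0., 2.}};
      Persistence_graph g(diag1, diag2, 0.);
      // size() == |U| == |V| == #finite points of diag1 + #finite points of diag2
      std::cout << "size: " << g.size() << "\n";
      for (int u = 0; u < g.size(); ++u) {
        int v = g.corresponding_point_in_v(u);
        std::cout << "U[" << u << "]" << (g.on_the_u_diagonal(u) ? " (diagonal)" : "")
                  << " <-> V[" << v << "], distance " << g.distance(u, v) << "\n";
      }
      // The O(n^2) candidate values, sorted, as documented above.
      std::cout << "candidate distances: " << g.sorted_distances().size() << "\n";
      return 0;
    }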
diff --git a/src/Bottleneck_distance/test/CMakeLists.txt b/src/Bottleneck_distance/test/CMakeLists.txt
new file mode 100644
index 00000000..3d8e1f95
--- /dev/null
+++ b/src/Bottleneck_distance/test/CMakeLists.txt
@@ -0,0 +1,25 @@
+cmake_minimum_required(VERSION 2.6)
+project(Bottleneck_distance_tests)
+
+
+if (GCOVR_PATH)
+  # for gcovr to make coverage reports - Cobertura Jenkins plugin
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fprofile-arcs -ftest-coverage")
+endif()
+if (GPROF_PATH)
+  # for gprof to make profiling reports - Jenkins
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -pg")
+endif()
+
+if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
+ add_executable ( bottleneckUT bottleneck_unit_test.cpp )
+ target_link_libraries(bottleneckUT ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
+ if (TBB_FOUND)
+ target_link_libraries(bottleneckUT ${TBB_LIBRARIES})
+ endif(TBB_FOUND)
+
+  # Unit tests
+ add_test(NAME bottleneckUT COMMAND ${CMAKE_CURRENT_BINARY_DIR}/bottleneckUT
+ # XML format for Jenkins xUnit plugin
+ --log_format=XML --log_sink=${CMAKE_SOURCE_DIR}/bottleneckUT.xml --log_level=test_suite --report_level=no)
+endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
diff --git a/src/Bottleneck/test/README b/src/Bottleneck_distance/test/README
index 0e7b8673..0e7b8673 100644
--- a/src/Bottleneck/test/README
+++ b/src/Bottleneck_distance/test/README
diff --git a/src/Bottleneck_distance/test/bottleneck_unit_test.cpp b/src/Bottleneck_distance/test/bottleneck_unit_test.cpp
new file mode 100644
index 00000000..e39613b3
--- /dev/null
+++ b/src/Bottleneck_distance/test/bottleneck_unit_test.cpp
@@ -0,0 +1,167 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author: Francois Godi
+ *
+ * Copyright (C) 2015 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+
+#define BOOST_TEST_DYN_LINK
+#define BOOST_TEST_MODULE "bottleneck distance"
+#include <boost/test/unit_test.hpp>
+
+#include <random>
+#include <gudhi/Bottleneck.h>
+
+using namespace Gudhi::persistence_diagram;
+
+int n1 = 81; // a natural number >0
+int n2 = 180; // a natural number >0
+double upper_bound = 406.43; // any real >0
+
+
+std::uniform_real_distribution<double> unif(0., upper_bound);
+std::default_random_engine re;
+std::vector< std::pair<double, double> > v1, v2;
+
+BOOST_AUTO_TEST_CASE(persistence_graph) {
+ // Random construction
+ for (int i = 0; i < n1; i++) {
+ double a = unif(re);
+ double b = unif(re);
+ v1.emplace_back(std::min(a, b), std::max(a, b));
+ }
+ for (int i = 0; i < n2; i++) {
+ double a = unif(re);
+ double b = unif(re);
+ v2.emplace_back(std::min(a, b), std::max(a, b));
+ }
+ Persistence_graph g(v1, v2, 0.);
+ std::vector<double> d(g.sorted_distances());
+ //
+ BOOST_CHECK(!g.on_the_u_diagonal(n1 - 1));
+ BOOST_CHECK(!g.on_the_u_diagonal(n1));
+ BOOST_CHECK(!g.on_the_u_diagonal(n2 - 1));
+ BOOST_CHECK(g.on_the_u_diagonal(n2));
+ BOOST_CHECK(!g.on_the_v_diagonal(n1 - 1));
+ BOOST_CHECK(g.on_the_v_diagonal(n1));
+ BOOST_CHECK(g.on_the_v_diagonal(n2 - 1));
+ BOOST_CHECK(g.on_the_v_diagonal(n2));
+ //
+ BOOST_CHECK(g.corresponding_point_in_u(0) == n2);
+ BOOST_CHECK(g.corresponding_point_in_u(n1) == 0);
+ BOOST_CHECK(g.corresponding_point_in_v(0) == n1);
+ BOOST_CHECK(g.corresponding_point_in_v(n2) == 0);
+ //
+ BOOST_CHECK(g.size() == (n1 + n2));
+ //
+ BOOST_CHECK((int) d.size() == (n1 + n2)*(n1 + n2) + n1 + n2 + 1);
+ BOOST_CHECK(std::count(d.begin(), d.end(), g.distance(0, 0)) > 0);
+ BOOST_CHECK(std::count(d.begin(), d.end(), g.distance(0, n1 - 1)) > 0);
+ BOOST_CHECK(std::count(d.begin(), d.end(), g.distance(0, n1)) > 0);
+ BOOST_CHECK(std::count(d.begin(), d.end(), g.distance(0, n2 - 1)) > 0);
+ BOOST_CHECK(std::count(d.begin(), d.end(), g.distance(0, n2)) > 0);
+ BOOST_CHECK(std::count(d.begin(), d.end(), g.distance(0, (n1 + n2) - 1)) > 0);
+ BOOST_CHECK(std::count(d.begin(), d.end(), g.distance(n1, 0)) > 0);
+ BOOST_CHECK(std::count(d.begin(), d.end(), g.distance(n1, n1 - 1)) > 0);
+ BOOST_CHECK(std::count(d.begin(), d.end(), g.distance(n1, n1)) > 0);
+ BOOST_CHECK(std::count(d.begin(), d.end(), g.distance(n1, n2 - 1)) > 0);
+ BOOST_CHECK(std::count(d.begin(), d.end(), g.distance(n1, n2)) > 0);
+ BOOST_CHECK(std::count(d.begin(), d.end(), g.distance(n1, (n1 + n2) - 1)) > 0);
+ BOOST_CHECK(std::count(d.begin(), d.end(), g.distance((n1 + n2) - 1, 0)) > 0);
+ BOOST_CHECK(std::count(d.begin(), d.end(), g.distance((n1 + n2) - 1, n1 - 1)) > 0);
+ BOOST_CHECK(std::count(d.begin(), d.end(), g.distance((n1 + n2) - 1, n1)) > 0);
+ BOOST_CHECK(std::count(d.begin(), d.end(), g.distance((n1 + n2) - 1, n2 - 1)) > 0);
+ BOOST_CHECK(std::count(d.begin(), d.end(), g.distance((n1 + n2) - 1, n2)) > 0);
+ BOOST_CHECK(std::count(d.begin(), d.end(), g.distance((n1 + n2) - 1, (n1 + n2) - 1)) > 0);
+}
+
+BOOST_AUTO_TEST_CASE(neighbors_finder) {
+ Persistence_graph g(v1, v2, 0.);
+ Neighbors_finder nf(g, 1.);
+ for (int v_point_index = 1; v_point_index < ((n2 + n1)*9 / 10); v_point_index += 2)
+ nf.add(v_point_index);
+ //
+ int v_point_index_1 = nf.pull_near(n2 / 2);
+ BOOST_CHECK((v_point_index_1 == -1) || (g.distance(n2 / 2, v_point_index_1) <= 1.));
+ std::vector<int> l = nf.pull_all_near(n2 / 2);
+ bool v = true;
+ for (auto it = l.cbegin(); it != l.cend(); ++it)
+    v = v && (g.distance(n2 / 2, *it) <= 1.);  // every pulled point must lie within the search radius
+ BOOST_CHECK(v);
+ int v_point_index_2 = nf.pull_near(n2 / 2);
+ BOOST_CHECK(v_point_index_2 == -1);
+}
+
+BOOST_AUTO_TEST_CASE(layered_neighbors_finder) {
+ Persistence_graph g(v1, v2, 0.);
+ Layered_neighbors_finder lnf(g, 1.);
+ for (int v_point_index = 1; v_point_index < ((n2 + n1)*9 / 10); v_point_index += 2)
+ lnf.add(v_point_index, v_point_index % 7);
+ //
+ int v_point_index_1 = lnf.pull_near(n2 / 2, 6);
+ BOOST_CHECK((v_point_index_1 == -1) || (g.distance(n2 / 2, v_point_index_1) <= 1.));
+ int v_point_index_2 = lnf.pull_near(n2 / 2, 6);
+ BOOST_CHECK(v_point_index_2 == -1);
+ v_point_index_1 = lnf.pull_near(n2 / 2, 0);
+ BOOST_CHECK((v_point_index_1 == -1) || (g.distance(n2 / 2, v_point_index_1) <= 1.));
+ v_point_index_2 = lnf.pull_near(n2 / 2, 0);
+ BOOST_CHECK(v_point_index_2 == -1);
+}
+
+BOOST_AUTO_TEST_CASE(graph_matching) {
+ Persistence_graph g(v1, v2, 0.);
+ Graph_matching m1(g);
+ m1.set_r(0.);
+ int e = 0;
+ while (m1.multi_augment())
+ ++e;
+ BOOST_CHECK(e > 0);
+ BOOST_CHECK(e <= 2 * sqrt(2 * (n1 + n2)));
+ Graph_matching m2 = m1;
+ BOOST_CHECK(!m2.multi_augment());
+ m2.set_r(upper_bound);
+ e = 0;
+ while (m2.multi_augment())
+ ++e;
+ BOOST_CHECK(e <= 2 * sqrt(2 * (n1 + n2)));
+ BOOST_CHECK(m2.perfect());
+ BOOST_CHECK(!m1.perfect());
+}
+
+BOOST_AUTO_TEST_CASE(global) {
+ std::uniform_real_distribution<double> unif1(0., upper_bound);
+ std::uniform_real_distribution<double> unif2(upper_bound / 10000., upper_bound / 100.);
+ std::default_random_engine re;
+ std::vector< std::pair<double, double> > v1, v2;
+ for (int i = 0; i < n1; i++) {
+ double a = unif1(re);
+ double b = unif1(re);
+ double x = unif2(re);
+ double y = unif2(re);
+ v1.emplace_back(std::min(a, b), std::max(a, b));
+ v2.emplace_back(std::min(a, b) + std::min(x, y), std::max(a, b) + std::max(x, y));
+ if (i % 5 == 0)
+ v1.emplace_back(std::min(a, b), std::min(a, b) + x);
+ if (i % 3 == 0)
+ v2.emplace_back(std::max(a, b), std::max(a, b) + y);
+ }
+ BOOST_CHECK(bottleneck_distance(v1, v2, 0.) <= upper_bound / 100.);
+ BOOST_CHECK(bottleneck_distance(v1, v2, upper_bound / 10000.) <= upper_bound / 100. + upper_bound / 10000.);
+ BOOST_CHECK(std::abs(bottleneck_distance(v1, v2, 0.) - bottleneck_distance(v1, v2, upper_bound / 10000.)) <= upper_bound / 10000.);
+}
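The tests above exercise the internals; the public entry point is the bottleneck_distance function used in the last test case. A minimal, hedged usage sketch (the diagram contents are made up; the third argument bounds the approximation error and 0. asks for the exact value, as the "global" test checks):

    // Sketch: computing the bottleneck distance between two diagrams.
    #include <gudhi/Bottleneck.h>
    #include <iostream>
    #include <utility>
    #include <vector>

    int main() {
      std::vector<std::pair<double, double>> diag1 = {{2.7, 3.7}, {9.6, 14.}, {34.2, 34.974}};
      std::vector<std::pair<double, double>> diag2 = {{2.8, 4.45}, {9.5, 14.1}};
      double exact = Gudhi::persistence_diagram::bottleneck_distance(diag1, diag2, 0.);
      double approx = Gudhi::persistence_diagram::bottleneck_distance(diag1, diag2, 0.01);
      // |exact - approx| <= 0.01, as asserted in the "global" test above.
      std::cout << "bottleneck distance: " << exact << " (approximate: " << approx << ")\n";
      return 0;
    }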
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index 337079b1..3779cb80 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -9,141 +9,86 @@ enable_testing()
list(APPEND CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake/modules/")
-find_package(Boost REQUIRED COMPONENTS system filesystem program_options chrono timer date_time REQUIRED)
+# For "make doxygen"
+set(GUDHI_USER_VERSION_DIR ${CMAKE_SOURCE_DIR})
+include(${CMAKE_MODULE_PATH}/GUDHI_doxygen_target.txt)
-if(NOT Boost_FOUND)
- message(FATAL_ERROR "NOTICE: This demo requires Boost and will not be compiled.")
+# For third-party libraries management - To be done last as CGAL updates CMAKE_MODULE_PATH
+include("${CMAKE_MODULE_PATH}/GUDHI_third_party_libraries.txt")
+
+if(MSVC)
+ # Turn off some VC++ warnings
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4267 /wd4668 /wd4311 /wd4800 /wd4820 /wd4503 /wd4244 /wd4345 /wd4996 /wd4396 /wd4018")
else()
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -Wall -pedantic")
+endif()
- # For "make doxygen"
- set(GUDHI_USER_VERSION_DIR ${CMAKE_SOURCE_DIR})
- include(${CMAKE_MODULE_PATH}/GUDHI_doxygen_target.txt)
-
- find_package(GMP)
- if(GMP_FOUND)
- message(STATUS "GMP_LIBRARIES = ${GMP_LIBRARIES}")
- INCLUDE_DIRECTORIES(${GMP_INCLUDE_DIR})
- find_package(GMPXX)
- if(GMPXX_FOUND)
- message(STATUS "GMPXX_LIBRARIES = ${GMPXX_LIBRARIES}")
- INCLUDE_DIRECTORIES(${GMPXX_INCLUDE_DIR})
- endif()
- endif()
-
- # In CMakeLists.txt, when include(${CGAL_USE_FILE}), CMAKE_CXX_FLAGS are overwritten.
- # cf. http://doc.cgal.org/latest/Manual/installation.html#title40
- # A workaround is to include(${CGAL_USE_FILE}) before adding "-std=c++11".
- # A fix would be to use https://cmake.org/cmake/help/v3.1/prop_gbl/CMAKE_CXX_KNOWN_FEATURES.html
- # or even better https://cmake.org/cmake/help/v3.1/variable/CMAKE_CXX_STANDARD.html
- # but it implies to use cmake version 3.1 at least.
-
- # find CGAL in QUIET mode for cmake to be less verbose when CGAL is not found.
- find_package(CGAL QUIET)
- # Only CGAL versions > 4.4 supports what Gudhi uses from CGAL
- if (CGAL_VERSION VERSION_LESS 4.4.0 AND CGAL_FOUND)
- message("CGAL version ${CGAL_VERSION} is considered too old to be used by Gudhi.")
- unset(CGAL_FOUND)
- endif(CGAL_VERSION VERSION_LESS 4.4.0 AND CGAL_FOUND)
- if(CGAL_FOUND)
- message(STATUS "CGAL version: ${CGAL_VERSION}.")
- include( ${CGAL_USE_FILE} )
- endif()
-
- if(MSVC)
- # Turn off some VC++ warnings
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4267 /wd4668 /wd4311 /wd4800 /wd4820 /wd4503 /wd4244 /wd4345 /wd4996 /wd4396 /wd4018")
- else()
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -Wall -pedantic")
- endif()
-
- if(CMAKE_BUILD_TYPE MATCHES Debug)
- message("++ Debug compilation flags are: ${CMAKE_CXX_FLAGS} ${CMAKE_CXX_FLAGS_DEBUG}")
- else()
- message("++ Release compilation flags are: ${CMAKE_CXX_FLAGS} ${CMAKE_CXX_FLAGS_RELEASE}")
- endif()
-
- set(Boost_USE_STATIC_LIBS ON)
- set(Boost_USE_MULTITHREADED ON)
- set(Boost_USE_STATIC_RUNTIME OFF)
-
- # Find TBB package for parallel sort - not mandatory, just optional.
- set(TBB_FIND_QUIETLY ON)
- find_package(TBB)
- if (TBB_FOUND)
- include(${TBB_USE_FILE})
- message("TBB found in ${TBB_LIBRARY_DIRS}")
- add_definitions(-DGUDHI_USE_TBB)
- endif()
-
- find_package(Eigen3 3.1.0)
- if (EIGEN3_FOUND)
- message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.")
- include( ${EIGEN3_USE_FILE} )
- #include_directories (BEFORE "../../include")
- endif (EIGEN3_FOUND)
-
- # BOOST ISSUE result_of vs C++11
- add_definitions(-DBOOST_RESULT_OF_USE_DECLTYPE)
- # BOOST ISSUE with Libraries name resolution under Windows
- add_definitions(-DBOOST_ALL_NO_LIB)
-
- INCLUDE_DIRECTORIES(${Boost_INCLUDE_DIRS})
- LINK_DIRECTORIES(${Boost_LIBRARY_DIRS})
-
- if (DEBUG_TRACES)
- message(STATUS "DEBUG_TRACES are activated")
- # For programs to be more verbose
- add_definitions(-DDEBUG_TRACES)
- endif()
-
- #---------------------------------------------------------------------------------------
- # Gudhi compilation part
- include_directories(include)
-
- add_subdirectory(example/common)
- add_subdirectory(example/Simplex_tree)
- add_subdirectory(example/Persistent_cohomology)
- add_subdirectory(example/Skeleton_blocker)
- add_subdirectory(example/Contraction)
- add_subdirectory(example/Bitmap_cubical_complex)
- add_subdirectory(example/Witness_complex)
- add_subdirectory(example/Alpha_complex)
- add_subdirectory(example/Bottleneck)
-
-
- # data points generator
- add_subdirectory(data/points/generator)
-
- # Please let GudhUI in last compilation position as QT is known to modify CMAKE_CXX_FLAGS
- # GudhUI
- add_subdirectory(GudhUI)
- #---------------------------------------------------------------------------------------
-
- #---------------------------------------------------------------------------------------
- # GUDHIConfig.cmake
- # Export the package for use from the build-tree
- # (this registers the build-tree with a global CMake-registry)
- export(PACKAGE GUDHI)
-
- message("++ make install will install ${PROJECT_NAME} in the following directory : ${CMAKE_INSTALL_PREFIX}")
- # Create the GUDHIConfig.cmake and GUDHIConfigVersion files
- set(CONF_INCLUDE_DIRS "${CMAKE_INSTALL_PREFIX}/include")
- configure_file(GUDHIConfig.cmake.in "${PROJECT_BINARY_DIR}/GUDHIConfig.cmake" @ONLY)
- configure_file(GUDHIConfigVersion.cmake.in "${PROJECT_BINARY_DIR}/GUDHIConfigVersion.cmake" @ONLY)
-
- #---------------------------------------------------------------------------------------
-
- #---------------------------------------------------------------------------------------
- # Gudhi installation part
-
- # Install the GUDHIConfig.cmake and GUDHIConfigVersion.cmake
- install(FILES
- "${PROJECT_BINARY_DIR}/GUDHIConfig.cmake"
- "${PROJECT_BINARY_DIR}/GUDHIConfigVersion.cmake"
- DESTINATION share/gudhi)
-
- # install the include file on "make install"
- install(DIRECTORY include/gudhi DESTINATION include)
- #---------------------------------------------------------------------------------------
-
-endif()
+if(CMAKE_BUILD_TYPE MATCHES Debug)
+ message("++ Debug compilation flags are: ${CMAKE_CXX_FLAGS} ${CMAKE_CXX_FLAGS_DEBUG}")
+else()
+ message("++ Release compilation flags are: ${CMAKE_CXX_FLAGS} ${CMAKE_CXX_FLAGS_RELEASE}")
+endif()
+
+if (DEBUG_TRACES)
+ message(STATUS "DEBUG_TRACES are activated")
+ # For programs to be more verbose
+ add_definitions(-DDEBUG_TRACES)
+endif()
+
+#---------------------------------------------------------------------------------------
+# Gudhi compilation part
+include_directories(include)
+
+add_subdirectory(example/common)
+add_subdirectory(example/Simplex_tree)
+add_subdirectory(example/Persistent_cohomology)
+add_subdirectory(example/Skeleton_blocker)
+add_subdirectory(example/Contraction)
+add_subdirectory(example/Bitmap_cubical_complex)
+add_subdirectory(example/Witness_complex)
+add_subdirectory(example/Alpha_complex)
+add_subdirectory(example/Rips_complex)
+add_subdirectory(example/Spatial_searching)
+add_subdirectory(example/Subsampling)
+add_subdirectory(example/Tangential_complex)
+add_subdirectory(example/Bottleneck_distance)
+add_subdirectory(example/Gudhi_stat)
+
+# data points generator
+add_subdirectory(data/points/generator)
+
+# Please leave GudhUI in the last compilation position, as Qt is known to modify CMAKE_CXX_FLAGS
+# GudhUI
+add_subdirectory(GudhUI)
+
+# This variable is used by Cython CMakeLists.txt to know its path
+set(GUDHI_CYTHON_PATH "cython")
+add_subdirectory(${GUDHI_CYTHON_PATH})
+#---------------------------------------------------------------------------------------
+
+#---------------------------------------------------------------------------------------
+# GUDHIConfig.cmake
+# Export the package for use from the build-tree
+# (this registers the build-tree with a global CMake-registry)
+export(PACKAGE GUDHI)
+
+message("++ make install will install ${PROJECT_NAME} in the following directory : ${CMAKE_INSTALL_PREFIX}")
+# Create the GUDHIConfig.cmake and GUDHIConfigVersion files
+set(CONF_INCLUDE_DIRS "${CMAKE_INSTALL_PREFIX}/include")
+configure_file(GUDHIConfig.cmake.in "${PROJECT_BINARY_DIR}/GUDHIConfig.cmake" @ONLY)
+configure_file(GUDHIConfigVersion.cmake.in "${PROJECT_BINARY_DIR}/GUDHIConfigVersion.cmake" @ONLY)
+
+#---------------------------------------------------------------------------------------
+
+#---------------------------------------------------------------------------------------
+# Gudhi installation part
+
+# Install the GUDHIConfig.cmake and GUDHIConfigVersion.cmake
+install(FILES
+ "${PROJECT_BINARY_DIR}/GUDHIConfig.cmake"
+ "${PROJECT_BINARY_DIR}/GUDHIConfigVersion.cmake"
+ DESTINATION share/gudhi)
+
+# install the include file on "make install"
+install(DIRECTORY include/gudhi DESTINATION include)
+#---------------------------------------------------------------------------------------
diff --git a/src/Contraction/doc/COPYRIGHT b/src/Contraction/doc/COPYRIGHT
new file mode 100644
index 00000000..1de850d7
--- /dev/null
+++ b/src/Contraction/doc/COPYRIGHT
@@ -0,0 +1,18 @@
+The files of this directory are part of the Gudhi Library. The Gudhi library
+(Geometric Understanding in Higher Dimensions) is a generic C++ library for
+computational topology.
+
+Author(s): David Salinas
+Copyright (C) 2015 INRIA
+
+This program is free software: you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free Software
+Foundation, either version 3 of the License, or (at your option) any later
+version.
+
+This program is distributed in the hope that it will be useful, but WITHOUT
+ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License along with
+this program. If not, see <http://www.gnu.org/licenses/>.
diff --git a/src/Contraction/example/Garland_heckbert.cpp b/src/Contraction/example/Garland_heckbert.cpp
index 5347830c..8b5a6a6c 100644
--- a/src/Contraction/example/Garland_heckbert.cpp
+++ b/src/Contraction/example/Garland_heckbert.cpp
@@ -35,10 +35,6 @@
#include "Garland_heckbert/Error_quadric.h"
-using namespace Gudhi;
-using namespace skeleton_blocker;
-using namespace contraction;
-
struct Geometry_trait {
typedef Point_d Point;
};
@@ -46,7 +42,8 @@ struct Geometry_trait {
/**
* The vertex stored in the complex contains a quadric.
*/
-struct Garland_heckbert_traits : public Skeleton_blocker_simple_geometric_traits<Geometry_trait> {
+struct Garland_heckbert_traits
+ : public Gudhi::skeleton_blocker::Skeleton_blocker_simple_geometric_traits<Geometry_trait> {
public:
struct Garland_heckbert_vertex : public Simple_geometric_vertex {
Error_quadric<Geometry_trait::Point> quadric;
@@ -54,9 +51,9 @@ struct Garland_heckbert_traits : public Skeleton_blocker_simple_geometric_traits
typedef Garland_heckbert_vertex Graph_vertex;
};
-typedef Skeleton_blocker_geometric_complex< Garland_heckbert_traits > Complex;
-typedef Edge_profile<Complex> EdgeProfile;
-typedef Skeleton_blocker_contractor<Complex> Complex_contractor;
+using Complex = Gudhi::skeleton_blocker::Skeleton_blocker_geometric_complex< Garland_heckbert_traits >;
+using EdgeProfile = Gudhi::contraction::Edge_profile<Complex>;
+using Complex_contractor = Gudhi::contraction::Skeleton_blocker_contractor<Complex>;
/**
* How the new vertex is placed after an edge collapse : here it is placed at
@@ -68,7 +65,7 @@ class GH_placement : public Gudhi::contraction::Placement_policy<EdgeProfile> {
public:
typedef Gudhi::contraction::Placement_policy<EdgeProfile>::Placement_type Placement_type;
- GH_placement(Complex& complex) : complex_(complex) { }
+ GH_placement(Complex& complex) : complex_(complex) { (void)complex_; }
Placement_type operator()(const EdgeProfile& profile) const override {
auto sum_quad(profile.v0().quadric);
@@ -92,7 +89,7 @@ class GH_cost : public Gudhi::contraction::Cost_policy<EdgeProfile> {
public:
typedef Gudhi::contraction::Cost_policy<EdgeProfile>::Cost_type Cost_type;
- GH_cost(Complex& complex) : complex_(complex) { }
+ GH_cost(Complex& complex) : complex_(complex) { (void)complex_; }
Cost_type operator()(EdgeProfile const& profile, boost::optional<Point> const& new_point) const override {
Cost_type res;
@@ -114,7 +111,7 @@ class GH_visitor : public Gudhi::contraction::Contraction_visitor<EdgeProfile> {
Complex& complex_;
public:
- GH_visitor(Complex& complex) : complex_(complex) { }
+ GH_visitor(Complex& complex) : complex_(complex) { (void)complex_; }
// Compute quadrics for every vertex v
// The quadric of v consists in the sum of quadric
@@ -153,7 +150,7 @@ int main(int argc, char *argv[]) {
typedef Complex::Vertex_handle Vertex_handle;
// load the points
- Skeleton_blocker_off_reader<Complex> off_reader(argv[1], complex);
+ Gudhi::skeleton_blocker::Skeleton_blocker_off_reader<Complex> off_reader(argv[1], complex);
if (!off_reader.is_valid()) {
std::cerr << "Unable to read file:" << argv[1] << std::endl;
return EXIT_FAILURE;
@@ -174,7 +171,7 @@ int main(int argc, char *argv[]) {
Complex_contractor contractor(complex,
new GH_cost(complex),
new GH_placement(complex),
- contraction::make_link_valid_contraction<EdgeProfile>(),
+ Gudhi::contraction::make_link_valid_contraction<EdgeProfile>(),
new GH_visitor(complex));
std::cout << "Contract " << num_contractions << " edges" << std::endl;
@@ -186,7 +183,7 @@ int main(int argc, char *argv[]) {
complex.num_triangles() << " triangles." << std::endl;
// write simplified complex
- Skeleton_blocker_off_writer<Complex> off_writer(argv[2], complex);
+ Gudhi::skeleton_blocker::Skeleton_blocker_off_writer<Complex> off_writer(argv[2], complex);
return EXIT_SUCCESS;
}
diff --git a/src/Contraction/example/Garland_heckbert/Error_quadric.h b/src/Contraction/example/Garland_heckbert/Error_quadric.h
index 076f1be0..e7dafaa0 100644
--- a/src/Contraction/example/Garland_heckbert/Error_quadric.h
+++ b/src/Contraction/example/Garland_heckbert/Error_quadric.h
@@ -21,8 +21,8 @@
*
*/
-#ifndef ERROR_QUADRIC_H_
-#define ERROR_QUADRIC_H_
+#ifndef GARLAND_HECKBERT_ERROR_QUADRIC_H_
+#define GARLAND_HECKBERT_ERROR_QUADRIC_H_
#include <boost/optional/optional.hpp>
@@ -179,4 +179,4 @@ template <typename Point> class Error_quadric {
}
};
-#endif // ERROR_QUADRIC_H_
+#endif // GARLAND_HECKBERT_ERROR_QUADRIC_H_
diff --git a/src/Contraction/example/Rips_contraction.cpp b/src/Contraction/example/Rips_contraction.cpp
index 7f19e239..8289b1d3 100644
--- a/src/Contraction/example/Rips_contraction.cpp
+++ b/src/Contraction/example/Rips_contraction.cpp
@@ -27,19 +27,15 @@
#include <boost/timer/timer.hpp>
#include <iostream>
-using namespace Gudhi;
-using namespace skeleton_blocker;
-using namespace contraction;
-
struct Geometry_trait {
typedef Point_d Point;
};
-typedef Geometry_trait::Point Point;
-typedef Skeleton_blocker_simple_geometric_traits<Geometry_trait> Complex_geometric_traits;
-typedef Skeleton_blocker_geometric_complex< Complex_geometric_traits > Complex;
-typedef Edge_profile<Complex> Profile;
-typedef Skeleton_blocker_contractor<Complex> Complex_contractor;
+using Complex_geometric_traits = Gudhi::skeleton_blocker::Skeleton_blocker_simple_geometric_traits<Geometry_trait>;
+using Complex = Gudhi::skeleton_blocker::Skeleton_blocker_geometric_complex< Complex_geometric_traits >;
+using Profile = Gudhi::contraction::Edge_profile<Complex>;
+using Complex_contractor = Gudhi::contraction::Skeleton_blocker_contractor<Complex>;
+
template<typename ComplexType>
void build_rips(ComplexType& complex, double offset) {
@@ -62,7 +58,7 @@ int main(int argc, char *argv[]) {
Complex complex;
// load only the points
- Skeleton_blocker_off_reader<Complex> off_reader(argv[1], complex, true);
+ Gudhi::skeleton_blocker::Skeleton_blocker_off_reader<Complex> off_reader(argv[1], complex, true);
if (!off_reader.is_valid()) {
std::cerr << "Unable to read file:" << argv[1] << std::endl;
return EXIT_FAILURE;
@@ -79,10 +75,10 @@ int main(int argc, char *argv[]) {
complex.num_edges() << " edges" << std::endl;
Complex_contractor contractor(complex,
- new Edge_length_cost<Profile>,
- contraction::make_first_vertex_placement<Profile>(),
- contraction::make_link_valid_contraction<Profile>(),
- contraction::make_remove_popable_blockers_visitor<Profile>());
+ new Gudhi::contraction::Edge_length_cost<Profile>,
+ Gudhi::contraction::make_first_vertex_placement<Profile>(),
+ Gudhi::contraction::make_link_valid_contraction<Profile>(),
+ Gudhi::contraction::make_remove_popable_blockers_visitor<Profile>());
contractor.contract_edges();
std::cout << "Counting final number of simplices \n";
diff --git a/src/Doxyfile b/src/Doxyfile
index dd9a33fb..d2d0a447 100644
--- a/src/Doxyfile
+++ b/src/Doxyfile
@@ -38,7 +38,7 @@ PROJECT_NAME = "GUDHI"
# could be handy for archiving the generated documentation or if some version
# control system is used.
-PROJECT_NUMBER = "1.3.1"
+PROJECT_NUMBER = "2.0.0"
# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a
@@ -500,7 +500,7 @@ HIDE_SCOPE_NAMES = NO
# the files that are included by a file in the documentation of that file.
# The default value is: YES.
-SHOW_INCLUDE_FILES = YES
+SHOW_INCLUDE_FILES = NO
# If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each
# grouped member an include statement to the documentation, telling the reader
@@ -781,10 +781,11 @@ RECURSIVE = YES
# run.
EXCLUDE = data/ \
- example/ \
- GudhUI/ \
- cmake/ \
- debian/
+ example/ \
+ GudhUI/ \
+ cmake/ \
+ src/cython/ \
+ include/gudhi_patches/
# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or
# directories that are symbolic links (a Unix file system feature) are excluded
@@ -845,7 +846,12 @@ IMAGE_PATH = doc/Skeleton_blocker/ \
doc/Simplex_tree/ \
doc/Persistent_cohomology/ \
doc/Witness_complex/ \
- doc/Bitmap_cubical_complex/
+ doc/Bitmap_cubical_complex/ \
+ doc/Rips_complex/ \
+ doc/Subsampling/ \
+ doc/Spatial_searching/ \
+ doc/Tangential_complex/ \
+ doc/Bottleneck_distance/
# The INPUT_FILTER tag can be used to specify a program that doxygen should
# invoke to filter for each input file. Doxygen will invoke the filter program
@@ -2147,7 +2153,7 @@ TEMPLATE_RELATIONS = YES
# The default value is: YES.
# This tag requires that the tag HAVE_DOT is set to YES.
-INCLUDE_GRAPH = YES
+INCLUDE_GRAPH = NO
# If the INCLUDED_BY_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are
# set to YES then doxygen will generate a graph for each documented file showing
@@ -2156,7 +2162,7 @@ INCLUDE_GRAPH = YES
# The default value is: YES.
# This tag requires that the tag HAVE_DOT is set to YES.
-INCLUDED_BY_GRAPH = YES
+INCLUDED_BY_GRAPH = NO
# If the CALL_GRAPH tag is set to YES then doxygen will generate a call
# dependency graph for every global function or class method.
diff --git a/src/GudhUI/CMakeLists.txt b/src/GudhUI/CMakeLists.txt
index a43294ea..ca2e47c1 100644
--- a/src/GudhUI/CMakeLists.txt
+++ b/src/GudhUI/CMakeLists.txt
@@ -1,46 +1,42 @@
cmake_minimum_required(VERSION 2.8)
project(GudhUI)
-find_package(Qt4)
+find_package(Qt5 COMPONENTS Widgets Xml OpenGL)
find_package(QGLViewer)
find_package(OpenGL)
-if ( CGAL_FOUND AND QT4_FOUND AND OPENGL_FOUND AND QGLVIEWER_FOUND )
- set( QT_USE_QTXML TRUE )
- set( QT_USE_QTMAIN TRUE )
- set( QT_USE_QTSCRIPT TRUE )
- set( QT_USE_QTOPENGL TRUE )
- SET(Boost_USE_STATIC_LIBS ON)
- SET(Boost_USE_MULTITHREAD OFF)
+if (CGAL_VERSION VERSION_EQUAL 4.8.0)
+  message(ERROR " GudhUI does not compile with CGAL 4.8.0; versions 4.8.1, 4.8.2 and 4.9 are OK.")
+endif()
+
+if (NOT CGAL_FOUND)
+ message(ERROR " GudhUI requires CGAL and will not be compiled.")
+endif()
+
+if (NOT Qt5_FOUND)
+ message(ERROR " GudhUI requires Qt5 and will not be compiled.")
+endif()
+
+if (NOT OPENGL_FOUND)
+ message(ERROR " GudhUI requires OpenGL and will not be compiled.")
+endif()
+
+if (NOT QGLVIEWER_FOUND)
+ message(ERROR " GudhUI requires QGLViewer and will not be compiled.")
+endif()
- include(${QT_USE_FILE})
+
+if ( CGAL_FOUND AND Qt5_FOUND AND OPENGL_FOUND AND QGLVIEWER_FOUND AND NOT CGAL_VERSION VERSION_EQUAL 4.8.0)
+
+ set(CMAKE_AUTOMOC ON)
+ set(CMAKE_AUTOUIC ON)
+ set(CMAKE_INCLUDE_CURRENT_DIR ON)
+
+ SET(Boost_USE_STATIC_LIBS ON)
+ SET(Boost_USE_MULTITHREAD OFF)
include_directories (${QGLVIEWER_INCLUDE_DIR})
- include_directories(.)
- # qt : ui file, created wih Qt Designer ###############
- set(CMAKE_CURRENT_BINARY_DIR "${CMAKE_CURRENT_SOURCE_DIR}/gui")
- qt4_wrap_ui( uis
- gui/main_window.ui
- gui/MenuEdgeContraction.ui
- gui/KNearestNeighborsMenu.ui
- gui/UniformNeighborsMenu.ui
- gui/PersistenceMenu.ui
- )
-
- set(CMAKE_CURRENT_BINARY_DIR "${CMAKE_CURRENT_SOURCE_DIR}/gui")
- qt4_automoc(
- gui/MainWindow.cpp
- gui/Menu_k_nearest_neighbors.cpp
- gui/Menu_uniform_neighbors.cpp
- gui/Menu_edge_contraction.cpp
- gui/Menu_persistence.cpp
- )
-
- set(CMAKE_CURRENT_BINARY_DIR "${CMAKE_CURRENT_SOURCE_DIR}/view")
- qt4_automoc(view/Viewer_instructor.cpp
- view/Viewer.cpp
- )
#####################################################################
add_executable ( GudhUI
@@ -52,10 +48,9 @@ if ( CGAL_FOUND AND QT4_FOUND AND OPENGL_FOUND AND QGLVIEWER_FOUND )
gui/Menu_persistence.cpp
view/Viewer_instructor.cpp
view/Viewer.cpp
- ${uis}
)
-
- target_link_libraries( GudhUI ${QT_LIBRARIES} ${QGLVIEWER_LIBRARIES} )
+ target_link_libraries( GudhUI Qt5::Widgets Qt5::Xml Qt5::OpenGL )
+ target_link_libraries( GudhUI ${QGLVIEWER_LIBRARIES} )
target_link_libraries( GudhUI ${OPENGL_gl_LIBRARY} ${OPENGL_glu_LIBRARY} )
if (TBB_FOUND)
target_link_libraries( GudhUI ${TBB_LIBRARIES})
@@ -64,5 +59,5 @@ endif()
###############################################################################
else()
- message(STATUS "NOTICE: GudhUI requires CGAL, the QGLViewer, OpenGL and Qt4, and will not be compiled.")
+ message(STATUS "NOTICE: GudhUI requires CGAL, the QGLViewer, OpenGL and Qt5, and will not be compiled.")
endif()
diff --git a/src/GudhUI/gui/MainWindow.h b/src/GudhUI/gui/MainWindow.h
index c8c3fcf6..15664dcb 100644
--- a/src/GudhUI/gui/MainWindow.h
+++ b/src/GudhUI/gui/MainWindow.h
@@ -23,8 +23,8 @@
#ifndef GUI_MAINWINDOW_H_
#define GUI_MAINWINDOW_H_
-// Workaround for moc-qt4 not parsing boost headers
-#include <CGAL/config.h>
+// Workaround https://svn.boost.org/trac/boost/ticket/12534
+#include <boost/container/flat_map.hpp>
#include <QMainWindow>
#include "ui_main_window.h"
diff --git a/src/GudhUI/gui/Menu_edge_contraction.h b/src/GudhUI/gui/Menu_edge_contraction.h
index 08f0bf67..2d5640e8 100644
--- a/src/GudhUI/gui/Menu_edge_contraction.h
+++ b/src/GudhUI/gui/Menu_edge_contraction.h
@@ -23,11 +23,8 @@
#ifndef GUI_MENU_EDGE_CONTRACTION_H_
#define GUI_MENU_EDGE_CONTRACTION_H_
-// Workaround for moc-qt4 not parsing boost headers
-#include <CGAL/config.h>
-
#include "gui/MainWindow.h"
-#include "gui/ui_MenuEdgeContraction.h"
+#include "ui_MenuEdgeContraction.h"
#include "model/Model.h"
diff --git a/src/GudhUI/gui/Menu_k_nearest_neighbors.h b/src/GudhUI/gui/Menu_k_nearest_neighbors.h
index 8088b768..77303b67 100644
--- a/src/GudhUI/gui/Menu_k_nearest_neighbors.h
+++ b/src/GudhUI/gui/Menu_k_nearest_neighbors.h
@@ -24,7 +24,7 @@
#define GUI_MENU_K_NEAREST_NEIGHBORS_H_
#include <QMainWindow>
-#include "gui/ui_KNearestNeighborsMenu.h"
+#include "ui_KNearestNeighborsMenu.h"
class QWidget;
diff --git a/src/GudhUI/gui/Menu_persistence.h b/src/GudhUI/gui/Menu_persistence.h
index 8c4df158..1a2a2408 100644
--- a/src/GudhUI/gui/Menu_persistence.h
+++ b/src/GudhUI/gui/Menu_persistence.h
@@ -25,7 +25,7 @@
#define GUI_MENU_PERSISTENCE_H_
#include <QMainWindow>
-#include "gui/ui_PersistenceMenu.h"
+#include "ui_PersistenceMenu.h"
class QWidget;
diff --git a/src/GudhUI/gui/Menu_uniform_neighbors.h b/src/GudhUI/gui/Menu_uniform_neighbors.h
index 0b6f65fe..61316966 100644
--- a/src/GudhUI/gui/Menu_uniform_neighbors.h
+++ b/src/GudhUI/gui/Menu_uniform_neighbors.h
@@ -25,7 +25,7 @@
#define GUI_MENU_UNIFORM_NEIGHBORS_H_
#include <QMainWindow>
-#include "gui/ui_UniformNeighborsMenu.h"
+#include "ui_UniformNeighborsMenu.h"
class Menu_uniform_neighbors : public QDialog, public Ui::UniformMenu {
Q_OBJECT
diff --git a/src/GudhUI/model/Model.h b/src/GudhUI/model/Model.h
index 77e37b6c..fc284cc6 100644
--- a/src/GudhUI/model/Model.h
+++ b/src/GudhUI/model/Model.h
@@ -187,7 +187,7 @@ class Model {
}
void contract_edges(unsigned num_contractions) {
- Clock c;
+ Gudhi::Clock c;
Edge_contractor<Complex> contractor(complex_, num_contractions);
std::cout << "Time to simplify: " << c.num_seconds() << "s" << std::endl;
}
@@ -248,7 +248,7 @@ class Model {
unsigned num_simplices = 0;
int euler = 0;
int dimension = 0;
- Clock clock;
+ Gudhi::Clock clock;
for (const auto &s : complex_.complex_simplex_range()) {
num_simplices++;
dimension = (std::max)(s.dimension(), dimension);
@@ -271,7 +271,7 @@ class Model {
#ifdef _WIN32
std::cout << "Works only on linux x64 for the moment\n";
#else
- Clock clock;
+ Gudhi::Clock clock;
run_chomp();
clock.end();
#endif
diff --git a/src/GudhUI/utils/Critical_points.h b/src/GudhUI/utils/Critical_points.h
index 3021a5fe..2a18e079 100644
--- a/src/GudhUI/utils/Critical_points.h
+++ b/src/GudhUI/utils/Critical_points.h
@@ -106,6 +106,7 @@ template<typename SkBlComplex> class Critical_points {
return 0;
Edge_contractor<Complex> contractor(link, link.num_vertices() - 1);
+ (void)contractor;
if (link.num_connected_components() > 1)
// one than more CC -> not contractible
diff --git a/src/GudhUI/utils/Is_manifold.h b/src/GudhUI/utils/Is_manifold.h
index 0640ea47..d0974463 100644
--- a/src/GudhUI/utils/Is_manifold.h
+++ b/src/GudhUI/utils/Is_manifold.h
@@ -77,6 +77,7 @@ template<typename SkBlComplex> class Is_manifold {
bool is_k_sphere(Vertex_handle v, int k) {
auto link = input_complex_.link(v);
Edge_contractor<Complex> contractor(link, link.num_vertices() - 1);
+ (void)contractor;
return (is_sphere_simplex(link) == k);
}
diff --git a/src/GudhUI/utils/Persistence_compute.h b/src/GudhUI/utils/Persistence_compute.h
index 97165490..2dc03c8e 100644
--- a/src/GudhUI/utils/Persistence_compute.h
+++ b/src/GudhUI/utils/Persistence_compute.h
@@ -29,6 +29,7 @@
#include <gudhi/Simplex_tree.h>
#include <gudhi/distance_functions.h>
#include <gudhi/Persistent_cohomology.h>
+#include <gudhi/Rips_complex.h>
#include <vector>
@@ -69,21 +70,23 @@ template<typename SkBlComplex> class Persistence_compute {
points.emplace_back(std::move(pt_to_add));
}
+ using Simplex_tree = Gudhi::Simplex_tree<>;
+ using Filtration_value = Simplex_tree::Filtration_value;
+ using Rips_complex = Gudhi::rips_complex::Rips_complex<Filtration_value>;
+ using Field_Zp = Gudhi::persistent_cohomology::Field_Zp;
+ using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology<Simplex_tree, Field_Zp>;
- Graph_t prox_graph = compute_proximity_graph(points, params.threshold, euclidean_distance<Point_t>);
- Gudhi::Simplex_tree<> st;
- st.insert_graph(prox_graph);
- st.expansion(params.max_dim);
+ Rips_complex rips_complex(points, params.threshold, Euclidean_distance());
- Gudhi::persistent_cohomology::Persistent_cohomology< Gudhi::Simplex_tree<>,
- Gudhi::persistent_cohomology::Field_Zp > pcoh(st);
+ Simplex_tree st;
+ rips_complex.create_complex(st, params.max_dim);
+ Persistent_cohomology pcoh(st);
// initializes the coefficient field for homology
pcoh.init_coefficients(params.p);
// put params.min_pers
pcoh.compute_persistent_cohomology(params.min_pers);
stream << "persistence: \n";
stream << "p dimension birth death: \n";
-
pcoh.output_diagram(stream);
}
};
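The hunk above swaps the hand-built proximity graph for the Rips_complex interface. As a standalone illustration of that pipeline (the point cloud and parameters are invented, and this is a sketch rather than the module's own example code):

    // Sketch: Rips_complex -> Simplex_tree -> Persistent_cohomology, as now used
    // by Persistence_compute.h. Points and parameters are illustrative only.
    #include <gudhi/Rips_complex.h>
    #include <gudhi/Simplex_tree.h>
    #include <gudhi/Persistent_cohomology.h>
    #include <gudhi/distance_functions.h>
    #include <iostream>
    #include <vector>

    int main() {
      using Simplex_tree = Gudhi::Simplex_tree<>;
      using Filtration_value = Simplex_tree::Filtration_value;
      using Rips_complex = Gudhi::rips_complex::Rips_complex<Filtration_value>;
      using Field_Zp = Gudhi::persistent_cohomology::Field_Zp;
      using Persistent_cohomology =
          Gudhi::persistent_cohomology::Persistent_cohomology<Simplex_tree, Field_Zp>;

      std::vector<std::vector<double>> points = {
          {0., 0.}, {1., 0.}, {0., 1.}, {1., 1.}, {0.5, 0.5}};
      double threshold = 1.5;  // Rips edge-length threshold
      int max_dim = 2;         // expand the complex up to dimension 2

      Rips_complex rips(points, threshold, Euclidean_distance());
      Simplex_tree st;
      rips.create_complex(st, max_dim);

      Persistent_cohomology pcoh(st);
      pcoh.init_coefficients(2);               // Z/2Z coefficients
      pcoh.compute_persistent_cohomology(0.);  // keep all persistence pairs
      pcoh.output_diagram(std::cout);
      return 0;
    }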
diff --git a/src/GudhUI/utils/Vertex_collapsor.h b/src/GudhUI/utils/Vertex_collapsor.h
index 2b36cb3a..568dab2f 100644
--- a/src/GudhUI/utils/Vertex_collapsor.h
+++ b/src/GudhUI/utils/Vertex_collapsor.h
@@ -81,6 +81,7 @@ template<typename SkBlComplex> class Vertex_collapsor {
if (link.is_cone()) return true;
if (link.num_connected_components() > 1) return false;
Edge_contractor<Complex> contractor(link, link.num_vertices() - 1);
+ (void)contractor;
return (link.num_vertices() == 1);
}
};
diff --git a/src/GudhUI/view/Viewer.h b/src/GudhUI/view/Viewer.h
index 319c8e04..797ddc53 100644
--- a/src/GudhUI/view/Viewer.h
+++ b/src/GudhUI/view/Viewer.h
@@ -24,9 +24,6 @@
#ifndef VIEW_VIEWER_H_
#define VIEW_VIEWER_H_
-// Workaround for moc-qt4 not parsing boost headers
-#include <CGAL/config.h>
-
#include <QGLViewer/qglviewer.h>
#include <vector>
diff --git a/src/GudhUI/view/Viewer_instructor.h b/src/GudhUI/view/Viewer_instructor.h
index 1da28009..05c5c1fc 100644
--- a/src/GudhUI/view/Viewer_instructor.h
+++ b/src/GudhUI/view/Viewer_instructor.h
@@ -26,9 +26,6 @@
// TODO: make a viewer instructor that directly holds a pointer to a QGLviewer and that buffers (or not) the triangles
-// Workaround for moc-qt4 not parsing boost headers
-#include <CGAL/config.h>
-
#include <QFileDialog>
#include <QKeyEvent>
#include <QGLViewer/camera.h>
diff --git a/src/Gudhi_stat/utilities/Landscape_bootstrap.cpp b/src/Gudhi_stat/utilities/Landscape_bootstrap.cpp
index 0827bfdf..e6e96e68 100644
--- a/src/Gudhi_stat/utilities/Landscape_bootstrap.cpp
+++ b/src/Gudhi_stat/utilities/Landscape_bootstrap.cpp
@@ -28,16 +28,14 @@
#include <gudhi/persistence_representations/Vector_distances_in_diagram.h>
//persistence part:
#include <gudhi/reader_utils.h>
-#include <gudhi/graph_simplicial_complex.h>
+#include <gudhi/Rips_complex.h>
#include <gudhi/distance_functions.h>
#include <gudhi/Simplex_tree.h>
#include <gudhi/Persistent_cohomology.h>
-using namespace Gudhi;
-using namespace Gudhi::Gudhi_stat;
-using namespace Gudhi::persistent_cohomology;
+using Persistence_landscape = Gudhi::Gudhi_stat::Persistence_landscape;
typedef int Vertex_handle;
typedef double Filtration_value;
@@ -64,18 +62,18 @@ public:
{
points_in_subsample.push_back( this->points[ numbers_to_sample[i] ] );
}
- //construct a Rips complex based on it and compute its persistence:
- Graph_t prox_graph = compute_proximity_graph(points_in_subsample, this->threshold , euclidean_distance< std::vector< double > >);
- // Construct the Rips complex in a Simplex Tree
- Simplex_tree<Simplex_tree_options_fast_persistence> st;
- // insert the proximity graph in the simplex tree
- st.insert_graph(prox_graph);
- // expand the graph until dimension dim_max
- st.expansion(this->dim + 1);
- // Sort the simplices in the order of the filtration
- st.initialize_filtration();
- // Compute the persistence diagram of the complex
- persistent_cohomology::Persistent_cohomology<Simplex_tree<Simplex_tree_options_fast_persistence>, Field_Zp > pcoh(st);
+
+ using Stree = Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_fast_persistence>;
+ using Filtration_value = Stree::Filtration_value;
+ using Rips_complex = Gudhi::rips_complex::Rips_complex<Filtration_value>;
+ //construct a Rips complex based on it and compute its persistence:
+ Rips_complex rips_complex(points_in_subsample, this->threshold, Euclidean_distance());
+ // Construct the Rips complex in a Simplex Tree
+ Stree st;
+ // expand the graph until dimension dim_max
+ rips_complex.create_complex(st, this->dim + 1);
+ // Compute the persistence diagram of the complex
+ Gudhi::persistent_cohomology::Persistent_cohomology<Stree, Gudhi::persistent_cohomology::Field_Zp > pcoh(st);
// initializes the coefficient field for homology
pcoh.init_coefficients( this->coeficient_field );
pcoh.compute_persistent_cohomology(this->min_persistence);
@@ -159,7 +157,7 @@ int main( int argc , char** argv )
std::cout << "Now we will read points from the file : " << filename << " and then perform " << number_of_repetitions_of_bootstrap << " times the bootstrap on it by choosing subsample of a size " << size_of_subsample << std::endl;
- std::vector< std::vector< double > > points = read_numbers_from_file_line_by_line( filename );
+ std::vector< std::vector< double > > points = Gudhi::Gudhi_stat::read_numbers_from_file_line_by_line( filename );
std::cout << "Read : " << points.size() << " points.\n";
@@ -172,7 +170,7 @@ int main( int argc , char** argv )
//CharacteristicFunction is just identity, transforming std::vector< size_t > to itself.
	//DistanceBetweenPointsCharacteristics is the place where it all happens. This class has the information about the coordinates of the points, and allows computing a Hausdorff distance between
//the collection of all points, and the subsample.
- double result = bootstrap<
+ double result = Gudhi::Gudhi_stat::bootstrap<
			Persistence_landscape , //PointCloudCharacteristics, persistence landscapes constructed based on a vector of
			//pairs of birth--death values in a certain dimension.
compute_persistence_landscape_of_a_point_cloud_in_certain_dimension , //CharacteristicFunction, in this case, we will need to compute persistence in a certain dimension.
diff --git a/src/Persistent_cohomology/benchmark/CMakeLists.txt b/src/Persistent_cohomology/benchmark/CMakeLists.txt
new file mode 100644
index 00000000..ea792c89
--- /dev/null
+++ b/src/Persistent_cohomology/benchmark/CMakeLists.txt
@@ -0,0 +1,14 @@
+cmake_minimum_required(VERSION 2.6)
+project(Persistent_cohomology_benchmark)
+
+
+if(GMP_FOUND)
+ if(GMPXX_FOUND)
+ add_executable ( performance_rips_persistence EXCLUDE_FROM_ALL performance_rips_persistence.cpp )
+ target_link_libraries(performance_rips_persistence ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY} ${GMPXX_LIBRARIES} ${GMP_LIBRARIES})
+ if (TBB_FOUND)
+ target_link_libraries(performance_rips_persistence ${TBB_LIBRARIES})
+ endif(TBB_FOUND)
+ file(COPY "${CMAKE_SOURCE_DIR}/data/points/Kl.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+ endif(GMPXX_FOUND)
+endif(GMP_FOUND)
diff --git a/src/Persistent_cohomology/example/performance_rips_persistence.cpp b/src/Persistent_cohomology/benchmark/performance_rips_persistence.cpp
index b4d282ac..ba752999 100644
--- a/src/Persistent_cohomology/example/performance_rips_persistence.cpp
+++ b/src/Persistent_cohomology/benchmark/performance_rips_persistence.cpp
@@ -20,20 +20,26 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-#include <gudhi/reader_utils.h>
-#include <gudhi/graph_simplicial_complex.h>
+#include <gudhi/Rips_complex.h>
#include <gudhi/distance_functions.h>
#include <gudhi/Simplex_tree.h>
#include <gudhi/Persistent_cohomology.h>
#include <gudhi/Persistent_cohomology/Multi_field.h>
#include <gudhi/Hasse_complex.h>
+#include <gudhi/Points_off_io.h>
#include <chrono>
#include <string>
#include <vector>
-using namespace Gudhi;
-using namespace Gudhi::persistent_cohomology;
+// Types definition
+using Simplex_tree = Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_fast_persistence>;
+using Filtration_value = Simplex_tree::Filtration_value;
+using Rips_complex = Gudhi::rips_complex::Rips_complex<Filtration_value>;
+using Field_Zp = Gudhi::persistent_cohomology::Field_Zp;
+using Multi_field = Gudhi::persistent_cohomology::Multi_field;
+using Point = std::vector<double>;
+using Points_off_reader = Gudhi::Points_off_reader<Point>;
/* Compute the persistent homology of the complex cpx with coefficients in Z/pZ. */
template< typename FilteredComplex>
@@ -66,33 +72,29 @@ int main(int argc, char * argv[]) {
int elapsed_sec;
{
- std::string filepoints = "../../../data/points/Kl.txt";
+ std::string off_file_points = "Kl.off";
Filtration_value threshold = 0.27;
int dim_max = 3;
int p = 2;
int q = 1223;
- // Extract the points from the file filepoints
- typedef std::vector<double> Point_t;
- std::vector< Point_t > points;
- read_points(filepoints, points);
+ // Extract the points from the file off_file_points
+ Points_off_reader off_reader(off_file_points);
// Compute the proximity graph of the points
start = std::chrono::system_clock::now();
- Graph_t prox_graph = compute_proximity_graph(points, threshold
- , euclidean_distance<Point_t>);
+ Rips_complex rips_complex_from_file(off_reader.get_point_cloud(), threshold, Euclidean_distance());
end = std::chrono::system_clock::now();
elapsed_sec = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
std::cout << "Compute Rips graph in " << elapsed_sec << " ms.\n";
// Construct the Rips complex in a Simplex Tree
- Simplex_tree<Simplex_tree_options_fast_persistence> st;
+ Simplex_tree st;
start = std::chrono::system_clock::now();
// insert the proximity graph in the simplex tree
- st.insert_graph(prox_graph);
// expand the graph until dimension dim_max
- st.expansion(dim_max);
+ rips_complex_from_file.create_complex(st, dim_max);
end = std::chrono::system_clock::now();
elapsed_sec = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
@@ -120,7 +122,7 @@ int main(int argc, char * argv[]) {
// Convert the simplex tree into a hasse diagram
start = std::chrono::system_clock::now();
- Hasse_complex<> hcpx(st);
+ Gudhi::Hasse_complex<> hcpx(st);
end = std::chrono::system_clock::now();
elapsed_sec = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
std::cout << "Convert the simplex tree into a Hasse diagram in " << elapsed_sec << " ms.\n";
@@ -152,7 +154,7 @@ timing_persistence(FilteredComplex & cpx
int elapsed_sec;
{
start = std::chrono::system_clock::now();
- Persistent_cohomology< FilteredComplex, Field_Zp > pcoh(cpx);
+ Gudhi::persistent_cohomology::Persistent_cohomology< FilteredComplex, Field_Zp > pcoh(cpx);
end = std::chrono::system_clock::now();
elapsed_sec = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
std::cout << " Initialize pcoh in " << elapsed_sec << " ms.\n";
@@ -186,7 +188,7 @@ timing_persistence(FilteredComplex & cpx
int elapsed_sec;
{
start = std::chrono::system_clock::now();
- Persistent_cohomology< FilteredComplex, Multi_field > pcoh(cpx);
+ Gudhi::persistent_cohomology::Persistent_cohomology< FilteredComplex, Multi_field > pcoh(cpx);
end = std::chrono::system_clock::now();
elapsed_sec = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
std::cout << " Initialize pcoh in " << elapsed_sec << " ms.\n";
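The benchmark times each stage with the same std::chrono pattern; extracted as a small reusable helper (the helper name is made up for the sketch):

    // Sketch: the std::chrono timing pattern used by the benchmark above.
    #include <chrono>
    #include <iostream>

    template <typename Callable>
    long long elapsed_ms(Callable&& body) {
      auto start = std::chrono::system_clock::now();
      body();
      auto end = std::chrono::system_clock::now();
      return std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
    }

    int main() {
      long long ms = elapsed_ms([] {
        volatile double x = 0.;
        for (int i = 0; i < 10000000; ++i) x += i * 1e-9;
      });
      std::cout << "busy loop took " << ms << " ms.\n";
      return 0;
    }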
diff --git a/src/Persistent_cohomology/doc/COPYRIGHT b/src/Persistent_cohomology/doc/COPYRIGHT
new file mode 100644
index 00000000..34345bef
--- /dev/null
+++ b/src/Persistent_cohomology/doc/COPYRIGHT
@@ -0,0 +1,19 @@
+The files of this directory are part of the Gudhi Library. The Gudhi library
+(Geometric Understanding in Higher Dimensions) is a generic C++ library for
+computational topology.
+
+Author(s): Clément Maria
+
+Copyright (C) 2015 INRIA
+
+This program is free software: you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free Software
+Foundation, either version 3 of the License, or (at your option) any later
+version.
+
+This program is distributed in the hope that it will be useful, but WITHOUT
+ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License along with
+this program. If not, see <http://www.gnu.org/licenses/>.
diff --git a/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h b/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h
index 433cfd3e..e17e5926 100644
--- a/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h
+++ b/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h
@@ -144,17 +144,23 @@ namespace persistent_cohomology {
We provide several example files: run these examples with -h for details on their use, and read the README file.
\li <a href="_persistent_cohomology_2rips_persistence_8cpp-example.html">
-Persistent_cohomology/rips_persistence.cpp</a> computes the Rips complex of a point cloud and its persistence diagram.
+Persistent_cohomology/rips_persistence.cpp</a> computes the Rips complex of a point cloud and outputs its persistence
+diagram.
+\code $> ./rips_persistence ../../data/points/tore3D_1307.off -r 0.25 -m 0.5 -d 3 -p 3 \endcode
+\code The complex contains 177838 simplices
+ and has dimension 3
+3 0 0 inf
+3 1 0.0983494 inf
+3 1 0.104347 inf
+3 2 0.138335 inf \endcode
\li <a href="_persistent_cohomology_2rips_multifield_persistence_8cpp-example.html">
-Persistent_cohomology/rips_multifield_persistence.cpp</a> computes the Rips complex of a point cloud and its
+Persistent_cohomology/rips_multifield_persistence.cpp</a> computes the Rips complex of a point cloud and outputs its
persistence diagram with a family of field coefficients.
-\li <a href="_persistent_cohomology_2performance_rips_persistence_8cpp-example.html">
-Persistent_cohomology/performance_rips_persistence.cpp</a> provides timings for the construction of the Rips complex
-on a set of points sampling a Klein bottle in \f$\mathbb{R}^5\f$ with a simplex tree, its conversion to a
-Hasse diagram and the computation of persistent homology and multi-field persistent homology for the
-different representations.
+\li <a href="_persistent_cohomology_2rips_distance_matrix_persistence_8cpp-example.html">
+Persistent_cohomology/rips_distance_matrix_persistence.cpp</a> computes the Rips complex of a distance matrix and
+outputs its persistence diagram.
\li <a href="_persistent_cohomology_2alpha_complex_3d_persistence_8cpp-example.html">
Persistent_cohomology/alpha_complex_3d_persistence.cpp</a> computes the persistent homology with
@@ -166,6 +172,28 @@ Persistent_cohomology/alpha_complex_3d_persistence.cpp</a> computes the persiste
2 1 0.0934117 1.00003
2 2 0.56444 1.03938 \endcode
+\li <a href="_persistent_cohomology_2exact_alpha_complex_3d_persistence_8cpp-example.html">
+Persistent_cohomology/exact_alpha_complex_3d_persistence.cpp</a> computes the persistent homology with
+\f$\mathbb{Z}/2\mathbb{Z}\f$ coefficients of the alpha complex on points sampled from an OFF file.
+Here, as CGAL computes exact values, it is slower, but this is necessary when the points lie on a grid,
+for instance.
+\code $> ./exact_alpha_complex_3d_persistence ../../data/points/sphere3D_pts_on_grid.off 2 0.1 \endcode
+\code Simplex_tree dim: 3
+2 0 0 inf
+2 2 0.0002 0.2028 \endcode
+
+\li <a href="_persistent_cohomology_2weighted_alpha_complex_3d_persistence_8cpp-example.html">
+Persistent_cohomology/weighted_alpha_complex_3d_persistence.cpp</a> computes the persistent homology with
+\f$\mathbb{Z}/2\mathbb{Z}\f$ coefficients of the weighted alpha complex on points sampled from an OFF file,
+with weights read from a separate weights file.
+\code $> ./weighted_alpha_complex_3d_persistence ../../data/points/tore3D_300.off
+../../data/points/tore3D_300.weights 2 0.45 \endcode
+\code Simplex_tree dim: 3
+2 -0 0 inf
+2 1 0.0682162 1.0001
+2 1 0.0934117 1.00003
+2 2 0.56444 1.03938 \endcode
+
\li <a href="_persistent_cohomology_2alpha_complex_persistence_8cpp-example.html">
Persistent_cohomology/alpha_complex_persistence.cpp</a> computes the persistent homology with
\f$\mathbb{Z}/p\mathbb{Z}\f$ coefficients of the alpha complex on points sampling from an OFF file.
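
All of these Rips examples follow the same construction pattern. The sketch below condenses it into one minimal, self-contained program; it reuses the types and calls from rips_distance_matrix_persistence.cpp and rips_multifield_persistence.cpp further down in this change, and the OFF file name and -r/-d/-p/-m values simply mirror the invocation shown above (illustrative, not defaults).

#include <gudhi/Rips_complex.h>
#include <gudhi/Simplex_tree.h>
#include <gudhi/Persistent_cohomology.h>
#include <gudhi/Points_off_io.h>
#include <gudhi/distance_functions.h>

#include <vector>

using Simplex_tree = Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_fast_persistence>;
using Filtration_value = Simplex_tree::Filtration_value;
using Rips_complex = Gudhi::rips_complex::Rips_complex<Filtration_value>;
using Field_Zp = Gudhi::persistent_cohomology::Field_Zp;
using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology<Simplex_tree, Field_Zp>;
using Point = std::vector<double>;

int main() {
  // Read the point cloud (file name as in the example invocation above)
  Gudhi::Points_off_reader<Point> off_reader("../../data/points/tore3D_1307.off");
  // Rips complex with maximal edge length 0.25 (-r)
  Rips_complex rips(off_reader.get_point_cloud(), 0.25, Euclidean_distance());
  // Expand into a simplex tree up to dimension 3 (-d)
  Simplex_tree st;
  rips.create_complex(st, 3);
  st.initialize_filtration();
  // Persistence with Z/3Z coefficients (-p 3), discarding intervals shorter than 0.5 (-m)
  Persistent_cohomology pcoh(st);
  pcoh.init_coefficients(3);
  pcoh.compute_persistent_cohomology(0.5);
  pcoh.output_diagram();  // one "p dim birth death" line per feature
  return 0;
}
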
diff --git a/src/Persistent_cohomology/example/CMakeLists.txt b/src/Persistent_cohomology/example/CMakeLists.txt
index d97d1b63..a6b698c3 100644
--- a/src/Persistent_cohomology/example/CMakeLists.txt
+++ b/src/Persistent_cohomology/example/CMakeLists.txt
@@ -1,19 +1,21 @@
cmake_minimum_required(VERSION 2.6)
project(Persistent_cohomology_examples)
-# problem with Visual Studio link on Boost program_options
-add_definitions( -DBOOST_ALL_NO_LIB )
-add_definitions( -DBOOST_ALL_DYN_LINK )
-
add_executable(plain_homology plain_homology.cpp)
target_link_libraries(plain_homology ${Boost_SYSTEM_LIBRARY})
add_executable(persistence_from_simple_simplex_tree persistence_from_simple_simplex_tree.cpp)
target_link_libraries(persistence_from_simple_simplex_tree ${Boost_SYSTEM_LIBRARY})
+add_executable(rips_distance_matrix_persistence rips_distance_matrix_persistence.cpp)
+target_link_libraries(rips_distance_matrix_persistence ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY})
+
add_executable(rips_persistence rips_persistence.cpp)
target_link_libraries(rips_persistence ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY})
+add_executable(rips_persistence_step_by_step rips_persistence_step_by_step.cpp)
+target_link_libraries(rips_persistence_step_by_step ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY})
+
add_executable(rips_persistence_via_boundary_matrix rips_persistence_via_boundary_matrix.cpp)
target_link_libraries(rips_persistence_via_boundary_matrix ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY})
@@ -23,62 +25,82 @@ target_link_libraries(persistence_from_file ${Boost_SYSTEM_LIBRARY} ${Boost_PROG
if (TBB_FOUND)
target_link_libraries(plain_homology ${TBB_LIBRARIES})
target_link_libraries(persistence_from_simple_simplex_tree ${TBB_LIBRARIES})
+ target_link_libraries(rips_distance_matrix_persistence ${TBB_LIBRARIES})
target_link_libraries(rips_persistence ${TBB_LIBRARIES})
+ target_link_libraries(rips_persistence_step_by_step ${TBB_LIBRARIES})
target_link_libraries(rips_persistence_via_boundary_matrix ${TBB_LIBRARIES})
target_link_libraries(persistence_from_file ${TBB_LIBRARIES})
endif()
add_test(plain_homology ${CMAKE_CURRENT_BINARY_DIR}/plain_homology)
add_test(persistence_from_simple_simplex_tree ${CMAKE_CURRENT_BINARY_DIR}/persistence_from_simple_simplex_tree 1 0)
-add_test(rips_persistence_3 ${CMAKE_CURRENT_BINARY_DIR}/rips_persistence ${CMAKE_SOURCE_DIR}/data/points/Kl.txt -r 0.2 -d 3 -p 3 -m 100)
-add_test(rips_persistence_via_boundary_matrix_3 ${CMAKE_CURRENT_BINARY_DIR}/rips_persistence_via_boundary_matrix ${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.txt -r 0.3 -d 3 -p 3 -m 100)
-add_test(persistence_from_file_3_2_0 ${CMAKE_CURRENT_BINARY_DIR}/persistence_from_file ${CMAKE_SOURCE_DIR}/data/points/bunny_5000.st -p 2 -m 0)
-add_test(persistence_from_file_3_3_100 ${CMAKE_CURRENT_BINARY_DIR}/persistence_from_file ${CMAKE_SOURCE_DIR}/data/points/bunny_5000.st -p 3 -m 100)
+add_test(rips_distance_matrix ${CMAKE_CURRENT_BINARY_DIR}/rips_distance_matrix_persistence
+ ${CMAKE_SOURCE_DIR}/data/distance_matrix/full_square_distance_matrix.csv -r 1.0 -d 3 -p 3 -m 0)
+add_test(rips_persistence_3 ${CMAKE_CURRENT_BINARY_DIR}/rips_persistence
+ ${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off -r 0.25 -m 0.5 -d 3 -p 3)
+add_test(rips_persistence_step_by_step_3 ${CMAKE_CURRENT_BINARY_DIR}/rips_persistence_step_by_step
+ ${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off -r 0.25 -m 0.5 -d 3 -p 3)
+add_test(rips_persistence_via_boundary_matrix_3 ${CMAKE_CURRENT_BINARY_DIR}/rips_persistence_via_boundary_matrix
+ ${CMAKE_SOURCE_DIR}/data/points/Kl.off -r 0.16 -d 3 -p 3 -m 100)
+add_test(persistence_from_file_3_2_0 ${CMAKE_CURRENT_BINARY_DIR}/persistence_from_file
+ ${CMAKE_SOURCE_DIR}/data/filtered_simplicial_complex/bunny_5000_complex.fsc -p 2 -m 0)
+add_test(persistence_from_file_3_3_100 ${CMAKE_CURRENT_BINARY_DIR}/persistence_from_file
+ ${CMAKE_SOURCE_DIR}/data/filtered_simplicial_complex/bunny_5000_complex.fsc -p 3 -m 100)
if(GMP_FOUND)
if(GMPXX_FOUND)
add_executable(rips_multifield_persistence rips_multifield_persistence.cpp )
- target_link_libraries(rips_multifield_persistence ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY} ${GMPXX_LIBRARIES} ${GMP_LIBRARIES})
- add_executable ( performance_rips_persistence performance_rips_persistence.cpp )
- target_link_libraries(performance_rips_persistence ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY} ${GMPXX_LIBRARIES} ${GMP_LIBRARIES})
+ target_link_libraries(rips_multifield_persistence
+ ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY} ${GMPXX_LIBRARIES} ${GMP_LIBRARIES})
if (TBB_FOUND)
target_link_libraries(rips_multifield_persistence ${TBB_LIBRARIES})
- target_link_libraries(performance_rips_persistence ${TBB_LIBRARIES})
endif(TBB_FOUND)
-
- add_test(rips_multifield_persistence_2_71 ${CMAKE_CURRENT_BINARY_DIR}/rips_multifield_persistence ${CMAKE_SOURCE_DIR}/data/points/Kl.txt -r 0.2 -d 3 -p 2 -q 71 -m 100)
+ add_test(rips_multifield_persistence_2_71 ${CMAKE_CURRENT_BINARY_DIR}/rips_multifield_persistence
+ ${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off -r 0.25 -m 0.5 -d 3 -p 2 -q 71)
endif(GMPXX_FOUND)
endif(GMP_FOUND)
if(CGAL_FOUND)
add_executable(alpha_complex_3d_persistence alpha_complex_3d_persistence.cpp)
target_link_libraries(alpha_complex_3d_persistence ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY})
+ add_executable(exact_alpha_complex_3d_persistence exact_alpha_complex_3d_persistence.cpp)
+ target_link_libraries(exact_alpha_complex_3d_persistence ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY})
+ add_executable(weighted_alpha_complex_3d_persistence weighted_alpha_complex_3d_persistence.cpp)
+ target_link_libraries(weighted_alpha_complex_3d_persistence ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY})
if (TBB_FOUND)
target_link_libraries(alpha_complex_3d_persistence ${TBB_LIBRARIES})
+ target_link_libraries(exact_alpha_complex_3d_persistence ${TBB_LIBRARIES})
+ target_link_libraries(weighted_alpha_complex_3d_persistence ${TBB_LIBRARIES})
endif(TBB_FOUND)
- add_test(alpha_complex_3d_persistence_2_0_5 ${CMAKE_CURRENT_BINARY_DIR}/alpha_complex_3d_persistence ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off 2 0.45)
-
-
- if (NOT CGAL_VERSION VERSION_LESS 4.7.0)
- if (EIGEN3_FOUND)
- add_executable (alpha_complex_persistence alpha_complex_persistence.cpp)
- target_link_libraries(alpha_complex_persistence ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY})
-
- add_executable(periodic_alpha_complex_3d_persistence periodic_alpha_complex_3d_persistence.cpp)
- target_link_libraries(periodic_alpha_complex_3d_persistence ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY})
-
- add_executable(custom_persistence_sort custom_persistence_sort.cpp)
- target_link_libraries(custom_persistence_sort ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY})
-
- if (TBB_FOUND)
- target_link_libraries(alpha_complex_persistence ${TBB_LIBRARIES})
- target_link_libraries(periodic_alpha_complex_3d_persistence ${TBB_LIBRARIES})
- target_link_libraries(custom_persistence_sort ${TBB_LIBRARIES})
- endif(TBB_FOUND)
- add_test(alpha_complex_persistence_2_0_45 ${CMAKE_CURRENT_BINARY_DIR}/alpha_complex_persistence ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -m 0.45 -p 2)
- add_test(periodic_alpha_complex_3d_persistence_2_0 ${CMAKE_CURRENT_BINARY_DIR}/periodic_alpha_complex_3d_persistence ${CMAKE_SOURCE_DIR}/data/points/grid_10_10_10_in_0_1.off ${CMAKE_SOURCE_DIR}/data/points/iso_cuboid_3_in_0_1.txt 2 0)
- add_test(custom_persistence_sort ${CMAKE_CURRENT_BINARY_DIR}/custom_persistence_sort)
- endif(EIGEN3_FOUND)
- endif (NOT CGAL_VERSION VERSION_LESS 4.7.0)
+ add_test(alpha_complex_3d_persistence_2_0_5 ${CMAKE_CURRENT_BINARY_DIR}/alpha_complex_3d_persistence
+ ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off 2 0.45)
+ add_test(exact_alpha_complex_3d_persistence_2_0_5 ${CMAKE_CURRENT_BINARY_DIR}/exact_alpha_complex_3d_persistence
+ ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off 2 0.45)
+ add_test(weighted_alpha_complex_3d_persistence_2_0_5 ${CMAKE_CURRENT_BINARY_DIR}/weighted_alpha_complex_3d_persistence
+ ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.weights 2 0.45)
+
+
+ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
+ add_executable (alpha_complex_persistence alpha_complex_persistence.cpp)
+ target_link_libraries(alpha_complex_persistence
+ ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY})
+
+ add_executable(periodic_alpha_complex_3d_persistence periodic_alpha_complex_3d_persistence.cpp)
+ target_link_libraries(periodic_alpha_complex_3d_persistence ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY})
+
+ add_executable(custom_persistence_sort custom_persistence_sort.cpp)
+ target_link_libraries(custom_persistence_sort ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY})
+
+ if (TBB_FOUND)
+ target_link_libraries(alpha_complex_persistence ${TBB_LIBRARIES})
+ target_link_libraries(periodic_alpha_complex_3d_persistence ${TBB_LIBRARIES})
+ target_link_libraries(custom_persistence_sort ${TBB_LIBRARIES})
+ endif(TBB_FOUND)
+ add_test(alpha_complex_persistence_2_0_45 ${CMAKE_CURRENT_BINARY_DIR}/alpha_complex_persistence
+ ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -m 0.45 -p 2)
+ add_test(periodic_alpha_complex_3d_persistence_2_0 ${CMAKE_CURRENT_BINARY_DIR}/periodic_alpha_complex_3d_persistence
+ ${CMAKE_SOURCE_DIR}/data/points/grid_10_10_10_in_0_1.off ${CMAKE_SOURCE_DIR}/data/points/iso_cuboid_3_in_0_1.txt 2 0)
+ add_test(custom_persistence_sort ${CMAKE_CURRENT_BINARY_DIR}/custom_persistence_sort)
+ endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
endif(CGAL_FOUND)
diff --git a/src/Persistent_cohomology/example/README b/src/Persistent_cohomology/example/README
index 7803e5ab..2ac79398 100644
--- a/src/Persistent_cohomology/example/README
+++ b/src/Persistent_cohomology/example/README
@@ -10,13 +10,13 @@ Example of use of RIPS:
Computation of the persistent homology with Z/2Z coefficients of the Rips complex on points
sampling a Klein bottle:
-./rips_persistence ../../data/points/Kl.txt -r 0.25 -d 3 -p 2 -m 100
+./rips_persistence ../../data/points/tore3D_1307.off -r 0.25 -m 0.5 -d 3 -p 2
output:
-210 0 0 inf
-210 1 0.0702103 inf
-2 1 0.0702103 inf
-2 2 0.159992 inf
+2 0 0 inf
+2 1 0.0983494 inf
+2 1 0.104347 inf
+2 2 0.138335 inf
Every line is of this format: p1*...*pr dim b d
@@ -29,31 +29,45 @@ where
with Z/3Z coefficients:
-./rips_persistence ../../data/points/Kl.txt -r 0.25 -d 3 -p 3 -m 100
+./rips_persistence ../../data/points/tore3D_1307.off -r 0.25 -m 0.5 -d 3 -p 3
output:
-3 0 0 inf
-3 1 0.0702103 inf
+3 0 0 inf
+3 1 0.0983494 inf
+3 1 0.104347 inf
+3 2 0.138335 inf
and the computation with Z/2Z and Z/3Z coefficients simultaneously:
-./rips_multifield_persistence ../../data/points/Kl.txt -r 0.25 -d 3 -p 2 -q 3 -m 100
+./rips_multifield_persistence ../../data/points/tore3D_1307.off -r 0.25 -m 0.12 -d 3 -p 2 -q 3
output:
-6 0 0 inf
-6 1 0.0702103 inf
-2 1 0.0702103 inf
-2 2 0.159992 inf
+6 0 0 inf
+6 1 0.0983494 inf
+6 1 0.104347 inf
+6 2 0.138335 inf
+6 0 0 0.122545
+6 0 0 0.121171
+6 0 0 0.120964
+6 0 0 0.12057
+6 0 0 0.12047
+6 0 0 0.120414
and finally the computation with all Z/pZ for 2 <= p <= 71 (the first 20 prime numbers):
- ./rips_multifield_persistence ../../data/points/Kl.txt -r 0.25 -d 3 -p 2 -q 71 -m 100
+ ./rips_multifield_persistence ../../data/points/Kl.off -r 0.25 -m 0.5 -d 3 -p 2 -q 71
output:
-557940830126698960967415390 0 0 inf
-557940830126698960967415390 1 0.0702103 inf
-2 1 0.0702103 inf
-2 2 0.159992 inf
+557940830126698960967415390 0 0 inf
+557940830126698960967415390 1 0.0983494 inf
+557940830126698960967415390 1 0.104347 inf
+557940830126698960967415390 2 0.138335 inf
+557940830126698960967415390 0 0 0.122545
+557940830126698960967415390 0 0 0.121171
+557940830126698960967415390 0 0 0.120964
+557940830126698960967415390 0 0 0.12057
+557940830126698960967415390 0 0 0.12047
+557940830126698960967415390 0 0 0.120414
***********************************************************************************************************************
Example of use of ALPHA:
diff --git a/src/Persistent_cohomology/example/alpha_complex_3d_helper.h b/src/Persistent_cohomology/example/alpha_complex_3d_helper.h
new file mode 100644
index 00000000..7865e4ec
--- /dev/null
+++ b/src/Persistent_cohomology/example/alpha_complex_3d_helper.h
@@ -0,0 +1,76 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2014 INRIA Saclay (France)
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef ALPHA_COMPLEX_3D_HELPER_H_
+#define ALPHA_COMPLEX_3D_HELPER_H_
+
+template<class Vertex_list, class Cell_handle>
+Vertex_list from_cell(const Cell_handle& ch) {
+ Vertex_list the_list;
+ for (auto i = 0; i < 4; i++) {
+#ifdef DEBUG_TRACES
+ std::cout << "from cell[" << i << "]=" << ch->vertex(i)->point() << std::endl;
+#endif // DEBUG_TRACES
+ the_list.push_back(ch->vertex(i));
+ }
+ return the_list;
+}
+
+template<class Vertex_list, class Facet>
+Vertex_list from_facet(const Facet& fct) {
+ Vertex_list the_list;
+ for (auto i = 0; i < 4; i++) {
+ if (fct.second != i) {
+#ifdef DEBUG_TRACES
+ std::cout << "from facet=[" << i << "]" << fct.first->vertex(i)->point() << std::endl;
+#endif // DEBUG_TRACES
+ the_list.push_back(fct.first->vertex(i));
+ }
+ }
+ return the_list;
+}
+
+template<class Vertex_list, class Edge_3>
+Vertex_list from_edge(const Edge_3& edg) {
+ Vertex_list the_list;
+ for (auto i = 0; i < 4; i++) {
+ if ((edg.second == i) || (edg.third == i)) {
+#ifdef DEBUG_TRACES
+ std::cout << "from edge[" << i << "]=" << edg.first->vertex(i)->point() << std::endl;
+#endif // DEBUG_TRACES
+ the_list.push_back(edg.first->vertex(i));
+ }
+ }
+ return the_list;
+}
+
+template<class Vertex_list, class Vertex_handle>
+Vertex_list from_vertex(const Vertex_handle& vh) {
+ Vertex_list the_list;
+#ifdef DEBUG_TRACES
+ std::cout << "from vertex=" << vh->point() << std::endl;
+#endif // DEBUG_TRACES
+ the_list.push_back(vh);
+ return the_list;
+}
+
+#endif // ALPHA_COMPLEX_3D_HELPER_H_
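
These templated helpers replace the overloaded from() functions that the hunks below remove from the individual examples. The following is a sketch of the call site they are written for; it is a fragment rather than a standalone program, and assumes the type aliases (Cell_handle, Facet, Edge_3, Vertex_handle, Vertex_list) defined in alpha_complex_3d_persistence.cpp below.

// Dispatch on the objects returned by Alpha_shape_3::filtration_with_alpha_values()
// and turn each one into its list of CGAL vertex handles.
for (auto object_iterator : the_objects) {
  Vertex_list vertex_list;
  if (const Cell_handle * cell = CGAL::object_cast<Cell_handle>(&object_iterator)) {
    vertex_list = from_cell<Vertex_list, Cell_handle>(*cell);          // tetrahedron: 4 vertices
  } else if (const Facet * facet = CGAL::object_cast<Facet>(&object_iterator)) {
    vertex_list = from_facet<Vertex_list, Facet>(*facet);              // triangle: 3 vertices
  } else if (const Edge_3 * edge = CGAL::object_cast<Edge_3>(&object_iterator)) {
    vertex_list = from_edge<Vertex_list, Edge_3>(*edge);               // edge: 2 vertices
  } else if (const Vertex_handle * vertex = CGAL::object_cast<Vertex_handle>(&object_iterator)) {
    vertex_list = from_vertex<Vertex_list, Vertex_handle>(*vertex);    // single vertex
  }
  // vertex_list is then mapped to Simplex_tree vertex ids and inserted with
  // the matching alpha value as its filtration value.
}
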
diff --git a/src/Persistent_cohomology/example/alpha_complex_3d_persistence.cpp b/src/Persistent_cohomology/example/alpha_complex_3d_persistence.cpp
index 20142ab2..fd227b82 100644
--- a/src/Persistent_cohomology/example/alpha_complex_3d_persistence.cpp
+++ b/src/Persistent_cohomology/example/alpha_complex_3d_persistence.cpp
@@ -4,7 +4,7 @@
*
* Author(s): Vincent Rouvreau
*
- * Copyright (C) 2014 INRIA Saclay (France)
+ * Copyright (C) 2014 INRIA
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -20,13 +20,14 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
+#include <boost/variant.hpp>
+
#include <gudhi/Simplex_tree.h>
#include <gudhi/Persistent_cohomology.h>
-#include <boost/variant.hpp>
+#include <gudhi/Points_3D_off_io.h>
#include <CGAL/Exact_predicates_inexact_constructions_kernel.h>
-#include <CGAL/Regular_triangulation_3.h>
-#include <CGAL/Regular_triangulation_euclidean_traits_3.h>
+#include <CGAL/Delaunay_triangulation_3.h>
#include <CGAL/Alpha_shape_3.h>
#include <CGAL/iterator.h>
@@ -39,85 +40,42 @@
#include <list>
#include <vector>
+#include "alpha_complex_3d_helper.h"
+
// Alpha_shape_3 templates type definitions
-typedef CGAL::Exact_predicates_inexact_constructions_kernel Kernel;
-typedef CGAL::Regular_triangulation_euclidean_traits_3<Kernel> Gt;
-typedef CGAL::Alpha_shape_vertex_base_3<Gt> Vb;
-typedef CGAL::Alpha_shape_cell_base_3<Gt> Fb;
-typedef CGAL::Triangulation_data_structure_3<Vb, Fb> Tds;
-typedef CGAL::Regular_triangulation_3<Gt, Tds> Triangulation_3;
-typedef CGAL::Alpha_shape_3<Triangulation_3> Alpha_shape_3;
+using Kernel = CGAL::Exact_predicates_inexact_constructions_kernel;
+using Vb = CGAL::Alpha_shape_vertex_base_3<Kernel>;
+using Fb = CGAL::Alpha_shape_cell_base_3<Kernel>;
+using Tds = CGAL::Triangulation_data_structure_3<Vb, Fb>;
+using Triangulation_3 = CGAL::Delaunay_triangulation_3<Kernel, Tds>;
+using Alpha_shape_3 = CGAL::Alpha_shape_3<Triangulation_3>;
// From file type definition
-typedef Kernel::Point_3 Point_3;
+using Point_3 = Kernel::Point_3;
// filtration with alpha values needed type definition
-typedef Alpha_shape_3::FT Alpha_value_type;
-typedef CGAL::Object Object;
-typedef CGAL::Dispatch_output_iterator<
-CGAL::cpp11::tuple<Object, Alpha_value_type>,
-CGAL::cpp11::tuple<std::back_insert_iterator< std::vector<Object> >,
- std::back_insert_iterator< std::vector<Alpha_value_type> > > > Dispatch;
-typedef Alpha_shape_3::Cell_handle Cell_handle;
-typedef Alpha_shape_3::Facet Facet;
-typedef Alpha_shape_3::Edge Edge_3;
-typedef std::list<Alpha_shape_3::Vertex_handle> Vertex_list;
+using Alpha_value_type = Alpha_shape_3::FT;
+using Object = CGAL::Object;
+using Dispatch = CGAL::Dispatch_output_iterator<
+ CGAL::cpp11::tuple<Object, Alpha_value_type>,
+ CGAL::cpp11::tuple<std::back_insert_iterator< std::vector<Object> >,
+ std::back_insert_iterator< std::vector<Alpha_value_type> > > >;
+using Cell_handle = Alpha_shape_3::Cell_handle;
+using Facet = Alpha_shape_3::Facet;
+using Edge_3 = Alpha_shape_3::Edge;
+using Vertex_handle = Alpha_shape_3::Vertex_handle;
+using Vertex_list = std::list<Alpha_shape_3::Vertex_handle>;
// gudhi type definition
-typedef Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_fast_persistence> ST;
-typedef ST::Vertex_handle Simplex_tree_vertex;
-typedef std::map<Alpha_shape_3::Vertex_handle, Simplex_tree_vertex > Alpha_shape_simplex_tree_map;
-typedef std::pair<Alpha_shape_3::Vertex_handle, Simplex_tree_vertex> Alpha_shape_simplex_tree_pair;
-typedef std::vector< Simplex_tree_vertex > Simplex_tree_vector_vertex;
-typedef Gudhi::persistent_cohomology::Persistent_cohomology< ST, Gudhi::persistent_cohomology::Field_Zp > PCOH;
-
-Vertex_list from(const Cell_handle& ch) {
- Vertex_list the_list;
- for (auto i = 0; i < 4; i++) {
-#ifdef DEBUG_TRACES
- std::cout << "from cell[" << i << "]=" << ch->vertex(i)->point() << std::endl;
-#endif // DEBUG_TRACES
- the_list.push_back(ch->vertex(i));
- }
- return the_list;
-}
-
-Vertex_list from(const Facet& fct) {
- Vertex_list the_list;
- for (auto i = 0; i < 4; i++) {
- if (fct.second != i) {
-#ifdef DEBUG_TRACES
- std::cout << "from facet=[" << i << "]" << fct.first->vertex(i)->point() << std::endl;
-#endif // DEBUG_TRACES
- the_list.push_back(fct.first->vertex(i));
- }
- }
- return the_list;
-}
-
-Vertex_list from(const Edge_3& edg) {
- Vertex_list the_list;
- for (auto i = 0; i < 4; i++) {
- if ((edg.second == i) || (edg.third == i)) {
-#ifdef DEBUG_TRACES
- std::cout << "from edge[" << i << "]=" << edg.first->vertex(i)->point() << std::endl;
-#endif // DEBUG_TRACES
- the_list.push_back(edg.first->vertex(i));
- }
- }
- return the_list;
-}
-
-Vertex_list from(const Alpha_shape_3::Vertex_handle& vh) {
- Vertex_list the_list;
-#ifdef DEBUG_TRACES
- std::cout << "from vertex=" << vh->point() << std::endl;
-#endif // DEBUG_TRACES
- the_list.push_back(vh);
- return the_list;
-}
-
-void usage(char * const progName) {
+using ST = Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_fast_persistence>;
+using Filtration_value = ST::Filtration_value;
+using Simplex_tree_vertex = ST::Vertex_handle;
+using Alpha_shape_simplex_tree_map = std::map<Alpha_shape_3::Vertex_handle, Simplex_tree_vertex >;
+using Alpha_shape_simplex_tree_pair = std::pair<Alpha_shape_3::Vertex_handle, Simplex_tree_vertex>;
+using Simplex_tree_vector_vertex = std::vector< Simplex_tree_vertex >;
+using PCOH = Gudhi::persistent_cohomology::Persistent_cohomology< ST, Gudhi::persistent_cohomology::Field_Zp >;
+
+void usage(const std::string& progName) {
std::cerr << "Usage: " << progName <<
" path_to_file_graph coeff_field_characteristic[integer > 0] min_persistence[float >= -1.0]\n";
exit(-1);
@@ -133,17 +91,24 @@ int main(int argc, char * const argv[]) {
int coeff_field_characteristic = atoi(argv[2]);
Filtration_value min_persistence = 0.0;
- int returnedScanValue = sscanf(argv[3], "%lf", &min_persistence);
+ int returnedScanValue = sscanf(argv[3], "%f", &min_persistence);
if ((returnedScanValue == EOF) || (min_persistence < -1.0)) {
std::cerr << "Error: " << argv[3] << " is not correct\n";
usage(argv[0]);
}
- std::vector<Gt::Weighted_point> lp;
- lp.emplace_back(Point_3(0,0,0),0);
- lp.emplace_back(Point_3(0,0,1),0);
- lp.emplace_back(Point_3(0,1,0),.2);
- lp.emplace_back(Point_3(1,0,0),0);
+ // Read points from file
+ std::string offInputFile(argv[1]);
+  // Read the OFF file (input file name given as parameter)
+ Gudhi::Points_3D_off_reader<Point_3> off_reader(offInputFile);
+ // Check the read operation was correct
+ if (!off_reader.is_valid()) {
+ std::cerr << "Unable to read file " << offInputFile << std::endl;
+ usage(argv[0]);
+ }
+
+  // Retrieve the point cloud
+ std::vector<Point_3> lp = off_reader.get_point_cloud();
// alpha shape construction from points. CGAL has a strange behavior in REGULARIZED mode.
Alpha_shape_3 as(lp.begin(), lp.end(), 0, Alpha_shape_3::GENERAL);
@@ -178,30 +143,29 @@ int main(int argc, char * const argv[]) {
for (auto object_iterator : the_objects) {
// Retrieve Alpha shape vertex list from object
if (const Cell_handle * cell = CGAL::object_cast<Cell_handle>(&object_iterator)) {
- vertex_list = from(*cell);
+ vertex_list = from_cell<Vertex_list, Cell_handle>(*cell);
count_cells++;
if (dim_max < 3) {
// Cell is of dim 3
dim_max = 3;
}
} else if (const Facet * facet = CGAL::object_cast<Facet>(&object_iterator)) {
- vertex_list = from(*facet);
+ vertex_list = from_facet<Vertex_list, Facet>(*facet);
count_facets++;
if (dim_max < 2) {
// Facet is of dim 2
dim_max = 2;
}
} else if (const Edge_3 * edge = CGAL::object_cast<Edge_3>(&object_iterator)) {
- vertex_list = from(*edge);
+ vertex_list = from_edge<Vertex_list, Edge_3>(*edge);
count_edges++;
if (dim_max < 1) {
// Edge_3 is of dim 1
dim_max = 1;
}
- } else if (const Alpha_shape_3::Vertex_handle * vertex =
- CGAL::object_cast<Alpha_shape_3::Vertex_handle>(&object_iterator)) {
+ } else if (const Vertex_handle * vertex = CGAL::object_cast<Vertex_handle>(&object_iterator)) {
count_vertices++;
- vertex_list = from(*vertex);
+ vertex_list = from_vertex<Vertex_list, Vertex_handle>(*vertex);
}
// Construction of the vector of simplex_tree vertex from list of alpha_shapes vertex
Simplex_tree_vector_vertex the_simplex_tree;
diff --git a/src/Persistent_cohomology/example/alpha_complex_persistence.cpp b/src/Persistent_cohomology/example/alpha_complex_persistence.cpp
index cb181936..9e84e91f 100644
--- a/src/Persistent_cohomology/example/alpha_complex_persistence.cpp
+++ b/src/Persistent_cohomology/example/alpha_complex_persistence.cpp
@@ -4,11 +4,16 @@
#include <gudhi/Alpha_complex.h>
#include <gudhi/Persistent_cohomology.h>
+// to construct a simplex_tree from alpha complex
+#include <gudhi/Simplex_tree.h>
#include <iostream>
#include <string>
#include <limits> // for numeric_limits
+using Simplex_tree = Gudhi::Simplex_tree<>;
+using Filtration_value = Simplex_tree::Filtration_value;
+
void program_options(int argc, char * argv[]
, std::string & off_file_points
, std::string & output_file_diag
@@ -30,35 +35,38 @@ int main(int argc, char **argv) {
// Init of an alpha complex from an OFF file
// ----------------------------------------------------------------------------
using Kernel = CGAL::Epick_d< CGAL::Dynamic_dimension_tag >;
- Gudhi::alpha_complex::Alpha_complex<Kernel> alpha_complex_from_file(off_file_points, alpha_square_max_value);
-
- // ----------------------------------------------------------------------------
- // Display information about the alpha complex
- // ----------------------------------------------------------------------------
- std::cout << "Alpha complex is of dimension " << alpha_complex_from_file.dimension() <<
- " - " << alpha_complex_from_file.num_simplices() << " simplices - " <<
- alpha_complex_from_file.num_vertices() << " vertices." << std::endl;
-
- // Sort the simplices in the order of the filtration
- alpha_complex_from_file.initialize_filtration();
-
- std::cout << "Simplex_tree dim: " << alpha_complex_from_file.dimension() << std::endl;
- // Compute the persistence diagram of the complex
- Gudhi::persistent_cohomology::Persistent_cohomology< Gudhi::alpha_complex::Alpha_complex<Kernel>,
- Gudhi::persistent_cohomology::Field_Zp > pcoh(alpha_complex_from_file);
- // initializes the coefficient field for homology
- pcoh.init_coefficients(coeff_field_characteristic);
-
- pcoh.compute_persistent_cohomology(min_persistence);
-
- // Output the diagram in filediag
- if (output_file_diag.empty()) {
- pcoh.output_diagram();
- } else {
- std::cout << "Result in file: " << output_file_diag << std::endl;
- std::ofstream out(output_file_diag);
- pcoh.output_diagram(out);
- out.close();
+ Gudhi::alpha_complex::Alpha_complex<Kernel> alpha_complex_from_file(off_file_points);
+
+ Simplex_tree simplex;
+ if (alpha_complex_from_file.create_complex(simplex, alpha_square_max_value)) {
+ // ----------------------------------------------------------------------------
+ // Display information about the alpha complex
+ // ----------------------------------------------------------------------------
+ std::cout << "Simplicial complex is of dimension " << simplex.dimension() <<
+ " - " << simplex.num_simplices() << " simplices - " <<
+ simplex.num_vertices() << " vertices." << std::endl;
+
+ // Sort the simplices in the order of the filtration
+ simplex.initialize_filtration();
+
+ std::cout << "Simplex_tree dim: " << simplex.dimension() << std::endl;
+ // Compute the persistence diagram of the complex
+ Gudhi::persistent_cohomology::Persistent_cohomology< Simplex_tree,
+ Gudhi::persistent_cohomology::Field_Zp > pcoh(simplex);
+ // initializes the coefficient field for homology
+ pcoh.init_coefficients(coeff_field_characteristic);
+
+ pcoh.compute_persistent_cohomology(min_persistence);
+
+ // Output the diagram in filediag
+ if (output_file_diag.empty()) {
+ pcoh.output_diagram();
+ } else {
+ std::cout << "Result in file: " << output_file_diag << std::endl;
+ std::ofstream out(output_file_diag);
+ pcoh.output_diagram(out);
+ out.close();
+ }
}
return 0;
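
The hunk above switches the example to the two-step Alpha_complex API: build the complex once from the points, then export it into a Simplex_tree with create_complex() and a squared-alpha bound. A minimal self-contained sketch of that API, using a hard-coded 2D point set instead of an OFF file (the points and the 0.6 bound are illustrative):

#include <gudhi/Alpha_complex.h>
#include <gudhi/Simplex_tree.h>

#include <CGAL/Epick_d.h>

#include <iostream>
#include <vector>

using Kernel = CGAL::Epick_d< CGAL::Dimension_tag<2> >;
using Point = Kernel::Point_d;
using Simplex_tree = Gudhi::Simplex_tree<>;

int main() {
  std::vector<Point> points { Point(1., 1.), Point(7., 0.), Point(4., 6.), Point(9., 6.), Point(0., 14.) };
  // Step 1: Delaunay triangulation of the points
  Gudhi::alpha_complex::Alpha_complex<Kernel> alpha_complex(points);
  // Step 2: export into a simplex tree, keeping simplices whose squared alpha value is at most 0.6
  Simplex_tree simplex;
  if (alpha_complex.create_complex(simplex, 0.6)) {
    std::cout << "Simplicial complex is of dimension " << simplex.dimension()
              << " - " << simplex.num_simplices() << " simplices - "
              << simplex.num_vertices() << " vertices." << std::endl;
  }
  return 0;
}
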
diff --git a/src/Persistent_cohomology/example/custom_persistence_sort.cpp b/src/Persistent_cohomology/example/custom_persistence_sort.cpp
index 9af38611..64f2a4dc 100644
--- a/src/Persistent_cohomology/example/custom_persistence_sort.cpp
+++ b/src/Persistent_cohomology/example/custom_persistence_sort.cpp
@@ -27,6 +27,8 @@
#include <gudhi/Alpha_complex.h>
#include <gudhi/Persistent_cohomology.h>
+// to construct a simplex_tree from alpha complex
+#include <gudhi/Simplex_tree.h>
#include <iostream>
#include <iterator>
@@ -38,6 +40,9 @@
using Kernel = CGAL::Epick_d< CGAL::Dimension_tag<3> >;
using Point = Kernel::Point_d;
using Alpha_complex = Gudhi::alpha_complex::Alpha_complex<Kernel>;
+using Simplex_tree = Gudhi::Simplex_tree<>;
+using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology< Simplex_tree,
+ Gudhi::persistent_cohomology::Field_Zp >;
std::vector<Point> random_points() {
// Instanciate a random point generator
@@ -60,7 +65,7 @@ std::vector<Point> random_points() {
* Compare two intervals by dimension, then by length.
*/
struct cmp_intervals_by_dim_then_length {
- explicit cmp_intervals_by_dim_then_length(Alpha_complex * sc)
+ explicit cmp_intervals_by_dim_then_length(Simplex_tree * sc)
: sc_(sc) { }
template<typename Persistent_interval>
@@ -71,46 +76,62 @@ struct cmp_intervals_by_dim_then_length {
else
return (sc_->dimension(get < 0 > (p1)) > sc_->dimension(get < 0 > (p2)));
}
- Alpha_complex* sc_;
+ Simplex_tree* sc_;
};
int main(int argc, char **argv) {
std::vector<Point> points = random_points();
+ std::cout << "Points size=" << points.size() << std::endl;
// Alpha complex persistence computation from generated points
- Alpha_complex alpha_complex_from_points(points, 0.6);
-
- using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology< Alpha_complex,
- Gudhi::persistent_cohomology::Field_Zp >;
- Persistent_cohomology pcoh(alpha_complex_from_points);
-
- // initializes the coefficient field for homology - Z/3Z
- pcoh.init_coefficients(3);
- pcoh.compute_persistent_cohomology(0.2);
-
- // Custom sort and output persistence
- cmp_intervals_by_dim_then_length cmp(&alpha_complex_from_points);
- auto persistent_pairs = pcoh.get_persistent_pairs();
- std::sort(std::begin(persistent_pairs), std::end(persistent_pairs), cmp);
- for (auto pair : persistent_pairs) {
- std::cout << alpha_complex_from_points.dimension(get<0>(pair)) << " "
- << alpha_complex_from_points.filtration(get<0>(pair)) << " "
- << alpha_complex_from_points.filtration(get<1>(pair)) << std::endl;
+  Alpha_complex alpha_complex_from_points(points);
+
+  Simplex_tree simplex;
+  if (alpha_complex_from_points.create_complex(simplex, 0.6)) {
+ // ----------------------------------------------------------------------------
+ // Display information about the alpha complex
+ // ----------------------------------------------------------------------------
+ std::cout << "Simplicial complex is of dimension " << simplex.dimension() <<
+ " - " << simplex.num_simplices() << " simplices - " <<
+ simplex.num_vertices() << " vertices." << std::endl;
+
+ // Sort the simplices in the order of the filtration
+ simplex.initialize_filtration();
+
+ std::cout << "Simplex_tree dim: " << simplex.dimension() << std::endl;
+
+ Persistent_cohomology pcoh(simplex);
+
+ // initializes the coefficient field for homology - Z/3Z
+ pcoh.init_coefficients(3);
+ pcoh.compute_persistent_cohomology(0.2);
+
+ // Custom sort and output persistence
+ cmp_intervals_by_dim_then_length cmp(&simplex);
+ auto persistent_pairs = pcoh.get_persistent_pairs();
+ std::sort(std::begin(persistent_pairs), std::end(persistent_pairs), cmp);
+ for (auto pair : persistent_pairs) {
+ std::cout << simplex.dimension(get<0>(pair)) << " "
+ << simplex.filtration(get<0>(pair)) << " "
+ << simplex.filtration(get<1>(pair)) << std::endl;
+ }
+
+ // Persistent Betti numbers
+ std::cout << "The persistent Betti numbers in interval [0.40, 0.41] are : ";
+ for (int dim = 0; dim < simplex.dimension(); dim++)
+ std::cout << "b" << dim << " = " << pcoh.persistent_betti_number(dim, 0.40, 0.41) << " ; ";
+ std::cout << std::endl;
+
+ // Betti numbers
+ std::vector<int> betti_numbers = pcoh.betti_numbers();
+ std::cout << "The Betti numbers are : ";
+ for (std::size_t i = 0; i < betti_numbers.size(); i++)
+ std::cout << "b" << i << " = " << betti_numbers[i] << " ; ";
+ std::cout << std::endl;
}
-
- // Persistent Betti numbers
- std::cout << "The persistent Betti numbers in interval [0.40, 0.41] are : ";
- for (int dim = 0; dim < alpha_complex_from_points.dimension(); dim++)
- std::cout << "b" << dim << " = " << pcoh.persistent_betti_number(dim, 0.40, 0.41) << " ; ";
- std::cout << std::endl;
-
- // Betti numbers
- std::vector<int> betti_numbers = pcoh.betti_numbers();
- std::cout << "The Betti numbers are : ";
- for (std::size_t i = 0; i < betti_numbers.size(); i++)
- std::cout << "b" << i << " = " << betti_numbers[i] << " ; ";
- std::cout << std::endl;
-
return 0;
}
diff --git a/src/Persistent_cohomology/example/exact_alpha_complex_3d_persistence.cpp b/src/Persistent_cohomology/example/exact_alpha_complex_3d_persistence.cpp
new file mode 100644
index 00000000..8a335075
--- /dev/null
+++ b/src/Persistent_cohomology/example/exact_alpha_complex_3d_persistence.cpp
@@ -0,0 +1,245 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2014 INRIA Saclay (France)
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <boost/variant.hpp>
+
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Persistent_cohomology.h>
+#include <gudhi/Points_3D_off_io.h>
+
+#include <CGAL/Exact_predicates_inexact_constructions_kernel.h>
+#include <CGAL/Delaunay_triangulation_3.h>
+#include <CGAL/Alpha_shape_3.h>
+#include <CGAL/iterator.h>
+
+#include <fstream>
+#include <cmath>
+#include <string>
+#include <tuple>
+#include <map>
+#include <utility>
+#include <list>
+#include <vector>
+
+#include "alpha_complex_3d_helper.h"
+
+// Alpha_shape_3 templates type definitions
+using Kernel = CGAL::Exact_predicates_inexact_constructions_kernel;
+using Exact_tag = CGAL::Tag_true;
+using Vb = CGAL::Alpha_shape_vertex_base_3<Kernel, CGAL::Default, Exact_tag>;
+using Fb = CGAL::Alpha_shape_cell_base_3<Kernel, CGAL::Default, Exact_tag>;
+using Tds = CGAL::Triangulation_data_structure_3<Vb, Fb>;
+using Triangulation_3 = CGAL::Delaunay_triangulation_3<Kernel, Tds>;
+using Alpha_shape_3 = CGAL::Alpha_shape_3<Triangulation_3, Exact_tag>;
+
+// From file type definition
+using Point_3 = Kernel::Point_3;
+
+// filtration with alpha values needed type definition
+using Alpha_value_type = Alpha_shape_3::FT;
+using Object = CGAL::Object;
+using Dispatch = CGAL::Dispatch_output_iterator<
+ CGAL::cpp11::tuple<Object, Alpha_value_type>,
+ CGAL::cpp11::tuple<std::back_insert_iterator< std::vector<Object> >,
+ std::back_insert_iterator< std::vector<Alpha_value_type> > > >;
+using Cell_handle = Alpha_shape_3::Cell_handle;
+using Facet = Alpha_shape_3::Facet;
+using Edge_3 = Alpha_shape_3::Edge;
+using Vertex_handle = Alpha_shape_3::Vertex_handle;
+using Vertex_list = std::list<Vertex_handle>;
+
+// gudhi type definition
+using ST = Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_fast_persistence>;
+using Filtration_value = ST::Filtration_value;
+using Simplex_tree_vertex = ST::Vertex_handle;
+using Alpha_shape_simplex_tree_map = std::map<Alpha_shape_3::Vertex_handle, Simplex_tree_vertex >;
+using Alpha_shape_simplex_tree_pair = std::pair<Alpha_shape_3::Vertex_handle, Simplex_tree_vertex>;
+using Simplex_tree_vector_vertex = std::vector< Simplex_tree_vertex >;
+using PCOH = Gudhi::persistent_cohomology::Persistent_cohomology< ST, Gudhi::persistent_cohomology::Field_Zp >;
+
+void usage(char * const progName) {
+ std::cerr << "Usage: " << progName <<
+ " path_to_file_graph coeff_field_characteristic[integer > 0] min_persistence[float >= -1.0]\n";
+ exit(-1);
+}
+
+int main(int argc, char * const argv[]) {
+ // program args management
+ if (argc != 4) {
+ std::cerr << "Error: Number of arguments (" << argc << ") is not correct\n";
+ usage(argv[0]);
+ }
+
+ int coeff_field_characteristic = atoi(argv[2]);
+
+ Filtration_value min_persistence = 0.0;
+ int returnedScanValue = sscanf(argv[3], "%f", &min_persistence);
+ if ((returnedScanValue == EOF) || (min_persistence < -1.0)) {
+ std::cerr << "Error: " << argv[3] << " is not correct\n";
+ usage(argv[0]);
+ }
+
+ // Read points from file
+ std::string offInputFile(argv[1]);
+  // Read the OFF file (input file name given as parameter)
+ Gudhi::Points_3D_off_reader<Point_3> off_reader(offInputFile);
+ // Check the read operation was correct
+ if (!off_reader.is_valid()) {
+ std::cerr << "Unable to read file " << offInputFile << std::endl;
+ usage(argv[0]);
+ }
+
+  // Retrieve the point cloud
+ std::vector<Point_3> lp = off_reader.get_point_cloud();
+
+ // alpha shape construction from points. CGAL has a strange behavior in REGULARIZED mode.
+ Alpha_shape_3 as(lp.begin(), lp.end(), 0, Alpha_shape_3::GENERAL);
+#ifdef DEBUG_TRACES
+ std::cout << "Alpha shape computed in GENERAL mode" << std::endl;
+#endif // DEBUG_TRACES
+
+ // filtration with alpha values from alpha shape
+ std::vector<Object> the_objects;
+ std::vector<Alpha_value_type> the_alpha_values;
+
+ Dispatch disp = CGAL::dispatch_output<Object, Alpha_value_type>(std::back_inserter(the_objects),
+ std::back_inserter(the_alpha_values));
+
+ as.filtration_with_alpha_values(disp);
+#ifdef DEBUG_TRACES
+ std::cout << "filtration_with_alpha_values returns : " << the_objects.size() << " objects" << std::endl;
+#endif // DEBUG_TRACES
+
+ Alpha_shape_3::size_type count_vertices = 0;
+ Alpha_shape_3::size_type count_edges = 0;
+ Alpha_shape_3::size_type count_facets = 0;
+ Alpha_shape_3::size_type count_cells = 0;
+
+ // Loop on objects vector
+ Vertex_list vertex_list;
+ ST simplex_tree;
+ Alpha_shape_simplex_tree_map map_cgal_simplex_tree;
+ std::vector<Alpha_value_type>::iterator the_alpha_value_iterator = the_alpha_values.begin();
+ int dim_max = 0;
+ Filtration_value filtration_max = 0.0;
+ for (auto object_iterator : the_objects) {
+ // Retrieve Alpha shape vertex list from object
+ if (const Cell_handle * cell = CGAL::object_cast<Cell_handle>(&object_iterator)) {
+ vertex_list = from_cell<Vertex_list, Cell_handle>(*cell);
+ count_cells++;
+ if (dim_max < 3) {
+ // Cell is of dim 3
+ dim_max = 3;
+ }
+ } else if (const Facet * facet = CGAL::object_cast<Facet>(&object_iterator)) {
+ vertex_list = from_facet<Vertex_list, Facet>(*facet);
+ count_facets++;
+ if (dim_max < 2) {
+ // Facet is of dim 2
+ dim_max = 2;
+ }
+ } else if (const Edge_3 * edge = CGAL::object_cast<Edge_3>(&object_iterator)) {
+ vertex_list = from_edge<Vertex_list, Edge_3>(*edge);
+ count_edges++;
+ if (dim_max < 1) {
+ // Edge_3 is of dim 1
+ dim_max = 1;
+ }
+ } else if (const Vertex_handle * vertex = CGAL::object_cast<Vertex_handle>(&object_iterator)) {
+ count_vertices++;
+ vertex_list = from_vertex<Vertex_list, Vertex_handle>(*vertex);
+ }
+ // Construction of the vector of simplex_tree vertex from list of alpha_shapes vertex
+ Simplex_tree_vector_vertex the_simplex_tree;
+ for (auto the_alpha_shape_vertex : vertex_list) {
+ Alpha_shape_simplex_tree_map::iterator the_map_iterator = map_cgal_simplex_tree.find(the_alpha_shape_vertex);
+ if (the_map_iterator == map_cgal_simplex_tree.end()) {
+ // alpha shape not found
+ Simplex_tree_vertex vertex = map_cgal_simplex_tree.size();
+#ifdef DEBUG_TRACES
+ std::cout << "vertex [" << the_alpha_shape_vertex->point() << "] not found - insert " << vertex << std::endl;
+#endif // DEBUG_TRACES
+ the_simplex_tree.push_back(vertex);
+ map_cgal_simplex_tree.insert(Alpha_shape_simplex_tree_pair(the_alpha_shape_vertex, vertex));
+ } else {
+ // alpha shape found
+ Simplex_tree_vertex vertex = the_map_iterator->second;
+#ifdef DEBUG_TRACES
+ std::cout << "vertex [" << the_alpha_shape_vertex->point() << "] found in " << vertex << std::endl;
+#endif // DEBUG_TRACES
+ the_simplex_tree.push_back(vertex);
+ }
+ }
+ // Construction of the simplex_tree
+ // you can also use the_alpha_value_iterator->exact()
+ Filtration_value filtr = /*std::sqrt*/CGAL::to_double(the_alpha_value_iterator->exact());
+#ifdef DEBUG_TRACES
+ std::cout << "filtration = " << filtr << std::endl;
+#endif // DEBUG_TRACES
+ if (filtr > filtration_max) {
+ filtration_max = filtr;
+ }
+ simplex_tree.insert_simplex(the_simplex_tree, filtr);
+ if (the_alpha_value_iterator != the_alpha_values.end())
+ ++the_alpha_value_iterator;
+ else
+ std::cout << "This shall not happen" << std::endl;
+ }
+ simplex_tree.set_filtration(filtration_max);
+ simplex_tree.set_dimension(dim_max);
+
+#ifdef DEBUG_TRACES
+ std::cout << "vertices \t\t" << count_vertices << std::endl;
+ std::cout << "edges \t\t" << count_edges << std::endl;
+ std::cout << "facets \t\t" << count_facets << std::endl;
+ std::cout << "cells \t\t" << count_cells << std::endl;
+
+
+ std::cout << "Information of the Simplex Tree: " << std::endl;
+ std::cout << " Number of vertices = " << simplex_tree.num_vertices() << " ";
+ std::cout << " Number of simplices = " << simplex_tree.num_simplices() << std::endl << std::endl;
+ std::cout << " Dimension = " << simplex_tree.dimension() << " ";
+ std::cout << " filtration = " << simplex_tree.filtration() << std::endl << std::endl;
+#endif // DEBUG_TRACES
+
+#ifdef DEBUG_TRACES
+ std::cout << "Iterator on vertices: " << std::endl;
+ for (auto vertex : simplex_tree.complex_vertex_range()) {
+ std::cout << vertex << " ";
+ }
+#endif // DEBUG_TRACES
+
+ // Sort the simplices in the order of the filtration
+ simplex_tree.initialize_filtration();
+
+ std::cout << "Simplex_tree dim: " << simplex_tree.dimension() << std::endl;
+ // Compute the persistence diagram of the complex
+ PCOH pcoh(simplex_tree);
+ // initializes the coefficient field for homology
+ pcoh.init_coefficients(coeff_field_characteristic);
+
+ pcoh.compute_persistent_cohomology(min_persistence);
+
+ pcoh.output_diagram();
+
+ return 0;
+}
diff --git a/src/Persistent_cohomology/example/periodic_alpha_complex_3d_persistence.cpp b/src/Persistent_cohomology/example/periodic_alpha_complex_3d_persistence.cpp
index a199fea1..8928cfc2 100644
--- a/src/Persistent_cohomology/example/periodic_alpha_complex_3d_persistence.cpp
+++ b/src/Persistent_cohomology/example/periodic_alpha_complex_3d_persistence.cpp
@@ -4,7 +4,7 @@
*
* Author(s): Vincent Rouvreau
*
- * Copyright (C) 2014 INRIA Saclay (France)
+ * Copyright (C) 2014 INRIA
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -20,10 +20,11 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
+#include <boost/variant.hpp>
+
#include <gudhi/Simplex_tree.h>
#include <gudhi/Persistent_cohomology.h>
#include <gudhi/Points_3D_off_io.h>
-#include <boost/variant.hpp>
#include <CGAL/Exact_predicates_inexact_constructions_kernel.h>
#include <CGAL/Periodic_3_Delaunay_triangulation_traits_3.h>
@@ -39,6 +40,9 @@
#include <utility>
#include <list>
#include <vector>
+#include <cstdlib>
+
+#include "alpha_complex_3d_helper.h"
// Traits
using K = CGAL::Exact_predicates_inexact_constructions_kernel;
@@ -66,10 +70,12 @@ using Dispatch = CGAL::Dispatch_output_iterator<
using Cell_handle = Alpha_shape_3::Cell_handle;
using Facet = Alpha_shape_3::Facet;
using Edge_3 = Alpha_shape_3::Edge;
+using Vertex_handle = Alpha_shape_3::Vertex_handle;
using Vertex_list = std::list<Alpha_shape_3::Vertex_handle>;
// gudhi type definition
using ST = Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_fast_persistence>;
+using Filtration_value = ST::Filtration_value;
using Simplex_tree_vertex = ST::Vertex_handle;
using Alpha_shape_simplex_tree_map = std::map<Alpha_shape_3::Vertex_handle, Simplex_tree_vertex >;
using Alpha_shape_simplex_tree_pair = std::pair<Alpha_shape_3::Vertex_handle, Simplex_tree_vertex>;
@@ -77,52 +83,6 @@ using Simplex_tree_vector_vertex = std::vector< Simplex_tree_vertex >;
using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology<
ST, Gudhi::persistent_cohomology::Field_Zp >;
-Vertex_list from(const Cell_handle& ch) {
- Vertex_list the_list;
- for (auto i = 0; i < 4; i++) {
-#ifdef DEBUG_TRACES
- std::cout << "from cell[" << i << "]=" << ch->vertex(i)->point() << std::endl;
-#endif // DEBUG_TRACES
- the_list.push_back(ch->vertex(i));
- }
- return the_list;
-}
-
-Vertex_list from(const Facet& fct) {
- Vertex_list the_list;
- for (auto i = 0; i < 4; i++) {
- if (fct.second != i) {
-#ifdef DEBUG_TRACES
- std::cout << "from facet=[" << i << "]" << fct.first->vertex(i)->point() << std::endl;
-#endif // DEBUG_TRACES
- the_list.push_back(fct.first->vertex(i));
- }
- }
- return the_list;
-}
-
-Vertex_list from(const Edge_3& edg) {
- Vertex_list the_list;
- for (auto i = 0; i < 4; i++) {
- if ((edg.second == i) || (edg.third == i)) {
-#ifdef DEBUG_TRACES
- std::cout << "from edge[" << i << "]=" << edg.first->vertex(i)->point() << std::endl;
-#endif // DEBUG_TRACES
- the_list.push_back(edg.first->vertex(i));
- }
- }
- return the_list;
-}
-
-Vertex_list from(const Alpha_shape_3::Vertex_handle& vh) {
- Vertex_list the_list;
-#ifdef DEBUG_TRACES
- std::cout << "from vertex=" << vh->point() << std::endl;
-#endif // DEBUG_TRACES
- the_list.push_back(vh);
- return the_list;
-}
-
void usage(char * const progName) {
std::cerr << "Usage: " << progName <<
" path_to_file_graph path_to_iso_cuboid_3_file coeff_field_characteristic[integer > 0] min_persistence[float >= -1.0]\n";
@@ -136,19 +96,8 @@ int main(int argc, char * const argv[]) {
usage(argv[0]);
}
- int coeff_field_characteristic = 0;
- int returnedScanValue = sscanf(argv[3], "%d", &coeff_field_characteristic);
- if ((returnedScanValue == EOF) || (coeff_field_characteristic <= 0)) {
- std::cerr << "Error: " << argv[3] << " is not correct\n";
- usage(argv[0]);
- }
-
- Filtration_value min_persistence = 0.0;
- returnedScanValue = sscanf(argv[4], "%lf", &min_persistence);
- if ((returnedScanValue == EOF) || (min_persistence < -1.0)) {
- std::cerr << "Error: " << argv[4] << " is not correct\n";
- usage(argv[0]);
- }
+ int coeff_field_characteristic = atoi(argv[3]);
+ Filtration_value min_persistence = strtof(argv[4], nullptr);
// Read points from file
std::string offInputFile(argv[1]);
@@ -212,21 +161,21 @@ int main(int argc, char * const argv[]) {
for (auto object_iterator : the_objects) {
// Retrieve Alpha shape vertex list from object
if (const Cell_handle * cell = CGAL::object_cast<Cell_handle>(&object_iterator)) {
- vertex_list = from(*cell);
+ vertex_list = from_cell<Vertex_list, Cell_handle>(*cell);
count_cells++;
if (dim_max < 3) {
// Cell is of dim 3
dim_max = 3;
}
} else if (const Facet * facet = CGAL::object_cast<Facet>(&object_iterator)) {
- vertex_list = from(*facet);
+ vertex_list = from_facet<Vertex_list, Facet>(*facet);
count_facets++;
if (dim_max < 2) {
// Facet is of dim 2
dim_max = 2;
}
} else if (const Edge_3 * edge = CGAL::object_cast<Edge_3>(&object_iterator)) {
- vertex_list = from(*edge);
+ vertex_list = from_edge<Vertex_list, Edge_3>(*edge);
count_edges++;
if (dim_max < 1) {
// Edge_3 is of dim 1
@@ -235,7 +184,7 @@ int main(int argc, char * const argv[]) {
} else if (const Alpha_shape_3::Vertex_handle * vertex =
CGAL::object_cast<Alpha_shape_3::Vertex_handle>(&object_iterator)) {
count_vertices++;
- vertex_list = from(*vertex);
+ vertex_list = from_vertex<Vertex_list, Vertex_handle>(*vertex);
}
// Construction of the vector of simplex_tree vertex from list of alpha_shapes vertex
Simplex_tree_vector_vertex the_simplex_tree;
diff --git a/src/Persistent_cohomology/example/persistence_from_simple_simplex_tree.cpp b/src/Persistent_cohomology/example/persistence_from_simple_simplex_tree.cpp
index ba772f04..7ca9410a 100644
--- a/src/Persistent_cohomology/example/persistence_from_simple_simplex_tree.cpp
+++ b/src/Persistent_cohomology/example/persistence_from_simple_simplex_tree.cpp
@@ -4,7 +4,7 @@
*
* Author(s): Vincent Rouvreau
*
- * Copyright (C) 2014 INRIA Sophia Antipolis-Méditerranée (France)
+ * Copyright (C) 2014 INRIA
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -29,13 +29,12 @@
#include <utility>
#include <vector>
-using namespace Gudhi;
-using namespace Gudhi::persistent_cohomology;
-
-typedef std::vector< Vertex_handle > typeVectorVertex;
-typedef std::pair<typeVectorVertex, Filtration_value> typeSimplex;
-typedef std::pair< Simplex_tree<>::Simplex_handle, bool > typePairSimplexBool;
-typedef Simplex_tree<> typeST;
+// Types definition
+using Simplex_tree = Gudhi::Simplex_tree<>;
+using Filtration_value = Simplex_tree::Filtration_value;
+using Field_Zp = Gudhi::persistent_cohomology::Field_Zp;
+using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology<Simplex_tree, Field_Zp >;
+using typeVectorVertex = std::vector< Simplex_tree::Vertex_handle >;
void usage(char * const progName) {
std::cerr << "Usage: " << progName << " coeff_field_characteristic[integer > 0] min_persistence[float >= -1.0]\n";
@@ -66,7 +65,7 @@ int main(int argc, char * const argv[]) {
// TEST OF INSERTION
std::cout << "********************************************************************" << std::endl;
std::cout << "TEST OF INSERTION" << std::endl;
- typeST st;
+ Simplex_tree st;
// ++ FIRST
std::cout << " - INSERT (0,1,2)" << std::endl;
@@ -166,7 +165,7 @@ int main(int argc, char * const argv[]) {
std::cout << "**************************************************************" << std::endl;
// Compute the persistence diagram of the complex
- persistent_cohomology::Persistent_cohomology< Simplex_tree<>, Field_Zp > pcoh(st);
+ Persistent_cohomology pcoh(st);
// initializes the coefficient field for homology
pcoh.init_coefficients(coeff_field_characteristic);
diff --git a/src/Persistent_cohomology/example/plain_homology.cpp b/src/Persistent_cohomology/example/plain_homology.cpp
index ae82c817..50f692f2 100644
--- a/src/Persistent_cohomology/example/plain_homology.cpp
+++ b/src/Persistent_cohomology/example/plain_homology.cpp
@@ -27,13 +27,11 @@
#include <vector>
#include <cstdint> // for std::uint8_t
-using namespace Gudhi;
-
/* We could perfectly well use the default Simplex_tree<> (which uses
* Simplex_tree_options_full_featured), the following simply demonstrates
* how to save on storage by not storing a filtration value. */
-struct MyOptions : Simplex_tree_options_full_featured {
+struct MyOptions : Gudhi::Simplex_tree_options_full_featured {
// Implicitly use 0 as filtration value for all simplices
static const bool store_filtration = false;
// The persistence algorithm needs this
@@ -43,7 +41,10 @@ struct MyOptions : Simplex_tree_options_full_featured {
// Maximum number of simplices to compute persistence is 2^8 - 1 = 255. One is reserved for null_key
typedef std::uint8_t Simplex_key;
};
-typedef Simplex_tree<MyOptions> ST;
+
+using ST = Gudhi::Simplex_tree<MyOptions>;
+using Field_Zp = Gudhi::persistent_cohomology::Field_Zp;
+using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology<ST, Field_Zp>;
int main() {
ST st;
@@ -70,7 +71,7 @@ int main() {
st.initialize_filtration();
// Class for homology computation
- persistent_cohomology::Persistent_cohomology<ST, persistent_cohomology::Field_Zp> pcoh(st);
+ Persistent_cohomology pcoh(st);
// Initialize the coefficient field Z/2Z for homology
pcoh.init_coefficients(2);
diff --git a/src/Persistent_cohomology/example/rips_distance_matrix_persistence.cpp b/src/Persistent_cohomology/example/rips_distance_matrix_persistence.cpp
new file mode 100644
index 00000000..8517e7f6
--- /dev/null
+++ b/src/Persistent_cohomology/example/rips_distance_matrix_persistence.cpp
@@ -0,0 +1,144 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Pawel Dlotko, Vincent Rouvreau
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <gudhi/Rips_complex.h>
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Persistent_cohomology.h>
+#include <gudhi/reader_utils.h>
+
+#include <boost/program_options.hpp>
+
+#include <string>
+#include <vector>
+#include <limits> // infinity
+
+// Types definition
+using Simplex_tree = Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_fast_persistence>;
+using Filtration_value = Simplex_tree::Filtration_value;
+using Rips_complex = Gudhi::rips_complex::Rips_complex<Filtration_value>;
+using Field_Zp = Gudhi::persistent_cohomology::Field_Zp;
+using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology<Simplex_tree, Field_Zp >;
+using Distance_matrix = std::vector<std::vector<Filtration_value>>;
+
+void program_options(int argc, char * argv[]
+ , std::string & csv_matrix_file
+ , std::string & filediag
+ , Filtration_value & threshold
+ , int & dim_max
+ , int & p
+ , Filtration_value & min_persistence);
+
+int main(int argc, char * argv[]) {
+ std::string csv_matrix_file;
+ std::string filediag;
+ Filtration_value threshold;
+ int dim_max;
+ int p;
+ Filtration_value min_persistence;
+
+ program_options(argc, argv, csv_matrix_file, filediag, threshold, dim_max, p, min_persistence);
+
+ Distance_matrix distances = read_lower_triangular_matrix_from_csv_file<Filtration_value>(csv_matrix_file);
+ Rips_complex rips_complex_from_file(distances, threshold);
+
+ // Construct the Rips complex in a Simplex Tree
+ Simplex_tree simplex_tree;
+
+ rips_complex_from_file.create_complex(simplex_tree, dim_max);
+ std::cout << "The complex contains " << simplex_tree.num_simplices() << " simplices \n";
+ std::cout << " and has dimension " << simplex_tree.dimension() << " \n";
+
+ // Sort the simplices in the order of the filtration
+ simplex_tree.initialize_filtration();
+
+ // Compute the persistence diagram of the complex
+ Persistent_cohomology pcoh(simplex_tree);
+ // initializes the coefficient field for homology
+ pcoh.init_coefficients(p);
+
+ pcoh.compute_persistent_cohomology(min_persistence);
+
+ // Output the diagram in filediag
+ if (filediag.empty()) {
+ pcoh.output_diagram();
+ } else {
+ std::ofstream out(filediag);
+ pcoh.output_diagram(out);
+ out.close();
+ }
+ return 0;
+}
+
+void program_options(int argc, char * argv[]
+ , std::string & csv_matrix_file
+ , std::string & filediag
+ , Filtration_value & threshold
+ , int & dim_max
+ , int & p
+ , Filtration_value & min_persistence) {
+ namespace po = boost::program_options;
+ po::options_description hidden("Hidden options");
+ hidden.add_options()
+ ("input-file", po::value<std::string>(&csv_matrix_file),
+ "Name of file containing a distance matrix. Can be square or lower triangular matrix. Separator is ';'.");
+
+ po::options_description visible("Allowed options", 100);
+ visible.add_options()
+ ("help,h", "produce help message")
+ ("output-file,o", po::value<std::string>(&filediag)->default_value(std::string()),
+ "Name of file in which the persistence diagram is written. Default print in std::cout")
+ ("max-edge-length,r",
+ po::value<Filtration_value>(&threshold)->default_value(std::numeric_limits<Filtration_value>::infinity()),
+ "Maximal length of an edge for the Rips complex construction.")
+ ("cpx-dimension,d", po::value<int>(&dim_max)->default_value(1),
+ "Maximal dimension of the Rips complex we want to compute.")
+ ("field-charac,p", po::value<int>(&p)->default_value(11),
+ "Characteristic p of the coefficient field Z/pZ for computing homology.")
+ ("min-persistence,m", po::value<Filtration_value>(&min_persistence),
+ "Minimal lifetime of homology feature to be recorded. Default is 0. Enter a negative value to see zero length intervals");
+
+ po::positional_options_description pos;
+ pos.add("input-file", 1);
+
+ po::options_description all;
+ all.add(visible).add(hidden);
+
+ po::variables_map vm;
+ po::store(po::command_line_parser(argc, argv).
+ options(all).positional(pos).run(), vm);
+ po::notify(vm);
+
+ if (vm.count("help") || !vm.count("input-file")) {
+ std::cout << std::endl;
+ std::cout << "Compute the persistent homology with coefficient field Z/pZ \n";
+ std::cout << "of a Rips complex defined on a set of distance matrix.\n \n";
+ std::cout << "The output diagram contains one bar per line, written with the convention: \n";
+ std::cout << " p dim b d \n";
+ std::cout << "where dim is the dimension of the homological feature,\n";
+ std::cout << "b and d are respectively the birth and death of the feature and \n";
+ std::cout << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
+
+ std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
+ std::cout << visible << std::endl;
+ std::abort();
+ }
+}
diff --git a/src/Persistent_cohomology/example/rips_multifield_persistence.cpp b/src/Persistent_cohomology/example/rips_multifield_persistence.cpp
index c5cd775d..7674b5a5 100644
--- a/src/Persistent_cohomology/example/rips_multifield_persistence.cpp
+++ b/src/Persistent_cohomology/example/rips_multifield_persistence.cpp
@@ -4,7 +4,7 @@
*
* Author(s): Clément Maria
*
- * Copyright (C) 2014 INRIA Sophia Antipolis-Méditerranée (France)
+ * Copyright (C) 2014 INRIA
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -20,26 +20,29 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-#include <gudhi/reader_utils.h>
-#include <gudhi/graph_simplicial_complex.h>
+#include <gudhi/Rips_complex.h>
#include <gudhi/distance_functions.h>
#include <gudhi/Simplex_tree.h>
#include <gudhi/Persistent_cohomology.h>
#include <gudhi/Persistent_cohomology/Multi_field.h>
+#include <gudhi/Points_off_io.h>
#include <boost/program_options.hpp>
#include <string>
#include <vector>
-using namespace Gudhi;
-using namespace Gudhi::persistent_cohomology;
-
-typedef int Vertex_handle;
-typedef double Filtration_value;
+// Types definition
+using Simplex_tree = Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_fast_persistence>;
+using Filtration_value = Simplex_tree::Filtration_value;
+using Rips_complex = Gudhi::rips_complex::Rips_complex<Filtration_value>;
+using Multi_field = Gudhi::persistent_cohomology::Multi_field;
+using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology<Simplex_tree, Multi_field >;
+using Point = std::vector<double>;
+using Points_off_reader = Gudhi::Points_off_reader<Point>;
void program_options(int argc, char * argv[]
- , std::string & filepoints
+ , std::string & off_file_points
, std::string & filediag
, Filtration_value & threshold
, int & dim_max
@@ -48,7 +51,7 @@ void program_options(int argc, char * argv[]
, Filtration_value & min_persistence);
int main(int argc, char * argv[]) {
- std::string filepoints;
+ std::string off_file_points;
std::string filediag;
Filtration_value threshold;
int dim_max;
@@ -56,33 +59,26 @@ int main(int argc, char * argv[]) {
int max_p;
Filtration_value min_persistence;
- program_options(argc, argv, filepoints, filediag, threshold, dim_max, min_p, max_p, min_persistence);
-
- // Extract the points from the file filepoints
- typedef std::vector<double> Point_t;
- std::vector< Point_t > points;
- read_points(filepoints, points);
+ program_options(argc, argv, off_file_points, filediag, threshold, dim_max, min_p, max_p, min_persistence);
- // Compute the proximity graph of the points
- Graph_t prox_graph = compute_proximity_graph(points, threshold
- , euclidean_distance<Point_t>);
+ Points_off_reader off_reader(off_file_points);
+ Rips_complex rips_complex_from_file(off_reader.get_point_cloud(), threshold, Euclidean_distance());
// Construct the Rips complex in a Simplex Tree
- typedef Simplex_tree<Simplex_tree_options_fast_persistence> ST;
- ST st;
- // insert the proximity graph in the simplex tree
- st.insert_graph(prox_graph);
- // expand the graph until dimension dim_max
- st.expansion(dim_max);
+ Simplex_tree simplex_tree;
+
+ rips_complex_from_file.create_complex(simplex_tree, dim_max);
+ std::cout << "The complex contains " << simplex_tree.num_simplices() << " simplices \n";
+ std::cout << " and has dimension " << simplex_tree.dimension() << " \n";
// Sort the simplices in the order of the filtration
- st.initialize_filtration();
+ simplex_tree.initialize_filtration();
// Compute the persistence diagram of the complex
- Persistent_cohomology<ST, Multi_field > pcoh(st);
+ Persistent_cohomology pcoh(simplex_tree);
// initializes the coefficient field for homology
pcoh.init_coefficients(min_p, max_p);
- // compute persistent homology, disgarding persistent features of life shorter than min_persistence
+
pcoh.compute_persistent_cohomology(min_persistence);
// Output the diagram in filediag
@@ -98,7 +94,7 @@ int main(int argc, char * argv[]) {
}
void program_options(int argc, char * argv[]
- , std::string & filepoints
+ , std::string & off_file_points
, std::string & filediag
, Filtration_value & threshold
, int & dim_max
@@ -108,8 +104,8 @@ void program_options(int argc, char * argv[]
namespace po = boost::program_options;
po::options_description hidden("Hidden options");
hidden.add_options()
- ("input-file", po::value<std::string>(&filepoints),
- "Name of file containing a point set. Format is one point per line: X1 ... Xd \n");
+ ("input-file", po::value<std::string>(&off_file_points),
+ "Name of an OFF file containing a point set.\n");
po::options_description visible("Allowed options");
visible.add_options()
diff --git a/src/Persistent_cohomology/example/rips_persistence.cpp b/src/Persistent_cohomology/example/rips_persistence.cpp
index cab49395..c6378de7 100644
--- a/src/Persistent_cohomology/example/rips_persistence.cpp
+++ b/src/Persistent_cohomology/example/rips_persistence.cpp
@@ -4,7 +4,7 @@
*
* Author(s): Clément Maria
*
- * Copyright (C) 2014 INRIA Sophia Antipolis-Méditerranée (France)
+ * Copyright (C) 2014 INRIA
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -20,11 +20,11 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-#include <gudhi/reader_utils.h>
-#include <gudhi/graph_simplicial_complex.h>
+#include <gudhi/Rips_complex.h>
#include <gudhi/distance_functions.h>
#include <gudhi/Simplex_tree.h>
#include <gudhi/Persistent_cohomology.h>
+#include <gudhi/Points_off_io.h>
#include <boost/program_options.hpp>
@@ -32,14 +32,17 @@
#include <vector>
#include <limits> // infinity
-using namespace Gudhi;
-using namespace Gudhi::persistent_cohomology;
-
-typedef int Vertex_handle;
-typedef double Filtration_value;
+// Types definition
+using Simplex_tree = Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_fast_persistence>;
+using Filtration_value = Simplex_tree::Filtration_value;
+using Rips_complex = Gudhi::rips_complex::Rips_complex<Filtration_value>;
+using Field_Zp = Gudhi::persistent_cohomology::Field_Zp;
+using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology<Simplex_tree, Field_Zp >;
+using Point = std::vector<double>;
+using Points_off_reader = Gudhi::Points_off_reader<Point>;
void program_options(int argc, char * argv[]
- , std::string & filepoints
+ , std::string & off_file_points
, std::string & filediag
, Filtration_value & threshold
, int & dim_max
@@ -47,40 +50,30 @@ void program_options(int argc, char * argv[]
, Filtration_value & min_persistence);
int main(int argc, char * argv[]) {
- std::string filepoints;
+ std::string off_file_points;
std::string filediag;
Filtration_value threshold;
int dim_max;
int p;
Filtration_value min_persistence;
- program_options(argc, argv, filepoints, filediag, threshold, dim_max, p, min_persistence);
-
- // Extract the points from the file filepoints
- typedef std::vector<double> Point_t;
- std::vector< Point_t > points;
- read_points(filepoints, points);
+ program_options(argc, argv, off_file_points, filediag, threshold, dim_max, p, min_persistence);
- // Compute the proximity graph of the points
- Graph_t prox_graph = compute_proximity_graph(points, threshold
- , euclidean_distance<Point_t>);
+ Points_off_reader off_reader(off_file_points);
+ Rips_complex rips_complex_from_file(off_reader.get_point_cloud(), threshold, Euclidean_distance());
// Construct the Rips complex in a Simplex Tree
- typedef Simplex_tree<Simplex_tree_options_fast_persistence> ST;
- ST st;
- // insert the proximity graph in the simplex tree
- st.insert_graph(prox_graph);
- // expand the graph until dimension dim_max
- st.expansion(dim_max);
+ Simplex_tree simplex_tree;
- std::cout << "The complex contains " << st.num_simplices() << " simplices \n";
- std::cout << " and has dimension " << st.dimension() << " \n";
+ rips_complex_from_file.create_complex(simplex_tree, dim_max);
+ std::cout << "The complex contains " << simplex_tree.num_simplices() << " simplices \n";
+ std::cout << " and has dimension " << simplex_tree.dimension() << " \n";
// Sort the simplices in the order of the filtration
- st.initialize_filtration();
+ simplex_tree.initialize_filtration();
// Compute the persistence diagram of the complex
- persistent_cohomology::Persistent_cohomology<ST, Field_Zp > pcoh(st);
+ Persistent_cohomology pcoh(simplex_tree);
// initializes the coefficient field for homology
pcoh.init_coefficients(p);
@@ -99,7 +92,7 @@ int main(int argc, char * argv[]) {
}
void program_options(int argc, char * argv[]
- , std::string & filepoints
+ , std::string & off_file_points
, std::string & filediag
, Filtration_value & threshold
, int & dim_max
@@ -108,15 +101,16 @@ void program_options(int argc, char * argv[]
namespace po = boost::program_options;
po::options_description hidden("Hidden options");
hidden.add_options()
- ("input-file", po::value<std::string>(&filepoints),
- "Name of file containing a point set. Format is one point per line: X1 ... Xd ");
+ ("input-file", po::value<std::string>(&off_file_points),
+ "Name of an OFF file containing a point set.\n");
po::options_description visible("Allowed options", 100);
visible.add_options()
("help,h", "produce help message")
("output-file,o", po::value<std::string>(&filediag)->default_value(std::string()),
"Name of file in which the persistence diagram is written. Default print in std::cout")
- ("max-edge-length,r", po::value<Filtration_value>(&threshold)->default_value(std::numeric_limits<Filtration_value>::infinity()),
+ ("max-edge-length,r",
+ po::value<Filtration_value>(&threshold)->default_value(std::numeric_limits<Filtration_value>::infinity()),
"Maximal length of an edge for the Rips complex construction.")
("cpx-dimension,d", po::value<int>(&dim_max)->default_value(1),
"Maximal dimension of the Rips complex we want to compute.")
diff --git a/src/Persistent_cohomology/example/rips_persistence_step_by_step.cpp b/src/Persistent_cohomology/example/rips_persistence_step_by_step.cpp
new file mode 100644
index 00000000..b159c62e
--- /dev/null
+++ b/src/Persistent_cohomology/example/rips_persistence_step_by_step.cpp
@@ -0,0 +1,217 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Clément Maria
+ *
+ * Copyright (C) 2014 INRIA Sophia Antipolis-Méditerranée (France)
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <gudhi/graph_simplicial_complex.h>
+#include <gudhi/distance_functions.h>
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Persistent_cohomology.h>
+#include <gudhi/Points_off_io.h>
+
+#include <boost/program_options.hpp>
+
+#include <string>
+#include <vector>
+#include <limits> // infinity
+#include <utility> // for pair
+#include <map>
+
+// ----------------------------------------------------------------------------
+// rips_persistence_step_by_step is an example of each step required to build a
+// Rips complex over a Simplex_tree. Please refer to rips_persistence to see how
+// to do the same thing with the Rips_complex wrapper, which hides these
+// detailed steps (a compact sketch of that wrapper-based version is also given
+// below for comparison).
+// ----------------------------------------------------------------------------
+
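+// A minimal sketch of the wrapper-based equivalent (the same calls are used in
+// rips_persistence.cpp); Rips_complex performs the insert_graph/expansion steps
+// shown below internally:
+//
+//   Gudhi::rips_complex::Rips_complex<Filtration_value>
+//       rips(off_reader.get_point_cloud(), threshold, Euclidean_distance());
+//   Simplex_tree simplex_tree;
+//   rips.create_complex(simplex_tree, dim_max);
+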
+// Types definition
+using Simplex_tree = Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_fast_persistence>;
+using Vertex_handle = Simplex_tree::Vertex_handle;
+using Filtration_value = Simplex_tree::Filtration_value;
+using Graph_t = boost::adjacency_list < boost::vecS, boost::vecS, boost::undirectedS
+, boost::property < vertex_filtration_t, Filtration_value >
+, boost::property < edge_filtration_t, Filtration_value >
+>;
+using Edge_t = std::pair< Vertex_handle, Vertex_handle >;
+
+template< typename InputPointRange, typename Distance >
+Graph_t compute_proximity_graph(InputPointRange &points, Filtration_value threshold, Distance distance);
+
+using Field_Zp = Gudhi::persistent_cohomology::Field_Zp;
+using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology<Simplex_tree, Field_Zp >;
+using Point = std::vector<double>;
+using Points_off_reader = Gudhi::Points_off_reader<Point>;
+
+void program_options(int argc, char * argv[]
+ , std::string & off_file_points
+ , std::string & filediag
+ , Filtration_value & threshold
+ , int & dim_max
+ , int & p
+ , Filtration_value & min_persistence);
+
+int main(int argc, char * argv[]) {
+ std::string off_file_points;
+ std::string filediag;
+ Filtration_value threshold;
+ int dim_max;
+ int p;
+ Filtration_value min_persistence;
+
+ program_options(argc, argv, off_file_points, filediag, threshold, dim_max, p, min_persistence);
+
+ // Extract the points from the OFF file off_file_points
+ Points_off_reader off_reader(off_file_points);
+
+ // Compute the proximity graph of the points
+ Graph_t prox_graph = compute_proximity_graph(off_reader.get_point_cloud(), threshold
+ , Euclidean_distance());
+
+ // Construct the Rips complex in a Simplex Tree
+ Simplex_tree st;
+ // insert the proximity graph in the simplex tree
+ st.insert_graph(prox_graph);
+ // expand the graph until dimension dim_max
+ st.expansion(dim_max);
+
+ std::cout << "The complex contains " << st.num_simplices() << " simplices \n";
+ std::cout << " and has dimension " << st.dimension() << " \n";
+
+ // Sort the simplices in the order of the filtration
+ st.initialize_filtration();
+
+ // Compute the persistence diagram of the complex
+ Persistent_cohomology pcoh(st);
+ // initializes the coefficient field for homology
+ pcoh.init_coefficients(p);
+
+ pcoh.compute_persistent_cohomology(min_persistence);
+
+ // Output the diagram in filediag
+ if (filediag.empty()) {
+ pcoh.output_diagram();
+ } else {
+ std::ofstream out(filediag);
+ pcoh.output_diagram(out);
+ out.close();
+ }
+
+ return 0;
+}
+
+void program_options(int argc, char * argv[]
+ , std::string & off_file_points
+ , std::string & filediag
+ , Filtration_value & threshold
+ , int & dim_max
+ , int & p
+ , Filtration_value & min_persistence) {
+ namespace po = boost::program_options;
+ po::options_description hidden("Hidden options");
+ hidden.add_options()
+ ("input-file", po::value<std::string>(&off_file_points),
+ "Name of an OFF file containing a point set.\n");
+
+ po::options_description visible("Allowed options", 100);
+ visible.add_options()
+ ("help,h", "produce help message")
+ ("output-file,o", po::value<std::string>(&filediag)->default_value(std::string()),
+ "Name of file in which the persistence diagram is written. Default print in std::cout")
+ ("max-edge-length,r",
+ po::value<Filtration_value>(&threshold)->default_value(std::numeric_limits<Filtration_value>::infinity()),
+ "Maximal length of an edge for the Rips complex construction.")
+ ("cpx-dimension,d", po::value<int>(&dim_max)->default_value(1),
+ "Maximal dimension of the Rips complex we want to compute.")
+ ("field-charac,p", po::value<int>(&p)->default_value(11),
+ "Characteristic p of the coefficient field Z/pZ for computing homology.")
+ ("min-persistence,m", po::value<Filtration_value>(&min_persistence),
+ "Minimal lifetime of homology feature to be recorded. Default is 0. Enter a negative value to see zero length intervals");
+
+ po::positional_options_description pos;
+ pos.add("input-file", 1);
+
+ po::options_description all;
+ all.add(visible).add(hidden);
+
+ po::variables_map vm;
+ po::store(po::command_line_parser(argc, argv).
+ options(all).positional(pos).run(), vm);
+ po::notify(vm);
+
+ if (vm.count("help") || !vm.count("input-file")) {
+ std::cout << std::endl;
+ std::cout << "Compute the persistent homology with coefficient field Z/pZ \n";
+ std::cout << "of a Rips complex defined on a set of input points.\n \n";
+ std::cout << "The output diagram contains one bar per line, written with the convention: \n";
+ std::cout << " p dim b d \n";
+ std::cout << "where dim is the dimension of the homological feature,\n";
+ std::cout << "b and d are respectively the birth and death of the feature and \n";
+ std::cout << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
+
+ std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
+ std::cout << visible << std::endl;
+ std::abort();
+ }
+}
+
+/** Computes the proximity graph of the points.
+ *
+ * If points contains n elements, the proximity graph is the graph
+ * with n vertices, and an edge [u,v] iff the distance between
+ * points u and v is at most threshold.
+ *
+ * The type InputPointRange must provide .begin() and .end() methods that return
+ * iterators with value_type Point.
+ */
+template< typename InputPointRange, typename Distance >
+Graph_t compute_proximity_graph(InputPointRange &points, Filtration_value threshold, Distance distance) {
+ std::vector< Edge_t > edges;
+ std::vector< Filtration_value > edges_fil;
+
+ Vertex_handle idx_u, idx_v;
+ Filtration_value fil;
+ idx_u = 0;
+ for (auto it_u = points.begin(); it_u != points.end(); ++it_u) {
+ idx_v = idx_u + 1;
+ for (auto it_v = it_u + 1; it_v != points.end(); ++it_v, ++idx_v) {
+ fil = distance(*it_u, *it_v);
+ if (fil <= threshold) {
+ edges.emplace_back(idx_u, idx_v);
+ edges_fil.push_back(fil);
+ }
+ }
+ ++idx_u;
+ }
+
+ Graph_t skel_graph(edges.begin()
+ , edges.end()
+ , edges_fil.begin()
+ , idx_u); // number of points labeled from 0 to idx_u-1
+
+ auto vertex_prop = boost::get(vertex_filtration_t(), skel_graph);
+
+ boost::graph_traits<Graph_t>::vertex_iterator vi, vi_end;
+ for (std::tie(vi, vi_end) = boost::vertices(skel_graph);
+ vi != vi_end; ++vi) {
+ boost::put(vertex_prop, *vi, 0.);
+ }
+
+ return skel_graph;
+}
diff --git a/src/Persistent_cohomology/example/rips_persistence_via_boundary_matrix.cpp b/src/Persistent_cohomology/example/rips_persistence_via_boundary_matrix.cpp
index 4c6656f5..63da9847 100644
--- a/src/Persistent_cohomology/example/rips_persistence_via_boundary_matrix.cpp
+++ b/src/Persistent_cohomology/example/rips_persistence_via_boundary_matrix.cpp
@@ -4,8 +4,7 @@
*
* Author(s): Clément Maria, Marc Glisse
*
- * Copyright (C) 2014 INRIA Sophia Antipolis-Méditerranée (France),
- * 2015 INRIA Saclay Île de France)
+ * Copyright (C) 2014 INRIA
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -21,12 +20,12 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-#include <gudhi/reader_utils.h>
-#include <gudhi/graph_simplicial_complex.h>
-#include <gudhi/distance_functions.h>
#include <gudhi/Simplex_tree.h>
#include <gudhi/Persistent_cohomology.h>
+#include <gudhi/Rips_complex.h>
#include <gudhi/Hasse_complex.h>
+#include <gudhi/Points_off_io.h>
+#include <gudhi/distance_functions.h>
#include <boost/program_options.hpp>
@@ -44,14 +43,16 @@
// //
////////////////////////////////////////////////////////////////
-using namespace Gudhi;
-using namespace Gudhi::persistent_cohomology;
-
-typedef int Vertex_handle;
-typedef double Filtration_value;
+// Types definition
+using Simplex_tree = Gudhi::Simplex_tree<>;
+using Filtration_value = Simplex_tree::Filtration_value;
+using Rips_complex = Gudhi::rips_complex::Rips_complex<Filtration_value>;
+using Field_Zp = Gudhi::persistent_cohomology::Field_Zp;
+using Point = std::vector<double>;
+using Points_off_reader = Gudhi::Points_off_reader<Point>;
void program_options(int argc, char * argv[]
- , std::string & filepoints
+ , std::string & off_file_points
, std::string & filediag
, Filtration_value & threshold
, int & dim_max
@@ -59,30 +60,21 @@ void program_options(int argc, char * argv[]
, Filtration_value & min_persistence);
int main(int argc, char * argv[]) {
- std::string filepoints;
+ std::string off_file_points;
std::string filediag;
Filtration_value threshold;
int dim_max;
int p;
Filtration_value min_persistence;
- program_options(argc, argv, filepoints, filediag, threshold, dim_max, p, min_persistence);
-
- // Extract the points from the file filepoints
- typedef std::vector<double> Point_t;
- std::vector< Point_t > points;
- read_points(filepoints, points);
+ program_options(argc, argv, off_file_points, filediag, threshold, dim_max, p, min_persistence);
- // Compute the proximity graph of the points
- Graph_t prox_graph = compute_proximity_graph(points, threshold
- , euclidean_distance<Point_t>);
+ Points_off_reader off_reader(off_file_points);
+ Rips_complex rips_complex_from_file(off_reader.get_point_cloud(), threshold, Euclidean_distance());
// Construct the Rips complex in a Simplex Tree
- Simplex_tree<>& st = *new Simplex_tree<>;
- // insert the proximity graph in the simplex tree
- st.insert_graph(prox_graph);
- // expand the graph until dimension dim_max
- st.expansion(dim_max);
+ Simplex_tree& st = *new Simplex_tree;
+ rips_complex_from_file.create_complex(st, dim_max);
std::cout << "The complex contains " << st.num_simplices() << " simplices \n";
std::cout << " and has dimension " << st.dimension() << " \n";
@@ -99,7 +91,7 @@ int main(int argc, char * argv[]) {
st.assign_key(sh, count++);
// Convert to a more convenient representation.
- Hasse_complex<> hcpx(st);
+ Gudhi::Hasse_complex<> hcpx(st);
#ifdef GUDHI_USE_TBB
ts.terminate();
@@ -109,7 +101,7 @@ int main(int argc, char * argv[]) {
delete &st;
// Compute the persistence diagram of the complex
- persistent_cohomology::Persistent_cohomology< Hasse_complex<>, Field_Zp > pcoh(hcpx);
+ Gudhi::persistent_cohomology::Persistent_cohomology< Gudhi::Hasse_complex<>, Field_Zp > pcoh(hcpx);
// initializes the coefficient field for homology
pcoh.init_coefficients(p);
@@ -126,7 +118,7 @@ int main(int argc, char * argv[]) {
}
void program_options(int argc, char * argv[]
- , std::string & filepoints
+ , std::string & off_file_points
, std::string & filediag
, Filtration_value & threshold
, int & dim_max
@@ -135,7 +127,7 @@ void program_options(int argc, char * argv[]
namespace po = boost::program_options;
po::options_description hidden("Hidden options");
hidden.add_options()
- ("input-file", po::value<std::string>(&filepoints),
+ ("input-file", po::value<std::string>(&off_file_points),
"Name of file containing a point set. Format is one point per line: X1 ... Xd ");
po::options_description visible("Allowed options", 100);
diff --git a/src/Persistent_cohomology/example/weighted_alpha_complex_3d_persistence.cpp b/src/Persistent_cohomology/example/weighted_alpha_complex_3d_persistence.cpp
new file mode 100644
index 00000000..34b90933
--- /dev/null
+++ b/src/Persistent_cohomology/example/weighted_alpha_complex_3d_persistence.cpp
@@ -0,0 +1,263 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2014 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <boost/variant.hpp>
+
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Persistent_cohomology.h>
+#include <gudhi/Points_3D_off_io.h>
+
+#include <CGAL/Exact_predicates_inexact_constructions_kernel.h>
+#include <CGAL/Regular_triangulation_euclidean_traits_3.h>
+#include <CGAL/Regular_triangulation_3.h>
+#include <CGAL/Alpha_shape_3.h>
+#include <CGAL/iterator.h>
+
+#include <fstream>
+#include <cmath>
+#include <string>
+#include <tuple>
+#include <map>
+#include <utility>
+#include <list>
+#include <vector>
+#include <cstdlib>
+
+#include "alpha_complex_3d_helper.h"
+
+// Traits
+using Kernel = CGAL::Exact_predicates_inexact_constructions_kernel;
+using Gt = CGAL::Regular_triangulation_euclidean_traits_3<Kernel>;
+using Vb = CGAL::Alpha_shape_vertex_base_3<Gt>;
+using Fb = CGAL::Alpha_shape_cell_base_3<Gt>;
+using Tds = CGAL::Triangulation_data_structure_3<Vb, Fb>;
+using Triangulation_3 = CGAL::Regular_triangulation_3<Gt, Tds>;
+using Alpha_shape_3 = CGAL::Alpha_shape_3<Triangulation_3>;
+
+// Type definitions for the points read from the file
+using Point_3 = Gt::Bare_point;
+using Weighted_point_3 = Gt::Weighted_point;
+
+// Type definitions needed for the filtration with alpha values
+using Alpha_value_type = Alpha_shape_3::FT;
+using Object = CGAL::Object;
+using Dispatch = CGAL::Dispatch_output_iterator<
+ CGAL::cpp11::tuple<Object, Alpha_value_type>,
+ CGAL::cpp11::tuple<std::back_insert_iterator< std::vector<Object> >,
+ std::back_insert_iterator< std::vector<Alpha_value_type> > > >;
+using Cell_handle = Alpha_shape_3::Cell_handle;
+using Facet = Alpha_shape_3::Facet;
+using Edge_3 = Alpha_shape_3::Edge;
+using Vertex_handle = Alpha_shape_3::Vertex_handle;
+using Vertex_list = std::list<Alpha_shape_3::Vertex_handle>;
+
+// Gudhi type definitions
+using ST = Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_fast_persistence>;
+using Filtration_value = ST::Filtration_value;
+using Simplex_tree_vertex = ST::Vertex_handle;
+using Alpha_shape_simplex_tree_map = std::map<Alpha_shape_3::Vertex_handle, Simplex_tree_vertex >;
+using Alpha_shape_simplex_tree_pair = std::pair<Alpha_shape_3::Vertex_handle, Simplex_tree_vertex>;
+using Simplex_tree_vector_vertex = std::vector< Simplex_tree_vertex >;
+using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology<
+ ST, Gudhi::persistent_cohomology::Field_Zp >;
+
+void usage(char * const progName) {
+ std::cerr << "Usage: " << progName <<
+ " path_to_file_graph path_to_weight_file coeff_field_characteristic[integer > 0] min_persistence[float >= -1.0]\n";
+ exit(-1);
+}
+
+int main(int argc, char * const argv[]) {
+ // program args management
+ if (argc != 5) {
+ std::cerr << "Error: Number of arguments (" << argc << ") is not correct\n";
+ usage(argv[0]);
+ }
+
+ int coeff_field_characteristic = atoi(argv[3]);
+ Filtration_value min_persistence = strtof(argv[4], nullptr);
+
+ // Read points from file
+ std::string offInputFile(argv[1]);
+ // Read the OFF file (input file name given as parameter)
+ Gudhi::Points_3D_off_reader<Point_3> off_reader(offInputFile);
+ // Check the read operation was correct
+ if (!off_reader.is_valid()) {
+ std::cerr << "Unable to read file " << offInputFile << std::endl;
+ usage(argv[0]);
+ }
+
+ // Retrieve the point cloud
+ std::vector<Point_3> lp = off_reader.get_point_cloud();
+
+ // Read weights information from file
+ std::ifstream weights_ifstr(argv[2]);
+ std::vector<Weighted_point_3> wp;
+ if (weights_ifstr.good()) {
+ double weight = 0.0;
+ std::size_t index = 0;
+ // Attempt to read each weight as a double; the loop stops when reading fails or every point has a weight
+ while ((weights_ifstr >> weight) && (index < lp.size())) {
+ wp.push_back(Weighted_point_3(lp[index], weight));
+ index++;
+ }
+ if (index != lp.size()) {
+ std::cerr << "Bad number of weights in file " << argv[2] << std::endl;
+ usage(argv[0]);
+ }
+ } else {
+ std::cerr << "Unable to read file " << argv[2] << std::endl;
+ usage(argv[0]);
+ }
+
+ // Alpha shape construction from the weighted points. GENERAL mode is used because CGAL behaves strangely in
+ // REGULARIZED mode.
+ Alpha_shape_3 as(wp.begin(), wp.end(), 0, Alpha_shape_3::GENERAL);
+#ifdef DEBUG_TRACES
+ std::cout << "Alpha shape computed in GENERAL mode" << std::endl;
+#endif // DEBUG_TRACES
+
+ // filtration with alpha values from alpha shape
+ std::vector<Object> the_objects;
+ std::vector<Alpha_value_type> the_alpha_values;
+
+ Dispatch disp = CGAL::dispatch_output<Object, Alpha_value_type>(std::back_inserter(the_objects),
+ std::back_inserter(the_alpha_values));
+
+ as.filtration_with_alpha_values(disp);
+#ifdef DEBUG_TRACES
+ std::cout << "filtration_with_alpha_values returns : " << the_objects.size() << " objects" << std::endl;
+#endif // DEBUG_TRACES
+
+ Alpha_shape_3::size_type count_vertices = 0;
+ Alpha_shape_3::size_type count_edges = 0;
+ Alpha_shape_3::size_type count_facets = 0;
+ Alpha_shape_3::size_type count_cells = 0;
+
+ // Loop on objects vector
+ Vertex_list vertex_list;
+ ST simplex_tree;
+ Alpha_shape_simplex_tree_map map_cgal_simplex_tree;
+ std::vector<Alpha_value_type>::iterator the_alpha_value_iterator = the_alpha_values.begin();
+ int dim_max = 0;
+ Filtration_value filtration_max = 0.0;
+ for (auto object_iterator : the_objects) {
+ // Retrieve Alpha shape vertex list from object
+ if (const Cell_handle * cell = CGAL::object_cast<Cell_handle>(&object_iterator)) {
+ vertex_list = from_cell<Vertex_list, Cell_handle>(*cell);
+ count_cells++;
+ if (dim_max < 3) {
+ // Cell is of dim 3
+ dim_max = 3;
+ }
+ } else if (const Facet * facet = CGAL::object_cast<Facet>(&object_iterator)) {
+ vertex_list = from_facet<Vertex_list, Facet>(*facet);
+ count_facets++;
+ if (dim_max < 2) {
+ // Facet is of dim 2
+ dim_max = 2;
+ }
+ } else if (const Edge_3 * edge = CGAL::object_cast<Edge_3>(&object_iterator)) {
+ vertex_list = from_edge<Vertex_list, Edge_3>(*edge);
+ count_edges++;
+ if (dim_max < 1) {
+ // Edge_3 is of dim 1
+ dim_max = 1;
+ }
+ } else if (const Alpha_shape_3::Vertex_handle * vertex =
+ CGAL::object_cast<Alpha_shape_3::Vertex_handle>(&object_iterator)) {
+ count_vertices++;
+ vertex_list = from_vertex<Vertex_list, Vertex_handle>(*vertex);
+ }
+ // Construction of the vector of simplex_tree vertex from list of alpha_shapes vertex
+ Simplex_tree_vector_vertex the_simplex_tree;
+ for (auto the_alpha_shape_vertex : vertex_list) {
+ Alpha_shape_simplex_tree_map::iterator the_map_iterator = map_cgal_simplex_tree.find(the_alpha_shape_vertex);
+ if (the_map_iterator == map_cgal_simplex_tree.end()) {
+ // alpha shape not found
+ Simplex_tree_vertex vertex = map_cgal_simplex_tree.size();
+#ifdef DEBUG_TRACES
+ std::cout << "vertex [" << the_alpha_shape_vertex->point() << "] not found - insert " << vertex << std::endl;
+#endif // DEBUG_TRACES
+ the_simplex_tree.push_back(vertex);
+ map_cgal_simplex_tree.insert(Alpha_shape_simplex_tree_pair(the_alpha_shape_vertex, vertex));
+ } else {
+ // alpha shape found
+ Simplex_tree_vertex vertex = the_map_iterator->second;
+#ifdef DEBUG_TRACES
+ std::cout << "vertex [" << the_alpha_shape_vertex->point() << "] found in " << vertex << std::endl;
+#endif // DEBUG_TRACES
+ the_simplex_tree.push_back(vertex);
+ }
+ }
+ // Construction of the simplex_tree
+ Filtration_value filtr = /*std::sqrt*/(*the_alpha_value_iterator);
+#ifdef DEBUG_TRACES
+ std::cout << "filtration = " << filtr << std::endl;
+#endif // DEBUG_TRACES
+ if (filtr > filtration_max) {
+ filtration_max = filtr;
+ }
+ simplex_tree.insert_simplex(the_simplex_tree, filtr);
+ if (the_alpha_value_iterator != the_alpha_values.end())
+ ++the_alpha_value_iterator;
+ else
+ std::cout << "This shall not happen" << std::endl;
+ }
+ simplex_tree.set_filtration(filtration_max);
+ simplex_tree.set_dimension(dim_max);
+
+#ifdef DEBUG_TRACES
+ std::cout << "vertices \t\t" << count_vertices << std::endl;
+ std::cout << "edges \t\t" << count_edges << std::endl;
+ std::cout << "facets \t\t" << count_facets << std::endl;
+ std::cout << "cells \t\t" << count_cells << std::endl;
+
+
+ std::cout << "Information of the Simplex Tree: " << std::endl;
+ std::cout << " Number of vertices = " << simplex_tree.num_vertices() << " ";
+ std::cout << " Number of simplices = " << simplex_tree.num_simplices() << std::endl << std::endl;
+ std::cout << " Dimension = " << simplex_tree.dimension() << " ";
+ std::cout << " filtration = " << simplex_tree.filtration() << std::endl << std::endl;
+#endif // DEBUG_TRACES
+
+#ifdef DEBUG_TRACES
+ std::cout << "Iterator on vertices: " << std::endl;
+ for (auto vertex : simplex_tree.complex_vertex_range()) {
+ std::cout << vertex << " ";
+ }
+#endif // DEBUG_TRACES
+
+ // Sort the simplices in the order of the filtration
+ simplex_tree.initialize_filtration();
+
+ std::cout << "Simplex_tree dim: " << simplex_tree.dimension() << std::endl;
+ // Compute the persistence diagram of the complex
+ Persistent_cohomology pcoh(simplex_tree, true);
+ // initializes the coefficient field for homology
+ pcoh.init_coefficients(coeff_field_characteristic);
+
+ pcoh.compute_persistent_cohomology(min_persistence);
+
+ pcoh.output_diagram();
+
+ return 0;
+}
diff --git a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h
index 3568561a..6f194f57 100644
--- a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h
+++ b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h
@@ -110,7 +110,7 @@ class Persistent_cohomology {
cell_pool_() {
if (cpx_->num_simplices() > std::numeric_limits<Simplex_key>::max()) {
// num_simplices must be strictly lower than the limit, because a value is reserved for null_key.
- throw std::out_of_range ("The number of simplices is more than Simplex_key type numeric limit.");
+ throw std::out_of_range("The number of simplices is more than Simplex_key type numeric limit.");
}
Simplex_key idx_fil = 0;
for (auto sh : cpx_->filtration_simplex_range()) {
@@ -300,8 +300,7 @@ class Persistent_cohomology {
// with multiplicity. We used to sum the coefficients directly in
// annotations_in_boundary by using a map, we now do it later.
typedef std::pair<Column *, int> annotation_t;
- // Danger: not thread-safe!
- static std::vector<annotation_t> annotations_in_boundary;
+ thread_local std::vector<annotation_t> annotations_in_boundary;
annotations_in_boundary.clear();
int sign = 1 - 2 * (dim_sigma % 2); // \in {-1,1} provides the sign in the
// alternate sum in the boundary.
@@ -623,7 +622,7 @@ class Persistent_cohomology {
*/
std::vector<int> betti_numbers() const {
// Init Betti numbers vector with zeros until Simplicial complex dimension
- std::vector<int> betti_numbers(cpx_->dimension(), 0);
+ std::vector<int> betti_numbers(dim_max_, 0);
for (auto pair : persistent_pairs_) {
// Count never ended persistence intervals
@@ -662,8 +661,7 @@ class Persistent_cohomology {
*/
std::vector<int> persistent_betti_numbers(Filtration_value from, Filtration_value to) const {
// Init Betti numbers vector with zeros until Simplicial complex dimension
- std::vector<int> betti_numbers(cpx_->dimension(), 0);
-
+ std::vector<int> betti_numbers(dim_max_, 0);
for (auto pair : persistent_pairs_) {
// Count persistence intervals that covers the given interval
// null_simplex test : if the function is called with to=+infinity, we still get something useful. And it will
@@ -709,6 +707,22 @@ class Persistent_cohomology {
return persistent_pairs_;
}
+ /** @brief Returns persistence intervals for a given dimension.
+ * @param[in] dimension Dimension to get the birth and death pairs from.
+ * @return A vector of persistence intervals (birth and death) on a fixed dimension.
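+ * For illustration, a minimal usage sketch (assuming `pcoh` is a Persistent_cohomology object on which
+ * compute_persistent_cohomology() has already been called):
+ * \code
+ * auto intervals_in_dimension_0 = pcoh.intervals_in_dimension(0);
+ * \endcode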
+ */
+ std::vector< std::pair< Filtration_value , Filtration_value > >
+ intervals_in_dimension(int dimension) {
+ std::vector< std::pair< Filtration_value , Filtration_value > > result;
+ // auto && pair, to avoid unnecessary copying
+ for (auto && pair : persistent_pairs_) {
+ if (cpx_->dimension(get<0>(pair)) == dimension) {
+ result.emplace_back(cpx_->filtration(get<0>(pair)), cpx_->filtration(get<1>(pair)));
+ }
+ }
+ return result;
+ }
+
private:
/*
* Structure representing a cocycle.
diff --git a/src/Persistent_cohomology/test/betti_numbers_unit_test.cpp b/src/Persistent_cohomology/test/betti_numbers_unit_test.cpp
index 40221005..0ed3fddf 100644
--- a/src/Persistent_cohomology/test/betti_numbers_unit_test.cpp
+++ b/src/Persistent_cohomology/test/betti_numbers_unit_test.cpp
@@ -84,6 +84,8 @@ BOOST_AUTO_TEST_CASE( plain_homology_betti_numbers )
// 2 1 0 inf
// means that in Z/2Z-homology, the Betti numbers are b0=2 and b1=1.
+ std::cout << "BETTI NUMBERS" << std::endl;
+
BOOST_CHECK(pcoh.betti_number(0) == 2);
BOOST_CHECK(pcoh.betti_number(1) == 1);
BOOST_CHECK(pcoh.betti_number(2) == 0);
@@ -93,6 +95,8 @@ BOOST_AUTO_TEST_CASE( plain_homology_betti_numbers )
BOOST_CHECK(bns[0] == 2);
BOOST_CHECK(bns[1] == 1);
BOOST_CHECK(bns[2] == 0);
+
+ std::cout << "GET PERSISTENT PAIRS" << std::endl;
// Custom sort and output persistence
cmp_intervals_by_dim_then_length<Mini_simplex_tree> cmp(&st);
@@ -115,6 +119,33 @@ BOOST_AUTO_TEST_CASE( plain_homology_betti_numbers )
BOOST_CHECK(st.dimension(get<0>(persistent_pairs[2])) == 0);
BOOST_CHECK(st.filtration(get<0>(persistent_pairs[2])) == 0);
BOOST_CHECK(get<1>(persistent_pairs[2]) == st.null_simplex());
+
+ std::cout << "INTERVALS IN DIMENSION" << std::endl;
+
+ auto intervals_in_dimension_0 = pcoh.intervals_in_dimension(0);
+ std::cout << "intervals_in_dimension_0.size() = " << intervals_in_dimension_0.size() << std::endl;
+ for (std::size_t i = 0; i < intervals_in_dimension_0.size(); i++)
+ std::cout << "intervals_in_dimension_0[" << i << "] = [" << intervals_in_dimension_0[i].first << "," <<
+ intervals_in_dimension_0[i].second << "]" << std::endl;
+ BOOST_CHECK(intervals_in_dimension_0.size() == 2);
+ BOOST_CHECK(intervals_in_dimension_0[0].first == 0);
+ BOOST_CHECK(intervals_in_dimension_0[0].second == std::numeric_limits<Mini_simplex_tree::Filtration_value>::infinity());
+ BOOST_CHECK(intervals_in_dimension_0[1].first == 0);
+ BOOST_CHECK(intervals_in_dimension_0[1].second == std::numeric_limits<Mini_simplex_tree::Filtration_value>::infinity());
+
+
+ auto intervals_in_dimension_1 = pcoh.intervals_in_dimension(1);
+ std::cout << "intervals_in_dimension_1.size() = " << intervals_in_dimension_1.size() << std::endl;
+ for (std::size_t i = 0; i < intervals_in_dimension_1.size(); i++)
+ std::cout << "intervals_in_dimension_1[" << i << "] = [" << intervals_in_dimension_1[i].first << "," <<
+ intervals_in_dimension_1[i].second << "]" << std::endl;
+ BOOST_CHECK(intervals_in_dimension_1.size() == 1);
+ BOOST_CHECK(intervals_in_dimension_1[0].first == 0);
+ BOOST_CHECK(intervals_in_dimension_1[0].second == std::numeric_limits<Mini_simplex_tree::Filtration_value>::infinity());
+
+ auto intervals_in_dimension_2 = pcoh.intervals_in_dimension(2);
+ std::cout << "intervals_in_dimension_2.size() = " << intervals_in_dimension_2.size() << std::endl;
+ BOOST_CHECK(intervals_in_dimension_2.size() == 0);
}
using Simplex_tree = Gudhi::Simplex_tree<>;
@@ -231,4 +262,30 @@ BOOST_AUTO_TEST_CASE( betti_numbers )
BOOST_CHECK(st.dimension(get<0>(persistent_pairs[2])) == 0);
BOOST_CHECK(st.filtration(get<0>(persistent_pairs[2])) == 1);
BOOST_CHECK(get<1>(persistent_pairs[2]) == st.null_simplex());
+
+ std::cout << "INTERVALS IN DIMENSION" << std::endl;
+
+ auto intervals_in_dimension_0 = pcoh.intervals_in_dimension(0);
+ std::cout << "intervals_in_dimension_0.size() = " << intervals_in_dimension_0.size() << std::endl;
+ for (std::size_t i = 0; i < intervals_in_dimension_0.size(); i++)
+ std::cout << "intervals_in_dimension_0[" << i << "] = [" << intervals_in_dimension_0[i].first << "," <<
+ intervals_in_dimension_0[i].second << "]" << std::endl;
+ BOOST_CHECK(intervals_in_dimension_0.size() == 2);
+ BOOST_CHECK(intervals_in_dimension_0[0].first == 2);
+ BOOST_CHECK(intervals_in_dimension_0[0].second == std::numeric_limits<Mini_simplex_tree::Filtration_value>::infinity());
+ BOOST_CHECK(intervals_in_dimension_0[1].first == 1);
+ BOOST_CHECK(intervals_in_dimension_0[1].second == std::numeric_limits<Mini_simplex_tree::Filtration_value>::infinity());
+
+ auto intervals_in_dimension_1 = pcoh.intervals_in_dimension(1);
+ std::cout << "intervals_in_dimension_1.size() = " << intervals_in_dimension_1.size() << std::endl;
+ for (std::size_t i = 0; i < intervals_in_dimension_1.size(); i++)
+ std::cout << "intervals_in_dimension_1[" << i << "] = [" << intervals_in_dimension_1[i].first << "," <<
+ intervals_in_dimension_1[i].second << "]" << std::endl;
+ BOOST_CHECK(intervals_in_dimension_1.size() == 1);
+ BOOST_CHECK(intervals_in_dimension_1[0].first == 4);
+ BOOST_CHECK(intervals_in_dimension_1[0].second == std::numeric_limits<Mini_simplex_tree::Filtration_value>::infinity());
+
+ auto intervals_in_dimension_2 = pcoh.intervals_in_dimension(2);
+ std::cout << "intervals_in_dimension_2.size() = " << intervals_in_dimension_2.size() << std::endl;
+ BOOST_CHECK(intervals_in_dimension_2.size() == 0);
}
diff --git a/src/Persistent_cohomology/test/persistent_cohomology_unit_test_multi_field.cpp b/src/Persistent_cohomology/test/persistent_cohomology_unit_test_multi_field.cpp
index 703682e1..1a6e3296 100644
--- a/src/Persistent_cohomology/test/persistent_cohomology_unit_test_multi_field.cpp
+++ b/src/Persistent_cohomology/test/persistent_cohomology_unit_test_multi_field.cpp
@@ -21,7 +21,7 @@ using namespace boost::unit_test;
typedef Simplex_tree<> typeST;
-std::string test_rips_persistence(int min_coefficient, int max_coefficient, int min_persistence) {
+std::string test_rips_persistence(int min_coefficient, int max_coefficient, double min_persistence) {
// file name is given as parameter from CMakeLists.txt
const std::string inputFile(framework::master_test_suite().argv[1]);
@@ -74,7 +74,7 @@ void test_rips_persistence_in_dimension(int min_dimension, int max_dimension) {
std::cout << "********************************************************************" << std::endl;
std::cout << "TEST OF RIPS_PERSISTENT_COHOMOLOGY_MULTI_FIELD MIN_DIM=" << min_dimension << " MAX_DIM=" << max_dimension << " MIN_PERS=0" << std::endl;
- std::string str_rips_persistence = test_rips_persistence(min_dimension, max_dimension, static_cast<Filtration_value> (0.0));
+ std::string str_rips_persistence = test_rips_persistence(min_dimension, max_dimension, 0.0);
std::cout << "str_rips_persistence=" << str_rips_persistence << std::endl;
BOOST_CHECK(str_rips_persistence.find(value0) != std::string::npos); // Check found
diff --git a/src/Rips_complex/concept/SimplicialComplexForRips.h b/src/Rips_complex/concept/SimplicialComplexForRips.h
new file mode 100644
index 00000000..7dab0615
--- /dev/null
+++ b/src/Rips_complex/concept/SimplicialComplexForRips.h
@@ -0,0 +1,54 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef CONCEPT_RIPS_COMPLEX_SIMPLICIAL_COMPLEX_FOR_RIPS_H_
+#define CONCEPT_RIPS_COMPLEX_SIMPLICIAL_COMPLEX_FOR_RIPS_H_
+
+namespace Gudhi {
+
+namespace rips_complex {
+
+/** \brief The concept SimplicialComplexForRips describes the requirements for a type to implement a simplicial
+ * complex that can be created from a `Rips_complex`. The only available model for the moment is `Simplex_tree`.
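+ *
+ * A minimal sketch of how a model is typically filled by `Rips_complex::create_complex` (here `skel_graph` and
+ * `max_dim` are placeholder names):
+ * \code
+ * Gudhi::Simplex_tree<> simplicial_complex;
+ * simplicial_complex.insert_graph(skel_graph);  // insert the one-skeleton graph
+ * simplicial_complex.expansion(max_dim);        // expand it up to dimension max_dim
+ * \endcode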
+ */
+struct SimplicialComplexForRips {
+ /** \brief Type used to store the filtration values of the simplicial complex. */
+ typedef unspecified Filtration_value;
+
+ /** \brief Inserts a given `Gudhi::rips_complex::Rips_complex::OneSkeletonGraph` in the simplicial complex. */
+ template<class OneSkeletonGraph>
+ void insert_graph(const OneSkeletonGraph& skel_graph);
+
+ /** \brief Expands a simplicial complex containing only its one-skeleton up to a given maximal dimension, as
+ * explained in \ref ripsdefinition. */
+ void expansion(int max_dim);
+
+ /** \brief Returns the number of vertices in the simplicial complex. */
+ std::size_t num_vertices();
+
+};
+
+} // namespace rips_complex
+
+} // namespace Gudhi
+
+#endif // CONCEPT_RIPS_COMPLEX_SIMPLICIAL_COMPLEX_FOR_RIPS_H_
diff --git a/src/Rips_complex/doc/COPYRIGHT b/src/Rips_complex/doc/COPYRIGHT
new file mode 100644
index 00000000..594b7d03
--- /dev/null
+++ b/src/Rips_complex/doc/COPYRIGHT
@@ -0,0 +1,19 @@
+The files of this directory are part of the Gudhi Library. The Gudhi library
+(Geometric Understanding in Higher Dimensions) is a generic C++ library for
+computational topology.
+
+Author(s): Clément Maria, Pawel Dlotko, Vincent Rouvreau
+
+Copyright (C) 2015 INRIA
+
+This program is free software: you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free Software
+Foundation, either version 3 of the License, or (at your option) any later
+version.
+
+This program is distributed in the hope that it will be useful, but WITHOUT
+ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License along with
+this program. If not, see <http://www.gnu.org/licenses/>.
diff --git a/src/Rips_complex/doc/Intro_rips_complex.h b/src/Rips_complex/doc/Intro_rips_complex.h
new file mode 100644
index 00000000..8e374c09
--- /dev/null
+++ b/src/Rips_complex/doc/Intro_rips_complex.h
@@ -0,0 +1,158 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Clément Maria, Pawel Dlotko, Vincent Rouvreau
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef DOC_RIPS_COMPLEX_INTRO_RIPS_COMPLEX_H_
+#define DOC_RIPS_COMPLEX_INTRO_RIPS_COMPLEX_H_
+
+namespace Gudhi {
+
+namespace rips_complex {
+
+/** \defgroup rips_complex Rips complex
+ *
+ * \author Clément Maria, Pawel Dlotko, Vincent Rouvreau
+ *
+ * @{
+ *
+ * \section ripsdefinition Rips complex definition
+ *
+ * Rips_complex
+ * <a target="_blank" href="https://en.wikipedia.org/wiki/Vietoris%E2%80%93Rips_complex">(Wikipedia)</a> is a
+ * one-skeleton graph from which a
+ * <a target="_blank" href="https://en.wikipedia.org/wiki/Simplicial_complex">simplicial complex</a>
+ * can be constructed.
+ * The input can be a point cloud with a given distance function, or a distance matrix.
+ *
+ * The filtration value of each edge is computed from a user-given distance function, or directly from the distance
+ * matrix.
+ *
+ * Edges whose filtration value is strictly greater than a given threshold are not inserted into
+ * the complex.
+ *
+ * When creating a simplicial complex from this one-skeleton graph, the Rips complex first inserts the one-skeleton
+ * graph into the data structure, and then expands the simplicial complex when required.
+ *
+ * Vertex names correspond to the indices of the points in the given range (i.e. the point cloud).
+ *
+ * \image html "rips_complex_representation.png" "Rips-complex one skeleton graph representation"
+ *
+ * In this example, as the edges (4,5), (4,6) and (5,6) are in the complex, the simplex (4,5,6) is added with its
+ * filtration value set to \f$max(filtration(4,5), filtration(4,6), filtration(5,6))\f$,
+ * and similarly for the simplex (0,1,2,3).
+ *
+ * If the Rips_complex interface is not detailed enough for your needs, please refer to the
+ * <a href="_persistent_cohomology_2rips_persistence_step_by_step_8cpp-example.html">
+ * rips_persistence_step_by_step.cpp</a> example, where the graph construction over the Simplex_tree is detailed
+ * step by step.
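+ *
+ * As a minimal construction sketch (using the type aliases from the examples below; `points`, `threshold` and
+ * `dim_max` are placeholder names):
+ * \code
+ * Rips_complex rips_complex(points, threshold, Euclidean_distance());
+ * Simplex_tree simplex_tree;
+ * rips_complex.create_complex(simplex_tree, dim_max);
+ * \endcode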
+ *
+ * \section ripspointsdistance Point cloud and distance function
+ *
+ * \subsection ripspointscloudexample Example from a point cloud and a distance function
+ *
+ * This example builds the one-skeleton graph from the given points, threshold value, and distance function,
+ * and creates a `Simplex_tree` with it.
+ *
+ * Finally, it displays some information about the simplicial complex.
+ *
+ * \include Rips_complex/example_one_skeleton_rips_from_points.cpp
+ *
+ * When launching it (the maximal distance between two points is 12.0 and the complex is expanded up to dimension 1,
+ * i.e. the one-skeleton graph):
+ *
+ * \code $> ./oneskeletonripspoints
+ * \endcode
+ *
+ * the program output is:
+ *
+ * \include Rips_complex/one_skeleton_rips_for_doc.txt
+ *
+ * \subsection ripsoffexample Example from OFF file
+ *
+ * This example builds the Rips_complex from the points read from an OFF file, a threshold value, and a distance
+ * function, and creates a `Simplex_tree` with it.
+ *
+ * Finally, it displays some information about the Rips complex.
+ *
+ * \include Rips_complex/example_rips_complex_from_off_file.cpp
+ *
+ * When launching:
+ *
+ * \code $> ./ripsoffreader ../../data/points/alphacomplexdoc.off 12.0 3
+ * \endcode
+ *
+ * the program output is:
+ *
+ * \include Rips_complex/full_skeleton_rips_for_doc.txt
+ *
+ *
+ *
+ * \section ripsdistancematrix Distance matrix
+ *
+ * \subsection ripsdistancematrixexample Example from a distance matrix
+ *
+ * This example builds the one-skeleton graph from the given distance matrix and threshold value,
+ * and creates a `Simplex_tree` with it.
+ *
+ * Finally, it displays some information about the simplicial complex.
+ *
+ * \include Rips_complex/example_one_skeleton_rips_from_distance_matrix.cpp
+ *
+ * When launching it (the maximal distance between two points is 1.0 and the complex is expanded up to dimension 1,
+ * i.e. the one-skeleton graph):
+ *
+ * \code $> ./oneskeletonripsdistance
+ * \endcode
+ *
+ * the program output is:
+ *
+ * \include Rips_complex/one_skeleton_rips_for_doc.txt
+ *
+ * \subsection ripscsvdistanceexample Example from a distance matrix read in a csv file
+ *
+ * This example builds the one-skeleton graph from a distance matrix read from a CSV file and a threshold value,
+ * and creates a `Simplex_tree` with it.
+ *
+ * Finally, it displays some information about the Rips complex.
+ *
+ * \include Rips_complex/example_rips_complex_from_csv_distance_matrix_file.cpp
+ *
+ * When launching:
+ *
+ * \code $> ./ripscsvdistancereader ../../data/distance_matrix/full_square_distance_matrix.csv 1.0 3
+ * \endcode
+ *
+ * the program output is:
+ *
+ * \include Rips_complex/full_skeleton_rips_for_doc.txt
+ *
+ * \copyright GNU General Public License v3.
+ * \verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim
+ */
+/** @} */ // end defgroup rips_complex
+
+} // namespace rips_complex
+
+} // namespace Gudhi
+
+#endif // DOC_RIPS_COMPLEX_INTRO_RIPS_COMPLEX_H_
diff --git a/src/Rips_complex/doc/rips_complex_representation.ipe b/src/Rips_complex/doc/rips_complex_representation.ipe
new file mode 100644
index 00000000..7f6028f4
--- /dev/null
+++ b/src/Rips_complex/doc/rips_complex_representation.ipe
@@ -0,0 +1,326 @@
+<?xml version="1.0"?>
+<!DOCTYPE ipe SYSTEM "ipe.dtd">
+<ipe version="70107" creator="Ipe 7.1.10">
+<info created="D:20150603143945" modified="D:20160928121844"/>
+<ipestyle name="basic">
+<symbol name="arrow/arc(spx)">
+<path stroke="sym-stroke" fill="sym-stroke" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/farc(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="mark/circle(sx)" transformations="translations">
+<path fill="sym-stroke">
+0.6 0 0 0.6 0 0 e
+0.4 0 0 0.4 0 0 e
+</path>
+</symbol>
+<symbol name="mark/disk(sx)" transformations="translations">
+<path fill="sym-stroke">
+0.6 0 0 0.6 0 0 e
+</path>
+</symbol>
+<symbol name="mark/fdisk(sfx)" transformations="translations">
+<group>
+<path fill="sym-fill">
+0.5 0 0 0.5 0 0 e
+</path>
+<path fill="sym-stroke" fillrule="eofill">
+0.6 0 0 0.6 0 0 e
+0.4 0 0 0.4 0 0 e
+</path>
+</group>
+</symbol>
+<symbol name="mark/box(sx)" transformations="translations">
+<path fill="sym-stroke" fillrule="eofill">
+-0.6 -0.6 m
+0.6 -0.6 l
+0.6 0.6 l
+-0.6 0.6 l
+h
+-0.4 -0.4 m
+0.4 -0.4 l
+0.4 0.4 l
+-0.4 0.4 l
+h
+</path>
+</symbol>
+<symbol name="mark/square(sx)" transformations="translations">
+<path fill="sym-stroke">
+-0.6 -0.6 m
+0.6 -0.6 l
+0.6 0.6 l
+-0.6 0.6 l
+h
+</path>
+</symbol>
+<symbol name="mark/fsquare(sfx)" transformations="translations">
+<group>
+<path fill="sym-fill">
+-0.5 -0.5 m
+0.5 -0.5 l
+0.5 0.5 l
+-0.5 0.5 l
+h
+</path>
+<path fill="sym-stroke" fillrule="eofill">
+-0.6 -0.6 m
+0.6 -0.6 l
+0.6 0.6 l
+-0.6 0.6 l
+h
+-0.4 -0.4 m
+0.4 -0.4 l
+0.4 0.4 l
+-0.4 0.4 l
+h
+</path>
+</group>
+</symbol>
+<symbol name="mark/cross(sx)" transformations="translations">
+<group>
+<path fill="sym-stroke">
+-0.43 -0.57 m
+0.57 0.43 l
+0.43 0.57 l
+-0.57 -0.43 l
+h
+</path>
+<path fill="sym-stroke">
+-0.43 0.57 m
+0.57 -0.43 l
+0.43 -0.57 l
+-0.57 0.43 l
+h
+</path>
+</group>
+</symbol>
+<symbol name="arrow/fnormal(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/pointed(spx)">
+<path stroke="sym-stroke" fill="sym-stroke" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-0.8 0 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/fpointed(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-0.8 0 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/linear(spx)">
+<path stroke="sym-stroke" pen="sym-pen">
+-1 0.333 m
+0 0 l
+-1 -0.333 l
+</path>
+</symbol>
+<symbol name="arrow/fdouble(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+-1 0 m
+-2 0.333 l
+-2 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/double(spx)">
+<path stroke="sym-stroke" fill="sym-stroke" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+-1 0 m
+-2 0.333 l
+-2 -0.333 l
+h
+</path>
+</symbol>
+<pen name="heavier" value="0.8"/>
+<pen name="fat" value="1.2"/>
+<pen name="ultrafat" value="2"/>
+<symbolsize name="large" value="5"/>
+<symbolsize name="small" value="2"/>
+<symbolsize name="tiny" value="1.1"/>
+<arrowsize name="large" value="10"/>
+<arrowsize name="small" value="5"/>
+<arrowsize name="tiny" value="3"/>
+<color name="red" value="1 0 0"/>
+<color name="green" value="0 1 0"/>
+<color name="blue" value="0 0 1"/>
+<color name="yellow" value="1 1 0"/>
+<color name="orange" value="1 0.647 0"/>
+<color name="gold" value="1 0.843 0"/>
+<color name="purple" value="0.627 0.125 0.941"/>
+<color name="gray" value="0.745"/>
+<color name="brown" value="0.647 0.165 0.165"/>
+<color name="navy" value="0 0 0.502"/>
+<color name="pink" value="1 0.753 0.796"/>
+<color name="seagreen" value="0.18 0.545 0.341"/>
+<color name="turquoise" value="0.251 0.878 0.816"/>
+<color name="violet" value="0.933 0.51 0.933"/>
+<color name="darkblue" value="0 0 0.545"/>
+<color name="darkcyan" value="0 0.545 0.545"/>
+<color name="darkgray" value="0.663"/>
+<color name="darkgreen" value="0 0.392 0"/>
+<color name="darkmagenta" value="0.545 0 0.545"/>
+<color name="darkorange" value="1 0.549 0"/>
+<color name="darkred" value="0.545 0 0"/>
+<color name="lightblue" value="0.678 0.847 0.902"/>
+<color name="lightcyan" value="0.878 1 1"/>
+<color name="lightgray" value="0.827"/>
+<color name="lightgreen" value="0.565 0.933 0.565"/>
+<color name="lightyellow" value="1 1 0.878"/>
+<dashstyle name="dashed" value="[4] 0"/>
+<dashstyle name="dotted" value="[1 3] 0"/>
+<dashstyle name="dash dotted" value="[4 2 1 2] 0"/>
+<dashstyle name="dash dot dotted" value="[4 2 1 2 1 2] 0"/>
+<textsize name="large" value="\large"/>
+<textsize name="small" value="\small"/>
+<textsize name="tiny" value="\tiny"/>
+<textsize name="Large" value="\Large"/>
+<textsize name="LARGE" value="\LARGE"/>
+<textsize name="huge" value="\huge"/>
+<textsize name="Huge" value="\Huge"/>
+<textsize name="footnote" value="\footnotesize"/>
+<textstyle name="center" begin="\begin{center}" end="\end{center}"/>
+<textstyle name="itemize" begin="\begin{itemize}" end="\end{itemize}"/>
+<textstyle name="item" begin="\begin{itemize}\item{}" end="\end{itemize}"/>
+<gridsize name="4 pts" value="4"/>
+<gridsize name="8 pts (~3 mm)" value="8"/>
+<gridsize name="16 pts (~6 mm)" value="16"/>
+<gridsize name="32 pts (~12 mm)" value="32"/>
+<gridsize name="10 pts (~3.5 mm)" value="10"/>
+<gridsize name="20 pts (~7 mm)" value="20"/>
+<gridsize name="14 pts (~5 mm)" value="14"/>
+<gridsize name="28 pts (~10 mm)" value="28"/>
+<gridsize name="56 pts (~20 mm)" value="56"/>
+<anglesize name="90 deg" value="90"/>
+<anglesize name="60 deg" value="60"/>
+<anglesize name="45 deg" value="45"/>
+<anglesize name="30 deg" value="30"/>
+<anglesize name="22.5 deg" value="22.5"/>
+<tiling name="falling" angle="-60" step="4" width="1"/>
+<tiling name="rising" angle="30" step="4" width="1"/>
+</ipestyle>
+<page>
+<layer name="alpha"/>
+<view layers="alpha" active="alpha"/>
+<path layer="alpha" matrix="1 0 0 1 0 -8" fill="darkblue">
+109.771 601.912 m
+159.595 601.797 l
+140.058 541.915 l
+h
+</path>
+<path matrix="1 0 0 1 0 -8" fill="darkblue">
+79.8776 552.169 m
+109.756 601.699 l
+139.812 542.209 l
+h
+</path>
+<path matrix="1 0 0 1 0 -8" fill="lightblue">
+69.8453 682.419 m
+159.925 712.208 l
+90.12 732.039 l
+h
+</path>
+<text matrix="1 0 0 1 -230.178 14.1775" transformations="translations" pos="380 530" stroke="seagreen" type="label" width="68.836" height="8.307" depth="2.32" valign="baseline" size="large">Rips complex</text>
+<text matrix="1 0 0 1 -212.333 10.6762" transformations="translations" pos="282.952 524.893" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">0</text>
+<text matrix="1 0 0 1 -210.178 14.1775" transformations="translations" pos="352.708 510.349" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">1</text>
+<text matrix="1 0 0 1 -210.178 14.1775" transformations="translations" pos="310.693 578.759" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">2</text>
+<text matrix="1 0 0 1 -210.178 14.1775" transformations="translations" pos="375.332 578.49" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">3</text>
+<text matrix="1 0 0 1 -210.178 14.1775" transformations="translations" pos="272.179 660.635" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">4</text>
+<text matrix="1 0 0 1 -209.478 4.0238" transformations="translations" pos="296.419 724.197" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">5</text>
+<text matrix="1 0 0 1 -210.178 14.1775" transformations="translations" pos="375.332 689.453" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
+<path matrix="1 0 0 1 29.8225 14.1775" stroke="black" pen="heavier">
+60 710 m
+40 660 l
+</path>
+<path matrix="1 0 0 1 29.8225 14.1775" stroke="black" pen="heavier">
+40 660 m
+130 690 l
+</path>
+<path matrix="1 0 0 1 29.8225 14.1775" stroke="black" pen="heavier">
+130 690 m
+60 710 l
+</path>
+<path matrix="1 0 0 1 29.8225 14.1775" stroke="black" pen="heavier">
+40 660 m
+80 580 l
+</path>
+<path matrix="1 0 0 1 29.8225 14.1775" stroke="black" pen="heavier">
+80 580 m
+130 580 l
+130 580 l
+</path>
+<path matrix="1 0 0 1 29.8225 14.1775" stroke="black" pen="heavier">
+130 580 m
+110 520 l
+</path>
+<path matrix="1 0 0 1 29.8225 14.1775" stroke="black" pen="heavier">
+110 520 m
+50 530 l
+50 530 l
+50 530 l
+</path>
+<path matrix="1 0 0 1 29.8225 14.1775" stroke="black" pen="heavier">
+50 530 m
+80 580 l
+</path>
+<path matrix="1 0 0 1 29.8225 14.1775" stroke="black" pen="heavier">
+130 580 m
+130 690 l
+</path>
+<use matrix="1 0 0 1 -209.478 4.0238" name="mark/fdisk(sfx)" pos="300 720" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -210.178 14.1775" name="mark/fdisk(sfx)" pos="280 660" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -210.178 14.1775" name="mark/fdisk(sfx)" pos="370 690" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -210.178 14.1775" name="mark/fdisk(sfx)" pos="370 580" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -210.178 14.1775" name="mark/fdisk(sfx)" pos="290 530" size="normal" stroke="black" fill="white"/>
+<path matrix="1 0 0 1 -40 -16" stroke="black" pen="heavier">
+150.038 609.9 m
+179.929 549.727 l
+</path>
+<use matrix="1 0 0 1 -210.178 14.1775" name="mark/fdisk(sfx)" pos="320 580" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -210.178 14.1775" name="mark/fdisk(sfx)" pos="350 520" size="normal" stroke="black" fill="white"/>
+<path stroke="black" pen="heavier">
+158.7 593.269 m
+81.4925 544.805 l
+</path>
+<path matrix="1 0 0 1 -17.9662 -17.9662" stroke="gray">
+256.324 639.958 m
+370.055 639.958 l
+</path>
+<path matrix="1 0 0 1 -17.9662 -17.9662" stroke="gray">
+56.8567 0 0 56.8567 313.217 639.756 e
+</path>
+<use matrix="1 0 0 1 52.1387 -98.0941" name="mark/fdisk(sfx)" pos="300 720" size="normal" stroke="gray" fill="white"/>
+<use matrix="1 0 0 1 -61.4926 -98.0942" name="mark/fdisk(sfx)" pos="300 720" size="normal" stroke="gray" fill="white"/>
+<text matrix="1 0 0 1 -26.6167 -33.2708" transformations="translations" pos="295.735 657.944" stroke="gray" type="label" width="63.374" height="6.926" depth="1.93" valign="baseline">Rips threshold</text>
+</page>
+</ipe>
diff --git a/src/Rips_complex/doc/rips_complex_representation.png b/src/Rips_complex/doc/rips_complex_representation.png
new file mode 100644
index 00000000..e901d92e
--- /dev/null
+++ b/src/Rips_complex/doc/rips_complex_representation.png
Binary files differ
diff --git a/src/Rips_complex/doc/rips_one_skeleton.ipe b/src/Rips_complex/doc/rips_one_skeleton.ipe
new file mode 100644
index 00000000..3a35970c
--- /dev/null
+++ b/src/Rips_complex/doc/rips_one_skeleton.ipe
@@ -0,0 +1,326 @@
+<?xml version="1.0"?>
+<!DOCTYPE ipe SYSTEM "ipe.dtd">
+<ipe version="70107" creator="Ipe 7.1.10">
+<info created="D:20150603143945" modified="D:20160928130224"/>
+<ipestyle name="basic">
+<symbol name="arrow/arc(spx)">
+<path stroke="sym-stroke" fill="sym-stroke" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/farc(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="mark/circle(sx)" transformations="translations">
+<path fill="sym-stroke">
+0.6 0 0 0.6 0 0 e
+0.4 0 0 0.4 0 0 e
+</path>
+</symbol>
+<symbol name="mark/disk(sx)" transformations="translations">
+<path fill="sym-stroke">
+0.6 0 0 0.6 0 0 e
+</path>
+</symbol>
+<symbol name="mark/fdisk(sfx)" transformations="translations">
+<group>
+<path fill="sym-fill">
+0.5 0 0 0.5 0 0 e
+</path>
+<path fill="sym-stroke" fillrule="eofill">
+0.6 0 0 0.6 0 0 e
+0.4 0 0 0.4 0 0 e
+</path>
+</group>
+</symbol>
+<symbol name="mark/box(sx)" transformations="translations">
+<path fill="sym-stroke" fillrule="eofill">
+-0.6 -0.6 m
+0.6 -0.6 l
+0.6 0.6 l
+-0.6 0.6 l
+h
+-0.4 -0.4 m
+0.4 -0.4 l
+0.4 0.4 l
+-0.4 0.4 l
+h
+</path>
+</symbol>
+<symbol name="mark/square(sx)" transformations="translations">
+<path fill="sym-stroke">
+-0.6 -0.6 m
+0.6 -0.6 l
+0.6 0.6 l
+-0.6 0.6 l
+h
+</path>
+</symbol>
+<symbol name="mark/fsquare(sfx)" transformations="translations">
+<group>
+<path fill="sym-fill">
+-0.5 -0.5 m
+0.5 -0.5 l
+0.5 0.5 l
+-0.5 0.5 l
+h
+</path>
+<path fill="sym-stroke" fillrule="eofill">
+-0.6 -0.6 m
+0.6 -0.6 l
+0.6 0.6 l
+-0.6 0.6 l
+h
+-0.4 -0.4 m
+0.4 -0.4 l
+0.4 0.4 l
+-0.4 0.4 l
+h
+</path>
+</group>
+</symbol>
+<symbol name="mark/cross(sx)" transformations="translations">
+<group>
+<path fill="sym-stroke">
+-0.43 -0.57 m
+0.57 0.43 l
+0.43 0.57 l
+-0.57 -0.43 l
+h
+</path>
+<path fill="sym-stroke">
+-0.43 0.57 m
+0.57 -0.43 l
+0.43 -0.57 l
+-0.57 0.43 l
+h
+</path>
+</group>
+</symbol>
+<symbol name="arrow/fnormal(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/pointed(spx)">
+<path stroke="sym-stroke" fill="sym-stroke" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-0.8 0 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/fpointed(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-0.8 0 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/linear(spx)">
+<path stroke="sym-stroke" pen="sym-pen">
+-1 0.333 m
+0 0 l
+-1 -0.333 l
+</path>
+</symbol>
+<symbol name="arrow/fdouble(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+-1 0 m
+-2 0.333 l
+-2 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/double(spx)">
+<path stroke="sym-stroke" fill="sym-stroke" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+-1 0 m
+-2 0.333 l
+-2 -0.333 l
+h
+</path>
+</symbol>
+<pen name="heavier" value="0.8"/>
+<pen name="fat" value="1.2"/>
+<pen name="ultrafat" value="2"/>
+<symbolsize name="large" value="5"/>
+<symbolsize name="small" value="2"/>
+<symbolsize name="tiny" value="1.1"/>
+<arrowsize name="large" value="10"/>
+<arrowsize name="small" value="5"/>
+<arrowsize name="tiny" value="3"/>
+<color name="red" value="1 0 0"/>
+<color name="green" value="0 1 0"/>
+<color name="blue" value="0 0 1"/>
+<color name="yellow" value="1 1 0"/>
+<color name="orange" value="1 0.647 0"/>
+<color name="gold" value="1 0.843 0"/>
+<color name="purple" value="0.627 0.125 0.941"/>
+<color name="gray" value="0.745"/>
+<color name="brown" value="0.647 0.165 0.165"/>
+<color name="navy" value="0 0 0.502"/>
+<color name="pink" value="1 0.753 0.796"/>
+<color name="seagreen" value="0.18 0.545 0.341"/>
+<color name="turquoise" value="0.251 0.878 0.816"/>
+<color name="violet" value="0.933 0.51 0.933"/>
+<color name="darkblue" value="0 0 0.545"/>
+<color name="darkcyan" value="0 0.545 0.545"/>
+<color name="darkgray" value="0.663"/>
+<color name="darkgreen" value="0 0.392 0"/>
+<color name="darkmagenta" value="0.545 0 0.545"/>
+<color name="darkorange" value="1 0.549 0"/>
+<color name="darkred" value="0.545 0 0"/>
+<color name="lightblue" value="0.678 0.847 0.902"/>
+<color name="lightcyan" value="0.878 1 1"/>
+<color name="lightgray" value="0.827"/>
+<color name="lightgreen" value="0.565 0.933 0.565"/>
+<color name="lightyellow" value="1 1 0.878"/>
+<dashstyle name="dashed" value="[4] 0"/>
+<dashstyle name="dotted" value="[1 3] 0"/>
+<dashstyle name="dash dotted" value="[4 2 1 2] 0"/>
+<dashstyle name="dash dot dotted" value="[4 2 1 2 1 2] 0"/>
+<textsize name="large" value="\large"/>
+<textsize name="small" value="\small"/>
+<textsize name="tiny" value="\tiny"/>
+<textsize name="Large" value="\Large"/>
+<textsize name="LARGE" value="\LARGE"/>
+<textsize name="huge" value="\huge"/>
+<textsize name="Huge" value="\Huge"/>
+<textsize name="footnote" value="\footnotesize"/>
+<textstyle name="center" begin="\begin{center}" end="\end{center}"/>
+<textstyle name="itemize" begin="\begin{itemize}" end="\end{itemize}"/>
+<textstyle name="item" begin="\begin{itemize}\item{}" end="\end{itemize}"/>
+<gridsize name="4 pts" value="4"/>
+<gridsize name="8 pts (~3 mm)" value="8"/>
+<gridsize name="16 pts (~6 mm)" value="16"/>
+<gridsize name="32 pts (~12 mm)" value="32"/>
+<gridsize name="10 pts (~3.5 mm)" value="10"/>
+<gridsize name="20 pts (~7 mm)" value="20"/>
+<gridsize name="14 pts (~5 mm)" value="14"/>
+<gridsize name="28 pts (~10 mm)" value="28"/>
+<gridsize name="56 pts (~20 mm)" value="56"/>
+<anglesize name="90 deg" value="90"/>
+<anglesize name="60 deg" value="60"/>
+<anglesize name="45 deg" value="45"/>
+<anglesize name="30 deg" value="30"/>
+<anglesize name="22.5 deg" value="22.5"/>
+<tiling name="falling" angle="-60" step="4" width="1"/>
+<tiling name="rising" angle="30" step="4" width="1"/>
+</ipestyle>
+<page>
+<layer name="alpha"/>
+<view layers="alpha" active="alpha"/>
+<path layer="alpha" matrix="1 0 0 1 0 -8" stroke="0">
+109.771 601.912 m
+159.595 601.797 l
+140.058 541.915 l
+h
+</path>
+<path matrix="1 0 0 1 0 -8" stroke="0">
+79.8776 552.169 m
+109.756 601.699 l
+139.812 542.209 l
+h
+</path>
+<path matrix="1 0 0 1 0.665417 -8.66542" stroke="0">
+69.8453 682.419 m
+159.925 712.208 l
+90.12 732.039 l
+h
+</path>
+<text matrix="1 0 0 1 -230.178 14.1775" transformations="translations" pos="380 530" stroke="seagreen" type="label" width="98.916" height="8.307" depth="2.32" valign="baseline" size="large">One skeleton graph</text>
+<text matrix="1 0 0 1 -212.333 10.6762" transformations="translations" pos="282.952 524.893" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">0</text>
+<text matrix="1 0 0 1 -210.178 14.1775" transformations="translations" pos="352.708 510.349" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">1</text>
+<text matrix="1 0 0 1 -210.178 14.1775" transformations="translations" pos="310.693 578.759" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">2</text>
+<text matrix="1 0 0 1 -210.178 14.1775" transformations="translations" pos="375.332 578.49" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">3</text>
+<text matrix="1 0 0 1 -210.178 14.1775" transformations="translations" pos="272.179 660.635" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">4</text>
+<text matrix="1 0 0 1 -209.478 4.0238" transformations="translations" pos="296.419 724.197" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">5</text>
+<text matrix="1 0 0 1 -210.178 14.1775" transformations="translations" pos="375.332 689.453" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
+<path matrix="1 0 0 1 30.6497 14.0396" stroke="black" pen="heavier">
+60 710 m
+40 660 l
+</path>
+<path matrix="1 0 0 1 30.3739 13.9018" stroke="black" pen="heavier">
+40 660 m
+130 690 l
+</path>
+<path matrix="1 0 0 1 29.8225 14.1775" stroke="black" pen="heavier">
+130 690 m
+60 710 l
+</path>
+<path matrix="1 0 0 1 29.8225 14.1775" stroke="black" pen="heavier">
+40 660 m
+80 580 l
+</path>
+<path matrix="1 0 0 1 29.8225 14.1775" stroke="black" pen="heavier">
+80 580 m
+130 580 l
+130 580 l
+</path>
+<path matrix="1 0 0 1 29.8225 14.1775" stroke="black" pen="heavier">
+130 580 m
+110 520 l
+</path>
+<path matrix="1 0 0 1 29.8225 14.1775" stroke="black" pen="heavier">
+110 520 m
+50 530 l
+50 530 l
+50 530 l
+</path>
+<path matrix="1 0 0 1 29.8225 14.1775" stroke="black" pen="heavier">
+50 530 m
+80 580 l
+</path>
+<path matrix="1 0 0 1 29.8225 14.1775" stroke="black" pen="heavier">
+130 580 m
+130 690 l
+</path>
+<use matrix="1 0 0 1 -209.478 4.0238" name="mark/fdisk(sfx)" pos="300 720" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -210.178 14.1775" name="mark/fdisk(sfx)" pos="280 660" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -210.178 14.1775" name="mark/fdisk(sfx)" pos="370 690" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -210.178 14.1775" name="mark/fdisk(sfx)" pos="370 580" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -210.178 14.1775" name="mark/fdisk(sfx)" pos="290 530" size="normal" stroke="black" fill="white"/>
+<path matrix="1 0 0 1 -40 -16" stroke="black" pen="heavier">
+150.038 609.9 m
+179.929 549.727 l
+</path>
+<use matrix="1 0 0 1 -210.178 14.1775" name="mark/fdisk(sfx)" pos="320 580" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -210.178 14.1775" name="mark/fdisk(sfx)" pos="350 520" size="normal" stroke="black" fill="white"/>
+<path stroke="black" pen="heavier">
+158.7 593.269 m
+81.4925 544.805 l
+</path>
+<path matrix="1 0 0 1 -17.9662 -17.9662" stroke="gray">
+256.324 639.958 m
+370.055 639.958 l
+</path>
+<path matrix="1 0 0 1 -17.9662 -17.9662" stroke="gray">
+56.8567 0 0 56.8567 313.217 639.756 e
+</path>
+<use matrix="1 0 0 1 52.1387 -98.0941" name="mark/fdisk(sfx)" pos="300 720" size="normal" stroke="gray" fill="white"/>
+<use matrix="1 0 0 1 -61.4926 -98.0942" name="mark/fdisk(sfx)" pos="300 720" size="normal" stroke="gray" fill="white"/>
+<text matrix="1 0 0 1 -26.6167 -33.2708" transformations="translations" pos="295.735 657.944" stroke="gray" type="label" width="63.374" height="6.926" depth="1.93" valign="baseline">Rips threshold</text>
+</page>
+</ipe>
diff --git a/src/Rips_complex/doc/rips_one_skeleton.png b/src/Rips_complex/doc/rips_one_skeleton.png
new file mode 100644
index 00000000..1028770e
--- /dev/null
+++ b/src/Rips_complex/doc/rips_one_skeleton.png
Binary files differ
diff --git a/src/Rips_complex/example/CMakeLists.txt b/src/Rips_complex/example/CMakeLists.txt
new file mode 100644
index 00000000..070ac710
--- /dev/null
+++ b/src/Rips_complex/example/CMakeLists.txt
@@ -0,0 +1,47 @@
+cmake_minimum_required(VERSION 2.6)
+project(Rips_complex_examples)
+
+# Point cloud
+add_executable ( ripsoffreader example_rips_complex_from_off_file.cpp )
+target_link_libraries(ripsoffreader ${Boost_SYSTEM_LIBRARY})
+
+add_executable ( oneskeletonripspoints example_one_skeleton_rips_from_points.cpp )
+target_link_libraries(oneskeletonripspoints ${Boost_SYSTEM_LIBRARY})
+
+# Distance matrix
+add_executable ( oneskeletonripsdistance example_one_skeleton_rips_from_distance_matrix.cpp )
+target_link_libraries(oneskeletonripsdistance ${Boost_SYSTEM_LIBRARY})
+
+add_executable ( ripscsvdistancereader example_rips_complex_from_csv_distance_matrix_file.cpp )
+target_link_libraries(ripscsvdistancereader ${Boost_SYSTEM_LIBRARY})
+
+if (TBB_FOUND)
+ target_link_libraries(ripsoffreader ${TBB_LIBRARIES})
+ target_link_libraries(oneskeletonripspoints ${TBB_LIBRARIES})
+ target_link_libraries(oneskeletonripsdistance ${TBB_LIBRARIES})
+ target_link_libraries(ripscsvdistancereader ${TBB_LIBRARIES})
+endif()
+
+add_test(oneskeletonripspoints ${CMAKE_CURRENT_BINARY_DIR}/oneskeletonripspoints)
+add_test(oneskeletonripsdistance ${CMAKE_CURRENT_BINARY_DIR}/oneskeletonripsdistance)
+
+# Do not forget to copy test files in current binary dir
+file(COPY "${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+add_test(ripsoffreader_doc_12_1 ${CMAKE_CURRENT_BINARY_DIR}/ripsoffreader alphacomplexdoc.off 12.0 1 ${CMAKE_CURRENT_BINARY_DIR}/ripsoffreader_result_12_1.txt)
+add_test(ripsoffreader_doc_12_3 ${CMAKE_CURRENT_BINARY_DIR}/ripsoffreader alphacomplexdoc.off 12.0 3 ${CMAKE_CURRENT_BINARY_DIR}/ripsoffreader_result_12_3.txt)
+
+file(COPY "${CMAKE_SOURCE_DIR}/data/distance_matrix/full_square_distance_matrix.csv" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+add_test(ripscsvdistancereader_doc_12_1 ${CMAKE_CURRENT_BINARY_DIR}/ripscsvdistancereader full_square_distance_matrix.csv 12.0 1 ${CMAKE_CURRENT_BINARY_DIR}/ripscsvreader_result_12_1.txt)
+add_test(ripscsvdistancereader_doc_12_3 ${CMAKE_CURRENT_BINARY_DIR}/ripscsvdistancereader full_square_distance_matrix.csv 12.0 3 ${CMAKE_CURRENT_BINARY_DIR}/ripscsvreader_result_12_3.txt)
+
+
+if (DIFF_PATH)
+ # Do not forget to copy test results files in current binary dir
+ file(COPY "one_skeleton_rips_for_doc.txt" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+ file(COPY "full_skeleton_rips_for_doc.txt" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+
+ add_test(ripsoffreader_doc_12_1_diff_files ${DIFF_PATH} ${CMAKE_CURRENT_BINARY_DIR}/ripsoffreader_result_12_1.txt ${CMAKE_CURRENT_BINARY_DIR}/one_skeleton_rips_for_doc.txt)
+ add_test(ripsoffreader_doc_12_3_diff_files ${DIFF_PATH} ${CMAKE_CURRENT_BINARY_DIR}/ripsoffreader_result_12_3.txt ${CMAKE_CURRENT_BINARY_DIR}/full_skeleton_rips_for_doc.txt)
+ add_test(ripscsvreader_doc_12_1_diff_files ${DIFF_PATH} ${CMAKE_CURRENT_BINARY_DIR}/ripscsvreader_result_12_1.txt ${CMAKE_CURRENT_BINARY_DIR}/one_skeleton_rips_for_doc.txt)
+ add_test(ripscsvreader_doc_12_3_diff_files ${DIFF_PATH} ${CMAKE_CURRENT_BINARY_DIR}/ripscsvreader_result_12_3.txt ${CMAKE_CURRENT_BINARY_DIR}/full_skeleton_rips_for_doc.txt)
+endif()
diff --git a/src/Rips_complex/example/example_one_skeleton_rips_from_distance_matrix.cpp b/src/Rips_complex/example/example_one_skeleton_rips_from_distance_matrix.cpp
new file mode 100644
index 00000000..bbc3c755
--- /dev/null
+++ b/src/Rips_complex/example/example_one_skeleton_rips_from_distance_matrix.cpp
@@ -0,0 +1,58 @@
+#include <gudhi/Rips_complex.h>
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/distance_functions.h>
+
+#include <iostream>
+#include <string>
+#include <vector>
+#include <limits> // for std::numeric_limits
+
+int main() {
+ // Type definitions
+ using Simplex_tree = Gudhi::Simplex_tree<>;
+ using Filtration_value = Simplex_tree::Filtration_value;
+ using Rips_complex = Gudhi::rips_complex::Rips_complex<Filtration_value>;
+ using Distance_matrix = std::vector<std::vector<Filtration_value>>;
+
+ // User defined distance matrix is:
+ // | 0 0.94 0.77 0.99 0.11 |
+ // | 0.94 0 0.26 0.99 0.39 |
+ // | 0.77 0.26 0 0.28 0.97 |
+ // | 0.99 0.99 0.28 0 0.30 |
+ // | 0.11 0.39 0.97 0.30 0 |
+
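+  // Only the lower-triangular part is provided: row i holds the distances from point i to points 0..i-1.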
+ Distance_matrix distances;
+ distances.push_back({});
+ distances.push_back({0.94});
+ distances.push_back({0.77, 0.26});
+ distances.push_back({0.99, 0.99, 0.28});
+ distances.push_back({0.11, 0.39, 0.97, 0.30});
+
+  // ----------------------------------------------------------------------------
+  // Init of a Rips complex from the distance matrix
+  // ----------------------------------------------------------------------------
+  double threshold = 1.0;
+  Rips_complex rips_complex_from_distances(distances, threshold);
+
+  Simplex_tree stree;
+  rips_complex_from_distances.create_complex(stree, 1);
+ // ----------------------------------------------------------------------------
+  // Display information about the one-skeleton of the Rips complex
+ // ----------------------------------------------------------------------------
+ std::cout << "Rips complex is of dimension " << stree.dimension() <<
+ " - " << stree.num_simplices() << " simplices - " <<
+ stree.num_vertices() << " vertices." << std::endl;
+
+ std::cout << "Iterator on Rips complex simplices in the filtration order, with [filtration value]:" <<
+ std::endl;
+ for (auto f_simplex : stree.filtration_simplex_range()) {
+ std::cout << " ( ";
+ for (auto vertex : stree.simplex_vertex_range(f_simplex)) {
+ std::cout << vertex << " ";
+ }
+ std::cout << ") -> " << "[" << stree.filtration(f_simplex) << "] ";
+ std::cout << std::endl;
+ }
+
+ return 0;
+}
diff --git a/src/Rips_complex/example/example_one_skeleton_rips_from_points.cpp b/src/Rips_complex/example/example_one_skeleton_rips_from_points.cpp
new file mode 100644
index 00000000..3fd69ebc
--- /dev/null
+++ b/src/Rips_complex/example/example_one_skeleton_rips_from_points.cpp
@@ -0,0 +1,52 @@
+#include <gudhi/Rips_complex.h>
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/distance_functions.h>
+
+#include <iostream>
+#include <string>
+#include <vector>
+#include <limits> // for std::numeric_limits
+
+int main() {
+ // Type definitions
+ using Point = std::vector<double>;
+ using Simplex_tree = Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_fast_persistence>;
+ using Filtration_value = Simplex_tree::Filtration_value;
+ using Rips_complex = Gudhi::rips_complex::Rips_complex<Filtration_value>;
+
+ std::vector<Point> points;
+ points.push_back({1.0, 1.0});
+ points.push_back({7.0, 0.0});
+ points.push_back({4.0, 6.0});
+ points.push_back({9.0, 6.0});
+ points.push_back({0.0, 14.0});
+ points.push_back({2.0, 19.0});
+ points.push_back({9.0, 17.0});
+
+ // ----------------------------------------------------------------------------
+ // Init of a Rips complex from points
+ // ----------------------------------------------------------------------------
+ double threshold = 12.0;
+ Rips_complex rips_complex_from_points(points, threshold, Euclidean_distance());
+
+ Simplex_tree stree;
+ rips_complex_from_points.create_complex(stree, 1);
+ // ----------------------------------------------------------------------------
+  // Display information about the one-skeleton of the Rips complex
+ // ----------------------------------------------------------------------------
+ std::cout << "Rips complex is of dimension " << stree.dimension() <<
+ " - " << stree.num_simplices() << " simplices - " <<
+ stree.num_vertices() << " vertices." << std::endl;
+
+ std::cout << "Iterator on Rips complex simplices in the filtration order, with [filtration value]:" <<
+ std::endl;
+ for (auto f_simplex : stree.filtration_simplex_range()) {
+ std::cout << " ( ";
+ for (auto vertex : stree.simplex_vertex_range(f_simplex)) {
+ std::cout << vertex << " ";
+ }
+ std::cout << ") -> " << "[" << stree.filtration(f_simplex) << "] ";
+ std::cout << std::endl;
+ }
+ return 0;
+}
diff --git a/src/Rips_complex/example/example_rips_complex_from_csv_distance_matrix_file.cpp b/src/Rips_complex/example/example_rips_complex_from_csv_distance_matrix_file.cpp
new file mode 100644
index 00000000..7ae8126f
--- /dev/null
+++ b/src/Rips_complex/example/example_rips_complex_from_csv_distance_matrix_file.cpp
@@ -0,0 +1,72 @@
+#include <gudhi/Rips_complex.h>
+// to construct Rips_complex from a csv file representing a distance matrix
+#include <gudhi/reader_utils.h>
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/distance_functions.h>
+
+#include <iostream>
+#include <fstream>  // for std::ofstream
+#include <string>
+#include <vector>
+
+void usage(int nbArgs, char * const progName) {
+ std::cerr << "Error: Number of arguments (" << nbArgs << ") is not correct\n";
+ std::cerr << "Usage: " << progName << " filename.csv threshold dim_max [ouput_file.txt]\n";
+ std::cerr << " i.e.: " << progName << " ../../data/distance_matrix/full_square_distance_matrix.csv 1.0 3\n";
+ exit(-1); // ----- >>
+}
+
+int main(int argc, char **argv) {
+  if ((argc != 4) && (argc != 5)) usage(argc, argv[0]);
+
+ std::string csv_file_name(argv[1]);
+ double threshold = atof(argv[2]);
+ int dim_max = atoi(argv[3]);
+
+ // Type definitions
+ using Simplex_tree = Gudhi::Simplex_tree<>;
+ using Filtration_value = Simplex_tree::Filtration_value;
+ using Rips_complex = Gudhi::rips_complex::Rips_complex<Filtration_value>;
+ using Distance_matrix = std::vector<std::vector<Filtration_value>>;
+
+ // ----------------------------------------------------------------------------
+ // Init of a Rips complex from a distance matrix in a csv file
+ // Default separator is ';'
+ // ----------------------------------------------------------------------------
+ Distance_matrix distances = read_lower_triangular_matrix_from_csv_file<Filtration_value>(csv_file_name);
+ Rips_complex rips_complex_from_file(distances, threshold);
+
+  std::streambuf* stream_buffer;
+  std::ofstream output_file_stream;
+
+  if (argc == 5) {
+    output_file_stream.open(std::string(argv[4]));
+    stream_buffer = output_file_stream.rdbuf();
+  } else {
+    stream_buffer = std::cout.rdbuf();
+  }
+
+  Simplex_tree stree;
+  rips_complex_from_file.create_complex(stree, dim_max);
+  std::ostream output_stream(stream_buffer);
+
+ // ----------------------------------------------------------------------------
+ // Display information about the Rips complex
+ // ----------------------------------------------------------------------------
+ output_stream << "Rips complex is of dimension " << stree.dimension() <<
+ " - " << stree.num_simplices() << " simplices - " <<
+ stree.num_vertices() << " vertices." << std::endl;
+
+ output_stream << "Iterator on Rips complex simplices in the filtration order, with [filtration value]:" <<
+ std::endl;
+ for (auto f_simplex : stree.filtration_simplex_range()) {
+ output_stream << " ( ";
+ for (auto vertex : stree.simplex_vertex_range(f_simplex)) {
+ output_stream << vertex << " ";
+ }
+ output_stream << ") -> " << "[" << stree.filtration(f_simplex) << "] ";
+ output_stream << std::endl;
+ }
+
+  output_file_stream.close();
+ return 0;
+}
diff --git a/src/Rips_complex/example/example_rips_complex_from_off_file.cpp b/src/Rips_complex/example/example_rips_complex_from_off_file.cpp
new file mode 100644
index 00000000..a1e4e255
--- /dev/null
+++ b/src/Rips_complex/example/example_rips_complex_from_off_file.cpp
@@ -0,0 +1,71 @@
+#include <gudhi/Rips_complex.h>
+// to construct Rips_complex from a OFF file of points
+#include <gudhi/Points_off_io.h>
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/distance_functions.h>
+
+#include <iostream>
+#include <fstream>  // for std::ofstream
+#include <string>
+#include <vector>
+
+void usage(int nbArgs, char * const progName) {
+ std::cerr << "Error: Number of arguments (" << nbArgs << ") is not correct\n";
+ std::cerr << "Usage: " << progName << " filename.off threshold dim_max [ouput_file.txt]\n";
+ std::cerr << " i.e.: " << progName << " ../../data/points/alphacomplexdoc.off 60.0\n";
+ exit(-1); // ----- >>
+}
+
+int main(int argc, char **argv) {
+  if ((argc != 4) && (argc != 5)) usage(argc, argv[0]);
+
+ std::string off_file_name(argv[1]);
+ double threshold = atof(argv[2]);
+ int dim_max = atoi(argv[3]);
+
+ // Type definitions
+ using Point = std::vector<float>;
+ using Simplex_tree = Gudhi::Simplex_tree<>;
+ using Filtration_value = Simplex_tree::Filtration_value;
+ using Rips_complex = Gudhi::rips_complex::Rips_complex<Filtration_value>;
+
+ // ----------------------------------------------------------------------------
+ // Init of a Rips complex from an OFF file
+ // ----------------------------------------------------------------------------
+ Gudhi::Points_off_reader<Point> off_reader(off_file_name);
+ Rips_complex rips_complex_from_file(off_reader.get_point_cloud(), threshold, Euclidean_distance());
+
+  std::streambuf* stream_buffer;
+  std::ofstream output_file_stream;
+
+  if (argc == 5) {
+    output_file_stream.open(std::string(argv[4]));
+    stream_buffer = output_file_stream.rdbuf();
+  } else {
+    stream_buffer = std::cout.rdbuf();
+  }
+
+  Simplex_tree stree;
+  rips_complex_from_file.create_complex(stree, dim_max);
+  std::ostream output_stream(stream_buffer);
+
+ // ----------------------------------------------------------------------------
+ // Display information about the Rips complex
+ // ----------------------------------------------------------------------------
+ output_stream << "Rips complex is of dimension " << stree.dimension() <<
+ " - " << stree.num_simplices() << " simplices - " <<
+ stree.num_vertices() << " vertices." << std::endl;
+
+ output_stream << "Iterator on Rips complex simplices in the filtration order, with [filtration value]:" <<
+ std::endl;
+ for (auto f_simplex : stree.filtration_simplex_range()) {
+ output_stream << " ( ";
+ for (auto vertex : stree.simplex_vertex_range(f_simplex)) {
+ output_stream << vertex << " ";
+ }
+ output_stream << ") -> " << "[" << stree.filtration(f_simplex) << "] ";
+ output_stream << std::endl;
+ }
+
+  output_file_stream.close();
+ return 0;
+}
diff --git a/src/Rips_complex/example/full_skeleton_rips_for_doc.txt b/src/Rips_complex/example/full_skeleton_rips_for_doc.txt
new file mode 100644
index 00000000..55de4ab8
--- /dev/null
+++ b/src/Rips_complex/example/full_skeleton_rips_for_doc.txt
@@ -0,0 +1,26 @@
+Rips complex is of dimension 3 - 24 simplices - 7 vertices.
+Iterator on Rips complex simplices in the filtration order, with [filtration value]:
+ ( 0 ) -> [0]
+ ( 1 ) -> [0]
+ ( 2 ) -> [0]
+ ( 3 ) -> [0]
+ ( 4 ) -> [0]
+ ( 5 ) -> [0]
+ ( 6 ) -> [0]
+ ( 3 2 ) -> [5]
+ ( 5 4 ) -> [5.38516]
+ ( 2 0 ) -> [5.83095]
+ ( 1 0 ) -> [6.08276]
+ ( 3 1 ) -> [6.32456]
+ ( 2 1 ) -> [6.7082]
+ ( 2 1 0 ) -> [6.7082]
+ ( 3 2 1 ) -> [6.7082]
+ ( 6 5 ) -> [7.28011]
+ ( 4 2 ) -> [8.94427]
+ ( 3 0 ) -> [9.43398]
+ ( 3 1 0 ) -> [9.43398]
+ ( 3 2 0 ) -> [9.43398]
+ ( 3 2 1 0 ) -> [9.43398]
+ ( 6 4 ) -> [9.48683]
+ ( 6 5 4 ) -> [9.48683]
+ ( 6 3 ) -> [11]
diff --git a/src/Rips_complex/example/one_skeleton_rips_for_doc.txt b/src/Rips_complex/example/one_skeleton_rips_for_doc.txt
new file mode 100644
index 00000000..706512a5
--- /dev/null
+++ b/src/Rips_complex/example/one_skeleton_rips_for_doc.txt
@@ -0,0 +1,20 @@
+Rips complex is of dimension 1 - 18 simplices - 7 vertices.
+Iterator on Rips complex simplices in the filtration order, with [filtration value]:
+ ( 0 ) -> [0]
+ ( 1 ) -> [0]
+ ( 2 ) -> [0]
+ ( 3 ) -> [0]
+ ( 4 ) -> [0]
+ ( 5 ) -> [0]
+ ( 6 ) -> [0]
+ ( 3 2 ) -> [5]
+ ( 5 4 ) -> [5.38516]
+ ( 2 0 ) -> [5.83095]
+ ( 1 0 ) -> [6.08276]
+ ( 3 1 ) -> [6.32456]
+ ( 2 1 ) -> [6.7082]
+ ( 6 5 ) -> [7.28011]
+ ( 4 2 ) -> [8.94427]
+ ( 3 0 ) -> [9.43398]
+ ( 6 4 ) -> [9.48683]
+ ( 6 3 ) -> [11]
diff --git a/src/Rips_complex/include/gudhi/Rips_complex.h b/src/Rips_complex/include/gudhi/Rips_complex.h
new file mode 100644
index 00000000..1e4b76a7
--- /dev/null
+++ b/src/Rips_complex/include/gudhi/Rips_complex.h
@@ -0,0 +1,185 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Clément Maria, Pawel Dlotko, Vincent Rouvreau
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef RIPS_COMPLEX_H_
+#define RIPS_COMPLEX_H_
+
+#include <gudhi/Debug_utils.h>
+#include <gudhi/graph_simplicial_complex.h>
+
+#include <boost/graph/adjacency_list.hpp>
+#include <boost/range/irange.hpp>  // for boost::irange, used by the distance matrix constructor
+
+#include <iostream>
+#include <vector>
+#include <map>
+#include <string>
+#include <limits> // for numeric_limits
+#include <utility> // for pair<>
+
+
+namespace Gudhi {
+
+namespace rips_complex {
+
+/**
+ * \class Rips_complex
+ * \brief Rips complex data structure.
+ *
+ * \ingroup rips_complex
+ *
+ * \details
+ * The data structure is a one-skeleton graph, or Rips graph, containing an edge whenever the edge length is less
+ * than or equal to a given threshold. Edge lengths are computed from a user-given point cloud with a given distance
+ * function, or read from a distance matrix.
+ *
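+ * A minimal usage sketch, mirroring the examples shipped with this module (`points`, `threshold` and `dim_max`
+ * are placeholders; any model of the `SimplicialComplexForRips` concept can be used in place of
+ * `Gudhi::Simplex_tree<>`):
+ * \code
+ * Gudhi::rips_complex::Rips_complex<double> rips(points, threshold, Euclidean_distance());
+ * Gudhi::Simplex_tree<> stree;
+ * rips.create_complex(stree, dim_max);
+ * \endcode
+ *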
+ * \tparam Filtration_value is the type used to store the filtration values of the simplicial complex.
+ */
+template<typename Filtration_value>
+class Rips_complex {
+ public:
+ /**
+ * \brief Type of the one skeleton graph stored inside the Rips complex structure.
+ */
+ typedef typename boost::adjacency_list < boost::vecS, boost::vecS, boost::undirectedS
+ , boost::property < vertex_filtration_t, Filtration_value >
+ , boost::property < edge_filtration_t, Filtration_value >> OneSkeletonGraph;
+
+ private:
+ typedef int Vertex_handle;
+
+ public:
+ /** \brief Rips_complex constructor from a list of points.
+ *
+ * @param[in] points Range of points.
+ * @param[in] threshold Rips value.
+ * @param[in] distance distance function that returns a `Filtration_value` from 2 given points.
+ *
+   * \tparam ForwardPointRange must be a range for which `std::begin` and `std::end` return random access iterators
+   * on a point (the proximity graph computation uses iterator arithmetic).
+ *
+ * \tparam Distance furnishes `operator()(const Point& p1, const Point& p2)`, where
+ * `Point` is a point from the `ForwardPointRange`, and that returns a `Filtration_value`.
+ */
+ template<typename ForwardPointRange, typename Distance >
+ Rips_complex(const ForwardPointRange& points, Filtration_value threshold, Distance distance) {
+ compute_proximity_graph(points, threshold, distance);
+ }
+
+ /** \brief Rips_complex constructor from a distance matrix.
+ *
+ * @param[in] distance_matrix Range of distances.
+ * @param[in] threshold Rips value.
+ *
+   * \tparam DistanceMatrix must have a `size()` method and on which `distance_matrix[i][j]` returns
+   * the distance between points \f$i\f$ and \f$j\f$ as long as \f$ 0 \leqslant j < i <
+   * distance\_matrix.size(),\f$ i.e. only the lower-triangular part of the matrix is read.
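+   *
+   * For instance, the 5-point matrix used in the `example_one_skeleton_rips_from_distance_matrix.cpp` example of
+   * this module is passed through its lower-triangular part only (row \f$i\f$ holds the distances from point
+   * \f$i\f$ to points \f$0, \dots, i-1\f$, so the first row is empty):
+   * \code
+   * std::vector<std::vector<double>> distances;
+   * distances.push_back({});
+   * distances.push_back({0.94});
+   * distances.push_back({0.77, 0.26});
+   * distances.push_back({0.99, 0.99, 0.28});
+   * distances.push_back({0.11, 0.39, 0.97, 0.30});
+   * \endcode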
+ */
+ template<typename DistanceMatrix>
+ Rips_complex(const DistanceMatrix& distance_matrix, Filtration_value threshold) {
+ compute_proximity_graph(boost::irange((size_t)0, distance_matrix.size()), threshold,
+ [&](size_t i, size_t j){return distance_matrix[j][i];});
+ }
+
+ /** \brief Initializes the simplicial complex from the Rips graph and expands it until a given maximal
+ * dimension.
+ *
+ * \tparam SimplicialComplexForRips must meet `SimplicialComplexForRips` concept.
+ *
+ * @param[in] complex SimplicialComplexForRips to be created.
+ * @param[in] dim_max graph expansion for Rips until this given maximal dimension.
+ * @exception std::invalid_argument In debug mode, if `complex.num_vertices()` does not return 0.
+ *
+ */
+ template <typename SimplicialComplexForRips>
+ void create_complex(SimplicialComplexForRips& complex, int dim_max) {
+ GUDHI_CHECK(complex.num_vertices() == 0,
+ std::invalid_argument("Rips_complex::create_complex - simplicial complex is not empty"));
+
+ // insert the proximity graph in the simplicial complex
+ complex.insert_graph(rips_skeleton_graph_);
+ // expand the graph until dimension dim_max
+ complex.expansion(dim_max);
+ }
+
+ private:
+ /** \brief Computes the proximity graph of the points.
+ *
+   * If points contains n elements, the proximity graph is the graph with n vertices and an edge [u,v] iff the
+   * distance between points u and v is at most the given threshold.
+ *
+ * \tparam ForwardPointRange furnishes `.begin()` and `.end()`
+ * methods.
+ *
+ * \tparam Distance furnishes `operator()(const Point& p1, const Point& p2)`, where
+ * `Point` is a point from the `ForwardPointRange`, and that returns a `Filtration_value`.
+ */
+ template< typename ForwardPointRange, typename Distance >
+ void compute_proximity_graph(const ForwardPointRange& points, Filtration_value threshold,
+ Distance distance) {
+ std::vector< std::pair< Vertex_handle, Vertex_handle > > edges;
+ std::vector< Filtration_value > edges_fil;
+
+ // Compute the proximity graph of the points.
+    // If points contains n elements, the proximity graph is the graph with n vertices and an edge [u,v] iff the
+    // distance between points u and v is at most the given threshold.
+ // --------------------------------------------------------------------------------------------
+ // Creates the vector of edges and its filtration values (returned by distance function)
+ Vertex_handle idx_u = 0;
+ for (auto it_u = std::begin(points); it_u != std::end(points); ++it_u, ++idx_u) {
+ Vertex_handle idx_v = idx_u + 1;
+ for (auto it_v = it_u + 1; it_v != std::end(points); ++it_v, ++idx_v) {
+ Filtration_value fil = distance(*it_u, *it_v);
+ if (fil <= threshold) {
+ edges.emplace_back(idx_u, idx_v);
+ edges_fil.push_back(fil);
+ }
+ }
+ }
+
+ // --------------------------------------------------------------------------------------------
+ // Creates the proximity graph from edges and sets the property with the filtration value.
+    // Points are labeled from 0 to idx_u-1, so idx_u is the number of vertices
+ // --------------------------------------------------------------------------------------------
+ // Do not use : rips_skeleton_graph_ = OneSkeletonGraph(...) -> deep copy of the graph (boost graph is not
+ // move-enabled)
+ rips_skeleton_graph_.~OneSkeletonGraph();
+ new(&rips_skeleton_graph_)OneSkeletonGraph(edges.begin(), edges.end(), edges_fil.begin(), idx_u);
+
+ auto vertex_prop = boost::get(vertex_filtration_t(), rips_skeleton_graph_);
+
+ using vertex_iterator = typename boost::graph_traits<OneSkeletonGraph>::vertex_iterator;
+ vertex_iterator vi, vi_end;
+ for (std::tie(vi, vi_end) = boost::vertices(rips_skeleton_graph_);
+ vi != vi_end; ++vi) {
+ boost::put(vertex_prop, *vi, 0.);
+ }
+ }
+
+ private:
+ OneSkeletonGraph rips_skeleton_graph_;
+};
+
+} // namespace rips_complex
+
+} // namespace Gudhi
+
+#endif // RIPS_COMPLEX_H_
diff --git a/src/Rips_complex/test/CMakeLists.txt b/src/Rips_complex/test/CMakeLists.txt
new file mode 100644
index 00000000..87bad2ed
--- /dev/null
+++ b/src/Rips_complex/test/CMakeLists.txt
@@ -0,0 +1,25 @@
+cmake_minimum_required(VERSION 2.6)
+project(Rips_complex_tests)
+
+if (GCOVR_PATH)
+ # for gcovr to make coverage reports - Corbera Jenkins plugin
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fprofile-arcs -ftest-coverage")
+endif()
+if (GPROF_PATH)
+ # for gprof to make coverage reports - Jenkins
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -pg")
+endif()
+
+add_executable ( rips_complex_UT test_rips_complex.cpp )
+target_link_libraries(rips_complex_UT ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
+if (TBB_FOUND)
+ target_link_libraries(rips_complex_UT ${TBB_LIBRARIES})
+endif()
+
+# Do not forget to copy test files in current binary dir
+file(COPY "${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+file(COPY "${CMAKE_SOURCE_DIR}/data/distance_matrix/full_square_distance_matrix.csv" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+
+add_test(rips_complex_UT ${CMAKE_CURRENT_BINARY_DIR}/rips_complex_UT
+ # XML format for Jenkins xUnit plugin
+ --log_format=XML --log_sink=${CMAKE_SOURCE_DIR}/rips_complex_UT.xml --log_level=test_suite --report_level=no)
diff --git a/src/Rips_complex/test/README b/src/Rips_complex/test/README
new file mode 100644
index 00000000..28236b52
--- /dev/null
+++ b/src/Rips_complex/test/README
@@ -0,0 +1,12 @@
+To compile:
+***********
+
+cmake .
+make
+
+To launch with details:
+***********************
+
+./rips_complex_UT --report_level=detailed --log_level=all
+
+ ==> echo $? returns 0 in case of success (non-zero otherwise)
diff --git a/src/Rips_complex/test/test_rips_complex.cpp b/src/Rips_complex/test/test_rips_complex.cpp
new file mode 100644
index 00000000..ae68ba0d
--- /dev/null
+++ b/src/Rips_complex/test/test_rips_complex.cpp
@@ -0,0 +1,353 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2016 INRIA Saclay (France)
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#define BOOST_TEST_DYN_LINK
+#define BOOST_TEST_MODULE "rips_complex"
+#include <boost/test/unit_test.hpp>
+
+#include <cmath> // float comparison
+#include <limits>
+#include <string>
+#include <vector>
+#include <algorithm> // std::max
+
+#include <gudhi/Rips_complex.h>
+// to construct Rips_complex from a OFF file of points
+#include <gudhi/Points_off_io.h>
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/distance_functions.h>
+#include <gudhi/reader_utils.h>
+
+// Type definitions
+using Point = std::vector<double>;
+using Simplex_tree = Gudhi::Simplex_tree<>;
+using Filtration_value = Simplex_tree::Filtration_value;
+using Rips_complex = Gudhi::rips_complex::Rips_complex<Simplex_tree::Filtration_value>;
+using Distance_matrix = std::vector<std::vector<Filtration_value>>;
+
+bool are_almost_the_same(float a, float b) {
+ return std::fabs(a - b) < std::numeric_limits<float>::epsilon();
+}
+
+BOOST_AUTO_TEST_CASE(RIPS_DOC_OFF_file) {
+ // ----------------------------------------------------------------------------
+ //
+  // Init of a Rips complex from an OFF file
+ //
+ // ----------------------------------------------------------------------------
+ std::string off_file_name("alphacomplexdoc.off");
+ double rips_threshold = 12.0;
+ std::cout << "========== OFF FILE NAME = " << off_file_name << " - Rips threshold=" <<
+ rips_threshold << "==========" << std::endl;
+
+ Gudhi::Points_off_reader<Point> off_reader(off_file_name);
+ Rips_complex rips_complex_from_file(off_reader.get_point_cloud(), rips_threshold, Euclidean_distance());
+
+ const int DIMENSION_1 = 1;
+ Simplex_tree st;
+ rips_complex_from_file.create_complex(st, DIMENSION_1);
+ std::cout << "st.dimension()=" << st.dimension() << std::endl;
+ BOOST_CHECK(st.dimension() == DIMENSION_1);
+
+ const int NUMBER_OF_VERTICES = 7;
+ std::cout << "st.num_vertices()=" << st.num_vertices() << std::endl;
+ BOOST_CHECK(st.num_vertices() == NUMBER_OF_VERTICES);
+
+ std::cout << "st.num_simplices()=" << st.num_simplices() << std::endl;
+ BOOST_CHECK(st.num_simplices() == 18);
+
+  // Check that the filtration value of each vertex is 0.0
+ for (auto f_simplex : st.skeleton_simplex_range(0)) {
+ BOOST_CHECK(st.filtration(f_simplex) == 0.0);
+ }
+
+ // Check filtration values of edges
+ for (auto f_simplex : st.skeleton_simplex_range(DIMENSION_1)) {
+ if (DIMENSION_1 == st.dimension(f_simplex)) {
+ std::vector<Point> vp;
+ std::cout << "vertex = (";
+ for (auto vertex : st.simplex_vertex_range(f_simplex)) {
+ std::cout << vertex << ",";
+ vp.push_back(off_reader.get_point_cloud().at(vertex));
+ }
+ std::cout << ") - distance =" << Euclidean_distance()(vp.at(0), vp.at(1)) <<
+ " - filtration =" << st.filtration(f_simplex) << std::endl;
+ BOOST_CHECK(vp.size() == 2);
+ BOOST_CHECK(are_almost_the_same(st.filtration(f_simplex), Euclidean_distance()(vp.at(0), vp.at(1))));
+ }
+ }
+
+ const int DIMENSION_2 = 2;
+ Simplex_tree st2;
+ rips_complex_from_file.create_complex(st2, DIMENSION_2);
+ std::cout << "st2.dimension()=" << st2.dimension() << std::endl;
+ BOOST_CHECK(st2.dimension() == DIMENSION_2);
+
+ std::cout << "st2.num_vertices()=" << st2.num_vertices() << std::endl;
+ BOOST_CHECK(st2.num_vertices() == NUMBER_OF_VERTICES);
+
+ std::cout << "st2.num_simplices()=" << st2.num_simplices() << std::endl;
+ BOOST_CHECK(st2.num_simplices() == 23);
+
+ Simplex_tree::Filtration_value f01 = st2.filtration(st2.find({0, 1}));
+ Simplex_tree::Filtration_value f02 = st2.filtration(st2.find({0, 2}));
+ Simplex_tree::Filtration_value f12 = st2.filtration(st2.find({1, 2}));
+ Simplex_tree::Filtration_value f012 = st2.filtration(st2.find({0, 1, 2}));
+ std::cout << "f012= " << f012 << " | f01= " << f01 << " - f02= " << f02 << " - f12= " << f12 << std::endl;
+ BOOST_CHECK(are_almost_the_same(f012, std::max(f01, std::max(f02,f12))));
+
+ Simplex_tree::Filtration_value f45 = st2.filtration(st2.find({4, 5}));
+ Simplex_tree::Filtration_value f56 = st2.filtration(st2.find({5, 6}));
+ Simplex_tree::Filtration_value f46 = st2.filtration(st2.find({4, 6}));
+ Simplex_tree::Filtration_value f456 = st2.filtration(st2.find({4, 5, 6}));
+ std::cout << "f456= " << f456 << " | f45= " << f45 << " - f56= " << f56 << " - f46= " << f46 << std::endl;
+ BOOST_CHECK(are_almost_the_same(f456, std::max(f45, std::max(f56,f46))));
+
+ const int DIMENSION_3 = 3;
+ Simplex_tree st3;
+ rips_complex_from_file.create_complex(st3, DIMENSION_3);
+ std::cout << "st3.dimension()=" << st3.dimension() << std::endl;
+ BOOST_CHECK(st3.dimension() == DIMENSION_3);
+
+ std::cout << "st3.num_vertices()=" << st3.num_vertices() << std::endl;
+ BOOST_CHECK(st3.num_vertices() == NUMBER_OF_VERTICES);
+
+ std::cout << "st3.num_simplices()=" << st3.num_simplices() << std::endl;
+ BOOST_CHECK(st3.num_simplices() == 24);
+
+ Simplex_tree::Filtration_value f123 = st3.filtration(st3.find({1, 2, 3}));
+ Simplex_tree::Filtration_value f013 = st3.filtration(st3.find({0, 1, 3}));
+ Simplex_tree::Filtration_value f023 = st3.filtration(st3.find({0, 2, 3}));
+ Simplex_tree::Filtration_value f0123 = st3.filtration(st3.find({0, 1, 2, 3}));
+ std::cout << "f0123= " << f0123 << " | f012= " << f012 << " - f123= " << f123 << " - f013= " << f013 <<
+ " - f023= " << f023 << std::endl;
+ BOOST_CHECK(are_almost_the_same(f0123, std::max(f012, std::max(f123, std::max(f013, f023)))));
+
+}
+
+using Vector_of_points = std::vector<Point>;
+
+bool is_point_in_list(Vector_of_points points_list, Point point) {
+ for (auto& point_in_list : points_list) {
+ if (point_in_list == point) {
+ return true; // point found
+ }
+ }
+ return false; // point not found
+}
+
+class Custom_square_euclidean_distance {
+ public:
+ template< typename Point >
+ auto operator()(const Point& p1, const Point& p2) -> typename Point::value_type {
+ auto it1 = p1.begin();
+ auto it2 = p2.begin();
+ typename Point::value_type dist = 0.;
+ for (; it1 != p1.end(); ++it1, ++it2) {
+ typename Point::value_type tmp = (*it1) - (*it2);
+ dist += tmp*tmp;
+ }
+ return dist;
+ }
+};
+
+BOOST_AUTO_TEST_CASE(Rips_complex_from_points) {
+ // ----------------------------------------------------------------------------
+ // Init of a list of points
+ // ----------------------------------------------------------------------------
+ Vector_of_points points;
+ std::vector<double> coords = { 0.0, 0.0, 0.0, 1.0 };
+ points.push_back(Point(coords.begin(), coords.end()));
+ coords = { 0.0, 0.0, 1.0, 0.0 };
+ points.push_back(Point(coords.begin(), coords.end()));
+ coords = { 0.0, 1.0, 0.0, 0.0 };
+ points.push_back(Point(coords.begin(), coords.end()));
+ coords = { 1.0, 0.0, 0.0, 0.0 };
+ points.push_back(Point(coords.begin(), coords.end()));
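+  // These 4 points are the standard basis vectors of R^4, so every pair is at squared distance 2.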
+
+ // ----------------------------------------------------------------------------
+ // Init of a Rips complex from the list of points
+ // ----------------------------------------------------------------------------
+ Rips_complex rips_complex_from_points(points, 2.0, Custom_square_euclidean_distance());
+
+ std::cout << "========== Rips_complex_from_points ==========" << std::endl;
+ Simplex_tree st;
+ const int DIMENSION = 3;
+ rips_complex_from_points.create_complex(st, DIMENSION);
+
+ // Another way to check num_simplices
+ std::cout << "Iterator on Rips complex simplices in the filtration order, with [filtration value]:" << std::endl;
+ int num_simplices = 0;
+ for (auto f_simplex : st.filtration_simplex_range()) {
+ num_simplices++;
+ std::cout << " ( ";
+ for (auto vertex : st.simplex_vertex_range(f_simplex)) {
+ std::cout << vertex << " ";
+ }
+ std::cout << ") -> " << "[" << st.filtration(f_simplex) << "] ";
+ std::cout << std::endl;
+ }
+ BOOST_CHECK(num_simplices == 15);
+ std::cout << "st.num_simplices()=" << st.num_simplices() << std::endl;
+ BOOST_CHECK(st.num_simplices() == 15);
+
+ std::cout << "st.dimension()=" << st.dimension() << std::endl;
+ BOOST_CHECK(st.dimension() == DIMENSION);
+ std::cout << "st.num_vertices()=" << st.num_vertices() << std::endl;
+ BOOST_CHECK(st.num_vertices() == 4);
+
+ for (auto f_simplex : st.filtration_simplex_range()) {
+ std::cout << "dimension(" << st.dimension(f_simplex) << ") - f = " << st.filtration(f_simplex) << std::endl;
+ switch (st.dimension(f_simplex)) {
+ case 0:
+ BOOST_CHECK(are_almost_the_same(st.filtration(f_simplex), 0.0));
+ break;
+ case 1:
+ case 2:
+ case 3:
+ BOOST_CHECK(are_almost_the_same(st.filtration(f_simplex), 2.0));
+ break;
+ default:
+ BOOST_CHECK(false); // Shall not happen
+ break;
+ }
+ }
+}
+
+BOOST_AUTO_TEST_CASE(Rips_doc_csv_file) {
+ // ----------------------------------------------------------------------------
+ //
+  // Init of a Rips complex from a CSV distance matrix file
+ //
+ // ----------------------------------------------------------------------------
+ std::string csv_file_name("full_square_distance_matrix.csv");
+ double rips_threshold = 12.0;
+ std::cout << "========== CSV FILE NAME = " << csv_file_name << " - Rips threshold=" <<
+ rips_threshold << "==========" << std::endl;
+
+ Distance_matrix distances = read_lower_triangular_matrix_from_csv_file<Filtration_value>(csv_file_name);
+ Rips_complex rips_complex_from_file(distances, rips_threshold);
+
+ const int DIMENSION_1 = 1;
+ Simplex_tree st;
+ rips_complex_from_file.create_complex(st, DIMENSION_1);
+ std::cout << "st.dimension()=" << st.dimension() << std::endl;
+ BOOST_CHECK(st.dimension() == DIMENSION_1);
+
+ const int NUMBER_OF_VERTICES = 7;
+ std::cout << "st.num_vertices()=" << st.num_vertices() << std::endl;
+ BOOST_CHECK(st.num_vertices() == NUMBER_OF_VERTICES);
+
+ std::cout << "st.num_simplices()=" << st.num_simplices() << std::endl;
+ BOOST_CHECK(st.num_simplices() == 18);
+
+  // Check that the filtration value of each vertex is 0.0
+ for (auto f_simplex : st.skeleton_simplex_range(0)) {
+ BOOST_CHECK(st.filtration(f_simplex) == 0.0);
+ }
+
+ // Check filtration values of edges
+ for (auto f_simplex : st.skeleton_simplex_range(DIMENSION_1)) {
+ if (DIMENSION_1 == st.dimension(f_simplex)) {
+ std::vector<Simplex_tree::Vertex_handle> vvh;
+ std::cout << "vertex = (";
+ for (auto vertex : st.simplex_vertex_range(f_simplex)) {
+ std::cout << vertex << ",";
+ vvh.push_back(vertex);
+ }
+ std::cout << ") - filtration =" << st.filtration(f_simplex) << std::endl;
+ BOOST_CHECK(vvh.size() == 2);
+ BOOST_CHECK(are_almost_the_same(st.filtration(f_simplex), distances[vvh.at(0)][vvh.at(1)]));
+ }
+ }
+
+ const int DIMENSION_2 = 2;
+ Simplex_tree st2;
+ rips_complex_from_file.create_complex(st2, DIMENSION_2);
+ std::cout << "st2.dimension()=" << st2.dimension() << std::endl;
+ BOOST_CHECK(st2.dimension() == DIMENSION_2);
+
+ std::cout << "st2.num_vertices()=" << st2.num_vertices() << std::endl;
+ BOOST_CHECK(st2.num_vertices() == NUMBER_OF_VERTICES);
+
+ std::cout << "st2.num_simplices()=" << st2.num_simplices() << std::endl;
+ BOOST_CHECK(st2.num_simplices() == 23);
+
+ Simplex_tree::Filtration_value f01 = st2.filtration(st2.find({0, 1}));
+ Simplex_tree::Filtration_value f02 = st2.filtration(st2.find({0, 2}));
+ Simplex_tree::Filtration_value f12 = st2.filtration(st2.find({1, 2}));
+ Simplex_tree::Filtration_value f012 = st2.filtration(st2.find({0, 1, 2}));
+ std::cout << "f012= " << f012 << " | f01= " << f01 << " - f02= " << f02 << " - f12= " << f12 << std::endl;
+ BOOST_CHECK(are_almost_the_same(f012, std::max(f01, std::max(f02,f12))));
+
+ Simplex_tree::Filtration_value f45 = st2.filtration(st2.find({4, 5}));
+ Simplex_tree::Filtration_value f56 = st2.filtration(st2.find({5, 6}));
+ Simplex_tree::Filtration_value f46 = st2.filtration(st2.find({4, 6}));
+ Simplex_tree::Filtration_value f456 = st2.filtration(st2.find({4, 5, 6}));
+ std::cout << "f456= " << f456 << " | f45= " << f45 << " - f56= " << f56 << " - f46= " << f46 << std::endl;
+ BOOST_CHECK(are_almost_the_same(f456, std::max(f45, std::max(f56,f46))));
+
+ const int DIMENSION_3 = 3;
+ Simplex_tree st3;
+ rips_complex_from_file.create_complex(st3, DIMENSION_3);
+ std::cout << "st3.dimension()=" << st3.dimension() << std::endl;
+ BOOST_CHECK(st3.dimension() == DIMENSION_3);
+
+ std::cout << "st3.num_vertices()=" << st3.num_vertices() << std::endl;
+ BOOST_CHECK(st3.num_vertices() == NUMBER_OF_VERTICES);
+
+ std::cout << "st3.num_simplices()=" << st3.num_simplices() << std::endl;
+ BOOST_CHECK(st3.num_simplices() == 24);
+
+ Simplex_tree::Filtration_value f123 = st3.filtration(st3.find({1, 2, 3}));
+ Simplex_tree::Filtration_value f013 = st3.filtration(st3.find({0, 1, 3}));
+ Simplex_tree::Filtration_value f023 = st3.filtration(st3.find({0, 2, 3}));
+ Simplex_tree::Filtration_value f0123 = st3.filtration(st3.find({0, 1, 2, 3}));
+ std::cout << "f0123= " << f0123 << " | f012= " << f012 << " - f123= " << f123 << " - f013= " << f013 <<
+ " - f023= " << f023 << std::endl;
+ BOOST_CHECK(are_almost_the_same(f0123, std::max(f012, std::max(f123, std::max(f013, f023)))));
+
+}
+
+#ifdef GUDHI_DEBUG
+BOOST_AUTO_TEST_CASE(Rips_create_complex_throw) {
+ // ----------------------------------------------------------------------------
+ //
+ // Init of a Rips complex from an OFF file
+ //
+ // ----------------------------------------------------------------------------
+ std::string off_file_name("alphacomplexdoc.off");
+ double rips_threshold = 12.0;
+ std::cout << "========== OFF FILE NAME = " << off_file_name << " - Rips threshold=" <<
+ rips_threshold << "==========" << std::endl;
+
+ Gudhi::Points_off_reader<Point> off_reader(off_file_name);
+ Rips_complex rips_complex_from_file(off_reader.get_point_cloud(), rips_threshold, Euclidean_distance());
+
+ Simplex_tree stree;
+ std::vector<int> simplex = {0, 1, 2};
+ stree.insert_simplex_and_subfaces(simplex);
+ std::cout << "Check that the exception is thrown in debug mode" << std::endl;
+ // an exception is thrown because stree is not empty
+ BOOST_CHECK_THROW (rips_complex_from_file.create_complex(stree, 1), std::invalid_argument);
+}
+#endif
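The edge and triangle checks in the test above all follow from the Vietoris-Rips filtration rule: a vertex appears at filtration 0, an edge at the distance between its endpoints, and a higher-dimensional simplex at the largest filtration value of its edges. A minimal standalone sketch of that rule (a hypothetical helper, not part of the GUDHI test file, assuming a full symmetric distance matrix):

// Sketch only: expected Rips filtration value of a simplex, i.e. its largest pairwise distance.
#include <algorithm>
#include <cstddef>
#include <iostream>
#include <vector>

double expected_rips_filtration(const std::vector<std::vector<double>>& dist,
                                const std::vector<int>& simplex) {
  double filtration = 0.0;  // a single vertex enters the filtration at 0
  for (std::size_t i = 0; i < simplex.size(); ++i)
    for (std::size_t j = i + 1; j < simplex.size(); ++j)
      filtration = std::max(filtration, dist[simplex[i]][simplex[j]]);
  return filtration;
}

int main() {
  std::vector<std::vector<double>> dist = {{0., 1., 2.},
                                           {1., 0., 4.},
                                           {2., 4., 0.}};
  // The triangle {0, 1, 2} appears when its longest edge {1, 2} does, i.e. at 4.
  std::cout << expected_rips_filtration(dist, {0, 1, 2}) << std::endl;
  return 0;
}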
diff --git a/src/Simplex_tree/doc/COPYRIGHT b/src/Simplex_tree/doc/COPYRIGHT
new file mode 100644
index 00000000..34345bef
--- /dev/null
+++ b/src/Simplex_tree/doc/COPYRIGHT
@@ -0,0 +1,19 @@
+The files of this directory are part of the Gudhi Library. The Gudhi library
+(Geometric Understanding in Higher Dimensions) is a generic C++ library for
+computational topology.
+
+Author(s): Clément Maria
+
+Copyright (C) 2015 INRIA
+
+This program is free software: you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free Software
+Foundation, either version 3 of the License, or (at your option) any later
+version.
+
+This program is distributed in the hope that it will be useful, but WITHOUT
+ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License along with
+this program. If not, see <http://www.gnu.org/licenses/>.
diff --git a/src/Simplex_tree/doc/Intro_simplex_tree.h b/src/Simplex_tree/doc/Intro_simplex_tree.h
index be061785..940dd694 100644
--- a/src/Simplex_tree/doc/Intro_simplex_tree.h
+++ b/src/Simplex_tree/doc/Intro_simplex_tree.h
@@ -66,8 +66,8 @@ Expand the simplex tree in 3.8e-05 s.
Information of the Simplex Tree:
Number of vertices = 10 Number of simplices = 98 \endcode
*
- * \li <a href="_simplex_tree_2simplex_tree_from_alpha_shapes_3_8cpp-example.html">
- * Simplex_tree/simplex_tree_from_alpha_shapes_3.cpp</a> - Simplex tree is computed and displayed from a 3D alpha
+ * \li <a href="_simplex_tree_2example_alpha_shapes_3_simplex_tree_from_off_file_8cpp-example.html">
+ * Simplex_tree/example_alpha_shapes_3_simplex_tree_from_off_file.cpp</a> - Simplex tree is computed and displayed from a 3D alpha
* complex (Requires CGAL, GMP and GMPXX to be installed)
*
*
diff --git a/src/Simplex_tree/example/CMakeLists.txt b/src/Simplex_tree/example/CMakeLists.txt
index 9314a805..e5285591 100644
--- a/src/Simplex_tree/example/CMakeLists.txt
+++ b/src/Simplex_tree/example/CMakeLists.txt
@@ -5,8 +5,10 @@ add_executable ( simplex_tree_from_cliques_of_graph simplex_tree_from_cliques_of
if (TBB_FOUND)
target_link_libraries(simplex_tree_from_cliques_of_graph ${TBB_LIBRARIES})
endif()
-add_test(simplex_tree_from_cliques_of_graph_2 ${CMAKE_CURRENT_BINARY_DIR}/simplex_tree_from_cliques_of_graph ${CMAKE_SOURCE_DIR}/data/points/Klein_bottle_complex.txt 2)
-add_test(simplex_tree_from_cliques_of_graph_3 ${CMAKE_CURRENT_BINARY_DIR}/simplex_tree_from_cliques_of_graph ${CMAKE_SOURCE_DIR}/data/points/Klein_bottle_complex.txt 3)
+add_test(simplex_tree_from_cliques_of_graph_2 ${CMAKE_CURRENT_BINARY_DIR}/simplex_tree_from_cliques_of_graph
+ ${CMAKE_SOURCE_DIR}/data/filtered_simplicial_complex/Klein_bottle_complex.fsc 2)
+add_test(simplex_tree_from_cliques_of_graph_3 ${CMAKE_CURRENT_BINARY_DIR}/simplex_tree_from_cliques_of_graph
+ ${CMAKE_SOURCE_DIR}/data/filtered_simplicial_complex/Klein_bottle_complex.fsc 3)
add_executable ( simple_simplex_tree simple_simplex_tree.cpp )
if (TBB_FOUND)
@@ -20,10 +22,12 @@ add_test(mini_simplex_tree ${CMAKE_CURRENT_BINARY_DIR}/mini_simplex_tree)
# An example with Simplex-tree using CGAL alpha_shapes_3
if(GMP_FOUND AND CGAL_FOUND)
- add_executable ( simplex_tree_from_alpha_shapes_3 simplex_tree_from_alpha_shapes_3.cpp )
- target_link_libraries(simplex_tree_from_alpha_shapes_3 ${GMP_LIBRARIES} ${CGAL_LIBRARY} ${Boost_SYSTEM_LIBRARY})
+ add_executable ( alpha_shapes_3_simplex_tree_from_off_file example_alpha_shapes_3_simplex_tree_from_off_file.cpp )
+ target_link_libraries(alpha_shapes_3_simplex_tree_from_off_file ${GMP_LIBRARIES} ${CGAL_LIBRARY} ${Boost_SYSTEM_LIBRARY})
if (TBB_FOUND)
- target_link_libraries(simplex_tree_from_alpha_shapes_3 ${TBB_LIBRARIES})
+ target_link_libraries(alpha_shapes_3_simplex_tree_from_off_file ${TBB_LIBRARIES})
endif()
- add_test(simplex_tree_from_alpha_shapes_3 ${CMAKE_CURRENT_BINARY_DIR}/simplex_tree_from_alpha_shapes_3 ${CMAKE_SOURCE_DIR}/data/points/bunny_5000)
+ add_test(alpha_shapes_3_simplex_tree_from_off_file
+ ${CMAKE_CURRENT_BINARY_DIR}/alpha_shapes_3_simplex_tree_from_off_file
+ ${CMAKE_SOURCE_DIR}/data/points/bunny_5000.off)
endif()
diff --git a/src/Simplex_tree/example/simplex_tree_from_alpha_shapes_3.cpp b/src/Simplex_tree/example/example_alpha_shapes_3_simplex_tree_from_off_file.cpp
index 49d358ab..ff2eebcb 100644
--- a/src/Simplex_tree/example/simplex_tree_from_alpha_shapes_3.cpp
+++ b/src/Simplex_tree/example/example_alpha_shapes_3_simplex_tree_from_off_file.cpp
@@ -20,8 +20,9 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-#include <gudhi/graph_simplicial_complex.h>
#include <gudhi/Simplex_tree.h>
+#include <gudhi/Points_3D_off_io.h>
+
#include <boost/variant.hpp>
#include <CGAL/Exact_predicates_inexact_constructions_kernel.h>
@@ -118,24 +119,21 @@ int main(int argc, char * const argv[]) {
// program args management
if (argc != 2) {
std::cerr << "Usage: " << argv[0]
- << " path_to_file_graph \n";
+ << " path_to_off_file \n";
return 0;
}
// Read points from file
- std::string filegraph = argv[1];
- std::list<Point> lp;
- std::ifstream is(filegraph.c_str());
- int n;
- is >> n;
-#ifdef DEBUG_TRACES
- std::cout << "Reading " << n << " points " << std::endl;
-#endif // DEBUG_TRACES
- Point p;
- for (; n > 0; n--) {
- is >> p;
- lp.push_back(p);
+ std::string offInputFile(argv[1]);
+ // Read the OFF file (input file name given as parameter)
+ Gudhi::Points_3D_off_reader<Point> off_reader(offInputFile);
+ // Check the read operation was correct
+ if (!off_reader.is_valid()) {
+ std::cerr << "Unable to read file " << argv[1] << std::endl;
+ return 0;
}
+ // Retrieve the point cloud
+ std::vector<Point> lp = off_reader.get_point_cloud();
// alpha shape construction from points. CGAL has a strange behavior in REGULARIZED mode.
Alpha_shape_3 as(lp.begin(), lp.end(), 0, Alpha_shape_3::GENERAL);
diff --git a/src/Simplex_tree/example/mini_simplex_tree.cpp b/src/Simplex_tree/example/mini_simplex_tree.cpp
index 7e48aaaf..ad99df23 100644
--- a/src/Simplex_tree/example/mini_simplex_tree.cpp
+++ b/src/Simplex_tree/example/mini_simplex_tree.cpp
@@ -24,19 +24,18 @@
#include <iostream>
#include <initializer_list>
-using namespace Gudhi;
-
-struct MyOptions : Simplex_tree_options_full_featured {
+struct MyOptions : Gudhi::Simplex_tree_options_full_featured {
// Not doing persistence, so we don't need those
static const bool store_key = false;
static const bool store_filtration = false;
// I have few vertices
typedef short Vertex_handle;
};
-typedef Simplex_tree<MyOptions> ST;
+
+using ST = Gudhi::Simplex_tree<MyOptions>;
// Dictionary should be private, but for now this is the easiest way.
-static_assert(sizeof(ST::Dictionary::value_type) < sizeof(Simplex_tree<>::Dictionary::value_type),
+static_assert(sizeof(ST::Dictionary::value_type) < sizeof(Gudhi::Simplex_tree<>::Dictionary::value_type),
"Not storing the filtration and key should save some space");
int main() {
diff --git a/src/Simplex_tree/example/simple_simplex_tree.cpp b/src/Simplex_tree/example/simple_simplex_tree.cpp
index 5146b906..60f9a35e 100644
--- a/src/Simplex_tree/example/simple_simplex_tree.cpp
+++ b/src/Simplex_tree/example/simple_simplex_tree.cpp
@@ -4,7 +4,7 @@
*
* Author(s): Vincent Rouvreau
*
- * Copyright (C) 2014 INRIA Sophia Antipolis-Méditerranée (France)
+ * Copyright (C) 2014
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -27,10 +27,11 @@
#include <utility> // for pair
#include <vector>
-using namespace Gudhi;
-
-typedef std::vector< Vertex_handle > typeVectorVertex;
-typedef std::pair< Simplex_tree<>::Simplex_handle, bool > typePairSimplexBool;
+using Simplex_tree = Gudhi::Simplex_tree<>;
+using Vertex_handle = Simplex_tree::Vertex_handle;
+using Filtration_value = Simplex_tree::Filtration_value;
+using typeVectorVertex = std::vector< Vertex_handle >;
+using typePairSimplexBool = std::pair< Simplex_tree::Simplex_handle, bool >;
int main(int argc, char * const argv[]) {
const Filtration_value FIRST_FILTRATION_VALUE = 0.1;
@@ -42,7 +43,7 @@ int main(int argc, char * const argv[]) {
std::cout << "********************************************************************" << std::endl;
std::cout << "EXAMPLE OF SIMPLE INSERTION" << std::endl;
// Construct the Simplex Tree
- Simplex_tree<> simplexTree;
+ Simplex_tree simplexTree;
/* Simplex to be inserted: */
/* 1 */
@@ -212,7 +213,7 @@ int main(int argc, char * const argv[]) {
// ------------------------------------------------------------------------------------------------------------------
// Find in the simplex_tree
// ------------------------------------------------------------------------------------------------------------------
- Simplex_tree<>::Simplex_handle simplexFound = simplexTree.find(secondSimplexVector);
+ Simplex_tree::Simplex_handle simplexFound = simplexTree.find(secondSimplexVector);
std::cout << "**************IS THE SIMPLEX {1} IN THE SIMPLEX TREE ?\n";
if (simplexFound != simplexTree.null_simplex())
std::cout << "***+ YES IT IS!\n";
diff --git a/src/Simplex_tree/example/simplex_tree_from_cliques_of_graph.cpp b/src/Simplex_tree/example/simplex_tree_from_cliques_of_graph.cpp
index 58085014..d1b8b2de 100644
--- a/src/Simplex_tree/example/simplex_tree_from_cliques_of_graph.cpp
+++ b/src/Simplex_tree/example/simplex_tree_from_cliques_of_graph.cpp
@@ -4,7 +4,7 @@
*
* Author(s): Clément Maria
*
- * Copyright (C) 2014 INRIA Sophia Antipolis-Méditerranée (France)
+ * Copyright (C) 2014 INRIA
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -26,9 +26,16 @@
#include <iostream>
#include <ctime>
#include <string>
+#include <utility> // for std::pair
using namespace Gudhi;
+typedef int Vertex_handle;
+typedef double Filtration_value;
+typedef boost::adjacency_list < boost::vecS, boost::vecS, boost::undirectedS,
+ boost::property < vertex_filtration_t, Filtration_value >,
+ boost::property < edge_filtration_t, Filtration_value > > Graph_t;
+
int main(int argc, char * const argv[]) {
if (argc != 3) {
std::cerr << "Usage: " << argv[0]
@@ -43,7 +50,7 @@ int main(int argc, char * const argv[]) {
Simplex_tree<> st;
start = clock();
- auto g = read_graph(filegraph);
+ auto g = read_graph<Graph_t, Filtration_value, Vertex_handle>(filegraph);
// insert the graph in the simplex tree as 1-skeleton
st.insert_graph(g);
end = clock();
diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree.h b/src/Simplex_tree/include/gudhi/Simplex_tree.h
index 63e3f0e5..317bce23 100644
--- a/src/Simplex_tree/include/gudhi/Simplex_tree.h
+++ b/src/Simplex_tree/include/gudhi/Simplex_tree.h
@@ -1029,7 +1029,7 @@ class Simplex_tree {
Dictionary_it next = siblings->members().begin();
++next;
- static std::vector<std::pair<Vertex_handle, Node> > inter; // static, not thread-safe.
+ thread_local std::vector<std::pair<Vertex_handle, Node> > inter;
for (Dictionary_it s_h = siblings->members().begin();
s_h != siblings->members().end(); ++s_h, ++next) {
Simplex_handle root_sh = find_vertex(s_h->first);
diff --git a/src/Simplex_tree/test/simplex_tree_unit_test.cpp b/src/Simplex_tree/test/simplex_tree_unit_test.cpp
index 28bf202b..b06d7ec9 100644
--- a/src/Simplex_tree/test/simplex_tree_unit_test.cpp
+++ b/src/Simplex_tree/test/simplex_tree_unit_test.cpp
@@ -1,4 +1,5 @@
#include <iostream>
+#include <fstream>
#include <string>
#include <algorithm>
#include <utility> // std::pair, std::make_pair
@@ -19,19 +20,19 @@ using namespace Gudhi;
typedef boost::mpl::list<Simplex_tree<>, Simplex_tree<Simplex_tree_options_fast_persistence>> list_of_tested_variants;
-const Vertex_handle DEFAULT_VERTEX_HANDLE = (const Vertex_handle) - 1;
-const Filtration_value DEFAULT_FILTRATION_VALUE = (const Filtration_value) 0.0;
template<class typeST>
void test_empty_simplex_tree(typeST& tst) {
- BOOST_CHECK(tst.null_vertex() == DEFAULT_VERTEX_HANDLE);
- BOOST_CHECK(tst.filtration() == DEFAULT_FILTRATION_VALUE);
+ typedef typename typeST::Vertex_handle Vertex_handle;
+ const Vertex_handle DEFAULT_VERTEX_VALUE = Vertex_handle(- 1);
+ BOOST_CHECK(tst.null_vertex() == DEFAULT_VERTEX_VALUE);
+ BOOST_CHECK(tst.filtration() == 0.0);
BOOST_CHECK(tst.num_vertices() == (size_t) 0);
BOOST_CHECK(tst.num_simplices() == (size_t) 0);
typename typeST::Siblings* STRoot = tst.root();
BOOST_CHECK(STRoot != nullptr);
BOOST_CHECK(STRoot->oncles() == nullptr);
- BOOST_CHECK(STRoot->parent() == DEFAULT_VERTEX_HANDLE);
+ BOOST_CHECK(STRoot->parent() == DEFAULT_VERTEX_VALUE);
BOOST_CHECK(tst.dimension() == -1);
}
@@ -59,7 +60,7 @@ void test_iterators_on_empty_simplex_tree(typeST& tst) {
BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_when_empty, typeST, list_of_tested_variants) {
typedef std::pair<typename typeST::Simplex_handle, bool> typePairSimplexBool;
- typedef std::vector<Vertex_handle> typeVectorVertex;
+ typedef std::vector<typename typeST::Vertex_handle> typeVectorVertex;
std::cout << "********************************************************************" << std::endl;
std::cout << "TEST OF DEFAULT CONSTRUCTOR" << std::endl;
@@ -72,8 +73,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_when_empty, typeST, list_of_tested_va
std::cout << "TEST OF EMPTY INSERTION" << std::endl;
typeVectorVertex simplexVectorEmpty;
BOOST_CHECK(simplexVectorEmpty.empty() == true);
- typePairSimplexBool returnEmptyValue = st.insert_simplex(simplexVectorEmpty,
- DEFAULT_FILTRATION_VALUE);
+ typePairSimplexBool returnEmptyValue = st.insert_simplex(simplexVectorEmpty, 0.0);
BOOST_CHECK(returnEmptyValue.first == typename typeST::Simplex_handle(nullptr));
BOOST_CHECK(returnEmptyValue.second == true);
@@ -141,12 +141,13 @@ void test_simplex_tree_contains(typeST& simplexTree, typeSimplex& simplex, int p
template<class typeST, class typePairSimplexBool>
void test_simplex_tree_insert_returns_true(const typePairSimplexBool& returnValue) {
BOOST_CHECK(returnValue.second == true);
- typename typeST::Simplex_handle shReturned = returnValue.first; // Simplex_handle = boost::container::flat_map< Vertex_handle, Node >::iterator
+ // Simplex_handle = boost::container::flat_map< typeST::Vertex_handle, Node >::iterator
+ typename typeST::Simplex_handle shReturned = returnValue.first;
BOOST_CHECK(shReturned != typename typeST::Simplex_handle(nullptr));
}
// Global variables
-Filtration_value max_fil = DEFAULT_FILTRATION_VALUE;
+double max_fil = 0.0;
int dim_max = -1;
template<class typeST, class Filtration_value>
@@ -179,8 +180,9 @@ void set_and_test_simplex_tree_dim_fil(typeST& simplexTree, int vectorSize, cons
}
BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_insertion, typeST, list_of_tested_variants) {
+ typedef typename typeST::Filtration_value Filtration_value;
typedef std::pair<typename typeST::Simplex_handle, bool> typePairSimplexBool;
- typedef std::vector<Vertex_handle> typeVectorVertex;
+ typedef std::vector<typename typeST::Vertex_handle> typeVectorVertex;
typedef std::pair<typeVectorVertex, Filtration_value> typeSimplex;
const Filtration_value FIRST_FILTRATION_VALUE = 0.1;
const Filtration_value SECOND_FILTRATION_VALUE = 0.2;
@@ -188,7 +190,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_insertion, typeST, list_of_tested_var
const Filtration_value FOURTH_FILTRATION_VALUE = 0.4;
// reset since we run the test several times
dim_max = -1;
- max_fil = DEFAULT_FILTRATION_VALUE;
+ max_fil = 0.0;
// TEST OF INSERTION
std::cout << "********************************************************************" << std::endl;
@@ -303,7 +305,8 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_insertion, typeST, list_of_tested_var
returnValue = st.insert_simplex(tenthSimplex.first, tenthSimplex.second);
BOOST_CHECK(returnValue.second == false);
- typename typeST::Simplex_handle shReturned = returnValue.first; // Simplex_handle = boost::container::flat_map< Vertex_handle, Node >::iterator
+ // Simplex_handle = boost::container::flat_map< typeST::Vertex_handle, Node >::iterator
+ typename typeST::Simplex_handle shReturned = returnValue.first;
BOOST_CHECK(shReturned == typename typeST::Simplex_handle(nullptr));
BOOST_CHECK(st.num_vertices() == (size_t) 4); // Not incremented !!
BOOST_CHECK(st.dimension() == dim_max);
@@ -317,7 +320,8 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_insertion, typeST, list_of_tested_var
returnValue = st.insert_simplex(eleventhSimplex.first, eleventhSimplex.second);
BOOST_CHECK(returnValue.second == false);
- shReturned = returnValue.first; // Simplex_handle = boost::container::flat_map< Vertex_handle, Node >::iterator
+ // Simplex_handle = boost::container::flat_map< typeST::Vertex_handle, Node >::iterator
+ shReturned = returnValue.first;
BOOST_CHECK(shReturned == typename typeST::Simplex_handle(nullptr));
BOOST_CHECK(st.num_vertices() == (size_t) 4); // Not incremented !!
BOOST_CHECK(st.dimension() == dim_max);
@@ -375,8 +379,8 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_insertion, typeST, list_of_tested_var
BOOST_AUTO_TEST_CASE_TEMPLATE(NSimplexAndSubfaces_tree_insertion, typeST, list_of_tested_variants) {
typedef std::pair<typename typeST::Simplex_handle, bool> typePairSimplexBool;
- typedef std::vector<Vertex_handle> typeVectorVertex;
- typedef std::pair<typeVectorVertex, Filtration_value> typeSimplex;
+ typedef std::vector<typename typeST::Vertex_handle> typeVectorVertex;
+ typedef std::pair<typeVectorVertex, typename typeST::Filtration_value> typeSimplex;
std::cout << "********************************************************************" << std::endl;
std::cout << "TEST OF RECURSIVE INSERTION" << std::endl;
typeST st;
@@ -394,7 +398,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(NSimplexAndSubfaces_tree_insertion, typeST, list_o
// Check it is well inserted
BOOST_CHECK(true == returnValue.second);
position = 0;
- std::sort(SimplexVector1.begin(), SimplexVector1.end(), std::greater<Vertex_handle>());
+ std::sort(SimplexVector1.begin(), SimplexVector1.end(), std::greater<typename typeST::Vertex_handle>());
for (auto vertex : st.simplex_vertex_range(returnValue.first)) {
// Check returned Simplex_handle
std::cout << "vertex = " << vertex << " | vector[" << position << "] = " << SimplexVector1[position] << std::endl;
@@ -413,7 +417,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(NSimplexAndSubfaces_tree_insertion, typeST, list_o
// Check it is well inserted
BOOST_CHECK(true == returnValue.second);
position = 0;
- std::sort(SimplexVector2.begin(), SimplexVector2.end(), std::greater<Vertex_handle>());
+ std::sort(SimplexVector2.begin(), SimplexVector2.end(), std::greater<typename typeST::Vertex_handle>());
for (auto vertex : st.simplex_vertex_range(returnValue.first)) {
// Check returned Simplex_handle
std::cout << "vertex = " << vertex << " | vector[" << position << "] = " << SimplexVector2[position] << std::endl;
@@ -432,7 +436,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(NSimplexAndSubfaces_tree_insertion, typeST, list_o
// Check it is well inserted
BOOST_CHECK(true == returnValue.second);
position = 0;
- std::sort(SimplexVector3.begin(), SimplexVector3.end(), std::greater<Vertex_handle>());
+ std::sort(SimplexVector3.begin(), SimplexVector3.end(), std::greater<typename typeST::Vertex_handle>());
for (auto vertex : st.simplex_vertex_range(returnValue.first)) {
// Check returned Simplex_handle
std::cout << "vertex = " << vertex << " | vector[" << position << "] = " << SimplexVector3[position] << std::endl;
@@ -462,7 +466,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(NSimplexAndSubfaces_tree_insertion, typeST, list_o
// Check it is well inserted
BOOST_CHECK(true == returnValue.second);
position = 0;
- std::sort(SimplexVector5.begin(), SimplexVector5.end(), std::greater<Vertex_handle>());
+ std::sort(SimplexVector5.begin(), SimplexVector5.end(), std::greater<typename typeST::Vertex_handle>());
for (auto vertex : st.simplex_vertex_range(returnValue.first)) {
// Check returned Simplex_handle
std::cout << "vertex = " << vertex << " | vector[" << position << "] = " << SimplexVector5[position] << std::endl;
@@ -481,7 +485,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(NSimplexAndSubfaces_tree_insertion, typeST, list_o
// Check it is well inserted
BOOST_CHECK(true == returnValue.second);
position = 0;
- std::sort(SimplexVector6.begin(), SimplexVector6.end(), std::greater<Vertex_handle>());
+ std::sort(SimplexVector6.begin(), SimplexVector6.end(), std::greater<typename typeST::Vertex_handle>());
for (auto vertex : st.simplex_vertex_range(returnValue.first)) {
// Check returned Simplex_handle
std::cout << "vertex = " << vertex << " | vector[" << position << "] = " << SimplexVector6[position] << std::endl;
@@ -504,12 +508,12 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(NSimplexAndSubfaces_tree_insertion, typeST, list_o
/* A facet [3,4,5] */
/* A cell [0,1,6,7] */
- typeSimplex simplexPair1 = std::make_pair(SimplexVector1, DEFAULT_FILTRATION_VALUE);
- typeSimplex simplexPair2 = std::make_pair(SimplexVector2, DEFAULT_FILTRATION_VALUE);
- typeSimplex simplexPair3 = std::make_pair(SimplexVector3, DEFAULT_FILTRATION_VALUE);
- typeSimplex simplexPair4 = std::make_pair(SimplexVector4, DEFAULT_FILTRATION_VALUE);
- typeSimplex simplexPair5 = std::make_pair(SimplexVector5, DEFAULT_FILTRATION_VALUE);
- typeSimplex simplexPair6 = std::make_pair(SimplexVector6, DEFAULT_FILTRATION_VALUE);
+ typeSimplex simplexPair1 = std::make_pair(SimplexVector1, 0.0);
+ typeSimplex simplexPair2 = std::make_pair(SimplexVector2, 0.0);
+ typeSimplex simplexPair3 = std::make_pair(SimplexVector3, 0.0);
+ typeSimplex simplexPair4 = std::make_pair(SimplexVector4, 0.0);
+ typeSimplex simplexPair5 = std::make_pair(SimplexVector5, 0.0);
+ typeSimplex simplexPair6 = std::make_pair(SimplexVector6, 0.0);
test_simplex_tree_contains(st, simplexPair1, 6); // (2,1,0) is in position 6
test_simplex_tree_contains(st, simplexPair2, 7); // (3) is in position 7
test_simplex_tree_contains(st, simplexPair3, 8); // (3,0) is in position 8
@@ -600,7 +604,7 @@ void test_cofaces(typeST& st, const std::vector<Vertex_handle>& expected, int di
}
BOOST_AUTO_TEST_CASE_TEMPLATE(coface_on_simplex_tree, typeST, list_of_tested_variants) {
- typedef std::vector<Vertex_handle> typeVectorVertex;
+ typedef std::vector<typename typeST::Vertex_handle> typeVectorVertex;
std::cout << "********************************************************************" << std::endl;
std::cout << "TEST COFACE ALGORITHM" << std::endl;
typeST st;
@@ -629,7 +633,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(coface_on_simplex_tree, typeST, list_of_tested_var
// FIXME
st.set_dimension(3);
- std::vector<Vertex_handle> simplex_result;
+ std::vector<typename typeST::Vertex_handle> simplex_result;
std::vector<typename typeST::Simplex_handle> result;
std::cout << "First test - Star of (3):" << std::endl;
@@ -649,7 +653,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(coface_on_simplex_tree, typeST, list_of_tested_var
result.push_back(st.find(simplex_result));
simplex_result.clear();
- std::vector<Vertex_handle> vertex = {3};
+ std::vector<typename typeST::Vertex_handle> vertex = {3};
test_cofaces(st, vertex, 0, result);
vertex.clear();
result.clear();
@@ -699,7 +703,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(coface_on_simplex_tree, typeST, list_of_tested_var
}
BOOST_AUTO_TEST_CASE_TEMPLATE(copy_move_on_simplex_tree, typeST, list_of_tested_variants) {
- typedef std::vector<Vertex_handle> typeVectorVertex;
+ typedef std::vector<typename typeST::Vertex_handle> typeVectorVertex;
std::cout << "********************************************************************" << std::endl;
std::cout << "TEST COPY MOVE CONSTRUCTORS" << std::endl;
typeST st;
@@ -771,12 +775,11 @@ void test_simplex_is_vertex(typeST& st, typename typeST::Simplex_handle sh, type
BOOST_AUTO_TEST_CASE(non_contiguous) {
typedef Simplex_tree<> typeST;
- typedef typeST::Vertex_handle Vertex_handle;
typedef typeST::Simplex_handle Simplex_handle;
std::cout << "********************************************************************" << std::endl;
std::cout << "TEST NON-CONTIGUOUS VERTICES" << std::endl;
typeST st;
- Vertex_handle e[] = {3,-7};
+ typeST::Vertex_handle e[] = {3,-7};
std::cout << "Insert" << std::endl;
st.insert_simplex_and_subfaces(e);
BOOST_CHECK(st.num_vertices() == 2);
diff --git a/src/Skeleton_blocker/doc/COPYRIGHT b/src/Skeleton_blocker/doc/COPYRIGHT
new file mode 100644
index 00000000..1de850d7
--- /dev/null
+++ b/src/Skeleton_blocker/doc/COPYRIGHT
@@ -0,0 +1,18 @@
+The files of this directory are part of the Gudhi Library. The Gudhi library
+(Geometric Understanding in Higher Dimensions) is a generic C++ library for
+computational topology.
+
+Author(s): David Salinas
+Copyright (C) 2015 INRIA
+
+This program is free software: you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free Software
+Foundation, either version 3 of the License, or (at your option) any later
+version.
+
+This program is distributed in the hope that it will be useful, but WITHOUT
+ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License along with
+this program. If not, see <http://www.gnu.org/licenses/>.
diff --git a/src/Spatial_searching/doc/Intro_spatial_searching.h b/src/Spatial_searching/doc/Intro_spatial_searching.h
new file mode 100644
index 00000000..23705378
--- /dev/null
+++ b/src/Spatial_searching/doc/Intro_spatial_searching.h
@@ -0,0 +1,62 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Clement Jamin
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef DOC_SPATIAL_SEARCHING_INTRO_SPATIAL_SEARCHING_H_
+#define DOC_SPATIAL_SEARCHING_INTRO_SPATIAL_SEARCHING_H_
+
+// needs namespaces for Doxygen to link on classes
+namespace Gudhi {
+namespace spatial_searching {
+
+/** \defgroup spatial_searching Spatial_searching
+ *
+ * \author Cl&eacute;ment Jamin
+ *
+ * @{
+ *
+ * \section introduction Introduction
+ *
+ * This Gudhi component is a wrapper around
+ * <a target="_blank" href="http://doc.cgal.org/latest/Spatial_searching/index.html">CGAL dD spatial searching algorithms</a>.
+ * It provides a simplified API to perform (approximate) neighbor searches. Contrary to CGAL's default behavior, the tree
+ * does not store the points themselves, but their indices.
+ *
+ * For more details about the data structure or the algorithms, or for more advanced usages, reading
+ * <a target="_blank" href="http://doc.cgal.org/latest/Spatial_searching/index.html">CGAL documentation</a>
+ * is highly recommended.
+ *
+ * \section spatial_searching_examples Example
+ *
+ * This example generates 500 random points, then performs queries for nearest and farthest points using different methods.
+ *
+ * \include Spatial_searching/example_spatial_searching.cpp
+ *
+ * \copyright GNU General Public License v3.
+ * \verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim
+ */
+/** @} */ // end defgroup spatial_searching
+
+} // namespace spatial_searching
+
+} // namespace Gudhi
+
+#endif // DOC_SPATIAL_SEARCHING_INTRO_SPATIAL_SEARCHING_H_
diff --git a/src/Spatial_searching/example/CMakeLists.txt b/src/Spatial_searching/example/CMakeLists.txt
new file mode 100644
index 00000000..6238a0ec
--- /dev/null
+++ b/src/Spatial_searching/example/CMakeLists.txt
@@ -0,0 +1,13 @@
+cmake_minimum_required(VERSION 2.6)
+project(Spatial_searching_examples)
+
+if(CGAL_FOUND)
+ if (NOT CGAL_VERSION VERSION_LESS 4.8.1)
+ if (EIGEN3_FOUND)
+ add_executable( Spatial_searching_example_spatial_searching example_spatial_searching.cpp )
+ target_link_libraries(Spatial_searching_example_spatial_searching ${CGAL_LIBRARY})
+ add_test(Spatial_searching_example_spatial_searching
+ ${CMAKE_CURRENT_BINARY_DIR}/Spatial_searching_example_spatial_searching)
+ endif()
+ endif()
+endif()
diff --git a/src/Spatial_searching/example/example_spatial_searching.cpp b/src/Spatial_searching/example/example_spatial_searching.cpp
new file mode 100644
index 00000000..14b324ae
--- /dev/null
+++ b/src/Spatial_searching/example/example_spatial_searching.cpp
@@ -0,0 +1,52 @@
+#include <gudhi/Kd_tree_search.h>
+
+#include <CGAL/Epick_d.h>
+#include <CGAL/Random.h>
+
+#include <vector>
+
+namespace gss = Gudhi::spatial_searching;
+
+int main(void) {
+ typedef CGAL::Epick_d<CGAL::Dimension_tag<4> > K;
+ typedef typename K::Point_d Point;
+ typedef std::vector<Point> Points;
+
+ typedef gss::Kd_tree_search<K, Points> Points_ds;
+
+ CGAL::Random rd;
+
+ Points points;
+ for (int i = 0; i < 500; ++i)
+ points.push_back(Point(rd.get_double(-1., 1), rd.get_double(-1., 1), rd.get_double(-1., 1), rd.get_double(-1., 1)));
+
+ Points_ds points_ds(points);
+
+ // 10-nearest neighbor query
+ std::cout << "10 nearest neighbors from points[20]:\n";
+ auto knn_range = points_ds.query_k_nearest_neighbors(points[20], 10, true);
+ for (auto const& nghb : knn_range)
+ std::cout << nghb.first << " (sq. dist. = " << nghb.second << ")\n";
+
+ // Incremental nearest neighbor query
+ std::cout << "Incremental nearest neighbors:\n";
+ auto inn_range = points_ds.query_incremental_nearest_neighbors(points[45]);
+ // Get the neighbors in distance order until we hit the first point
+ for (auto ins_iterator = inn_range.begin(); ins_iterator->first != 0; ++ins_iterator)
+ std::cout << ins_iterator->first << " (sq. dist. = " << ins_iterator->second << ")\n";
+
+ // 10-farthest neighbor query
+ std::cout << "10 farthest neighbors from points[20]:\n";
+ auto kfn_range = points_ds.query_k_farthest_neighbors(points[20], 10, true);
+ for (auto const& nghb : kfn_range)
+ std::cout << nghb.first << " (sq. dist. = " << nghb.second << ")\n";
+
+ // Incremental farthest neighbor query
+ std::cout << "Incremental farthest neighbors:\n";
+ auto ifn_range = points_ds.query_incremental_farthest_neighbors(points[45]);
+ // Get the neighbors in decreasing distance order, stopping when point 0 is reached
+ for (auto ifs_iterator = ifn_range.begin(); ifs_iterator->first != 0; ++ifs_iterator)
+ std::cout << ifs_iterator->first << " (sq. dist. = " << ifs_iterator->second << ")\n";
+
+ return 0;
+}
diff --git a/src/Spatial_searching/include/gudhi/Kd_tree_search.h b/src/Spatial_searching/include/gudhi/Kd_tree_search.h
new file mode 100644
index 00000000..6728d56e
--- /dev/null
+++ b/src/Spatial_searching/include/gudhi/Kd_tree_search.h
@@ -0,0 +1,264 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Clement Jamin
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef KD_TREE_SEARCH_H_
+#define KD_TREE_SEARCH_H_
+
+#include <CGAL/Orthogonal_k_neighbor_search.h>
+#include <CGAL/Orthogonal_incremental_neighbor_search.h>
+#include <CGAL/Search_traits.h>
+#include <CGAL/Search_traits_adapter.h>
+#include <CGAL/property_map.h>
+
+#include <boost/property_map/property_map.hpp>
+#include <boost/iterator/counting_iterator.hpp>
+
+#include <cstddef>
+#include <vector>
+
+namespace Gudhi {
+namespace spatial_searching {
+
+
+ /**
+ * \class Kd_tree_search Kd_tree_search.h gudhi/Kd_tree_search.h
+ * \brief Spatial tree data structure to perform (approximate) nearest and farthest neighbor search.
+ *
+ * \ingroup spatial_searching
+ *
+ * \details
+ * The class Kd_tree_search is a tree-based data structure, based on
+ * <a target="_blank" href="http://doc.cgal.org/latest/Spatial_searching/index.html">CGAL dD spatial searching data structures</a>.
+ * It provides a simplified API to perform (approximate) nearest and farthest neighbor searches. Contrary to CGAL's default behavior, the tree
+ * does not store the points themselves, but their indices.
+ *
+ * There are two types of queries: the <i>k-nearest or k-farthest neighbor query</i>, where <i>k</i> is fixed and the <i>k</i> nearest
+ * or farthest points are computed right away,
+ * and the <i>incremental nearest or farthest neighbor query</i>, where no number of neighbors is provided during the call, as the
+ * neighbors will be computed incrementally when the iterator on the range is incremented.
+ *
+ * \tparam Search_traits must be a model of the <a target="_blank"
+ * href="http://doc.cgal.org/latest/Spatial_searching/classSearchTraits.html">SearchTraits</a>
+ * concept, such as the <a target="_blank"
+ * href="http://doc.cgal.org/latest/Kernel_d/classCGAL_1_1Epick__d.html">CGAL::Epick_d</a> class, which
+ * can be static if you know the ambient dimension at compile-time, or dynamic if you don't.
+ * \tparam Point_range is the type of the range that provides the points.
+ * It must be a range whose iterator type is a `RandomAccessIterator`.
+ */
+template <typename Search_traits, typename Point_range>
+class Kd_tree_search {
+ typedef boost::iterator_property_map<
+ typename Point_range::const_iterator,
+ CGAL::Identity_property_map<std::ptrdiff_t> > Point_property_map;
+
+ public:
+ /// The Traits.
+ typedef Search_traits Traits;
+ /// Number type used for distances.
+ typedef typename Traits::FT FT;
+ /// The point type.
+ typedef typename Point_range::value_type Point;
+
+ typedef CGAL::Search_traits<
+ FT, Point,
+ typename Traits::Cartesian_const_iterator_d,
+ typename Traits::Construct_cartesian_const_iterator_d> Traits_base;
+
+ typedef CGAL::Search_traits_adapter<
+ std::ptrdiff_t,
+ Point_property_map,
+ Traits_base> STraits;
+
+ typedef CGAL::Orthogonal_k_neighbor_search<STraits> K_neighbor_search;
+ typedef typename K_neighbor_search::Tree Tree;
+ typedef typename K_neighbor_search::Distance Distance;
+ /// \brief The range returned by a k-nearest or k-farthest neighbor search.
+ /// Its value type is `std::pair<std::size_t, FT>` where `first` is the index
+ /// of a point P and `second` is the squared distance between P and the query point.
+ typedef K_neighbor_search KNS_range;
+
+ typedef CGAL::Orthogonal_incremental_neighbor_search<
+ STraits, Distance, CGAL::Sliding_midpoint<STraits>, Tree>
+ Incremental_neighbor_search;
+ /// \brief The range returned by an incremental nearest or farthest neighbor search.
+ /// Its value type is `std::pair<std::size_t, FT>` where `first` is the index
+ /// of a point P and `second` is the squared distance between P and the query point.
+ typedef Incremental_neighbor_search INS_range;
+
+ /// \brief Constructor
+ /// @param[in] points Const reference to the point range. This range
+ /// is not copied, so it should not be destroyed or modified afterwards.
+ Kd_tree_search(Point_range const& points)
+ : m_points(points),
+ m_tree(boost::counting_iterator<std::ptrdiff_t>(0),
+ boost::counting_iterator<std::ptrdiff_t>(points.size()),
+ typename Tree::Splitter(),
+ STraits(std::begin(points))) {
+ // Build the tree now (we don't want to wait for the first query)
+ m_tree.build();
+ }
+
+ /// \brief Constructor
+ /// @param[in] points Const reference to the point range. This range
+ /// is not copied, so it should not be destroyed or modified afterwards.
+ /// @param[in] only_these_points Specifies the indices of the points that
+ /// should be actually inserted into the tree. The other points are ignored.
+ template <typename Point_indices_range>
+ Kd_tree_search(
+ Point_range const& points,
+ Point_indices_range const& only_these_points)
+ : m_points(points),
+ m_tree(
+ only_these_points.begin(), only_these_points.end(),
+ typename Tree::Splitter(),
+ STraits(std::begin(points))) {
+ // Build the tree now (we don't want to wait for the first query)
+ m_tree.build();
+ }
+
+ /// \brief Constructor
+ /// @param[in] points Const reference to the point range. This range
+ /// is not copied, so it should not be destroyed or modified afterwards.
+ /// @param[in] begin_idx, past_the_end_idx Define the subset of the points that
+ /// should be actually inserted into the tree. The other points are ignored.
+ Kd_tree_search(
+ Point_range const& points,
+ std::size_t begin_idx, std::size_t past_the_end_idx)
+ : m_points(points),
+ m_tree(
+ boost::counting_iterator<std::ptrdiff_t>(begin_idx),
+ boost::counting_iterator<std::ptrdiff_t>(past_the_end_idx),
+ typename Tree::Splitter(),
+ STraits(std::begin(points))) {
+ // Build the tree now (we don't want to wait for the first query)
+ m_tree.build();
+ }
+
+ // Be careful, this function invalidates the tree,
+ // which will be recomputed at the next query
+ void insert(std::ptrdiff_t point_idx) {
+ m_tree.insert(point_idx);
+ }
+
+ /// \brief Search for the k-nearest neighbors from a query point.
+ /// @param[in] p The query point.
+ /// @param[in] k Number of nearest points to search.
+ /// @param[in] sorted Indicates if the computed sequence of k-nearest neighbors needs to be sorted.
+ /// @param[in] eps Approximation factor.
+ /// @return A range containing the k-nearest neighbors.
+ KNS_range query_k_nearest_neighbors(const Point &p,
+                                     unsigned int k,
+                                     bool sorted = true,
+                                     FT eps = FT(0)) const {
+ // Initialize the search structure, and search all N points
+ // Note that we need to pass the Distance explicitly since it needs to
+ // know the property map
+ K_neighbor_search search(
+ m_tree,
+ p,
+ k,
+ eps,
+ true,
+ CGAL::Distance_adapter<std::ptrdiff_t, Point_property_map, CGAL::Euclidean_distance<Traits_base> >(
+ std::begin(m_points)), sorted);
+
+ return search;
+ }
+
+ /// \brief Search incrementally for the nearest neighbors from a query point.
+ /// @param[in] p The query point.
+ /// @param[in] eps Approximation factor.
+ /// @return A range containing the neighbors sorted by their distance to p.
+ /// Not all the neighbors are computed by this function; they are
+ /// computed incrementally as the iterator on the range is incremented.
+ INS_range query_incremental_nearest_neighbors(const Point &p, FT eps = FT(0)) const {
+ // Initialize the search structure, and search all N points
+ // Note that we need to pass the Distance explicitly since it needs to
+ // know the property map
+ Incremental_neighbor_search search(
+ m_tree,
+ p,
+ eps,
+ true,
+ CGAL::Distance_adapter<std::ptrdiff_t, Point_property_map, CGAL::Euclidean_distance<Traits_base> >(
+ std::begin(m_points)) );
+
+ return search;
+ }
+
+ /// \brief Search for the k-farthest points from a query point.
+ /// @param[in] p The query point.
+ /// @param[in] k Number of farthest points to search.
+ /// @param[in] sorted Indicates if the computed sequence of k-farthest neighbors needs to be sorted.
+ /// @param[in] eps Approximation factor.
+ /// @return A range containing the k-farthest neighbors.
+ KNS_range query_k_farthest_neighbors(const Point &p,
+                                      unsigned int k,
+                                      bool sorted = true,
+                                      FT eps = FT(0)) const {
+ // Initialize the search structure, and search all N points
+ // Note that we need to pass the Distance explicitly since it needs to
+ // know the property map
+ K_neighbor_search search(
+ m_tree,
+ p,
+ k,
+ eps,
+ false,
+ CGAL::Distance_adapter<std::ptrdiff_t, Point_property_map, CGAL::Euclidean_distance<Traits_base> >(
+ std::begin(m_points)), sorted);
+
+ return search;
+ }
+
+ /// \brief Search incrementally for the farthest neighbors from a query point.
+ /// @param[in] p The query point.
+ /// @param[in] eps Approximation factor.
+ /// @return A range containing the neighbors sorted by their distance to p.
+ /// Not all the neighbors are computed by this function; they are
+ /// computed incrementally as the iterator on the range is incremented.
+ INS_range query_incremental_farthest_neighbors(const Point &p, FT eps = FT(0)) const {
+ // Initialize the search structure, and search all N points
+ // Note that we need to pass the Distance explicitly since it needs to
+ // know the property map
+ Incremental_neighbor_search search(
+ m_tree,
+ p,
+ eps,
+ false,
+ CGAL::Distance_adapter<std::ptrdiff_t, Point_property_map, CGAL::Euclidean_distance<Traits_base> >(
+ std::begin(m_points)) );
+
+ return search;
+ }
+
+ private:
+ Point_range const& m_points;
+ Tree m_tree;
+};
+
+} // namespace spatial_searching
+} // namespace Gudhi
+
+#endif // KD_TREE_SEARCH_H_
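For reference, here is a short usage sketch of the subrange constructor declared above, which neither the example nor the unit test exercises. The Epick_d kernel and the hard-coded points are illustrative assumptions; only members shown in this header are used.

#include <gudhi/Kd_tree_search.h>

#include <CGAL/Epick_d.h>

#include <iostream>
#include <vector>

int main() {
  typedef CGAL::Epick_d<CGAL::Dimension_tag<4> > K;
  typedef K::Point_d Point;
  typedef std::vector<Point> Points;
  typedef Gudhi::spatial_searching::Kd_tree_search<K, Points> Points_ds;

  // Small hard-coded cloud; the tree stores indices into this range, not copies.
  Points points = { Point(0., 0., 0., 0.), Point(1., 0., 0., 0.),
                    Point(0., 2., 0., 0.), Point(0., 0., 3., 0.) };

  // Only the subrange [0, 3) is indexed: points[3] is ignored by the queries.
  Points_ds points_ds(points, 0, 3);

  // Pairs (index, squared distance) of the 2 points nearest to points[0].
  for (auto const& nghb : points_ds.query_k_nearest_neighbors(points[0], 2, true))
    std::cout << nghb.first << " (sq. dist. = " << nghb.second << ")\n";

  return 0;
}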
diff --git a/src/Spatial_searching/test/CMakeLists.txt b/src/Spatial_searching/test/CMakeLists.txt
new file mode 100644
index 00000000..bdc95e4a
--- /dev/null
+++ b/src/Spatial_searching/test/CMakeLists.txt
@@ -0,0 +1,21 @@
+cmake_minimum_required(VERSION 2.6)
+project(Spatial_searching_tests)
+
+if (GCOVR_PATH)
+ # for gcovr to make coverage reports - Corbera Jenkins plugin
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fprofile-arcs -ftest-coverage")
+endif()
+if (GPROF_PATH)
+ # for gprof to make coverage reports - Jenkins
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -pg")
+endif()
+
+if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
+ add_executable( Spatial_searching_test_Kd_tree_search test_Kd_tree_search.cpp )
+ target_link_libraries(Spatial_searching_test_Kd_tree_search
+ ${CGAL_LIBRARY} ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
+
+ add_test(Spatial_searching_test_Kd_tree_search ${CMAKE_CURRENT_BINARY_DIR}/Spatial_searching_test_Kd_tree_search
+ # XML format for Jenkins xUnit plugin
+ --log_format=XML --log_sink=${CMAKE_SOURCE_DIR}/Spatial_searching_UT.xml --log_level=test_suite --report_level=no)
+endif ()
diff --git a/src/Spatial_searching/test/test_Kd_tree_search.cpp b/src/Spatial_searching/test/test_Kd_tree_search.cpp
new file mode 100644
index 00000000..0ef22023
--- /dev/null
+++ b/src/Spatial_searching/test/test_Kd_tree_search.cpp
@@ -0,0 +1,112 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Clement Jamin
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#define BOOST_TEST_DYN_LINK
+#define BOOST_TEST_MODULE Spatial_searching - test Kd_tree_search
+#include <boost/test/unit_test.hpp>
+
+#include <gudhi/Kd_tree_search.h>
+
+#include <CGAL/Epick_d.h>
+#include <CGAL/Random.h>
+
+#include <vector>
+
+BOOST_AUTO_TEST_CASE(test_Kd_tree_search) {
+ typedef CGAL::Epick_d<CGAL::Dimension_tag<4> > K;
+ typedef K::FT FT;
+ typedef K::Point_d Point;
+ typedef std::vector<Point> Points;
+
+ typedef Gudhi::spatial_searching::Kd_tree_search<
+ K, Points> Points_ds;
+
+ CGAL::Random rd;
+
+ Points points;
+ for (int i = 0; i < 500; ++i)
+ points.push_back(Point(rd.get_double(-1., 1), rd.get_double(-1., 1), rd.get_double(-1., 1), rd.get_double(-1., 1)));
+
+ Points_ds points_ds(points);
+
+ // Test query_k_nearest_neighbors
+ std::size_t closest_pt_index =
+ points_ds.query_k_nearest_neighbors(points[10], 1, false).begin()->first;
+ BOOST_CHECK(closest_pt_index == 10);
+
+ auto kns_range = points_ds.query_k_nearest_neighbors(points[20], 10, true);
+
+ std::vector<FT> knn_result;  // squared distances of the 10 nearest neighbors
+ FT last_dist = -1.;
+ for (auto const& nghb : kns_range) {
+ BOOST_CHECK(nghb.second > last_dist);
+ knn_result.push_back(nghb.second);
+ last_dist = nghb.second;
+ }
+
+ // Test query_incremental_nearest_neighbors
+ closest_pt_index =
+ points_ds.query_incremental_nearest_neighbors(points[10]).begin()->first;
+ BOOST_CHECK(closest_pt_index == 10);
+
+ auto inn_range = points_ds.query_incremental_nearest_neighbors(points[20]);
+
+ std::vector<FT> inn_result;  // squared distances from the incremental query
+ last_dist = -1.;
+ auto inn_it = inn_range.begin();
+ for (int i = 0; i < 10; ++inn_it, ++i) {
+ auto const& nghb = *inn_it;
+ BOOST_CHECK(nghb.second > last_dist);
+ inn_result.push_back(nghb.second);
+ last_dist = nghb.second;
+ }
+
+ // Same result for KNN and INN?
+ BOOST_CHECK(knn_result == inn_result);
+
+ // Test query_k_farthest_neighbors
+ auto kfn_range = points_ds.query_k_farthest_neighbors(points[20], 10, true);
+
+ std::vector<FT> kfn_result;  // squared distances of the 10 farthest neighbors
+ last_dist = kfn_range.begin()->second;
+ for (auto const& nghb : kfn_range) {
+ BOOST_CHECK(nghb.second <= last_dist);
+ kfn_result.push_back(nghb.second);
+ last_dist = nghb.second;
+ }
+
+ // Test query_incremental_farthest_neighbors
+ auto ifn_range = points_ds.query_incremental_farthest_neighbors(points[20]);
+
+ std::vector<FT> ifn_result;  // squared distances from the incremental query
+ last_dist = ifn_range.begin()->second;
+ auto ifn_it = ifn_range.begin();
+ for (int i = 0; i < 10; ++ifn_it, ++i) {
+ auto const& nghb = *ifn_it;
+ BOOST_CHECK(nghb.second <= last_dist);
+ ifn_result.push_back(nghb.second);
+ last_dist = nghb.second;
+ }
+
+ // Same result for KFN and IFN?
+ BOOST_CHECK(kfn_result == ifn_result);
+}
diff --git a/src/Subsampling/doc/Intro_subsampling.h b/src/Subsampling/doc/Intro_subsampling.h
new file mode 100644
index 00000000..c84616dd
--- /dev/null
+++ b/src/Subsampling/doc/Intro_subsampling.h
@@ -0,0 +1,70 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Clement Jamin
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef DOC_SUBSAMPLING_INTRO_SUBSAMPLING_H_
+#define DOC_SUBSAMPLING_INTRO_SUBSAMPLING_H_
+
+// needs namespace for Doxygen to link on classes
+namespace Gudhi {
+// needs namespace for Doxygen to link on classes
+namespace subsampling {
+
+/** \defgroup subsampling Subsampling
+ *
+ * \author Cl&eacute;ment Jamin, Siargey Kachanovich
+ *
+ * @{
+ *
+ * \section subsamplingintroduction Introduction
+ *
+ * This Gudhi component offers methods to subsample a set of points.
+ *
+ * \section sparsifyexamples Example: sparsify_point_set
+ *
+ * This example outputs a subset of the input points so that the
+ * squared distance between any two points
+ * is greater than or equal to 0.4.
+ *
+ * \include Subsampling/example_sparsify_point_set.cpp
+ *
+ * \section farthestpointexamples Example: choose_n_farthest_points
+ *
+ * This example outputs a subset of 100 points obtained by the Gonz&aacute;lez algorithm,
+ * starting with a random point.
+ *
+ * \include Subsampling/example_choose_n_farthest_points.cpp
+ *
+ * \section randompointexamples Example: pick_n_random_points
+ *
+ * This example outputs a subset of 100 points picked randomly.
+ *
+ * \include Subsampling/example_pick_n_random_points.cpp
+ * \copyright GNU General Public License v3.
+ * \verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim
+ */
+/** @} */ // end defgroup subsampling
+
+} // namespace subsampling
+
+} // namespace Gudhi
+
+#endif // DOC_SUBSAMPLING_INTRO_SUBSAMPLING_H_
diff --git a/src/Subsampling/example/CMakeLists.txt b/src/Subsampling/example/CMakeLists.txt
new file mode 100644
index 00000000..bb043297
--- /dev/null
+++ b/src/Subsampling/example/CMakeLists.txt
@@ -0,0 +1,17 @@
+cmake_minimum_required(VERSION 2.6)
+project(Subsampling_examples)
+
+if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
+ add_executable(Subsampling_example_pick_n_random_points example_pick_n_random_points.cpp)
+ add_executable(Subsampling_example_choose_n_farthest_points example_choose_n_farthest_points.cpp)
+ add_executable(Subsampling_example_custom_kernel example_custom_kernel.cpp)
+ add_executable(Subsampling_example_sparsify_point_set example_sparsify_point_set.cpp)
+ target_link_libraries(Subsampling_example_sparsify_point_set ${CGAL_LIBRARY})
+
+ add_test(Subsampling_example_pick_n_random_points
+ ${CMAKE_CURRENT_BINARY_DIR}/Subsampling_example_pick_n_random_points)
+ add_test(Subsampling_example_choose_n_farthest_points
+ ${CMAKE_CURRENT_BINARY_DIR}/Subsampling_example_choose_n_farthest_points)
+ add_test(Subsampling_example_sparsify_point_set
+ ${CMAKE_CURRENT_BINARY_DIR}/Subsampling_example_sparsify_point_set)
+endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
diff --git a/src/Subsampling/example/example_choose_n_farthest_points.cpp b/src/Subsampling/example/example_choose_n_farthest_points.cpp
new file mode 100644
index 00000000..533aba74
--- /dev/null
+++ b/src/Subsampling/example/example_choose_n_farthest_points.cpp
@@ -0,0 +1,27 @@
+#include <gudhi/choose_n_farthest_points.h>
+
+#include <CGAL/Epick_d.h>
+#include <CGAL/Random.h>
+
+#include <vector>
+#include <iterator>
+
+int main(void) {
+ typedef CGAL::Epick_d<CGAL::Dimension_tag<4> > K;
+ typedef typename K::Point_d Point_d;
+
+ CGAL::Random rd;
+
+ std::vector<Point_d> points;
+ for (int i = 0; i < 500; ++i)
+ points.push_back(Point_d(rd.get_double(-1., 1), rd.get_double(-1., 1),
+ rd.get_double(-1., 1), rd.get_double(-1., 1)));
+
+ K k;
+ std::vector<Point_d> results;
+ Gudhi::subsampling::choose_n_farthest_points(k, points, 100, std::back_inserter(results));
+ std::cout << "Before sparsification: " << points.size() << " points.\n";
+ std::cout << "After sparsification: " << results.size() << " points.\n";
+
+ return 0;
+}
diff --git a/src/Subsampling/example/example_custom_kernel.cpp b/src/Subsampling/example/example_custom_kernel.cpp
new file mode 100644
index 00000000..25b5bf6c
--- /dev/null
+++ b/src/Subsampling/example/example_custom_kernel.cpp
@@ -0,0 +1,63 @@
+#include <gudhi/choose_n_farthest_points.h>
+
+#include <CGAL/Epick_d.h>
+#include <CGAL/Random.h>
+
+#include <vector>
+#include <iterator>
+
+
+/* The class Kernel contains a distance function defined on the set of points {0, 1, 2, 3}
+ * and computes a distance according to the matrix:
+ * 0 1 2 4
+ * 1 0 4 2
+ * 2 4 0 1
+ * 4 2 1 0
+ */
+class Kernel {
+ public:
+ typedef double FT;
+ typedef unsigned Point_d;
+
+ // Class Squared_distance_d
+ class Squared_distance_d {
+ private:
+ std::vector<std::vector<FT>> matrix_;
+
+ public:
+ Squared_distance_d() {
+ matrix_.push_back(std::vector<FT>({0, 1, 2, 4}));
+ matrix_.push_back(std::vector<FT>({1, 0, 4, 2}));
+ matrix_.push_back(std::vector<FT>({2, 4, 0, 1}));
+ matrix_.push_back(std::vector<FT>({4, 2, 1, 0}));
+ }
+
+ FT operator()(Point_d p1, Point_d p2) {
+ return matrix_[p1][p2];
+ }
+ };
+
+ // Constructor
+ Kernel() {}
+
+ // Object of type Squared_distance_d
+ Squared_distance_d squared_distance_d_object() const {
+ return Squared_distance_d();
+ }
+};
+
+int main(void) {
+ typedef Kernel K;
+ typedef typename K::Point_d Point_d;
+
+ K k;
+ std::vector<Point_d> points = {0, 1, 2, 3};
+ std::vector<Point_d> results;
+
+ Gudhi::subsampling::choose_n_farthest_points(k, points, 2, std::back_inserter(results));
+ std::cout << "Before sparsification: " << points.size() << " points.\n";
+ std::cout << "After sparsification: " << results.size() << " points.\n";
+ std::cout << "Result table: {" << results[0] << "," << results[1] << "}\n";
+
+ return 0;
+}
diff --git a/src/Subsampling/example/example_pick_n_random_points.cpp b/src/Subsampling/example/example_pick_n_random_points.cpp
new file mode 100644
index 00000000..1e38e405
--- /dev/null
+++ b/src/Subsampling/example/example_pick_n_random_points.cpp
@@ -0,0 +1,27 @@
+#include <gudhi/pick_n_random_points.h>
+
+#include <CGAL/Epick_d.h>
+#include <CGAL/Random.h>
+
+#include <vector>
+#include <iterator>
+
+int main(void) {
+ typedef CGAL::Epick_d<CGAL::Dimension_tag<4> > K;
+ typedef typename K::Point_d Point_d;
+
+ CGAL::Random rd;
+
+ std::vector<Point_d> points;
+ for (int i = 0; i < 500; ++i)
+ points.push_back(Point_d(rd.get_double(-1., 1), rd.get_double(-1., 1),
+ rd.get_double(-1., 1), rd.get_double(-1., 1)));
+
+ K k;
+ std::vector<Point_d> results;
+ Gudhi::subsampling::pick_n_random_points(points, 100, std::back_inserter(results));
+ std::cout << "Before sparsification: " << points.size() << " points.\n";
+ std::cout << "After sparsification: " << results.size() << " points.\n";
+
+ return 0;
+}
diff --git a/src/Subsampling/example/example_sparsify_point_set.cpp b/src/Subsampling/example/example_sparsify_point_set.cpp
new file mode 100644
index 00000000..b35a18d9
--- /dev/null
+++ b/src/Subsampling/example/example_sparsify_point_set.cpp
@@ -0,0 +1,27 @@
+#include <gudhi/sparsify_point_set.h>
+
+#include <CGAL/Epick_d.h>
+#include <CGAL/Random.h>
+
+#include <vector>
+#include <iterator>
+
+int main(void) {
+ typedef CGAL::Epick_d<CGAL::Dimension_tag<4> > K;
+ typedef typename K::Point_d Point_d;
+
+ CGAL::Random rd;
+
+ std::vector<Point_d> points;
+ for (int i = 0; i < 500; ++i)
+ points.push_back(Point_d(rd.get_double(-1., 1), rd.get_double(-1., 1),
+ rd.get_double(-1., 1), rd.get_double(-1., 1)));
+
+ K k;
+ std::vector<Point_d> results;
+ Gudhi::subsampling::sparsify_point_set(k, points, 0.4, std::back_inserter(results));
+ std::cout << "Before sparsification: " << points.size() << " points.\n";
+ std::cout << "After sparsification: " << results.size() << " points.\n";
+
+ return 0;
+}
diff --git a/src/Subsampling/include/gudhi/choose_n_farthest_points.h b/src/Subsampling/include/gudhi/choose_n_farthest_points.h
new file mode 100644
index 00000000..5e908090
--- /dev/null
+++ b/src/Subsampling/include/gudhi/choose_n_farthest_points.h
@@ -0,0 +1,157 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Siargey Kachanovich
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef CHOOSE_N_FARTHEST_POINTS_H_
+#define CHOOSE_N_FARTHEST_POINTS_H_
+
+#include <boost/range.hpp>
+
+#include <gudhi/Kd_tree_search.h>
+
+#include <gudhi/Clock.h>
+
+#include <CGAL/Search_traits.h>
+#include <CGAL/Search_traits_adapter.h>
+#include <CGAL/Fuzzy_sphere.h>
+
+#include <iterator>
+#include <algorithm> // for sort
+#include <vector>
+#include <random>
+#include <limits> // for numeric_limits<>
+
+namespace Gudhi {
+
+namespace subsampling {
+
+/**
+ * \ingroup subsampling
+ * \brief Subsample by a greedy strategy of iteratively adding the farthest point from the
+ * current chosen point set to the subsampling.
+ * The iteration starts with the landmark `starting_point`.
+ * \tparam Kernel must provide a type Kernel::Squared_distance_d which is a model of the
+ * concept <a target="_blank"
+ * href="http://doc.cgal.org/latest/Kernel_d/classKernel__d_1_1Squared__distance__d.html">Kernel_d::Squared_distance_d</a>.
+ * It must also provide a public member function `squared_distance_d_object()` returning an object of this type.
+ * \tparam Point_range Range whose value type is Kernel::Point_d. It must provide random-access
+ * via `operator[]` and the points should be stored contiguously in memory.
+ * \tparam OutputIterator Output iterator whose value type is Kernel::Point_d.
+ * \details It chooses `final_size` points from a random access range `input_pts` and
+ * outputs them through the output iterator `output_it`.
+ * @param[in] k A kernel object.
+ * @param[in] input_pts Const reference to the input points.
+ * @param[in] final_size The size of the subsample to compute.
+ * @param[in] starting_point The seed in the farthest point algorithm.
+ * @param[out] output_it The output iterator.
+ *
+ */
+template < typename Kernel,
+typename Point_range,
+typename OutputIterator>
+void choose_n_farthest_points(Kernel const &k,
+ Point_range const &input_pts,
+ std::size_t final_size,
+ std::size_t starting_point,
+ OutputIterator output_it) {
+ std::size_t nb_points = boost::size(input_pts);
+ if (final_size > nb_points)
+ final_size = nb_points;
+
+ // Boundary case: nothing to do
+ if (final_size < 1)
+ return;
+
+ typename Kernel::Squared_distance_d sqdist = k.squared_distance_d_object();
+
+ std::size_t current_number_of_landmarks = 0; // counter for landmarks
+ const double infty = std::numeric_limits<double>::infinity(); // used to initialize dist_to_L (next line)
+ std::vector< double > dist_to_L(nb_points, infty); // vector of current distances to L from input_pts
+
+ std::size_t curr_max_w = starting_point;
+
+ for (current_number_of_landmarks = 0; current_number_of_landmarks != final_size; current_number_of_landmarks++) {
+ // curr_max_w at this point is the next landmark
+ *output_it++ = input_pts[curr_max_w];
+ std::size_t i = 0;
+ for (auto& p : input_pts) {
+ double curr_dist = sqdist(p, *(std::begin(input_pts) + curr_max_w));
+ if (curr_dist < dist_to_L[i])
+ dist_to_L[i] = curr_dist;
+ ++i;
+ }
+ // choose the next curr_max_w
+ double curr_max_dist = 0; // used for finding the farthest point from L
+ for (i = 0; i < dist_to_L.size(); i++)
+ if (dist_to_L[i] > curr_max_dist) {
+ curr_max_dist = dist_to_L[i];
+ curr_max_w = i;
+ }
+ }
+}
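+
+// Illustrative usage sketch (not part of the library; shown here for clarity).
+// Assuming a kernel type K satisfying the concept above, e.g.
+// CGAL::Epick_d<CGAL::Dimension_tag<4>>, and a std::vector<K::Point_d> `points`:
+//
+//   K k;
+//   std::vector<K::Point_d> landmarks;
+//   // pick 100 landmarks, seeding the greedy search at input point 0
+//   Gudhi::subsampling::choose_n_farthest_points(k, points, 100, 0,
+//                                                std::back_inserter(landmarks));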
+
+/**
+ * \ingroup subsampling
+ * \brief Subsample by a greedy strategy of iteratively adding the farthest point from the
+ * current chosen point set to the subsampling.
+ * The iteration starts with a random landmark.
+ * \tparam Kernel must provide a type Kernel::Squared_distance_d which is a model of the
+ * concept <a target="_blank"
+ * href="http://doc.cgal.org/latest/Kernel_d/classKernel__d_1_1Squared__distance__d.html">Kernel_d::Squared_distance_d</a>
+ * concept.
+ * It must also contain a public member 'squared_distance_d_object' of this type.
+ * \tparam Point_range Range whose value type is Kernel::Point_d. It must provide random-access
+ * via `operator[]` and the points should be stored contiguously in memory.
+ * \tparam OutputIterator Output iterator whose value type is Kernel::Point_d.
+ * \details It chooses `final_size` points from a random access range `input_pts` and
+ * outputs them through the output iterator `output_it`.
+ * @param[in] k A kernel object.
+ * @param[in] input_pts Const reference to the input points.
+ * @param[in] final_size The size of the subsample to compute.
+ * @param[out] output_it The output iterator.
+ *
+ */
+template < typename Kernel,
+typename Point_range,
+typename OutputIterator>
+void choose_n_farthest_points(Kernel const& k,
+ Point_range const &input_pts,
+ unsigned final_size,
+ OutputIterator output_it) {
+ // Boundary cases: nothing to do
+ if ((final_size < 1) || (input_pts.size() == 0))
+ return;
+
+ // Choose randomly the first landmark
+ std::random_device rd;
+ std::mt19937 gen(rd());
+ std::uniform_int_distribution<> dis(0, (input_pts.size() - 1));
+ std::size_t starting_point = dis(gen);
+
+ choose_n_farthest_points(k, input_pts, final_size, starting_point, output_it);
+}
+
+} // namespace subsampling
+
+} // namespace Gudhi
+
+#endif // CHOOSE_N_FARTHEST_POINTS_H_
diff --git a/src/Subsampling/include/gudhi/pick_n_random_points.h b/src/Subsampling/include/gudhi/pick_n_random_points.h
new file mode 100644
index 00000000..f0e3f1f1
--- /dev/null
+++ b/src/Subsampling/include/gudhi/pick_n_random_points.h
@@ -0,0 +1,86 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Siargey Kachanovich
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef PICK_N_RANDOM_POINTS_H_
+#define PICK_N_RANDOM_POINTS_H_
+
+#include <gudhi/Clock.h>
+
+#include <boost/range/size.hpp>
+
+#include <cstddef>
+#include <random> // random_device, mt19937
+#include <algorithm> // shuffle
+#include <numeric> // iota
+#include <iterator>
+#include <vector>
+
+
+namespace Gudhi {
+
+namespace subsampling {
+
+/**
+ * \ingroup subsampling
+ * \brief Subsample a point set by picking points uniformly at random, without replacement.
+ *
+ * \details It chooses `final_size` distinct points from a random access range `points`
+ * and outputs them to the output iterator `output_it`.
+ * Point_container::iterator should be ValueSwappable and RandomAccessIterator.
+ */
+template <typename Point_container,
+typename OutputIterator>
+void pick_n_random_points(Point_container const &points,
+ std::size_t final_size,
+ OutputIterator output_it) {
+#ifdef GUDHI_SUBS_PROFILING
+ Gudhi::Clock t;
+#endif
+
+ std::size_t nbP = boost::size(points);
+ if (final_size > nbP)
+ final_size = nbP;
+
+ std::vector<int> landmarks(nbP);
+ std::iota(landmarks.begin(), landmarks.end(), 0);
+
+ std::random_device rd;
+ std::mt19937 g(rd());
+
+ std::shuffle(landmarks.begin(), landmarks.end(), g);
+ landmarks.resize(final_size);
+
+ for (int l : landmarks)
+ *output_it++ = points[l];
+
+#ifdef GUDHI_SUBS_PROFILING
+ t.end();
+ std::cerr << "Random landmark choice took " << t.num_seconds()
+ << " seconds." << std::endl;
+#endif
+}
+
+} // namespace subsampling
+
+} // namespace Gudhi
+
+#endif // PICK_N_RANDOM_POINTS_H_
diff --git a/src/Subsampling/include/gudhi/sparsify_point_set.h b/src/Subsampling/include/gudhi/sparsify_point_set.h
new file mode 100644
index 00000000..507f8c79
--- /dev/null
+++ b/src/Subsampling/include/gudhi/sparsify_point_set.h
@@ -0,0 +1,113 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Clement Jamin
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef SPARSIFY_POINT_SET_H_
+#define SPARSIFY_POINT_SET_H_
+
+#include <gudhi/Kd_tree_search.h>
+#ifdef GUDHI_SUBSAMPLING_PROFILING
+#include <gudhi/Clock.h>
+#endif
+
+#include <cstddef>
+#include <vector>
+
+namespace Gudhi {
+
+namespace subsampling {
+
+/**
+ * \ingroup subsampling
+ * \brief Outputs a subset of the input points so that the
+ * squared distance between any two points
+ * is greater than or equal to `min_squared_dist`.
+ *
+ * \tparam Kernel must be a model of the <a target="_blank"
+ * href="http://doc.cgal.org/latest/Spatial_searching/classSearchTraits.html">SearchTraits</a>
+ * concept, such as the <a target="_blank"
+ * href="http://doc.cgal.org/latest/Kernel_d/classCGAL_1_1Epick__d.html">CGAL::Epick_d</a> class, which
+ * can be static if you know the ambient dimension at compile-time, or dynamic if you don't.
+ * \tparam Point_range Range whose value type is Kernel::Point_d. It must provide random-access
+ * via `operator[]` and the points should be stored contiguously in memory.
+ * \tparam OutputIterator Output iterator whose value type is Kernel::Point_d.
+ *
+ * @param[in] k A kernel object.
+ * @param[in] input_pts Const reference to the input points.
+ * @param[in] min_squared_dist Minimum squared distance separating the output points.
+ * @param[out] output_it The output iterator.
+ */
+template <typename Kernel, typename Point_range, typename OutputIterator>
+void
+sparsify_point_set(
+ const Kernel &k, Point_range const& input_pts,
+ typename Kernel::FT min_squared_dist,
+ OutputIterator output_it) {
+ typedef typename Gudhi::spatial_searching::Kd_tree_search<
+ Kernel, Point_range> Points_ds;
+
+#ifdef GUDHI_SUBSAMPLING_PROFILING
+ Gudhi::Clock t;
+#endif
+
+ Points_ds points_ds(input_pts);
+
+ std::vector<bool> dropped_points(input_pts.size(), false);
+
+ // Parse the input points, and add them if they are not too close to
+ // the other points
+ std::size_t pt_idx = 0;
+ for (typename Point_range::const_iterator it_pt = input_pts.begin();
+ it_pt != input_pts.end();
+ ++it_pt, ++pt_idx) {
+ if (dropped_points[pt_idx])
+ continue;
+
+ *output_it++ = *it_pt;
+
+ auto ins_range = points_ds.query_incremental_nearest_neighbors(*it_pt);
+
+ // If another point Q is at squared distance less than min_squared_dist, mark Q to be dropped
+ for (auto const& neighbor : ins_range) {
+ std::size_t neighbor_point_idx = neighbor.first;
+ // If the neighbor is too close, we drop the neighbor
+ if (neighbor.second < min_squared_dist) {
+ // N.B.: If neighbor_point_idx < pt_idx,
+ // dropped_points[neighbor_point_idx] is already true but adding a
+ // test doesn't make things faster, so why bother?
+ dropped_points[neighbor_point_idx] = true;
+ } else {
+ break;
+ }
+ }
+ }
+
+#ifdef GUDHI_SUBSAMPLING_PROFILING
+ t.end();
+ std::cerr << "Point set sparsified in " << t.num_seconds()
+ << " seconds." << std::endl;
+#endif
+}
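+
+// Note that `min_squared_dist` is a *squared* distance. A minimal usage sketch
+// (illustrative only), keeping the output points at least 0.2 apart, hence
+// passing 0.2 * 0.2 = 0.04:
+//
+//   typedef CGAL::Epick_d<CGAL::Dimension_tag<4>> K;
+//   K k;
+//   std::vector<K::Point_d> sparse_points;
+//   Gudhi::subsampling::sparsify_point_set(k, points, 0.04,
+//                                          std::back_inserter(sparse_points));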
+
+} // namespace subsampling
+} // namespace Gudhi
+
+#endif // SPARSIFY_POINT_SET_H_
diff --git a/src/Subsampling/test/CMakeLists.txt b/src/Subsampling/test/CMakeLists.txt
new file mode 100644
index 00000000..5517fe9d
--- /dev/null
+++ b/src/Subsampling/test/CMakeLists.txt
@@ -0,0 +1,34 @@
+cmake_minimum_required(VERSION 2.6)
+project(Subsampling_tests)
+
+if (GCOVR_PATH)
+ # for gcovr to make coverage reports - Corbera Jenkins plugin
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fprofile-arcs -ftest-coverage")
+endif()
+if (GPROF_PATH)
+ # for gprof to make coverage reports - Jenkins
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -pg")
+endif()
+
+if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
+ add_executable( Subsampling_test_pick_n_random_points test_pick_n_random_points.cpp )
+ target_link_libraries(Subsampling_test_pick_n_random_points ${CGAL_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
+
+ add_executable( Subsampling_test_choose_n_farthest_points test_choose_n_farthest_points.cpp )
+ target_link_libraries(Subsampling_test_choose_n_farthest_points ${CGAL_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
+
+ add_executable(Subsampling_test_sparsify_point_set test_sparsify_point_set.cpp)
+ target_link_libraries(Subsampling_test_sparsify_point_set ${CGAL_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
+
+ add_test(Subsampling_test_pick_n_random_points ${CMAKE_CURRENT_BINARY_DIR}/Subsampling_test_pick_n_random_points
+ # XML format for Jenkins xUnit plugin
+ --log_format=XML --log_sink=${CMAKE_SOURCE_DIR}/Subsampling_test_pick_n_random_points_UT.xml --log_level=test_suite --report_level=no)
+
+ add_test(Subsampling_test_choose_n_farthest_points ${CMAKE_CURRENT_BINARY_DIR}/Subsampling_test_choose_n_farthest_points
+ # XML format for Jenkins xUnit plugin
+ --log_format=XML --log_sink=${CMAKE_SOURCE_DIR}/Subsampling_test_choose_n_farthest_points_UT.xml --log_level=test_suite --report_level=no)
+
+ add_test(Subsampling_test_sparsify_point_set ${CMAKE_CURRENT_BINARY_DIR}/Subsampling_test_sparsify_point_set
+ # XML format for Jenkins xUnit plugin
+ --log_format=XML --log_sink=${CMAKE_SOURCE_DIR}/Subsampling_test_sparsify_point_set_UT.xml --log_level=test_suite --report_level=no)
+endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
diff --git a/src/Subsampling/test/test_choose_n_farthest_points.cpp b/src/Subsampling/test/test_choose_n_farthest_points.cpp
new file mode 100644
index 00000000..0bc0dff4
--- /dev/null
+++ b/src/Subsampling/test/test_choose_n_farthest_points.cpp
@@ -0,0 +1,103 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Siargey Kachanovich
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+// #ifdef _DEBUG
+// # define TBB_USE_THREADING_TOOL
+// #endif
+
+#define BOOST_TEST_DYN_LINK
+#define BOOST_TEST_MODULE "witness_complex_points"
+#include <boost/test/unit_test.hpp>
+#include <boost/mpl/list.hpp>
+
+#include <gudhi/choose_n_farthest_points.h>
+#include <vector>
+#include <iterator>
+
+#include <CGAL/Epick_d.h>
+
+typedef CGAL::Epick_d<CGAL::Dynamic_dimension_tag> K;
+typedef typename K::FT FT;
+typedef typename K::Point_d Point_d;
+
+typedef boost::mpl::list<CGAL::Epick_d<CGAL::Dynamic_dimension_tag>, CGAL::Epick_d<CGAL::Dimension_tag<4>>> list_of_tested_kernels;
+
+BOOST_AUTO_TEST_CASE_TEMPLATE(test_choose_farthest_point, Kernel, list_of_tested_kernels) {
+ typedef typename Kernel::FT FT;
+ typedef typename Kernel::Point_d Point_d;
+ std::vector< Point_d > points, landmarks;
+ // Add grid points (625 points)
+ for (FT i = 0; i < 5; i += 1.0)
+ for (FT j = 0; j < 5; j += 1.0)
+ for (FT k = 0; k < 5; k += 1.0)
+ for (FT l = 0; l < 5; l += 1.0) {
+ std::vector<FT> point({i, j, k, l});
+ points.push_back(Point_d(point.begin(), point.end()));
+ }
+
+ landmarks.clear();
+ Kernel k;
+ Gudhi::subsampling::choose_n_farthest_points(k, points, 100, std::back_inserter(landmarks));
+
+ BOOST_CHECK(landmarks.size() == 100);
+ for (auto landmark : landmarks)
+ {
+ // Check all landmarks are in points
+ BOOST_CHECK(std::find (points.begin(), points.end(), landmark) != points.end());
+ }
+}
+
+BOOST_AUTO_TEST_CASE_TEMPLATE(test_choose_farthest_point_limits, Kernel, list_of_tested_kernels) {
+ typedef typename Kernel::FT FT;
+ typedef typename Kernel::Point_d Point_d;
+ std::vector< Point_d > points, landmarks;
+ landmarks.clear();
+ Kernel k;
+ // Choose -1 farthest points in an empty point cloud
+ Gudhi::subsampling::choose_n_farthest_points(k, points, -1, std::back_inserter(landmarks));
+ BOOST_CHECK(landmarks.size() == 0);
+ landmarks.clear();
+ // Choose 0 farthest points in an empty point cloud
+ Gudhi::subsampling::choose_n_farthest_points(k, points, 0, std::back_inserter(landmarks));
+ BOOST_CHECK(landmarks.size() == 0);
+ landmarks.clear();
+ // Choose 1 farthest point in an empty point cloud
+ Gudhi::subsampling::choose_n_farthest_points(k, points, 1, std::back_inserter(landmarks));
+ BOOST_CHECK(landmarks.size() == 0);
+ landmarks.clear();
+
+ std::vector<FT> point({0.0, 0.0, 0.0, 0.0});
+ points.push_back(Point_d(point.begin(), point.end()));
+ // Choose -1 farthest points in a one-point cloud
+ Gudhi::subsampling::choose_n_farthest_points(k, points, -1, std::back_inserter(landmarks));
+ BOOST_CHECK(landmarks.size() == 1);
+ landmarks.clear();
+ // Choose 0 farthest points in a one-point cloud
+ Gudhi::subsampling::choose_n_farthest_points(k, points, 0, std::back_inserter(landmarks));
+ BOOST_CHECK(landmarks.size() == 0);
+ landmarks.clear();
+ // Choose 1 farthest point in a one-point cloud
+ Gudhi::subsampling::choose_n_farthest_points(k, points, 1, std::back_inserter(landmarks));
+ BOOST_CHECK(landmarks.size() == 1);
+ landmarks.clear();
+
+}
diff --git a/src/Subsampling/test/test_pick_n_random_points.cpp b/src/Subsampling/test/test_pick_n_random_points.cpp
new file mode 100644
index 00000000..6c8dbea2
--- /dev/null
+++ b/src/Subsampling/test/test_pick_n_random_points.cpp
@@ -0,0 +1,69 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Siargey Kachanovich
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+// #ifdef _DEBUG
+// # define TBB_USE_THREADING_TOOL
+// #endif
+
+#define BOOST_TEST_DYN_LINK
+#define BOOST_TEST_MODULE Subsampling - test pick_n_random_points
+#include <boost/test/unit_test.hpp>
+
+#include <gudhi/pick_n_random_points.h>
+#include <vector>
+#include <iterator>
+
+#include <CGAL/Epick_d.h>
+
+
+BOOST_AUTO_TEST_CASE(test_pick_n_random_points)
+{
+ typedef CGAL::Epick_d<CGAL::Dynamic_dimension_tag> K;
+ typedef typename K::FT FT;
+ typedef typename K::Point_d Point_d;
+
+ std::vector<Point_d> vect;
+ vect.push_back(Point_d(std::vector<FT>({0,0,0,0})));
+ vect.push_back(Point_d(std::vector<FT>({0,0,0,1})));
+ vect.push_back(Point_d(std::vector<FT>({0,0,1,0})));
+ vect.push_back(Point_d(std::vector<FT>({0,0,1,1})));
+ vect.push_back(Point_d(std::vector<FT>({0,1,0,0})));
+ vect.push_back(Point_d(std::vector<FT>({0,1,0,1})));
+ vect.push_back(Point_d(std::vector<FT>({0,1,1,0})));
+ vect.push_back(Point_d(std::vector<FT>({0,1,1,1})));
+ vect.push_back(Point_d(std::vector<FT>({1,0,0,0})));
+ vect.push_back(Point_d(std::vector<FT>({1,0,0,1})));
+ vect.push_back(Point_d(std::vector<FT>({1,0,1,0})));
+ vect.push_back(Point_d(std::vector<FT>({1,0,1,1})));
+ vect.push_back(Point_d(std::vector<FT>({1,1,0,0})));
+ vect.push_back(Point_d(std::vector<FT>({1,1,0,1})));
+ vect.push_back(Point_d(std::vector<FT>({1,1,1,0})));
+ vect.push_back(Point_d(std::vector<FT>({1,1,1,1})));
+
+ std::vector<Point_d> results;
+ Gudhi::subsampling::pick_n_random_points(vect, 5, std::back_inserter(results));
+ std::cout << "landmark vector contains: ";
+ for (auto l: results)
+ std::cout << l << "\n";
+
+ BOOST_CHECK(results.size() == 5);
+}
diff --git a/src/Subsampling/test/test_sparsify_point_set.cpp b/src/Subsampling/test/test_sparsify_point_set.cpp
new file mode 100644
index 00000000..f993d6d6
--- /dev/null
+++ b/src/Subsampling/test/test_sparsify_point_set.cpp
@@ -0,0 +1,55 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Clement Jamin
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#define BOOST_TEST_DYN_LINK
+#define BOOST_TEST_MODULE Subsampling - test sparsify_point_set
+#include <boost/test/unit_test.hpp>
+
+#include <gudhi/sparsify_point_set.h>
+
+#include <CGAL/Epick_d.h>
+#include <CGAL/Random.h>
+
+#include <vector>
+#include <iterator>
+
+BOOST_AUTO_TEST_CASE(test_sparsify_point_set)
+{
+ typedef CGAL::Epick_d<CGAL::Dimension_tag<4> > K;
+ typedef typename K::Point_d Point_d;
+
+ CGAL::Random rd;
+
+ std::vector<Point_d> points;
+ for (int i = 0 ; i < 500 ; ++i)
+ points.push_back(Point_d(rd.get_double(-1.,1),rd.get_double(-1.,1),rd.get_double(-1.,1),rd.get_double(-1.,1)));
+
+ K k;
+ std::vector<Point_d> results;
+ Gudhi::subsampling::sparsify_point_set(k, points, 0.5, std::back_inserter(results));
+ std::cout << "Before sparsification: " << points.size() << " points.\n";
+ std::cout << "After sparsification: " << results.size() << " points.\n";
+ //for (auto p : results)
+ // std::cout << p << "\n";
+
+ BOOST_CHECK(points.size() > results.size());
+}
diff --git a/src/Tangential_complex/benchmark/CMakeLists.txt b/src/Tangential_complex/benchmark/CMakeLists.txt
new file mode 100644
index 00000000..ef772be8
--- /dev/null
+++ b/src/Tangential_complex/benchmark/CMakeLists.txt
@@ -0,0 +1,11 @@
+cmake_minimum_required(VERSION 2.6)
+project(Tangential_complex_benchmark)
+
+if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
+ add_executable(Tangential_complex_benchmark benchmark_tc.cpp)
+ target_link_libraries(Tangential_complex_benchmark
+ ${Boost_DATE_TIME_LIBRARY} ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY})
+ if (TBB_FOUND)
+ target_link_libraries(Tangential_complex_benchmark ${TBB_LIBRARIES})
+ endif(TBB_FOUND)
+endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
diff --git a/src/Tangential_complex/benchmark/RIB_exporter.h b/src/Tangential_complex/benchmark/RIB_exporter.h
new file mode 100644
index 00000000..73c14041
--- /dev/null
+++ b/src/Tangential_complex/benchmark/RIB_exporter.h
@@ -0,0 +1,269 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Clement Jamin
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef GUDHI_TC_RIB_EXPORTER_H
+#define GUDHI_TC_RIB_EXPORTER_H
+
+#include <gudhi/Tangential_complex/utilities.h>
+
+#include <tuple>
+#include <string>
+
+template <typename PointRandomAccessRange, typename SimplexRange>
+class RIB_exporter {
+ typedef typename PointRandomAccessRange::value_type Point;
+ typedef typename SimplexRange::value_type Simplex;
+ public:
+
+ typedef std::tuple<double, double, double, double> Color; // RGBA
+ typedef std::tuple<int, int, int> Coords_choice;
+
+ // Constructor
+ RIB_exporter(
+ PointRandomAccessRange const& points,
+ SimplexRange const& simplices,
+ std::ofstream &out,
+ std::string const& rendered_image_filename = "export.tif",
+ bool is_preview = false, // low-quality
+ Coords_choice coords_choice = std::make_tuple(0, 1, 2),
+ int image_width = 1920,
+ int image_height = 1080,
+ Color const& triangle_color = std::make_tuple(1., 1., 1., 1.),
+ bool ambient_light = true,
+ double ambient_intensity = 0.3,
+ bool shadow = true,
+ double shadow_intensity = 0.85,
+ double point_sphere_radius = 0.003)
+ : m_points(points),
+ m_simplices(simplices),
+ m_out(out),
+ m_rendered_image_filename(rendered_image_filename),
+ m_is_preview(is_preview),
+ m_coords_choice(coords_choice),
+ m_image_width(image_width),
+ m_image_height(image_height),
+ m_current_color(0., 0., 0., 0.),
+ m_current_alpha(1),
+ m_triangle_color(triangle_color),
+ m_ambient_light(ambient_light),
+ m_ambient_intensity(ambient_intensity),
+ m_shadow(shadow),
+ m_shadow_intensity(shadow_intensity),
+ m_point_sphere_radius(point_sphere_radius) {
+ m_out.precision(8);
+ }
+
+ void write_file() {
+ write_header();
+ write_lights();
+ /*if (m_point_sphere_radius != 0.)
+ write_point_spheres();*/
+ write_triangles();
+
+ m_out << "WorldEnd\n";
+ }
+
+ private:
+
+ void write_header() {
+ m_out << "Option \"searchpath\" \"shader\" "
+ "\".:./shaders:%PIXIE_SHADERS%:%PIXIEHOME%/shaders\"\n";
+
+ if (m_is_preview) {
+ m_out << "Attribute \"visibility\" \"specular\" 1\n"
+ << "Attribute \"visibility\" \"transmission\" 1\n\n";
+ }
+
+ m_out << "Display \"" << m_rendered_image_filename << "\" \"file\" \"rgb\"\n";
+
+ if (!m_is_preview) {
+ m_out << "Format " << m_image_width << " " << m_image_height << " 1\n";
+ } else {
+ double ratio = double(m_image_height) / double(m_image_width);
+
+ int width = (ratio < 1.) ? 300 : int(300. / ratio);
+ int height = (ratio < 1.) ? int(ratio * 300.) : 300;
+
+ m_out << "Format " << width << " " << height << " 1\n";
+ }
+
+
+ if (m_image_width > m_image_height) {
+ double ratio = double(m_image_height) / double(m_image_width);
+ m_out << "ScreenWindow -1 1 " << -ratio << " " << ratio << "\n";
+ } else if (m_image_height > m_image_width) {
+ double ratio = double(m_image_width) / double(m_image_height);
+ m_out << "ScreenWindow " << -ratio << " " << ratio << " -1 1\n";
+ }
+
+ m_out << "Projection \"perspective\" \"fov\" 45\n"
+ << "Translate 0 0 3\n"
+ << "PixelSamples 4 4\n"
+ << "PixelFilter \"catmull-rom\" 3 3\n"
+ << "ShadingInterpolation \"smooth\"\n"
+ << "Rotate -10 20 0 1\n"
+ << "WorldBegin\n";
+ }
+
+ void write_lights() {
+ if (!m_is_preview) {
+ // ShadowLight
+ m_out << "LightSource \"shadowdistant\" 1 \"from\" [0 0 0] \"to\" [0 0 1]"
+ << " \"shadowname\" \"raytrace\" \"intensity\" "
+ << m_shadow_intensity << "\n";
+
+ // Ambient light
+ m_out << "LightSource \"ambientlight\" 2 \"intensity\" "
+ << m_ambient_intensity << "\n";
+ } else {
+ m_out << "LightSource \"distantLight\" 1 \"from\" [0 0 0] \"to\" [0 0 1]"
+ << " \"intensity\" " << m_shadow_intensity << "\n";
+
+ // Ambient light
+ m_out << "LightSource \"ambientlight\" 2 \"intensity\" "
+ << m_ambient_intensity << "\n";
+ }
+
+ // Background light
+ m_out << "LightSource \"ambientlight\" 99 \"intensity\" 1\n";
+
+ // Turn background light OFF
+ turn_background_light(false);
+ }
+
+ void turn_background_light(bool turn_on) {
+ if (!turn_on) {
+ m_out << "Illuminate 1 1" << std::endl;
+ if (!m_is_preview)
+ m_out << "Illuminate 2 1" << std::endl;
+ m_out << "Illuminate 99 0" << std::endl;
+ } else {
+ m_out << "Illuminate 1 0" << std::endl;
+ if (!m_is_preview)
+ m_out << "Illuminate 2 0" << std::endl;
+ m_out << "Illuminate 99 1" << std::endl;
+ }
+ }
+
+ void write_color(Color const& color, bool use_transparency) {
+ if (m_current_color == color)
+ return;
+
+ m_current_color = color;
+
+ // Write opacity data
+ if (use_transparency)
+ write_opacity(std::get<3>(color));
+
+ // Write color data
+ m_out << "Color [ " << std::get<0>(color) << " " << std::get<1>(color)
+ << " " << std::get<2>(color) << " ]\n";
+ }
+
+ void write_opacity(const double alpha) {
+ if (m_current_alpha == alpha)
+ return;
+
+ m_current_alpha = alpha;
+
+ // Write opacity data
+ m_out << "Opacity " << alpha << " " << alpha << " " << alpha << std::endl;
+ }
+
+ void write_point(Point const& p) {
+ m_out << " " << p[std::get<0>(m_coords_choice)]
+ << " " << p[std::get<1>(m_coords_choice)]
+ << " " << p[std::get<2>(m_coords_choice)] << " ";
+ }
+
+ void write_triangles() {
+ m_out << "Surface \"plastic\" \"Ka\" 0.65 \"Kd\" 0.85 \"Ks\" 0.25 \"roughness\" 0.1" << std::endl;
+
+ for (auto const& simplex : m_simplices) {
+ std::vector<Simplex> triangles;
+ // Get the triangles composing the simplex
+ combinations(simplex, 3, std::back_inserter(triangles));
+ for (auto const& t : triangles)
+ write_triangle(t);
+ }
+ }
+
+ template <typename PointIndexRange>
+ void write_triangle(PointIndexRange const& t) {
+ // Color
+ write_color(m_triangle_color, true);
+
+ // Triangle
+ m_out << "Polygon \"P\" [";
+ for (auto idx : t)
+ write_point(m_points[idx]);
+ m_out << "]" << std::endl;
+
+ // Edges (will be drawn later on)
+ /*add_edge(p, q, edge_color);
+ add_edge(p, r, edge_color);
+ add_edge(q, r, edge_color);
+
+ // Vertices (will be drawn later on)
+ add_vertex(p, edge_color);
+ add_vertex(q, edge_color);
+ add_vertex(r, edge_color);*/
+ }
+
+ void write_point_sphere(Point const& p) {
+ if (m_point_sphere_radius == 0.)
+ return;
+
+ m_out << "Translate " << p[0] << " " << p[1] << " " << p[2] << std::endl;
+ // Sphere radius zmin zmax thetamax
+ m_out << "Sphere " << m_point_sphere_radius << " " << -m_point_sphere_radius
+ << " " << m_point_sphere_radius << " 360" << std::endl;
+ m_out << "Identity" << std::endl;
+ }
+
+ void write_point_spheres() {
+ write_color(std::make_tuple(0.7, 0.7, 0.7, 0.5), true);
+ for (auto const& p : m_points)
+ write_point_sphere(p);
+ }
+
+ //===========================================================================
+
+ PointRandomAccessRange const& m_points;
+ SimplexRange const& m_simplices;
+ std::ofstream &m_out;
+ std::string m_rendered_image_filename;
+ bool m_is_preview;
+ Coords_choice m_coords_choice;
+ int m_image_width;
+ int m_image_height;
+ Color m_current_color;
+ Color m_triangle_color;
+ double m_current_alpha;
+ bool m_ambient_light;
+ double m_ambient_intensity;
+ bool m_shadow;
+ double m_shadow_intensity;
+ double m_point_sphere_radius;
+};
+
+#endif // GUDHI_TC_RIB_EXPORTER_H
diff --git a/src/Tangential_complex/benchmark/XML_exporter.h b/src/Tangential_complex/benchmark/XML_exporter.h
new file mode 100644
index 00000000..ed44f90a
--- /dev/null
+++ b/src/Tangential_complex/benchmark/XML_exporter.h
@@ -0,0 +1,207 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Clement Jamin
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <string>
+#include <vector>
+#include <iostream>
+#include <fstream>
+#include <ctime>
+
+template<typename value_type = std::string>
+class Simple_XML_exporter {
+ public:
+ typedef value_type Value_type;
+ typedef std::vector<value_type> Element;
+ typedef std::map<std::string, value_type> Element_with_map;
+ typedef std::vector<Element> List_of_elements;
+
+ Simple_XML_exporter(
+ const std::string &list_name,
+ const std::string &element_name,
+ const std::vector<std::string> &subelement_names,
+ bool add_timestamp = true)
+ : m_list_name(list_name),
+ m_element_name(element_name),
+ m_subelement_names(subelement_names),
+ m_add_timestamp(add_timestamp) { }
+
+ bool add_element(const Element &element) {
+ if (element.size() == m_subelement_names.size()) {
+ m_list_of_elements.push_back(element);
+ return true;
+ } else {
+ std::cerr << "ERROR: element.size() == m_subelement_names.size()" << std::endl;
+ return false;
+ }
+ }
+
+ bool add_element(Element_with_map &element) {
+ Element elt;
+
+ std::vector<std::string>::const_iterator
+ it_subelement_name = m_subelement_names.begin();
+ std::vector<std::string>::const_iterator
+ it_subelement_name_end = m_subelement_names.end();
+ for (; it_subelement_name != it_subelement_name_end; ++it_subelement_name) {
+ elt.push_back(element[*it_subelement_name]);
+ }
+
+ return add_element(elt);
+ }
+
+ bool export_to_xml(const std::string &filename) const {
+ std::ofstream xmlfile;
+ xmlfile.open(filename.c_str());
+ xmlfile << "<?xml version='1.0'?>" << std::endl;
+ xmlfile << "<" << m_list_name << ">" << std::endl;
+
+ typename List_of_elements::const_iterator it_element = m_list_of_elements.begin();
+ typename List_of_elements::const_iterator it_element_end = m_list_of_elements.end();
+ for (int id = 1; it_element != it_element_end; ++it_element, ++id) {
+ xmlfile << " <" << m_element_name << ">" << std::endl;
+ std::vector<std::string>::const_iterator
+ it_subelement_name = m_subelement_names.begin();
+ std::vector<std::string>::const_iterator
+ it_subelement_name_end = m_subelement_names.end();
+
+ if (m_add_timestamp)
+ xmlfile << " <id> " << time(NULL) << " </id>" << std::endl;
+
+ for (int i = 0;
+ it_subelement_name != it_subelement_name_end;
+ ++it_subelement_name, ++i) {
+ xmlfile
+ << " <" << *it_subelement_name << "> "
+ << (*it_element)[i]
+ << " </" << *it_subelement_name << ">" << std::endl;
+ }
+ xmlfile << " </" << m_element_name << ">" << std::endl;
+ }
+
+ xmlfile << "</" << m_list_name << ">" << std::endl;
+ xmlfile.close();
+ return true;
+
+ }
+
+ protected:
+ std::string m_list_name;
+ std::string m_element_name;
+ std::vector<std::string> m_subelement_names;
+ List_of_elements m_list_of_elements;
+ bool m_add_timestamp;
+};
+
+template<typename value_type = std::string>
+class Streaming_XML_exporter {
+ public:
+ typedef value_type Value_type;
+ typedef std::vector<value_type> Element;
+ typedef std::map<std::string, value_type> Element_with_map;
+ typedef std::vector<Element> List_of_elements;
+
+ Streaming_XML_exporter(
+ const std::string &filename,
+ const std::string &list_name,
+ const std::string &element_name,
+ const std::vector<std::string> &subelement_names,
+ bool add_timestamp = true)
+ : m_list_name(list_name),
+ m_element_name(element_name),
+ m_subelement_names(subelement_names),
+ m_add_timestamp(add_timestamp) {
+ m_xml_fstream.open(filename.c_str());
+ if (m_xml_fstream.good()) {
+ m_xml_fstream << "<?xml version='1.0'?>" << std::endl;
+ m_xml_fstream << "<" << m_list_name << ">" << std::endl;
+ } else {
+ std::cerr << "Could not open file '" << filename << "'." << std::endl;
+ }
+ }
+
+ virtual ~Streaming_XML_exporter() {
+ close_file();
+ }
+
+ void close_file() {
+ m_xml_fstream.close();
+ }
+
+ bool add_element(const Element &element) {
+ if (element.size() == m_subelement_names.size()) {
+ m_xml_fstream << " <" << m_element_name << ">" << std::endl;
+ std::vector<std::string>::const_iterator
+ it_subelement_name = m_subelement_names.begin();
+ std::vector<std::string>::const_iterator
+ it_subelement_name_end = m_subelement_names.end();
+
+ if (m_add_timestamp) {
+ m_xml_fstream << " <id> " << time(NULL) << " </id>" << std::endl;
+ }
+
+ for (int i = 0;
+ it_subelement_name != it_subelement_name_end;
+ ++it_subelement_name, ++i) {
+ m_xml_fstream
+ << " <" << *it_subelement_name << "> "
+ << element[i]
+ << " </" << *it_subelement_name << ">" << std::endl;
+ }
+ m_xml_fstream << " </" << m_element_name << ">" << std::endl;
+
+ // Save current pointer position
+ std::ofstream::streampos pos = m_xml_fstream.tellp();
+ // Close the XML file (temporarily) so that the XML file is always correct
+ m_xml_fstream << "</" << m_list_name << ">" << std::endl;
+ // Restore the pointer position so that the next "add_element" will overwrite
+ // the end of the file
+ m_xml_fstream.seekp(pos);
+
+ m_xml_fstream.flush();
+ return true;
+ } else {
+ std::cerr << "ERROR: element.size() == m_subelement_names.size()" << std::endl;
+ return false;
+ }
+ }
+
+ bool add_element(Element_with_map &element) {
+ Element elt;
+
+ std::vector<std::string>::const_iterator
+ it_subelement_name = m_subelement_names.begin();
+ std::vector<std::string>::const_iterator
+ it_subelement_name_end = m_subelement_names.end();
+ for (; it_subelement_name != it_subelement_name_end; ++it_subelement_name) {
+ elt.push_back(element[*it_subelement_name]);
+ }
+
+ return add_element(elt);
+ }
+
+ protected:
+ std::ofstream m_xml_fstream;
+ std::string m_list_name;
+ std::string m_element_name;
+ std::vector<std::string> m_subelement_names;
+ bool m_add_timestamp;
+};
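+
+// Illustrative usage sketch (assumed, not taken from the benchmark itself;
+// the file name, list/element names and field values are placeholders):
+//
+//   std::vector<std::string> fields = {"Input", "Time"};
+//   Streaming_XML_exporter<std::string> xml("log.xml", "Runs", "Run", fields);
+//   xml.add_element({"torus", "0.42"});  // written and flushed right away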
diff --git a/src/Tangential_complex/benchmark/benchmark_script.txt b/src/Tangential_complex/benchmark/benchmark_script.txt
new file mode 100644
index 00000000..f4ddaac3
--- /dev/null
+++ b/src/Tangential_complex/benchmark/benchmark_script.txt
@@ -0,0 +1,221 @@
+#---------------------------------------------------------------------------------------------------------------------------------------------------------
+# Input PARAM1 PARAM2 PARAM3 NUM_P AMB INTR SPARSITY MAX_PERTURB PERTURB ADD_HDIM COLLAPSE FIX_TIME_LIMIT NUM_ITERATIONS
+#---------------------------------------------------------------------------------------------------------------------------------------------------------
+
+#---------------------------------------------------------------- Alpha TC tests ------------------------------------------------------------------------
+#generate_sphere_d 1 0 - 8 2 1 0.01 0.005 N Y N 3 1 #No noise => OK: 6 2d with a perturb sometimes
+#generate_sphere_d 1 0 - 50 2 1 0.01 0.005 N Y N 3 1 #No noise => OK: 49 1d
+#generate_sphere_d 1 1 - 50 2 1 0.01 0.005 N Y N 3 1 #Noise => OK: 45 2d + 3 3d
+#generate_torus_d N - - 15 2 1 0.01 0.05 N Y N 10 1
+#generate_sphere_d 0.302 0 - 8 3 2 0.01 0.005 N Y N 60 1 #No noise => OK: 7 3d with a perturb sometimes
+#generate_sphere_d 0.302 0 - 50 3 2 0.01 0.005 N Y N 60 1 #No noise => no inconsitencies
+#generate_sphere_d 0.302 3 - 50 3 2 0.01 0.005 N Y N 60 1 #Noise => OK: 90 2d + 3 3d
+#generate_sphere_d 1 1 - 500 4 3 0.01 0.005 N Y N 60 1 #Noise 1% => OK: 3113 3d + 35 4d
+#generate_sphere_d 1 2 - 500 4 3 0.01 0.005 N Y N 60 1 #Noise 2% => OK: 2969 3d + 91 4d
+#generate_sphere_d 1 2 - 5000 4 3 0.01 0.005 N Y N 60 1 #Noise 2% => OK: 27905 3d + 2485 4d
+#generate_sphere_d 0.302 2 - 300 2 1 0.01 0.005 N Y N 60 1
+#generate_torus_3D 2 1 N 200 3 2 0.01 0.05 N Y N 600 1 #OK: 1048 3d ~170s
+#generate_torus_3D 2 1 N 2000 3 2 0.01 0.05 N Y N 600 1 #OK: 3545 2d + 27 3d ~35s
+#generate_torus_d N 1 - 50 4 2 0.01 0.05 N Y N 3 1 #OK: 431 4d
+#generate_torus_d N 1 - 500 4 2 0.01 0.05 N Y N 3 1 #OK: 881 2d + 37 3d
+#generate_torus_d Y 1 - 250 4 2 0.01 0.05 N Y N 3 1 #OK: 80 d2 + 185 d3
+#generate_torus_d N - - 50 6 3 0.01 0.05 Y Y N 10 1 #
+#generate_torus_d Y - - 700 6 3 0.01 0.05 Y Y N 100 1 #Grid
+#generate_torus_d N - - 10000 6 3 0.01 0.05 Y Y N 30000 1
+#generate_moment_curve 0 1 - 10 3 1 0.01 0.005 N Y N 60 1
+#generate_two_spheres_d 3 4 - 500 3 2 0.01 0.05 N Y N 10 1 #OK: 320 2d + 1167 3d
+#generate_klein_bottle_4D 40 15 - 500 4 2 0.01 0.2 N Y N 60 1 #OK: 901 d2 + 50 d3 + 1 d4
+#data/SO3_10000.xyz - - - 0 9 3 0.01 0.05 Y Y N 300 1 #Too long. Be careful with the memory consumption!
+#data/buddha_100kv.xyz - - - 0 3 2 0.01 0.005 Y Y N 120 1 #Too long...
+#data/fandisk.xyz - - - 0 3 2 0.01 0.005 Y Y N 5 1 #NOT OK: Tq & V do not intersect
+
+#---------------------------------------------------------- Spatial search benchmarking --------------------------------------------------------------
+#generate_torus_3D 2 1 Y 10000 3 2 0 0 Y N N 600 1
+#data/buddha_100kv.xyz - - - 0 3 2 0 0 N Y N 120 1
+#generate_torus_d N - - 10000 30 15 0 0 Y N N 3600 1
+#generate_torus_d N - - 100000 12 6 0 0 Y N N 3600 1
+#data/SO3_50000.xyz - - - 0 9 3 0 0 Y N N 60 1
+#data/Cy8.xyz - - - 0 24 2 0 0 N Y N 60 1
+#generate_sphere_d 0.5 - - 10000 2 1 0 0 N N Y 60 1
+#generate_sphere_d 0.5 - - 10000 3 2 0 0 N N Y 60 1
+#generate_sphere_d 0.5 - - 10000 4 3 0 0 N N Y 60 1
+#generate_sphere_d 0.5 - - 10000 5 4 0 0 N N Y 60 1
+#generate_sphere_d 0.5 - - 10000 6 5 0 0 N N Y 60 1
+#generate_sphere_d 0.5 - - 10000 7 6 0 0 N N Y 60 1
+
+#---------------------------------------------------------- Very small cases for Debug mode --------------------------------------------------------------
+#generate_sphere_d 4 - - 20 3 2 0.05 0.025 Y N N 60 1
+generate_sphere_d 3 10 - 70 3 2 0.05 0.025 Y N N 60 1
+#generate_sphere_d 3 - - 1000 3 2 0.05 0.025 Y N N 60 1
+#generate_sphere_d 3 - - 10 4 3 0.05 0.025 Y N N 60 1
+#generate_sphere_d 3 - - 70 5 4 0.05 0.025 Y N N 60 1
+#generate_klein_bottle_4D 4 3 - 70 4 2 0.05 0.025 Y N N 3 1
+#generate_klein_bottle_variant_5D 4 3 - 70 5 2 0.05 0.025 Y N N 3 1
+#data/SO3_10000.xyz - - - 0 9 3 0.7 0.35 Y N N 60 1
+#generate_moment_curve 0 1 - 30 3 1 0.005 0.0025 Y N N 60 1
+
+#------------------------------------------------------------------ From files --------------------------------------------------------------------------
+#data/SO3_50000.xyz - - - 0 9 3 0.05 0.05 Y N N 6000 1
+#data/SO3_10000.xyz - - - 0 9 3 0.1 0.1 Y N N 60000 1
+#data/cube3D_eps_0.1.xyz - - - 0 3 2 0.05 0.05 Y N N 3000 1
+#data/cube4D_eps_0.1.xyz - - - 0 4 3 0.05 0.05 N Y N 3000 1
+#data/cube5D_eps_0.1.xyz - - - 0 5 4 0.05 0.05 N Y N 3000 1
+#data/Cy8.xyz - - - 0 24 2 0.1 0.1 N Y N 60 1
+#data/Kl.xyz - - - 0 5 2 0.05 0.05 N Y N 60 1
+#data/S3.xyz - - - 0 4 3 0.05 0.05 N Y N 60 1
+
+#data/Alvarez_variete_k2_D4_29700p.xyz - - - 0 4 2 0.01 0.01 Y N N 60 1 # points on a "grid"
+#data/Alvarez_variete_k2_D4_10k_1x1_v2.xyz - - - 0 4 2 0.001 0.001 Y N N 200 1
+#data/Alvarez_variete_k2_D4_30k_1x1_v2.xyz - - - 0 4 2 0.001 0.001 Y N N 6000 1
+#data/Alvarez_variete_k2_D4_120k_2x2_denser_in_1x1.xyz - - - 0 4 2 0.002 0.002 Y N N 60000 1
+#data/Alvarez_variete_k2_D4_300k_2x2.xyz - - - 0 4 2 0.005 0.005 Y N N 100000 1
+#data/Alvarez_variete_k2_D4_300k_2x2.xyz - - - 0 4 2 0.05 0.05 Y N N 50000 1 # heavy sparsification (e.g. 0.05 => 33k points)
+#data/Alvarez_variete_k2_D4_90k_2x2.xyz - - - 0 4 2 0.003 0.003 Y N N 6000 1
+#data/Alvarez_variete_k2_D4_30k_10x10.xyz - - - 0 4 2 0.01 0.01 Y N N 60 1
+#data/Alvarez_variete_k2_D4_60k_10x10.xyz - - - 0 4 2 0.01 0.01 Y N N 1800 1
+
+#data/Alvarez_variete_k2_D8_9003p.xyz - - - 0 8 2 0.001 0.001 Y N N 60 1
+#data/Alvarez_variete_k2_D8_90K.xyz - - - 0 8 2 0.001 0.001 Y N N 60 1
+#data/Alvarez_variete_k2_D8_300k_10x10.xyz - - - 0 8 2 0.01 0.01 Y N N 60 1 # heavy sparsification
+#data/Alvarez_variete_k2_D8_900k_2x2.xyz - - - 0 8 2 0.01 0.01 Y N N 60 1 # heavy sparsification
+#data/Alvarez_variete_k2_D8_900k_10x10.xyz - - - 0 8 2 0.02 0.02 Y N N 60 1 # heavy sparsification
+
+#data/Alvarez_courbeElliptique_k2_D8_200K_2x2.xyz - - - 0 8 2 0.006 0.006 Y N N 60 1
+
+#data/Alvarez_surface_deg2_k2_D8_6000K_10x10.xyz - - - 0 8 2 0.01 0.01 Y N N 60 1
+#data/Alvarez_surface_deg3_k2_D8_902K.xyz - - - 0 8 2 0.003 0.003 Y N N 3600 1
+#data/Alvarez_surface_deg4_k2_D8_382K.xyz - - - 0 8 2 0.01 0.01 Y N N 60 1
+#data/Alvarez_surface_deg5_k2_D8_112K.xyz - - - 0 8 2 0.001 0.001 Y N N 240 1
+#data/Alvarez_surface_deg6_k2_D8_67K.xyz - - - 0 8 2 0.015 0.015 Y N N 60 1
+#data/Alvarez_surface_deg7_k2_D8_48K.xyz - - - 0 8 2 0.01 0.01 Y N N 60 1
+#data/Alvarez_surface_deg8_k2_D8_41K.xyz - - - 0 8 2 0.025 0.025 Y N N 60 1
+#data/Alvarez_surface_deg9_k2_D8_42K.xyz - - - 0 8 2 0.01 0.01 Y N N 60 1
+#data/Alvarez_surface_deg10_k2_D8_41K.xyz - - - 0 8 2 0.01 0.01 Y N N 60 1
+
+#data/Alvarez_surface_deg8_k2_D8_41K.xyz - - - 0 8 2 0.02 0.02 Y N N 600 1
+#data/sparsified/Alvarez_deg8_k2_D8_32K_sparsified_from_41K_0.01.xyz - - - 0 8 2 0.05 0.05 Y N N 600 1
+
+# NO REAL DIFFERENCE
+#data/Alvarez_surface_deg3_k2_D8_902K.xyz - - - 0 8 2 0.003 0.007 Y N N 3600 1
+#data/Alvarez_surface_deg3_k2_D8_902K.xyz - - - 0 8 2 0.014 0.007 Y N N 3600 1
+
+# NO REAL DIFFERENCE
+#data/Alvarez_surface_deg7_k2_D8_48K.xyz - - - 0 8 2 0.01 0.005 Y N N 120 1
+#data/Alvarez_surface_deg7_k2_D8_48K.xyz - - - 0 8 2 0.02 0.005 Y N N 120 1
+
+# NO REAL DIFFERENCE
+#data/Alvarez_surface_deg3_k2_D8_902K.xyz - - - 0 8 2 0.001 0.01 Y N N 3600 1
+#data/Alvarez_surface_deg3_k2_D8_902K.xyz - - - 0 8 2 0.02 0.01 Y N N 3600 1
+#data/sparsified/Alvarez_deg3_k2_D8_534k_sparsified_from_902K_0.001.xyz - - - 0 8 2 0.01 0.01 Y N N 3600 1
+
+# NOT VERY CLEAR, BUT A DIFFERENCE IN THE NUMBER OF STEPS (>100 vs 15-20):
+#data/sparsified/Alvarez_deg8_k2_D8_38K_sparsified_from_41K_0.005.xyz - - - 0 8 2 0.02 0.02 Y N N 600 1
+#data/Alvarez_surface_deg8_k2_D8_41K.xyz - - - 0 8 2 0.001 0.02 Y N N 60 1
+#data/Alvarez_surface_deg8_k2_D8_41K.xyz - - - 0 8 2 0.025 0.02 Y N N 60 1
+
+# With pre-computed tangent spaces
+#data/test.pwt - - - 0 4 2 0.01 0.01 N N N 500000 1
+#data/Alvarez_variete_k2_D4_30000p.xyz - - - 0 4 2 0.01 0.01 Y N N 500000 1
+#data/Alvarez_variete_k2_D4_30000p_with_TSB.pwt - - - 0 4 2 0.01 0.01 Y N N 500000 1
+
+#---------------------------------------------------------------------- 3D meshes -----------------------------------------------------------------------
+#data/buddha_100kv.xyz - - - 0 3 2 0.005 0.005 Y N N 3 1
+#data/fandisk.xyz - - - 0 3 2 0.01 0.01 Y N N 3 1
+#data/fertility.xyz - - - 0 3 2 0.4 0.4 Y N N 3 1
+#data/bunny.xyz - - - 0 3 2 0.0006 0.0003 Y N N 3000 1
+#data/blob.xyz - - - 0 3 2 0.01 0.01 Y N N 3 1
+#data/3holes.xyz - - - 0 3 2 0.01 0.01 Y N N 3 1
+#data/785_hand_2500v.xyz - - - 0 3 2 0.01 0.01 Y N N 3 1
+#data/785_hand_50kv.xyz - - - 0 3 2 0.01 0.01 Y N N 3 1
+#data/bumpy_sphere.xyz - - - 0 3 2 0.01 0.01 Y N N 3 1
+#D:\INRIA\Data\_Models\Pointclouds\ajax_jotero.xyz - - - 0 3 2 0.01 0.01 Y N N 3 1
+#D:\INRIA\Data\_Models\Pointclouds\house.xyz - - - 0 3 2 0.01 0.01 Y N N 3 1
+#D:\INRIA\Data\_Models\Pointclouds\lucy_14M.xyz - - - 0 3 2 0.6 0.3 Y N N 3 1
+
+#----------------------------------------------------------- Generated point sets -----------------------------------------------------------------------
+#generate_sphere_d 3 - - 4 3 2 0.05 0.05 Y N N 3000 1
+#generate_sphere_d 3 - - 30000 2 1 0.005 0.005 Y N N 3000 1
+#generate_sphere_d 1 - - 500000 3 2 0.005 0.005 Y N N 3000 1
+#generate_sphere_d 3 - - 30000 4 3 0.05 0.05 Y N N 3000 1
+#generate_sphere_d 3 0 - 300 3 2 0.005 0.005 Y N N 60 1
+#generate_sphere_d 3 4 - 3000 3 2 0.005 0.005 Y N N 60 1
+#generate_sphere_d 3 7 - 3000 3 2 0.005 0.005 Y N N 60 1
+#generate_torus_3D 2 1 N 300 3 2 0.05 0.05 Y N N 600 1
+#generate_torus_d N - - 200 4 2 0.05 0.05 Y N N 600 1
+
+#generate_torus_d Y - - 100 6 3 0.1 0.19 Y N N 600 1
+#generate_torus_d Y - - 1000 6 3 0. 0.19 Y N N 600 1
+#generate_torus_d Y - - 10000 6 3 0. 0.19 Y N N 600 1
+#generate_torus_d Y - - 100000 6 3 0. 0.19 Y N N 600 1
+#generate_plane - - - 30000 3 2 0.005 0.005 Y N N 3000 1
+#generate_moment_curve 0 1 - 30000 6 1 0.005 0.005 Y N N 60 1
+#generate_klein_bottle_4D 4 3 - 700 4 2 0.05 0.05 Y N N 500 20
+#generate_klein_bottle_variant_5D 4 3 - 30000 5 2 0.05 0.05 Y N N 600 1
+#generate_klein_bottle_4D 8 5 - 5000 4 2 0.2 0.2 Y N N 60 1 #Takes forever
+#data/sparsified/Flat_torus_195p_sparsified_0.05_from_200p.xyz N - - 0 4 2 -1 0.2 Y N N 600 1
+
+#----------------------------------------------------------- Performance testing ------------------------------------------------------------------------
+# TC: 5.55 / 1st fix step : 0.2
+#data/fertility.xyz - - - 0 3 2 0.1 0.1 Y N N 10 1
+
+#---------------------------------------------------------- 04/04/2016 - for stats ----------------------------------------------------------
+
+#generate_torus_3D 2 1 N 5000 3 2 0.05 0.05 Y N N 120 1
+#generate_torus_d N - - 500 4 2 0.05 0.05 Y N N 120 1
+#data/Alvarez_variete_k2_D8_900k_2x2.xyz - - - 0 8 2 0.005 0.005 Y N N 120 1
+#data/Alvarez_surface_deg3_k2_D8_902K.xyz - - - 0 8 2 0.01 0.01 Y N N 120 1
+#data/Alvarez_surface_deg7_k2_D8_48K.xyz - - - 0 8 2 0.02 0.02 Y N N 600 10
+#data/Alvarez_surface_deg8_k2_D8_41K.xyz - - - 0 8 2 0.02 0.02 Y N N 120 1
+#data/Alvarez_surface_deg10_k2_D8_41K.xyz - - - 0 8 2 0.02 0.02 Y N N 120 1
+#generate_torus_d N - - 200000 6 3 0.05 0.05 Y N N 1200 1
+
+#---------------------------------------------------------- 14/04/2016 - stats about noise ----------------------------------------------------------
+
+#generate_torus_d Y 0 - 1000 4 2 0.05 0.19 Y N N 120 4
+#generate_torus_d Y 1 - 1000 4 2 0.05 0.19 Y N N 120 4
+#generate_torus_d Y 2 - 1000 4 2 0.05 0.19 Y N N 120 4
+#generate_torus_d Y 3 - 1000 4 2 0.05 0.19 Y N N 120 4
+#generate_torus_d Y 4 - 1000 4 2 0.05 0.19 Y N N 120 4
+#generate_torus_d Y 5 - 1000 4 2 0.05 0.19 Y N N 120 4
+#generate_torus_d Y 6 - 1000 4 2 0.05 0.19 Y N N 120 4
+#generate_torus_d Y 7 - 1000 4 2 0.05 0.19 Y N N 120 4
+#generate_torus_d Y 8 - 1000 4 2 0.05 0.19 Y N N 120 4
+#generate_torus_d Y 9 - 1000 4 2 0.05 0.19 Y N N 120 4
+#generate_torus_d Y 10 - 1000 4 2 0.05 0.19 Y N N 120 4
+
+#generate_sphere_d 3 0 - 1000 4 3 0.05 0.05 Y N N 120 4
+#generate_sphere_d 3 1 - 1000 4 3 0.05 0.05 Y N N 120 4
+#generate_sphere_d 3 2 - 1000 4 3 0.05 0.05 Y N N 120 4
+#generate_sphere_d 3 3 - 1000 4 3 0.05 0.05 Y N N 120 4
+#generate_sphere_d 3 4 - 1000 4 3 0.05 0.05 Y N N 120 4
+#generate_sphere_d 3 5 - 1000 4 3 0.05 0.05 Y N N 120 4
+#generate_sphere_d 3 6 - 1000 4 3 0.05 0.05 Y N N 120 4
+#generate_sphere_d 3 7 - 1000 4 3 0.05 0.05 Y N N 120 4
+#generate_sphere_d 3 8 - 1000 4 3 0.05 0.05 Y N N 120 4
+#generate_sphere_d 3 9 - 1000 4 3 0.05 0.05 Y N N 120 4
+#generate_sphere_d 3 10 - 1000 4 3 0.05 0.05 Y N N 120 4
+
+#generate_klein_bottle_4D 4 3 0 5000 4 2 0.05 0.05 Y N N 120 4
+#generate_klein_bottle_4D 4 3 0.01 5000 4 2 0.05 0.05 Y N N 120 4
+#generate_klein_bottle_4D 4 3 0.02 5000 4 2 0.05 0.05 Y N N 120 4
+#generate_klein_bottle_4D 4 3 0.03 5000 4 2 0.05 0.05 Y N N 120 4
+#generate_klein_bottle_4D 4 3 0.04 5000 4 2 0.05 0.05 Y N N 120 4
+#generate_klein_bottle_4D 4 3 0.05 5000 4 2 0.05 0.05 Y N N 120 4
+#generate_klein_bottle_4D 4 3 0.06 5000 4 2 0.05 0.05 Y N N 120 4
+#generate_klein_bottle_4D 4 3 0.07 5000 4 2 0.05 0.05 Y N N 120 4
+
+#---------------------------------------------------------- 04/2016 - stats with different perturb techniques ----------------------------------------------------------
+
+# Tangential translation
+#data/SO3_50000.xyz - - - 0 9 3 0 0.05 Y N N 500 10
+#data/Alvarez_surface_deg7_k2_D8_48K.xyz - - - 0 8 2 0.02 0.01 Y N N 120 10
+#generate_klein_bottle_4D 4 3 0 5000 4 2 0.05 0.05 Y N N 120 10
+#generate_torus_d Y 0 - 1000 4 2 0.05 0.19 Y N N 120 10
+#generate_sphere_d 3 1 - 1000 4 3 0.05 0.05 Y N N 120 10
+
+# Weight
+#data/SO3_50000.xyz - - - 0 9 3 0.1 0.05 Y N N 500 10
+#data/Alvarez_surface_deg7_k2_D8_48K.xyz - - - 0 8 2 0.02 0.01 Y N N 120 10
+#generate_klein_bottle_4D 4 3 0 5000 4 2 0.05 0.025 Y N N 20000 10
+#generate_torus_d Y 0 - 1000 4 2 0.05 0.025 Y N N 120 10
+#generate_sphere_d 3 1 - 1000 4 3 0.05 0.025 Y N N 12000 10 \ No newline at end of file
diff --git a/src/Tangential_complex/benchmark/benchmark_tc.cpp b/src/Tangential_complex/benchmark/benchmark_tc.cpp
new file mode 100644
index 00000000..6d6dd548
--- /dev/null
+++ b/src/Tangential_complex/benchmark/benchmark_tc.cpp
@@ -0,0 +1,785 @@
+/******************************************************************************
+This benchmark computes the Tangential Complex from input files or
+generated point sets.
+
+It reads the benchmark_script.txt file (located in the same folder as this
+file) and computes one or several complexes for each line. Unless TC_NO_EXPORT
+is defined, each complex is exported as an OFF file and/or as a RIB file
+(RenderMan). In addition an XML file is created at each run of the benchmark.
+It contains statistics about the complexes that were created. This XML file
+can be processed in Excel, for example.
+ ******************************************************************************/
+
+// Without TBB_USE_THREADING_TOOL Intel Inspector XE will report false positives in Intel TBB
+// (http://software.intel.com/en-us/articles/compiler-settings-for-threading-error-analysis-in-intel-inspector-xe/)
+#ifdef _DEBUG
+#define TBB_USE_THREADING_TOOL
+#endif
+
+#include <cstddef>
+
+//#define GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM
+//#define TC_INPUT_STRIDES 3 // only take one point every TC_INPUT_STRIDES points
+#define TC_NO_EXPORT // do not output OFF files
+//#define TC_EXPORT_TO_RIB //
+//#define GUDHI_TC_EXPORT_SPARSIFIED_POINT_SET
+//#define GUDHI_TC_EXPORT_ALL_COORDS_IN_OFF
+
+const std::size_t ONLY_LOAD_THE_FIRST_N_POINTS = 20000000;
+
+#include <gudhi/Debug_utils.h>
+#include <gudhi/Clock.h>
+#include <gudhi/Tangential_complex.h>
+#include <gudhi/sparsify_point_set.h>
+#include <gudhi/random_point_generators.h>
+#include <gudhi/Tangential_complex/utilities.h>
+
+#include <CGAL/assertions_behaviour.h>
+#include <CGAL/Epick_d.h>
+#include <CGAL/Random.h>
+
+#include <boost/algorithm/string/replace.hpp>
+#include <boost/algorithm/string/trim_all.hpp>
+#include <boost/range/adaptor/strided.hpp>
+
+#include <cstdlib>
+#include <ctime>
+#include <fstream>
+#include <cmath> // for std::sqrt
+
+#ifdef GUDHI_USE_TBB
+#include <tbb/task_scheduler_init.h>
+#endif
+#include "XML_exporter.h"
+#include "RIB_exporter.h"
+#define GUDHI_TC_EXPORT_PERFORMANCE_DATA
+#define GUDHI_TC_SET_PERFORMANCE_DATA(value_name, value) \
+ XML_perf_data::set(value_name, value);
+
+
+namespace subsampl = Gudhi::subsampling;
+namespace tc = Gudhi::tangential_complex;
+
+const char * const BENCHMARK_SCRIPT_FILENAME = "benchmark_script.txt";
+
+typedef CGAL::Epick_d<CGAL::Dynamic_dimension_tag> Kernel;
+typedef Kernel::FT FT;
+typedef Kernel::Point_d Point;
+typedef Kernel::Vector_d Vector;
+typedef tc::Tangential_complex<
+Kernel, CGAL::Dynamic_dimension_tag,
+CGAL::Parallel_tag> TC;
+typedef TC::Simplex Simplex;
+typedef TC::Simplex_set Simplex_set;
+
+class XML_perf_data {
+ public:
+ typedef Streaming_XML_exporter<std::string> XML_exporter;
+
+ XML_perf_data(const std::string &filename)
+ : m_xml(filename, "ContainerPerformance", "Perf",
+ construct_subelements_names()) { }
+
+ virtual ~XML_perf_data() { }
+
+ static XML_perf_data &get() {
+ static XML_perf_data singleton(build_filename());
+ return singleton;
+ }
+
+ template <typename Value_type>
+ static void set(const std::string &name, Value_type value) {
+ get().set_data(name, value);
+ }
+
+ static void commit() {
+ get().commit_current_element();
+ }
+
+ protected:
+
+ static std::string build_filename() {
+ std::stringstream sstr;
+ sstr << "perf_logs/Performance_log_" << time(0) << ".xml";
+ return sstr.str();
+ }
+
+ static std::vector<std::string> construct_subelements_names() {
+ std::vector<std::string> subelements;
+ subelements.push_back("Input");
+ subelements.push_back("Param1");
+ subelements.push_back("Param2");
+ subelements.push_back("Param3");
+ subelements.push_back("Intrinsic_dim");
+ subelements.push_back("Ambient_dim");
+ subelements.push_back("Num_threads");
+ subelements.push_back("Sparsity");
+ subelements.push_back("Max_perturb");
+ subelements.push_back("Num_points_in_input");
+ subelements.push_back("Num_points");
+ subelements.push_back("Perturb_technique");
+ subelements.push_back("Perturb_which_points");
+ subelements.push_back("Initial_num_inconsistent_local_tr");
+ subelements.push_back("Best_num_inconsistent_local_tr");
+ subelements.push_back("Final_num_inconsistent_local_tr");
+ subelements.push_back("Init_time");
+ subelements.push_back("Comput_time");
+ subelements.push_back("Perturb_successful");
+ subelements.push_back("Perturb_time");
+ subelements.push_back("Perturb_steps");
+ subelements.push_back("Result_pure_pseudomanifold");
+ subelements.push_back("Result_num_wrong_dim_simplices");
+ subelements.push_back("Result_num_wrong_number_of_cofaces");
+ subelements.push_back("Result_num_unconnected_stars");
+ subelements.push_back("Info");
+
+ return subelements;
+ }
+
+ void set_data(const std::string &name, const std::string &value) {
+ m_current_element[name] = value;
+ }
+
+ template <typename Value_type>
+ void set_data(const std::string &name, Value_type value) {
+ std::stringstream sstr;
+ sstr << value;
+ set_data(name, sstr.str());
+ }
+
+ void commit_current_element() {
+ m_xml.add_element(m_current_element);
+ m_current_element.clear();
+ }
+
+ XML_exporter m_xml;
+ XML_exporter::Element_with_map m_current_element;
+};
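+
+// Usage sketch (this simply mirrors how the benchmark below uses the macro defined above):
+//   GUDHI_TC_SET_PERFORMANCE_DATA("Num_points", points.size());  // buffer a value for the current run
+//   XML_perf_data::commit();  // flush the current element to the XML log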
+
+template<
+typename Kernel, typename OutputIteratorPoints>
+bool load_points_from_file(
+ const std::string &filename,
+ OutputIteratorPoints points,
+ std::size_t only_first_n_points = (std::numeric_limits<std::size_t>::max)()) {
+ typedef typename Kernel::Point_d Point;
+
+ std::ifstream in(filename);
+ if (!in.is_open()) {
+ std::cerr << "Could not open '" << filename << "'" << std::endl;
+ return false;
+ }
+
+ Kernel k;
+ Point p;
+  int num_points;
+  in >> num_points;  // number of points announced in the file header (not otherwise used)
+
+ std::size_t i = 0;
+ while (i < only_first_n_points && in >> p) {
+ *points++ = p;
+ ++i;
+ }
+
+#ifdef DEBUG_TRACES
+ std::cerr << "'" << filename << "' loaded." << std::endl;
+#endif
+
+ return true;
+}
+
+template<
+typename Kernel, typename Tangent_space_basis,
+typename OutputIteratorPoints, typename OutputIteratorTS>
+bool load_points_and_tangent_space_basis_from_file(
+ const std::string &filename,
+ OutputIteratorPoints points,
+ OutputIteratorTS tangent_spaces,
+ int intrinsic_dim,
+ std::size_t only_first_n_points = (std::numeric_limits<std::size_t>::max)()) {
+ typedef typename Kernel::Point_d Point;
+ typedef typename Kernel::Vector_d Vector;
+
+ std::ifstream in(filename);
+ if (!in.is_open()) {
+ std::cerr << "Could not open '" << filename << "'" << std::endl;
+ return false;
+ }
+
+ Kernel k;
+ Point p;
+  int num_points;
+  in >> num_points;  // number of points announced in the file header (not otherwise used)
+
+ std::size_t i = 0;
+ while (i < only_first_n_points && in >> p) {
+ *points++ = p;
+
+ Tangent_space_basis tsb(i);
+ for (int d = 0; d < intrinsic_dim; ++d) {
+ Vector v;
+ in >> v;
+ tsb.push_back(tc::internal::normalize_vector(v, k));
+ }
+ *tangent_spaces++ = tsb;
+ ++i;
+ }
+
+#ifdef DEBUG_TRACES
+ std::cerr << "'" << filename << "' loaded." << std::endl;
+#endif
+
+ return true;
+}
+
+// color_inconsistencies: only works if p_complex = NULL
+template <typename TC>
+bool export_to_off(
+ TC const& tc,
+ std::string const& input_name_stripped,
+ std::string const& suffix,
+ bool color_inconsistencies = false,
+ typename TC::Simplicial_complex const* p_complex = NULL,
+ Simplex_set const *p_simpl_to_color_in_red = NULL,
+ Simplex_set const *p_simpl_to_color_in_green = NULL,
+ Simplex_set const *p_simpl_to_color_in_blue = NULL) {
+#ifdef TC_NO_EXPORT
+ return true;
+#endif
+
+ CGAL::Identity<Point> proj_functor;
+
+ if (tc.intrinsic_dimension() <= 3) {
+ std::stringstream output_filename;
+ output_filename << "output/" << input_name_stripped << "_"
+ << tc.intrinsic_dimension() << "_in_R"
+ << tc.ambient_dimension() << "_"
+ << tc.number_of_vertices() << "v"
+ << suffix << ".off";
+ std::ofstream off_stream(output_filename.str().c_str());
+
+ if (p_complex) {
+#ifndef TC_NO_EXPORT
+ tc.export_to_off(
+ *p_complex, off_stream,
+ p_simpl_to_color_in_red,
+ p_simpl_to_color_in_green,
+ p_simpl_to_color_in_blue,
+ proj_functor);
+#endif
+ } else {
+ tc.export_to_off(
+ off_stream, color_inconsistencies,
+ p_simpl_to_color_in_red,
+ p_simpl_to_color_in_green,
+ p_simpl_to_color_in_blue,
+ NULL,
+ proj_functor);
+ }
+ return true;
+ }
+ return false;
+}
+
+void make_tc(std::vector<Point> &points,
+ TC::TS_container const& tangent_spaces, // can be empty
+ int intrinsic_dim,
+ double sparsity = 0.01,
+ double max_perturb = 0.005,
+ bool perturb = true,
+ bool add_high_dim_simpl = false,
+ bool collapse = false,
+ double time_limit_for_perturb = 0.,
+ const char *input_name = "tc") {
+ Kernel k;
+
+ if (sparsity > 0. && !tangent_spaces.empty()) {
+    std::cerr << "Error: cannot sparsify a point set with pre-computed tangent spaces.\n";
+ return;
+ }
+
+ //===========================================================================
+ // Init
+ //===========================================================================
+ Gudhi::Clock t;
+
+ // Get input_name_stripped
+ std::string input_name_stripped(input_name);
+ size_t slash_index = input_name_stripped.find_last_of('/');
+ if (slash_index == std::string::npos)
+ slash_index = input_name_stripped.find_last_of('\\');
+ if (slash_index == std::string::npos)
+ slash_index = 0;
+ else
+ ++slash_index;
+ input_name_stripped = input_name_stripped.substr(
+ slash_index, input_name_stripped.find_last_of('.') - slash_index);
+
+ GUDHI_TC_SET_PERFORMANCE_DATA("Num_points_in_input", points.size());
+
+#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM
+ std::vector<Point> points_not_sparse = points;
+#endif
+
+ //===========================================================================
+ // Sparsify point set if requested
+ //===========================================================================
+ if (sparsity > 0.) {
+ std::size_t num_points_before = points.size();
+ std::vector<Point> sparsified_points;
+ subsampl::sparsify_point_set(k, points, sparsity*sparsity,
+ std::back_inserter(sparsified_points));
+ sparsified_points.swap(points);
+ std::cerr << "Number of points before/after sparsification: "
+ << num_points_before << " / " << points.size() << "\n";
+
+#ifdef GUDHI_TC_EXPORT_SPARSIFIED_POINT_SET
+ std::ofstream ps_stream("output/sparsified_point_set.txt");
+ tc::internal::export_point_set(k, points, ps_stream);
+#endif
+ }
+
+ GUDHI_TC_SET_PERFORMANCE_DATA("Sparsity", sparsity);
+ GUDHI_TC_SET_PERFORMANCE_DATA("Max_perturb", max_perturb);
+ GUDHI_TC_SET_PERFORMANCE_DATA("Num_points", points.size());
+
+ //===========================================================================
+ // Compute Tangential Complex
+ //===========================================================================
+
+ TC tc(
+ points,
+ intrinsic_dim,
+#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM
+ points_not_sparse.begin(), points_not_sparse.end(),
+#endif
+ k);
+
+ if (!tangent_spaces.empty()) {
+ tc.set_tangent_planes(tangent_spaces);
+ }
+
+ t.end();
+ double init_time = t.num_seconds();
+
+ t.begin();
+ tc.compute_tangential_complex();
+ t.end();
+ double computation_time = t.num_seconds();
+
+ //===========================================================================
+ // Export to OFF
+ //===========================================================================
+
+ // Create complex
+ int max_dim = -1;
+ TC::Simplicial_complex complex;
+ Simplex_set inconsistent_simplices;
+ max_dim = tc.create_complex(complex, true, false, 2, &inconsistent_simplices);
+
+ // TODO(CJ): TEST
+ Gudhi::Simplex_tree<> stree;
+ tc.create_complex(stree, true, false);
+ // std::cerr << stree;
+
+ t.begin();
+ bool ret = export_to_off(
+ tc, input_name_stripped, "_INITIAL_TC", true,
+ &complex, &inconsistent_simplices);
+ t.end();
+ double export_before_time = (ret ? t.num_seconds() : -1);
+
+ unsigned int num_perturb_steps = 0;
+ double perturb_time = -1;
+ double export_after_perturb_time = -1.;
+ bool perturb_success = false;
+ if (perturb) {
+ //=========================================================================
+ // Try to fix inconsistencies by perturbing points
+ //=========================================================================
+ t.begin();
+ auto fix_result =
+ tc.fix_inconsistencies_using_perturbation(max_perturb, time_limit_for_perturb);
+ t.end();
+ perturb_time = t.num_seconds();
+
+    perturb_success = fix_result.success;
+    num_perturb_steps = fix_result.num_steps;  // number of perturbation steps actually performed
+ GUDHI_TC_SET_PERFORMANCE_DATA("Initial_num_inconsistent_local_tr",
+ fix_result.initial_num_inconsistent_stars);
+ GUDHI_TC_SET_PERFORMANCE_DATA("Best_num_inconsistent_local_tr",
+ fix_result.best_num_inconsistent_stars);
+ GUDHI_TC_SET_PERFORMANCE_DATA("Final_num_inconsistent_local_tr",
+ fix_result.final_num_inconsistent_stars);
+
+ //=========================================================================
+ // Export to OFF
+ //=========================================================================
+
+ // Re-build the complex
+ Simplex_set inconsistent_simplices;
+ max_dim = tc.create_complex(complex, true, false, 2, &inconsistent_simplices);
+
+ t.begin();
+ bool exported = export_to_off(
+ tc, input_name_stripped, "_AFTER_FIX", true, &complex,
+ &inconsistent_simplices);
+ t.end();
+ export_after_perturb_time = (exported ? t.num_seconds() : -1);
+
+ //std::string fn = "output/inc_stars/";
+ //fn += input_name_stripped;
+ //tc.export_inconsistent_stars_to_OFF_files(fn);
+
+#if !defined(TC_NO_EXPORT) && defined(TC_EXPORT_TO_RIB)
+ std::ofstream rib(std::string("output/") + input_name_stripped + ".rib");
+ RIB_exporter<TC::Points, TC::Simplicial_complex::Simplex_set> rib_exporter(
+ tc.points(),
+ complex.simplex_range(),
+ rib,
+ input_name_stripped + ".tif",
+ false, // is_preview
+ std::make_tuple(2, 4, 6),
+ 1600, 503 // resolution
+ );
+ rib_exporter.write_file();
+
+ std::ofstream rib_LQ(std::string("output/") + input_name_stripped + "_LQ.rib");
+ RIB_exporter<TC::Points, TC::Simplicial_complex::Simplex_set> rib_exporter_LQ(
+ tc.points(),
+ complex.simplex_range(),
+ rib_LQ,
+ input_name_stripped + "_LQ.tif",
+ true, // is_preview
+ std::make_tuple(0, 4, 5)
+ );
+ rib_exporter_LQ.write_file();
+#endif
+ } else {
+ GUDHI_TC_SET_PERFORMANCE_DATA("Initial_num_inconsistent_local_tr", "N/A");
+ GUDHI_TC_SET_PERFORMANCE_DATA("Best_num_inconsistent_local_tr", "N/A");
+ GUDHI_TC_SET_PERFORMANCE_DATA("Final_num_inconsistent_local_tr", "N/A");
+ }
+
+ max_dim = tc.create_complex(complex, true, false, 2);
+
+ complex.display_stats();
+
+ if (intrinsic_dim == 2)
+ complex.euler_characteristic(true);
+
+ //===========================================================================
+ // Collapse
+ //===========================================================================
+ if (collapse) {
+ complex.collapse(max_dim);
+ complex.display_stats();
+ }
+
+ //===========================================================================
+ // Is the result a pure pseudomanifold?
+ //===========================================================================
+ std::size_t num_wrong_dim_simplices,
+ num_wrong_number_of_cofaces,
+ num_unconnected_stars;
+ Simplex_set wrong_dim_simplices;
+ Simplex_set wrong_number_of_cofaces_simplices;
+ Simplex_set unconnected_stars_simplices;
+ bool is_pure_pseudomanifold = complex.is_pure_pseudomanifold(
+ intrinsic_dim, tc.number_of_vertices(),
+ false, // do NOT allow borders
+ false, 1,
+ &num_wrong_dim_simplices, &num_wrong_number_of_cofaces,
+ &num_unconnected_stars,
+ &wrong_dim_simplices, &wrong_number_of_cofaces_simplices,
+ &unconnected_stars_simplices);
+
+ //===========================================================================
+ // Export to OFF
+ //===========================================================================
+
+ double export_after_collapse_time = -1.;
+ if (collapse) {
+ t.begin();
+ bool exported = export_to_off(
+ tc, input_name_stripped, "_AFTER_COLLAPSE", false, &complex,
+ &wrong_dim_simplices, &wrong_number_of_cofaces_simplices,
+ &unconnected_stars_simplices);
+ t.end();
+ std::cerr
+ << " OFF colors:\n"
+ << " * Red: wrong dim simplices\n"
+ << " * Green: wrong number of cofaces simplices\n"
+ << " * Blue: not-connected stars\n";
+ export_after_collapse_time = (exported ? t.num_seconds() : -1.);
+ }
+
+ //===========================================================================
+ // Display info
+ //===========================================================================
+
+ std::cerr
+ << "\n================================================\n"
+ << "Number of vertices: " << tc.number_of_vertices() << "\n"
+ << "Computation times (seconds): \n"
+ << " * Tangential complex: " << init_time + computation_time << "\n"
+ << " - Init + kd-tree = " << init_time << "\n"
+ << " - TC computation = " << computation_time << "\n"
+ << " * Export to OFF (before perturb): " << export_before_time << "\n"
+ << " * Fix inconsistencies 1: " << perturb_time
+ << " (" << num_perturb_steps << " steps) ==> "
+ << (perturb_success ? "FIXED" : "NOT fixed") << "\n"
+ << " * Export to OFF (after perturb): " << export_after_perturb_time << "\n"
+ << " * Export to OFF (after collapse): "
+ << export_after_collapse_time << "\n"
+ << "================================================\n";
+
+ //===========================================================================
+ // Export info
+ //===========================================================================
+ GUDHI_TC_SET_PERFORMANCE_DATA("Init_time", init_time);
+ GUDHI_TC_SET_PERFORMANCE_DATA("Comput_time", computation_time);
+ GUDHI_TC_SET_PERFORMANCE_DATA("Perturb_successful",
+ (perturb_success ? 1 : 0));
+ GUDHI_TC_SET_PERFORMANCE_DATA("Perturb_time", perturb_time);
+ GUDHI_TC_SET_PERFORMANCE_DATA("Perturb_steps", num_perturb_steps);
+ GUDHI_TC_SET_PERFORMANCE_DATA("Result_pure_pseudomanifold",
+ (is_pure_pseudomanifold ? 1 : 0));
+ GUDHI_TC_SET_PERFORMANCE_DATA("Result_num_wrong_dim_simplices",
+ num_wrong_dim_simplices);
+ GUDHI_TC_SET_PERFORMANCE_DATA("Result_num_wrong_number_of_cofaces",
+ num_wrong_number_of_cofaces);
+ GUDHI_TC_SET_PERFORMANCE_DATA("Result_num_unconnected_stars",
+ num_unconnected_stars);
+ GUDHI_TC_SET_PERFORMANCE_DATA("Info", "");
+}
+
+int main() {
+ CGAL::set_error_behaviour(CGAL::ABORT);
+
+#ifdef GUDHI_USE_TBB
+#ifdef _DEBUG
+ int num_threads = 1;
+#else
+ int num_threads = tbb::task_scheduler_init::default_num_threads() - 4;
+#endif
+#endif
+
+ unsigned int seed = static_cast<unsigned int> (time(NULL));
+ CGAL::default_random = CGAL::Random(seed); // TODO(CJ): use set_default_random
+ std::cerr << "Random seed = " << seed << "\n";
+
+ std::ifstream script_file;
+ script_file.open(BENCHMARK_SCRIPT_FILENAME);
+ // Script?
+  // Script file format: each line gives, in this order:
+  //    - Filename (point set) or "generate_XXX" (point set generation)
+  //    - Three generator-specific parameters ("-" when unused)
+  //    - Number of points
+  //    - Ambient dim
+  //    - Intrinsic dim
+  //    - Sparsity and maximum perturbation radius
+  //    - Three Y/N flags: perturb / add higher-dim simplices / collapse
+  //    - Time limit for the perturbation (in seconds)
+  //    - Number of iterations with these parameters
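+  // Example line (one of the commented entries shipped in benchmark_script.txt):
+  //   generate_klein_bottle_4D 4 3 0 5000 4 2 0.05 0.025 Y N N 20000 10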
+ if (script_file.is_open()) {
+ int i = 1;
+#ifdef GUDHI_USE_TBB
+#ifdef BENCHMARK_WITH_1_TO_MAX_THREADS
+ for (num_threads = 1;
+ num_threads <= tbb::task_scheduler_init::default_num_threads();
+ ++num_threads)
+#endif
+#endif
+ /*for (Concurrent_mesher_config::get().num_work_items_per_batch = 5 ;
+ Concurrent_mesher_config::get().num_work_items_per_batch < 100 ;
+ Concurrent_mesher_config::get().num_work_items_per_batch += 5)*/ {
+#ifdef GUDHI_USE_TBB
+ tbb::task_scheduler_init init(
+ num_threads > 0 ? num_threads : tbb::task_scheduler_init::automatic);
+#endif
+
+ std::cerr << "Script file '" << BENCHMARK_SCRIPT_FILENAME << "' found.\n";
+ script_file.seekg(0);
+ while (script_file.good()) {
+ std::string line;
+ std::getline(script_file, line);
+ if (line.size() > 1 && line[0] != '#') {
+ boost::replace_all(line, "\t", " ");
+ boost::trim_all(line);
+ std::cerr << "\n\n";
+ std::cerr << "*****************************************\n";
+ std::cerr << "******* " << line << "\n";
+ std::cerr << "*****************************************\n";
+ std::stringstream sstr(line);
+
+ std::string input;
+ std::string param1;
+ std::string param2;
+ std::string param3;
+ std::size_t num_points;
+ int ambient_dim;
+ int intrinsic_dim;
+ double sparsity;
+ double max_perturb;
+ char perturb, add_high_dim_simpl, collapse;
+ double time_limit_for_perturb;
+ int num_iteration;
+ sstr >> input;
+ sstr >> param1;
+ sstr >> param2;
+ sstr >> param3;
+ sstr >> num_points;
+ sstr >> ambient_dim;
+ sstr >> intrinsic_dim;
+ sstr >> sparsity;
+ sstr >> max_perturb;
+ sstr >> perturb;
+ sstr >> add_high_dim_simpl;
+ sstr >> collapse;
+ sstr >> time_limit_for_perturb;
+ sstr >> num_iteration;
+
+ for (int j = 0; j < num_iteration; ++j) {
+ std::string input_stripped = input;
+ size_t slash_index = input_stripped.find_last_of('/');
+ if (slash_index == std::string::npos)
+ slash_index = input_stripped.find_last_of('\\');
+ if (slash_index == std::string::npos)
+ slash_index = 0;
+ else
+ ++slash_index;
+ input_stripped = input_stripped.substr(
+ slash_index, input_stripped.find_last_of('.') - slash_index);
+
+ GUDHI_TC_SET_PERFORMANCE_DATA("Input", input_stripped);
+ GUDHI_TC_SET_PERFORMANCE_DATA("Param1", param1);
+ GUDHI_TC_SET_PERFORMANCE_DATA("Param2", param2);
+ GUDHI_TC_SET_PERFORMANCE_DATA("Param3", param3);
+ GUDHI_TC_SET_PERFORMANCE_DATA("Ambient_dim", ambient_dim);
+ GUDHI_TC_SET_PERFORMANCE_DATA("Intrinsic_dim", intrinsic_dim);
+ GUDHI_TC_SET_PERFORMANCE_DATA("Perturb_technique", "Tangential_translation");
+ GUDHI_TC_SET_PERFORMANCE_DATA("Perturb_which_points", "Center_vertex");
+
+#ifdef GUDHI_USE_TBB
+ GUDHI_TC_SET_PERFORMANCE_DATA(
+ "Num_threads",
+ (num_threads == -1 ? tbb::task_scheduler_init::default_num_threads() : num_threads));
+#else
+ GUDHI_TC_SET_PERFORMANCE_DATA("Num_threads", "N/A");
+#endif
+
+ std::cerr << "\nTC #" << i << "...\n";
+
+#ifdef GUDHI_TC_PROFILING
+ Gudhi::Clock t_gen;
+#endif
+
+ std::vector<Point> points;
+ TC::TS_container tangent_spaces;
+
+ if (input == "generate_moment_curve") {
+ points = Gudhi::generate_points_on_moment_curve<Kernel>(
+ num_points, ambient_dim,
+ std::atof(param1.c_str()), std::atof(param2.c_str()));
+ } else if (input == "generate_plane") {
+ points = Gudhi::generate_points_on_plane<Kernel>(
+ num_points, intrinsic_dim, ambient_dim);
+ } else if (input == "generate_sphere_d") {
+ points = Gudhi::generate_points_on_sphere_d<Kernel>(
+ num_points, ambient_dim,
+ std::atof(param1.c_str()), // radius
+ std::atof(param2.c_str())); // radius_noise_percentage
+ } else if (input == "generate_two_spheres_d") {
+ points = Gudhi::generate_points_on_two_spheres_d<Kernel>(
+ num_points, ambient_dim,
+ std::atof(param1.c_str()),
+ std::atof(param2.c_str()),
+ std::atof(param3.c_str()));
+ } else if (input == "generate_3sphere_and_circle_d") {
+ GUDHI_CHECK(intrinsic_dim == 3,
+ std::logic_error("Intrinsic dim should be 3"));
+ GUDHI_CHECK(ambient_dim == 5,
+ std::logic_error("Ambient dim should be 5"));
+ points = Gudhi::generate_points_on_3sphere_and_circle<Kernel>(
+ num_points,
+ std::atof(param1.c_str()));
+ } else if (input == "generate_torus_3D") {
+ points = Gudhi::generate_points_on_torus_3D<Kernel>(
+ num_points,
+ std::atof(param1.c_str()),
+ std::atof(param2.c_str()),
+ param3 == "Y");
+ } else if (input == "generate_torus_d") {
+ points = Gudhi::generate_points_on_torus_d<Kernel>(
+ num_points,
+ intrinsic_dim,
+ param1 == "Y", // uniform
+ std::atof(param2.c_str())); // radius_noise_percentage
+ } else if (input == "generate_klein_bottle_3D") {
+ points = Gudhi::generate_points_on_klein_bottle_3D<Kernel>(
+ num_points,
+ std::atof(param1.c_str()), std::atof(param2.c_str()));
+ } else if (input == "generate_klein_bottle_4D") {
+ points = Gudhi::generate_points_on_klein_bottle_4D<Kernel>(
+ num_points,
+ std::atof(param1.c_str()), std::atof(param2.c_str()),
+ std::atof(param3.c_str())); // noise
+ } else if (input == "generate_klein_bottle_variant_5D") {
+ points = Gudhi::generate_points_on_klein_bottle_variant_5D<Kernel>(
+ num_points,
+ std::atof(param1.c_str()), std::atof(param2.c_str()));
+ } else {
+ // Contains tangent space basis
+ if (input.substr(input.size() - 3) == "pwt") {
+ load_points_and_tangent_space_basis_from_file
+ <Kernel, typename TC::Tangent_space_basis > (
+ input, std::back_inserter(points),
+ std::back_inserter(tangent_spaces),
+ intrinsic_dim,
+ ONLY_LOAD_THE_FIRST_N_POINTS);
+ } else {
+ load_points_from_file<Kernel>(
+ input, std::back_inserter(points),
+ ONLY_LOAD_THE_FIRST_N_POINTS);
+ }
+ }
+
+#ifdef GUDHI_TC_PROFILING
+ t_gen.end();
+ std::cerr << "Point set generated/loaded in " << t_gen.num_seconds()
+ << " seconds.\n";
+#endif
+
+ if (!points.empty()) {
+#if defined(TC_INPUT_STRIDES) && TC_INPUT_STRIDES > 1
+ auto p = points | boost::adaptors::strided(TC_INPUT_STRIDES);
+ std::vector<Point> points(p.begin(), p.end());
+ std::cerr << "****************************************\n"
+ << "WARNING: taking 1 point every " << TC_INPUT_STRIDES
+ << " points.\n"
+ << "****************************************\n";
+#endif
+
+ make_tc(points, tangent_spaces, intrinsic_dim,
+ sparsity, max_perturb,
+ perturb == 'Y', add_high_dim_simpl == 'Y', collapse == 'Y',
+ time_limit_for_perturb, input.c_str());
+
+ std::cerr << "TC #" << i++ << " done.\n";
+ std::cerr << "\n---------------------------------\n";
+ } else {
+ std::cerr << "TC #" << i++ << ": no points loaded.\n";
+ }
+
+ XML_perf_data::commit();
+ }
+ }
+ }
+ script_file.seekg(0);
+ script_file.clear();
+ }
+
+ script_file.close();
+ } // Or not script?
+ else {
+ std::cerr << "Script file '" << BENCHMARK_SCRIPT_FILENAME << "' NOT found.\n";
+ }
+
+ // system("pause");
+ return 0;
+}
diff --git a/src/Tangential_complex/doc/COPYRIGHT b/src/Tangential_complex/doc/COPYRIGHT
new file mode 100644
index 00000000..c4df0f64
--- /dev/null
+++ b/src/Tangential_complex/doc/COPYRIGHT
@@ -0,0 +1,19 @@
+The files of this directory are part of the Gudhi Library. The Gudhi library
+(Geometric Understanding in Higher Dimensions) is a generic C++ library for
+computational topology.
+
+Author(s): Clement Jamin
+
+Copyright (C) 2015 INRIA
+
+This program is free software: you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free Software
+Foundation, either version 3 of the License, or (at your option) any later
+version.
+
+This program is distributed in the hope that it will be useful, but WITHOUT
+ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License along with
+this program. If not, see <http://www.gnu.org/licenses/>.
diff --git a/src/Tangential_complex/doc/Intro_tangential_complex.h b/src/Tangential_complex/doc/Intro_tangential_complex.h
new file mode 100644
index 00000000..3d687c1d
--- /dev/null
+++ b/src/Tangential_complex/doc/Intro_tangential_complex.h
@@ -0,0 +1,119 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Clement Jamin
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef DOC_TANGENTIAL_COMPLEX_INTRO_TANGENTIAL_COMPLEX_H_
+#define DOC_TANGENTIAL_COMPLEX_INTRO_TANGENTIAL_COMPLEX_H_
+
+// needs namespaces for Doxygen to link on classes
+namespace Gudhi {
+namespace tangential_complex {
+
+/** \defgroup tangential_complex Tangential complex
+
+\author Cl&eacute;ment Jamin
+
+@{
+
+\section tangentialdefinition Definition
+
+A Tangential Delaunay complex is a <a target="_blank" href="https://en.wikipedia.org/wiki/Simplicial_complex">simplicial complex</a>
+designed to reconstruct a \f$k\f$-dimensional smooth manifold embedded in \f$d\f$-dimensional Euclidean space.
+The input is a point sample coming from an unknown manifold, which means that the points lie close to a structure of "small" intrinsic dimension.
+The running time depends only linearly on the extrinsic dimension \f$ d \f$
+and exponentially on the intrinsic dimension \f$ k \f$.
+
+An extensive description of the Tangential complex can be found in \cite tangentialcomplex2014.
+
+\subsection whatisthetc What is a Tangential Complex?
+
+Let us start with the description of the Tangential complex of a simple example, with \f$ k=1 \f$ and \f$ d=2 \f$.
+The input data is 4 points \f$ P \f$ located on a curve embedded in 2D.
+\image html "tc_example_01.png" "The input"
+For each point \f$ p \f$, estimate its tangent subspace \f$ T_p \f$ (e.g. using PCA).
+\image html "tc_example_02.png" "The estimated normals"
+Let us add the Voronoi diagram of the points in orange. For each point \f$ p \f$, construct its star in the Delaunay triangulation of \f$ P \f$ restricted to \f$ T_p \f$.
+\image html "tc_example_03.png" "The Voronoi diagram"
+The Tangential Delaunay complex is the union of those stars.
+
+In practice, neither the ambient Voronoi diagram nor the ambient Delaunay triangulation is computed.
+Instead, local \f$ k \f$-dimensional regular triangulations are computed with a limited number of points as we only need the star of each point.
+More details can be found in \cite tangentialcomplex2014.
+
+\subsection inconsistencies Inconsistencies
+
+Inconsistencies between the stars can occur.
+An inconsistency occurs when a simplex is not in the star of all its vertices.
+
+Let us take the same example.
+\image html "tc_example_07_before.png" "Before"
+Let us slightly move the tangent subspace \f$ T_Q \f$ of the point \f$ Q \f$.
+\image html "tc_example_07_after.png" "After"
+Now, the star of \f$ Q \f$ contains \f$ QP \f$, but the star of \f$ P \f$ does not contain \f$ QP \f$. We have an inconsistency.
+\image html "tc_example_08.png" "After"
+
+One way to solve inconsistencies is to randomly perturb the positions of the points involved in an inconsistency.
+In the current implementation, this perturbation is done in the tangent subspace of each point.
+The maximum perturbation radius is given as a parameter to `Tangential_complex::fix_inconsistencies_using_perturbation`.
+
+In most cases, we recommend providing a point set where the minimum distance between any two points
+is not too small. This can be achieved using the functions provided by the Subsampling module. A good value to start with for
+the maximum perturbation radius is then around half the minimum distance between any two points.
+The \ref example_with_perturb below shows an example of such a process; a short sketch is also given at the end of this subsection.
+
+In most cases, this process is able to dramatically reduce the number of inconsistencies, but is not guaranteed to succeed.
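+
+As announced above, here is a minimal sketch of this sparsify-then-perturb process. It mirrors the
+perturbation example included below; `Point`, `TC`, `k`, `points` and `INTRINSIC_DIM` are assumed to be
+defined as in that example.
+\code{.cpp}
+// Sparsify the point set so that no two points are closer than 0.1
+std::vector<Point> sparsified_points;
+Gudhi::subsampling::sparsify_point_set(k, points, 0.1 * 0.1 /* min squared distance */,
+                                       std::back_inserter(sparsified_points));
+sparsified_points.swap(points);
+
+// Compute the complex, then perturb with a radius of about half the sparsification distance
+TC tc(points, INTRINSIC_DIM, k);
+tc.compute_tangential_complex();
+tc.fix_inconsistencies_using_perturbation(0.05 /* max_perturb */, 10. /* time limit in seconds */);
+\endcode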
+
+\subsection output Output
+
+The result of the computation is exported as a `Simplex_tree`. It is the union of the stars of all the input points.
+A vertex in the `Simplex_tree` is the index of the point in the range provided by the user.
+The point corresponding to a vertex can also be obtained through the `Tangential_complex::get_point` function.
+Note that even if the positions of the points are perturbed, their original positions are kept (i.e. `Tangential_complex::get_point` returns the original position of the point).
+
+The result can be obtained after the computation of the Tangential complex itself and/or after the perturbation process.
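+
+For instance, here is a minimal sketch (assuming `tc` is a computed `Tangential_complex`) that exports the
+complex and maps each vertex of the resulting tree back to its input point:
+\code{.cpp}
+Gudhi::Simplex_tree<> stree;
+tc.create_complex(stree);
+for (auto v : stree.complex_vertex_range()) {
+  std::cout << "Vertex " << v << " -> point:";
+  for (auto coord : tc.get_point(v)) std::cout << " " << coord;
+  std::cout << "\n";
+}
+\endcode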
+
+\section simple_example Simple example
+
+This example builds the Tangential complex of a point set.
+Note that the dimension of the kernel here is dynamic, which is slower, but more flexible:
+the intrinsic and ambient dimensions do not have to be known at compile-time.
+
+\include Tangential_complex/example_basic.cpp
+
+\section example_with_perturb Example with perturbation
+
+This example builds the Tangential complex of a point set, then tries to solve inconsistencies
+by perturbing the positions of points involved in inconsistent simplices.
+Note that the dimension of the kernel here is static, which is the best choice when the
+dimensions are known at compile-time.
+
+\include Tangential_complex/example_with_perturb.cpp
+
+\copyright GNU General Public License v3.
+\verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim
+ */
+/** @} */ // end defgroup tangential_complex
+
+} // namespace tangential_complex
+
+} // namespace Gudhi
+
+#endif // DOC_TANGENTIAL_COMPLEX_INTRO_TANGENTIAL_COMPLEX_H_
diff --git a/src/Tangential_complex/doc/tc_example_01.png b/src/Tangential_complex/doc/tc_example_01.png
new file mode 100644
index 00000000..8afe6198
--- /dev/null
+++ b/src/Tangential_complex/doc/tc_example_01.png
Binary files differ
diff --git a/src/Tangential_complex/doc/tc_example_02.png b/src/Tangential_complex/doc/tc_example_02.png
new file mode 100644
index 00000000..01591c1d
--- /dev/null
+++ b/src/Tangential_complex/doc/tc_example_02.png
Binary files differ
diff --git a/src/Tangential_complex/doc/tc_example_03.png b/src/Tangential_complex/doc/tc_example_03.png
new file mode 100644
index 00000000..5de04e01
--- /dev/null
+++ b/src/Tangential_complex/doc/tc_example_03.png
Binary files differ
diff --git a/src/Tangential_complex/doc/tc_example_05.png b/src/Tangential_complex/doc/tc_example_05.png
new file mode 100644
index 00000000..fdd5e5fa
--- /dev/null
+++ b/src/Tangential_complex/doc/tc_example_05.png
Binary files differ
diff --git a/src/Tangential_complex/doc/tc_example_06.png b/src/Tangential_complex/doc/tc_example_06.png
new file mode 100644
index 00000000..31ad3c43
--- /dev/null
+++ b/src/Tangential_complex/doc/tc_example_06.png
Binary files differ
diff --git a/src/Tangential_complex/doc/tc_example_07.png b/src/Tangential_complex/doc/tc_example_07.png
new file mode 100644
index 00000000..47e34de7
--- /dev/null
+++ b/src/Tangential_complex/doc/tc_example_07.png
Binary files differ
diff --git a/src/Tangential_complex/doc/tc_example_07_after.png b/src/Tangential_complex/doc/tc_example_07_after.png
new file mode 100644
index 00000000..981350d2
--- /dev/null
+++ b/src/Tangential_complex/doc/tc_example_07_after.png
Binary files differ
diff --git a/src/Tangential_complex/doc/tc_example_07_before.png b/src/Tangential_complex/doc/tc_example_07_before.png
new file mode 100644
index 00000000..ddc6bc7b
--- /dev/null
+++ b/src/Tangential_complex/doc/tc_example_07_before.png
Binary files differ
diff --git a/src/Tangential_complex/doc/tc_example_08.png b/src/Tangential_complex/doc/tc_example_08.png
new file mode 100644
index 00000000..119a87de
--- /dev/null
+++ b/src/Tangential_complex/doc/tc_example_08.png
Binary files differ
diff --git a/src/Tangential_complex/doc/tc_example_09.png b/src/Tangential_complex/doc/tc_example_09.png
new file mode 100644
index 00000000..31bac1e0
--- /dev/null
+++ b/src/Tangential_complex/doc/tc_example_09.png
Binary files differ
diff --git a/src/Tangential_complex/doc/tc_examples.png b/src/Tangential_complex/doc/tc_examples.png
new file mode 100644
index 00000000..b6544afe
--- /dev/null
+++ b/src/Tangential_complex/doc/tc_examples.png
Binary files differ
diff --git a/src/Tangential_complex/example/CMakeLists.txt b/src/Tangential_complex/example/CMakeLists.txt
new file mode 100644
index 00000000..47a56e3b
--- /dev/null
+++ b/src/Tangential_complex/example/CMakeLists.txt
@@ -0,0 +1,19 @@
+cmake_minimum_required(VERSION 2.6)
+project(Tangential_complex_examples)
+
+if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
+ add_executable( Tangential_complex_example_basic example_basic.cpp )
+ target_link_libraries(Tangential_complex_example_basic ${CGAL_LIBRARY} ${Boost_DATE_TIME_LIBRARY})
+ add_executable( Tangential_complex_example_with_perturb example_with_perturb.cpp )
+ target_link_libraries(Tangential_complex_example_with_perturb ${CGAL_LIBRARY} ${Boost_DATE_TIME_LIBRARY})
+ if (TBB_FOUND)
+ target_link_libraries(Tangential_complex_example_basic ${TBB_LIBRARIES})
+ target_link_libraries(Tangential_complex_example_with_perturb ${TBB_LIBRARIES})
+ endif(TBB_FOUND)
+
+ add_test(Tangential_complex_example_basic
+ ${CMAKE_CURRENT_BINARY_DIR}/Tangential_complex_example_basic)
+
+ add_test(Tangential_complex_example_with_perturb
+ ${CMAKE_CURRENT_BINARY_DIR}/Tangential_complex_example_with_perturb)
+endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
diff --git a/src/Tangential_complex/example/example_basic.cpp b/src/Tangential_complex/example/example_basic.cpp
new file mode 100644
index 00000000..4f2b859e
--- /dev/null
+++ b/src/Tangential_complex/example/example_basic.cpp
@@ -0,0 +1,46 @@
+#include <gudhi/Tangential_complex.h>
+#include <gudhi/sparsify_point_set.h>
+
+#include <CGAL/Epick_d.h>
+#include <CGAL/Random.h>
+#include <CGAL/point_generators_d.h>  // for CGAL::Random_points_on_sphere_d
+
+#include <array>
+#include <vector>
+
+namespace tc = Gudhi::tangential_complex;
+
+typedef CGAL::Epick_d<CGAL::Dynamic_dimension_tag> Kernel;
+typedef Kernel::FT FT;
+typedef Kernel::Point_d Point;
+typedef Kernel::Vector_d Vector;
+typedef tc::Tangential_complex<
+Kernel, CGAL::Dynamic_dimension_tag,
+CGAL::Parallel_tag> TC;
+
+int main(void) {
+ const int INTRINSIC_DIM = 2;
+ const int AMBIENT_DIM = 3;
+ const int NUM_POINTS = 1000;
+
+ Kernel k;
+
+ // Generate points on a 2-sphere
+ CGAL::Random_points_on_sphere_d<Point> generator(AMBIENT_DIM, 3.);
+ std::vector<Point> points;
+ points.reserve(NUM_POINTS);
+ for (int i = 0; i < NUM_POINTS; ++i)
+ points.push_back(*generator++);
+
+ // Compute the TC
+ TC tc(points, INTRINSIC_DIM, k);
+ tc.compute_tangential_complex();
+
+ // Export the TC into a Simplex_tree
+ Gudhi::Simplex_tree<> stree;
+ tc.create_complex(stree);
+
+ // Display stats about inconsistencies
+ tc.number_of_inconsistent_simplices(true); // verbose
+
+ return 0;
+}
diff --git a/src/Tangential_complex/example/example_with_perturb.cpp b/src/Tangential_complex/example/example_with_perturb.cpp
new file mode 100644
index 00000000..d0d877ea
--- /dev/null
+++ b/src/Tangential_complex/example/example_with_perturb.cpp
@@ -0,0 +1,53 @@
+#include <gudhi/Tangential_complex.h>
+#include <gudhi/sparsify_point_set.h>
+
+#include <CGAL/Epick_d.h>
+#include <CGAL/Random.h>
+#include <CGAL/point_generators_d.h>  // for CGAL::Random_points_on_sphere_d
+
+#include <array>
+#include <vector>
+
+namespace subsampl = Gudhi::subsampling;
+namespace tc = Gudhi::tangential_complex;
+
+typedef CGAL::Epick_d<CGAL::Dimension_tag < 3 >> Kernel;
+typedef Kernel::FT FT;
+typedef Kernel::Point_d Point;
+typedef Kernel::Vector_d Vector;
+typedef tc::Tangential_complex<
+Kernel, CGAL::Dimension_tag<2>,
+CGAL::Parallel_tag> TC;
+
+int main(void) {
+ const int INTRINSIC_DIM = 2;
+ const int AMBIENT_DIM = 3;
+ const int NUM_POINTS = 50;
+
+ Kernel k;
+
+ // Generate points on a 2-sphere
+ CGAL::Random_points_on_sphere_d<Point> generator(AMBIENT_DIM, 3.);
+ std::vector<Point> points;
+ points.reserve(NUM_POINTS);
+ for (int i = 0; i < NUM_POINTS; ++i)
+ points.push_back(*generator++);
+
+ // Sparsify the point set
+ std::vector<Point> sparsified_points;
+ subsampl::sparsify_point_set(k, points, 0.1 * 0.1,
+ std::back_inserter(sparsified_points));
+ sparsified_points.swap(points);
+
+ // Compute the TC
+ TC tc(points, INTRINSIC_DIM, k);
+ tc.compute_tangential_complex();
+
+ // Try to fix inconsistencies. Give it 10 seconds to succeed
+ tc.fix_inconsistencies_using_perturbation(0.05, 10);
+
+ // Export the TC into a Simplex_tree
+ Gudhi::Simplex_tree<> stree;
+ tc.create_complex(stree);
+
+ return 0;
+}
diff --git a/src/Tangential_complex/include/gudhi/Tangential_complex.h b/src/Tangential_complex/include/gudhi/Tangential_complex.h
new file mode 100644
index 00000000..cfc82eb1
--- /dev/null
+++ b/src/Tangential_complex/include/gudhi/Tangential_complex.h
@@ -0,0 +1,2276 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Clement Jamin
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef TANGENTIAL_COMPLEX_H_
+#define TANGENTIAL_COMPLEX_H_
+
+#include <gudhi/Tangential_complex/config.h>
+#include <gudhi/Tangential_complex/Simplicial_complex.h>
+#include <gudhi/Tangential_complex/utilities.h>
+#include <gudhi/Kd_tree_search.h>
+#include <gudhi/console_color.h>
+#include <gudhi/Clock.h>
+#include <gudhi/Simplex_tree.h>
+
+#include <CGAL/Default.h>
+#include <CGAL/Dimension.h>
+#include <CGAL/function_objects.h> // for CGAL::Identity
+#include <CGAL/Epick_d.h>
+#include <CGAL/Regular_triangulation_traits_adapter.h>
+#include <CGAL/Regular_triangulation.h>
+#include <CGAL/Delaunay_triangulation.h>
+#include <CGAL/Combination_enumerator.h>
+#include <CGAL/point_generators_d.h>
+
+#include <Eigen/Core>
+#include <Eigen/Eigen>
+
+#include <boost/optional.hpp>
+#include <boost/iterator/transform_iterator.hpp>
+#include <boost/range/adaptor/transformed.hpp>
+#include <boost/range/counting_range.hpp>
+#include <boost/math/special_functions/factorials.hpp>
+#include <boost/container/flat_set.hpp>
+
+#include <tuple>
+#include <vector>
+#include <set>
+#include <utility>
+#include <sstream>
+#include <iostream>
+#include <limits>
+#include <algorithm>
+#include <functional>
+#include <iterator>
+#include <cmath> // for std::sqrt
+#include <string>
+#include <cstddef> // for std::size_t
+
+#ifdef GUDHI_USE_TBB
+#include <tbb/parallel_for.h>
+#include <tbb/combinable.h>
+#include <tbb/mutex.h>
+#endif
+
+// #define GUDHI_TC_EXPORT_NORMALS // Only for 3D surfaces (k=2, d=3)
+
+namespace sps = Gudhi::spatial_searching;
+
+namespace Gudhi {
+
+namespace tangential_complex {
+
+using namespace internal;
+
+class Vertex_data {
+ public:
+ Vertex_data(std::size_t data = (std::numeric_limits<std::size_t>::max)())
+ : m_data(data) { }
+
+ operator std::size_t() {
+ return m_data;
+ }
+
+ operator std::size_t() const {
+ return m_data;
+ }
+
+ private:
+ std::size_t m_data;
+};
+
+/**
+ * \class Tangential_complex Tangential_complex.h gudhi/Tangential_complex.h
+ * \brief Tangential complex data structure.
+ *
+ * \ingroup tangential_complex
+ *
+ * \details
+ * The class Tangential_complex represents a tangential complex.
+ * After the computation of the complex, an optional post-processing called perturbation can
+ * be run to attempt to remove inconsistencies.
+ *
+ * \tparam Kernel_ requires a <a target="_blank"
+ * href="http://doc.cgal.org/latest/Kernel_d/classCGAL_1_1Epick__d.html">CGAL::Epick_d</a> class, which
+ * can be static if you know the ambient dimension at compile-time, or dynamic if you don't.
+ * \tparam DimensionTag can be either <a target="_blank"
+ * href="http://doc.cgal.org/latest/Kernel_23/classCGAL_1_1Dimension__tag.html">Dimension_tag<d></a>
+ * if you know the intrinsic dimension at compile-time,
+ * or <a target="_blank"
+ * href="http://doc.cgal.org/latest/Kernel_23/classCGAL_1_1Dynamic__dimension__tag.html">CGAL::Dynamic_dimension_tag</a>
+ * if you don't.
+ * \tparam Concurrency_tag enables sequential versus parallel computation. Possible values are `CGAL::Parallel_tag` (the default) and `CGAL::Sequential_tag`.
+ * \tparam Triangulation_ is the type used for storing the local regular triangulations. We highly recommend using the default value (`CGAL::Regular_triangulation`).
+ *
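+ *
+ * A possible instantiation, assuming the dimensions are only known at run-time (this mirrors the basic
+ * example shipped with this module; the static-dimension variant appears in the perturbation example):
+ * \code{.cpp}
+ * typedef CGAL::Epick_d<CGAL::Dynamic_dimension_tag> Kernel;
+ * typedef Gudhi::tangential_complex::Tangential_complex<
+ *     Kernel, CGAL::Dynamic_dimension_tag, CGAL::Parallel_tag> TC;
+ * \endcode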
+ */
+template
+<
+ typename Kernel_, // ambient kernel
+ typename DimensionTag, // intrinsic dimension
+ typename Concurrency_tag = CGAL::Parallel_tag,
+ typename Triangulation_ = CGAL::Default
+>
+class Tangential_complex {
+ typedef Kernel_ K;
+ typedef typename K::FT FT;
+ typedef typename K::Point_d Point;
+ typedef typename K::Weighted_point_d Weighted_point;
+ typedef typename K::Vector_d Vector;
+
+ typedef typename CGAL::Default::Get
+ <
+ Triangulation_,
+ CGAL::Regular_triangulation
+ <
+ CGAL::Epick_d<DimensionTag>,
+ CGAL::Triangulation_data_structure
+ <
+ typename CGAL::Epick_d<DimensionTag>::Dimension,
+ CGAL::Triangulation_vertex
+ <
+ CGAL::Regular_triangulation_traits_adapter< CGAL::Epick_d<DimensionTag> >, Vertex_data
+ >,
+ CGAL::Triangulation_full_cell<CGAL::Regular_triangulation_traits_adapter< CGAL::Epick_d<DimensionTag> > >
+ >
+ >
+ >::type Triangulation;
+ typedef typename Triangulation::Geom_traits Tr_traits;
+ typedef typename Triangulation::Weighted_point Tr_point;
+ typedef typename Triangulation::Bare_point Tr_bare_point;
+ typedef typename Triangulation::Vertex_handle Tr_vertex_handle;
+ typedef typename Triangulation::Full_cell_handle Tr_full_cell_handle;
+ typedef typename Tr_traits::Vector_d Tr_vector;
+
+#if defined(GUDHI_USE_TBB)
+ typedef tbb::mutex Mutex_for_perturb;
+ typedef Vector Translation_for_perturb;
+ typedef std::vector<Atomic_wrapper<FT> > Weights;
+#else
+ typedef Vector Translation_for_perturb;
+ typedef std::vector<FT> Weights;
+#endif
+ typedef std::vector<Translation_for_perturb> Translations_for_perturb;
+
+ // Store a local triangulation and a handle to its center vertex
+
+ struct Tr_and_VH {
+ public:
+ Tr_and_VH()
+ : m_tr(NULL) { }
+
+ Tr_and_VH(int dim)
+ : m_tr(new Triangulation(dim)) { }
+
+ ~Tr_and_VH() {
+ destroy_triangulation();
+ }
+
+ Triangulation & construct_triangulation(int dim) {
+ delete m_tr;
+ m_tr = new Triangulation(dim);
+ return tr();
+ }
+
+ void destroy_triangulation() {
+ delete m_tr;
+ m_tr = NULL;
+ }
+
+ Triangulation & tr() {
+ return *m_tr;
+ }
+
+ Triangulation const& tr() const {
+ return *m_tr;
+ }
+
+ Tr_vertex_handle const& center_vertex() const {
+ return m_center_vertex;
+ }
+
+ Tr_vertex_handle & center_vertex() {
+ return m_center_vertex;
+ }
+
+ private:
+ Triangulation* m_tr;
+ Tr_vertex_handle m_center_vertex;
+ };
+
+ public:
+ typedef Basis<K> Tangent_space_basis;
+ typedef Basis<K> Orthogonal_space_basis;
+ typedef std::vector<Tangent_space_basis> TS_container;
+ typedef std::vector<Orthogonal_space_basis> OS_container;
+
+ typedef std::vector<Point> Points;
+
+ typedef boost::container::flat_set<std::size_t> Simplex;
+ typedef std::set<Simplex> Simplex_set;
+
+ private:
+ typedef sps::Kd_tree_search<K, Points> Points_ds;
+ typedef typename Points_ds::KNS_range KNS_range;
+ typedef typename Points_ds::INS_range INS_range;
+
+ typedef std::vector<Tr_and_VH> Tr_container;
+ typedef std::vector<Vector> Vectors;
+
+ // An Incident_simplex is the list of the vertex indices
+ // except the center vertex
+ typedef boost::container::flat_set<std::size_t> Incident_simplex;
+ typedef std::vector<Incident_simplex> Star;
+ typedef std::vector<Star> Stars_container;
+
+ // For transform_iterator
+
+ static const Tr_point &vertex_handle_to_point(Tr_vertex_handle vh) {
+ return vh->point();
+ }
+
+ template <typename P, typename VH>
+ static const P &vertex_handle_to_point(VH vh) {
+ return vh->point();
+ }
+
+ public:
+ typedef internal::Simplicial_complex Simplicial_complex;
+
+ /** \brief Constructor from a range of points.
+ * Points are copied into the instance, and a search data structure is initialized.
+   * Note that the complex is not computed: `compute_tangential_complex` must be called after the creation
+ * of the object.
+ *
+ * @param[in] points Range of points (`Point_range::value_type` must be the same as `Kernel_::Point_d`).
+ * @param[in] intrinsic_dimension Intrinsic dimension of the manifold.
+ * @param[in] k Kernel instance.
+ */
+ template <typename Point_range>
+ Tangential_complex(Point_range points,
+ int intrinsic_dimension,
+#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM
+ InputIterator first_for_tse, InputIterator last_for_tse,
+#endif
+ const K &k = K()
+ )
+ : m_k(k),
+ m_intrinsic_dim(intrinsic_dimension),
+ m_ambient_dim(points.empty() ? 0 : k.point_dimension_d_object()(*points.begin())),
+ m_points(points.begin(), points.end()),
+ m_weights(m_points.size(), FT(0))
+#if defined(GUDHI_USE_TBB) && defined(GUDHI_TC_PERTURB_POSITION)
+ , m_p_perturb_mutexes(NULL)
+#endif
+ , m_points_ds(m_points)
+ , m_last_max_perturb(0.)
+ , m_are_tangent_spaces_computed(m_points.size(), false)
+ , m_tangent_spaces(m_points.size(), Tangent_space_basis())
+#ifdef GUDHI_TC_EXPORT_NORMALS
+ , m_orth_spaces(m_points.size(), Orthogonal_space_basis())
+#endif
+#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM
+ , m_points_for_tse(first_for_tse, last_for_tse)
+ , m_points_ds_for_tse(m_points_for_tse)
+#endif
+ { }
+
+ /// Destructor
+ ~Tangential_complex() {
+#if defined(GUDHI_USE_TBB) && defined(GUDHI_TC_PERTURB_POSITION)
+ delete [] m_p_perturb_mutexes;
+#endif
+ }
+
+ /// Returns the intrinsic dimension of the manifold.
+ int intrinsic_dimension() const {
+ return m_intrinsic_dim;
+ }
+
+ /// Returns the ambient dimension.
+ int ambient_dimension() const {
+ return m_ambient_dim;
+ }
+
+ Points const& points() const {
+ return m_points;
+ }
+
+ /** \brief Returns the point corresponding to the vertex given as parameter.
+ *
+ * @param[in] vertex Vertex handle of the point to retrieve.
+ * @return The point found.
+ */
+ Point get_point(std::size_t vertex) const {
+ return m_points[vertex];
+ }
+
+ /** \brief Returns the perturbed position of the point corresponding to the vertex given as parameter.
+ *
+ * @param[in] vertex Vertex handle of the point to retrieve.
+ * @return The perturbed position of the point found.
+ */
+ Point get_perturbed_point(std::size_t vertex) const {
+ return compute_perturbed_point(vertex);
+ }
+
+  /// Returns the number of vertices.
+ std::size_t number_of_vertices() const {
+ return m_points.size();
+ }
+
+ void set_weights(const Weights& weights) {
+ m_weights = weights;
+ }
+
+ void set_tangent_planes(const TS_container& tangent_spaces
+#ifdef GUDHI_TC_EXPORT_NORMALS
+ , const OS_container& orthogonal_spaces
+#endif
+ ) {
+#ifdef GUDHI_TC_EXPORT_NORMALS
+ GUDHI_CHECK(
+ m_points.size() == tangent_spaces.size()
+ && m_points.size() == orthogonal_spaces.size(),
+ std::logic_error("Wrong sizes"));
+#else
+ GUDHI_CHECK(
+ m_points.size() == tangent_spaces.size(),
+ std::logic_error("Wrong sizes"));
+#endif
+ m_tangent_spaces = tangent_spaces;
+#ifdef GUDHI_TC_EXPORT_NORMALS
+ m_orth_spaces = orthogonal_spaces;
+#endif
+ for (std::size_t i = 0; i < m_points.size(); ++i)
+ m_are_tangent_spaces_computed[i] = true;
+ }
+
+ /// Computes the tangential complex.
+ void compute_tangential_complex() {
+#ifdef GUDHI_TC_PERFORM_EXTRA_CHECKS
+ std::cerr << red << "WARNING: GUDHI_TC_PERFORM_EXTRA_CHECKS is defined. "
+ << "Computation might be slower than usual.\n" << white;
+#endif
+
+#if defined(GUDHI_TC_PROFILING) && defined(GUDHI_USE_TBB)
+ Gudhi::Clock t;
+#endif
+
+ // We need to do that because we don't want the container to copy the
+ // already-computed triangulations (while resizing) since it would
+ // invalidate the vertex handles stored beside the triangulations
+ m_triangulations.resize(m_points.size());
+ m_stars.resize(m_points.size());
+ m_squared_star_spheres_radii_incl_margin.resize(m_points.size(), FT(-1));
+#ifdef GUDHI_TC_PERTURB_POSITION
+ if (m_points.empty())
+ m_translations.clear();
+ else
+ m_translations.resize(m_points.size(),
+ m_k.construct_vector_d_object()(m_ambient_dim));
+#if defined(GUDHI_USE_TBB)
+ delete [] m_p_perturb_mutexes;
+ m_p_perturb_mutexes = new Mutex_for_perturb[m_points.size()];
+#endif
+#endif
+
+#ifdef GUDHI_USE_TBB
+ // Parallel
+ if (boost::is_convertible<Concurrency_tag, CGAL::Parallel_tag>::value) {
+ tbb::parallel_for(tbb::blocked_range<size_t>(0, m_points.size()),
+ Compute_tangent_triangulation(*this));
+ } else {
+#endif // GUDHI_USE_TBB
+ // Sequential
+ for (std::size_t i = 0; i < m_points.size(); ++i)
+ compute_tangent_triangulation(i);
+#ifdef GUDHI_USE_TBB
+ }
+#endif // GUDHI_USE_TBB
+
+#if defined(GUDHI_TC_PROFILING) && defined(GUDHI_USE_TBB)
+ t.end();
+ std::cerr << "Tangential complex computed in " << t.num_seconds()
+ << " seconds.\n";
+#endif
+ }
+
+ /// \brief Type returned by `Tangential_complex::fix_inconsistencies_using_perturbation`.
+ struct Fix_inconsistencies_info {
+    /// `true` if all inconsistencies could be removed, `false` if the time limit was reached before they could all be removed
+ bool success = false;
+ /// number of steps performed
+ unsigned int num_steps = 0;
+ /// initial number of inconsistent stars
+ std::size_t initial_num_inconsistent_stars = 0;
+ /// best number of inconsistent stars during the process
+ std::size_t best_num_inconsistent_stars = 0;
+ /// final number of inconsistent stars
+ std::size_t final_num_inconsistent_stars = 0;
+ };
+
+ /** \brief Attempts to fix inconsistencies by perturbing the point positions.
+ *
+ * @param[in] max_perturb Maximum length of the translations used by the perturbation.
+ * @param[in] time_limit Time limit in seconds. If -1, no time limit is set.
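+   *
+   * A minimal usage sketch, assuming `tc` is an already computed complex (as in the perturbation
+   * example shipped with this module):
+   * \code{.cpp}
+   * auto info = tc.fix_inconsistencies_using_perturbation(0.05, 10.);
+   * if (!info.success)
+   *   std::cerr << info.final_num_inconsistent_stars << " star(s) are still inconsistent\n";
+   * \endcode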
+ */
+ Fix_inconsistencies_info fix_inconsistencies_using_perturbation(double max_perturb, double time_limit = -1.) {
+ Fix_inconsistencies_info info;
+
+ if (time_limit == 0.)
+ return info;
+
+ Gudhi::Clock t;
+
+#ifdef GUDHI_TC_SHOW_DETAILED_STATS_FOR_INCONSISTENCIES
+ std::tuple<std::size_t, std::size_t, std::size_t> stats_before =
+ number_of_inconsistent_simplices(false);
+
+ if (std::get<1>(stats_before) == 0) {
+#ifdef DEBUG_TRACES
+ std::cerr << "Nothing to fix.\n";
+#endif
+ info.success = false;
+ return info;
+ }
+#endif // GUDHI_TC_SHOW_DETAILED_STATS_FOR_INCONSISTENCIES
+
+ m_last_max_perturb = max_perturb;
+
+ bool done = false;
+ info.best_num_inconsistent_stars = m_triangulations.size();
+ info.num_steps = 0;
+ while (!done) {
+#ifdef GUDHI_TC_SHOW_DETAILED_STATS_FOR_INCONSISTENCIES
+ std::cerr
+ << "\nBefore fix step:\n"
+ << " * Total number of simplices in stars (incl. duplicates): "
+ << std::get<0>(stats_before) << "\n"
+ << " * Num inconsistent simplices in stars (incl. duplicates): "
+ << red << std::get<1>(stats_before) << white << " ("
+ << 100. * std::get<1>(stats_before) / std::get<0>(stats_before) << "%)\n"
+ << " * Number of stars containing inconsistent simplices: "
+ << red << std::get<2>(stats_before) << white << " ("
+ << 100. * std::get<2>(stats_before) / m_points.size() << "%)\n";
+#endif
+
+#if defined(DEBUG_TRACES) || defined(GUDHI_TC_PROFILING)
+ std::cerr << yellow
+ << "\nAttempt to fix inconsistencies using perturbations - step #"
+ << info.num_steps + 1 << "... " << white;
+#endif
+
+ std::size_t num_inconsistent_stars = 0;
+ std::vector<std::size_t> updated_points;
+
+#ifdef GUDHI_TC_PROFILING
+ Gudhi::Clock t_fix_step;
+#endif
+
+ // Parallel
+#if defined(GUDHI_USE_TBB)
+ if (boost::is_convertible<Concurrency_tag, CGAL::Parallel_tag>::value) {
+ tbb::combinable<std::size_t> num_inconsistencies;
+ tbb::combinable<std::vector<std::size_t> > tls_updated_points;
+ tbb::parallel_for(
+ tbb::blocked_range<size_t>(0, m_triangulations.size()),
+ Try_to_solve_inconsistencies_in_a_local_triangulation(*this, max_perturb,
+ num_inconsistencies,
+ tls_updated_points));
+ num_inconsistent_stars =
+ num_inconsistencies.combine(std::plus<std::size_t>());
+ updated_points = tls_updated_points.combine(
+ [](std::vector<std::size_t> const& x,
+ std::vector<std::size_t> const& y) {
+ std::vector<std::size_t> res;
+ res.reserve(x.size() + y.size());
+ res.insert(res.end(), x.begin(), x.end());
+ res.insert(res.end(), y.begin(), y.end());
+ return res;
+ });
+ } else {
+#endif // GUDHI_USE_TBB
+ // Sequential
+ for (std::size_t i = 0; i < m_triangulations.size(); ++i) {
+ num_inconsistent_stars +=
+ try_to_solve_inconsistencies_in_a_local_triangulation(i, max_perturb,
+ std::back_inserter(updated_points));
+ }
+#if defined(GUDHI_USE_TBB)
+ }
+#endif // GUDHI_USE_TBB
+
+#ifdef GUDHI_TC_PROFILING
+ t_fix_step.end();
+#endif
+
+#if defined(GUDHI_TC_SHOW_DETAILED_STATS_FOR_INCONSISTENCIES) || defined(DEBUG_TRACES)
+ std::cerr
+ << "\nEncountered during fix:\n"
+ << " * Num stars containing inconsistent simplices: "
+ << red << num_inconsistent_stars << white
+ << " (" << 100. * num_inconsistent_stars / m_points.size() << "%)\n";
+#endif
+
+#ifdef GUDHI_TC_PROFILING
+ std::cerr << yellow << "done in " << t_fix_step.num_seconds()
+ << " seconds.\n" << white;
+#elif defined(DEBUG_TRACES)
+ std::cerr << yellow << "done.\n" << white;
+#endif
+
+ if (num_inconsistent_stars > 0)
+ refresh_tangential_complex(updated_points);
+
+#ifdef GUDHI_TC_PERFORM_EXTRA_CHECKS
+ // Confirm that all stars were actually refreshed
+ std::size_t num_inc_1 =
+ std::get<1>(number_of_inconsistent_simplices(false));
+ refresh_tangential_complex();
+ std::size_t num_inc_2 =
+ std::get<1>(number_of_inconsistent_simplices(false));
+ if (num_inc_1 != num_inc_2)
+ std::cerr << red << "REFRESHMENT CHECK: FAILED. ("
+ << num_inc_1 << " vs " << num_inc_2 << ")\n" << white;
+ else
+ std::cerr << green << "REFRESHMENT CHECK: PASSED.\n" << white;
+#endif
+
+#ifdef GUDHI_TC_SHOW_DETAILED_STATS_FOR_INCONSISTENCIES
+ std::tuple<std::size_t, std::size_t, std::size_t> stats_after =
+ number_of_inconsistent_simplices(false);
+
+ std::cerr
+ << "\nAfter fix:\n"
+ << " * Total number of simplices in stars (incl. duplicates): "
+ << std::get<0>(stats_after) << "\n"
+ << " * Num inconsistent simplices in stars (incl. duplicates): "
+ << red << std::get<1>(stats_after) << white << " ("
+ << 100. * std::get<1>(stats_after) / std::get<0>(stats_after) << "%)\n"
+ << " * Number of stars containing inconsistent simplices: "
+ << red << std::get<2>(stats_after) << white << " ("
+ << 100. * std::get<2>(stats_after) / m_points.size() << "%)\n";
+
+ stats_before = stats_after;
+#endif
+
+ if (info.num_steps == 0)
+ info.initial_num_inconsistent_stars = num_inconsistent_stars;
+
+ if (num_inconsistent_stars < info.best_num_inconsistent_stars)
+ info.best_num_inconsistent_stars = num_inconsistent_stars;
+
+ info.final_num_inconsistent_stars = num_inconsistent_stars;
+
+ done = (num_inconsistent_stars == 0);
+ if (!done) {
+ ++info.num_steps;
+ if (time_limit > 0. && t.num_seconds() > time_limit) {
+#ifdef DEBUG_TRACES
+ std::cerr << red << "Time limit reached.\n" << white;
+#endif
+ info.success = false;
+ return info;
+ }
+ }
+ }
+
+#ifdef DEBUG_TRACES
+ std::cerr << green << "Fixed!\n" << white;
+#endif
+ info.success = true;
+ return info;
+ }
+
+ /// \brief Type returned by `Tangential_complex::number_of_inconsistent_simplices`.
+ struct Num_inconsistencies {
+ /// Total number of simplices in stars (including duplicates that appear in several stars)
+ std::size_t num_simplices = 0;
+ /// Number of inconsistent simplices
+ std::size_t num_inconsistent_simplices = 0;
+ /// Number of stars containing at least one inconsistent simplex
+ std::size_t num_inconsistent_stars = 0;
+ };
+
+  /// Returns the number of inconsistencies.
+  /// @param[in] verbose If true, outputs a message to `std::cerr`.
+ Num_inconsistencies
+ number_of_inconsistent_simplices(
+#ifdef DEBUG_TRACES
+ bool verbose = true
+#else
+ bool verbose = false
+#endif
+ ) const {
+ Num_inconsistencies stats;
+
+ // For each triangulation
+ for (std::size_t idx = 0; idx < m_points.size(); ++idx) {
+ bool is_star_inconsistent = false;
+
+ // For each cell
+ Star::const_iterator it_inc_simplex = m_stars[idx].begin();
+ Star::const_iterator it_inc_simplex_end = m_stars[idx].end();
+ for (; it_inc_simplex != it_inc_simplex_end; ++it_inc_simplex) {
+ // Don't check infinite cells
+ if (is_infinite(*it_inc_simplex))
+ continue;
+
+ Simplex c = *it_inc_simplex;
+ c.insert(idx); // Add the missing index
+
+ if (!is_simplex_consistent(c)) {
+ ++stats.num_inconsistent_simplices;
+ is_star_inconsistent = true;
+ }
+
+ ++stats.num_simplices;
+ }
+ stats.num_inconsistent_stars += is_star_inconsistent;
+ }
+
+ if (verbose) {
+ std::cerr
+ << "\n==========================================================\n"
+ << "Inconsistencies:\n"
+ << " * Total number of simplices in stars (incl. duplicates): "
+ << stats.num_simplices << "\n"
+ << " * Number of inconsistent simplices in stars (incl. duplicates): "
+ << stats.num_inconsistent_simplices << " ("
+ << 100. * stats.num_inconsistent_simplices / stats.num_simplices << "%)\n"
+ << " * Number of stars containing inconsistent simplices: "
+ << stats.num_inconsistent_stars << " ("
+ << 100. * stats.num_inconsistent_stars / m_points.size() << "%)\n"
+ << "==========================================================\n";
+ }
+
+ return stats;
+ }
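+
+ // Usage sketch (illustration only, not part of the class; `tc` stands for an
+ // already-computed Tangential_complex):
+ //   auto stats = tc.number_of_inconsistent_simplices();
+ //   if (stats.num_inconsistent_stars > 0)
+ //     std::cerr << stats.num_inconsistent_simplices
+ //               << " simplices are inconsistent\n";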
+
+ /** \brief Exports the complex into a Simplex_tree.
+ *
+ * \tparam Simplex_tree_ must be a `Simplex_tree`.
+ *
+ * @param[out] tree The result, where each `Vertex_handle` is the index of the
+ * corresponding point in the range provided to the constructor (it can also be
+ * retrieved through the `Tangential_complex::get_point` function).
+ * @param[in] export_inconsistent_simplices Whether to also export the inconsistent simplices.
+ * @return The maximal dimension of the simplices.
+ */
+ template <typename Simplex_tree_>
+ int create_complex(Simplex_tree_ &tree
+ , bool export_inconsistent_simplices = true
+ /// \cond ADVANCED_PARAMETERS
+ , bool export_infinite_simplices = false
+ , Simplex_set *p_inconsistent_simplices = NULL
+ /// \endcond
+ ) const {
+#if defined(DEBUG_TRACES) || defined(GUDHI_TC_PROFILING)
+ std::cerr << yellow
+ << "\nExporting the TC as a Simplex_tree... " << white;
+#endif
+#ifdef GUDHI_TC_PROFILING
+ Gudhi::Clock t;
+#endif
+
+ int max_dim = -1;
+
+ // For each triangulation
+ for (std::size_t idx = 0; idx < m_points.size(); ++idx) {
+ // For each cell of the star
+ Star::const_iterator it_inc_simplex = m_stars[idx].begin();
+ Star::const_iterator it_inc_simplex_end = m_stars[idx].end();
+ for (; it_inc_simplex != it_inc_simplex_end; ++it_inc_simplex) {
+ Simplex c = *it_inc_simplex;
+
+ // Don't export infinite cells
+ if (!export_infinite_simplices && is_infinite(c))
+ continue;
+
+ if (!export_inconsistent_simplices && !is_simplex_consistent(c))
+ continue;
+
+ if (static_cast<int> (c.size()) > max_dim)
+ max_dim = static_cast<int> (c.size());
+ // Add the missing center vertex
+ c.insert(idx);
+
+ // Try to insert the simplex
+ bool inserted = tree.insert_simplex_and_subfaces(c).second;
+
+ // Inconsistent?
+ if (p_inconsistent_simplices && inserted && !is_simplex_consistent(c)) {
+ p_inconsistent_simplices->insert(c);
+ }
+ }
+ }
+
+#ifdef GUDHI_TC_PROFILING
+ t.end();
+ std::cerr << yellow << "done in " << t.num_seconds()
+ << " seconds.\n" << white;
+#elif defined(DEBUG_TRACES)
+ std::cerr << yellow << "done.\n" << white;
+#endif
+
+ return max_dim;
+ }
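+
+ // Usage sketch (illustration only; `tc` is a hypothetical, already-computed
+ // Tangential_complex):
+ //   Gudhi::Simplex_tree<> st;
+ //   int max_dim = tc.create_complex(st);  // default: inconsistent simplices
+ //                                         // are exported too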
+
+ // First clears the complex then exports the TC into it
+ // Returns the max dimension of the simplices
+ // check_lower_and_higher_dim_simplices : 0 (false), 1 (true), 2 (auto)
+ // If the check is enabled, the function:
+ // - won't insert the simplex if it is already in a higher dim simplex
+ // - will erase any lower-dim simplices that are faces of the new simplex
+ // "auto" (= 2) will enable the check as a soon as it encounters a
+ // simplex whose dimension is different from the previous ones.
+ // N.B.: The check is quite expensive.
+
+ int create_complex(Simplicial_complex &complex,
+ bool export_inconsistent_simplices = true,
+ bool export_infinite_simplices = false,
+ int check_lower_and_higher_dim_simplices = 2,
+ Simplex_set *p_inconsistent_simplices = NULL) const {
+#if defined(DEBUG_TRACES) || defined(GUDHI_TC_PROFILING)
+ std::cerr << yellow
+ << "\nExporting the TC as a Simplicial_complex... " << white;
+#endif
+#ifdef GUDHI_TC_PROFILING
+ Gudhi::Clock t;
+#endif
+
+ int max_dim = -1;
+ complex.clear();
+
+ // For each triangulation
+ for (std::size_t idx = 0; idx < m_points.size(); ++idx) {
+ // For each cell of the star
+ Star::const_iterator it_inc_simplex = m_stars[idx].begin();
+ Star::const_iterator it_inc_simplex_end = m_stars[idx].end();
+ for (; it_inc_simplex != it_inc_simplex_end; ++it_inc_simplex) {
+ Simplex c = *it_inc_simplex;
+
+ // Don't export infinite cells
+ if (!export_infinite_simplices && is_infinite(c))
+ continue;
+
+ if (!export_inconsistent_simplices && !is_simplex_consistent(c))
+ continue;
+
+ // Unusual simplex dim?
+ if (check_lower_and_higher_dim_simplices == 2
+ && max_dim != -1
+ && static_cast<int> (c.size()) != max_dim) {
+ // Let's activate the check
+ std::cerr << red <<
+ "Info: check_lower_and_higher_dim_simplices ACTIVATED. "
+ "Export might be take some time...\n" << white;
+ check_lower_and_higher_dim_simplices = 1;
+ }
+
+ if (static_cast<int> (c.size()) > max_dim)
+ max_dim = static_cast<int> (c.size());
+ // Add the missing center vertex
+ c.insert(idx);
+
+ // Try to insert the simplex
+ bool added =
+ complex.add_simplex(c, check_lower_and_higher_dim_simplices == 1);
+
+ // Inconsistent?
+ if (p_inconsistent_simplices && added && !is_simplex_consistent(c)) {
+ p_inconsistent_simplices->insert(c);
+ }
+ }
+ }
+
+#ifdef GUDHI_TC_PROFILING
+ t.end();
+ std::cerr << yellow << "done in " << t.num_seconds()
+ << " seconds.\n" << white;
+#elif defined(DEBUG_TRACES)
+ std::cerr << yellow << "done.\n" << white;
+#endif
+
+ return max_dim;
+ }
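+
+ // Usage sketch (illustration only; `tc` is hypothetical and `Simplicial_complex`
+ // refers to the same type as the parameter above):
+ //   Simplicial_complex sc;
+ //   tc.create_complex(sc, /*export_inconsistent_simplices=*/true,
+ //                     /*export_infinite_simplices=*/false,
+ //                     /*check_lower_and_higher_dim_simplices=*/0);  // skip the expensive check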
+
+ template<typename ProjectionFunctor = CGAL::Identity<Point> >
+ std::ostream &export_to_off(
+ const Simplicial_complex &complex, std::ostream & os,
+ Simplex_set const *p_simpl_to_color_in_red = NULL,
+ Simplex_set const *p_simpl_to_color_in_green = NULL,
+ Simplex_set const *p_simpl_to_color_in_blue = NULL,
+ ProjectionFunctor const& point_projection = ProjectionFunctor())
+ const {
+ return export_to_off(
+ os, false, p_simpl_to_color_in_red, p_simpl_to_color_in_green,
+ p_simpl_to_color_in_blue, &complex, point_projection);
+ }
+
+ template<typename ProjectionFunctor = CGAL::Identity<Point> >
+ std::ostream &export_to_off(
+ std::ostream & os, bool color_inconsistencies = false,
+ Simplex_set const *p_simpl_to_color_in_red = NULL,
+ Simplex_set const *p_simpl_to_color_in_green = NULL,
+ Simplex_set const *p_simpl_to_color_in_blue = NULL,
+ const Simplicial_complex *p_complex = NULL,
+ ProjectionFunctor const& point_projection = ProjectionFunctor()) const {
+ if (m_points.empty())
+ return os;
+
+ if (m_ambient_dim < 2) {
+ std::cerr << "Error: export_to_off => ambient dimension should be >= 2.\n";
+ os << "Error: export_to_off => ambient dimension should be >= 2.\n";
+ return os;
+ }
+ if (m_ambient_dim > 3) {
+ std::cerr << "Warning: export_to_off => ambient dimension should be "
+ "<= 3. Only the first 3 coordinates will be exported.\n";
+ }
+
+ if (m_intrinsic_dim < 1 || m_intrinsic_dim > 3) {
+ std::cerr << "Error: export_to_off => intrinsic dimension should be "
+ "between 1 and 3.\n";
+ os << "Error: export_to_off => intrinsic dimension should be "
+ "between 1 and 3.\n";
+ return os;
+ }
+
+ std::stringstream output;
+ std::size_t num_simplices, num_vertices;
+ export_vertices_to_off(output, num_vertices, false, point_projection);
+ if (p_complex) {
+ export_simplices_to_off(
+ *p_complex, output, num_simplices, p_simpl_to_color_in_red,
+ p_simpl_to_color_in_green, p_simpl_to_color_in_blue);
+ } else {
+ export_simplices_to_off(
+ output, num_simplices, color_inconsistencies, p_simpl_to_color_in_red,
+ p_simpl_to_color_in_green, p_simpl_to_color_in_blue);
+ }
+
+#ifdef GUDHI_TC_EXPORT_NORMALS
+ os << "N";
+#endif
+
+ os << "OFF \n"
+ << num_vertices << " "
+ << num_simplices << " "
+ << "0 \n"
+ << output.str();
+
+ return os;
+ }
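+
+ // Usage sketch (illustration only; the file name and `tc` are hypothetical):
+ //   std::ofstream off_stream("tc.off");
+ //   tc.export_to_off(off_stream, /*color_inconsistencies=*/true);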
+
+ private:
+ void refresh_tangential_complex() {
+#if defined(DEBUG_TRACES) || defined(GUDHI_TC_PROFILING)
+ std::cerr << yellow << "\nRefreshing TC... " << white;
+#endif
+
+#ifdef GUDHI_TC_PROFILING
+ Gudhi::Clock t;
+#endif
+#ifdef GUDHI_USE_TBB
+ // Parallel
+ if (boost::is_convertible<Concurrency_tag, CGAL::Parallel_tag>::value) {
+ tbb::parallel_for(tbb::blocked_range<size_t>(0, m_points.size()),
+ Compute_tangent_triangulation(*this));
+ } else {
+#endif // GUDHI_USE_TBB
+ // Sequential
+ for (std::size_t i = 0; i < m_points.size(); ++i)
+ compute_tangent_triangulation(i);
+#ifdef GUDHI_USE_TBB
+ }
+#endif // GUDHI_USE_TBB
+
+#ifdef GUDHI_TC_PROFILING
+ t.end();
+ std::cerr << yellow << "done in " << t.num_seconds()
+ << " seconds.\n" << white;
+#elif defined(DEBUG_TRACES)
+ std::cerr << yellow << "done.\n" << white;
+#endif
+ }
+
+ // If the list of perturbed points is provided, it is much faster
+ template <typename Point_indices_range>
+ void refresh_tangential_complex(
+ Point_indices_range const& perturbed_points_indices) {
+#if defined(DEBUG_TRACES) || defined(GUDHI_TC_PROFILING)
+ std::cerr << yellow << "\nRefreshing TC... " << white;
+#endif
+
+#ifdef GUDHI_TC_PROFILING
+ Gudhi::Clock t;
+#endif
+
+ // ANN tree containing only the perturbed points
+ Points_ds updated_pts_ds(m_points, perturbed_points_indices);
+
+#ifdef GUDHI_USE_TBB
+ // Parallel
+ if (boost::is_convertible<Concurrency_tag, CGAL::Parallel_tag>::value) {
+ tbb::parallel_for(tbb::blocked_range<size_t>(0, m_points.size()),
+ Refresh_tangent_triangulation(*this, updated_pts_ds));
+ } else {
+#endif // GUDHI_USE_TBB
+ // Sequential
+ for (std::size_t i = 0; i < m_points.size(); ++i)
+ refresh_tangent_triangulation(i, updated_pts_ds);
+#ifdef GUDHI_USE_TBB
+ }
+#endif // GUDHI_USE_TBB
+
+#ifdef GUDHI_TC_PROFILING
+ t.end();
+ std::cerr << yellow << "done in " << t.num_seconds()
+ << " seconds.\n" << white;
+#elif defined(DEBUG_TRACES)
+ std::cerr << yellow << "done.\n" << white;
+#endif
+ }
+
+ void export_inconsistent_stars_to_OFF_files(std::string const& filename_base) const {
+ // For each triangulation
+ for (std::size_t idx = 0; idx < m_points.size(); ++idx) {
+ // We build a SC along the way in case it's inconsistent
+ Simplicial_complex sc;
+ // For each cell
+ bool is_inconsistent = false;
+ Star::const_iterator it_inc_simplex = m_stars[idx].begin();
+ Star::const_iterator it_inc_simplex_end = m_stars[idx].end();
+ for (; it_inc_simplex != it_inc_simplex_end;
+ ++it_inc_simplex) {
+ // Skip infinite cells
+ if (is_infinite(*it_inc_simplex))
+ continue;
+
+ Simplex c = *it_inc_simplex;
+ c.insert(idx); // Add the missing index
+
+ sc.add_simplex(c);
+
+ // If we do not already know this star is inconsistent, test it
+ if (!is_inconsistent && !is_simplex_consistent(c))
+ is_inconsistent = true;
+ }
+
+ if (is_inconsistent) {
+ // Export star to OFF file
+ std::stringstream output_filename;
+ output_filename << filename_base << "_" << idx << ".off";
+ std::ofstream off_stream(output_filename.str().c_str());
+ export_to_off(sc, off_stream);
+ }
+ }
+ }
+
+ class Compare_distance_to_ref_point {
+ public:
+ Compare_distance_to_ref_point(Point const& ref, K const& k)
+ : m_ref(ref), m_k(k) { }
+
+ bool operator()(Point const& p1, Point const& p2) {
+ typename K::Squared_distance_d sqdist =
+ m_k.squared_distance_d_object();
+ return sqdist(p1, m_ref) < sqdist(p2, m_ref);
+ }
+
+ private:
+ Point const& m_ref;
+ K const& m_k;
+ };
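+
+ // Usage sketch (illustration only; `points` and `ref` are hypothetical):
+ //   std::sort(points.begin(), points.end(),
+ //             Compare_distance_to_ref_point(ref, m_k));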
+
+#ifdef GUDHI_USE_TBB
+ // Functor for compute_tangential_complex function
+ class Compute_tangent_triangulation {
+ Tangential_complex & m_tc;
+
+ public:
+ // Constructor
+ Compute_tangent_triangulation(Tangential_complex &tc)
+ : m_tc(tc) { }
+
+ // Constructor
+ Compute_tangent_triangulation(const Compute_tangent_triangulation &ctt)
+ : m_tc(ctt.m_tc) { }
+
+ // operator()
+ void operator()(const tbb::blocked_range<size_t>& r) const {
+ for (size_t i = r.begin(); i != r.end(); ++i)
+ m_tc.compute_tangent_triangulation(i);
+ }
+ };
+
+ // Functor for refresh_tangential_complex function
+ class Refresh_tangent_triangulation {
+ Tangential_complex & m_tc;
+ Points_ds const& m_updated_pts_ds;
+
+ public:
+ // Constructor
+ Refresh_tangent_triangulation(Tangential_complex &tc, Points_ds const& updated_pts_ds)
+ : m_tc(tc), m_updated_pts_ds(updated_pts_ds) { }
+
+ // Constructor
+ Refresh_tangent_triangulation(const Refresh_tangent_triangulation &ctt)
+ : m_tc(ctt.m_tc), m_updated_pts_ds(ctt.m_updated_pts_ds) { }
+
+ // operator()
+ void operator()(const tbb::blocked_range<size_t>& r) const {
+ for (size_t i = r.begin(); i != r.end(); ++i)
+ m_tc.refresh_tangent_triangulation(i, m_updated_pts_ds);
+ }
+ };
+#endif // GUDHI_USE_TBB
+
+ bool is_infinite(Simplex const& s) const {
+ return *s.rbegin() == (std::numeric_limits<std::size_t>::max)();
+ }
+
+ // Output: "triangulation" is a Regular Triangulation containing at least the
+ // star of "center_pt"
+ // Returns the handle of the center vertex
+ Tr_vertex_handle compute_star(std::size_t i, const Point &center_pt, const Tangent_space_basis &tsb,
+ Triangulation &triangulation, bool verbose = false) {
+ int tangent_space_dim = tsb.dimension();
+ const Tr_traits &local_tr_traits = triangulation.geom_traits();
+ Tr_vertex_handle center_vertex;
+
+ // Kernel functor & objects
+ typename K::Squared_distance_d k_sqdist = m_k.squared_distance_d_object();
+
+ // Triangulation's traits functor & objects
+ typename Tr_traits::Compute_weight_d point_weight = local_tr_traits.compute_weight_d_object();
+ typename Tr_traits::Power_center_d power_center = local_tr_traits.power_center_d_object();
+
+ //***************************************************
+ // Build a minimal triangulation in the tangent space
+ // (we only need the star of p)
+ //***************************************************
+
+ // Insert p
+ Tr_point proj_wp;
+ if (i == tsb.origin()) {
+ // Insert {(0, 0, 0...), m_weights[i]}
+ proj_wp = local_tr_traits.construct_weighted_point_d_object()(local_tr_traits.construct_point_d_object()(tangent_space_dim, CGAL::ORIGIN),
+ m_weights[i]);
+ } else {
+ const Weighted_point& wp = compute_perturbed_weighted_point(i);
+ proj_wp = project_point_and_compute_weight(wp, tsb, local_tr_traits);
+ }
+
+ center_vertex = triangulation.insert(proj_wp);
+ center_vertex->data() = i;
+ if (verbose)
+ std::cerr << "* Inserted point #" << i << "\n";
+
+#ifdef GUDHI_TC_VERY_VERBOSE
+ std::size_t num_attempts_to_insert_points = 1;
+ std::size_t num_inserted_points = 1;
+#endif
+ // const int NUM_NEIGHBORS = 150;
+ // KNS_range ins_range = m_points_ds.query_k_nearest_neighbors(center_pt, NUM_NEIGHBORS);
+ INS_range ins_range = m_points_ds.query_incremental_nearest_neighbors(center_pt);
+
+ // While building the local triangulation, we keep track of the radius
+ // of the "star sphere": the sphere centered at "center_vertex" that
+ // contains all the circumspheres of the star of "center_vertex"
+ boost::optional<FT> squared_star_sphere_radius_plus_margin;
+
+ // Insert points until we find a point which is outside "star sphere"
+ for (auto nn_it = ins_range.begin();
+ nn_it != ins_range.end();
+ ++nn_it) {
+ std::size_t neighbor_point_idx = nn_it->first;
+
+ // ith point = p, which is already inserted
+ if (neighbor_point_idx != i) {
+ // No need to lock the Mutex_for_perturb here since this will not be
+ // called while other threads are perturbing the positions
+ Point neighbor_pt;
+ FT neighbor_weight;
+ compute_perturbed_weighted_point(neighbor_point_idx, neighbor_pt, neighbor_weight);
+
+ if (squared_star_sphere_radius_plus_margin &&
+ k_sqdist(center_pt, neighbor_pt) > *squared_star_sphere_radius_plus_margin)
+ break;
+
+ Tr_point proj_pt = project_point_and_compute_weight(neighbor_pt, neighbor_weight, tsb,
+ local_tr_traits);
+
+#ifdef GUDHI_TC_VERY_VERBOSE
+ ++num_attempts_to_insert_points;
+#endif
+
+
+ Tr_vertex_handle vh = triangulation.insert_if_in_star(proj_pt, center_vertex);
+ // Tr_vertex_handle vh = triangulation.insert(proj_pt);
+ if (vh != Tr_vertex_handle() && vh->data() == (std::numeric_limits<std::size_t>::max)()) {
+#ifdef GUDHI_TC_VERY_VERBOSE
+ ++num_inserted_points;
+#endif
+ if (verbose)
+ std::cerr << "* Inserted point #" << neighbor_point_idx << "\n";
+
+ vh->data() = neighbor_point_idx;
+
+ // Let's recompute squared_star_sphere_radius_plus_margin
+ if (triangulation.current_dimension() >= tangent_space_dim) {
+ squared_star_sphere_radius_plus_margin = boost::none;
+ // Get the incident cells and look for the biggest circumsphere
+ std::vector<Tr_full_cell_handle> incident_cells;
+ triangulation.incident_full_cells(
+ center_vertex,
+ std::back_inserter(incident_cells));
+ for (typename std::vector<Tr_full_cell_handle>::iterator cit =
+ incident_cells.begin(); cit != incident_cells.end(); ++cit) {
+ Tr_full_cell_handle cell = *cit;
+ if (triangulation.is_infinite(cell)) {
+ squared_star_sphere_radius_plus_margin = boost::none;
+ break;
+ } else {
+ // Note that this uses the perturbed point since it uses
+ // the points of the local triangulation
+ Tr_point c = power_center(boost::make_transform_iterator(cell->vertices_begin(),
+ vertex_handle_to_point<Tr_point,
+ Tr_vertex_handle>),
+ boost::make_transform_iterator(cell->vertices_end(),
+ vertex_handle_to_point<Tr_point,
+ Tr_vertex_handle>));
+
+ FT sq_power_sphere_diam = 4 * point_weight(c);
+
+ if (!squared_star_sphere_radius_plus_margin ||
+ sq_power_sphere_diam > *squared_star_sphere_radius_plus_margin) {
+ squared_star_sphere_radius_plus_margin = sq_power_sphere_diam;
+ }
+ }
+ }
+
+ // Let's add the margin, now
+ // The value depends on whether we perturb weight or position
+ if (squared_star_sphere_radius_plus_margin) {
+ // "2*m_last_max_perturb" because both points can be perturbed
+ squared_star_sphere_radius_plus_margin = CGAL::square(std::sqrt(*squared_star_sphere_radius_plus_margin)
+ + 2 * m_last_max_perturb);
+
+ // Save it in `m_squared_star_spheres_radii_incl_margin`
+ m_squared_star_spheres_radii_incl_margin[i] =
+ *squared_star_sphere_radius_plus_margin;
+ } else {
+ m_squared_star_spheres_radii_incl_margin[i] = FT(-1);
+ }
+ }
+ }
+ }
+ }
+
+ return center_vertex;
+ }
+
+ void refresh_tangent_triangulation(std::size_t i, Points_ds const& updated_pts_ds, bool verbose = false) {
+ if (verbose)
+ std::cerr << "** Refreshing tangent tri #" << i << " **\n";
+
+ if (m_squared_star_spheres_radii_incl_margin[i] == FT(-1))
+ return compute_tangent_triangulation(i, verbose);
+
+ Point center_point = compute_perturbed_point(i);
+ // Among the updated points, which one is the closest to our center point?
+ std::size_t closest_pt_index =
+ updated_pts_ds.query_k_nearest_neighbors(center_point, 1, false).begin()->first;
+
+ typename K::Construct_weighted_point_d k_constr_wp =
+ m_k.construct_weighted_point_d_object();
+ typename K::Power_distance_d k_power_dist = m_k.power_distance_d_object();
+
+ // Construct a weighted point equivalent to the star sphere
+ Weighted_point star_sphere = k_constr_wp(compute_perturbed_point(i),
+ m_squared_star_spheres_radii_incl_margin[i]);
+ Weighted_point closest_updated_point =
+ compute_perturbed_weighted_point(closest_pt_index);
+
+ // Is the "closest point" inside our star sphere?
+ if (k_power_dist(star_sphere, closest_updated_point) <= FT(0))
+ compute_tangent_triangulation(i, verbose);
+ }
+
+ void compute_tangent_triangulation(std::size_t i, bool verbose = false) {
+ if (verbose)
+ std::cerr << "** Computing tangent tri #" << i << " **\n";
+ // std::cerr << "***********************************************\n";
+
+ // No need to lock the mutex here since this will not be called while
+ // other threads are perturbing the positions
+ const Point center_pt = compute_perturbed_point(i);
+ Tangent_space_basis &tsb = m_tangent_spaces[i];
+
+ // Estimate the tangent space
+ if (!m_are_tangent_spaces_computed[i]) {
+#ifdef GUDHI_TC_EXPORT_NORMALS
+ tsb = compute_tangent_space(center_pt, i, true /*normalize*/, &m_orth_spaces[i]);
+#else
+ tsb = compute_tangent_space(center_pt, i);
+#endif
+ }
+
+#if defined(GUDHI_TC_PROFILING) && defined(GUDHI_TC_VERY_VERBOSE)
+ Gudhi::Clock t;
+#endif
+ int tangent_space_dim = tangent_basis_dim(i);
+ Triangulation &local_tr =
+ m_triangulations[i].construct_triangulation(tangent_space_dim);
+
+ m_triangulations[i].center_vertex() =
+ compute_star(i, center_pt, tsb, local_tr, verbose);
+
+#if defined(GUDHI_TC_PROFILING) && defined(GUDHI_TC_VERY_VERBOSE)
+ t.end();
+ std::cerr << " - triangulation construction: " << t.num_seconds() << " s.\n";
+ t.reset();
+#endif
+
+#ifdef GUDHI_TC_VERY_VERBOSE
+ std::cerr << "Inserted " << num_inserted_points << " points / "
+ << num_attempts_to_insert_points << " attempts to compute the star\n";
+#endif
+
+ update_star(i);
+
+#if defined(GUDHI_TC_PROFILING) && defined(GUDHI_TC_VERY_VERBOSE)
+ t.end();
+ std::cerr << " - update_star: " << t.num_seconds() << " s.\n";
+#endif
+ }
+
+ // Updates m_stars[i] directly from m_triangulations[i]
+
+ void update_star(std::size_t i) {
+ Star &star = m_stars[i];
+ star.clear();
+ Triangulation &local_tr = m_triangulations[i].tr();
+ Tr_vertex_handle center_vertex = m_triangulations[i].center_vertex();
+ int cur_dim_plus_1 = local_tr.current_dimension() + 1;
+
+ std::vector<Tr_full_cell_handle> incident_cells;
+ local_tr.incident_full_cells(
+ center_vertex, std::back_inserter(incident_cells));
+
+ typename std::vector<Tr_full_cell_handle>::const_iterator it_c = incident_cells.begin();
+ typename std::vector<Tr_full_cell_handle>::const_iterator it_c_end = incident_cells.end();
+ // For each cell
+ for (; it_c != it_c_end; ++it_c) {
+ // Will contain all indices except center_vertex
+ Incident_simplex incident_simplex;
+ for (int j = 0; j < cur_dim_plus_1; ++j) {
+ std::size_t index = (*it_c)->vertex(j)->data();
+ if (index != i)
+ incident_simplex.insert(index);
+ }
+ GUDHI_CHECK(incident_simplex.size() == cur_dim_plus_1 - 1,
+ std::logic_error("update_star: wrong size of incident simplex"));
+ star.push_back(incident_simplex);
+ }
+ }
+
+ // Estimates tangent subspaces using PCA
+
+ Tangent_space_basis compute_tangent_space(const Point &p
+ , const std::size_t i
+ , bool normalize_basis = true
+ , Orthogonal_space_basis *p_orth_space_basis = NULL
+ ) {
+ unsigned int num_pts_for_pca = (std::min)(static_cast<unsigned int> (std::pow(GUDHI_TC_BASE_VALUE_FOR_PCA, m_intrinsic_dim)),
+ static_cast<unsigned int> (m_points.size()));
+
+ // Kernel functors
+ typename K::Construct_vector_d constr_vec =
+ m_k.construct_vector_d_object();
+ typename K::Compute_coordinate_d coord =
+ m_k.compute_coordinate_d_object();
+
+#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM
+ KNS_range kns_range = m_points_ds_for_tse.query_k_nearest_neighbors(
+ p, num_pts_for_pca, false);
+ const Points &points_for_pca = m_points_for_tse;
+#else
+ KNS_range kns_range = m_points_ds.query_k_nearest_neighbors(p, num_pts_for_pca, false);
+ const Points &points_for_pca = m_points;
+#endif
+
+ // One row = one point
+ Eigen::MatrixXd mat_points(num_pts_for_pca, m_ambient_dim);
+ auto nn_it = kns_range.begin();
+ for (unsigned int j = 0;
+ j < num_pts_for_pca && nn_it != kns_range.end();
+ ++j, ++nn_it) {
+ for (int k = 0; k < m_ambient_dim; ++k) {
+ mat_points(j, k) = CGAL::to_double(coord(points_for_pca[nn_it->first], k));
+ }
+ }
+ Eigen::MatrixXd centered = mat_points.rowwise() - mat_points.colwise().mean();
+ Eigen::MatrixXd cov = centered.adjoint() * centered;
+ Eigen::SelfAdjointEigenSolver<Eigen::MatrixXd> eig(cov);
+
+ Tangent_space_basis tsb(i); // p = compute_perturbed_point(i) here
+
+ // The eigenvectors are sorted in increasing order of their corresponding
+ // eigenvalues
+ for (int j = m_ambient_dim - 1;
+ j >= m_ambient_dim - m_intrinsic_dim;
+ --j) {
+ if (normalize_basis) {
+ Vector v = constr_vec(m_ambient_dim,
+ eig.eigenvectors().col(j).data(),
+ eig.eigenvectors().col(j).data() + m_ambient_dim);
+ tsb.push_back(normalize_vector(v, m_k));
+ } else {
+ tsb.push_back(constr_vec(
+ m_ambient_dim,
+ eig.eigenvectors().col(j).data(),
+ eig.eigenvectors().col(j).data() + m_ambient_dim));
+ }
+ }
+
+ if (p_orth_space_basis) {
+ p_orth_space_basis->set_origin(i);
+ for (int j = m_ambient_dim - m_intrinsic_dim - 1;
+ j >= 0;
+ --j) {
+ if (normalize_basis) {
+ Vector v = constr_vec(m_ambient_dim,
+ eig.eigenvectors().col(j).data(),
+ eig.eigenvectors().col(j).data() + m_ambient_dim);
+ p_orth_space_basis->push_back(normalize_vector(v, m_k));
+ } else {
+ p_orth_space_basis->push_back(constr_vec(
+ m_ambient_dim,
+ eig.eigenvectors().col(j).data(),
+ eig.eigenvectors().col(j).data() + m_ambient_dim));
+ }
+ }
+ }
+
+ m_are_tangent_spaces_computed[i] = true;
+
+ return tsb;
+ }
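+
+ // Summary of the estimation above (a sketch, not an alternative implementation):
+ // let X be the (num_pts_for_pca x m_ambient_dim) matrix of the nearest
+ // neighbors of p, one point per row, and Xc its row-wise centered version.
+ // The eigenvectors of Xc^T * Xc associated with the m_intrinsic_dim largest
+ // eigenvalues span the estimated tangent space; the remaining ones span the
+ // estimated orthogonal (normal) space used when GUDHI_TC_EXPORT_NORMALS
+ // is defined.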
+
+ // Compute the space tangent to a simplex (p1, p2, ... pn)
+ // TODO(CJ): Improve this?
+ // Basically, it takes all the points neighboring p1, p2, ..., pn and runs PCA
+ // on them. Note that most points are duplicated.
+
+ Tangent_space_basis compute_tangent_space(const Simplex &s, bool normalize_basis = true) {
+ unsigned int num_pts_for_pca = (std::min)(static_cast<unsigned int> (std::pow(GUDHI_TC_BASE_VALUE_FOR_PCA, m_intrinsic_dim)),
+ static_cast<unsigned int> (m_points.size()));
+
+ // Kernel functors
+ typename K::Construct_vector_d constr_vec =
+ m_k.construct_vector_d_object();
+ typename K::Compute_coordinate_d coord =
+ m_k.compute_coordinate_d_object();
+ typename K::Squared_length_d sqlen =
+ m_k.squared_length_d_object();
+ typename K::Scaled_vector_d scaled_vec =
+ m_k.scaled_vector_d_object();
+ typename K::Scalar_product_d scalar_pdct =
+ m_k.scalar_product_d_object();
+ typename K::Difference_of_vectors_d diff_vec =
+ m_k.difference_of_vectors_d_object();
+
+ // One row = one point
+ Eigen::MatrixXd mat_points(s.size() * num_pts_for_pca, m_ambient_dim);
+ unsigned int current_row = 0;
+
+ for (Simplex::const_iterator it_index = s.begin();
+ it_index != s.end(); ++it_index) {
+ const Point &p = m_points[*it_index];
+
+#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM
+ KNS_range kns_range = m_points_ds_for_tse.query_k_nearest_neighbors(
+ p, num_pts_for_pca, false);
+ const Points &points_for_pca = m_points_for_tse;
+#else
+ KNS_range kns_range = m_points_ds.query_k_nearest_neighbors(p, num_pts_for_pca, false);
+ const Points &points_for_pca = m_points;
+#endif
+
+ auto nn_it = kns_range.begin();
+ for (;
+ current_row < num_pts_for_pca && nn_it != kns_range.end();
+ ++current_row, ++nn_it) {
+ for (int i = 0; i < m_ambient_dim; ++i) {
+ mat_points(current_row, i) =
+ CGAL::to_double(coord(points_for_pca[nn_it->first], i));
+ }
+ }
+ }
+ Eigen::MatrixXd centered = mat_points.rowwise() - mat_points.colwise().mean();
+ Eigen::MatrixXd cov = centered.adjoint() * centered;
+ Eigen::SelfAdjointEigenSolver<Eigen::MatrixXd> eig(cov);
+
+ Tangent_space_basis tsb;
+
+ // The eigenvectors are sorted in increasing order of their corresponding
+ // eigenvalues
+ for (int j = m_ambient_dim - 1;
+ j >= m_ambient_dim - m_intrinsic_dim;
+ --j) {
+ if (normalize_basis) {
+ Vector v = constr_vec(m_ambient_dim,
+ eig.eigenvectors().col(j).data(),
+ eig.eigenvectors().col(j).data() + m_ambient_dim);
+ tsb.push_back(normalize_vector(v, m_k));
+ } else {
+ tsb.push_back(constr_vec(
+ m_ambient_dim,
+ eig.eigenvectors().col(j).data(),
+ eig.eigenvectors().col(j).data() + m_ambient_dim));
+ }
+ }
+
+ return tsb;
+ }
+
+ // Returns the dimension of the i-th tangent space basis,
+ // i.e. the dimension used to build the i-th local triangulation
+
+ int tangent_basis_dim(std::size_t i) const {
+ return m_tangent_spaces[i].dimension();
+ }
+
+ Point compute_perturbed_point(std::size_t pt_idx) const {
+#ifdef GUDHI_TC_PERTURB_POSITION
+ return m_k.translated_point_d_object()(
+ m_points[pt_idx], m_translations[pt_idx]);
+#else
+ return m_points[pt_idx];
+#endif
+ }
+
+ void compute_perturbed_weighted_point(std::size_t pt_idx, Point &p, FT &w) const {
+#ifdef GUDHI_TC_PERTURB_POSITION
+ p = m_k.translated_point_d_object()(
+ m_points[pt_idx], m_translations[pt_idx]);
+#else
+ p = m_points[pt_idx];
+#endif
+ w = m_weights[pt_idx];
+ }
+
+ Weighted_point compute_perturbed_weighted_point(std::size_t pt_idx) const {
+ typename K::Construct_weighted_point_d k_constr_wp =
+ m_k.construct_weighted_point_d_object();
+
+ Weighted_point wp = k_constr_wp(
+#ifdef GUDHI_TC_PERTURB_POSITION
+ m_k.translated_point_d_object()(m_points[pt_idx], m_translations[pt_idx]),
+#else
+ m_points[pt_idx],
+#endif
+ m_weights[pt_idx]);
+
+ return wp;
+ }
+
+ Point unproject_point(const Tr_point &p,
+ const Tangent_space_basis &tsb,
+ const Tr_traits &tr_traits) const {
+ typename K::Translated_point_d k_transl =
+ m_k.translated_point_d_object();
+ typename K::Scaled_vector_d k_scaled_vec =
+ m_k.scaled_vector_d_object();
+ typename Tr_traits::Compute_coordinate_d coord =
+ tr_traits.compute_coordinate_d_object();
+
+ Point global_point = compute_perturbed_point(tsb.origin());
+ for (int i = 0; i < m_intrinsic_dim; ++i)
+ global_point = k_transl(global_point,
+ k_scaled_vec(tsb[i], coord(p, i)));
+
+ return global_point;
+ }
+
+ // Project the point in the tangent space
+ // Resulting point coords are expressed in tsb's space
+ Tr_bare_point project_point(const Point &p,
+ const Tangent_space_basis &tsb,
+ const Tr_traits &tr_traits) const {
+ typename K::Scalar_product_d scalar_pdct =
+ m_k.scalar_product_d_object();
+ typename K::Difference_of_points_d diff_points =
+ m_k.difference_of_points_d_object();
+
+ Vector v = diff_points(p, compute_perturbed_point(tsb.origin()));
+
+ std::vector<FT> coords;
+ // Tangent-space coordinates of the projected point
+ coords.reserve(tsb.dimension());
+ for (int i = 0; i < m_intrinsic_dim; ++i) {
+ // Local coords are given by the scalar product with the vectors of tsb
+ FT coord = scalar_pdct(v, tsb[i]);
+ coords.push_back(coord);
+ }
+
+ return tr_traits.construct_point_d_object()(
+ static_cast<int> (coords.size()), coords.begin(), coords.end());
+ }
+
+ // Project the point in the tangent space
+ // The weight will be the squared distance between p and the projection of p
+ // Resulting point coords are expressed in tsb's space
+
+ Tr_point project_point_and_compute_weight(const Weighted_point &wp,
+ const Tangent_space_basis &tsb,
+ const Tr_traits &tr_traits) const {
+ typename K::Point_drop_weight_d k_drop_w =
+ m_k.point_drop_weight_d_object();
+ typename K::Compute_weight_d k_point_weight =
+ m_k.compute_weight_d_object();
+ return project_point_and_compute_weight(
+ k_drop_w(wp), k_point_weight(wp), tsb, tr_traits);
+ }
+
+ // Same as above, with slightly different parameters
+ Tr_point project_point_and_compute_weight(const Point &p, const FT w,
+ const Tangent_space_basis &tsb,
+ const Tr_traits &tr_traits) const {
+ const int point_dim = m_k.point_dimension_d_object()(p);
+
+ typename K::Construct_point_d constr_pt =
+ m_k.construct_point_d_object();
+ typename K::Scalar_product_d scalar_pdct =
+ m_k.scalar_product_d_object();
+ typename K::Difference_of_points_d diff_points =
+ m_k.difference_of_points_d_object();
+ typename K::Compute_coordinate_d coord =
+ m_k.compute_coordinate_d_object();
+ typename K::Construct_cartesian_const_iterator_d ccci =
+ m_k.construct_cartesian_const_iterator_d_object();
+
+ Point origin = compute_perturbed_point(tsb.origin());
+ Vector v = diff_points(p, origin);
+
+ // Same dimension? Then the weight is 0
+ bool same_dim = (point_dim == tsb.dimension());
+
+ std::vector<FT> coords;
+ // Ambient-space coords of the projected point
+ std::vector<FT> p_proj(ccci(origin), ccci(origin, 0));
+ coords.reserve(tsb.dimension());
+ for (int i = 0; i < tsb.dimension(); ++i) {
+ // Local coords are given by the scalar product with the vectors of tsb
+ FT c = scalar_pdct(v, tsb[i]);
+ coords.push_back(c);
+
+ // p_proj += c * tsb[i]
+ if (!same_dim) {
+ for (int j = 0; j < point_dim; ++j)
+ p_proj[j] += c * coord(tsb[i], j);
+ }
+ }
+
+ // Same dimension? Then the weight is 0
+ FT sq_dist_to_proj_pt = 0;
+ if (!same_dim) {
+ Point projected_pt = constr_pt(point_dim, p_proj.begin(), p_proj.end());
+ sq_dist_to_proj_pt = m_k.squared_distance_d_object()(p, projected_pt);
+ }
+
+ return tr_traits.construct_weighted_point_d_object()
+ (tr_traits.construct_point_d_object()(static_cast<int> (coords.size()), coords.begin(), coords.end()),
+ w - sq_dist_to_proj_pt);
+ }
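+
+ // In short (restating the computation above): writing q for the orthogonal
+ // projection of p onto the affine flat origin + span(tsb), the returned
+ // weighted point has local coordinates (<p - origin, tsb[0]>, <p - origin, tsb[1]>, ...)
+ // and weight w - ||p - q||^2 (simply w when p already lives in a space of the
+ // same dimension as tsb, since the distance to the projection is then 0).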
+
+ // Project all the points in the tangent space
+
+ template <typename Indexed_point_range>
+ std::vector<Tr_point> project_points_and_compute_weights(
+ const Indexed_point_range &point_indices,
+ const Tangent_space_basis &tsb,
+ const Tr_traits &tr_traits) const {
+ std::vector<Tr_point> ret;
+ for (typename Indexed_point_range::const_iterator
+ it = point_indices.begin(), it_end = point_indices.end();
+ it != it_end; ++it) {
+ ret.push_back(project_point_and_compute_weight(
+ compute_perturbed_weighted_point(*it), tsb, tr_traits));
+ }
+ return ret;
+ }
+
+ // A simplex here is a local tri's full cell handle
+
+ bool is_simplex_consistent(Tr_full_cell_handle fch, int cur_dim) const {
+ Simplex c;
+ for (int i = 0; i < cur_dim + 1; ++i) {
+ std::size_t data = fch->vertex(i)->data();
+ c.insert(data);
+ }
+ return is_simplex_consistent(c);
+ }
+
+ // A simplex here is a list of point indices
+ // TODO(CJ): improve it like the other "is_simplex_consistent" below
+
+ bool is_simplex_consistent(Simplex const& simplex) const {
+ // Check if the simplex is in the stars of all its vertices
+ Simplex::const_iterator it_point_idx = simplex.begin();
+ // For each point p of the simplex, we parse the incident cells of p
+ // and we check if "simplex" is among them
+ for (; it_point_idx != simplex.end(); ++it_point_idx) {
+ std::size_t point_idx = *it_point_idx;
+ // Don't check infinite simplices
+ if (point_idx == (std::numeric_limits<std::size_t>::max)())
+ continue;
+
+ Star const& star = m_stars[point_idx];
+
+ // What we're looking for is "simplex" \ point_idx
+ Incident_simplex is_to_find = simplex;
+ is_to_find.erase(point_idx);
+
+ // For each cell
+ if (std::find(star.begin(), star.end(), is_to_find) == star.end())
+ return false;
+ }
+
+ return true;
+ }
+
+ // A simplex here is a list of point indices
+ // "s" contains all the points of the simplex except "center_point"
+ // This function outputs (through the iterator) the points whose star doesn't contain the simplex
+ // N.B.: the function assumes that the simplex is contained in
+ // star(center_point)
+
+ template <typename OutputIterator> // value_type = std::size_t
+ bool is_simplex_consistent(
+ std::size_t center_point,
+ Incident_simplex const& s, // without "center_point"
+ OutputIterator points_whose_star_does_not_contain_s,
+ bool check_also_in_non_maximal_faces = false) const {
+ Simplex full_simplex = s;
+ full_simplex.insert(center_point);
+
+ // Check if the simplex is in the stars of all its vertices
+ Incident_simplex::const_iterator it_point_idx = s.begin();
+ // For each point p of the simplex, we parse the incident cells of p
+ // and we check if "simplex" is among them
+ for (; it_point_idx != s.end(); ++it_point_idx) {
+ std::size_t point_idx = *it_point_idx;
+ // Don't check infinite simplices
+ if (point_idx == (std::numeric_limits<std::size_t>::max)())
+ continue;
+
+ Star const& star = m_stars[point_idx];
+
+ // What we're looking for is full_simplex \ point_idx
+ Incident_simplex is_to_find = full_simplex;
+ is_to_find.erase(point_idx);
+
+ if (check_also_in_non_maximal_faces) {
+ // For each simplex "is" of the star, check if ic_to_simplex is
+ // included in "is"
+ bool found = false;
+ for (Star::const_iterator is = star.begin(), is_end = star.end();
+ !found && is != is_end; ++is) {
+ if (std::includes(is->begin(), is->end(),
+ is_to_find.begin(), is_to_find.end()))
+ found = true;
+ }
+
+ if (!found)
+ *points_whose_star_does_not_contain_s++ = point_idx;
+ } else {
+ // Does the star contain is_to_find?
+ if (std::find(star.begin(), star.end(), is_to_find) == star.end())
+ *points_whose_star_does_not_contain_s++ = point_idx;
+ }
+ }
+
+ return true;
+ }
+
+ // A simplex here is a list of point indices
+ // It looks for s in star(p).
+ // "s" contains all the points of the simplex except p.
+ bool is_simplex_in_star(std::size_t p,
+ Incident_simplex const& s,
+ bool check_also_in_non_maximal_faces = true) const {
+ Star const& star = m_stars[p];
+
+ if (check_also_in_non_maximal_faces) {
+ // For each simplex "is" of the star, check if ic_to_simplex is
+ // included in "is"
+ bool found = false;
+ for (Star::const_iterator is = star.begin(), is_end = star.end();
+ !found && is != is_end; ++is) {
+ if (std::includes(is->begin(), is->end(), s.begin(), s.end()))
+ found = true;
+ }
+
+ return found;
+ } else {
+ return !(std::find(star.begin(), star.end(), s) == star.end());
+ }
+ }
+
+#ifdef GUDHI_USE_TBB
+ // Functor for try_to_solve_inconsistencies_in_a_local_triangulation function
+ class Try_to_solve_inconsistencies_in_a_local_triangulation {
+ Tangential_complex & m_tc;
+ double m_max_perturb;
+ tbb::combinable<std::size_t> &m_num_inconsistencies;
+ tbb::combinable<std::vector<std::size_t> > &m_updated_points;
+
+ public:
+ // Constructor
+ Try_to_solve_inconsistencies_in_a_local_triangulation(Tangential_complex &tc,
+ double max_perturb,
+ tbb::combinable<std::size_t> &num_inconsistencies,
+ tbb::combinable<std::vector<std::size_t> > &updated_points)
+ : m_tc(tc),
+ m_max_perturb(max_perturb),
+ m_num_inconsistencies(num_inconsistencies),
+ m_updated_points(updated_points) { }
+
+ // Constructor
+ Try_to_solve_inconsistencies_in_a_local_triangulation(const Try_to_solve_inconsistencies_in_a_local_triangulation&
+ tsilt)
+ : m_tc(tsilt.m_tc),
+ m_max_perturb(tsilt.m_max_perturb),
+ m_num_inconsistencies(tsilt.m_num_inconsistencies),
+ m_updated_points(tsilt.m_updated_points) { }
+
+ // operator()
+ void operator()(const tbb::blocked_range<size_t>& r) const {
+ for (size_t i = r.begin(); i != r.end(); ++i) {
+ m_num_inconsistencies.local() +=
+ m_tc.try_to_solve_inconsistencies_in_a_local_triangulation(i, m_max_perturb,
+ std::back_inserter(m_updated_points.local()));
+ }
+ }
+ };
+#endif // GUDHI_USE_TBB
+
+ void perturb(std::size_t point_idx, double max_perturb) {
+ const Tr_traits &local_tr_traits =
+ m_triangulations[point_idx].tr().geom_traits();
+ typename Tr_traits::Compute_coordinate_d coord =
+ local_tr_traits.compute_coordinate_d_object();
+ typename K::Translated_point_d k_transl =
+ m_k.translated_point_d_object();
+ typename K::Construct_vector_d k_constr_vec =
+ m_k.construct_vector_d_object();
+ typename K::Scaled_vector_d k_scaled_vec =
+ m_k.scaled_vector_d_object();
+
+ CGAL::Random_points_in_ball_d<Tr_bare_point>
+ tr_point_in_ball_generator(m_intrinsic_dim,
+ m_random_generator.get_double(0., max_perturb));
+
+ Tr_point local_random_transl =
+ local_tr_traits.construct_weighted_point_d_object()(*tr_point_in_ball_generator++, 0);
+ Translation_for_perturb global_transl = k_constr_vec(m_ambient_dim);
+ const Tangent_space_basis &tsb = m_tangent_spaces[point_idx];
+ for (int i = 0; i < m_intrinsic_dim; ++i) {
+ global_transl = k_transl(global_transl,
+ k_scaled_vec(tsb[i], coord(local_random_transl, i)));
+ }
+ // Parallel
+#if defined(GUDHI_USE_TBB)
+ m_p_perturb_mutexes[point_idx].lock();
+ m_translations[point_idx] = global_transl;
+ m_p_perturb_mutexes[point_idx].unlock();
+ // Sequential
+#else
+ m_translations[point_idx] = global_transl;
+#endif
+ }
+
+ // Return true if inconsistencies were found
+ template <typename OutputIt>
+ bool try_to_solve_inconsistencies_in_a_local_triangulation(std::size_t tr_index,
+ double max_perturb,
+ OutputIt perturbed_pts_indices = CGAL::Emptyset_iterator()) {
+ bool is_inconsistent = false;
+
+ Star const& star = m_stars[tr_index];
+ Tr_vertex_handle center_vh = m_triangulations[tr_index].center_vertex();
+
+ // For each incident simplex
+ Star::const_iterator it_inc_simplex = star.begin();
+ Star::const_iterator it_inc_simplex_end = star.end();
+ for (; it_inc_simplex != it_inc_simplex_end; ++it_inc_simplex) {
+ const Incident_simplex &incident_simplex = *it_inc_simplex;
+
+ // Don't check infinite cells
+ if (is_infinite(incident_simplex))
+ continue;
+
+ Simplex c = incident_simplex;
+ c.insert(tr_index); // Add the missing index
+
+ // Perturb the center point
+ if (!is_simplex_consistent(c)) {
+ is_inconsistent = true;
+
+ std::size_t idx = tr_index;
+
+ perturb(tr_index, max_perturb);
+ *perturbed_pts_indices++ = idx;
+
+ // We will try the other cells next time
+ break;
+ }
+ }
+
+ return is_inconsistent;
+ }
+
+
+ // 1st line: number of points
+ // Then one point per line
+ std::ostream &export_point_set(std::ostream & os,
+ bool use_perturbed_points = false,
+ const char *coord_separator = " ") const {
+ if (use_perturbed_points) {
+ std::vector<Point> perturbed_points;
+ perturbed_points.reserve(m_points.size());
+ for (std::size_t i = 0; i < m_points.size(); ++i)
+ perturbed_points.push_back(compute_perturbed_point(i));
+
+ return export_point_set(
+ m_k, perturbed_points, os, coord_separator);
+ } else {
+ return export_point_set(
+ m_k, m_points, os, coord_separator);
+ }
+ }
+
+ template<typename ProjectionFunctor = CGAL::Identity<Point> >
+ std::ostream &export_vertices_to_off(
+ std::ostream & os, std::size_t &num_vertices,
+ bool use_perturbed_points = false,
+ ProjectionFunctor const& point_projection = ProjectionFunctor()) const {
+ if (m_points.empty()) {
+ num_vertices = 0;
+ return os;
+ }
+
+ // If m_intrinsic_dim = 1, we output each point two times
+ // to be able to export each segment as a flat triangle with 3 different
+ // indices (otherwise, Meshlab detects degenerate simplices)
+ const int N = (m_intrinsic_dim == 1 ? 2 : 1);
+
+ // Kernel functors
+ typename K::Compute_coordinate_d coord =
+ m_k.compute_coordinate_d_object();
+
+#ifdef GUDHI_TC_EXPORT_ALL_COORDS_IN_OFF
+ int num_coords = m_ambient_dim;
+#else
+ int num_coords = (std::min)(m_ambient_dim, 3);
+#endif
+
+#ifdef GUDHI_TC_EXPORT_NORMALS
+ OS_container::const_iterator it_os = m_orth_spaces.begin();
+#endif
+ typename Points::const_iterator it_p = m_points.begin();
+ typename Points::const_iterator it_p_end = m_points.end();
+ // For each point p
+ for (std::size_t i = 0; it_p != it_p_end; ++it_p, ++i) {
+ Point p = point_projection(
+ use_perturbed_points ? compute_perturbed_point(i) : *it_p);
+ for (int ii = 0; ii < N; ++ii) {
+ int j = 0;
+ for (; j < num_coords; ++j)
+ os << CGAL::to_double(coord(p, j)) << " ";
+ if (j == 2)
+ os << "0";
+
+#ifdef GUDHI_TC_EXPORT_NORMALS
+ for (j = 0; j < num_coords; ++j)
+ os << " " << CGAL::to_double(coord(*it_os->begin(), j));
+#endif
+ os << "\n";
+ }
+#ifdef GUDHI_TC_EXPORT_NORMALS
+ ++it_os;
+#endif
+ }
+
+ num_vertices = N * m_points.size();
+ return os;
+ }
+
+ std::ostream &export_simplices_to_off(std::ostream & os, std::size_t &num_OFF_simplices,
+ bool color_inconsistencies = false,
+ Simplex_set const *p_simpl_to_color_in_red = NULL,
+ Simplex_set const *p_simpl_to_color_in_green = NULL,
+ Simplex_set const *p_simpl_to_color_in_blue = NULL)
+ const {
+ // If m_intrinsic_dim = 1, each point is output two times
+ // (see export_vertices_to_off)
+ num_OFF_simplices = 0;
+ std::size_t num_maximal_simplices = 0;
+ std::size_t num_inconsistent_maximal_simplices = 0;
+ std::size_t num_inconsistent_stars = 0;
+ typename Tr_container::const_iterator it_tr = m_triangulations.begin();
+ typename Tr_container::const_iterator it_tr_end = m_triangulations.end();
+ // For each triangulation
+ for (std::size_t idx = 0; it_tr != it_tr_end; ++it_tr, ++idx) {
+ bool is_star_inconsistent = false;
+
+ Triangulation const& tr = it_tr->tr();
+ Tr_vertex_handle center_vh = it_tr->center_vertex();
+
+ if (tr.current_dimension() < m_intrinsic_dim)
+ continue;
+
+ // Color for this star
+ std::stringstream color;
+ // color << rand()%256 << " " << 100+rand()%156 << " " << 100+rand()%156;
+ color << 128 << " " << 128 << " " << 128;
+
+ // Gather the triangles here, with an int telling its color
+ typedef std::vector<std::pair<Simplex, int> > Star_using_triangles;
+ Star_using_triangles star_using_triangles;
+
+ // For each cell of the star
+ Star::const_iterator it_inc_simplex = m_stars[idx].begin();
+ Star::const_iterator it_inc_simplex_end = m_stars[idx].end();
+ for (; it_inc_simplex != it_inc_simplex_end; ++it_inc_simplex) {
+ Simplex c = *it_inc_simplex;
+ c.insert(idx);
+ std::size_t num_vertices = c.size();
+ ++num_maximal_simplices;
+
+ int color_simplex = -1; // -1=no color, 0=yellow, 1=red, 2=green, 3=blue
+ if (color_inconsistencies && !is_simplex_consistent(c)) {
+ ++num_inconsistent_maximal_simplices;
+ color_simplex = 0;
+ is_star_inconsistent = true;
+ } else {
+ if (p_simpl_to_color_in_red &&
+ std::find(
+ p_simpl_to_color_in_red->begin(),
+ p_simpl_to_color_in_red->end(),
+ c) != p_simpl_to_color_in_red->end()) {
+ color_simplex = 1;
+ } else if (p_simpl_to_color_in_green &&
+ std::find(
+ p_simpl_to_color_in_green->begin(),
+ p_simpl_to_color_in_green->end(),
+ c) != p_simpl_to_color_in_green->end()) {
+ color_simplex = 2;
+ } else if (p_simpl_to_color_in_blue &&
+ std::find(
+ p_simpl_to_color_in_blue->begin(),
+ p_simpl_to_color_in_blue->end(),
+ c) != p_simpl_to_color_in_blue->end()) {
+ color_simplex = 3;
+ }
+ }
+
+ // If m_intrinsic_dim = 1, each point is output two times,
+ // so we need to multiply each index by 2
+ // And if there are only 2 vertices, add a third one (each vertex is
+ // duplicated in the file when m_intrinsic_dim = 1)
+ if (m_intrinsic_dim == 1) {
+ Simplex tmp_c;
+ Simplex::iterator it = c.begin();
+ for (; it != c.end(); ++it)
+ tmp_c.insert(*it * 2);
+ if (num_vertices == 2)
+ tmp_c.insert(*tmp_c.rbegin() + 1);
+
+ c = tmp_c;
+ }
+
+ if (num_vertices <= 3) {
+ star_using_triangles.push_back(std::make_pair(c, color_simplex));
+ } else {
+ // num_vertices >= 4: decompose the simplex into triangles
+ std::vector<bool> booleans(num_vertices, false);
+ std::fill(booleans.begin() + num_vertices - 3, booleans.end(), true);
+ do {
+ Simplex triangle;
+ Simplex::iterator it = c.begin();
+ for (int i = 0; it != c.end(); ++i, ++it) {
+ if (booleans[i])
+ triangle.insert(*it);
+ }
+ star_using_triangles.push_back(
+ std::make_pair(triangle, color_simplex));
+ } while (std::next_permutation(booleans.begin(), booleans.end()));
+ }
+ }
+
+ // For each cell
+ Star_using_triangles::const_iterator it_simplex =
+ star_using_triangles.begin();
+ Star_using_triangles::const_iterator it_simplex_end =
+ star_using_triangles.end();
+ for (; it_simplex != it_simplex_end; ++it_simplex) {
+ const Simplex &c = it_simplex->first;
+
+ // Don't export infinite cells
+ if (is_infinite(c))
+ continue;
+
+ int color_simplex = it_simplex->second;
+
+ std::stringstream sstr_c;
+
+ Simplex::const_iterator it_point_idx = c.begin();
+ for (; it_point_idx != c.end(); ++it_point_idx) {
+ sstr_c << *it_point_idx << " ";
+ }
+
+ os << 3 << " " << sstr_c.str();
+ if (color_inconsistencies || p_simpl_to_color_in_red
+ || p_simpl_to_color_in_green || p_simpl_to_color_in_blue) {
+ switch (color_simplex) {
+ case 0: os << " 255 255 0";
+ break;
+ case 1: os << " 255 0 0";
+ break;
+ case 2: os << " 0 255 0";
+ break;
+ case 3: os << " 0 0 255";
+ break;
+ default: os << " " << color.str();
+ break;
+ }
+ }
+ ++num_OFF_simplices;
+ os << "\n";
+ }
+ if (is_star_inconsistent)
+ ++num_inconsistent_stars;
+ }
+
+#ifdef DEBUG_TRACES
+ std::cerr
+ << "\n==========================================================\n"
+ << "Export from list of stars to OFF:\n"
+ << " * Number of vertices: " << m_points.size() << "\n"
+ << " * Total number of maximal simplices: " << num_maximal_simplices
+ << "\n";
+ if (color_inconsistencies) {
+ std::cerr
+ << " * Number of inconsistent stars: "
+ << num_inconsistent_stars << " ("
+ << (m_points.size() > 0 ?
+ 100. * num_inconsistent_stars / m_points.size() : 0.) << "%)\n"
+ << " * Number of inconsistent maximal simplices: "
+ << num_inconsistent_maximal_simplices << " ("
+ << (num_maximal_simplices > 0 ?
+ 100. * num_inconsistent_maximal_simplices / num_maximal_simplices
+ : 0.) << "%)\n";
+ }
+ std::cerr << "==========================================================\n";
+#endif
+
+ return os;
+ }
+
+ public:
+ std::ostream &export_simplices_to_off(
+ const Simplicial_complex &complex,
+ std::ostream & os, std::size_t &num_OFF_simplices,
+ Simplex_set const *p_simpl_to_color_in_red = NULL,
+ Simplex_set const *p_simpl_to_color_in_green = NULL,
+ Simplex_set const *p_simpl_to_color_in_blue = NULL)
+ const {
+ typedef Simplicial_complex::Simplex Simplex;
+ typedef Simplicial_complex::Simplex_set Simplex_set;
+
+ // If m_intrinsic_dim = 1, each point is output two times
+ // (see export_vertices_to_off)
+ num_OFF_simplices = 0;
+ std::size_t num_maximal_simplices = 0;
+
+ typename Simplex_set::const_iterator it_s =
+ complex.simplex_range().begin();
+ typename Simplex_set::const_iterator it_s_end =
+ complex.simplex_range().end();
+ // For each simplex
+ for (; it_s != it_s_end; ++it_s) {
+ Simplex c = *it_s;
+ ++num_maximal_simplices;
+
+ int color_simplex = -1; // -1=no color, 0=yellow, 1=red, 2=green, 3=blue
+ if (p_simpl_to_color_in_red &&
+ std::find(
+ p_simpl_to_color_in_red->begin(),
+ p_simpl_to_color_in_red->end(),
+ c) != p_simpl_to_color_in_red->end()) {
+ color_simplex = 1;
+ } else if (p_simpl_to_color_in_green &&
+ std::find(p_simpl_to_color_in_green->begin(),
+ p_simpl_to_color_in_green->end(),
+ c) != p_simpl_to_color_in_green->end()) {
+ color_simplex = 2;
+ } else if (p_simpl_to_color_in_blue &&
+ std::find(p_simpl_to_color_in_blue->begin(),
+ p_simpl_to_color_in_blue->end(),
+ c) != p_simpl_to_color_in_blue->end()) {
+ color_simplex = 3;
+ }
+
+ // Gather the triangles here
+ typedef std::vector<Simplex> Triangles;
+ Triangles triangles;
+
+ int num_vertices = static_cast<int>(c.size());
+ // Do not export smaller dimension simplices
+ if (num_vertices < m_intrinsic_dim + 1)
+ continue;
+
+ // If m_intrinsic_dim = 1, each point is output two times,
+ // so we need to multiply each index by 2
+ // And if there are only 2 vertices, add a third one (each vertex is
+ // duplicated in the file when m_intrinsic_dim = 1)
+ if (m_intrinsic_dim == 1) {
+ Simplex tmp_c;
+ Simplex::iterator it = c.begin();
+ for (; it != c.end(); ++it)
+ tmp_c.insert(*it * 2);
+ if (num_vertices == 2)
+ tmp_c.insert(*tmp_c.rbegin() + 1);
+
+ c = tmp_c;
+ }
+
+ if (num_vertices <= 3) {
+ triangles.push_back(c);
+ } else {
+ // num_vertices >= 4: decompose the simplex into triangles
+ std::vector<bool> booleans(num_vertices, false);
+ std::fill(booleans.begin() + num_vertices - 3, booleans.end(), true);
+ do {
+ Simplex triangle;
+ Simplex::iterator it = c.begin();
+ for (int i = 0; it != c.end(); ++i, ++it) {
+ if (booleans[i])
+ triangle.insert(*it);
+ }
+ triangles.push_back(triangle);
+ } while (std::next_permutation(booleans.begin(), booleans.end()));
+ }
+
+ // For each cell
+ Triangles::const_iterator it_tri = triangles.begin();
+ Triangles::const_iterator it_tri_end = triangles.end();
+ for (; it_tri != it_tri_end; ++it_tri) {
+ // Don't export infinite cells
+ if (is_infinite(*it_tri))
+ continue;
+
+ os << 3 << " ";
+ Simplex::const_iterator it_point_idx = it_tri->begin();
+ for (; it_point_idx != it_tri->end(); ++it_point_idx) {
+ os << *it_point_idx << " ";
+ }
+
+ if (p_simpl_to_color_in_red || p_simpl_to_color_in_green
+ || p_simpl_to_color_in_blue) {
+ switch (color_simplex) {
+ case 0: os << " 255 255 0";
+ break;
+ case 1: os << " 255 0 0";
+ break;
+ case 2: os << " 0 255 0";
+ break;
+ case 3: os << " 0 0 255";
+ break;
+ default: os << " 128 128 128";
+ break;
+ }
+ }
+
+ ++num_OFF_simplices;
+ os << "\n";
+ }
+ }
+
+#ifdef DEBUG_TRACES
+ std::cerr
+ << "\n==========================================================\n"
+ << "Export from complex to OFF:\n"
+ << " * Number of vertices: " << m_points.size() << "\n"
+ << " * Total number of maximal simplices: " << num_maximal_simplices
+ << "\n"
+ << "==========================================================\n";
+#endif
+
+ return os;
+ }
+
+ private:
+ const K m_k;
+ const int m_intrinsic_dim;
+ const int m_ambient_dim;
+
+ Points m_points;
+ Weights m_weights;
+#ifdef GUDHI_TC_PERTURB_POSITION
+ Translations_for_perturb m_translations;
+#if defined(GUDHI_USE_TBB)
+ Mutex_for_perturb *m_p_perturb_mutexes;
+#endif
+#endif
+
+ Points_ds m_points_ds;
+ double m_last_max_perturb;
+ std::vector<bool> m_are_tangent_spaces_computed;
+ TS_container m_tangent_spaces;
+#ifdef GUDHI_TC_EXPORT_NORMALS
+ OS_container m_orth_spaces;
+#endif
+ Tr_container m_triangulations; // Contains the triangulations
+ // and their center vertex
+ Stars_container m_stars;
+ std::vector<FT> m_squared_star_spheres_radii_incl_margin;
+
+#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM
+ Points m_points_for_tse;
+ Points_ds m_points_ds_for_tse;
+#endif
+
+ mutable CGAL::Random m_random_generator;
+}; // /class Tangential_complex
+
+} // end namespace tangential_complex
+} // end namespace Gudhi
+
+#endif // TANGENTIAL_COMPLEX_H_
diff --git a/src/Tangential_complex/include/gudhi/Tangential_complex/Simplicial_complex.h b/src/Tangential_complex/include/gudhi/Tangential_complex/Simplicial_complex.h
new file mode 100644
index 00000000..65c74ca5
--- /dev/null
+++ b/src/Tangential_complex/include/gudhi/Tangential_complex/Simplicial_complex.h
@@ -0,0 +1,539 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Clement Jamin
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef TANGENTIAL_COMPLEX_SIMPLICIAL_COMPLEX_H_
+#define TANGENTIAL_COMPLEX_SIMPLICIAL_COMPLEX_H_
+
+#include <gudhi/Tangential_complex/config.h>
+#include <gudhi/Tangential_complex/utilities.h>
+#include <gudhi/Debug_utils.h>
+#include <gudhi/console_color.h>
+
+#include <CGAL/iterator.h>
+
+// For is_pure_pseudomanifold
+#include <boost/graph/graph_traits.hpp>
+#include <boost/graph/adjacency_list.hpp>
+#include <boost/graph/connected_components.hpp>
+#include <boost/container/flat_set.hpp>
+
+#include <algorithm>
+#include <string>
+#include <fstream>
+#include <map> // for map<>
+#include <vector> // for vector<>
+#include <set> // for set<>
+
+namespace Gudhi {
+namespace tangential_complex {
+namespace internal {
+
+class Simplicial_complex {
+ public:
+ typedef boost::container::flat_set<std::size_t> Simplex;
+ typedef std::set<Simplex> Simplex_set;
+
+ // If perform_checks = true, the function:
+ // - won't insert the simplex if it is already in a higher dim simplex
+ // - will erase any lower-dim simplices that are faces of the new simplex
+ // Returns true if the simplex was added
+ bool add_simplex(
+ const Simplex &s, bool perform_checks = true) {
+ if (perform_checks) {
+ unsigned int num_pts = static_cast<unsigned int> (s.size());
+ std::vector<Complex::iterator> to_erase;
+ bool check_higher_dim_simpl = true;
+ for (Complex::iterator it_simplex = m_complex.begin(),
+ it_simplex_end = m_complex.end();
+ it_simplex != it_simplex_end;
+ ++it_simplex) {
+ // Check if the simplex is not already in a higher dim simplex
+ if (check_higher_dim_simpl
+ && it_simplex->size() > num_pts
+ && std::includes(it_simplex->begin(), it_simplex->end(),
+ s.begin(), s.end())) {
+ // No need to insert it, then
+ return false;
+ }
+ // Check if the simplex includes some lower-dim simplices
+ if (it_simplex->size() < num_pts
+ && std::includes(s.begin(), s.end(),
+ it_simplex->begin(), it_simplex->end())) {
+ to_erase.push_back(it_simplex);
+ // We don't need to check higher-dim simplices anymore
+ check_higher_dim_simpl = false;
+ }
+ }
+ for (std::vector<Complex::iterator>::const_iterator it = to_erase.begin();
+ it != to_erase.end(); ++it) {
+ m_complex.erase(*it);
+ }
+ }
+ return m_complex.insert(s).second;
+ }
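+
+ // Usage sketch (illustration only; indices are arbitrary):
+ //   Simplicial_complex sc;
+ //   Simplex t;
+ //   t.insert(0); t.insert(1); t.insert(2);
+ //   sc.add_simplex(t);   // inserted: {0, 1, 2}
+ //   Simplex e;
+ //   e.insert(0); e.insert(1);
+ //   sc.add_simplex(e);   // rejected: {0, 1} is already a face of {0, 1, 2}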
+
+ const Simplex_set &simplex_range() const {
+ return m_complex;
+ }
+
+ bool empty() {
+ return m_complex.empty();
+ }
+
+ void clear() {
+ m_complex.clear();
+ }
+
+ template <typename Test, typename Output_it>
+ void get_simplices_matching_test(Test test, Output_it out) {
+ for (Complex::const_iterator it_simplex = m_complex.begin(),
+ it_simplex_end = m_complex.end();
+ it_simplex != it_simplex_end;
+ ++it_simplex) {
+ if (test(*it_simplex))
+ *out++ = *it_simplex;
+ }
+ }
+
+ // When a simplex S has only one co-face C, we can remove S and C
+ // without changing the topology
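+  // For instance (illustrative): starting from the single triangle {0, 1, 2}
+  // with max_simplex_dim = 2, the edge {0, 1} has the triangle as its only
+  // coface, so the pair is collapsed; repeating on the lower dimensions
+  // eventually leaves a single vertex, as expected for a contractible complex.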
+
+ void collapse(int max_simplex_dim, bool quiet = false) {
+#ifdef DEBUG_TRACES
+ if (!quiet)
+ std::cerr << "Collapsing... ";
+#endif
+    // Let k = max_simplex_dim - 1
+ int k = max_simplex_dim - 1;
+
+ typedef Complex::iterator Simplex_iterator;
+ typedef std::vector<Simplex_iterator> Simplex_iterator_list;
+ typedef std::map<Simplex, Simplex_iterator_list> Cofaces_map;
+
+ std::size_t num_collapsed_maximal_simplices = 0;
+ do {
+ num_collapsed_maximal_simplices = 0;
+      // Create a map associating each non-maximal k-face with the list of its
+      // maximal cofaces
+ Cofaces_map cofaces_map;
+ for (Complex::const_iterator it_simplex = m_complex.begin(),
+ it_simplex_end = m_complex.end();
+ it_simplex != it_simplex_end;
+ ++it_simplex) {
+ if (static_cast<int> (it_simplex->size()) > k + 1) {
+ std::vector<Simplex> k_faces;
+ // Get the k-faces composing the simplex
+ combinations(*it_simplex, k + 1, std::back_inserter(k_faces));
+ for (const auto &comb : k_faces)
+ cofaces_map[comb].push_back(it_simplex);
+ }
+ }
+
+      // For each non-maximal k-face F, if F has only one maximal coface Cf:
+      // - Look for the other k-faces F2, F3... of Cf in the map and:
+      //   * if the list contains only Cf, clear the list (we only clear it,
+      //     instead of erasing the map entry, to avoid trouble with the
+      //     iterators) and add F2, F3... to the complex
+      //   * otherwise, remove Cf from the associated list
+      // - Remove Cf from the complex
+ for (Cofaces_map::const_iterator it_map_elt = cofaces_map.begin(),
+ it_map_end = cofaces_map.end();
+ it_map_elt != it_map_end;
+ ++it_map_elt) {
+ if (it_map_elt->second.size() == 1) {
+ std::vector<Simplex> k_faces;
+ const Simplex_iterator_list::value_type &it_Cf =
+ *it_map_elt->second.begin();
+ GUDHI_CHECK(it_Cf->size() == max_simplex_dim + 1,
+ std::logic_error("Wrong dimension"));
+ // Get the k-faces composing the simplex
+ combinations(*it_Cf, k + 1, std::back_inserter(k_faces));
+ for (const auto &f2 : k_faces) {
+ // Skip F
+ if (f2 != it_map_elt->first) {
+ Cofaces_map::iterator it_comb_in_map = cofaces_map.find(f2);
+ if (it_comb_in_map->second.size() == 1) {
+ it_comb_in_map->second.clear();
+ m_complex.insert(f2);
+ } else { // it_comb_in_map->second.size() > 1
+ Simplex_iterator_list::iterator it = std::find(it_comb_in_map->second.begin(),
+ it_comb_in_map->second.end(),
+ it_Cf);
+ GUDHI_CHECK(it != it_comb_in_map->second.end(),
+ std::logic_error("Error: it == it_comb_in_map->second.end()"));
+ it_comb_in_map->second.erase(it);
+ }
+ }
+ }
+ m_complex.erase(it_Cf);
+ ++num_collapsed_maximal_simplices;
+ }
+ }
+ // Repeat until no maximal simplex got removed
+ } while (num_collapsed_maximal_simplices > 0);
+
+ // Collapse the lower dimension simplices
+ if (k > 0)
+ collapse(max_simplex_dim - 1, true);
+
+#ifdef DEBUG_TRACES
+ if (!quiet)
+ std::cerr << "done.\n";
+#endif
+ }
+
+ void display_stats() const {
+ std::cerr << yellow << "Complex stats:\n" << white;
+
+ if (m_complex.empty()) {
+ std::cerr << " * No simplices.\n";
+ } else {
+      // Number of simplices for each dimension
+ std::map<int, std::size_t> simplex_stats;
+
+ for (Complex::const_iterator it_simplex = m_complex.begin(),
+ it_simplex_end = m_complex.end();
+ it_simplex != it_simplex_end;
+ ++it_simplex) {
+ ++simplex_stats[static_cast<int> (it_simplex->size()) - 1];
+ }
+
+ for (std::map<int, std::size_t>::const_iterator it_map = simplex_stats.begin();
+ it_map != simplex_stats.end(); ++it_map) {
+ std::cerr << " * " << it_map->first << "-simplices: "
+ << it_map->second << "\n";
+ }
+ }
+ }
+
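+  // Checks that every simplex of the complex has dimension simplex_dim and
+  // that every (simplex_dim - 1)-face has exactly 2 cofaces (1 is also
+  // accepted if allow_borders is true). Does NOT check that vertex stars
+  // are connected (see is_pure_pseudomanifold below).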
+ // verbose_level = 0, 1 or 2
+ bool is_pure_pseudomanifold__do_not_check_if_stars_are_connected(int simplex_dim,
+ bool allow_borders = false,
+ bool exit_at_the_first_problem = false,
+ int verbose_level = 0,
+ std::size_t *p_num_wrong_dim_simplices = NULL,
+ std::size_t *p_num_wrong_number_of_cofaces = NULL) const {
+ typedef Simplex K_1_face;
+ typedef std::map<K_1_face, std::size_t> Cofaces_map;
+
+ std::size_t num_wrong_dim_simplices = 0;
+ std::size_t num_wrong_number_of_cofaces = 0;
+
+    // Create a map counting the number of cofaces of each K_1_face
+ Cofaces_map cofaces_map;
+ for (Complex::const_iterator it_simplex = m_complex.begin(),
+ it_simplex_end = m_complex.end();
+ it_simplex != it_simplex_end;
+ ++it_simplex) {
+ if (static_cast<int> (it_simplex->size()) != simplex_dim + 1) {
+ if (verbose_level >= 2)
+ std::cerr << "Found a simplex with dim = "
+ << it_simplex->size() - 1 << "\n";
+ ++num_wrong_dim_simplices;
+ } else {
+ std::vector<K_1_face> k_1_faces;
+ // Get the facets composing the simplex
+ combinations(
+ *it_simplex, simplex_dim, std::back_inserter(k_1_faces));
+ for (const auto &k_1_face : k_1_faces) {
+ ++cofaces_map[k_1_face];
+ }
+ }
+ }
+
+ for (Cofaces_map::const_iterator it_map_elt = cofaces_map.begin(),
+ it_map_end = cofaces_map.end();
+ it_map_elt != it_map_end;
+ ++it_map_elt) {
+ if (it_map_elt->second != 2
+ && (!allow_borders || it_map_elt->second != 1)) {
+ if (verbose_level >= 2)
+ std::cerr << "Found a k-1-face with "
+ << it_map_elt->second << " cofaces\n";
+
+ if (exit_at_the_first_problem)
+ return false;
+ else
+ ++num_wrong_number_of_cofaces;
+ }
+ }
+
+ bool ret = num_wrong_dim_simplices == 0 && num_wrong_number_of_cofaces == 0;
+
+ if (verbose_level >= 1) {
+ std::cerr << "Pure pseudo-manifold: ";
+ if (ret) {
+ std::cerr << green << "YES" << white << "\n";
+ } else {
+ std::cerr << red << "NO" << white << "\n"
+ << " * Number of wrong dimension simplices: "
+ << num_wrong_dim_simplices << "\n"
+ << " * Number of wrong number of cofaces: "
+ << num_wrong_number_of_cofaces << "\n";
+ }
+ }
+
+ if (p_num_wrong_dim_simplices)
+ *p_num_wrong_dim_simplices = num_wrong_dim_simplices;
+ if (p_num_wrong_number_of_cofaces)
+ *p_num_wrong_number_of_cofaces = num_wrong_number_of_cofaces;
+
+ return ret;
+ }
+
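+  // Returns the number of K-dimensional faces of the complex, including the
+  // K-faces of higher-dimensional simplices (each face is counted once).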
+ template <int K>
+ std::size_t num_K_simplices() const {
+ Simplex_set k_simplices;
+
+ for (Complex::const_iterator it_simplex = m_complex.begin(),
+ it_simplex_end = m_complex.end();
+ it_simplex != it_simplex_end;
+ ++it_simplex) {
+ if (it_simplex->size() == K + 1) {
+ k_simplices.insert(*it_simplex);
+ } else if (it_simplex->size() > K + 1) {
+ // Get the k-faces composing the simplex
+ combinations(
+ *it_simplex, K + 1, std::inserter(k_simplices, k_simplices.begin()));
+ }
+ }
+
+ return k_simplices.size();
+ }
+
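+  // Computes V - E + F from the 0-, 1- and 2-dimensional faces only, so the
+  // result is the Euler characteristic only for complexes of dimension <= 2.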
+ std::ptrdiff_t euler_characteristic(bool verbose = false) const {
+ if (verbose)
+ std::cerr << "\nComputing Euler characteristic of the complex...\n";
+
+ std::size_t num_vertices = num_K_simplices<0>();
+ std::size_t num_edges = num_K_simplices<1>();
+ std::size_t num_triangles = num_K_simplices<2>();
+
+ std::ptrdiff_t ec =
+ (std::ptrdiff_t) num_vertices
+ - (std::ptrdiff_t) num_edges
+ + (std::ptrdiff_t) num_triangles;
+
+ if (verbose)
+ std::cerr << "Euler characteristic: V - E + F = "
+ << num_vertices << " - " << num_edges << " + " << num_triangles << " = "
+ << blue
+ << ec
+ << white << "\n";
+
+ return ec;
+ }
+
+  // Checks that the complex is a pure pseudomanifold of dimension simplex_dim:
+  // - every simplex must have dimension simplex_dim,
+  // - every (simplex_dim - 1)-face must have exactly 2 cofaces
+  //   (1 is also accepted if allow_borders is true),
+  // - the star of every vertex must be connected.
+  // The optional output parameters collect the number of offending simplices
+  // and, if requested, the simplices themselves.
+
+ bool is_pure_pseudomanifold(
+ int simplex_dim,
+ std::size_t num_vertices,
+ bool allow_borders = false,
+ bool exit_at_the_first_problem = false,
+ int verbose_level = 0,
+ std::size_t *p_num_wrong_dim_simplices = NULL,
+ std::size_t *p_num_wrong_number_of_cofaces = NULL,
+ std::size_t *p_num_unconnected_stars = NULL,
+ Simplex_set *p_wrong_dim_simplices = NULL,
+ Simplex_set *p_wrong_number_of_cofaces_simplices = NULL,
+ Simplex_set *p_unconnected_stars_simplices = NULL) const {
+ // If simplex_dim == 1, we do not need to check if stars are connected
+ if (simplex_dim == 1) {
+ if (p_num_unconnected_stars)
+ *p_num_unconnected_stars = 0;
+ return is_pure_pseudomanifold__do_not_check_if_stars_are_connected(simplex_dim,
+ allow_borders,
+ exit_at_the_first_problem,
+ verbose_level,
+ p_num_wrong_dim_simplices,
+ p_num_wrong_number_of_cofaces);
+ }
+ // Associates each vertex (= the index in the vector)
+ // to its star (list of simplices)
+ typedef std::vector<std::vector<Complex::const_iterator> > Stars;
+ std::size_t num_wrong_dim_simplices = 0;
+ std::size_t num_wrong_number_of_cofaces = 0;
+ std::size_t num_unconnected_stars = 0;
+
+ // Fills a Stars data structure
+ Stars stars;
+ stars.resize(num_vertices);
+ for (Complex::const_iterator it_simplex = m_complex.begin(),
+ it_simplex_end = m_complex.end();
+ it_simplex != it_simplex_end;
+ ++it_simplex) {
+ if (static_cast<int> (it_simplex->size()) != simplex_dim + 1) {
+ if (verbose_level >= 2)
+ std::cerr << "Found a simplex with dim = "
+ << it_simplex->size() - 1 << "\n";
+ ++num_wrong_dim_simplices;
+ if (p_wrong_dim_simplices)
+ p_wrong_dim_simplices->insert(*it_simplex);
+ } else {
+ for (Simplex::const_iterator it_point_idx = it_simplex->begin();
+ it_point_idx != it_simplex->end();
+ ++it_point_idx) {
+ stars[*it_point_idx].push_back(it_simplex);
+ }
+ }
+ }
+
+ // Now, for each star, we have a vector of its d-simplices
+ // i.e. one index for each d-simplex
+ // Boost Graph only deals with indexes, so we also need indexes for the
+ // (d-1)-simplices
+ std::size_t center_vertex_index = 0;
+ for (Stars::const_iterator it_star = stars.begin();
+ it_star != stars.end();
+ ++it_star, ++center_vertex_index) {
+ typedef std::map<Simplex, std::vector<std::size_t> >
+ Dm1_faces_to_adj_D_faces;
+ Dm1_faces_to_adj_D_faces dm1_faces_to_adj_d_faces;
+
+ for (std::size_t i_dsimpl = 0; i_dsimpl < it_star->size(); ++i_dsimpl) {
+ Simplex dm1_simpl_of_link = *((*it_star)[i_dsimpl]);
+ dm1_simpl_of_link.erase(center_vertex_index);
+ // Copy it to a vector so that we can use operator[] on it
+ std::vector<std::size_t> dm1_simpl_of_link_vec(
+ dm1_simpl_of_link.begin(), dm1_simpl_of_link.end());
+
+ CGAL::Combination_enumerator<int> dm2_simplices(
+ simplex_dim - 1, 0, simplex_dim);
+ for (; !dm2_simplices.finished(); ++dm2_simplices) {
+ Simplex dm2_simpl;
+ for (int j = 0; j < simplex_dim - 1; ++j)
+ dm2_simpl.insert(dm1_simpl_of_link_vec[dm2_simplices[j]]);
+ dm1_faces_to_adj_d_faces[dm2_simpl].push_back(i_dsimpl);
+ }
+ }
+
+ Adj_graph adj_graph;
+ std::vector<Graph_vertex> d_faces_descriptors;
+ d_faces_descriptors.resize(it_star->size());
+ for (std::size_t j = 0; j < it_star->size(); ++j)
+ d_faces_descriptors[j] = boost::add_vertex(adj_graph);
+
+ Dm1_faces_to_adj_D_faces::const_iterator dm1_to_d_it =
+ dm1_faces_to_adj_d_faces.begin();
+ Dm1_faces_to_adj_D_faces::const_iterator dm1_to_d_it_end =
+ dm1_faces_to_adj_d_faces.end();
+ for (std::size_t i_km1_face = 0;
+ dm1_to_d_it != dm1_to_d_it_end;
+ ++dm1_to_d_it, ++i_km1_face) {
+ Graph_vertex km1_gv = boost::add_vertex(adj_graph);
+
+ for (std::vector<std::size_t>::const_iterator kface_it =
+ dm1_to_d_it->second.begin();
+ kface_it != dm1_to_d_it->second.end();
+ ++kface_it) {
+ boost::add_edge(km1_gv, *kface_it, adj_graph);
+ }
+
+ if (dm1_to_d_it->second.size() != 2
+ && (!allow_borders || dm1_to_d_it->second.size() != 1)) {
+ ++num_wrong_number_of_cofaces;
+ if (p_wrong_number_of_cofaces_simplices) {
+ for (auto idx : dm1_to_d_it->second)
+ p_wrong_number_of_cofaces_simplices->insert(*((*it_star)[idx]));
+ }
+ }
+ }
+
+      // What is left is to check the connectivity
+ bool is_connected = true;
+ if (boost::num_vertices(adj_graph) > 0) {
+ std::vector<int> components(boost::num_vertices(adj_graph));
+ is_connected =
+ (boost::connected_components(adj_graph, &components[0]) == 1);
+ }
+
+ if (!is_connected) {
+ if (verbose_level >= 2)
+ std::cerr << "Error: star #" << center_vertex_index
+ << " is not connected\n";
+ ++num_unconnected_stars;
+ if (p_unconnected_stars_simplices) {
+ for (std::vector<Complex::const_iterator>::const_iterator
+ it_simpl = it_star->begin(),
+ it_simpl_end = it_star->end();
+ it_simpl != it_simpl_end;
+ ++it_simpl) {
+ p_unconnected_stars_simplices->insert(**it_simpl);
+ }
+ }
+ }
+ }
+
+ // Each one has been counted several times ("simplex_dim" times)
+ num_wrong_number_of_cofaces /= simplex_dim;
+
+ bool ret =
+ num_wrong_dim_simplices == 0
+ && num_wrong_number_of_cofaces == 0
+ && num_unconnected_stars == 0;
+
+ if (verbose_level >= 1) {
+ std::cerr << "Pure pseudo-manifold: ";
+ if (ret) {
+ std::cerr << green << "YES" << white << "\n";
+ } else {
+ std::cerr << red << "NO" << white << "\n"
+ << " * Number of wrong dimension simplices: "
+ << num_wrong_dim_simplices << "\n"
+ << " * Number of wrong number of cofaces: "
+ << num_wrong_number_of_cofaces << "\n"
+ << " * Number of not-connected stars: "
+ << num_unconnected_stars << "\n";
+ }
+ }
+
+ if (p_num_wrong_dim_simplices)
+ *p_num_wrong_dim_simplices = num_wrong_dim_simplices;
+ if (p_num_wrong_number_of_cofaces)
+ *p_num_wrong_number_of_cofaces = num_wrong_number_of_cofaces;
+ if (p_num_unconnected_stars)
+ *p_num_unconnected_stars = num_unconnected_stars;
+
+ return ret;
+ }
+
+ private:
+ typedef Simplex_set Complex;
+
+ // graph is an adjacency list
+ typedef boost::adjacency_list<boost::vecS, boost::vecS, boost::undirectedS> Adj_graph;
+  // Vertex and edge descriptors of the adjacency graph
+ typedef boost::graph_traits<Adj_graph>::vertex_descriptor Graph_vertex;
+ typedef boost::graph_traits<Adj_graph>::edge_descriptor Graph_edge;
+
+ Complex m_complex;
+}; // class Simplicial_complex
+
+} // namespace internal
+} // namespace tangential_complex
+} // namespace Gudhi
+
+#endif // TANGENTIAL_COMPLEX_SIMPLICIAL_COMPLEX_H_
diff --git a/src/Tangential_complex/include/gudhi/Tangential_complex/config.h b/src/Tangential_complex/include/gudhi/Tangential_complex/config.h
new file mode 100644
index 00000000..ffefcd6b
--- /dev/null
+++ b/src/Tangential_complex/include/gudhi/Tangential_complex/config.h
@@ -0,0 +1,43 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Clement Jamin
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef TANGENTIAL_COMPLEX_CONFIG_H_
+#define TANGENTIAL_COMPLEX_CONFIG_H_
+
+#include <cstddef>
+
+// ========================= Debugging & profiling =============================
+// #define GUDHI_TC_PROFILING
+// #define GUDHI_TC_VERY_VERBOSE
+// #define GUDHI_TC_PERFORM_EXTRA_CHECKS
+// #define GUDHI_TC_SHOW_DETAILED_STATS_FOR_INCONSISTENCIES
+
+// ========================= Strategy ==========================================
+#define GUDHI_TC_PERTURB_POSITION
+// #define GUDHI_TC_PERTURB_WEIGHT
+
+// ========================= Parameters ========================================
+
+// PCA will use GUDHI_TC_BASE_VALUE_FOR_PCA^intrinsic_dim points
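+// (e.g. with the default value 5 and intrinsic dimension 2, 5^2 = 25 points are used)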
+const std::size_t GUDHI_TC_BASE_VALUE_FOR_PCA = 5;
+
+#endif // TANGENTIAL_COMPLEX_CONFIG_H_
diff --git a/src/Tangential_complex/include/gudhi/Tangential_complex/utilities.h b/src/Tangential_complex/include/gudhi/Tangential_complex/utilities.h
new file mode 100644
index 00000000..b2d6d674
--- /dev/null
+++ b/src/Tangential_complex/include/gudhi/Tangential_complex/utilities.h
@@ -0,0 +1,195 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Clement Jamin
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef TANGENTIAL_COMPLEX_UTILITIES_H_
+#define TANGENTIAL_COMPLEX_UTILITIES_H_
+
+#include <CGAL/Dimension.h>
+#include <CGAL/Combination_enumerator.h>
+#include <CGAL/IO/Triangulation_off_ostream.h>
+
+#include <boost/container/flat_set.hpp>
+
+#include <Eigen/Core>
+#include <Eigen/Eigen>
+
+#include <set>
+#include <vector>
+#include <array>
+#include <fstream>
+#include <atomic>
+#include <cmath> // for std::sqrt
+
+namespace Gudhi {
+namespace tangential_complex {
+namespace internal {
+
+// Wraps std::atomic<T> with copy construction and assignment so that
+// it can be stored in a std::vector
+template <typename T>
+struct Atomic_wrapper
+: public std::atomic<T> {
+ typedef std::atomic<T> Base;
+
+ Atomic_wrapper() { }
+
+ Atomic_wrapper(const T &t) : Base(t) { }
+
+ Atomic_wrapper(const std::atomic<T> &a) : Base(a.load()) { }
+
+ Atomic_wrapper(const Atomic_wrapper &other) : Base(other.load()) { }
+
+ Atomic_wrapper &operator=(const T &other) {
+ Base::store(other);
+ return *this;
+ }
+
+ Atomic_wrapper &operator=(const std::atomic<T> &other) {
+ Base::store(other.load());
+ return *this;
+ }
+
+ Atomic_wrapper &operator=(const Atomic_wrapper &other) {
+ Base::store(other.load());
+ return *this;
+ }
+};
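+
+// Illustrative example: a std::vector<Atomic_wrapper<std::size_t> > can be
+// copied and resized, whereas a std::vector<std::atomic<std::size_t> > cannot.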
+
+// Modifies v in-place
+template <typename K>
+typename K::Vector_d& normalize_vector(typename K::Vector_d& v,
+ K const& k) {
+ v = k.scaled_vector_d_object()(
+ v, typename K::FT(1) / std::sqrt(k.squared_length_d_object()(v)));
+ return v;
+}
+
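+// A basis of vectors attached to an origin point, where the origin is stored
+// as an index into the ambient point set.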
+template<typename Kernel>
+struct Basis {
+ typedef typename Kernel::FT FT;
+ typedef typename Kernel::Point_d Point;
+ typedef typename Kernel::Vector_d Vector;
+ typedef typename std::vector<Vector>::const_iterator const_iterator;
+
+ std::size_t m_origin;
+ std::vector<Vector> m_vectors;
+
+ std::size_t origin() const {
+ return m_origin;
+ }
+
+ void set_origin(std::size_t o) {
+ m_origin = o;
+ }
+
+ const_iterator begin() const {
+ return m_vectors.begin();
+ }
+
+ const_iterator end() const {
+ return m_vectors.end();
+ }
+
+ std::size_t size() const {
+ return m_vectors.size();
+ }
+
+ Vector& operator[](const std::size_t i) {
+ return m_vectors[i];
+ }
+
+ const Vector& operator[](const std::size_t i) const {
+ return m_vectors[i];
+ }
+
+ void push_back(const Vector& v) {
+ m_vectors.push_back(v);
+ }
+
+ void reserve(const std::size_t s) {
+ m_vectors.reserve(s);
+ }
+
+ Basis() { }
+
+ Basis(std::size_t origin) : m_origin(origin) { }
+
+ Basis(std::size_t origin, const std::vector<Vector>& vectors)
+ : m_origin(origin), m_vectors(vectors) { }
+
+ int dimension() const {
+ return static_cast<int> (m_vectors.size());
+ }
+};
+
+// 1st line: number of points
+// Then one point per line
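+// e.g. for two points in the plane:
+//   2
+//   0.5 1.25
+//   -1 3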
+template <typename Kernel, typename Point_range>
+std::ostream &export_point_set(
+ Kernel const& k,
+ Point_range const& points,
+ std::ostream & os,
+ const char *coord_separator = " ") {
+ // Kernel functors
+ typename Kernel::Construct_cartesian_const_iterator_d ccci =
+ k.construct_cartesian_const_iterator_d_object();
+
+ os << points.size() << "\n";
+
+ typename Point_range::const_iterator it_p = points.begin();
+ typename Point_range::const_iterator it_p_end = points.end();
+ // For each point p
+ for (; it_p != it_p_end; ++it_p) {
+ for (auto it = ccci(*it_p); it != ccci(*it_p, 0); ++it)
+ os << CGAL::to_double(*it) << coord_separator;
+
+ os << "\n";
+ }
+
+ return os;
+}
+
+// Compute all the k-combinations of elements
+// Output_iterator::value_type must be
+// boost::container::flat_set<std::size_t>
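+// Example (illustrative): for the element set {0, 1, 2, 3} and k = 2, the
+// 6 two-element subsets {0,1}, {0,2}, ..., {2,3} are written to the output
+// iterator (in no particular order).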
+template <typename Elements_container, typename Output_iterator>
+void combinations(const Elements_container &elements, int k,
+                  Output_iterator out) {
+ std::size_t n = elements.size();
+ std::vector<bool> booleans(n, false);
+ std::fill(booleans.begin() + n - k, booleans.end(), true);
+ do {
+ boost::container::flat_set<std::size_t> combination;
+ typename Elements_container::const_iterator it_elt = elements.begin();
+ for (std::size_t i = 0; i < n; ++i, ++it_elt) {
+ if (booleans[i])
+ combination.insert(*it_elt);
+ }
+    *out++ = combination;
+ } while (std::next_permutation(booleans.begin(), booleans.end()));
+}
+
+} // namespace internal
+} // namespace tangential_complex
+} // namespace Gudhi
+
+#endif // TANGENTIAL_COMPLEX_UTILITIES_H_
diff --git a/src/Tangential_complex/test/CMakeLists.txt b/src/Tangential_complex/test/CMakeLists.txt
new file mode 100644
index 00000000..b2bf5dd7
--- /dev/null
+++ b/src/Tangential_complex/test/CMakeLists.txt
@@ -0,0 +1,23 @@
+cmake_minimum_required(VERSION 2.6)
+project(Tangential_complex_tests)
+
+if (GCOVR_PATH)
+ # for gcovr to make coverage reports - Corbera Jenkins plugin
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fprofile-arcs -ftest-coverage")
+endif()
+if (GPROF_PATH)
+ # for gprof to make coverage reports - Jenkins
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -pg")
+endif()
+
+if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
+ add_executable( Tangential_complex_test_TC test_tangential_complex.cpp )
+ target_link_libraries(Tangential_complex_test_TC ${CGAL_LIBRARY} ${Boost_DATE_TIME_LIBRARY} ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
+ if (TBB_FOUND)
+ target_link_libraries(Tangential_complex_test_TC ${TBB_LIBRARIES})
+ endif()
+ add_test(Tangential_complex_test_TC ${CMAKE_CURRENT_BINARY_DIR}/Tangential_complex_test_TC
+ # XML format for Jenkins xUnit plugin
+ --log_format=XML --log_sink=${CMAKE_SOURCE_DIR}/Tangential_complex_UT.xml --log_level=test_suite --report_level=no)
+
+endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
diff --git a/src/Tangential_complex/test/test_tangential_complex.cpp b/src/Tangential_complex/test/test_tangential_complex.cpp
new file mode 100644
index 00000000..48156440
--- /dev/null
+++ b/src/Tangential_complex/test/test_tangential_complex.cpp
@@ -0,0 +1,128 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Clement Jamin
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#define BOOST_TEST_DYN_LINK
+#define BOOST_TEST_MODULE Tangential_complex - test tangential complex
+#include <boost/test/unit_test.hpp>
+
+#include <gudhi/Tangential_complex.h>
+#include <gudhi/sparsify_point_set.h>
+
+#include <CGAL/Epick_d.h>
+#include <CGAL/Random.h>
+
+#include <array>
+#include <vector>
+
+namespace tc = Gudhi::tangential_complex;
+
+BOOST_AUTO_TEST_CASE(test_Spatial_tree_data_structure) {
+ typedef CGAL::Epick_d<CGAL::Dynamic_dimension_tag> Kernel;
+ typedef Kernel::Point_d Point;
+ typedef tc::Tangential_complex<
+ Kernel, CGAL::Dynamic_dimension_tag,
+ CGAL::Parallel_tag> TC;
+
+ const int INTRINSIC_DIM = 2;
+ const int AMBIENT_DIM = 3;
+ const int NUM_POINTS = 50;
+
+ Kernel k;
+
+ // Generate points on a 2-sphere
+ CGAL::Random_points_on_sphere_d<Point> generator(AMBIENT_DIM, 3.);
+ std::vector<Point> points;
+ points.reserve(NUM_POINTS);
+ for (int i = 0; i < NUM_POINTS; ++i)
+ points.push_back(*generator++);
+
+ // Compute the TC
+ TC tc(points, INTRINSIC_DIM, k);
+ tc.compute_tangential_complex();
+
+ // Try to fix inconsistencies. Give it 60 seconds to succeed
+ auto perturb_ret = tc.fix_inconsistencies_using_perturbation(0.01, 60);
+
+ BOOST_CHECK(perturb_ret.success);
+
+ // Export the TC into a Simplex_tree
+ Gudhi::Simplex_tree<> stree;
+ tc.create_complex(stree);
+}
+
+BOOST_AUTO_TEST_CASE(test_mini_tangential) {
+ typedef CGAL::Epick_d<CGAL::Dynamic_dimension_tag> Kernel;
+ typedef Kernel::Point_d Point;
+ typedef tc::Tangential_complex<Kernel, CGAL::Dynamic_dimension_tag, CGAL::Parallel_tag> TC;
+
+
+ const int INTRINSIC_DIM = 1;
+
+  // 4 points at the corners of a unit square in the plane
+ std::vector<Point> points;
+ // [[0, 0], [1, 0], [0, 1], [1, 1]]
+ std::vector<double> point = {0.0, 0.0};
+ points.push_back(Point(point.size(), point.begin(), point.end()));
+ point = {1.0, 0.0};
+ points.push_back(Point(point.size(), point.begin(), point.end()));
+ point = {0.0, 1.0};
+ points.push_back(Point(point.size(), point.begin(), point.end()));
+ point = {1.0, 1.0};
+ points.push_back(Point(point.size(), point.begin(), point.end()));
+ std::cout << "points = " << points.size() << std::endl;
+ Kernel k;
+
+ // Compute the TC
+ TC tc(points, INTRINSIC_DIM, k);
+ tc.compute_tangential_complex();
+ TC::Num_inconsistencies num_inc = tc.number_of_inconsistent_simplices();
+ std::cout << "TC vertices = " << tc.number_of_vertices() << " - simplices = " << num_inc.num_simplices <<
+ " - inc simplices = " << num_inc.num_inconsistent_simplices <<
+ " - inc stars = " << num_inc.num_inconsistent_stars << std::endl;
+
+ BOOST_CHECK(tc.number_of_vertices() == 4);
+ BOOST_CHECK(num_inc.num_simplices == 4);
+ BOOST_CHECK(num_inc.num_inconsistent_simplices == 0);
+ BOOST_CHECK(num_inc.num_inconsistent_stars == 0);
+
+ // Export the TC into a Simplex_tree
+ Gudhi::Simplex_tree<> stree;
+ tc.create_complex(stree);
+ std::cout << "ST vertices = " << stree.num_vertices() << " - simplices = " << stree.num_simplices() << std::endl;
+
+ BOOST_CHECK(stree.num_vertices() == 4);
+ BOOST_CHECK(stree.num_simplices() == 6);
+
+ tc.fix_inconsistencies_using_perturbation(0.01, 30.0);
+
+  // Refresh the inconsistency counters after the perturbation
+  num_inc = tc.number_of_inconsistent_simplices();
+
+ BOOST_CHECK(tc.number_of_vertices() == 4);
+ BOOST_CHECK(num_inc.num_simplices == 4);
+ BOOST_CHECK(num_inc.num_inconsistent_simplices == 0);
+ BOOST_CHECK(num_inc.num_inconsistent_stars == 0);
+
+ // Export the TC into a Simplex_tree
+ tc.create_complex(stree);
+ std::cout << "ST vertices = " << stree.num_vertices() << " - simplices = " << stree.num_simplices() << std::endl;
+
+ BOOST_CHECK(stree.num_vertices() == 4);
+ BOOST_CHECK(stree.num_simplices() == 6);
+}
diff --git a/src/Witness_complex/concept/Simplicial_complex_for_witness.h b/src/Witness_complex/concept/SimplicialComplexForWitness.h
index caaf0db6..d78cc83f 100644
--- a/src/Witness_complex/concept/Simplicial_complex_for_witness.h
+++ b/src/Witness_complex/concept/SimplicialComplexForWitness.h
@@ -27,57 +27,70 @@ namespace Gudhi {
namespace witness_complex {
-/** \brief The concept Simplicial_Complex describes the requirements
+/** \brief The concept SimplicialComplexForWitness describes the requirements
* for a type to implement a simplicial complex,
- * used for example to build a 'Witness_complex'.
+ * used for example to build a Witness_complex or Strong_witness_complex.
*/
struct SimplicialComplexForWitness {
/** Handle to specify a simplex. */
typedef unspecified Simplex_handle;
- /** Handle to specify a vertex. Must be a non-negative integer. */
- typedef unspecified Vertex_handle;
+ // /** Handle to specify a vertex. Must be a non-negative integer. */
+ // typedef unspecified Vertex_handle;
- /** Returns a Simplex_hanlde that is different from all simplex handles
+  /** \brief Returns a Simplex_handle that is different from all simplex handles
* of the simplices. */
Simplex_handle null_simplex();
- /** \brief Iterator over the simplices of the complex,
- * in an arbitrary order.
- *
- * 'value_type' must be 'Simplex_handle'.*/
- typedef unspecified Complex_simplex_range;
-
- /**
- * \brief Returns a range over all the simplices of a
- * complex.
+ /** Returns the number of vertices in the simplicial complex
*/
- Complex_simplex_range complex_simplex_range();
-
- /** \brief Iterator over vertices of a simplex.
- *
- * 'value type' must be 'Vertex_handle'.*/
- typedef unspecified Simplex_vertex_range;
-
- /** \brief Returns a range over vertices of a given
- * simplex. */
- Simplex_vertex_range simplex_vertex_range(Simplex_handle const & simplex);
-
+ std::size_t num_vertices();
+
/** \brief Return type of an insertion of a simplex
*/
typedef unspecified Insertion_result_type;
/** \brief Inserts a simplex with vertices from a given range
* 'vertex_range' in the simplicial complex.
+   * This function is only used by the Witness_complex class; by construction,
+   * it is not necessary to check whether the faces are already in the
+   * simplicial complex before insertion.
+ * The simplex is given the filtration value 'filtration'.
+ * Filtration_value should be convertible from double.
+ * The return type is not used.
* */
template< typedef Input_vertex_range >
- Insertion_result_type insert_simplex(Input_vertex_range const & vertex_range);
+ Insertion_result_type insert_simplex(Input_vertex_range const & vertex_range, Filtration_value filtration);
+ /** \brief Inserts a simplex and all its faces
+ * with vertices from a given range
+ * 'vertex_range' in the simplicial complex.
+   * This function is only used by the Strong_witness_complex class.
+ * All inserted simplices are given the filtration
+ * value 'filtration'.
+ * Filtration_value should be convertible from double.
+ * The return type is not used.
+ */
+
+  template< typename Input_vertex_range,
+            typename Filtration_value>
+ Insertion_result_type insert_simplex_and_subfaces(Input_vertex_range const & vertex_range, Filtration_value filtration);
+
/** \brief Finds a simplex with vertices given by a range
*
* If a simplex exists, its Simplex_handle is returned.
* Otherwise null_simplex() is returned. */
template< typedef Input_vertex_range >
Simplex_handle find(Input_vertex_range const & vertex_range);
+
+ /** \brief Sets the dimension of the simplicial complex to
+ * 'dimension'.
+ */
+ void set_dimension(int dimension);
+
+ /** \brief Returns the filtration of the simplex given by
+ * the simplex handle 'sh'.
+ */
+ double filtration(Simplex_handle sh);
};
} // namespace witness_complex
diff --git a/src/Witness_complex/doc/COPYRIGHT b/src/Witness_complex/doc/COPYRIGHT
new file mode 100644
index 00000000..7d032c87
--- /dev/null
+++ b/src/Witness_complex/doc/COPYRIGHT
@@ -0,0 +1,19 @@
+The files of this directory are part of the Gudhi Library. The Gudhi library
+(Geometric Understanding in Higher Dimensions) is a generic C++ library for
+computational topology.
+
+Author(s): Siargey Kachanovich
+
+Copyright (C) 2015 INRIA
+
+This program is free software: you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free Software
+Foundation, either version 3 of the License, or (at your option) any later
+version.
+
+This program is distributed in the hope that it will be useful, but WITHOUT
+ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License along with
+this program. If not, see <http://www.gnu.org/licenses/>.
diff --git a/src/Witness_complex/doc/Witness_complex_doc.h b/src/Witness_complex/doc/Witness_complex_doc.h
index 60dfd27b..171a185f 100644
--- a/src/Witness_complex/doc/Witness_complex_doc.h
+++ b/src/Witness_complex/doc/Witness_complex_doc.h
@@ -8,31 +8,106 @@
\image html "Witness_complex_representation.png" "Witness complex representation"
- \section Definitions
+ \section witnessdefinitions Definitions
- Witness complex \f$ Wit(W,L) \f$ is a simplicial complex defined on two sets of points in \f$\mathbb{R}^D\f$:
+ Witness complex is a simplicial complex defined on two sets of points in \f$\mathbb{R}^D\f$:
\li \f$W\f$ set of **witnesses** and
- \li \f$L \subseteq W\f$ set of **landmarks**.
+ \li \f$L\f$ set of **landmarks**.
- The simplices are based on landmarks
- and a simplex belongs to the witness complex if and only if it is witnessed, that is:
+ Even though the set of landmarks \f$L\f$ is often a subset of the set of witnesses \f$W\f$, this is not a requirement for the current implementation.
- \f$ \sigma \subset L \f$ is witnessed if there exists a point \f$w \in W\f$ such that
- w is closer to the vertices of \f$ \sigma \f$ than other points in \f$ L \f$ and all of its faces are witnessed as well.
-
- The data structure is described in \cite boissonnatmariasimplextreealgorithmica .
+ Landmarks are the vertices of the simplicial complex,
+ and witnesses help decide which simplices are inserted, via an "is witnessed" predicate.
- \section Implementation
+ De Silva and Carlsson, in their paper \cite de2004topological, distinguish between **weak witnessing** and **strong witnessing**:
- The principal class of this module is Gudhi::Witness_complex.
+ - *weak*: \f$ \sigma \subset L \f$ is witnessed by \f$ w \in W\f$ if \f$ \forall l \in \sigma,\ \forall l' \in \mathbf{L \setminus \sigma},\ d(w,l) \leq d(w,l') \f$
+ - *strong*: \f$ \sigma \subset L \f$ is witnessed by \f$ w \in W\f$ if \f$ \forall l \in \sigma,\ \forall l' \in \mathbf{L},\ d(w,l) \leq d(w,l') \f$
- In both cases, the constructor for this class takes a {witness}x{closest_landmarks} table, where each row represents a witness and consists of landmarks sorted by distance to this witness.
- This table can be constructed by two additional classes Landmark_choice_by_furthest_point and Landmark_choice_by_random_point also included in the module.
+ where \f$ d(.,.) \f$ is a distance function.
- *\image html "bench_Cy8.png" "Running time as function on number of landmarks" width=10cm
- *\image html "bench_sphere.png" "Running time as function on number of witnesses for |L|=300" width=10cm
+ Both definitions can be relaxed by a real value \f$\alpha\f$:
+
+ - *weak*: \f$ \sigma \subset L \f$ is \f$\alpha\f$-witnessed by \f$ w \in W\f$ if \f$ \forall l \in \sigma,\ \forall l' \in \mathbf{L \setminus \sigma},\ d(w,l)^2 \leq d(w,l')^2 + \alpha^2 \f$
+ - *strong*: \f$ \sigma \subset L \f$ is \f$\alpha\f$-witnessed by \f$ w \in W\f$ if \f$ \forall l \in \sigma,\ \forall l' \in \mathbf{L},\ d(w,l)^2 \leq d(w,l')^2 + \alpha^2 \f$
+
+ which leads to definitions of **weak relaxed witness complex** (or just relaxed witness complex for short) and **strong relaxed witness complex** respectively.
+
+ \image html "swit.svg" "Strongly witnessed simplex"
+
+ In the particular case of 0-relaxation, the weak complex corresponds to the **witness complex** introduced in \cite de2004topological, whereas the 0-relaxed strong witness complex consists of vertices only and is not very interesting.
+ Hence, for small relaxations the weak version is preferable.
+ However, to capture the homotopy type (for example using Gudhi::persistent_cohomology::Persistent_cohomology) it is often necessary to work with higher filtration values. In this case the strong relaxed witness complex is faster to compute and offers similar results.
+
+ \section witnessimplementation Implementation
+ The complexes described above are implemented in the following classes:
+ - Gudhi::witness_complex::Witness_complex
+ - Gudhi::witness_complex::Euclidean_witness_complex
+ - Gudhi::witness_complex::Strong_witness_complex
+ - Gudhi::witness_complex::Euclidean_strong_witness_complex
+
+ The construction of the Euclidean versions of the complexes follows the same scheme:
+ 1. Construct a search tree on landmarks (for that Gudhi::spatial_searching::Kd_tree_search is used internally).
+ 2. Construct lists of nearest landmarks for each witness (special structure Gudhi::witness_complex::Active_witness is used internally).
+ 3. Construct the witness complex for nearest landmark lists.
+
+ In the non-Euclidean classes, the lists of nearest landmarks are supposed to be given as input.
+
+ The constructors perform steps 1 and 2, while the function 'create_complex' executes step 3.
+
+ \section witnessexample1 Example 1: Constructing weak relaxed witness complex from an off file
+
+ Let's start with a simple example, which reads an off point file and computes a weak witness complex.
+
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~{.cpp}
+
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Euclidean_witness_complex.h>
+#include <gudhi/pick_n_random_points.h>
+#include <gudhi/Points_off_io.h>
+
+#include <CGAL/Epick_d.h>
+
+#include <cstdlib>  // for atoi, atof
+#include <string>
+#include <vector>
+
+typedef CGAL::Epick_d<CGAL::Dynamic_dimension_tag> K;
+typedef K::Point_d Point_d;
+typedef Gudhi::witness_complex::Euclidean_witness_complex<K> Witness_complex;
+typedef std::vector< Point_d > Point_vector;
+
+int main(int argc, char * const argv[]) {
+ std::string file_name = argv[1];
+ int nbL = atoi(argv[2]), lim_dim = atoi(argv[4]);
+ double alpha2 = atof(argv[3]);
+ Gudhi::Simplex_tree<> simplex_tree;
+
+ // Read the point file
+ Point_vector point_vector, landmarks;
+ Gudhi::Points_off_reader<Point_d> off_reader(file_name);
+ point_vector = Point_vector(off_reader.get_point_cloud());
+
+ // Choose landmarks
+ Gudhi::subsampling::pick_n_random_points(point_vector, nbL, std::back_inserter(landmarks));
+
+ // Compute witness complex
+ Witness_complex witness_complex(landmarks,
+ point_vector);
+
+ witness_complex.create_complex(simplex_tree, alpha2, lim_dim);
+}
+
+
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
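+
+ Here 'argv[1]' is the path to an OFF point file, 'argv[2]' the number of landmarks to choose, 'argv[3]' the squared relaxation parameter \f$\alpha^2\f$, and 'argv[4]' the dimension limit passed to 'create_complex'.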
+
+ \section witnessexample2 Example 2: Computing persistence using the strong relaxed witness complex
+
+ Here is an example of constructing a strong witness complex filtration and computing persistence on it:
+
+ \include Witness_complex/example_strong_witness_persistence.cpp
\copyright GNU General Public License v3.
diff --git a/src/Witness_complex/doc/Witness_complex_representation.ipe b/src/Witness_complex/doc/Witness_complex_representation.ipe
new file mode 100644
index 00000000..f9c45d5d
--- /dev/null
+++ b/src/Witness_complex/doc/Witness_complex_representation.ipe
@@ -0,0 +1,280 @@
+<?xml version="1.0"?>
+<!DOCTYPE ipe SYSTEM "ipe.dtd">
+<ipe version="70107" creator="Ipe 7.1.10">
+<info created="D:20161010162425" modified="D:20161010162828"/>
+<ipestyle name="basic">
+<symbol name="arrow/arc(spx)">
+<path stroke="sym-stroke" fill="sym-stroke" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/farc(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/ptarc(spx)">
+<path stroke="sym-stroke" fill="sym-stroke" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-0.8 0 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/fptarc(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-0.8 0 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="mark/circle(sx)" transformations="translations">
+<path fill="sym-stroke">
+0.6 0 0 0.6 0 0 e
+0.4 0 0 0.4 0 0 e
+</path>
+</symbol>
+<symbol name="mark/disk(sx)" transformations="translations">
+<path fill="sym-stroke">
+0.6 0 0 0.6 0 0 e
+</path>
+</symbol>
+<symbol name="mark/fdisk(sfx)" transformations="translations">
+<group>
+<path fill="sym-fill">
+0.5 0 0 0.5 0 0 e
+</path>
+<path fill="sym-stroke" fillrule="eofill">
+0.6 0 0 0.6 0 0 e
+0.4 0 0 0.4 0 0 e
+</path>
+</group>
+</symbol>
+<symbol name="mark/box(sx)" transformations="translations">
+<path fill="sym-stroke" fillrule="eofill">
+-0.6 -0.6 m
+0.6 -0.6 l
+0.6 0.6 l
+-0.6 0.6 l
+h
+-0.4 -0.4 m
+0.4 -0.4 l
+0.4 0.4 l
+-0.4 0.4 l
+h
+</path>
+</symbol>
+<symbol name="mark/square(sx)" transformations="translations">
+<path fill="sym-stroke">
+-0.6 -0.6 m
+0.6 -0.6 l
+0.6 0.6 l
+-0.6 0.6 l
+h
+</path>
+</symbol>
+<symbol name="mark/fsquare(sfx)" transformations="translations">
+<group>
+<path fill="sym-fill">
+-0.5 -0.5 m
+0.5 -0.5 l
+0.5 0.5 l
+-0.5 0.5 l
+h
+</path>
+<path fill="sym-stroke" fillrule="eofill">
+-0.6 -0.6 m
+0.6 -0.6 l
+0.6 0.6 l
+-0.6 0.6 l
+h
+-0.4 -0.4 m
+0.4 -0.4 l
+0.4 0.4 l
+-0.4 0.4 l
+h
+</path>
+</group>
+</symbol>
+<symbol name="mark/cross(sx)" transformations="translations">
+<group>
+<path fill="sym-stroke">
+-0.43 -0.57 m
+0.57 0.43 l
+0.43 0.57 l
+-0.57 -0.43 l
+h
+</path>
+<path fill="sym-stroke">
+-0.43 0.57 m
+0.57 -0.43 l
+0.43 -0.57 l
+-0.57 0.43 l
+h
+</path>
+</group>
+</symbol>
+<symbol name="arrow/fnormal(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/pointed(spx)">
+<path stroke="sym-stroke" fill="sym-stroke" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-0.8 0 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/fpointed(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-0.8 0 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/linear(spx)">
+<path stroke="sym-stroke" pen="sym-pen">
+-1 0.333 m
+0 0 l
+-1 -0.333 l
+</path>
+</symbol>
+<symbol name="arrow/fdouble(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+-1 0 m
+-2 0.333 l
+-2 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/double(spx)">
+<path stroke="sym-stroke" fill="sym-stroke" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+-1 0 m
+-2 0.333 l
+-2 -0.333 l
+h
+</path>
+</symbol>
+<pen name="heavier" value="0.8"/>
+<pen name="fat" value="1.2"/>
+<pen name="ultrafat" value="2"/>
+<symbolsize name="large" value="5"/>
+<symbolsize name="small" value="2"/>
+<symbolsize name="tiny" value="1.1"/>
+<arrowsize name="large" value="10"/>
+<arrowsize name="small" value="5"/>
+<arrowsize name="tiny" value="3"/>
+<color name="red" value="1 0 0"/>
+<color name="green" value="0 1 0"/>
+<color name="blue" value="0 0 1"/>
+<color name="yellow" value="1 1 0"/>
+<color name="orange" value="1 0.647 0"/>
+<color name="gold" value="1 0.843 0"/>
+<color name="purple" value="0.627 0.125 0.941"/>
+<color name="gray" value="0.745"/>
+<color name="brown" value="0.647 0.165 0.165"/>
+<color name="navy" value="0 0 0.502"/>
+<color name="pink" value="1 0.753 0.796"/>
+<color name="seagreen" value="0.18 0.545 0.341"/>
+<color name="turquoise" value="0.251 0.878 0.816"/>
+<color name="violet" value="0.933 0.51 0.933"/>
+<color name="darkblue" value="0 0 0.545"/>
+<color name="darkcyan" value="0 0.545 0.545"/>
+<color name="darkgray" value="0.663"/>
+<color name="darkgreen" value="0 0.392 0"/>
+<color name="darkmagenta" value="0.545 0 0.545"/>
+<color name="darkorange" value="1 0.549 0"/>
+<color name="darkred" value="0.545 0 0"/>
+<color name="lightblue" value="0.678 0.847 0.902"/>
+<color name="lightcyan" value="0.878 1 1"/>
+<color name="lightgray" value="0.827"/>
+<color name="lightgreen" value="0.565 0.933 0.565"/>
+<color name="lightyellow" value="1 1 0.878"/>
+<dashstyle name="dashed" value="[4] 0"/>
+<dashstyle name="dotted" value="[1 3] 0"/>
+<dashstyle name="dash dotted" value="[4 2 1 2] 0"/>
+<dashstyle name="dash dot dotted" value="[4 2 1 2 1 2] 0"/>
+<textsize name="large" value="\large"/>
+<textsize name="Large" value="\Large"/>
+<textsize name="LARGE" value="\LARGE"/>
+<textsize name="huge" value="\huge"/>
+<textsize name="Huge" value="\Huge"/>
+<textsize name="small" value="\small"/>
+<textsize name="footnote" value="\footnotesize"/>
+<textsize name="tiny" value="\tiny"/>
+<textstyle name="center" begin="\begin{center}" end="\end{center}"/>
+<textstyle name="itemize" begin="\begin{itemize}" end="\end{itemize}"/>
+<textstyle name="item" begin="\begin{itemize}\item{}" end="\end{itemize}"/>
+<gridsize name="4 pts" value="4"/>
+<gridsize name="8 pts (~3 mm)" value="8"/>
+<gridsize name="16 pts (~6 mm)" value="16"/>
+<gridsize name="32 pts (~12 mm)" value="32"/>
+<gridsize name="10 pts (~3.5 mm)" value="10"/>
+<gridsize name="20 pts (~7 mm)" value="20"/>
+<gridsize name="14 pts (~5 mm)" value="14"/>
+<gridsize name="28 pts (~10 mm)" value="28"/>
+<gridsize name="56 pts (~20 mm)" value="56"/>
+<anglesize name="90 deg" value="90"/>
+<anglesize name="60 deg" value="60"/>
+<anglesize name="45 deg" value="45"/>
+<anglesize name="30 deg" value="30"/>
+<anglesize name="22.5 deg" value="22.5"/>
+<opacity name="10%" value="0.1"/>
+<opacity name="30%" value="0.3"/>
+<opacity name="50%" value="0.5"/>
+<opacity name="75%" value="0.75"/>
+<tiling name="falling" angle="-60" step="4" width="1"/>
+<tiling name="rising" angle="30" step="4" width="1"/>
+</ipestyle>
+<page>
+<layer name="alpha"/>
+<view layers="alpha" active="alpha"/>
+<use layer="alpha" name="mark/fdisk(sfx)" pos="288 672" size="normal" stroke="darkblue" fill="white"/>
+<path stroke="darkblue">
+48.8262 0 0 48.8262 288 672 e
+</path>
+<text transformations="translations" pos="292 676" stroke="darkblue" type="label" width="6.559" height="4.289" depth="0" valign="baseline">$\omega$</text>
+<path stroke="black">
+284 720 m
+280 624 l
+268 648 l
+h
+</path>
+<use name="mark/fdisk(sfx)" pos="284 720" size="normal" stroke="black" fill="white"/>
+<use name="mark/fdisk(sfx)" pos="268 648" size="normal" stroke="black" fill="white"/>
+<use name="mark/fdisk(sfx)" pos="280 624" size="normal" stroke="black" fill="white"/>
+<text matrix="1 0 0 1 0 8" transformations="translations" pos="268 672" stroke="black" type="label" width="6.05" height="4.289" depth="0" valign="baseline">$\sigma$</text>
+<use name="mark/fdisk(sfx)" pos="344 672" size="normal" stroke="black" fill="white"/>
+<use name="mark/fdisk(sfx)" pos="356 716" size="normal" stroke="black" fill="white"/>
+<use name="mark/fdisk(sfx)" pos="364 628" size="normal" stroke="black" fill="white"/>
+<use name="mark/fdisk(sfx)" pos="244 708" size="normal" stroke="black" fill="white"/>
+<use name="mark/fdisk(sfx)" pos="196 632" size="normal" stroke="black" fill="white"/>
+<use name="mark/fdisk(sfx)" pos="200 696" size="normal" stroke="black" fill="white"/>
+<use name="mark/fdisk(sfx)" pos="168 716" size="normal" stroke="black" fill="white"/>
+</page>
+</ipe>
diff --git a/src/Witness_complex/doc/Witness_complex_representation.png b/src/Witness_complex/doc/Witness_complex_representation.png
index 1d31a490..16e0504e 100644
--- a/src/Witness_complex/doc/Witness_complex_representation.png
+++ b/src/Witness_complex/doc/Witness_complex_representation.png
Binary files differ
diff --git a/src/Witness_complex/doc/swit.svg b/src/Witness_complex/doc/swit.svg
new file mode 100644
index 00000000..6ffb5fff
--- /dev/null
+++ b/src/Witness_complex/doc/swit.svg
@@ -0,0 +1,1303 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!-- Created with Inkscape (http://www.inkscape.org/) -->
+
+<svg
+ xmlns:ns0="http://www.iki.fi/pav/software/textext/"
+ xmlns:dc="http://purl.org/dc/elements/1.1/"
+ xmlns:cc="http://creativecommons.org/ns#"
+ xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:svg="http://www.w3.org/2000/svg"
+ xmlns="http://www.w3.org/2000/svg"
+ xmlns:xlink="http://www.w3.org/1999/xlink"
+ xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+ xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+ width="113.73116mm"
+ height="84.14254mm"
+ viewBox="0 0 402.98441 298.14286"
+ id="svg2"
+ version="1.1"
+ inkscape:version="0.91 r13725"
+ sodipodi:docname="swit.svg">
+ <sodipodi:namedview
+ id="base"
+ pagecolor="#ffffff"
+ bordercolor="#666666"
+ borderopacity="1.0"
+ inkscape:pageopacity="0.0"
+ inkscape:pageshadow="2"
+ inkscape:zoom="0.98994949"
+ inkscape:cx="402.72174"
+ inkscape:cy="258.46971"
+ inkscape:document-units="px"
+ inkscape:current-layer="layer1"
+ showgrid="false"
+ inkscape:window-width="1366"
+ inkscape:window-height="704"
+ inkscape:window-x="0"
+ inkscape:window-y="27"
+ inkscape:window-maximized="1"
+ fit-margin-top="0"
+ fit-margin-left="0"
+ fit-margin-right="0"
+ fit-margin-bottom="0" />
+ <defs
+ id="defs4">
+ <marker
+ inkscape:stockid="Arrow1Lend"
+ orient="auto"
+ refY="0"
+ refX="0"
+ id="Arrow1Lend"
+ style="overflow:visible"
+ inkscape:isstock="true">
+ <path
+ inkscape:connector-curvature="0"
+ id="path5009"
+ d="M 0,0 5,-5 -12.5,0 5,5 0,0 Z"
+ style="fill:#000080;fill-opacity:1;fill-rule:evenodd;stroke:#000080;stroke-width:1pt;stroke-opacity:1"
+ transform="matrix(-0.8,0,0,-0.8,-10,0)" />
+ </marker>
+ <marker
+ inkscape:stockid="Arrow1Lend"
+ orient="auto"
+ refY="0"
+ refX="0"
+ id="Arrow1Lend-8"
+ style="overflow:visible"
+ inkscape:isstock="true">
+ <path
+ inkscape:connector-curvature="0"
+ id="path5009-5"
+ d="M 0,0 5,-5 -12.5,0 5,5 0,0 Z"
+ style="fill:#000080;fill-opacity:1;fill-rule:evenodd;stroke:#000080;stroke-width:1pt;stroke-opacity:1"
+ transform="matrix(-0.8,0,0,-0.8,-10,0)" />
+ </marker>
+ </defs>
+ <metadata
+ id="metadata7">
+ <rdf:RDF>
+ <cc:Work
+ rdf:about="">
+ <dc:format>image/svg+xml</dc:format>
+ <dc:type
+ rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+ <dc:title></dc:title>
+ </cc:Work>
+ </rdf:RDF>
+ </metadata>
+ <g
+ transform="translate(-130.29351,-300.82484)"
+ id="layer1"
+ inkscape:groupmode="layer"
+ inkscape:label="Layer 1">
+ <circle
+ inkscape:export-ydpi="90"
+ inkscape:export-xdpi="90"
+ r="148.57143"
+ cy="449.89627"
+ cx="338.71756"
+ id="path4136"
+ style="color:#000000;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:#000080;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;marker:none;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate" />
+ <path
+ inkscape:export-ydpi="90"
+ inkscape:export-xdpi="90"
+ style="fill:none;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:3;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
+ d="m 318.19805,571.02449 0,-94.95433 -64.64976,-92.42896 164.14979,-30.80966 42.4264,120.71323 -141.92643,3.03046 100.0051,-123.23861 z"
+ id="path4301"
+ inkscape:connector-curvature="0" />
+ <path
+ inkscape:export-ydpi="90"
+ inkscape:export-xdpi="90"
+ style="fill:none;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:3;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1"
+ d="m 318.70313,571.52957 -65.65992,-187.3833 206.5762,88.89343 z"
+ id="path4303"
+ inkscape:connector-curvature="0" />
+ <circle
+ inkscape:export-ydpi="90"
+ inkscape:export-xdpi="90"
+ r="2.7779195"
+ cy="450.05875"
+ cx="338.13837"
+ id="path4138"
+ style="color:#000000;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:#000080;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;marker:none;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate" />
+ <g
+ inkscape:export-ydpi="90"
+ inkscape:export-xdpi="90"
+ id="g4147"
+ ns0:preamble="/home/siarzhuk/GitDrive/2015Gudhi/Aid/preamble.ini"
+ ns0:text="$w$"
+ transform="matrix(2.7020226,0,0,2.7020226,-261.85036,103.80999)"
+ style="fill:#000080">
+ <defs
+ id="defs4149">
+ <g
+ id="g4151">
+ <symbol
+ id="textext-20f8880a-0"
+ overflow="visible"
+ style="overflow:visible">
+ <path
+ id="path4154"
+ d=""
+ style="stroke:none"
+ inkscape:connector-curvature="0" />
+ </symbol>
+ <symbol
+ id="textext-20f8880a-1"
+ overflow="visible"
+ style="overflow:visible">
+ <path
+ id="path4157"
+ d="M 4.609375,-3.375 C 4.65625,-3.59375 4.75,-3.96875 4.75,-4.03125 c 0,-0.171875 -0.140625,-0.265625 -0.28125,-0.265625 -0.125,0 -0.296875,0.078125 -0.375,0.28125 -0.03125,0.0625 -0.5,1.96875 -0.5625,2.234375 C 3.453125,-1.484375 3.4375,-1.3125 3.4375,-1.125 c 0,0.109375 0,0.125 0.015625,0.171875 -0.234375,0.53125 -0.53125,0.84375 -0.921875,0.84375 -0.796875,0 -0.796875,-0.734375 -0.796875,-0.90625 0,-0.3125 0.046875,-0.703125 0.515625,-1.9375 0.109375,-0.296875 0.171875,-0.4375 0.171875,-0.640625 0,-0.4375 -0.328125,-0.8125 -0.8125,-0.8125 -0.953125,0 -1.3125,1.453125 -1.3125,1.53125 0,0.109375 0.09375,0.109375 0.109375,0.109375 0.109375,0 0.109375,-0.03125 0.15625,-0.1875 C 0.84375,-3.875 1.21875,-4.1875 1.578125,-4.1875 c 0.09375,0 0.25,0.015625 0.25,0.328125 0,0.25 -0.109375,0.53125 -0.1875,0.703125 -0.4375,1.171875 -0.546875,1.625 -0.546875,2.015625 0,0.90625 0.65625,1.25 1.40625,1.25 0.171875,0 0.640625,0 1.03125,-0.703125 0.265625,0.640625 0.953125,0.703125 1.25,0.703125 0.75,0 1.1875,-0.625 1.453125,-1.21875 0.328125,-0.78125 0.65625,-2.125 0.65625,-2.59375 0,-0.546875 -0.265625,-0.703125 -0.4375,-0.703125 -0.25,0 -0.5,0.265625 -0.5,0.484375 0,0.125 0.0625,0.1875 0.140625,0.265625 0.109375,0.109375 0.359375,0.359375 0.359375,0.84375 0,0.34375 -0.28125,1.3125 -0.546875,1.828125 -0.25,0.53125 -0.609375,0.875 -1.09375,0.875 -0.46875,0 -0.734375,-0.296875 -0.734375,-0.875 0,-0.265625 0.0625,-0.578125 0.109375,-0.71875 z m 0,0"
+ style="stroke:none"
+ inkscape:connector-curvature="0" />
+ </symbol>
+ </g>
+ </defs>
+ <g
+ id="textext-20f8880a-2"
+ style="fill:#000080">
+ <g
+ id="g4160"
+ style="fill:#000080;fill-opacity:1">
+ <use
+ id="use4162"
+ y="134.765"
+ x="223.43201"
+ xlink:href="#textext-20f8880a-1"
+ width="100%"
+ height="100%"
+ style="fill:#000080" />
+ </g>
+ </g>
+ </g>
+ <circle
+ inkscape:export-ydpi="90"
+ inkscape:export-xdpi="90"
+ r="2.7779195"
+ cy="383.79077"
+ cx="252.85715"
+ id="path4138-3"
+ style="color:#000000;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;marker:none;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate" />
+ <circle
+ inkscape:export-ydpi="90"
+ inkscape:export-xdpi="90"
+ r="2.7779195"
+ cy="353.07648"
+ cx="418.57144"
+ id="path4138-3-7"
+ style="color:#000000;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;marker:none;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate" />
+ <circle
+ inkscape:export-ydpi="90"
+ inkscape:export-xdpi="90"
+ r="2.7779195"
+ cy="475.93362"
+ cx="317.85715"
+ id="path4138-3-0"
+ style="color:#000000;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;marker:none;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate" />
+ <circle
+ inkscape:export-ydpi="90"
+ inkscape:export-xdpi="90"
+ r="2.7779195"
+ cy="570.21936"
+ cx="317.85715"
+ id="path4138-3-9"
+ style="color:#000000;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;marker:none;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate" />
+ <circle
+ inkscape:export-ydpi="90"
+ inkscape:export-xdpi="90"
+ r="2.7779195"
+ cy="473.07648"
+ cx="459.28571"
+ id="path4138-3-3"
+ style="color:#000000;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;marker:none;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate" />
+ <circle
+ inkscape:export-ydpi="90"
+ inkscape:export-xdpi="90"
+ r="2.7779195"
+ cy="478.07648"
+ cx="133.57143"
+ id="path4138-3-6"
+ style="color:#000000;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;marker:none;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate" />
+ <circle
+ inkscape:export-ydpi="90"
+ inkscape:export-xdpi="90"
+ r="2.7779195"
+ cy="320.21936"
+ cx="155.71428"
+ id="path4138-3-06"
+ style="color:#000000;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;marker:none;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate" />
+ <circle
+ inkscape:export-ydpi="90"
+ inkscape:export-xdpi="90"
+ r="2.7779195"
+ cy="340.73929"
+ cx="490.7774"
+ id="path4138-3-2"
+ style="color:#000000;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;marker:none;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate" />
+ <circle
+ inkscape:export-ydpi="90"
+ inkscape:export-xdpi="90"
+ r="2.7779195"
+ cy="559.76758"
+ cx="490.60406"
+ id="path4138-3-61"
+ style="color:#000000;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;marker:none;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate" />
+ <g
+ id="g4147-8"
+ ns0:preamble="/home/siarzhuk/GitDrive/2015Gudhi/Aid/preamble.ini"
+ ns0:text="$w$"
+ transform="matrix(2.7020226,0,0,2.7020226,-152.29409,72.785446)"
+ style="fill:#000000">
+ <defs
+ id="defs4149-7">
+ <g
+ id="g4151-9">
+ <symbol
+ id="textext-20f8880a-0-2"
+ overflow="visible"
+ style="overflow:visible">
+ <path
+ id="path4154-0"
+ d=""
+ style="stroke:none"
+ inkscape:connector-curvature="0" />
+ </symbol>
+ <symbol
+ id="textext-20f8880a-1-2"
+ overflow="visible"
+ style="overflow:visible">
+ <path
+ id="path4157-3"
+ d="M 4.609375,-3.375 C 4.65625,-3.59375 4.75,-3.96875 4.75,-4.03125 c 0,-0.171875 -0.140625,-0.265625 -0.28125,-0.265625 -0.125,0 -0.296875,0.078125 -0.375,0.28125 -0.03125,0.0625 -0.5,1.96875 -0.5625,2.234375 C 3.453125,-1.484375 3.4375,-1.3125 3.4375,-1.125 c 0,0.109375 0,0.125 0.015625,0.171875 -0.234375,0.53125 -0.53125,0.84375 -0.921875,0.84375 -0.796875,0 -0.796875,-0.734375 -0.796875,-0.90625 0,-0.3125 0.046875,-0.703125 0.515625,-1.9375 0.109375,-0.296875 0.171875,-0.4375 0.171875,-0.640625 0,-0.4375 -0.328125,-0.8125 -0.8125,-0.8125 -0.953125,0 -1.3125,1.453125 -1.3125,1.53125 0,0.109375 0.09375,0.109375 0.109375,0.109375 0.109375,0 0.109375,-0.03125 0.15625,-0.1875 C 0.84375,-3.875 1.21875,-4.1875 1.578125,-4.1875 c 0.09375,0 0.25,0.015625 0.25,0.328125 0,0.25 -0.109375,0.53125 -0.1875,0.703125 -0.4375,1.171875 -0.546875,1.625 -0.546875,2.015625 0,0.90625 0.65625,1.25 1.40625,1.25 0.171875,0 0.640625,0 1.03125,-0.703125 0.265625,0.640625 0.953125,0.703125 1.25,0.703125 0.75,0 1.1875,-0.625 1.453125,-1.21875 0.328125,-0.78125 0.65625,-2.125 0.65625,-2.59375 0,-0.546875 -0.265625,-0.703125 -0.4375,-0.703125 -0.25,0 -0.5,0.265625 -0.5,0.484375 0,0.125 0.0625,0.1875 0.140625,0.265625 0.109375,0.109375 0.359375,0.359375 0.359375,0.84375 0,0.34375 -0.28125,1.3125 -0.546875,1.828125 -0.25,0.53125 -0.609375,0.875 -1.09375,0.875 -0.46875,0 -0.734375,-0.296875 -0.734375,-0.875 0,-0.265625 0.0625,-0.578125 0.109375,-0.71875 z m 0,0"
+ style="stroke:none"
+ inkscape:connector-curvature="0" />
+ </symbol>
+ </g>
+ </defs>
+ <g
+ id="textext-20f8880a-2-7"
+ style="fill:#000000" />
+ </g>
+ <g
+ inkscape:export-ydpi="90"
+ inkscape:export-xdpi="90"
+ id="g4558"
+ style="fill:#000080"
+ transform="matrix(2.7020226,0,0,2.7020226,-254.3195,202.59004)"
+ ns0:preamble="/home/siarzhuk/GitDrive/2015Gudhi/Aid/preamble.ini"
+ ns0:text="$\\sigma \\subset L$">
+ <defs
+ id="defs4560">
+ <g
+ id="g4562">
+ <symbol
+ id="textext-b73c230a-0"
+ overflow="visible"
+ style="overflow:visible">
+ <path
+ id="path4565"
+ d=""
+ style="stroke:none"
+ inkscape:connector-curvature="0" />
+ </symbol>
+ <symbol
+ id="textext-b73c230a-1"
+ overflow="visible"
+ style="overflow:visible">
+ <path
+ id="path4568"
+ d="m 5.15625,-3.71875 c 0.140625,0 0.5,0 0.5,-0.34375 0,-0.234375 -0.21875,-0.234375 -0.390625,-0.234375 l -2.28125,0 c -1.5,0 -2.609375,1.640625 -2.609375,2.828125 0,0.875 0.59375,1.578125 1.5,1.578125 1.171875,0 2.5,-1.203125 2.5,-2.734375 0,-0.171875 0,-0.65625 -0.3125,-1.09375 z M 1.890625,-0.109375 C 1.390625,-0.109375 1,-0.46875 1,-1.1875 c 0,-0.296875 0.109375,-1.109375 0.46875,-1.703125 0.421875,-0.6875 1.015625,-0.828125 1.359375,-0.828125 0.828125,0 0.90625,0.65625 0.90625,0.96875 0,0.46875 -0.203125,1.28125 -0.53125,1.796875 -0.390625,0.578125 -0.9375,0.84375 -1.3125,0.84375 z m 0,0"
+ style="stroke:none"
+ inkscape:connector-curvature="0" />
+ </symbol>
+ <symbol
+ id="textext-b73c230a-2"
+ overflow="visible"
+ style="overflow:visible">
+ <path
+ id="path4571"
+ d="M 3.734375,-6.03125 C 3.8125,-6.390625 3.84375,-6.5 4.78125,-6.5 c 0.296875,0 0.375,0 0.375,-0.1875 0,-0.125 -0.109375,-0.125 -0.15625,-0.125 -0.328125,0 -1.140625,0.03125 -1.46875,0.03125 -0.296875,0 -1.03125,-0.03125 -1.328125,-0.03125 -0.0625,0 -0.1875,0 -0.1875,0.203125 0,0.109375 0.09375,0.109375 0.28125,0.109375 0.015625,0 0.203125,0 0.375,0.015625 0.171875,0.03125 0.265625,0.03125 0.265625,0.171875 0,0.03125 0,0.0625 -0.03125,0.1875 L 1.5625,-0.78125 c -0.09375,0.390625 -0.109375,0.46875 -0.90625,0.46875 -0.171875,0 -0.265625,0 -0.265625,0.203125 C 0.390625,0 0.484375,0 0.65625,0 l 4.625,0 C 5.515625,0 5.515625,0 5.578125,-0.171875 L 6.375,-2.328125 c 0.03125,-0.109375 0.03125,-0.125 0.03125,-0.140625 0,-0.03125 -0.03125,-0.109375 -0.109375,-0.109375 -0.09375,0 -0.109375,0.0625 -0.171875,0.21875 -0.34375,0.90625 -0.78125,2.046875 -2.5,2.046875 l -0.9375,0 c -0.140625,0 -0.171875,0 -0.21875,0 -0.109375,-0.015625 -0.140625,-0.03125 -0.140625,-0.109375 0,-0.03125 0,-0.046875 0.046875,-0.21875 z m 0,0"
+ style="stroke:none"
+ inkscape:connector-curvature="0" />
+ </symbol>
+ <symbol
+ id="textext-b73c230a-3"
+ overflow="visible"
+ style="overflow:visible">
+ <path
+ id="path4574"
+ d=""
+ style="stroke:none"
+ inkscape:connector-curvature="0" />
+ </symbol>
+ <symbol
+ id="textext-b73c230a-4"
+ overflow="visible"
+ style="overflow:visible">
+ <path
+ id="path4577"
+ d="m 6.5625,-4.984375 c 0.171875,0 0.359375,0 0.359375,-0.203125 0,-0.203125 -0.1875,-0.203125 -0.359375,-0.203125 l -2.671875,0 c -1.703125,0 -3.0625,1.296875 -3.0625,2.890625 0,1.609375 1.359375,2.90625 3.0625,2.90625 l 2.671875,0 c 0.171875,0 0.359375,0 0.359375,-0.203125 C 6.921875,0 6.734375,0 6.5625,0 L 3.90625,0 c -1.546875,0 -2.6875,-1.15625 -2.6875,-2.5 0,-1.328125 1.140625,-2.484375 2.6875,-2.484375 z m 0,0"
+ style="stroke:none"
+ inkscape:connector-curvature="0" />
+ </symbol>
+ </g>
+ </defs>
+ <g
+ id="textext-b73c230a-5">
+ <g
+ id="g4580"
+ style="fill:#000000;fill-opacity:1">
+ <use
+ id="use4582"
+ y="134.765"
+ x="223.43201"
+ xlink:href="#textext-b73c230a-1"
+ width="100%"
+ height="100%" />
+ </g>
+ <g
+ id="g4584"
+ style="fill:#000000;fill-opacity:1">
+ <use
+ id="use4586"
+ y="134.765"
+ x="232.25"
+ xlink:href="#textext-b73c230a-4"
+ width="100%"
+ height="100%" />
+ </g>
+ <g
+ id="g4588"
+ style="fill:#000000;fill-opacity:1">
+ <use
+ id="use4590"
+ y="134.765"
+ x="242.76601"
+ xlink:href="#textext-b73c230a-2"
+ width="100%"
+ height="100%" />
+ </g>
+ </g>
+ </g>
+ <path
+ inkscape:export-ydpi="90"
+ inkscape:export-xdpi="90"
+ style="fill:#000080;fill-rule:evenodd;stroke:#000080;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;marker-end:url(#Arrow1Lend)"
+ d="m 337.85714,449.50504 148.57143,-8.57143"
+ id="path5000"
+ inkscape:connector-curvature="0" />
+ <g
+ inkscape:export-ydpi="90"
+ inkscape:export-xdpi="90"
+ ns0:text="$\\sqrt{d(w,L)^2 + \\alpha^2}$"
+ ns0:preamble="/home/siarzhuk/GitDrive/2015Gudhi/Aid/preamble.ini"
+ transform="matrix(1.3935573,-0.10159094,0.10159094,1.3935573,55.220877,276.38005)"
+ style="fill:#000080;stroke:#000080"
+ id="g5407">
+ <defs
+ id="defs5409">
+ <g
+ id="g5411">
+ <symbol
+ style="overflow:visible"
+ overflow="visible"
+ id="textext-da5ef958-0">
+ <path
+ inkscape:connector-curvature="0"
+ style="stroke:none"
+ d=""
+ id="path5414" />
+ </symbol>
+ <symbol
+ style="overflow:visible"
+ overflow="visible"
+ id="textext-da5ef958-1">
+ <path
+ inkscape:connector-curvature="0"
+ style="stroke:none"
+ d="m 4.234375,11.5625 c 0.296875,0 0.3125,-0.01563 0.40625,-0.203125 l 5.453125,-11.375 c 0.07813,-0.140625 0.07813,-0.15625 0.07813,-0.1875 0,-0.109375 -0.07813,-0.203125 -0.203125,-0.203125 -0.125,0 -0.171875,0.09375 -0.21875,0.203125 L 4.609375,10.53125 2.484375,5.578125 1.09375,6.65625 1.25,6.8125 1.953125,6.265625 Z m 0,0"
+ id="path5417" />
+ </symbol>
+ <symbol
+ style="overflow:visible"
+ overflow="visible"
+ id="textext-da5ef958-2">
+ <path
+ inkscape:connector-curvature="0"
+ style="stroke:none"
+ d=""
+ id="path5420" />
+ </symbol>
+ <symbol
+ style="overflow:visible"
+ overflow="visible"
+ id="textext-da5ef958-3">
+ <path
+ inkscape:connector-curvature="0"
+ style="stroke:none"
+ d="m 5.140625,-6.8125 c 0,0 0,-0.109375 -0.125,-0.109375 -0.15625,0 -1.09375,0.09375 -1.265625,0.109375 -0.078125,0.015625 -0.140625,0.0625 -0.140625,0.1875 0,0.125 0.09375,0.125 0.234375,0.125 0.484375,0 0.5,0.0625 0.5,0.171875 L 4.3125,-6.125 3.71875,-3.765625 C 3.53125,-4.140625 3.25,-4.40625 2.796875,-4.40625 c -1.15625,0 -2.390625,1.46875 -2.390625,2.921875 0,0.9375 0.546875,1.59375 1.3125,1.59375 0.203125,0 0.703125,-0.046875 1.296875,-0.75 0.078125,0.421875 0.4375,0.75 0.90625,0.75 0.359375,0 0.578125,-0.234375 0.75,-0.546875 0.15625,-0.359375 0.296875,-0.96875 0.296875,-0.984375 0,-0.109375 -0.09375,-0.109375 -0.125,-0.109375 -0.09375,0 -0.109375,0.046875 -0.140625,0.1875 -0.171875,0.640625 -0.34375,1.234375 -0.75,1.234375 -0.28125,0 -0.296875,-0.265625 -0.296875,-0.453125 0,-0.25 0.015625,-0.3125 0.046875,-0.484375 z m -2.0625,5.625 C 3.015625,-1 3.015625,-0.984375 2.875,-0.8125 2.4375,-0.265625 2.03125,-0.109375 1.75,-0.109375 c -0.5,0 -0.640625,-0.546875 -0.640625,-0.9375 0,-0.5 0.3125,-1.71875 0.546875,-2.1875 0.3125,-0.578125 0.75,-0.953125 1.15625,-0.953125 0.640625,0 0.78125,0.8125 0.78125,0.875 0,0.0625 -0.015625,0.125 -0.03125,0.171875 z m 0,0"
+ id="path5423" />
+ </symbol>
+ <symbol
+ style="overflow:visible"
+ overflow="visible"
+ id="textext-da5ef958-4">
+ <path
+ inkscape:connector-curvature="0"
+ style="stroke:none"
+ d="M 4.609375,-3.375 C 4.65625,-3.59375 4.75,-3.96875 4.75,-4.03125 c 0,-0.171875 -0.140625,-0.265625 -0.28125,-0.265625 -0.125,0 -0.296875,0.078125 -0.375,0.28125 -0.03125,0.0625 -0.5,1.96875 -0.5625,2.234375 C 3.453125,-1.484375 3.4375,-1.3125 3.4375,-1.125 c 0,0.109375 0,0.125 0.015625,0.171875 -0.234375,0.53125 -0.53125,0.84375 -0.921875,0.84375 -0.796875,0 -0.796875,-0.734375 -0.796875,-0.90625 0,-0.3125 0.046875,-0.703125 0.515625,-1.9375 0.109375,-0.296875 0.171875,-0.4375 0.171875,-0.640625 0,-0.4375 -0.328125,-0.8125 -0.8125,-0.8125 -0.953125,0 -1.3125,1.453125 -1.3125,1.53125 0,0.109375 0.09375,0.109375 0.109375,0.109375 0.109375,0 0.109375,-0.03125 0.15625,-0.1875 C 0.84375,-3.875 1.21875,-4.1875 1.578125,-4.1875 c 0.09375,0 0.25,0.015625 0.25,0.328125 0,0.25 -0.109375,0.53125 -0.1875,0.703125 -0.4375,1.171875 -0.546875,1.625 -0.546875,2.015625 0,0.90625 0.65625,1.25 1.40625,1.25 0.171875,0 0.640625,0 1.03125,-0.703125 0.265625,0.640625 0.953125,0.703125 1.25,0.703125 0.75,0 1.1875,-0.625 1.453125,-1.21875 0.328125,-0.78125 0.65625,-2.125 0.65625,-2.59375 0,-0.546875 -0.265625,-0.703125 -0.4375,-0.703125 -0.25,0 -0.5,0.265625 -0.5,0.484375 0,0.125 0.0625,0.1875 0.140625,0.265625 0.109375,0.109375 0.359375,0.359375 0.359375,0.84375 0,0.34375 -0.28125,1.3125 -0.546875,1.828125 -0.25,0.53125 -0.609375,0.875 -1.09375,0.875 -0.46875,0 -0.734375,-0.296875 -0.734375,-0.875 0,-0.265625 0.0625,-0.578125 0.109375,-0.71875 z m 0,0"
+ id="path5426" />
+ </symbol>
+ <symbol
+ style="overflow:visible"
+ overflow="visible"
+ id="textext-da5ef958-5">
+ <path
+ inkscape:connector-curvature="0"
+ style="stroke:none"
+ d="m 2.03125,-0.015625 c 0,-0.65625 -0.25,-1.046875 -0.640625,-1.046875 -0.328125,0 -0.53125,0.25 -0.53125,0.53125 C 0.859375,-0.265625 1.0625,0 1.390625,0 1.5,0 1.640625,-0.046875 1.734375,-0.125 1.765625,-0.15625 1.78125,-0.15625 1.78125,-0.15625 c 0.015625,0 0.015625,0 0.015625,0.140625 0,0.75 -0.34375,1.34375 -0.671875,1.671875 -0.109375,0.109375 -0.109375,0.125 -0.109375,0.15625 0,0.078125 0.046875,0.109375 0.09375,0.109375 0.109375,0 0.921875,-0.765625 0.921875,-1.9375 z m 0,0"
+ id="path5429" />
+ </symbol>
+ <symbol
+ style="overflow:visible"
+ overflow="visible"
+ id="textext-da5ef958-6">
+ <path
+ inkscape:connector-curvature="0"
+ style="stroke:none"
+ d="M 3.734375,-6.03125 C 3.8125,-6.390625 3.84375,-6.5 4.78125,-6.5 c 0.296875,0 0.375,0 0.375,-0.1875 0,-0.125 -0.109375,-0.125 -0.15625,-0.125 -0.328125,0 -1.140625,0.03125 -1.46875,0.03125 -0.296875,0 -1.03125,-0.03125 -1.328125,-0.03125 -0.0625,0 -0.1875,0 -0.1875,0.203125 0,0.109375 0.09375,0.109375 0.28125,0.109375 0.015625,0 0.203125,0 0.375,0.015625 0.171875,0.03125 0.265625,0.03125 0.265625,0.171875 0,0.03125 0,0.0625 -0.03125,0.1875 L 1.5625,-0.78125 c -0.09375,0.390625 -0.109375,0.46875 -0.90625,0.46875 -0.171875,0 -0.265625,0 -0.265625,0.203125 C 0.390625,0 0.484375,0 0.65625,0 l 4.625,0 C 5.515625,0 5.515625,0 5.578125,-0.171875 L 6.375,-2.328125 c 0.03125,-0.109375 0.03125,-0.125 0.03125,-0.140625 0,-0.03125 -0.03125,-0.109375 -0.109375,-0.109375 -0.09375,0 -0.109375,0.0625 -0.171875,0.21875 -0.34375,0.90625 -0.78125,2.046875 -2.5,2.046875 l -0.9375,0 c -0.140625,0 -0.171875,0 -0.21875,0 -0.109375,-0.015625 -0.140625,-0.03125 -0.140625,-0.109375 0,-0.03125 0,-0.046875 0.046875,-0.21875 z m 0,0"
+ id="path5432" />
+ </symbol>
+ <symbol
+ style="overflow:visible"
+ overflow="visible"
+ id="textext-da5ef958-7">
+ <path
+ inkscape:connector-curvature="0"
+ style="stroke:none"
+ d="m 4.75,-2.359375 c 0,-1.5625 -0.921875,-2.046875 -1.65625,-2.046875 -1.375,0 -2.6875,1.421875 -2.6875,2.828125 0,0.9375 0.59375,1.6875 1.625,1.6875 0.625,0 1.34375,-0.234375 2.09375,-0.84375 0.125,0.53125 0.453125,0.84375 0.90625,0.84375 0.53125,0 0.84375,-0.546875 0.84375,-0.703125 0,-0.078125 -0.0625,-0.109375 -0.125,-0.109375 -0.0625,0 -0.09375,0.03125 -0.125,0.109375 -0.1875,0.484375 -0.546875,0.484375 -0.5625,0.484375 -0.3125,0 -0.3125,-0.78125 -0.3125,-1.015625 0,-0.203125 0,-0.234375 0.109375,-0.34375 C 5.796875,-2.65625 6,-3.8125 6,-3.8125 6,-3.84375 5.984375,-3.921875 5.875,-3.921875 c -0.09375,0 -0.09375,0.03125 -0.140625,0.21875 -0.1875,0.625 -0.515625,1.375 -0.984375,1.96875 z m -0.65625,1.375 c -0.890625,0.765625 -1.65625,0.875 -2.046875,0.875 -0.59375,0 -0.90625,-0.453125 -0.90625,-1.09375 0,-0.484375 0.265625,-1.5625 0.578125,-2.0625 C 2.1875,-4 2.734375,-4.1875 3.078125,-4.1875 c 0.984375,0 0.984375,1.3125 0.984375,2.078125 0,0.375 0,0.953125 0.03125,1.125 z m 0,0"
+ id="path5435" />
+ </symbol>
+ <symbol
+ style="overflow:visible"
+ overflow="visible"
+ id="textext-da5ef958-8">
+ <path
+ inkscape:connector-curvature="0"
+ style="stroke:none"
+ d=""
+ id="path5438" />
+ </symbol>
+ <symbol
+ style="overflow:visible"
+ overflow="visible"
+ id="textext-da5ef958-9">
+ <path
+ inkscape:connector-curvature="0"
+ style="stroke:none"
+ d="m 3.296875,2.390625 c 0,-0.03125 0,-0.046875 -0.171875,-0.21875 C 1.890625,0.921875 1.5625,-0.96875 1.5625,-2.5 c 0,-1.734375 0.375,-3.46875 1.609375,-4.703125 0.125,-0.125 0.125,-0.140625 0.125,-0.171875 0,-0.078125 -0.03125,-0.109375 -0.09375,-0.109375 -0.109375,0 -1,0.6875 -1.59375,1.953125 -0.5,1.09375 -0.625,2.203125 -0.625,3.03125 0,0.78125 0.109375,1.984375 0.65625,3.125 C 2.25,1.84375 3.09375,2.5 3.203125,2.5 c 0.0625,0 0.09375,-0.03125 0.09375,-0.109375 z m 0,0"
+ id="path5441" />
+ </symbol>
+ <symbol
+ style="overflow:visible"
+ overflow="visible"
+ id="textext-da5ef958-10">
+ <path
+ inkscape:connector-curvature="0"
+ style="stroke:none"
+ d="m 2.875,-2.5 c 0,-0.765625 -0.109375,-1.96875 -0.65625,-3.109375 -0.59375,-1.21875 -1.453125,-1.875 -1.546875,-1.875 -0.0625,0 -0.109375,0.046875 -0.109375,0.109375 0,0.03125 0,0.046875 0.1875,0.234375 0.984375,0.984375 1.546875,2.5625 1.546875,4.640625 0,1.71875 -0.359375,3.46875 -1.59375,4.71875 C 0.5625,2.34375 0.5625,2.359375 0.5625,2.390625 0.5625,2.453125 0.609375,2.5 0.671875,2.5 0.765625,2.5 1.671875,1.8125 2.25,0.546875 2.765625,-0.546875 2.875,-1.65625 2.875,-2.5 Z m 0,0"
+ id="path5444" />
+ </symbol>
+ <symbol
+ style="overflow:visible"
+ overflow="visible"
+ id="textext-da5ef958-11">
+ <path
+ inkscape:connector-curvature="0"
+ style="stroke:none"
+ d="m 4.078125,-2.296875 2.78125,0 C 7,-2.296875 7.1875,-2.296875 7.1875,-2.5 7.1875,-2.6875 7,-2.6875 6.859375,-2.6875 l -2.78125,0 0,-2.796875 c 0,-0.140625 0,-0.328125 -0.203125,-0.328125 -0.203125,0 -0.203125,0.1875 -0.203125,0.328125 l 0,2.796875 -2.78125,0 c -0.140625,0 -0.328125,0 -0.328125,0.1875 0,0.203125 0.1875,0.203125 0.328125,0.203125 l 2.78125,0 0,2.796875 c 0,0.140625 0,0.328125 0.203125,0.328125 0.203125,0 0.203125,-0.1875 0.203125,-0.328125 z m 0,0"
+ id="path5447" />
+ </symbol>
+ <symbol
+ style="overflow:visible"
+ overflow="visible"
+ id="textext-da5ef958-12">
+ <path
+ inkscape:connector-curvature="0"
+ style="stroke:none"
+ d=""
+ id="path5450" />
+ </symbol>
+ <symbol
+ style="overflow:visible"
+ overflow="visible"
+ id="textext-da5ef958-13">
+ <path
+ inkscape:connector-curvature="0"
+ style="stroke:none"
+ d="m 3.515625,-1.265625 -0.234375,0 c -0.015625,0.15625 -0.09375,0.5625 -0.1875,0.625 -0.046875,0.046875 -0.578125,0.046875 -0.6875,0.046875 l -1.28125,0 c 0.734375,-0.640625 0.984375,-0.84375 1.390625,-1.171875 0.515625,-0.40625 1,-0.84375 1,-1.5 0,-0.84375 -0.734375,-1.359375 -1.625,-1.359375 -0.859375,0 -1.453125,0.609375 -1.453125,1.25 0,0.34375 0.296875,0.390625 0.375,0.390625 0.15625,0 0.359375,-0.125 0.359375,-0.375 0,-0.125 -0.046875,-0.375 -0.40625,-0.375 C 0.984375,-4.21875 1.453125,-4.375 1.78125,-4.375 c 0.703125,0 1.0625,0.546875 1.0625,1.109375 0,0.609375 -0.4375,1.078125 -0.65625,1.328125 L 0.515625,-0.265625 C 0.4375,-0.203125 0.4375,-0.1875 0.4375,0 l 2.875,0 z m 0,0"
+ id="path5453" />
+ </symbol>
+ </g>
+ </defs>
+ <g
+ style="fill:#000080;stroke:#000080"
+ id="textext-da5ef958-14">
+ <g
+ style="fill:#000080;fill-opacity:1;stroke:#000080"
+ id="g5456">
+ <use
+ style="fill:#000080;stroke:#000080"
+ height="100%"
+ width="100%"
+ xlink:href="#textext-da5ef958-1"
+ x="223.43201"
+ y="126.247"
+ id="use5458" />
+ </g>
+ <path
+ inkscape:connector-curvature="0"
+ style="fill:#000080;stroke:#000080;stroke-width:0.398;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:10;stroke-opacity:1"
+ d="m -4.6875e-4,0.001125 59.07031275,0"
+ transform="matrix(1,0,0,-1,233.395,126.048)"
+ id="path5460" />
+ <g
+ style="fill:#000080;fill-opacity:1;stroke:#000080"
+ id="g5462">
+ <use
+ style="fill:#000080;stroke:#000080"
+ height="100%"
+ width="100%"
+ xlink:href="#textext-da5ef958-3"
+ x="233.395"
+ y="134.765"
+ id="use5464" />
+ </g>
+ <g
+ style="fill:#000080;fill-opacity:1;stroke:#000080"
+ id="g5466">
+ <use
+ style="fill:#000080;stroke:#000080"
+ height="100%"
+ width="100%"
+ xlink:href="#textext-da5ef958-9"
+ x="238.58"
+ y="134.765"
+ id="use5468" />
+ </g>
+ <g
+ style="fill:#000080;fill-opacity:1;stroke:#000080"
+ id="g5470">
+ <use
+ style="fill:#000080;stroke:#000080"
+ height="100%"
+ width="100%"
+ xlink:href="#textext-da5ef958-4"
+ x="242.455"
+ y="134.765"
+ id="use5472" />
+ </g>
+ <g
+ style="fill:#000080;fill-opacity:1;stroke:#000080"
+ id="g5474">
+ <use
+ style="fill:#000080;stroke:#000080"
+ height="100%"
+ width="100%"
+ xlink:href="#textext-da5ef958-5"
+ x="249.85622"
+ y="134.765"
+ id="use5476" />
+ </g>
+ <g
+ style="fill:#000080;fill-opacity:1;stroke:#000080"
+ id="g5478">
+ <use
+ style="fill:#000080;stroke:#000080"
+ height="100%"
+ width="100%"
+ xlink:href="#textext-da5ef958-6"
+ x="254.28758"
+ y="134.765"
+ id="use5480" />
+ </g>
+ <g
+ style="fill:#000080;fill-opacity:1;stroke:#000080"
+ id="g5482">
+ <use
+ style="fill:#000080;stroke:#000080"
+ height="100%"
+ width="100%"
+ xlink:href="#textext-da5ef958-10"
+ x="261.06299"
+ y="134.765"
+ id="use5484" />
+ </g>
+ <g
+ style="fill:#000080;fill-opacity:1;stroke:#000080"
+ id="g5486">
+ <use
+ style="fill:#000080;stroke:#000080"
+ height="100%"
+ width="100%"
+ xlink:href="#textext-da5ef958-13"
+ x="264.93701"
+ y="131.88699"
+ id="use5488" />
+ </g>
+ <g
+ style="fill:#000080;fill-opacity:1;stroke:#000080"
+ id="g5490">
+ <use
+ style="fill:#000080;stroke:#000080"
+ height="100%"
+ width="100%"
+ xlink:href="#textext-da5ef958-11"
+ x="271.621"
+ y="134.765"
+ id="use5492" />
+ </g>
+ <g
+ style="fill:#000080;fill-opacity:1;stroke:#000080"
+ id="g5494">
+ <use
+ style="fill:#000080;stroke:#000080"
+ height="100%"
+ width="100%"
+ xlink:href="#textext-da5ef958-7"
+ x="281.58301"
+ y="134.765"
+ id="use5496" />
+ </g>
+ <g
+ style="fill:#000080;fill-opacity:1;stroke:#000080"
+ id="g5498">
+ <use
+ style="fill:#000080;stroke:#000080"
+ height="100%"
+ width="100%"
+ xlink:href="#textext-da5ef958-13"
+ x="287.99301"
+ y="131.88699"
+ id="use5500" />
+ </g>
+ </g>
+ </g>
+ <circle
+ inkscape:export-ydpi="90"
+ inkscape:export-xdpi="90"
+ r="148.57143"
+ cy="449.89627"
+ cx="738.71753"
+ id="path4136-7"
+ style="color:#000000;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:#000080;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;marker:none;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate" />
+ <circle
+ inkscape:export-ydpi="90"
+ inkscape:export-xdpi="90"
+ r="2.7779195"
+ cy="450.05875"
+ cx="738.13837"
+ id="path4138-8"
+ style="color:#000000;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:#000080;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;marker:none;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate" />
+ <g
+ inkscape:export-ydpi="90"
+ inkscape:export-xdpi="90"
+ id="g4147-5"
+ ns0:preamble="/home/siarzhuk/GitDrive/2015Gudhi/Aid/preamble.ini"
+ ns0:text="$w$"
+ transform="matrix(2.7020226,0,0,2.7020226,138.14964,103.80999)"
+ style="fill:#000080">
+ <defs
+ id="defs4149-9">
+ <g
+ id="g4151-7">
+ <symbol
+ id="textext-20f8880a-0-5"
+ overflow="visible"
+ style="overflow:visible">
+ <path
+ id="path4154-3"
+ d=""
+ style="stroke:none"
+ inkscape:connector-curvature="0" />
+ </symbol>
+ <symbol
+ id="textext-20f8880a-1-8"
+ overflow="visible"
+ style="overflow:visible">
+ <path
+ id="path4157-8"
+ d="M 4.609375,-3.375 C 4.65625,-3.59375 4.75,-3.96875 4.75,-4.03125 c 0,-0.171875 -0.140625,-0.265625 -0.28125,-0.265625 -0.125,0 -0.296875,0.078125 -0.375,0.28125 -0.03125,0.0625 -0.5,1.96875 -0.5625,2.234375 C 3.453125,-1.484375 3.4375,-1.3125 3.4375,-1.125 c 0,0.109375 0,0.125 0.015625,0.171875 -0.234375,0.53125 -0.53125,0.84375 -0.921875,0.84375 -0.796875,0 -0.796875,-0.734375 -0.796875,-0.90625 0,-0.3125 0.046875,-0.703125 0.515625,-1.9375 0.109375,-0.296875 0.171875,-0.4375 0.171875,-0.640625 0,-0.4375 -0.328125,-0.8125 -0.8125,-0.8125 -0.953125,0 -1.3125,1.453125 -1.3125,1.53125 0,0.109375 0.09375,0.109375 0.109375,0.109375 0.109375,0 0.109375,-0.03125 0.15625,-0.1875 C 0.84375,-3.875 1.21875,-4.1875 1.578125,-4.1875 c 0.09375,0 0.25,0.015625 0.25,0.328125 0,0.25 -0.109375,0.53125 -0.1875,0.703125 -0.4375,1.171875 -0.546875,1.625 -0.546875,2.015625 0,0.90625 0.65625,1.25 1.40625,1.25 0.171875,0 0.640625,0 1.03125,-0.703125 0.265625,0.640625 0.953125,0.703125 1.25,0.703125 0.75,0 1.1875,-0.625 1.453125,-1.21875 0.328125,-0.78125 0.65625,-2.125 0.65625,-2.59375 0,-0.546875 -0.265625,-0.703125 -0.4375,-0.703125 -0.25,0 -0.5,0.265625 -0.5,0.484375 0,0.125 0.0625,0.1875 0.140625,0.265625 0.109375,0.109375 0.359375,0.359375 0.359375,0.84375 0,0.34375 -0.28125,1.3125 -0.546875,1.828125 -0.25,0.53125 -0.609375,0.875 -1.09375,0.875 -0.46875,0 -0.734375,-0.296875 -0.734375,-0.875 0,-0.265625 0.0625,-0.578125 0.109375,-0.71875 z m 0,0"
+ style="stroke:none"
+ inkscape:connector-curvature="0" />
+ </symbol>
+ </g>
+ </defs>
+ <g
+ id="textext-20f8880a-2-3"
+ style="fill:#000080">
+ <g
+ id="g4160-1"
+ style="fill:#000080;fill-opacity:1">
+ <use
+ id="use4162-8"
+ y="134.765"
+ x="223.43201"
+ xlink:href="#textext-20f8880a-1-8"
+ width="100%"
+ height="100%"
+ style="fill:#000080" />
+ </g>
+ </g>
+ </g>
+ <circle
+ inkscape:export-ydpi="90"
+ inkscape:export-xdpi="90"
+ r="2.7779195"
+ cy="342.37451"
+ cx="681.14148"
+ id="path4138-3-96"
+ style="color:#000000;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;marker:none;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate" />
+ <circle
+ inkscape:export-ydpi="90"
+ inkscape:export-xdpi="90"
+ r="2.7779195"
+ cy="353.07648"
+ cx="818.57141"
+ id="path4138-3-7-4"
+ style="color:#000000;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;marker:none;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate" />
+ <circle
+ inkscape:export-ydpi="90"
+ inkscape:export-xdpi="90"
+ r="2.7779195"
+ cy="492.09607"
+ cx="668.35968"
+ id="path4138-3-0-3"
+ style="color:#000000;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:#000080;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;marker:none;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate" />
+ <circle
+ inkscape:export-ydpi="90"
+ inkscape:export-xdpi="90"
+ r="2.7779195"
+ cy="570.21936"
+ cx="717.85718"
+ id="path4138-3-9-3"
+ style="color:#000000;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;marker:none;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate" />
+ <circle
+ inkscape:export-ydpi="90"
+ inkscape:export-xdpi="90"
+ r="2.7779195"
+ cy="473.07648"
+ cx="859.28564"
+ id="path4138-3-3-3"
+ style="color:#000000;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;marker:none;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate" />
+ <circle
+ inkscape:export-ydpi="90"
+ inkscape:export-xdpi="90"
+ r="2.7779195"
+ cy="478.07648"
+ cx="533.57141"
+ id="path4138-3-6-8"
+ style="color:#000000;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;marker:none;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate" />
+ <circle
+ inkscape:export-ydpi="90"
+ inkscape:export-xdpi="90"
+ r="2.7779195"
+ cy="350.52393"
+ cx="594.1001"
+ id="path4138-3-06-6"
+ style="color:#000000;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;marker:none;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate" />
+ <circle
+ inkscape:export-ydpi="90"
+ inkscape:export-xdpi="90"
+ r="2.7779195"
+ cy="331.64792"
+ cx="927.14288"
+ id="path4138-3-2-0"
+ style="color:#000000;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;marker:none;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate" />
+ <circle
+ inkscape:export-ydpi="90"
+ inkscape:export-xdpi="90"
+ r="2.7779195"
+ cy="505.21936"
+ cx="930"
+ id="path4138-3-61-4"
+ style="color:#000000;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:1;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;fill:#ffffff;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;marker:none;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate" />
+ <g
+ inkscape:export-ydpi="90"
+ inkscape:export-xdpi="90"
+ id="g4558-8"
+ style="fill:#000080"
+ transform="matrix(2.7020226,0,0,2.7020226,145.6805,202.59004)"
+ ns0:preamble="/home/siarzhuk/GitDrive/2015Gudhi/Aid/preamble.ini"
+ ns0:text="$\\sigma \\subset L$">
+ <defs
+ id="defs4560-8">
+ <g
+ id="g4562-8">
+ <symbol
+ id="textext-b73c230a-0-9"
+ overflow="visible"
+ style="overflow:visible">
+ <path
+ id="path4565-7"
+ d=""
+ style="stroke:none"
+ inkscape:connector-curvature="0" />
+ </symbol>
+ <symbol
+ id="textext-b73c230a-1-7"
+ overflow="visible"
+ style="overflow:visible">
+ <path
+ id="path4568-6"
+ d="m 5.15625,-3.71875 c 0.140625,0 0.5,0 0.5,-0.34375 0,-0.234375 -0.21875,-0.234375 -0.390625,-0.234375 l -2.28125,0 c -1.5,0 -2.609375,1.640625 -2.609375,2.828125 0,0.875 0.59375,1.578125 1.5,1.578125 1.171875,0 2.5,-1.203125 2.5,-2.734375 0,-0.171875 0,-0.65625 -0.3125,-1.09375 z M 1.890625,-0.109375 C 1.390625,-0.109375 1,-0.46875 1,-1.1875 c 0,-0.296875 0.109375,-1.109375 0.46875,-1.703125 0.421875,-0.6875 1.015625,-0.828125 1.359375,-0.828125 0.828125,0 0.90625,0.65625 0.90625,0.96875 0,0.46875 -0.203125,1.28125 -0.53125,1.796875 -0.390625,0.578125 -0.9375,0.84375 -1.3125,0.84375 z m 0,0"
+ style="stroke:none"
+ inkscape:connector-curvature="0" />
+ </symbol>
+ <symbol
+ id="textext-b73c230a-2-4"
+ overflow="visible"
+ style="overflow:visible">
+ <path
+ id="path4571-3"
+ d="M 3.734375,-6.03125 C 3.8125,-6.390625 3.84375,-6.5 4.78125,-6.5 c 0.296875,0 0.375,0 0.375,-0.1875 0,-0.125 -0.109375,-0.125 -0.15625,-0.125 -0.328125,0 -1.140625,0.03125 -1.46875,0.03125 -0.296875,0 -1.03125,-0.03125 -1.328125,-0.03125 -0.0625,0 -0.1875,0 -0.1875,0.203125 0,0.109375 0.09375,0.109375 0.28125,0.109375 0.015625,0 0.203125,0 0.375,0.015625 0.171875,0.03125 0.265625,0.03125 0.265625,0.171875 0,0.03125 0,0.0625 -0.03125,0.1875 L 1.5625,-0.78125 c -0.09375,0.390625 -0.109375,0.46875 -0.90625,0.46875 -0.171875,0 -0.265625,0 -0.265625,0.203125 C 0.390625,0 0.484375,0 0.65625,0 l 4.625,0 C 5.515625,0 5.515625,0 5.578125,-0.171875 L 6.375,-2.328125 c 0.03125,-0.109375 0.03125,-0.125 0.03125,-0.140625 0,-0.03125 -0.03125,-0.109375 -0.109375,-0.109375 -0.09375,0 -0.109375,0.0625 -0.171875,0.21875 -0.34375,0.90625 -0.78125,2.046875 -2.5,2.046875 l -0.9375,0 c -0.140625,0 -0.171875,0 -0.21875,0 -0.109375,-0.015625 -0.140625,-0.03125 -0.140625,-0.109375 0,-0.03125 0,-0.046875 0.046875,-0.21875 z m 0,0"
+ style="stroke:none"
+ inkscape:connector-curvature="0" />
+ </symbol>
+ <symbol
+ id="textext-b73c230a-3-0"
+ overflow="visible"
+ style="overflow:visible">
+ <path
+ id="path4574-3"
+ d=""
+ style="stroke:none"
+ inkscape:connector-curvature="0" />
+ </symbol>
+ <symbol
+ id="textext-b73c230a-4-0"
+ overflow="visible"
+ style="overflow:visible">
+ <path
+ id="path4577-9"
+ d="m 6.5625,-4.984375 c 0.171875,0 0.359375,0 0.359375,-0.203125 0,-0.203125 -0.1875,-0.203125 -0.359375,-0.203125 l -2.671875,0 c -1.703125,0 -3.0625,1.296875 -3.0625,2.890625 0,1.609375 1.359375,2.90625 3.0625,2.90625 l 2.671875,0 c 0.171875,0 0.359375,0 0.359375,-0.203125 C 6.921875,0 6.734375,0 6.5625,0 L 3.90625,0 c -1.546875,0 -2.6875,-1.15625 -2.6875,-2.5 0,-1.328125 1.140625,-2.484375 2.6875,-2.484375 z m 0,0"
+ style="stroke:none"
+ inkscape:connector-curvature="0" />
+ </symbol>
+ </g>
+ </defs>
+ <g
+ id="textext-b73c230a-5-2">
+ <g
+ id="g4580-5"
+ style="fill:#000000;fill-opacity:1">
+ <use
+ id="use4582-4"
+ y="134.765"
+ x="223.43201"
+ xlink:href="#textext-b73c230a-1-7"
+ width="100%"
+ height="100%" />
+ </g>
+ <g
+ id="g4584-0"
+ style="fill:#000000;fill-opacity:1">
+ <use
+ id="use4586-5"
+ y="134.765"
+ x="232.25"
+ xlink:href="#textext-b73c230a-4-0"
+ width="100%"
+ height="100%" />
+ </g>
+ <g
+ id="g4588-9"
+ style="fill:#000000;fill-opacity:1">
+ <use
+ id="use4590-4"
+ y="134.765"
+ x="242.76601"
+ xlink:href="#textext-b73c230a-2-4"
+ width="100%"
+ height="100%" />
+ </g>
+ </g>
+ </g>
+ <path
+ inkscape:export-ydpi="90"
+ inkscape:export-xdpi="90"
+ style="fill:#000080;fill-rule:evenodd;stroke:#000080;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;marker-end:url(#Arrow1Lend-8)"
+ d="m 737.85714,449.50504 148.57143,-8.57143"
+ id="path5000-6"
+ inkscape:connector-curvature="0" />
+ <circle
+ r="80.779091"
+ cy="449.46512"
+ cx="737.3952"
+ id="path6334"
+ style="color:#000000;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:0.98999999;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;fill:none;fill-opacity:1;fill-rule:evenodd;stroke:#000080;stroke-width:1.06622958;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:3.19868871, 3.19868871;stroke-dashoffset:0;stroke-opacity:1;marker:none;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate" />
+ <g
+ id="g6495"
+ style="fill:#000080;stroke:#000080"
+ transform="matrix(1.3952558,-0.07472613,0.07472613,1.3952558,431.03302,272.05923)"
+ ns0:preamble="/home/siarzhuk/GitDrive/2015Gudhi/Aid/preamble.ini"
+ ns0:text="$\\sqrt{d(w,L \\setminus \\sigma)^2 + \\alpha^2}$">
+ <defs
+ id="defs6497">
+ <g
+ id="g6499">
+ <symbol
+ id="textext-c0d6e8dc-0"
+ overflow="visible"
+ style="overflow:visible">
+ <path
+ id="path6502"
+ d=""
+ style="stroke:none"
+ inkscape:connector-curvature="0" />
+ </symbol>
+ <symbol
+ id="textext-c0d6e8dc-1"
+ overflow="visible"
+ style="overflow:visible">
+ <path
+ id="path6505"
+ d="m 4.234375,11.5625 c 0.296875,0 0.3125,-0.01563 0.40625,-0.203125 l 5.453125,-11.375 c 0.07813,-0.140625 0.07813,-0.15625 0.07813,-0.1875 0,-0.109375 -0.07813,-0.203125 -0.203125,-0.203125 -0.125,0 -0.171875,0.09375 -0.21875,0.203125 L 4.609375,10.53125 2.484375,5.578125 1.09375,6.65625 1.25,6.8125 1.953125,6.265625 Z m 0,0"
+ style="stroke:none"
+ inkscape:connector-curvature="0" />
+ </symbol>
+ <symbol
+ id="textext-c0d6e8dc-2"
+ overflow="visible"
+ style="overflow:visible">
+ <path
+ id="path6508"
+ d=""
+ style="stroke:none"
+ inkscape:connector-curvature="0" />
+ </symbol>
+ <symbol
+ id="textext-c0d6e8dc-3"
+ overflow="visible"
+ style="overflow:visible">
+ <path
+ id="path6511"
+ d="m 5.140625,-6.8125 c 0,0 0,-0.109375 -0.125,-0.109375 -0.15625,0 -1.09375,0.09375 -1.265625,0.109375 -0.078125,0.015625 -0.140625,0.0625 -0.140625,0.1875 0,0.125 0.09375,0.125 0.234375,0.125 0.484375,0 0.5,0.0625 0.5,0.171875 L 4.3125,-6.125 3.71875,-3.765625 C 3.53125,-4.140625 3.25,-4.40625 2.796875,-4.40625 c -1.15625,0 -2.390625,1.46875 -2.390625,2.921875 0,0.9375 0.546875,1.59375 1.3125,1.59375 0.203125,0 0.703125,-0.046875 1.296875,-0.75 0.078125,0.421875 0.4375,0.75 0.90625,0.75 0.359375,0 0.578125,-0.234375 0.75,-0.546875 0.15625,-0.359375 0.296875,-0.96875 0.296875,-0.984375 0,-0.109375 -0.09375,-0.109375 -0.125,-0.109375 -0.09375,0 -0.109375,0.046875 -0.140625,0.1875 -0.171875,0.640625 -0.34375,1.234375 -0.75,1.234375 -0.28125,0 -0.296875,-0.265625 -0.296875,-0.453125 0,-0.25 0.015625,-0.3125 0.046875,-0.484375 z m -2.0625,5.625 C 3.015625,-1 3.015625,-0.984375 2.875,-0.8125 2.4375,-0.265625 2.03125,-0.109375 1.75,-0.109375 c -0.5,0 -0.640625,-0.546875 -0.640625,-0.9375 0,-0.5 0.3125,-1.71875 0.546875,-2.1875 0.3125,-0.578125 0.75,-0.953125 1.15625,-0.953125 0.640625,0 0.78125,0.8125 0.78125,0.875 0,0.0625 -0.015625,0.125 -0.03125,0.171875 z m 0,0"
+ style="stroke:none"
+ inkscape:connector-curvature="0" />
+ </symbol>
+ <symbol
+ id="textext-c0d6e8dc-4"
+ overflow="visible"
+ style="overflow:visible">
+ <path
+ id="path6514"
+ d="M 4.609375,-3.375 C 4.65625,-3.59375 4.75,-3.96875 4.75,-4.03125 c 0,-0.171875 -0.140625,-0.265625 -0.28125,-0.265625 -0.125,0 -0.296875,0.078125 -0.375,0.28125 -0.03125,0.0625 -0.5,1.96875 -0.5625,2.234375 C 3.453125,-1.484375 3.4375,-1.3125 3.4375,-1.125 c 0,0.109375 0,0.125 0.015625,0.171875 -0.234375,0.53125 -0.53125,0.84375 -0.921875,0.84375 -0.796875,0 -0.796875,-0.734375 -0.796875,-0.90625 0,-0.3125 0.046875,-0.703125 0.515625,-1.9375 0.109375,-0.296875 0.171875,-0.4375 0.171875,-0.640625 0,-0.4375 -0.328125,-0.8125 -0.8125,-0.8125 -0.953125,0 -1.3125,1.453125 -1.3125,1.53125 0,0.109375 0.09375,0.109375 0.109375,0.109375 0.109375,0 0.109375,-0.03125 0.15625,-0.1875 C 0.84375,-3.875 1.21875,-4.1875 1.578125,-4.1875 c 0.09375,0 0.25,0.015625 0.25,0.328125 0,0.25 -0.109375,0.53125 -0.1875,0.703125 -0.4375,1.171875 -0.546875,1.625 -0.546875,2.015625 0,0.90625 0.65625,1.25 1.40625,1.25 0.171875,0 0.640625,0 1.03125,-0.703125 0.265625,0.640625 0.953125,0.703125 1.25,0.703125 0.75,0 1.1875,-0.625 1.453125,-1.21875 0.328125,-0.78125 0.65625,-2.125 0.65625,-2.59375 0,-0.546875 -0.265625,-0.703125 -0.4375,-0.703125 -0.25,0 -0.5,0.265625 -0.5,0.484375 0,0.125 0.0625,0.1875 0.140625,0.265625 0.109375,0.109375 0.359375,0.359375 0.359375,0.84375 0,0.34375 -0.28125,1.3125 -0.546875,1.828125 -0.25,0.53125 -0.609375,0.875 -1.09375,0.875 -0.46875,0 -0.734375,-0.296875 -0.734375,-0.875 0,-0.265625 0.0625,-0.578125 0.109375,-0.71875 z m 0,0"
+ style="stroke:none"
+ inkscape:connector-curvature="0" />
+ </symbol>
+ <symbol
+ id="textext-c0d6e8dc-5"
+ overflow="visible"
+ style="overflow:visible">
+ <path
+ id="path6517"
+ d="m 2.03125,-0.015625 c 0,-0.65625 -0.25,-1.046875 -0.640625,-1.046875 -0.328125,0 -0.53125,0.25 -0.53125,0.53125 C 0.859375,-0.265625 1.0625,0 1.390625,0 1.5,0 1.640625,-0.046875 1.734375,-0.125 1.765625,-0.15625 1.78125,-0.15625 1.78125,-0.15625 c 0.015625,0 0.015625,0 0.015625,0.140625 0,0.75 -0.34375,1.34375 -0.671875,1.671875 -0.109375,0.109375 -0.109375,0.125 -0.109375,0.15625 0,0.078125 0.046875,0.109375 0.09375,0.109375 0.109375,0 0.921875,-0.765625 0.921875,-1.9375 z m 0,0"
+ style="stroke:none"
+ inkscape:connector-curvature="0" />
+ </symbol>
+ <symbol
+ id="textext-c0d6e8dc-6"
+ overflow="visible"
+ style="overflow:visible">
+ <path
+ id="path6520"
+ d="M 3.734375,-6.03125 C 3.8125,-6.390625 3.84375,-6.5 4.78125,-6.5 c 0.296875,0 0.375,0 0.375,-0.1875 0,-0.125 -0.109375,-0.125 -0.15625,-0.125 -0.328125,0 -1.140625,0.03125 -1.46875,0.03125 -0.296875,0 -1.03125,-0.03125 -1.328125,-0.03125 -0.0625,0 -0.1875,0 -0.1875,0.203125 0,0.109375 0.09375,0.109375 0.28125,0.109375 0.015625,0 0.203125,0 0.375,0.015625 0.171875,0.03125 0.265625,0.03125 0.265625,0.171875 0,0.03125 0,0.0625 -0.03125,0.1875 L 1.5625,-0.78125 c -0.09375,0.390625 -0.109375,0.46875 -0.90625,0.46875 -0.171875,0 -0.265625,0 -0.265625,0.203125 C 0.390625,0 0.484375,0 0.65625,0 l 4.625,0 C 5.515625,0 5.515625,0 5.578125,-0.171875 L 6.375,-2.328125 c 0.03125,-0.109375 0.03125,-0.125 0.03125,-0.140625 0,-0.03125 -0.03125,-0.109375 -0.109375,-0.109375 -0.09375,0 -0.109375,0.0625 -0.171875,0.21875 -0.34375,0.90625 -0.78125,2.046875 -2.5,2.046875 l -0.9375,0 c -0.140625,0 -0.171875,0 -0.21875,0 -0.109375,-0.015625 -0.140625,-0.03125 -0.140625,-0.109375 0,-0.03125 0,-0.046875 0.046875,-0.21875 z m 0,0"
+ style="stroke:none"
+ inkscape:connector-curvature="0" />
+ </symbol>
+ <symbol
+ id="textext-c0d6e8dc-7"
+ overflow="visible"
+ style="overflow:visible">
+ <path
+ id="path6523"
+ d="m 5.15625,-3.71875 c 0.140625,0 0.5,0 0.5,-0.34375 0,-0.234375 -0.21875,-0.234375 -0.390625,-0.234375 l -2.28125,0 c -1.5,0 -2.609375,1.640625 -2.609375,2.828125 0,0.875 0.59375,1.578125 1.5,1.578125 1.171875,0 2.5,-1.203125 2.5,-2.734375 0,-0.171875 0,-0.65625 -0.3125,-1.09375 z M 1.890625,-0.109375 C 1.390625,-0.109375 1,-0.46875 1,-1.1875 c 0,-0.296875 0.109375,-1.109375 0.46875,-1.703125 0.421875,-0.6875 1.015625,-0.828125 1.359375,-0.828125 0.828125,0 0.90625,0.65625 0.90625,0.96875 0,0.46875 -0.203125,1.28125 -0.53125,1.796875 -0.390625,0.578125 -0.9375,0.84375 -1.3125,0.84375 z m 0,0"
+ style="stroke:none"
+ inkscape:connector-curvature="0" />
+ </symbol>
+ <symbol
+ id="textext-c0d6e8dc-8"
+ overflow="visible"
+ style="overflow:visible">
+ <path
+ id="path6526"
+ d="m 4.75,-2.359375 c 0,-1.5625 -0.921875,-2.046875 -1.65625,-2.046875 -1.375,0 -2.6875,1.421875 -2.6875,2.828125 0,0.9375 0.59375,1.6875 1.625,1.6875 0.625,0 1.34375,-0.234375 2.09375,-0.84375 0.125,0.53125 0.453125,0.84375 0.90625,0.84375 0.53125,0 0.84375,-0.546875 0.84375,-0.703125 0,-0.078125 -0.0625,-0.109375 -0.125,-0.109375 -0.0625,0 -0.09375,0.03125 -0.125,0.109375 -0.1875,0.484375 -0.546875,0.484375 -0.5625,0.484375 -0.3125,0 -0.3125,-0.78125 -0.3125,-1.015625 0,-0.203125 0,-0.234375 0.109375,-0.34375 C 5.796875,-2.65625 6,-3.8125 6,-3.8125 6,-3.84375 5.984375,-3.921875 5.875,-3.921875 c -0.09375,0 -0.09375,0.03125 -0.140625,0.21875 -0.1875,0.625 -0.515625,1.375 -0.984375,1.96875 z m -0.65625,1.375 c -0.890625,0.765625 -1.65625,0.875 -2.046875,0.875 -0.59375,0 -0.90625,-0.453125 -0.90625,-1.09375 0,-0.484375 0.265625,-1.5625 0.578125,-2.0625 C 2.1875,-4 2.734375,-4.1875 3.078125,-4.1875 c 0.984375,0 0.984375,1.3125 0.984375,2.078125 0,0.375 0,0.953125 0.03125,1.125 z m 0,0"
+ style="stroke:none"
+ inkscape:connector-curvature="0" />
+ </symbol>
+ <symbol
+ id="textext-c0d6e8dc-9"
+ overflow="visible"
+ style="overflow:visible">
+ <path
+ id="path6529"
+ d=""
+ style="stroke:none"
+ inkscape:connector-curvature="0" />
+ </symbol>
+ <symbol
+ id="textext-c0d6e8dc-10"
+ overflow="visible"
+ style="overflow:visible">
+ <path
+ id="path6532"
+ d="m 3.296875,2.390625 c 0,-0.03125 0,-0.046875 -0.171875,-0.21875 C 1.890625,0.921875 1.5625,-0.96875 1.5625,-2.5 c 0,-1.734375 0.375,-3.46875 1.609375,-4.703125 0.125,-0.125 0.125,-0.140625 0.125,-0.171875 0,-0.078125 -0.03125,-0.109375 -0.09375,-0.109375 -0.109375,0 -1,0.6875 -1.59375,1.953125 -0.5,1.09375 -0.625,2.203125 -0.625,3.03125 0,0.78125 0.109375,1.984375 0.65625,3.125 C 2.25,1.84375 3.09375,2.5 3.203125,2.5 c 0.0625,0 0.09375,-0.03125 0.09375,-0.109375 z m 0,0"
+ style="stroke:none"
+ inkscape:connector-curvature="0" />
+ </symbol>
+ <symbol
+ id="textext-c0d6e8dc-11"
+ overflow="visible"
+ style="overflow:visible">
+ <path
+ id="path6535"
+ d="m 2.875,-2.5 c 0,-0.765625 -0.109375,-1.96875 -0.65625,-3.109375 -0.59375,-1.21875 -1.453125,-1.875 -1.546875,-1.875 -0.0625,0 -0.109375,0.046875 -0.109375,0.109375 0,0.03125 0,0.046875 0.1875,0.234375 0.984375,0.984375 1.546875,2.5625 1.546875,4.640625 0,1.71875 -0.359375,3.46875 -1.59375,4.71875 C 0.5625,2.34375 0.5625,2.359375 0.5625,2.390625 0.5625,2.453125 0.609375,2.5 0.671875,2.5 0.765625,2.5 1.671875,1.8125 2.25,0.546875 2.765625,-0.546875 2.875,-1.65625 2.875,-2.5 Z m 0,0"
+ style="stroke:none"
+ inkscape:connector-curvature="0" />
+ </symbol>
+ <symbol
+ id="textext-c0d6e8dc-12"
+ overflow="visible"
+ style="overflow:visible">
+ <path
+ id="path6538"
+ d="m 4.078125,-2.296875 2.78125,0 C 7,-2.296875 7.1875,-2.296875 7.1875,-2.5 7.1875,-2.6875 7,-2.6875 6.859375,-2.6875 l -2.78125,0 0,-2.796875 c 0,-0.140625 0,-0.328125 -0.203125,-0.328125 -0.203125,0 -0.203125,0.1875 -0.203125,0.328125 l 0,2.796875 -2.78125,0 c -0.140625,0 -0.328125,0 -0.328125,0.1875 0,0.203125 0.1875,0.203125 0.328125,0.203125 l 2.78125,0 0,2.796875 c 0,0.140625 0,0.328125 0.203125,0.328125 0.203125,0 0.203125,-0.1875 0.203125,-0.328125 z m 0,0"
+ style="stroke:none"
+ inkscape:connector-curvature="0" />
+ </symbol>
+ <symbol
+ id="textext-c0d6e8dc-13"
+ overflow="visible"
+ style="overflow:visible">
+ <path
+ id="path6541"
+ d=""
+ style="stroke:none"
+ inkscape:connector-curvature="0" />
+ </symbol>
+ <symbol
+ id="textext-c0d6e8dc-14"
+ overflow="visible"
+ style="overflow:visible">
+ <path
+ id="path6544"
+ d="m 4,2.25 c 0.046875,0.140625 0.09375,0.25 0.234375,0.25 0.109375,0 0.1875,-0.09375 0.1875,-0.203125 0,-0.03125 0,-0.046875 -0.046875,-0.15625 l -3.40625,-9.375 c -0.0625,-0.171875 -0.09375,-0.25 -0.21875,-0.25 -0.109375,0 -0.203125,0.09375 -0.203125,0.203125 0,0.03125 0,0.046875 0.046875,0.15625 z m 0,0"
+ style="stroke:none"
+ inkscape:connector-curvature="0" />
+ </symbol>
+ <symbol
+ id="textext-c0d6e8dc-15"
+ overflow="visible"
+ style="overflow:visible">
+ <path
+ id="path6547"
+ d=""
+ style="stroke:none"
+ inkscape:connector-curvature="0" />
+ </symbol>
+ <symbol
+ id="textext-c0d6e8dc-16"
+ overflow="visible"
+ style="overflow:visible">
+ <path
+ id="path6550"
+ d="m 3.515625,-1.265625 -0.234375,0 c -0.015625,0.15625 -0.09375,0.5625 -0.1875,0.625 -0.046875,0.046875 -0.578125,0.046875 -0.6875,0.046875 l -1.28125,0 c 0.734375,-0.640625 0.984375,-0.84375 1.390625,-1.171875 0.515625,-0.40625 1,-0.84375 1,-1.5 0,-0.84375 -0.734375,-1.359375 -1.625,-1.359375 -0.859375,0 -1.453125,0.609375 -1.453125,1.25 0,0.34375 0.296875,0.390625 0.375,0.390625 0.15625,0 0.359375,-0.125 0.359375,-0.375 0,-0.125 -0.046875,-0.375 -0.40625,-0.375 C 0.984375,-4.21875 1.453125,-4.375 1.78125,-4.375 c 0.703125,0 1.0625,0.546875 1.0625,1.109375 0,0.609375 -0.4375,1.078125 -0.65625,1.328125 L 0.515625,-0.265625 C 0.4375,-0.203125 0.4375,-0.1875 0.4375,0 l 2.875,0 z m 0,0"
+ style="stroke:none"
+ inkscape:connector-curvature="0" />
+ </symbol>
+ </g>
+ </defs>
+ <g
+ id="textext-c0d6e8dc-17"
+ style="fill:#000080;stroke:#000080">
+ <g
+ id="g6553"
+ style="fill:#000080;fill-opacity:1;stroke:#000080">
+ <use
+ id="use6555"
+ y="126.247"
+ x="223.43201"
+ xlink:href="#textext-c0d6e8dc-1"
+ width="100%"
+ height="100%"
+ style="fill:#000080;stroke:#000080" />
+ </g>
+ <path
+ id="path6557"
+ transform="matrix(1,0,0,-1,233.395,126.048)"
+ d="m -4.6875e-4,0.001125 74.52734375,0"
+ style="fill:#000080;stroke:#000080;stroke-width:0.398;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:10;stroke-opacity:1"
+ inkscape:connector-curvature="0" />
+ <g
+ id="g6559"
+ style="fill:#000080;fill-opacity:1;stroke:#000080">
+ <use
+ id="use6561"
+ y="134.765"
+ x="233.395"
+ xlink:href="#textext-c0d6e8dc-3"
+ width="100%"
+ height="100%"
+ style="fill:#000080;stroke:#000080" />
+ </g>
+ <g
+ id="g6563"
+ style="fill:#000080;fill-opacity:1;stroke:#000080">
+ <use
+ id="use6565"
+ y="134.765"
+ x="238.58"
+ xlink:href="#textext-c0d6e8dc-10"
+ width="100%"
+ height="100%"
+ style="fill:#000080;stroke:#000080" />
+ </g>
+ <g
+ id="g6567"
+ style="fill:#000080;fill-opacity:1;stroke:#000080">
+ <use
+ id="use6569"
+ y="134.765"
+ x="242.455"
+ xlink:href="#textext-c0d6e8dc-4"
+ width="100%"
+ height="100%"
+ style="fill:#000080;stroke:#000080" />
+ </g>
+ <g
+ id="g6571"
+ style="fill:#000080;fill-opacity:1;stroke:#000080">
+ <use
+ id="use6573"
+ y="134.765"
+ x="249.85622"
+ xlink:href="#textext-c0d6e8dc-5"
+ width="100%"
+ height="100%"
+ style="fill:#000080;stroke:#000080" />
+ </g>
+ <g
+ id="g6575"
+ style="fill:#000080;fill-opacity:1;stroke:#000080">
+ <use
+ id="use6577"
+ y="134.765"
+ x="254.28758"
+ xlink:href="#textext-c0d6e8dc-6"
+ width="100%"
+ height="100%"
+ style="fill:#000080;stroke:#000080" />
+ </g>
+ <g
+ id="g6579"
+ style="fill:#000080;fill-opacity:1;stroke:#000080">
+ <use
+ id="use6581"
+ y="134.765"
+ x="263.27701"
+ xlink:href="#textext-c0d6e8dc-14"
+ width="100%"
+ height="100%"
+ style="fill:#000080;stroke:#000080" />
+ </g>
+ <g
+ id="g6583"
+ style="fill:#000080;fill-opacity:1;stroke:#000080">
+ <use
+ id="use6585"
+ y="134.765"
+ x="270.47198"
+ xlink:href="#textext-c0d6e8dc-7"
+ width="100%"
+ height="100%"
+ style="fill:#000080;stroke:#000080" />
+ </g>
+ <g
+ id="g6587"
+ style="fill:#000080;fill-opacity:1;stroke:#000080">
+ <use
+ id="use6589"
+ y="134.765"
+ x="276.522"
+ xlink:href="#textext-c0d6e8dc-11"
+ width="100%"
+ height="100%"
+ style="fill:#000080;stroke:#000080" />
+ </g>
+ <g
+ id="g6591"
+ style="fill:#000080;fill-opacity:1;stroke:#000080">
+ <use
+ id="use6593"
+ y="131.88699"
+ x="280.397"
+ xlink:href="#textext-c0d6e8dc-16"
+ width="100%"
+ height="100%"
+ style="fill:#000080;stroke:#000080" />
+ </g>
+ <g
+ id="g6595"
+ style="fill:#000080;fill-opacity:1;stroke:#000080">
+ <use
+ id="use6597"
+ y="134.765"
+ x="287.07999"
+ xlink:href="#textext-c0d6e8dc-12"
+ width="100%"
+ height="100%"
+ style="fill:#000080;stroke:#000080" />
+ </g>
+ <g
+ id="g6599"
+ style="fill:#000080;fill-opacity:1;stroke:#000080">
+ <use
+ id="use6601"
+ y="134.765"
+ x="297.043"
+ xlink:href="#textext-c0d6e8dc-8"
+ width="100%"
+ height="100%"
+ style="fill:#000080;stroke:#000080" />
+ </g>
+ <g
+ id="g6603"
+ style="fill:#000080;fill-opacity:1;stroke:#000080">
+ <use
+ id="use6605"
+ y="131.88699"
+ x="303.453"
+ xlink:href="#textext-c0d6e8dc-16"
+ width="100%"
+ height="100%"
+ style="fill:#000080;stroke:#000080" />
+ </g>
+ </g>
+ </g>
+ </g>
+ <g
+ transform="translate(-130.29351,-300.82484)"
+ style="display:none"
+ inkscape:label="Layer 2"
+ id="layer2"
+ inkscape:groupmode="layer">
+ <circle
+ r="32.857143"
+ cy="448.79074"
+ cx="337.85715"
+ id="path5639"
+ style="color:#000000;clip-rule:nonzero;display:inline;overflow:visible;visibility:visible;opacity:0.98999999;isolation:auto;mix-blend-mode:normal;color-interpolation:sRGB;color-interpolation-filters:linearRGB;solid-color:#000000;solid-opacity:1;fill:none;fill-opacity:1;fill-rule:evenodd;stroke:#000080;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:3, 3;stroke-dashoffset:0;stroke-opacity:1;marker:none;color-rendering:auto;image-rendering:auto;shape-rendering:auto;text-rendering:auto;enable-background:accumulate" />
+ </g>
+</svg>
diff --git a/src/Witness_complex/example/CMakeLists.txt b/src/Witness_complex/example/CMakeLists.txt
index 4d67e0d0..549a85be 100644
--- a/src/Witness_complex/example/CMakeLists.txt
+++ b/src/Witness_complex/example/CMakeLists.txt
@@ -1,16 +1,44 @@
cmake_minimum_required(VERSION 2.6)
project(Witness_complex_examples)
-# A simple example
- add_executable( witness_complex_from_file witness_complex_from_file.cpp )
- add_test( witness_complex_from_bunny ${CMAKE_CURRENT_BINARY_DIR}/witness_complex_from_file ${CMAKE_SOURCE_DIR}/data/points/bunny_5000 100)
-
-if(CGAL_FOUND)
- if (NOT CGAL_VERSION VERSION_LESS 4.6.0)
- if (EIGEN3_FOUND)
- add_executable ( witness_complex_sphere witness_complex_sphere.cpp )
- target_link_libraries(witness_complex_sphere ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY})
- add_test( witness_complex_sphere_10 ${CMAKE_CURRENT_BINARY_DIR}/witness_complex_sphere 10)
- endif(EIGEN3_FOUND)
- endif (NOT CGAL_VERSION VERSION_LESS 4.6.0)
+add_executable ( Witness_complex_example_nearest_landmark_table example_nearest_landmark_table.cpp )
+target_link_libraries(Witness_complex_example_nearest_landmark_table ${Boost_SYSTEM_LIBRARY})
+if (TBB_FOUND)
+ target_link_libraries(Witness_complex_example_nearest_landmark_table ${TBB_LIBRARIES})
endif()
+add_test(Witness_complex_test_nearest_landmark_table Witness_complex_example_nearest_landmark_table)
+
+# CGAL and Eigen3 are required for the Euclidean versions of the Witness complex
+if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0)
+ add_executable( Witness_complex_example_off example_witness_complex_off.cpp )
+ target_link_libraries(Witness_complex_example_off ${Boost_SYSTEM_LIBRARY})
+ add_executable( Witness_complex_example_strong_off example_strong_witness_complex_off.cpp )
+ target_link_libraries(Witness_complex_example_strong_off ${Boost_SYSTEM_LIBRARY})
+ add_executable ( Witness_complex_example_sphere example_witness_complex_sphere.cpp )
+ target_link_libraries(Witness_complex_example_sphere ${Boost_SYSTEM_LIBRARY})
+
+ add_executable ( Witness_complex_example_witness_persistence example_witness_complex_persistence.cpp )
+ target_link_libraries(Witness_complex_example_witness_persistence ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY})
+
+ add_executable ( Witness_complex_example_strong_witness_persistence example_strong_witness_persistence.cpp )
+ target_link_libraries(Witness_complex_example_strong_witness_persistence ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY})
+
+ if (TBB_FOUND)
+ target_link_libraries(Witness_complex_example_witness_persistence ${TBB_LIBRARIES})
+ target_link_libraries(Witness_complex_example_strong_witness_persistence ${TBB_LIBRARIES})
+ endif()
+
+ add_test(Witness_complex_off_test_torus
+ ${CMAKE_CURRENT_BINARY_DIR}/Witness_complex_example_off
+ ${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off 20 1.0 3)
+ add_test(Witness_complex_strong_off_test_torus
+ ${CMAKE_CURRENT_BINARY_DIR}/Witness_complex_example_strong_off
+ ${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off 20 1.0 3)
+ add_test(Witness_complex_test_sphere_10 Witness_complex_example_sphere 10)
+ add_test(Witness_complex_test_torus_persistence
+ ${CMAKE_CURRENT_BINARY_DIR}/Witness_complex_example_witness_persistence
+ ${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off -l 20 -a 0.5)
+ add_test(Witness_complex_strong_test_torus_persistence
+ ${CMAKE_CURRENT_BINARY_DIR}/Witness_complex_example_strong_witness_persistence
+ ${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off -l 20 -a 0.5)
+endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0)
diff --git a/src/Witness_complex/example/example_nearest_landmark_table.cpp b/src/Witness_complex/example/example_nearest_landmark_table.cpp
new file mode 100644
index 00000000..b8594212
--- /dev/null
+++ b/src/Witness_complex/example/example_nearest_landmark_table.cpp
@@ -0,0 +1,69 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Siargey Kachanovich
+ *
+ * Copyright (C) 2016 INRIA (France)
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#define BOOST_PARAMETER_MAX_ARITY 12
+
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Witness_complex.h>
+#include <gudhi/Persistent_cohomology.h>
+
+#include <iostream>
+#include <fstream>
+#include <utility>
+#include <string>
+#include <vector>
+
+int main(int argc, char * const argv[]) {
+ using Nearest_landmark_range = std::vector<std::pair<std::size_t, double>>;
+ using Nearest_landmark_table = std::vector<Nearest_landmark_range>;
+ using Witness_complex = Gudhi::witness_complex::Witness_complex<Nearest_landmark_table>;
+ using Simplex_tree = Gudhi::Simplex_tree<>;
+ using Field_Zp = Gudhi::persistent_cohomology::Field_Zp;
+ using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology<Simplex_tree, Field_Zp>;
+
+ Simplex_tree simplex_tree;
+ Nearest_landmark_table nlt;
+
+ // Example contains 5 witnesses and 5 landmarks
+ Nearest_landmark_range w0 = {std::make_pair(0, 0), std::make_pair(1, 1), std::make_pair(2, 2),
+ std::make_pair(3, 3), std::make_pair(4, 4)}; nlt.push_back(w0);
+ Nearest_landmark_range w1 = {std::make_pair(1, 0), std::make_pair(2, 1), std::make_pair(3, 2),
+ std::make_pair(4, 3), std::make_pair(0, 4)}; nlt.push_back(w1);
+ Nearest_landmark_range w2 = {std::make_pair(2, 0), std::make_pair(3, 1), std::make_pair(4, 2),
+ std::make_pair(0, 3), std::make_pair(1, 4)}; nlt.push_back(w2);
+ Nearest_landmark_range w3 = {std::make_pair(3, 0), std::make_pair(4, 1), std::make_pair(0, 2),
+ std::make_pair(1, 3), std::make_pair(2, 4)}; nlt.push_back(w3);
+ Nearest_landmark_range w4 = {std::make_pair(4, 0), std::make_pair(0, 1), std::make_pair(1, 2),
+ std::make_pair(2, 3), std::make_pair(3, 4)}; nlt.push_back(w4);
+
+ Witness_complex witness_complex(nlt);
+ witness_complex.create_complex(simplex_tree, 4.1);
+
+ std::cout << "Number of simplices: " << simplex_tree.num_simplices() << std::endl;
+
+ Persistent_cohomology pcoh(simplex_tree);
+ // initializes the coefficient field for homology
+ pcoh.init_coefficients(11);
+
+ pcoh.compute_persistent_cohomology(-0.1);
+ pcoh.output_diagram();
+}
diff --git a/src/Witness_complex/example/example_strong_witness_complex_off.cpp b/src/Witness_complex/example/example_strong_witness_complex_off.cpp
new file mode 100644
index 00000000..0ee9ee90
--- /dev/null
+++ b/src/Witness_complex/example/example_strong_witness_complex_off.cpp
@@ -0,0 +1,79 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Siargey Kachanovich
+ *
+ * Copyright (C) 2016 INRIA (France)
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Euclidean_strong_witness_complex.h>
+#include <gudhi/pick_n_random_points.h>
+#include <gudhi/Points_off_io.h>
+
+#include <CGAL/Epick_d.h>
+
+#include <iostream>
+#include <fstream>
+#include <ctime>
+#include <string>
+#include <vector>
+
+using K = CGAL::Epick_d<CGAL::Dynamic_dimension_tag>;
+using Point_d = typename K::Point_d;
+using Witness_complex = Gudhi::witness_complex::Euclidean_strong_witness_complex<K>;
+using Point_vector = std::vector<Point_d>;
+
+int main(int argc, char * const argv[]) {
+ if (argc != 5) {
+ std::cerr << "Usage: " << argv[0]
+ << " path_to_point_file number_of_landmarks max_squared_alpha limit_dimension\n";
+ return 0;
+ }
+
+ std::string file_name = argv[1];
+ int nbL = atoi(argv[2]), lim_dim = atoi(argv[4]);
+ double alpha2 = atof(argv[3]);
+ clock_t start, end;
+ Gudhi::Simplex_tree<> simplex_tree;
+
+ // Read the point file
+ Point_vector point_vector, landmarks;
+ Gudhi::Points_off_reader<Point_d> off_reader(file_name);
+ if (!off_reader.is_valid()) {
+ std::cerr << "Strong witness complex - Unable to read file " << file_name << "\n";
+ exit(-1); // ----- >>
+ }
+ point_vector = Point_vector(off_reader.get_point_cloud());
+
+ std::cout << "Successfully read " << point_vector.size() << " points.\n";
+ std::cout << "Ambient dimension is " << point_vector[0].dimension() << ".\n";
+
+ // Choose landmarks
+ Gudhi::subsampling::pick_n_random_points(point_vector, nbL, std::back_inserter(landmarks));
+
+ // Compute witness complex
+ start = clock();
+ Witness_complex witness_complex(landmarks,
+ point_vector);
+
+ witness_complex.create_complex(simplex_tree, alpha2, lim_dim);
+ end = clock();
+ std::cout << "Strong witness complex took "
+ << static_cast<double>(end - start) / CLOCKS_PER_SEC << " s. \n";
+ std::cout << "Number of simplices is: " << simplex_tree.num_simplices() << "\n";
+}
diff --git a/src/Witness_complex/example/example_strong_witness_persistence.cpp b/src/Witness_complex/example/example_strong_witness_persistence.cpp
new file mode 100644
index 00000000..f786fe7b
--- /dev/null
+++ b/src/Witness_complex/example/example_strong_witness_persistence.cpp
@@ -0,0 +1,171 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Siargey Kachanovich
+ *
+ * Copyright (C) 2016 INRIA (France)
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Euclidean_strong_witness_complex.h>
+#include <gudhi/Persistent_cohomology.h>
+#include <gudhi/Points_off_io.h>
+#include <gudhi/pick_n_random_points.h>
+
+#include <boost/program_options.hpp>
+
+#include <CGAL/Epick_d.h>
+
+#include <string>
+#include <vector>
+#include <limits> // infinity
+
+using K = CGAL::Epick_d<CGAL::Dynamic_dimension_tag>;
+using Point_d = K::Point_d;
+
+using Point_vector = std::vector<Point_d>;
+using Strong_witness_complex = Gudhi::witness_complex::Euclidean_strong_witness_complex<K>;
+using SimplexTree = Gudhi::Simplex_tree<>;
+
+using Filtration_value = SimplexTree::Filtration_value;
+
+using Field_Zp = Gudhi::persistent_cohomology::Field_Zp;
+using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology<SimplexTree, Field_Zp>;
+
+void program_options(int argc, char * argv[]
+ , int & nbL
+ , std::string & file_name
+ , std::string & filediag
+ , Filtration_value & max_squared_alpha
+ , int & p
+ , int & dim_max
+ , Filtration_value & min_persistence);
+
+int main(int argc, char * argv[]) {
+ std::string file_name;
+ std::string filediag;
+ Filtration_value max_squared_alpha;
+ int p, nbL, lim_d;
+ Filtration_value min_persistence;
+ SimplexTree simplex_tree;
+
+ program_options(argc, argv, nbL, file_name, filediag, max_squared_alpha, p, lim_d, min_persistence);
+
+ // Extract the points from the file file_name
+ Point_vector witnesses, landmarks;
+ Gudhi::Points_off_reader<Point_d> off_reader(file_name);
+ if (!off_reader.is_valid()) {
+ std::cerr << "Witness complex - Unable to read file " << file_name << "\n";
+ exit(-1); // ----- >>
+ }
+ witnesses = Point_vector(off_reader.get_point_cloud());
+ std::cout << "Successfully read " << witnesses.size() << " points.\n";
+ std::cout << "Ambient dimension is " << witnesses[0].dimension() << ".\n";
+
+ // Choose landmarks from witnesses
+ Gudhi::subsampling::pick_n_random_points(witnesses, nbL, std::back_inserter(landmarks));
+
+ // Compute witness complex
+ Strong_witness_complex strong_witness_complex(landmarks,
+ witnesses);
+
+ strong_witness_complex.create_complex(simplex_tree, max_squared_alpha, lim_d);
+
+ std::cout << "The complex contains " << simplex_tree.num_simplices() << " simplices \n";
+ std::cout << " and has dimension " << simplex_tree.dimension() << " \n";
+
+ // Sort the simplices in the order of the filtration
+ simplex_tree.initialize_filtration();
+
+ // Compute the persistence diagram of the complex
+ Persistent_cohomology pcoh(simplex_tree);
+ // initializes the coefficient field for homology
+ pcoh.init_coefficients(p);
+
+ pcoh.compute_persistent_cohomology(min_persistence);
+
+ // Output the diagram in filediag
+ if (filediag.empty()) {
+ pcoh.output_diagram();
+ } else {
+ std::ofstream out(filediag);
+ pcoh.output_diagram(out);
+ out.close();
+ }
+
+ return 0;
+}
+
+void program_options(int argc, char * argv[]
+ , int & nbL
+ , std::string & file_name
+ , std::string & filediag
+ , Filtration_value & max_squared_alpha
+ , int & p
+ , int & dim_max
+ , Filtration_value & min_persistence) {
+ namespace po = boost::program_options;
+
+ po::options_description hidden("Hidden options");
+ hidden.add_options()
+ ("input-file", po::value<std::string>(&file_name),
+ "Name of file containing a point set in off format.");
+
+ po::options_description visible("Allowed options", 100);
+ Filtration_value default_alpha = std::numeric_limits<Filtration_value>::infinity();
+ visible.add_options()
+ ("help,h", "produce help message")
+ ("landmarks,l", po::value<int>(&nbL),
+ "Number of landmarks to choose from the point cloud.")
+ ("output-file,o", po::value<std::string>(&filediag)->default_value(std::string()),
+ "Name of file in which the persistence diagram is written. Default print in std::cout")
+ ("max-sq-alpha,a", po::value<Filtration_value>(&max_squared_alpha)->default_value(default_alpha),
+ "Maximal squared relaxation parameter.")
+ ("field-charac,p", po::value<int>(&p)->default_value(11),
+ "Characteristic p of the coefficient field Z/pZ for computing homology.")
+ ("min-persistence,m", po::value<Filtration_value>(&min_persistence)->default_value(0),
+ "Minimal lifetime of homology feature to be recorded. Default is 0. Enter a negative value to see zero length intervals")
+ ("cpx-dimension,d", po::value<int>(&dim_max)->default_value(std::numeric_limits<int>::max()),
+ "Maximal dimension of the strong witness complex we want to compute.");
+
+ po::positional_options_description pos;
+ pos.add("input-file", 1);
+
+ po::options_description all;
+ all.add(visible).add(hidden);
+ po::variables_map vm;
+
+ po::store(po::command_line_parser(argc, argv).
+ options(all).positional(pos).run(), vm);
+ po::notify(vm);
+
+ if (vm.count("help") || !vm.count("input-file")) {
+ std::cout << std::endl;
+ std::cout << "Compute the persistent homology with coefficient field Z/pZ \n";
+ std::cout << "of a Strong witness complex defined on a set of input points.\n \n";
+ std::cout << "The output diagram contains one bar per line, written with the convention: \n";
+ std::cout << " p dim b d \n";
+ std::cout << "where dim is the dimension of the homological feature,\n";
+ std::cout << "b and d are respectively the birth and death of the feature and \n";
+ std::cout << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
+
+ std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
+ std::cout << visible << std::endl;
+ std::abort();
+ }
+}
+
diff --git a/src/Witness_complex/example/example_witness_complex_off.cpp b/src/Witness_complex/example/example_witness_complex_off.cpp
new file mode 100644
index 00000000..b36dac0d
--- /dev/null
+++ b/src/Witness_complex/example/example_witness_complex_off.cpp
@@ -0,0 +1,60 @@
+#include <sys/types.h>
+#include <sys/stat.h>
+
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Euclidean_witness_complex.h>
+#include <gudhi/pick_n_random_points.h>
+#include <gudhi/Points_off_io.h>
+
+#include <CGAL/Epick_d.h>
+
+#include <iostream>
+#include <fstream>
+#include <ctime>
+#include <string>
+#include <vector>
+
+using K = CGAL::Epick_d<CGAL::Dynamic_dimension_tag>;
+using Point_d = K::Point_d;
+using Witness_complex = Gudhi::witness_complex::Euclidean_witness_complex<K>;
+using Point_vector = std::vector< Point_d >;
+
+int main(int argc, char * const argv[]) {
+ if (argc != 5) {
+ std::cerr << "Usage: " << argv[0]
+ << " path_to_point_file number_of_landmarks max_squared_alpha limit_dimension\n";
+ return 0;
+ }
+
+ std::string file_name = argv[1];
+ int nbL = atoi(argv[2]), lim_dim = atoi(argv[4]);
+ double alpha2 = atof(argv[3]);
+ clock_t start, end;
+ Gudhi::Simplex_tree<> simplex_tree;
+
+ // Read the point file
+ Point_vector point_vector, landmarks;
+ Gudhi::Points_off_reader<Point_d> off_reader(file_name);
+ if (!off_reader.is_valid()) {
+ std::cerr << "Witness complex - Unable to read file " << file_name << "\n";
+ exit(-1); // ----- >>
+ }
+ point_vector = Point_vector(off_reader.get_point_cloud());
+
+ std::cout << "Successfully read " << point_vector.size() << " points.\n";
+ std::cout << "Ambient dimension is " << point_vector[0].dimension() << ".\n";
+
+ // Choose landmarks
+ Gudhi::subsampling::pick_n_random_points(point_vector, nbL, std::back_inserter(landmarks));
+
+ // Compute witness complex
+ start = clock();
+ Witness_complex witness_complex(landmarks,
+ point_vector);
+
+ witness_complex.create_complex(simplex_tree, alpha2, lim_dim);
+ end = clock();
+ std::cout << "Witness complex took "
+ << static_cast<double>(end - start) / CLOCKS_PER_SEC << " s. \n";
+ std::cout << "Number of simplices is: " << simplex_tree.num_simplices() << "\n";
+}
diff --git a/src/Witness_complex/example/example_witness_complex_persistence.cpp b/src/Witness_complex/example/example_witness_complex_persistence.cpp
new file mode 100644
index 00000000..a1146922
--- /dev/null
+++ b/src/Witness_complex/example/example_witness_complex_persistence.cpp
@@ -0,0 +1,171 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Siargey Kachanovich
+ *
+ * Copyright (C) 2016 INRIA (France)
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Euclidean_witness_complex.h>
+#include <gudhi/Persistent_cohomology.h>
+#include <gudhi/Points_off_io.h>
+#include <gudhi/pick_n_random_points.h>
+
+#include <boost/program_options.hpp>
+
+#include <CGAL/Epick_d.h>
+
+#include <string>
+#include <vector>
+#include <limits> // infinity
+
+using K = CGAL::Epick_d<CGAL::Dynamic_dimension_tag>;
+using Point_d = K::Point_d;
+
+using Point_vector = std::vector<Point_d>;
+using Witness_complex = Gudhi::witness_complex::Euclidean_witness_complex<K>;
+using SimplexTree = Gudhi::Simplex_tree<>;
+
+using Filtration_value = SimplexTree::Filtration_value;
+
+using Field_Zp = Gudhi::persistent_cohomology::Field_Zp;
+using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology<SimplexTree, Field_Zp>;
+
+void program_options(int argc, char * argv[]
+ , int & nbL
+ , std::string & file_name
+ , std::string & filediag
+ , Filtration_value & max_squared_alpha
+ , int & p
+ , int & dim_max
+ , Filtration_value & min_persistence);
+
+int main(int argc, char * argv[]) {
+ std::string file_name;
+ std::string filediag;
+ Filtration_value max_squared_alpha;
+ int p, nbL, lim_d;
+ Filtration_value min_persistence;
+ SimplexTree simplex_tree;
+
+ program_options(argc, argv, nbL, file_name, filediag, max_squared_alpha, p, lim_d, min_persistence);
+
+ // Extract the points from the file file_name
+ Point_vector witnesses, landmarks;
+ Gudhi::Points_off_reader<Point_d> off_reader(file_name);
+ if (!off_reader.is_valid()) {
+ std::cerr << "Witness complex - Unable to read file " << file_name << "\n";
+ exit(-1); // ----- >>
+ }
+ witnesses = Point_vector(off_reader.get_point_cloud());
+ std::cout << "Successfully read " << witnesses.size() << " points.\n";
+ std::cout << "Ambient dimension is " << witnesses[0].dimension() << ".\n";
+
+ // Choose landmarks from witnesses
+ Gudhi::subsampling::pick_n_random_points(witnesses, nbL, std::back_inserter(landmarks));
+
+ // Compute witness complex
+ Witness_complex witness_complex(landmarks,
+ witnesses);
+
+ witness_complex.create_complex(simplex_tree, max_squared_alpha, lim_d);
+
+ std::cout << "The complex contains " << simplex_tree.num_simplices() << " simplices \n";
+ std::cout << " and has dimension " << simplex_tree.dimension() << " \n";
+
+ // Sort the simplices in the order of the filtration
+ simplex_tree.initialize_filtration();
+
+ // Compute the persistence diagram of the complex
+ Persistent_cohomology pcoh(simplex_tree);
+ // initializes the coefficient field for homology
+ pcoh.init_coefficients(p);
+
+ pcoh.compute_persistent_cohomology(min_persistence);
+
+ // Output the diagram in filediag
+ if (filediag.empty()) {
+ pcoh.output_diagram();
+ } else {
+ std::ofstream out(filediag);
+ pcoh.output_diagram(out);
+ out.close();
+ }
+
+ return 0;
+}
+
+
+void program_options(int argc, char * argv[]
+ , int & nbL
+ , std::string & file_name
+ , std::string & filediag
+ , Filtration_value & max_squared_alpha
+ , int & p
+ , int & dim_max
+ , Filtration_value & min_persistence) {
+ namespace po = boost::program_options;
+
+ po::options_description hidden("Hidden options");
+ hidden.add_options()
+ ("input-file", po::value<std::string>(&file_name),
+ "Name of file containing a point set in off format.");
+
+ Filtration_value default_alpha = std::numeric_limits<Filtration_value>::infinity();
+ po::options_description visible("Allowed options", 100);
+ visible.add_options()
+ ("help,h", "produce help message")
+ ("landmarks,l", po::value<int>(&nbL),
+ "Number of landmarks to choose from the point cloud.")
+ ("output-file,o", po::value<std::string>(&filediag)->default_value(std::string()),
+ "Name of file in which the persistence diagram is written. Default print in std::cout")
+ ("max-sq-alpha,a", po::value<Filtration_value>(&max_squared_alpha)->default_value(default_alpha),
+ "Maximal squared relaxation parameter.")
+ ("field-charac,p", po::value<int>(&p)->default_value(11),
+ "Characteristic p of the coefficient field Z/pZ for computing homology.")
+ ("min-persistence,m", po::value<Filtration_value>(&min_persistence)->default_value(0),
+ "Minimal lifetime of homology feature to be recorded. Default is 0. Enter a negative value to see zero length intervals")
+ ("cpx-dimension,d", po::value<int>(&dim_max)->default_value(std::numeric_limits<int>::max()),
+ "Maximal dimension of the weak witness complex we want to compute.");
+
+ po::positional_options_description pos;
+ pos.add("input-file", 1);
+
+ po::options_description all;
+ all.add(visible).add(hidden);
+ po::variables_map vm;
+
+ po::store(po::command_line_parser(argc, argv).
+ options(all).positional(pos).run(), vm);
+ po::notify(vm);
+
+ if (vm.count("help") || !vm.count("input-file")) {
+ std::cout << std::endl;
+ std::cout << "Compute the persistent homology with coefficient field Z/pZ \n";
+ std::cout << "of a Weak witness complex defined on a set of input points.\n \n";
+ std::cout << "The output diagram contains one bar per line, written with the convention: \n";
+ std::cout << " p dim b d \n";
+ std::cout << "where dim is the dimension of the homological feature,\n";
+ std::cout << "b and d are respectively the birth and death of the feature and \n";
+ std::cout << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
+
+ std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
+ std::cout << visible << std::endl;
+ std::abort();
+ }
+}
diff --git a/src/Witness_complex/example/witness_complex_sphere.cpp b/src/Witness_complex/example/example_witness_complex_sphere.cpp
index b26c9f36..124fd99b 100644
--- a/src/Witness_complex/example/witness_complex_sphere.cpp
+++ b/src/Witness_complex/example/example_witness_complex_sphere.cpp
@@ -4,7 +4,7 @@
*
* Author(s): Siargey Kachanovich
*
- * Copyright (C) 2015 INRIA Sophia Antipolis-Méditerranée (France)
+ * Copyright (C) 2016 INRIA (France)
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -19,17 +19,16 @@
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-#define BOOST_PARAMETER_MAX_ARITY 12
-
-#include <sys/types.h>
-#include <sys/stat.h>
+#define BOOST_PARAMETER_MAX_ARITY 12
#include <gudhi/Simplex_tree.h>
-#include <gudhi/Witness_complex.h>
-#include <gudhi/Landmark_choice_by_random_point.h>
+#include <gudhi/Euclidean_witness_complex.h>
+#include <gudhi/pick_n_random_points.h>
#include <gudhi/reader_utils.h>
+#include <CGAL/Epick_d.h>
+
#include <iostream>
#include <fstream>
#include <ctime>
@@ -51,6 +50,9 @@ void write_data(Data_range & data, std::string filename) {
}
int main(int argc, char * const argv[]) {
+ using Kernel = CGAL::Epick_d<CGAL::Dynamic_dimension_tag>;
+ using Witness_complex = Gudhi::witness_complex::Euclidean_witness_complex<Kernel>;
+
if (argc != 2) {
std::cerr << "Usage: " << argv[0]
<< " number_of_landmarks \n";
@@ -58,27 +60,27 @@ int main(int argc, char * const argv[]) {
}
int number_of_landmarks = atoi(argv[1]);
- clock_t start, end;
-
- // Construct the Simplex Tree
- Gudhi::Simplex_tree<> simplex_tree;
std::vector< std::pair<int, double> > l_time;
- // Read the point file
+ // Generate points
for (int nbP = 500; nbP < 10000; nbP += 500) {
- Point_Vector point_vector;
+ clock_t start, end;
+ // Construct the Simplex Tree
+ Gudhi::Simplex_tree<> simplex_tree;
+ Point_Vector point_vector, landmarks;
generate_points_sphere(point_vector, nbP, 4);
std::cout << "Successfully generated " << point_vector.size() << " points.\n";
std::cout << "Ambient dimension is " << point_vector[0].size() << ".\n";
// Choose landmarks
start = clock();
- std::vector<std::vector< int > > knn;
- Gudhi::witness_complex::landmark_choice_by_random_point(point_vector, number_of_landmarks, knn);
+ Gudhi::subsampling::pick_n_random_points(point_vector, number_of_landmarks, std::back_inserter(landmarks));
// Compute witness complex
- Gudhi::witness_complex::witness_complex(knn, number_of_landmarks, point_vector[0].size(), simplex_tree);
+ Witness_complex witness_complex(landmarks,
+ point_vector);
+ witness_complex.create_complex(simplex_tree, 0);
end = clock();
double time = static_cast<double>(end - start) / CLOCKS_PER_SEC;
std::cout << "Witness complex for " << number_of_landmarks << " landmarks took "
diff --git a/src/Witness_complex/example/generators.h b/src/Witness_complex/example/generators.h
index ac445261..7df43db5 100644
--- a/src/Witness_complex/example/generators.h
+++ b/src/Witness_complex/example/generators.h
@@ -25,17 +25,19 @@
#include <CGAL/Epick_d.h>
#include <CGAL/point_generators_d.h>
+#include <CGAL/Random.h>
#include <fstream>
#include <string>
#include <vector>
+#include <cmath>
-typedef CGAL::Epick_d<CGAL::Dynamic_dimension_tag> K;
-typedef K::FT FT;
-typedef K::Point_d Point_d;
-typedef std::vector<Point_d> Point_Vector;
-typedef CGAL::Random_points_in_cube_d<Point_d> Random_cube_iterator;
-typedef CGAL::Random_points_in_ball_d<Point_d> Random_point_iterator;
+using K = CGAL::Epick_d<CGAL::Dynamic_dimension_tag>;
+using FT = K::FT;
+using Point_d = K::Point_d;
+using Point_Vector = std::vector<Point_d>;
+using Random_cube_iterator = CGAL::Random_points_in_cube_d<Point_d>;
+using Random_point_iterator = CGAL::Random_points_in_ball_d<Point_d>;
/**
* \brief Rock age method of reading off file
@@ -144,4 +146,21 @@ void generate_points_sphere(Point_Vector& W, int nbP, int dim) {
W.push_back(*rp++);
}
+/** \brief Generate nbP points on a (flat) d-torus embedded in R^{2d}
+ *
+ */
+void generate_points_torus(Point_Vector& W, int nbP, int dim) {
+ CGAL::Random rand;
+ const double pi = std::acos(-1);
+ for (int i = 0; i < nbP; i++) {
+ std::vector<FT> point;
+ for (int j = 0; j < dim; j++) {
+ double alpha = rand.uniform_real(static_cast<double>(0), 2*pi);
+ point.push_back(sin(alpha));
+ point.push_back(cos(alpha));
+ }
+ W.push_back(Point_d(point));
+ }
+}
+
#endif // EXAMPLE_WITNESS_COMPLEX_GENERATORS_H_
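A minimal usage sketch for the new torus generator (an illustration, not part of the patch): it assumes the snippet lives next to generators.h and is built with CGAL available, like the other generator-based examples.

#include "generators.h"  // provides Point_Vector and generate_points_torus
#include <iostream>

int main() {
  Point_Vector points;
  // 100 points on a flat 2-torus embedded in R^4: each of the 2 angles contributes a (sin, cos) pair
  generate_points_torus(points, 100, 2);
  std::cout << "Generated " << points.size() << " points in dimension "
            << points[0].dimension() << "\n";  // expected: 100 points in dimension 4
  return 0;
}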
diff --git a/src/Witness_complex/example/witness_complex_from_file.cpp b/src/Witness_complex/example/witness_complex_from_file.cpp
deleted file mode 100644
index 53207ad2..00000000
--- a/src/Witness_complex/example/witness_complex_from_file.cpp
+++ /dev/null
@@ -1,100 +0,0 @@
-/* This file is part of the Gudhi Library. The Gudhi library
- * (Geometric Understanding in Higher Dimensions) is a generic C++
- * library for computational topology.
- *
- * Author(s): Siargey Kachanovich
- *
- * Copyright (C) 2015 INRIA Sophia Antipolis-Méditerranée (France)
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#include <sys/types.h>
-#include <sys/stat.h>
-
-#include <gudhi/Simplex_tree.h>
-#include <gudhi/Witness_complex.h>
-#include <gudhi/Landmark_choice_by_random_point.h>
-#include <gudhi/reader_utils.h>
-
-#include <iostream>
-#include <fstream>
-#include <ctime>
-#include <string>
-#include <vector>
-
-typedef std::vector< Vertex_handle > typeVectorVertex;
-typedef std::vector< std::vector <double> > Point_Vector;
-
-/**
- * \brief Customized version of read_points
- * which takes into account a possible nbP first line
- *
- */
-inline void
-read_points_cust(std::string file_name, std::vector< std::vector< double > > & points) {
- std::ifstream in_file(file_name.c_str(), std::ios::in);
- if (!in_file.is_open()) {
- std::cerr << "Unable to open file " << file_name << std::endl;
- return;
- }
- std::string line;
- double x;
- while (getline(in_file, line)) {
- std::vector< double > point;
- std::istringstream iss(line);
- while (iss >> x) {
- point.push_back(x);
- }
- if (point.size() != 1)
- points.push_back(point);
- }
- in_file.close();
-}
-
-int main(int argc, char * const argv[]) {
- if (argc != 3) {
- std::cerr << "Usage: " << argv[0]
- << " path_to_point_file nbL \n";
- return 0;
- }
-
- std::string file_name = argv[1];
- int nbL = atoi(argv[2]);
- clock_t start, end;
-
- // Construct the Simplex Tree
- Gudhi::Simplex_tree<> simplex_tree;
-
- // Read the point file
- Point_Vector point_vector;
- read_points_cust(file_name, point_vector);
- std::cout << "Successfully read " << point_vector.size() << " points.\n";
- std::cout << "Ambient dimension is " << point_vector[0].size() << ".\n";
-
- // Choose landmarks
- start = clock();
- std::vector<std::vector< int > > knn;
- Gudhi::witness_complex::landmark_choice_by_random_point(point_vector, nbL, knn);
- end = clock();
- std::cout << "Landmark choice for " << nbL << " landmarks took "
- << static_cast<double>(end - start) / CLOCKS_PER_SEC << " s. \n";
-
- // Compute witness complex
- start = clock();
- Gudhi::witness_complex::witness_complex(knn, nbL, point_vector[0].size(), simplex_tree);
- end = clock();
- std::cout << "Witness complex took "
- << static_cast<double>(end - start) / CLOCKS_PER_SEC << " s. \n";
-}
diff --git a/src/Witness_complex/include/gudhi/Active_witness/Active_witness.h b/src/Witness_complex/include/gudhi/Active_witness/Active_witness.h
new file mode 100644
index 00000000..d41a6811
--- /dev/null
+++ b/src/Witness_complex/include/gudhi/Active_witness/Active_witness.h
@@ -0,0 +1,67 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Siargey Kachanovich
+ *
+ * Copyright (C) 2016 INRIA (France)
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef ACTIVE_WITNESS_ACTIVE_WITNESS_H_
+#define ACTIVE_WITNESS_ACTIVE_WITNESS_H_
+
+#include <gudhi/Active_witness/Active_witness_iterator.h>
+#include <list>
+
+namespace Gudhi {
+
+namespace witness_complex {
+
+ /* \class Active_witness
+ * \brief Class representing a list of nearest neighbors to a given witness.
+ * \details Every element is a pair of a landmark identifier and the squared distance to it.
+ */
+template< typename Id_distance_pair,
+ typename INS_range >
+class Active_witness {
+ public:
+ typedef Active_witness<Id_distance_pair, INS_range> ActiveWitness;
+ typedef typename INS_range::iterator INS_iterator;
+ typedef Active_witness_iterator< ActiveWitness, Id_distance_pair, INS_iterator > iterator;
+ typedef typename std::list<Id_distance_pair> Table;
+
+ Table nearest_landmark_table_;
+ INS_range search_range_;
+ INS_iterator iterator_next_;
+ INS_iterator iterator_end_;
+
+ Active_witness(const INS_range& search_range)
+ : search_range_(search_range), iterator_next_(search_range_.begin()), iterator_end_(search_range_.end()) {
+ }
+
+ iterator begin() {
+ return iterator(this, nearest_landmark_table_.begin());
+ }
+
+ iterator end() {
+ return iterator(this);
+ }
+};
+
+} // namespace witness_complex
+} // namespace Gudhi
+
+#endif // ACTIVE_WITNESS_ACTIVE_WITNESS_H_
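For reference, a minimal sketch (not part of the patch) of how an Active_witness lazily copies pairs from its incremental search range into its internal list. It assumes a plain std::vector of (landmark id, squared distance) pairs as the search range, and that GUDHI_CHECK, used by the iterator, is provided by gudhi/Debug_utils.h.

#include <gudhi/Debug_utils.h>  // assumption: defines GUDHI_CHECK used by Active_witness_iterator
#include <gudhi/Active_witness/Active_witness.h>

#include <cstddef>
#include <iostream>
#include <utility>
#include <vector>

int main() {
  using Id_distance_pair = std::pair<std::size_t, double>;
  using Search_range = std::vector<Id_distance_pair>;
  using Active_witness = Gudhi::witness_complex::Active_witness<Id_distance_pair, Search_range>;

  // nearest landmarks of one witness, sorted by squared distance,
  // as an incremental k-d tree search would return them
  Search_range search_range = {{2, 0.0}, {0, 0.7}, {1, 1.3}};
  Active_witness aw(search_range);

  // pairs are pulled from the search range into the internal list only when the iterator reaches its end
  for (auto it = aw.begin(); it != aw.end(); ++it)
    std::cout << "landmark " << it->first << " at squared distance " << it->second << "\n";
  return 0;
}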
diff --git a/src/Witness_complex/include/gudhi/Active_witness/Active_witness_iterator.h b/src/Witness_complex/include/gudhi/Active_witness/Active_witness_iterator.h
new file mode 100644
index 00000000..0a05173a
--- /dev/null
+++ b/src/Witness_complex/include/gudhi/Active_witness/Active_witness_iterator.h
@@ -0,0 +1,108 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Siargey Kachanovich
+ *
+ * Copyright (C) 2016 INRIA (France)
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef ACTIVE_WITNESS_ACTIVE_WITNESS_ITERATOR_H_
+#define ACTIVE_WITNESS_ACTIVE_WITNESS_ITERATOR_H_
+
+#include <boost/iterator/iterator_facade.hpp>
+#include <list>
+
+namespace Gudhi {
+
+namespace witness_complex {
+
+/* \brief Iterator in the nearest landmark list.
+ * \details After the iterator reaches the end of the list,
+ * the list is augmented by a (nearest landmark, distance) pair if possible.
+ * If all the landmarks are already present in the list, the iterator returns the specific end value
+ * of the corresponding 'Active_witness' object.
+ */
+template< typename Active_witness,
+ typename Id_distance_pair,
+ typename INS_iterator >
+class Active_witness_iterator
+ : public boost::iterator_facade< Active_witness_iterator <Active_witness, Id_distance_pair, INS_iterator>,
+ Id_distance_pair const,
+ boost::forward_traversal_tag,
+ Id_distance_pair const> {
+ friend class boost::iterator_core_access;
+
+ typedef typename std::list<Id_distance_pair>::iterator Pair_iterator;
+ typedef typename Gudhi::witness_complex::Active_witness_iterator<Active_witness,
+ Id_distance_pair,
+ INS_iterator> Iterator;
+
+ Active_witness *aw_;
+ Pair_iterator lh_; // landmark handle
+ bool is_end_; // true only if the iterator is at the end of the list and there are no more neighbors to add
+
+ public:
+ Active_witness_iterator(Active_witness* aw)
+ : aw_(aw), lh_(aw_->nearest_landmark_table_.end()), is_end_(true) {
+ }
+
+ Active_witness_iterator(Active_witness* aw, const Pair_iterator& lh)
+ : aw_(aw), lh_(lh) {
+ is_end_ = false;
+ if (lh_ == aw_->nearest_landmark_table_.end()) {
+ if (aw_->iterator_next_ == aw_->iterator_end_) {
+ is_end_ = true;
+ } else {
+ aw_->nearest_landmark_table_.push_back(*aw_->iterator_next_);
+ lh_ = --aw_->nearest_landmark_table_.end();
+ ++(aw_->iterator_next_);
+ }
+ }
+ }
+
+ private :
+ Id_distance_pair& dereference() const {
+ return *lh_;
+ }
+
+ bool equal(const Iterator& other) const {
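+ // Iterators compare equal if their is_end_ flags match or if they point to the same list element;
+ // in the construction of the complex they are only ever compared against the end iterator.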
+ return (is_end_ == other.is_end_) || (lh_ == other.lh_);
+ }
+
+ void increment() {
+ // the neighbor search can't be at the end iterator of a list
+ GUDHI_CHECK(!is_end_ && lh_ != aw_->nearest_landmark_table_.end(),
+ std::logic_error("Wrong active witness increment."));
+ // advance in the list; if its end is reached, try to pull the next nearest landmark from the search range
+ lh_++;
+ if (lh_ == aw_->nearest_landmark_table_.end()) {
+ if (aw_->iterator_next_ == aw_->iterator_end_) {
+ is_end_ = true;
+ } else {
+ aw_->nearest_landmark_table_.push_back(*aw_->iterator_next_);
+ lh_ = std::prev(aw_->nearest_landmark_table_.end());
+ ++(aw_->iterator_next_);
+ }
+ }
+ }
+};
+
+} // namespace witness_complex
+} // namespace Gudhi
+
+#endif // ACTIVE_WITNESS_ACTIVE_WITNESS_ITERATOR_H_
diff --git a/src/Witness_complex/include/gudhi/Euclidean_strong_witness_complex.h b/src/Witness_complex/include/gudhi/Euclidean_strong_witness_complex.h
new file mode 100644
index 00000000..fb669ef8
--- /dev/null
+++ b/src/Witness_complex/include/gudhi/Euclidean_strong_witness_complex.h
@@ -0,0 +1,104 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Siargey Kachanovich
+ *
+ * Copyright (C) 2015 INRIA (France)
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef EUCLIDEAN_STRONG_WITNESS_COMPLEX_H_
+#define EUCLIDEAN_STRONG_WITNESS_COMPLEX_H_
+
+#include <gudhi/Strong_witness_complex.h>
+#include <gudhi/Active_witness/Active_witness.h>
+#include <gudhi/Kd_tree_search.h>
+
+#include <utility>
+#include <vector>
+
+namespace Gudhi {
+
+namespace witness_complex {
+
+/**
+ * \private
+ * \class Euclidean_strong_witness_complex
+ * \brief Constructs strong witness complex for given sets of witnesses and landmarks in Euclidean space.
+ * \ingroup witness_complex
+ *
+ * \tparam Kernel_ requires a <a target="_blank"
+ * href="http://doc.cgal.org/latest/Kernel_d/classCGAL_1_1Epick__d.html">CGAL::Epick_d</a> class.
+ */
+template< class Kernel_ >
+class Euclidean_strong_witness_complex
+ : public Strong_witness_complex<std::vector<typename Gudhi::spatial_searching::Kd_tree_search<Kernel_,
+ std::vector<typename Kernel_::Point_d>>::INS_range>> {
+ private:
+ typedef Kernel_ K;
+ typedef typename K::Point_d Point_d;
+ typedef std::vector<Point_d> Point_range;
+ typedef Gudhi::spatial_searching::Kd_tree_search<Kernel_, Point_range> Kd_tree;
+ typedef typename Kd_tree::INS_range Nearest_landmark_range;
+ typedef typename std::vector<Nearest_landmark_range> Nearest_landmark_table;
+
+ typedef typename Nearest_landmark_range::Point_with_transformed_distance Id_distance_pair;
+ typedef typename Id_distance_pair::first_type Landmark_id;
+ typedef Landmark_id Vertex_handle;
+
+ private:
+ Point_range landmarks_;
+ Kd_tree landmark_tree_;
+ using Strong_witness_complex<Nearest_landmark_table>::nearest_landmark_table_;
+
+ public:
+ /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+ /* @name Constructor
+ */
+
+ //@{
+
+ /**
+ * \brief Initializes member variables before constructing simplicial complex.
+ * \details Records landmarks from the range 'landmarks' into a
+ * table internally, as well as witnesses from the range 'witnesses'.
+ * Both ranges should have value_type Kernel_::Point_d.
+ */
+ template< typename LandmarkRange,
+ typename WitnessRange >
+ Euclidean_strong_witness_complex(const LandmarkRange & landmarks,
+ const WitnessRange & witnesses)
+ : landmarks_(std::begin(landmarks), std::end(landmarks)), landmark_tree_(landmarks_) {
+ nearest_landmark_table_.reserve(boost::size(witnesses));
+ for (auto w : witnesses)
+ nearest_landmark_table_.push_back(landmark_tree_.query_incremental_nearest_neighbors(w));
+ }
+
+ /** \brief Returns the point corresponding to the given vertex.
+ */
+ template <typename Vertex_handle>
+ Point_d get_point(Vertex_handle vertex) const {
+ return landmarks_[vertex];
+ }
+
+ //@}
+};
+
+} // namespace witness_complex
+
+} // namespace Gudhi
+
+#endif // EUCLIDEAN_STRONG_WITNESS_COMPLEX_H_
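A condensed sketch of the intended use, mirroring example_strong_witness_complex_off.cpp above but with hard-coded points instead of an OFF file (an illustration, not part of the patch; CGAL >= 4.6 and Eigen3 are required, as for the other Euclidean examples):

#include <gudhi/Simplex_tree.h>
#include <gudhi/Euclidean_strong_witness_complex.h>

#include <CGAL/Epick_d.h>

#include <iostream>
#include <vector>

int main() {
  using K = CGAL::Epick_d<CGAL::Dynamic_dimension_tag>;
  using Point_d = K::Point_d;
  using Strong_witness_complex = Gudhi::witness_complex::Euclidean_strong_witness_complex<K>;

  // both ranges have value_type K::Point_d, as the constructor requires
  std::vector<Point_d> landmarks = {Point_d(std::vector<double>{0., 0.}),
                                    Point_d(std::vector<double>{1., 0.}),
                                    Point_d(std::vector<double>{0., 1.})};
  std::vector<Point_d> witnesses = {Point_d(std::vector<double>{0.1, 0.1}),
                                    Point_d(std::vector<double>{0.9, 0.1}),
                                    Point_d(std::vector<double>{0.1, 0.9}),
                                    Point_d(std::vector<double>{0.5, 0.5})};

  Gudhi::Simplex_tree<> simplex_tree;
  Strong_witness_complex strong_witness_complex(landmarks, witnesses);
  // squared relaxation parameter 1.0, simplices of dimension at most 2
  strong_witness_complex.create_complex(simplex_tree, 1.0, 2);
  std::cout << "Number of simplices: " << simplex_tree.num_simplices() << "\n";
  return 0;
}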
diff --git a/src/Witness_complex/include/gudhi/Euclidean_witness_complex.h b/src/Witness_complex/include/gudhi/Euclidean_witness_complex.h
new file mode 100644
index 00000000..6afe9a5d
--- /dev/null
+++ b/src/Witness_complex/include/gudhi/Euclidean_witness_complex.h
@@ -0,0 +1,106 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Siargey Kachanovich
+ *
+ * Copyright (C) 2015 INRIA (France)
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef EUCLIDEAN_WITNESS_COMPLEX_H_
+#define EUCLIDEAN_WITNESS_COMPLEX_H_
+
+#include <gudhi/Witness_complex.h>
+#include <gudhi/Active_witness/Active_witness.h>
+#include <gudhi/Kd_tree_search.h>
+
+#include <utility>
+#include <vector>
+#include <list>
+#include <limits>
+
+namespace Gudhi {
+
+namespace witness_complex {
+
+/**
+ * \private
+ * \class Euclidean_witness_complex
+ * \brief Constructs (weak) witness complex for given sets of witnesses and landmarks in Euclidean space.
+ * \ingroup witness_complex
+ *
+ * \tparam Kernel_ requires a <a target="_blank"
+ * href="http://doc.cgal.org/latest/Kernel_d/classCGAL_1_1Epick__d.html">CGAL::Epick_d</a> class.
+ */
+template< class Kernel_ >
+class Euclidean_witness_complex
+ : public Witness_complex<std::vector<typename Gudhi::spatial_searching::Kd_tree_search<Kernel_,
+ std::vector<typename Kernel_::Point_d>>::INS_range>> {
+ private:
+ typedef Kernel_ K;
+ typedef typename K::Point_d Point_d;
+ typedef std::vector<Point_d> Point_range;
+ typedef Gudhi::spatial_searching::Kd_tree_search<Kernel_, Point_range> Kd_tree;
+ typedef typename Kd_tree::INS_range Nearest_landmark_range;
+ typedef typename std::vector<Nearest_landmark_range> Nearest_landmark_table;
+
+ typedef typename Nearest_landmark_range::Point_with_transformed_distance Id_distance_pair;
+ typedef typename Id_distance_pair::first_type Landmark_id;
+ typedef Landmark_id Vertex_handle;
+
+ private:
+ Point_range landmarks_;
+ Kd_tree landmark_tree_;
+ using Witness_complex<Nearest_landmark_table>::nearest_landmark_table_;
+
+ public:
+ /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+ /* @name Constructor
+ */
+
+ //@{
+
+ /**
+ * \brief Initializes member variables before constructing simplicial complex.
+ * \details Records landmarks from the range 'landmarks' into a
+ * table internally, as well as witnesses from the range 'witnesses'.
+ * Both ranges should have value_type Kernel_::Point_d.
+ */
+ template< typename LandmarkRange,
+ typename WitnessRange >
+ Euclidean_witness_complex(const LandmarkRange & landmarks,
+ const WitnessRange & witnesses)
+ : landmarks_(std::begin(landmarks), std::end(landmarks)), landmark_tree_(landmarks) {
+ nearest_landmark_table_.reserve(boost::size(witnesses));
+ for (auto w : witnesses)
+ nearest_landmark_table_.push_back(landmark_tree_.query_incremental_nearest_neighbors(w));
+ }
+
+ /** \brief Returns the point corresponding to the given vertex.
+ * @param[in] vertex Vertex handle of the point to retrieve.
+ */
+ Point_d get_point(Vertex_handle vertex) const {
+ return landmarks_[vertex];
+ }
+
+ //@}
+};
+
+} // namespace witness_complex
+
+} // namespace Gudhi
+
+#endif // EUCLIDEAN_WITNESS_COMPLEX_H_
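A similar sketch for the weak witness complex, also illustrating get_point(), which maps a vertex (a landmark index) back to its coordinates (again an illustration, not part of the patch):

#include <gudhi/Simplex_tree.h>
#include <gudhi/Euclidean_witness_complex.h>

#include <CGAL/Epick_d.h>

#include <iostream>
#include <vector>

int main() {
  using K = CGAL::Epick_d<CGAL::Dynamic_dimension_tag>;
  using Point_d = K::Point_d;
  using Witness_complex = Gudhi::witness_complex::Euclidean_witness_complex<K>;

  std::vector<Point_d> landmarks = {Point_d(std::vector<double>{0., 0.}),
                                    Point_d(std::vector<double>{1., 0.}),
                                    Point_d(std::vector<double>{0., 1.})};
  std::vector<Point_d> witnesses = {Point_d(std::vector<double>{0.2, 0.2}),
                                    Point_d(std::vector<double>{0.8, 0.2}),
                                    Point_d(std::vector<double>{0.2, 0.8})};

  Gudhi::Simplex_tree<> simplex_tree;
  Witness_complex witness_complex(landmarks, witnesses);
  witness_complex.create_complex(simplex_tree, 0.5);  // squared relaxation 0.5, no dimension limit

  std::cout << "Number of simplices: " << simplex_tree.num_simplices() << "\n";
  Point_d first_landmark = witness_complex.get_point(0);  // vertex handles index the landmark range
  std::cout << "Vertex 0 corresponds to a landmark of dimension " << first_landmark.dimension() << "\n";
  return 0;
}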
diff --git a/src/Witness_complex/include/gudhi/Landmark_choice_by_furthest_point.h b/src/Witness_complex/include/gudhi/Landmark_choice_by_furthest_point.h
deleted file mode 100644
index df93155b..00000000
--- a/src/Witness_complex/include/gudhi/Landmark_choice_by_furthest_point.h
+++ /dev/null
@@ -1,105 +0,0 @@
-/* This file is part of the Gudhi Library. The Gudhi library
- * (Geometric Understanding in Higher Dimensions) is a generic C++
- * library for computational topology.
- *
- * Author(s): Siargey Kachanovich
- *
- * Copyright (C) 2015 INRIA Sophia Antipolis-Méditerranée (France)
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#ifndef LANDMARK_CHOICE_BY_FURTHEST_POINT_H_
-#define LANDMARK_CHOICE_BY_FURTHEST_POINT_H_
-
-#include <boost/range/size.hpp>
-
-#include <limits> // for numeric_limits<>
-#include <iterator>
-#include <algorithm> // for sort
-#include <vector>
-
-namespace Gudhi {
-
-namespace witness_complex {
-
- typedef std::vector<int> typeVectorVertex;
-
- /**
- * \ingroup witness_complex
- * \brief Landmark choice strategy by iteratively adding the furthest witness from the
- * current landmark set as the new landmark.
- * \details It chooses nbL landmarks from a random access range `points` and
- * writes {witness}*{closest landmarks} matrix in `knn`.
- *
- * The type KNearestNeighbors can be seen as
- * Witness_range<Closest_landmark_range<Vertex_handle>>, where
- * Witness_range and Closest_landmark_range are random access ranges
- *
- */
-
- template <typename KNearestNeighbours,
- typename Point_random_access_range>
- void landmark_choice_by_furthest_point(Point_random_access_range const &points,
- int nbL,
- KNearestNeighbours &knn) {
- int nb_points = boost::size(points);
- assert(nb_points >= nbL);
- // distance matrix witness x landmarks
- std::vector<std::vector<double>> wit_land_dist(nb_points, std::vector<double>());
- // landmark list
- typeVectorVertex chosen_landmarks;
-
- knn = KNearestNeighbours(nb_points, std::vector<int>());
- int current_number_of_landmarks = 0; // counter for landmarks
- double curr_max_dist = 0; // used for defining the furhest point from L
- const double infty = std::numeric_limits<double>::infinity(); // infinity (see next entry)
- std::vector< double > dist_to_L(nb_points, infty); // vector of current distances to L from points
-
- // TODO(SK) Consider using rand_r(...) instead of rand(...) for improved thread safety
- // or better yet std::uniform_int_distribution
- int rand_int = rand() % nb_points;
- int curr_max_w = rand_int; // For testing purposes a pseudo-random number is used here
-
- for (current_number_of_landmarks = 0; current_number_of_landmarks != nbL; current_number_of_landmarks++) {
- // curr_max_w at this point is the next landmark
- chosen_landmarks.push_back(curr_max_w);
- unsigned i = 0;
- for (auto& p : points) {
- double curr_dist = euclidean_distance(p, *(std::begin(points) + chosen_landmarks[current_number_of_landmarks]));
- wit_land_dist[i].push_back(curr_dist);
- knn[i].push_back(current_number_of_landmarks);
- if (curr_dist < dist_to_L[i])
- dist_to_L[i] = curr_dist;
- ++i;
- }
- curr_max_dist = 0;
- for (i = 0; i < dist_to_L.size(); i++)
- if (dist_to_L[i] > curr_max_dist) {
- curr_max_dist = dist_to_L[i];
- curr_max_w = i;
- }
- }
- for (int i = 0; i < nb_points; ++i)
- std::sort(std::begin(knn[i]),
- std::end(knn[i]),
- [&wit_land_dist, i](int a, int b) {
- return wit_land_dist[i][a] < wit_land_dist[i][b]; });
- }
-
-} // namespace witness_complex
-
-} // namespace Gudhi
-
-#endif // LANDMARK_CHOICE_BY_FURTHEST_POINT_H_
diff --git a/src/Witness_complex/include/gudhi/Landmark_choice_by_random_point.h b/src/Witness_complex/include/gudhi/Landmark_choice_by_random_point.h
deleted file mode 100644
index ebf6aad1..00000000
--- a/src/Witness_complex/include/gudhi/Landmark_choice_by_random_point.h
+++ /dev/null
@@ -1,96 +0,0 @@
-/* This file is part of the Gudhi Library. The Gudhi library
- * (Geometric Understanding in Higher Dimensions) is a generic C++
- * library for computational topology.
- *
- * Author(s): Siargey Kachanovich
- *
- * Copyright (C) 2015 INRIA Sophia Antipolis-Méditerranée (France)
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#ifndef LANDMARK_CHOICE_BY_RANDOM_POINT_H_
-#define LANDMARK_CHOICE_BY_RANDOM_POINT_H_
-
-#include <boost/range/size.hpp>
-
-#include <queue> // for priority_queue<>
-#include <utility> // for pair<>
-#include <iterator>
-#include <vector>
-#include <set>
-
-namespace Gudhi {
-
-namespace witness_complex {
-
- /**
- * \ingroup witness_complex
- * \brief Landmark choice strategy by taking random vertices for landmarks.
- * \details It chooses nbL distinct landmarks from a random access range `points`
- * and outputs a matrix {witness}*{closest landmarks} in knn.
- *
- * The type KNearestNeighbors can be seen as
- * Witness_range<Closest_landmark_range<Vertex_handle>>, where
- * Witness_range and Closest_landmark_range are random access ranges and
- * Vertex_handle is the label type of a vertex in a simplicial complex.
- * Closest_landmark_range needs to have push_back operation.
- */
-
- template <typename KNearestNeighbours,
- typename Point_random_access_range>
- void landmark_choice_by_random_point(Point_random_access_range const &points,
- int nbL,
- KNearestNeighbours &knn) {
- int nbP = boost::size(points);
- assert(nbP >= nbL);
- std::set<int> landmarks;
- int current_number_of_landmarks = 0; // counter for landmarks
-
- // TODO(SK) Consider using rand_r(...) instead of rand(...) for improved thread safety
- int chosen_landmark = rand() % nbP;
- for (current_number_of_landmarks = 0; current_number_of_landmarks != nbL; current_number_of_landmarks++) {
- while (landmarks.find(chosen_landmark) != landmarks.end())
- chosen_landmark = rand() % nbP;
- landmarks.insert(chosen_landmark);
- }
-
- int dim = boost::size(*std::begin(points));
- typedef std::pair<double, int> dist_i;
- typedef bool (*comp)(dist_i, dist_i);
- knn = KNearestNeighbours(nbP);
- for (int points_i = 0; points_i < nbP; points_i++) {
- std::priority_queue<dist_i, std::vector<dist_i>, comp> l_heap([](dist_i j1, dist_i j2) {
- return j1.first > j2.first;
- });
- std::set<int>::iterator landmarks_it;
- int landmarks_i = 0;
- for (landmarks_it = landmarks.begin(), landmarks_i = 0; landmarks_it != landmarks.end();
- ++landmarks_it, landmarks_i++) {
- dist_i dist = std::make_pair(euclidean_distance(points[points_i], points[*landmarks_it]), landmarks_i);
- l_heap.push(dist);
- }
- for (int i = 0; i < dim + 1; i++) {
- dist_i dist = l_heap.top();
- knn[points_i].push_back(dist.second);
- l_heap.pop();
- }
- }
- }
-
-} // namespace witness_complex
-
-} // namespace Gudhi
-
-#endif // LANDMARK_CHOICE_BY_RANDOM_POINT_H_
diff --git a/src/Witness_complex/include/gudhi/Strong_witness_complex.h b/src/Witness_complex/include/gudhi/Strong_witness_complex.h
new file mode 100644
index 00000000..a973ddb7
--- /dev/null
+++ b/src/Witness_complex/include/gudhi/Strong_witness_complex.h
@@ -0,0 +1,185 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Siargey Kachanovich
+ *
+ * Copyright (C) 2015 INRIA (France)
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef STRONG_WITNESS_COMPLEX_H_
+#define STRONG_WITNESS_COMPLEX_H_
+
+#include <gudhi/Active_witness/Active_witness.h>
+
+#include <utility>
+#include <vector>
+#include <list>
+#include <limits>
+
+namespace Gudhi {
+
+namespace witness_complex {
+
+/* \private
+ * \class Strong_witness_complex
+ * \brief Constructs strong witness complex for a given table of nearest landmarks with respect to witnesses.
+ * \ingroup witness_complex
+ *
+ * \tparam Nearest_landmark_table_ needs to be a range of a range of pairs of nearest landmarks and distances.
+ * The type Nearest_landmark_table_::value_type must be a copyable range.
+ * The range of pairs must admit a member type 'iterator'. The dereference type
+ * of the pair range iterator needs to be 'std::pair<std::size_t, double>'.
+ */
+template< class Nearest_landmark_table_ >
+class Strong_witness_complex {
+ private:
+ typedef typename Nearest_landmark_table_::value_type Nearest_landmark_range;
+ typedef std::size_t Witness_id;
+ typedef std::size_t Landmark_id;
+ typedef std::pair<Landmark_id, double> Id_distance_pair;
+ typedef Active_witness<Id_distance_pair, Nearest_landmark_range> ActiveWitness;
+ typedef std::list< ActiveWitness > ActiveWitnessList;
+ typedef std::vector< Landmark_id > typeVectorVertex;
+ typedef std::vector<Nearest_landmark_range> Nearest_landmark_table_internal;
+ typedef Landmark_id Vertex_handle;
+
+ protected:
+ Nearest_landmark_table_internal nearest_landmark_table_;
+
+ public:
+ /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+ /* @name Constructor
+ */
+
+ //@{
+
+ Strong_witness_complex() {
+ }
+
+ /**
+   * \brief Initializes member variables before constructing the simplicial complex.
+ * \details Records nearest landmark table.
+   * @param[in] nearest_landmark_table needs to be a range of ranges of pairs of nearest landmarks and distances.
+ * The class Nearest_landmark_table_::value_type must be a copiable range.
+ * The range of pairs must admit a member type 'iterator'. The dereference type
+ * of the pair range iterator needs to be 'std::pair<std::size_t, double>'.
+ */
+ Strong_witness_complex(Nearest_landmark_table_ const & nearest_landmark_table)
+ : nearest_landmark_table_(std::begin(nearest_landmark_table), std::end(nearest_landmark_table)) {
+ }
+
+ /** \brief Outputs the strong witness complex of relaxation 'max_alpha_square'
+ * in a simplicial complex data structure.
+ * \details The function returns true if the construction is successful and false otherwise.
+ * @param[out] complex Simplicial complex data structure, which is a model of
+ * SimplicialComplexForWitness concept.
+ * @param[in] max_alpha_square Maximal squared relaxation parameter.
+ * @param[in] limit_dimension Represents the maximal dimension of the simplicial complex
+ * (default value = no limit).
+ */
+ template < typename SimplicialComplexForWitness >
+ bool create_complex(SimplicialComplexForWitness& complex,
+ double max_alpha_square,
+ Landmark_id limit_dimension = std::numeric_limits<Landmark_id>::max()) const {
+ Landmark_id complex_dim = 0;
+ if (complex.num_vertices() > 0) {
+ std::cerr << "Strong witness complex cannot create complex - complex is not empty.\n";
+ return false;
+ }
+ if (max_alpha_square < 0) {
+ std::cerr << "Strong witness complex cannot create complex - squared relaxation parameter must be "
+ << "non-negative.\n";
+ return false;
+ }
+ if (limit_dimension < 0) {
+ std::cerr << "Strong witness complex cannot create complex - limit dimension must be non-negative.\n";
+ return false;
+ }
+ for (auto w : nearest_landmark_table_) {
+ ActiveWitness aw(w);
+ typeVectorVertex simplex;
+ typename ActiveWitness::iterator aw_it = aw.begin();
+      double lim_dist2 = aw.begin()->second + max_alpha_square;
+ while ((Landmark_id)simplex.size() <= limit_dimension && aw_it != aw.end() && aw_it->second < lim_dist2) {
+ simplex.push_back(aw_it->first);
+ complex.insert_simplex_and_subfaces(simplex, aw_it->second - aw.begin()->second);
+ aw_it++;
+ }
+      // continue inserting limit_dimension-dimensional faces of the subsequent simplices
+      typeVectorVertex& vertices = simplex; // 'simplex' is referred to as 'vertices' from here on
+ while (aw_it != aw.end() && aw_it->second < lim_dist2) {
+ typeVectorVertex facet = {};
+ add_all_faces_of_dimension(limit_dimension, vertices, vertices.begin(), aw_it,
+ aw_it->second - aw.begin()->second, facet, complex);
+ vertices.push_back(aw_it->first);
+ aw_it++;
+ }
+ if ((Landmark_id)simplex.size() - 1 > complex_dim)
+ complex_dim = simplex.size() - 1;
+ }
+ complex.set_dimension(complex_dim);
+ return true;
+ }
+
+ private:
+  /* \brief Recursively adds all the faces of a fixed dimension witnessed by the same witness.
+   * The iterator tells how far landmarks can be taken to form simplices.
+   * 'simplex' is the prefix of the simplices to insert.
+   * The landmark pointed to by aw_it is added to every simplex that is formed.
+ */
+ template < typename SimplicialComplexForWitness >
+ void add_all_faces_of_dimension(Landmark_id dim,
+ typeVectorVertex& vertices,
+ typename typeVectorVertex::iterator curr_it,
+ typename ActiveWitness::iterator aw_it,
+ double filtration_value,
+ typeVectorVertex& simplex,
+ SimplicialComplexForWitness& sc) const {
+ if (dim > 0) {
+ while (curr_it != vertices.end()) {
+ simplex.push_back(*curr_it);
+ ++curr_it;
+ add_all_faces_of_dimension(dim-1,
+ vertices,
+ curr_it,
+ aw_it,
+ filtration_value,
+ simplex,
+ sc);
+ simplex.pop_back();
+ add_all_faces_of_dimension(dim,
+ vertices,
+ curr_it,
+ aw_it,
+ filtration_value,
+ simplex,
+ sc);
+ }
+ } else if (dim == 0) {
+ simplex.push_back(aw_it->first);
+ sc.insert_simplex_and_subfaces(simplex, filtration_value);
+ simplex.pop_back();
+ }
+ }
+ //@}
+};
+
+} // namespace witness_complex
+
+} // namespace Gudhi
+
+#endif // STRONG_WITNESS_COMPLEX_H_
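For orientation, here is a minimal usage sketch of the new header above (not part of the patch): it hand-builds a tiny nearest-landmark table and writes the strong witness complex into a Gudhi::Simplex_tree. Only the calling convention matters here; the two witnesses, three landmarks and the squared relaxation 1.0 are arbitrary illustration choices, and the table rows are assumed to be sorted by increasing squared distance.

#include <gudhi/Simplex_tree.h>
#include <gudhi/Strong_witness_complex.h>

#include <cstddef>
#include <iostream>
#include <utility>
#include <vector>

int main() {
  using Nearest_landmark_range = std::vector<std::pair<std::size_t, double>>;
  using Nearest_landmark_table = std::vector<Nearest_landmark_range>;
  using Strong_witness_complex =
      Gudhi::witness_complex::Strong_witness_complex<Nearest_landmark_table>;

  // Two witnesses, three landmarks; each row is sorted by increasing squared distance.
  Nearest_landmark_table nlt = {
      {{0, 0.0}, {1, 0.5}, {2, 2.0}},
      {{1, 0.0}, {0, 0.4}, {2, 1.5}}};

  Gudhi::Simplex_tree<> stree;
  Strong_witness_complex strong_witness_complex(nlt);
  // Squared relaxation 1.0, no bound on the dimension.
  if (strong_witness_complex.create_complex(stree, 1.0))
    std::cout << "num_simplices = " << stree.num_simplices() << std::endl;
  return 0;
}

The weak Witness_complex below is driven the same way, as the new unit test test_simple_witness_complex.cpp further down shows.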
diff --git a/src/Witness_complex/include/gudhi/Witness_complex.h b/src/Witness_complex/include/gudhi/Witness_complex.h
index 489cdf11..63f03687 100644
--- a/src/Witness_complex/include/gudhi/Witness_complex.h
+++ b/src/Witness_complex/include/gudhi/Witness_complex.h
@@ -4,7 +4,7 @@
*
* Author(s): Siargey Kachanovich
*
- * Copyright (C) 2015 INRIA Sophia Antipolis-Méditerranée (France)
+ * Copyright (C) 2015 INRIA (France)
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -23,65 +23,44 @@
#ifndef WITNESS_COMPLEX_H_
#define WITNESS_COMPLEX_H_
-// Needed for the adjacency graph in bad link search
-#include <boost/graph/graph_traits.hpp>
-#include <boost/graph/adjacency_list.hpp>
-#include <boost/graph/connected_components.hpp>
+#include <gudhi/Active_witness/Active_witness.h>
+#include <gudhi/Witness_complex/all_faces_in.h>
-#include <boost/range/size.hpp>
-
-#include <gudhi/distance_functions.h>
-
-#include <algorithm>
#include <utility>
#include <vector>
#include <list>
-#include <set>
-#include <queue>
#include <limits>
-#include <ctime>
-#include <iostream>
namespace Gudhi {
namespace witness_complex {
-// /*
-// * \private
-// \class Witness_complex
-// \brief Constructs the witness complex for the given set of witnesses and landmarks.
-// \ingroup witness_complex
-// */
-template< class SimplicialComplex>
+/**
+ * \private
+ * \class Witness_complex
+ * \brief Constructs (weak) witness complex for a given table of nearest landmarks with respect to witnesses.
+ * \ingroup witness_complex
+ *
+ * \tparam Nearest_landmark_table_ needs to be a range of ranges of pairs of nearest landmarks and distances.
+ * The class Nearest_landmark_table_::value_type must be a copiable range.
+ * The range of pairs must admit a member type 'iterator'. The dereference type
+ * of the pair range iterator needs to be 'std::pair<std::size_t, double>'.
+*/
+template< class Nearest_landmark_table_ >
class Witness_complex {
private:
- struct Active_witness {
- int witness_id;
- int landmark_id;
-
- Active_witness(int witness_id_, int landmark_id_)
- : witness_id(witness_id_),
- landmark_id(landmark_id_) { }
- };
-
- private:
- typedef typename SimplicialComplex::Simplex_handle Simplex_handle;
- typedef typename SimplicialComplex::Vertex_handle Vertex_handle;
-
- typedef std::vector< double > Point_t;
- typedef std::vector< Point_t > Point_Vector;
-
- typedef std::vector< Vertex_handle > typeVectorVertex;
- typedef std::pair< typeVectorVertex, Filtration_value> typeSimplex;
- typedef std::pair< Simplex_handle, bool > typePairSimplexBool;
-
- typedef int Witness_id;
- typedef int Landmark_id;
- typedef std::list< Vertex_handle > ActiveWitnessList;
-
- private:
- int nbL_; // Number of landmarks
- SimplicialComplex& sc_; // Simplicial complex
+ typedef typename Nearest_landmark_table_::value_type Nearest_landmark_range;
+ typedef std::size_t Witness_id;
+ typedef std::size_t Landmark_id;
+ typedef std::pair<Landmark_id, double> Id_distance_pair;
+ typedef Active_witness<Id_distance_pair, Nearest_landmark_range> ActiveWitness;
+ typedef std::list< ActiveWitness > ActiveWitnessList;
+ typedef std::vector< Landmark_id > typeVectorVertex;
+ typedef std::vector<Nearest_landmark_range> Nearest_landmark_table_internal;
+ typedef Landmark_id Vertex_handle;
+
+ protected:
+ Nearest_landmark_table_internal nearest_landmark_table_;
public:
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
@@ -90,174 +69,136 @@ class Witness_complex {
//@{
- // Witness_range<Closest_landmark_range<Vertex_handle>>
+ Witness_complex() {
+ }
- /*
- * \brief Iterative construction of the witness complex.
- * \details The witness complex is written in sc_ basing on a matrix knn of
- * nearest neighbours of the form {witnesses}x{landmarks}.
- *
- * The type KNearestNeighbors can be seen as
- * Witness_range<Closest_landmark_range<Vertex_handle>>, where
- * Witness_range and Closest_landmark_range are random access ranges.
- *
- * Constructor takes into account at most (dim+1)
- * first landmarks from each landmark range to construct simplices.
- *
- * Landmarks are supposed to be in [0,nbL_-1]
+ /**
+   * \brief Initializes member variables before constructing the simplicial complex.
+ * \details Records nearest landmark table.
+   * @param[in] nearest_landmark_table needs to be a range of ranges of pairs of nearest landmarks and distances.
+ * The class Nearest_landmark_table_::value_type must be a copiable range.
+ * The range of pairs must admit a member type 'iterator'. The dereference type
+ * of the pair range iterator needs to be 'std::pair<std::size_t, double>'.
*/
- template< typename KNearestNeighbors >
- Witness_complex(KNearestNeighbors const & knn,
- int nbL,
- int dim,
- SimplicialComplex & sc) : nbL_(nbL), sc_(sc) {
- // Construction of the active witness list
- int nbW = boost::size(knn);
- typeVectorVertex vv;
- int counter = 0;
- /* The list of still useful witnesses
- * it will diminuish in the course of iterations
- */
- ActiveWitnessList active_w; // = new ActiveWitnessList();
- for (Vertex_handle i = 0; i != nbL_; ++i) {
- // initial fill of 0-dimensional simplices
- // by doing it we don't assume that landmarks are necessarily witnesses themselves anymore
- counter++;
- vv = {i};
- sc_.insert_simplex(vv);
- // TODO(SK) Error if not inserted : normally no need here though
+
+ Witness_complex(Nearest_landmark_table_ const & nearest_landmark_table)
+ : nearest_landmark_table_(std::begin(nearest_landmark_table), std::end(nearest_landmark_table)) {
+ }
+
+ /** \brief Outputs the (weak) witness complex of relaxation 'max_alpha_square'
+ * in a simplicial complex data structure.
+ * \details The function returns true if the construction is successful and false otherwise.
+   * @param[out] complex Simplicial complex data structure, which is a model of
+ * SimplicialComplexForWitness concept.
+ * @param[in] max_alpha_square Maximal squared relaxation parameter.
+ * @param[in] limit_dimension Represents the maximal dimension of the simplicial complex
+ * (default value = no limit).
+ */
+ template < typename SimplicialComplexForWitness >
+ bool create_complex(SimplicialComplexForWitness& complex,
+ double max_alpha_square,
+ std::size_t limit_dimension = std::numeric_limits<std::size_t>::max()) const {
+ if (complex.num_vertices() > 0) {
+ std::cerr << "Witness complex cannot create complex - complex is not empty.\n";
+ return false;
}
- int k = 1; /* current dimension in iterative construction */
- for (int i = 0; i != nbW; ++i)
- active_w.push_back(i);
- while (!active_w.empty() && k < dim) {
- typename ActiveWitnessList::iterator it = active_w.begin();
- while (it != active_w.end()) {
- typeVectorVertex simplex_vector;
- /* THE INSERTION: Checking if all the subfaces are in the simplex tree*/
- bool ok = all_faces_in(knn, *it, k);
- if (ok) {
- for (int i = 0; i != k + 1; ++i)
- simplex_vector.push_back(knn[*it][i]);
- sc_.insert_simplex(simplex_vector);
- // TODO(SK) Error if not inserted : normally no need here though
- ++it;
- } else {
- active_w.erase(it++); // First increase the iterator and then erase the previous element
- }
+ if (max_alpha_square < 0) {
+ std::cerr << "Witness complex cannot create complex - squared relaxation parameter must be non-negative.\n";
+ return false;
+ }
+ if (limit_dimension < 0) {
+ std::cerr << "Witness complex cannot create complex - limit dimension must be non-negative.\n";
+ return false;
+ }
+ ActiveWitnessList active_witnesses;
+ Landmark_id k = 0; /* current dimension in iterative construction */
+ for (auto w : nearest_landmark_table_)
+ active_witnesses.push_back(ActiveWitness(w));
+ while (!active_witnesses.empty() && k <= limit_dimension) {
+ typename ActiveWitnessList::iterator aw_it = active_witnesses.begin();
+ std::vector<Landmark_id> simplex;
+ simplex.reserve(k+1);
+ while (aw_it != active_witnesses.end()) {
+ bool ok = add_all_faces_of_dimension(k,
+ max_alpha_square,
+ std::numeric_limits<double>::infinity(),
+ aw_it->begin(),
+ simplex,
+ complex,
+ aw_it->end());
+ assert(simplex.empty());
+ if (!ok)
+ active_witnesses.erase(aw_it++); // First increase the iterator and then erase the previous element
+ else
+ aw_it++;
}
k++;
}
+ complex.set_dimension(k-1);
+ return true;
}
//@}
private:
- /* \brief Check if the facets of the k-dimensional simplex witnessed
- * by witness witness_id are already in the complex.
- * inserted_vertex is the handle of the (k+1)-th vertex witnessed by witness_id
+  /* \brief Recursively adds all the faces of a certain dimension dim witnessed by the same witness.
+   * The iterator tells how far landmarks can be taken to form simplices.
+   * 'simplex' is the prefix of the simplices to insert.
+   * The return value indicates whether the witness remains active or not.
*/
- template <typename KNearestNeighbors>
- bool all_faces_in(KNearestNeighbors const &knn, int witness_id, int k) {
- std::vector< Vertex_handle > facet;
- // CHECK ALL THE FACETS
- for (int i = 0; i != k + 1; ++i) {
- facet = {};
- for (int j = 0; j != k + 1; ++j) {
- if (j != i) {
- facet.push_back(knn[witness_id][j]);
+ template < typename SimplicialComplexForWitness >
+ bool add_all_faces_of_dimension(int dim,
+ double alpha2,
+ double norelax_dist2,
+ typename ActiveWitness::iterator curr_l,
+ std::vector<Landmark_id>& simplex,
+ SimplicialComplexForWitness& sc,
+ typename ActiveWitness::iterator end) const {
+ if (curr_l == end)
+ return false;
+ bool will_be_active = false;
+ typename ActiveWitness::iterator l_it = curr_l;
+ if (dim > 0) {
+ for (; l_it != end && l_it->second - alpha2 <= norelax_dist2; ++l_it) {
+ simplex.push_back(l_it->first);
+ if (sc.find(simplex) != sc.null_simplex()) {
+ typename ActiveWitness::iterator next_it = l_it;
+ will_be_active = add_all_faces_of_dimension(dim-1,
+ alpha2,
+ norelax_dist2,
+ ++next_it,
+ simplex,
+ sc,
+ end) || will_be_active;
}
- } // endfor
- if (sc_.find(facet) == sc_.null_simplex())
- return false;
- } // endfor
- return true;
- }
-
- template <typename T>
- static void print_vector(const std::vector<T>& v) {
- std::cout << "[";
- if (!v.empty()) {
- std::cout << *(v.begin());
- for (auto it = v.begin() + 1; it != v.end(); ++it) {
- std::cout << ",";
- std::cout << *it;
+ assert(!simplex.empty());
+ simplex.pop_back();
+ // If norelax_dist is infinity, change to first omitted distance
+ if (l_it->second <= norelax_dist2)
+ norelax_dist2 = l_it->second;
}
- }
- std::cout << "]";
- }
-
- public:
- // /*
- // * \brief Verification if every simplex in the complex is witnessed by witnesses in knn.
- // * \param print_output =true will print the witnesses for each simplex
- // * \remark Added for debugging purposes.
- // */
- template< class KNearestNeighbors >
- bool is_witness_complex(KNearestNeighbors const & knn, bool print_output) {
- for (Simplex_handle sh : sc_.complex_simplex_range()) {
- bool is_witnessed = false;
- typeVectorVertex simplex;
- int nbV = 0; // number of verticed in the simplex
- for (Vertex_handle v : sc_.simplex_vertex_range(sh))
- simplex.push_back(v);
- nbV = simplex.size();
- for (typeVectorVertex w : knn) {
- bool has_vertices = true;
- for (Vertex_handle v : simplex)
- if (std::find(w.begin(), w.begin() + nbV, v) == w.begin() + nbV) {
- has_vertices = false;
- }
- if (has_vertices) {
- is_witnessed = true;
- if (print_output) {
- std::cout << "The simplex ";
- print_vector(simplex);
- std::cout << " is witnessed by the witness ";
- print_vector(w);
- std::cout << std::endl;
- }
- break;
- }
- }
- if (!is_witnessed) {
- if (print_output) {
- std::cout << "The following simplex is not witnessed ";
- print_vector(simplex);
- std::cout << std::endl;
+ } else if (dim == 0) {
+ for (; l_it != end && l_it->second - alpha2 <= norelax_dist2; ++l_it) {
+ simplex.push_back(l_it->first);
+ double filtration_value = 0;
+ // if norelax_dist is infinite, relaxation is 0.
+ if (l_it->second > norelax_dist2)
+ filtration_value = l_it->second - norelax_dist2;
+ if (all_faces_in(simplex, &filtration_value, sc)) {
+ will_be_active = true;
+ sc.insert_simplex(simplex, filtration_value);
}
- assert(is_witnessed);
- return false;
+ assert(!simplex.empty());
+ simplex.pop_back();
+ // If norelax_dist is infinity, change to first omitted distance
+ if (l_it->second < norelax_dist2)
+ norelax_dist2 = l_it->second;
}
}
- return true;
+ return will_be_active;
}
};
- /**
- * \ingroup witness_complex
- * \brief Iterative construction of the witness complex.
- * \details The witness complex is written in simplicial complex sc_
- * basing on a matrix knn of
- * nearest neighbours of the form {witnesses}x{landmarks}.
- *
- * The type KNearestNeighbors can be seen as
- * Witness_range<Closest_landmark_range<Vertex_handle>>, where
- * Witness_range and Closest_landmark_range are random access ranges.
- *
- * Procedure takes into account at most (dim+1)
- * first landmarks from each landmark range to construct simplices.
- *
- * Landmarks are supposed to be in [0,nbL_-1]
- */
- template <class KNearestNeighbors, class SimplicialComplexForWitness>
- void witness_complex(KNearestNeighbors const & knn,
- int nbL,
- int dim,
- SimplicialComplexForWitness & sc) {
- Witness_complex<SimplicialComplexForWitness>(knn, nbL, dim, sc);
- }
-
} // namespace witness_complex
} // namespace Gudhi
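The rewritten class no longer consumes a {witnesses} x {landmarks} matrix; it consumes a nearest-landmark table. Below is a sketch (not part of the patch) of how such a table can be produced without CGAL, by brute force; build_table, Point and the sample coordinates are illustrative names and values. Each row is assumed to be sorted by increasing squared distance, which is what the prefix-based construction above relies on.

#include <gudhi/Simplex_tree.h>
#include <gudhi/Witness_complex.h>

#include <algorithm>
#include <cstddef>
#include <utility>
#include <vector>

using Point = std::vector<double>;
using Nearest_landmark_range = std::vector<std::pair<std::size_t, double>>;
using Nearest_landmark_table = std::vector<Nearest_landmark_range>;

// For every witness, list all landmarks with their squared distance,
// sorted from closest to farthest.
Nearest_landmark_table build_table(std::vector<Point> const& witnesses,
                                   std::vector<Point> const& landmarks) {
  auto squared_distance = [](Point const& a, Point const& b) {
    double s = 0.;
    for (std::size_t i = 0; i < a.size(); ++i)
      s += (a[i] - b[i]) * (a[i] - b[i]);
    return s;
  };
  Nearest_landmark_table table;
  for (Point const& w : witnesses) {
    Nearest_landmark_range row;
    for (std::size_t l = 0; l < landmarks.size(); ++l)
      row.emplace_back(l, squared_distance(w, landmarks[l]));
    std::sort(row.begin(), row.end(),
              [](std::pair<std::size_t, double> const& p1,
                 std::pair<std::size_t, double> const& p2) { return p1.second < p2.second; });
    table.push_back(row);
  }
  return table;
}

int main() {
  std::vector<Point> landmarks = {{0., 0.}, {1., 0.}, {0., 1.}};
  std::vector<Point> witnesses = {{0.1, 0.1}, {0.9, 0.1}, {0.1, 0.9}};
  Gudhi::Simplex_tree<> stree;
  Gudhi::witness_complex::Witness_complex<Nearest_landmark_table>
      witness_complex(build_table(witnesses, landmarks));
  witness_complex.create_complex(stree, 0.1);  // squared relaxation 0.1
  return 0;
}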
diff --git a/src/Witness_complex/include/gudhi/Witness_complex/all_faces_in.h b/src/Witness_complex/include/gudhi/Witness_complex/all_faces_in.h
new file mode 100644
index 00000000..b68d75a1
--- /dev/null
+++ b/src/Witness_complex/include/gudhi/Witness_complex/all_faces_in.h
@@ -0,0 +1,55 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Siargey Kachanovich
+ *
+ * Copyright (C) 2015 INRIA (France)
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef WITNESS_COMPLEX_ALL_FACES_IN_H_
+#define WITNESS_COMPLEX_ALL_FACES_IN_H_
+
+/* \brief Checks whether all the facets of the given simplex are already in the complex.
+ * If they are, *filtration_value is raised to the largest filtration value found among these
+ * facets, so that the simplex can be inserted with a consistent filtration value.
+ */
+template < typename SimplicialComplexForWitness,
+ typename Simplex >
+ bool all_faces_in(Simplex& simplex,
+ double* filtration_value,
+ SimplicialComplexForWitness& sc) {
+ typedef typename SimplicialComplexForWitness::Simplex_handle Simplex_handle;
+
+ if (simplex.size() == 1)
+ return true; /* Add vertices unconditionally */
+
+ Simplex facet;
+ for (typename Simplex::iterator not_it = simplex.begin(); not_it != simplex.end(); ++not_it) {
+ facet.clear();
+ for (typename Simplex::iterator it = simplex.begin(); it != simplex.end(); ++it)
+ if (it != not_it)
+ facet.push_back(*it);
+ Simplex_handle facet_sh = sc.find(facet);
+ if (facet_sh == sc.null_simplex())
+ return false;
+ else if (sc.filtration(facet_sh) > *filtration_value)
+ *filtration_value = sc.filtration(facet_sh);
+ }
+ return true;
+ }
+
+#endif // WITNESS_COMPLEX_ALL_FACES_IN_H_
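A small sketch of what the helper above does (assuming Gudhi::Simplex_tree as the complex; the filtration values are arbitrary): once every facet of a candidate simplex is present, all_faces_in returns true and raises *filtration_value to the largest facet filtration, so the caller can insert the simplex consistently.

#include <gudhi/Simplex_tree.h>
#include <gudhi/Witness_complex/all_faces_in.h>

#include <iostream>
#include <vector>

int main() {
  using Vertex_handle = Gudhi::Simplex_tree<>::Vertex_handle;
  Gudhi::Simplex_tree<> stree;
  // Boundary of a triangle, with three different edge filtration values.
  std::vector<Vertex_handle> e01 = {0, 1}, e12 = {1, 2}, e02 = {0, 2};
  stree.insert_simplex_and_subfaces(e01, 0.1);
  stree.insert_simplex_and_subfaces(e12, 0.2);
  stree.insert_simplex_and_subfaces(e02, 0.3);

  std::vector<Vertex_handle> triangle = {0, 1, 2};
  double filtration_value = 0.;
  if (all_faces_in(triangle, &filtration_value, stree))
    // All three edges were found; filtration_value is now 0.3, the largest facet value.
    stree.insert_simplex(triangle, filtration_value);
  std::cout << stree.num_simplices() << std::endl;  // 3 vertices + 3 edges + 1 triangle = 7
  return 0;
}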
diff --git a/src/Witness_complex/test/CMakeLists.txt b/src/Witness_complex/test/CMakeLists.txt
index bb55b0f1..e73f9c3a 100644
--- a/src/Witness_complex/test/CMakeLists.txt
+++ b/src/Witness_complex/test/CMakeLists.txt
@@ -10,21 +10,29 @@ if (GPROF_PATH)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -pg")
endif()
-add_executable ( simple_witness_complexUT simple_witness_complex.cpp )
-target_link_libraries(simple_witness_complexUT ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
+add_executable ( Witness_complex_test_simple_witness_complex test_simple_witness_complex.cpp )
+target_link_libraries(Witness_complex_test_simple_witness_complex ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
+if (TBB_FOUND)
+ target_link_libraries(Witness_complex_test_simple_witness_complex ${TBB_LIBRARIES})
+endif(TBB_FOUND)
# Unitary tests definition and xml result file generation
-add_test(NAME simple_witness_complexUT
- COMMAND ${CMAKE_CURRENT_BINARY_DIR}/simple_witness_complexUT
+add_test(NAME simple_witness_complex
+ COMMAND ${CMAKE_CURRENT_BINARY_DIR}/Witness_complex_test_simple_witness_complex
# XML format for Jenkins xUnit plugin
- --log_format=XML --log_sink=${CMAKE_SOURCE_DIR}/simple_witness_complexUT.xml --log_level=test_suite --report_level=no)
+ --log_format=XML --log_sink=${CMAKE_SOURCE_DIR}/Witness_complex_test_simple_witness_complexUT.xml --log_level=test_suite --report_level=no)
-add_executable ( witness_complex_pointsUT witness_complex_points.cpp )
-target_link_libraries(witness_complex_pointsUT ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
-
-# Unitary tests definition and xml result file generation
-add_test(NAME witness_complex_pointsUT
- COMMAND ${CMAKE_CURRENT_BINARY_DIR}/witness_complex_pointsUT
- # XML format for Jenkins xUnit plugin
- --log_format=XML --log_sink=${CMAKE_SOURCE_DIR}/witness_complex_pointsUT.xml --log_level=test_suite --report_level=no)
+# CGAL and Eigen3 are required for Euclidean version of Witness
+if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0)
+ add_executable ( Witness_complex_test_euclidean_simple_witness_complex test_euclidean_simple_witness_complex.cpp )
+ target_link_libraries(Witness_complex_test_euclidean_simple_witness_complex ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
+ if (TBB_FOUND)
+ target_link_libraries(Witness_complex_test_euclidean_simple_witness_complex ${TBB_LIBRARIES})
+ endif(TBB_FOUND)
+ # Unitary tests definition and xml result file generation
+ add_test(NAME euclidean_simple_witness_complex
+ COMMAND ${CMAKE_CURRENT_BINARY_DIR}/Witness_complex_test_euclidean_simple_witness_complex
+ # XML format for Jenkins xUnit plugin
+ --log_format=XML --log_sink=${CMAKE_SOURCE_DIR}/Witness_complex_test_euclidean_simple_witness_complexUT.xml --log_level=test_suite --report_level=no)
+endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0)
diff --git a/src/Witness_complex/test/simple_witness_complex.cpp b/src/Witness_complex/test/simple_witness_complex.cpp
deleted file mode 100644
index 03df78ee..00000000
--- a/src/Witness_complex/test/simple_witness_complex.cpp
+++ /dev/null
@@ -1,59 +0,0 @@
-/* This file is part of the Gudhi Library. The Gudhi library
- * (Geometric Understanding in Higher Dimensions) is a generic C++
- * library for computational topology.
- *
- * Author(s): Siargey Kachanovich
- *
- * Copyright (C) 2016 INRIA Sophia Antipolis-Méditerranée (France)
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#define BOOST_TEST_DYN_LINK
-#define BOOST_TEST_MODULE "simple_witness_complex"
-#include <boost/test/unit_test.hpp>
-#include <boost/mpl/list.hpp>
-
-#include <gudhi/Simplex_tree.h>
-#include <gudhi/Witness_complex.h>
-
-#include <iostream>
-#include <ctime>
-#include <vector>
-
-typedef Gudhi::Simplex_tree<> Simplex_tree;
-typedef std::vector< Vertex_handle > typeVectorVertex;
-typedef Gudhi::witness_complex::Witness_complex<Simplex_tree> WitnessComplex;
-
-BOOST_AUTO_TEST_CASE(simple_witness_complex) {
- Simplex_tree complex;
- std::vector< typeVectorVertex > knn;
-
- knn.push_back({1, 0, 5, 2, 6, 3, 4});
- knn.push_back({2, 6, 4, 5, 0, 1, 3});
- knn.push_back({3, 4, 2, 1, 5, 6, 0});
- knn.push_back({4, 2, 1, 3, 5, 6, 0});
- knn.push_back({5, 1, 6, 0, 2, 3, 4});
- knn.push_back({6, 0, 5, 2, 1, 3, 4});
- knn.push_back({0, 5, 6, 1, 2, 3, 4});
- knn.push_back({2, 6, 4, 5, 3, 1, 0});
- knn.push_back({1, 2, 5, 4, 3, 6, 0});
- knn.push_back({3, 4, 0, 6, 5, 1, 2});
- knn.push_back({5, 0, 1, 3, 6, 2, 4});
- knn.push_back({5, 6, 1, 0, 2, 3, 4});
- knn.push_back({1, 6, 0, 5, 2, 3, 4});
- WitnessComplex witnessComplex(knn, 7, 7, complex);
-
- BOOST_CHECK(witnessComplex.is_witness_complex(knn, false));
-}
diff --git a/src/Witness_complex/test/test_euclidean_simple_witness_complex.cpp b/src/Witness_complex/test/test_euclidean_simple_witness_complex.cpp
new file mode 100644
index 00000000..62fd1157
--- /dev/null
+++ b/src/Witness_complex/test/test_euclidean_simple_witness_complex.cpp
@@ -0,0 +1,135 @@
+#define BOOST_TEST_DYN_LINK
+#define BOOST_TEST_MODULE "euclidean_simple_witness_complex"
+#include <boost/test/unit_test.hpp>
+#include <boost/mpl/list.hpp>
+
+#include <CGAL/Epick_d.h>
+
+#include <gudhi/Simplex_tree.h>
+
+#include <gudhi/Witness_complex.h>
+#include <gudhi/Euclidean_witness_complex.h>
+#include <gudhi/Strong_witness_complex.h>
+#include <gudhi/Euclidean_strong_witness_complex.h>
+
+#include <gudhi/Kd_tree_search.h>
+
+#include <iostream>
+#include <ctime>
+#include <vector>
+
+typedef Gudhi::Simplex_tree<> Simplex_tree;
+typedef typename Simplex_tree::Vertex_handle Vertex_handle;
+typedef std::vector< Vertex_handle > typeVectorVertex;
+typedef CGAL::Epick_d<CGAL::Dynamic_dimension_tag> Kernel;
+typedef typename Kernel::FT FT;
+typedef typename Kernel::Point_d Point_d;
+typedef Gudhi::witness_complex::Euclidean_witness_complex<Kernel> EuclideanWitnessComplex;
+typedef Gudhi::witness_complex::Euclidean_strong_witness_complex<Kernel> EuclideanStrongWitnessComplex;
+
+typedef std::vector<Point_d> Point_range;
+typedef Gudhi::spatial_searching::Kd_tree_search<Kernel, Point_range> Kd_tree;
+typedef Kd_tree::INS_range Nearest_landmark_range;
+typedef std::vector<Nearest_landmark_range> Nearest_landmark_table;
+typedef Gudhi::witness_complex::Witness_complex<Nearest_landmark_table> WitnessComplex;
+typedef Gudhi::witness_complex::Strong_witness_complex<Nearest_landmark_table> StrongWitnessComplex;
+
+
+/* All landmarks and witnesses are taken on the grid in the following manner.
+ LWLWL
+ WW.WW
+ L...L
+ WW.WW
+ LWLWL
+
+ Witness complex consists of 8 vertices, 12 edges and 4 triangles
+ */
+
+BOOST_AUTO_TEST_CASE(simple_witness_complex) {
+ Simplex_tree complex, relaxed_complex, strong_relaxed_complex, strong_relaxed_complex2;
+ Simplex_tree complex_ne, relaxed_complex_ne, strong_relaxed_complex_ne, strong_relaxed_complex2_ne;
+
+ Point_range witnesses, landmarks;
+
+ landmarks.push_back(Point_d(std::vector<FT>{-2,-2}));
+ landmarks.push_back(Point_d(std::vector<FT>{-2, 0}));
+ landmarks.push_back(Point_d(std::vector<FT>{-2, 2}));
+ landmarks.push_back(Point_d(std::vector<FT>{ 0,-2}));
+ landmarks.push_back(Point_d(std::vector<FT>{ 0, 2}));
+ landmarks.push_back(Point_d(std::vector<FT>{ 2,-2}));
+ landmarks.push_back(Point_d(std::vector<FT>{ 2, 0}));
+ landmarks.push_back(Point_d(std::vector<FT>{ 2, 2}));
+ witnesses.push_back(Point_d(std::vector<FT>{-2,-1}));
+ witnesses.push_back(Point_d(std::vector<FT>{-2, 1}));
+ witnesses.push_back(Point_d(std::vector<FT>{-1,-2}));
+ witnesses.push_back(Point_d(std::vector<FT>{-1,-1}));
+ witnesses.push_back(Point_d(std::vector<FT>{-1, 1}));
+ witnesses.push_back(Point_d(std::vector<FT>{-1, 2}));
+ witnesses.push_back(Point_d(std::vector<FT>{ 1,-2}));
+ witnesses.push_back(Point_d(std::vector<FT>{ 1,-1}));
+ witnesses.push_back(Point_d(std::vector<FT>{ 1, 1}));
+ witnesses.push_back(Point_d(std::vector<FT>{ 1, 2}));
+ witnesses.push_back(Point_d(std::vector<FT>{ 2,-1}));
+ witnesses.push_back(Point_d(std::vector<FT>{ 2, 1}));
+
+ Kd_tree landmark_tree(landmarks);
+ Nearest_landmark_table nearest_landmark_table;
+ for (auto w: witnesses)
+ nearest_landmark_table.push_back(landmark_tree.query_incremental_nearest_neighbors(w));
+
+ // Weak witness complex: Euclidean version
+ EuclideanWitnessComplex eucl_witness_complex(landmarks,
+ witnesses);
+ eucl_witness_complex.create_complex(complex, 0);
+
+ std::cout << "complex.num_simplices() = " << complex.num_simplices() << std::endl;
+ BOOST_CHECK(complex.num_simplices() == 24);
+
+ eucl_witness_complex.create_complex(relaxed_complex, 8.01);
+
+ std::cout << "relaxed_complex.num_simplices() = " << relaxed_complex.num_simplices() << std::endl;
+ BOOST_CHECK(relaxed_complex.num_simplices() == 239);
+ // The corner simplex {0,2,5,7} and its cofaces are missing.
+
+ // Weak witness complex: non-Euclidean version
+ WitnessComplex witness_complex(nearest_landmark_table);
+ witness_complex.create_complex(complex_ne, 0);
+
+ std::cout << "complex.num_simplices() = " << complex_ne.num_simplices() << std::endl;
+ BOOST_CHECK(complex_ne.num_simplices() == 24);
+
+ witness_complex.create_complex(relaxed_complex_ne, 8.01);
+
+ std::cout << "relaxed_complex.num_simplices() = " << relaxed_complex_ne.num_simplices() << std::endl;
+ BOOST_CHECK(relaxed_complex_ne.num_simplices() == 239);
+
+
+ // Strong complex : Euclidean version
+ EuclideanStrongWitnessComplex eucl_strong_witness_complex(landmarks,
+ witnesses);
+
+ eucl_strong_witness_complex.create_complex(strong_relaxed_complex, 9.1);
+ eucl_strong_witness_complex.create_complex(strong_relaxed_complex2, 9.1, 2);
+
+ std::cout << "strong_relaxed_complex.num_simplices() = " << strong_relaxed_complex.num_simplices() << std::endl;
+ BOOST_CHECK(strong_relaxed_complex.num_simplices() == 239);
+
+ std::cout << "strong_relaxed_complex2.num_simplices() = " << strong_relaxed_complex2.num_simplices() << std::endl;
+ BOOST_CHECK(strong_relaxed_complex2.num_simplices() == 92);
+
+
+ // Strong complex : non-Euclidean version
+ StrongWitnessComplex strong_witness_complex(nearest_landmark_table);
+
+ strong_witness_complex.create_complex(strong_relaxed_complex_ne, 9.1);
+ strong_witness_complex.create_complex(strong_relaxed_complex2_ne, 9.1, 2);
+
+ std::cout << "strong_relaxed_complex.num_simplices() = " << strong_relaxed_complex_ne.num_simplices() << std::endl;
+ BOOST_CHECK(strong_relaxed_complex_ne.num_simplices() == 239);
+
+ std::cout << "strong_relaxed_complex2.num_simplices() = " << strong_relaxed_complex2_ne.num_simplices() << std::endl;
+ BOOST_CHECK(strong_relaxed_complex2_ne.num_simplices() == 92);
+
+
+  // 8 vertices, 28 edges, 56 triangles (= 92 simplices, matching the checks on the dimension-limited complexes above)
+}
diff --git a/src/Witness_complex/test/test_simple_witness_complex.cpp b/src/Witness_complex/test/test_simple_witness_complex.cpp
new file mode 100644
index 00000000..9e3509d3
--- /dev/null
+++ b/src/Witness_complex/test/test_simple_witness_complex.cpp
@@ -0,0 +1,53 @@
+#define BOOST_TEST_DYN_LINK
+#define BOOST_TEST_MODULE "simple_witness_complex"
+#include <boost/test/unit_test.hpp>
+#include <boost/mpl/list.hpp>
+
+#include <gudhi/Simplex_tree.h>
+
+#include <gudhi/Witness_complex.h>
+
+#include <iostream>
+#include <vector>
+#include <utility>
+
+
+BOOST_AUTO_TEST_CASE(simple_witness_complex) {
+ using Nearest_landmark_range = std::vector<std::pair<std::size_t, double>>;
+ using Nearest_landmark_table = std::vector<Nearest_landmark_range>;
+ using Witness_complex = Gudhi::witness_complex::Witness_complex<Nearest_landmark_table>;
+ using Simplex_tree = Gudhi::Simplex_tree<>;
+
+ Simplex_tree stree;
+ Nearest_landmark_table nlt;
+
+ // Example contains 5 witnesses and 5 landmarks
+ Nearest_landmark_range w0 = {std::make_pair(0, 0), std::make_pair(1, 1), std::make_pair(2, 2),
+ std::make_pair(3, 3), std::make_pair(4, 4)}; nlt.push_back(w0);
+ Nearest_landmark_range w1 = {std::make_pair(1, 0), std::make_pair(2, 1), std::make_pair(3, 2),
+ std::make_pair(4, 3), std::make_pair(0, 4)}; nlt.push_back(w1);
+ Nearest_landmark_range w2 = {std::make_pair(2, 0), std::make_pair(3, 1), std::make_pair(4, 2),
+ std::make_pair(0, 3), std::make_pair(1, 4)}; nlt.push_back(w2);
+ Nearest_landmark_range w3 = {std::make_pair(3, 0), std::make_pair(4, 1), std::make_pair(0, 2),
+ std::make_pair(1, 3), std::make_pair(2, 4)}; nlt.push_back(w3);
+ Nearest_landmark_range w4 = {std::make_pair(4, 0), std::make_pair(0, 1), std::make_pair(1, 2),
+ std::make_pair(2, 3), std::make_pair(3, 4)}; nlt.push_back(w4);
+
+ Witness_complex witness_complex(nlt);
+ BOOST_CHECK(witness_complex.create_complex(stree, 4.1));
+
+ std::cout << "Number of simplices: " << stree.num_simplices() << std::endl;
+ BOOST_CHECK(stree.num_simplices() == 31);
+
+ // Check when complex not empty
+ BOOST_CHECK(!witness_complex.create_complex(stree, 4.1));
+
+ // Check when max_alpha_square negative
+ Simplex_tree stree2;
+ BOOST_CHECK(!witness_complex.create_complex(stree2, -0.02));
+
+ witness_complex.create_complex(stree2, 4.1, 2);
+ std::cout << "Number of simplices: " << stree2.num_simplices() << std::endl;
+ BOOST_CHECK(stree2.num_simplices() == 25);
+
+}
diff --git a/src/Witness_complex/test/witness_complex_points.cpp b/src/Witness_complex/test/witness_complex_points.cpp
deleted file mode 100644
index bd3df604..00000000
--- a/src/Witness_complex/test/witness_complex_points.cpp
+++ /dev/null
@@ -1,64 +0,0 @@
-/* This file is part of the Gudhi Library. The Gudhi library
- * (Geometric Understanding in Higher Dimensions) is a generic C++
- * library for computational topology.
- *
- * Author(s): Siargey Kachanovich
- *
- * Copyright (C) 2016 INRIA Sophia Antipolis-Méditerranée (France)
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#define BOOST_TEST_DYN_LINK
-#define BOOST_TEST_MODULE "witness_complex_points"
-#include <boost/test/unit_test.hpp>
-#include <boost/mpl/list.hpp>
-
-#include <gudhi/Simplex_tree.h>
-#include <gudhi/Witness_complex.h>
-#include <gudhi/Landmark_choice_by_random_point.h>
-#include <gudhi/Landmark_choice_by_furthest_point.h>
-
-#include <iostream>
-#include <vector>
-
-typedef std::vector<double> Point;
-typedef std::vector< Vertex_handle > typeVectorVertex;
-typedef Gudhi::Simplex_tree<> Simplex_tree;
-typedef Gudhi::witness_complex::Witness_complex<Simplex_tree> WitnessComplex;
-
-BOOST_AUTO_TEST_CASE(witness_complex_points) {
- std::vector< typeVectorVertex > knn;
- std::vector< Point > points;
- // Add grid points as witnesses
- for (double i = 0; i < 10; i += 1.0)
- for (double j = 0; j < 10; j += 1.0)
- for (double k = 0; k < 10; k += 1.0)
- points.push_back(Point({i, j, k}));
-
- bool b_print_output = false;
- // First test: random choice
- Simplex_tree complex1;
- Gudhi::witness_complex::landmark_choice_by_random_point(points, 100, knn);
- assert(!knn.empty());
- WitnessComplex witnessComplex1(knn, 100, 3, complex1);
- BOOST_CHECK(witnessComplex1.is_witness_complex(knn, b_print_output));
-
- // Second test: furthest choice
- knn.clear();
- Simplex_tree complex2;
- Gudhi::witness_complex::landmark_choice_by_furthest_point(points, 100, knn);
- WitnessComplex witnessComplex2(knn, 100, 3, complex2);
- BOOST_CHECK(witnessComplex2.is_witness_complex(knn, b_print_output));
-}
diff --git a/src/cmake/modules/FindGMPXX.cmake b/src/cmake/modules/FindGMPXX.cmake
index 277e4b19..dda302c0 100644
--- a/src/cmake/modules/FindGMPXX.cmake
+++ b/src/cmake/modules/FindGMPXX.cmake
@@ -33,6 +33,10 @@ if(GMP_FOUND)
DOC "Path to the GMPXX library"
)
+ if ( GMPXX_LIBRARIES )
+ get_filename_component(GMPXX_LIBRARIES_DIR ${GMPXX_LIBRARIES} PATH CACHE )
+ endif()
+
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GMPXX "DEFAULT_MSG" GMPXX_LIBRARIES GMPXX_INCLUDE_DIR )
diff --git a/src/cmake/modules/FindQGLViewer.cmake b/src/cmake/modules/FindQGLViewer.cmake
index 1f3dbc1f..56c1b3cf 100644
--- a/src/cmake/modules/FindQGLViewer.cmake
+++ b/src/cmake/modules/FindQGLViewer.cmake
@@ -15,7 +15,7 @@ find_path(QGLVIEWER_INCLUDE_DIR
)
find_library(QGLVIEWER_LIBRARY_RELEASE
- NAMES qglviewer-qt4 QGLViewer-qt4 qglviewer QGLViewer QGLViewer2
+ NAMES qglviewer-qt5 QGLViewer-qt5 qglviewer QGLViewer QGLViewer2
PATHS /usr/lib
/usr/local/lib
ENV QGLVIEWERROOT
diff --git a/src/cmake/modules/GUDHI_third_party_libraries.txt b/src/cmake/modules/GUDHI_third_party_libraries.txt
new file mode 100644
index 00000000..1974f70f
--- /dev/null
+++ b/src/cmake/modules/GUDHI_third_party_libraries.txt
@@ -0,0 +1,107 @@
+# This file manages the third-party libraries required by GUDHI
+
+find_package(Boost REQUIRED COMPONENTS system filesystem unit_test_framework chrono timer date_time program_options thread)
+
+if(NOT Boost_FOUND)
+ message(FATAL_ERROR "NOTICE: This program requires Boost and will not be compiled.")
+endif(NOT Boost_FOUND)
+
+find_package(GMP)
+if(GMP_FOUND)
+ message(STATUS "GMP_LIBRARIES = ${GMP_LIBRARIES}")
+ INCLUDE_DIRECTORIES(${GMP_INCLUDE_DIR})
+ find_package(GMPXX)
+ if(GMPXX_FOUND)
+ message(STATUS "GMPXX_LIBRARIES = ${GMPXX_LIBRARIES}")
+ INCLUDE_DIRECTORIES(${GMPXX_INCLUDE_DIR})
+ endif()
+endif()
+
+# In CMakeLists.txt, when include(${CGAL_USE_FILE}) is called, CMAKE_CXX_FLAGS are overwritten.
+# cf. http://doc.cgal.org/latest/Manual/installation.html#title40
+# A workaround is to include(${CGAL_USE_FILE}) before adding "-std=c++11".
+# A fix would be to use https://cmake.org/cmake/help/v3.1/prop_gbl/CMAKE_CXX_KNOWN_FEATURES.html
+# or even better https://cmake.org/cmake/help/v3.1/variable/CMAKE_CXX_STANDARD.html
+# but that requires CMake version 3.1 at least.
+find_package(CGAL)
+
+# Only CGAL versions 4.4 or later support what Gudhi uses from CGAL
+if (CGAL_VERSION VERSION_LESS 4.4.0)
+ message("CGAL version ${CGAL_VERSION} is considered too old to be used by Gudhi.")
+ unset(CGAL_FOUND)
+endif()
+if(CGAL_FOUND)
+ message(STATUS "CGAL version: ${CGAL_VERSION}.")
+ include( ${CGAL_USE_FILE} )
+
+ if (NOT CGAL_VERSION VERSION_LESS 4.8.0)
+ # HACK to detect CGAL version 4.8.0
+    # CGAL versions 4.8, 4.8.1 and 4.8.2 are all identified as version 4.8.1000
+ # cf. https://github.com/CGAL/cgal/issues/1559
+    # Limit the HACK to CGAL versions between 4.8 and 4.9 because of the version.h read below
+ if (NOT CGAL_VERSION VERSION_GREATER 4.9.0)
+ foreach(CGAL_INCLUDE_DIR ${CGAL_INCLUDE_DIRS})
+ if (EXISTS "${CGAL_INCLUDE_DIR}/CGAL/version.h")
+ FILE(READ "${CGAL_INCLUDE_DIR}/CGAL/version.h" contents)
+ STRING(REGEX REPLACE "\n" ";" contents "${contents}")
+ foreach(Line ${contents})
+ if("${Line}" STREQUAL "#define CGAL_VERSION 4.8")
+ set(CGAL_VERSION 4.8.0)
+ message (">>>>> HACK CGAL version to ${CGAL_VERSION}")
+ endif("${Line}" STREQUAL "#define CGAL_VERSION 4.8")
+ endforeach(Line ${contents})
+ endif (EXISTS "${CGAL_INCLUDE_DIR}/CGAL/version.h")
+ endforeach(CGAL_INCLUDE_DIR ${CGAL_INCLUDE_DIRS})
+ endif(NOT CGAL_VERSION VERSION_GREATER 4.9.0)
+
+ # For dev version
+ include_directories(BEFORE "src/common/include/gudhi_patches")
+ # For user version
+ include_directories(BEFORE "include/gudhi_patches")
+ endif()
+endif()
+
+# Find the TBB package for parallel sort - optional.
+set(TBB_FIND_QUIETLY ON)
+find_package(TBB)
+if (TBB_FOUND)
+ include(${TBB_USE_FILE})
+ message("TBB found in ${TBB_LIBRARY_DIRS}")
+ add_definitions(-DGUDHI_USE_TBB)
+endif()
+
+set(CGAL_WITH_EIGEN3_VERSION 0.0.0)
+find_package(Eigen3 3.1.0)
+if (EIGEN3_FOUND)
+ message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.")
+ include( ${EIGEN3_USE_FILE} )
+ set(CGAL_WITH_EIGEN3_VERSION ${CGAL_VERSION})
+endif (EIGEN3_FOUND)
+
+# Required programs for unit testing purposes
+FIND_PROGRAM( GCOVR_PATH gcovr )
+if (GCOVR_PATH)
+ message("gcovr found in ${GCOVR_PATH}")
+endif()
+# Required programs for unit testing purposes
+FIND_PROGRAM( GPROF_PATH gprof )
+if (GPROF_PATH)
+ message("gprof found in ${GPROF_PATH}")
+endif()
+FIND_PROGRAM( DIFF_PATH diff )
+if (DIFF_PATH)
+ message("diff found in ${DIFF_PATH}")
+endif()
+
+# BOOST ISSUE result_of vs C++11
+add_definitions(-DBOOST_RESULT_OF_USE_DECLTYPE)
+# BOOST ISSUE with Libraries name resolution under Windows
+add_definitions(-DBOOST_ALL_NO_LIB)
+# problem with Visual Studio link on Boost program_options
+add_definitions( -DBOOST_ALL_DYN_LINK )
+
+INCLUDE_DIRECTORIES(${Boost_INCLUDE_DIRS})
+LINK_DIRECTORIES(${Boost_LIBRARY_DIRS})
+
+message(STATUS "boost include dirs:" ${Boost_INCLUDE_DIRS})
+message(STATUS "boost library dirs:" ${Boost_LIBRARY_DIRS})
diff --git a/src/cmake/modules/GUDHI_user_version_target.txt b/src/cmake/modules/GUDHI_user_version_target.txt
index 0d0b8767..b6a60dd1 100644
--- a/src/cmake/modules/GUDHI_user_version_target.txt
+++ b/src/cmake/modules/GUDHI_user_version_target.txt
@@ -1,8 +1,6 @@
# Some functionnalities requires CMake 2.8.11 minimum
if (NOT CMAKE_VERSION VERSION_LESS 2.8.11)
- string(TIMESTAMP DATE_AND_TIME "%Y-%m-%d-%H-%M-%S")
-
# Definition of the custom target user_version
add_custom_target(user_version)
@@ -11,6 +9,7 @@ if (NOT CMAKE_VERSION VERSION_LESS 2.8.11)
set(GUDHI_USER_VERSION_DIR ${CMAKE_CURRENT_BINARY_DIR}/${USER_VERSION_DIR})
else()
# set the GUDHI_USER_VERSION_DIR with timestamp and Gudhi version number
+ string(TIMESTAMP DATE_AND_TIME "%Y-%m-%d-%H-%M-%S")
set(GUDHI_USER_VERSION_DIR ${CMAKE_CURRENT_BINARY_DIR}/${DATE_AND_TIME}_GUDHI_${GUDHI_VERSION})
endif()
@@ -21,6 +20,8 @@ if (NOT CMAKE_VERSION VERSION_LESS 2.8.11)
COMMENT "user_version creation in ${GUDHI_USER_VERSION_DIR}")
add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy ${CMAKE_SOURCE_DIR}/Conventions.txt ${GUDHI_USER_VERSION_DIR}/Conventions.txt)
+ add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
copy ${CMAKE_SOURCE_DIR}/README ${GUDHI_USER_VERSION_DIR}/README)
add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
copy ${CMAKE_SOURCE_DIR}/COPYING ${GUDHI_USER_VERSION_DIR}/COPYING)
@@ -40,76 +41,53 @@ if (NOT CMAKE_VERSION VERSION_LESS 2.8.11)
add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
copy_directory ${CMAKE_SOURCE_DIR}/biblio ${GUDHI_USER_VERSION_DIR}/biblio)
add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy_directory ${CMAKE_SOURCE_DIR}/src/cython ${GUDHI_USER_VERSION_DIR}/cython)
+ add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
copy_directory ${CMAKE_SOURCE_DIR}/data ${GUDHI_USER_VERSION_DIR}/data)
add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
copy_directory ${CMAKE_SOURCE_DIR}/src/cmake ${GUDHI_USER_VERSION_DIR}/cmake)
add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy_directory ${CMAKE_SOURCE_DIR}/src/debian ${GUDHI_USER_VERSION_DIR}/debian)
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
copy_directory ${CMAKE_SOURCE_DIR}/src/GudhUI ${GUDHI_USER_VERSION_DIR}/GudhUI)
- set(GUDHI_MODULES "common;Alpha_complex;Bitmap_cubical_complex;Contraction;Hasse_complex;Persistent_cohomology;Simplex_tree;Skeleton_blocker;Witness_complex;Gudhi_stat")
-
+ set(GUDHI_MODULES "common;Alpha_complex;Bitmap_cubical_complex;Bottleneck_distance;Contraction;Gudhi_stat;Hasse_complex;Persistent_cohomology;Rips_complex;Simplex_tree;Skeleton_blocker;Spatial_searching;Subsampling;Tangential_complex;Witness_complex")
+ set(GUDHI_DIRECTORIES "doc;example;concept")
+ set(GUDHI_INCLUDE_DIRECTORIES "include/gudhi;include/gudhi_patches")
+
foreach(GUDHI_MODULE ${GUDHI_MODULES})
- # doc files
- file(GLOB GUDHI_DOC_FILES ${CMAKE_SOURCE_DIR}/src/${GUDHI_MODULE}/doc/*)
-
- foreach(GUDHI_DOC_FILE ${GUDHI_DOC_FILES})
- get_filename_component(GUDHI_DOC_FILE_NAME ${GUDHI_DOC_FILE} NAME)
- # GUDHI_DOC_FILE can be a file or a directory
- if(IS_DIRECTORY ${GUDHI_DOC_FILE})
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy_directory ${GUDHI_DOC_FILE} ${GUDHI_USER_VERSION_DIR}/doc/${GUDHI_MODULE}/${GUDHI_DOC_FILE_NAME})
- else()
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy ${GUDHI_DOC_FILE} ${GUDHI_USER_VERSION_DIR}/doc/${GUDHI_MODULE}/${GUDHI_DOC_FILE_NAME})
- endif()
- endforeach()
-
- # example files
- file(GLOB GUDHI_EXAMPLE_FILES ${CMAKE_SOURCE_DIR}/src/${GUDHI_MODULE}/example/*)
-
- foreach(GUDHI_EXAMPLE_FILE ${GUDHI_EXAMPLE_FILES})
- get_filename_component(GUDHI_EXAMPLE_FILE_NAME ${GUDHI_EXAMPLE_FILE} NAME)
- # GUDHI_EXAMPLE_FILE can be a file or a directory
- if(IS_DIRECTORY ${GUDHI_EXAMPLE_FILE})
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy_directory ${GUDHI_EXAMPLE_FILE} ${GUDHI_USER_VERSION_DIR}/example/${GUDHI_MODULE}/${GUDHI_EXAMPLE_FILE_NAME})
- else()
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy ${GUDHI_EXAMPLE_FILE} ${GUDHI_USER_VERSION_DIR}/example/${GUDHI_MODULE}/${GUDHI_EXAMPLE_FILE_NAME})
- endif()
- endforeach()
-
- # include files
- file(GLOB GUDHI_INCLUDE_FILES ${CMAKE_SOURCE_DIR}/src/${GUDHI_MODULE}/include/gudhi/*)
-
- foreach(GUDHI_INCLUDE_FILE ${GUDHI_INCLUDE_FILES})
- get_filename_component(GUDHI_INCLUDE_FILE_NAME ${GUDHI_INCLUDE_FILE} NAME)
- # GUDHI_INCLUDE_FILE can be a file or a directory
- if(IS_DIRECTORY ${GUDHI_INCLUDE_FILE})
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy_directory ${GUDHI_INCLUDE_FILE} ${GUDHI_USER_VERSION_DIR}/include/gudhi/${GUDHI_INCLUDE_FILE_NAME})
- else()
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy ${GUDHI_INCLUDE_FILE} ${GUDHI_USER_VERSION_DIR}/include/gudhi/${GUDHI_INCLUDE_FILE_NAME})
- endif()
- endforeach()
-
- # concept files
- file(GLOB GUDHI_CONCEPT_FILES ${CMAKE_SOURCE_DIR}/src/${GUDHI_MODULE}/concept/*.h)
-
- foreach(GUDHI_CONCEPT_FILE ${GUDHI_CONCEPT_FILES})
- get_filename_component(GUDHI_CONCEPT_FILE_NAME ${GUDHI_CONCEPT_FILE} NAME)
- # GUDHI_CONCEPT_FILE can be a file or a directory
- if(IS_DIRECTORY ${GUDHI_CONCEPT_FILE})
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy_directory ${GUDHI_CONCEPT_FILE} ${GUDHI_USER_VERSION_DIR}/concept/${GUDHI_MODULE}/${GUDHI_CONCEPT_FILE_NAME})
- else()
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy ${GUDHI_CONCEPT_FILE} ${GUDHI_USER_VERSION_DIR}/concept/${GUDHI_MODULE}/${GUDHI_CONCEPT_FILE_NAME})
- endif()
- endforeach()
- endforeach()
+ foreach(GUDHI_DIRECTORY ${GUDHI_DIRECTORIES})
+ # Find files
+ file(GLOB GUDHI_FILES ${CMAKE_SOURCE_DIR}/src/${GUDHI_MODULE}/${GUDHI_DIRECTORY}/*)
+
+ foreach(GUDHI_FILE ${GUDHI_FILES})
+ get_filename_component(GUDHI_FILE_NAME ${GUDHI_FILE} NAME)
+ # GUDHI_FILE can be a file or a directory
+ if(IS_DIRECTORY ${GUDHI_FILE})
+ add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy_directory ${GUDHI_FILE} ${GUDHI_USER_VERSION_DIR}/${GUDHI_DIRECTORY}/${GUDHI_MODULE}/${GUDHI_FILE_NAME})
+ else()
+ add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy ${GUDHI_FILE} ${GUDHI_USER_VERSION_DIR}/${GUDHI_DIRECTORY}/${GUDHI_MODULE}/${GUDHI_FILE_NAME})
+ endif()
+ endforeach()
+ endforeach(GUDHI_DIRECTORY ${GUDHI_DIRECTORIES})
+
+ foreach(GUDHI_INCLUDE_DIRECTORY ${GUDHI_INCLUDE_DIRECTORIES})
+ # include files
+ file(GLOB GUDHI_INCLUDE_FILES ${CMAKE_SOURCE_DIR}/src/${GUDHI_MODULE}/${GUDHI_INCLUDE_DIRECTORY}/*)
+
+ foreach(GUDHI_INCLUDE_FILE ${GUDHI_INCLUDE_FILES})
+ get_filename_component(GUDHI_INCLUDE_FILE_NAME ${GUDHI_INCLUDE_FILE} NAME)
+ # GUDHI_INCLUDE_FILE can be a file or a directory
+ if(IS_DIRECTORY ${GUDHI_INCLUDE_FILE})
+ add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy_directory ${GUDHI_INCLUDE_FILE} ${GUDHI_USER_VERSION_DIR}/${GUDHI_INCLUDE_DIRECTORY}/${GUDHI_INCLUDE_FILE_NAME})
+ else()
+ add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy ${GUDHI_INCLUDE_FILE} ${GUDHI_USER_VERSION_DIR}/${GUDHI_INCLUDE_DIRECTORY}/${GUDHI_INCLUDE_FILE_NAME})
+ endif()
+ endforeach()
+ endforeach(GUDHI_INCLUDE_DIRECTORY ${GUDHI_INCLUDE_DIRECTORIES})
+
+ endforeach(GUDHI_MODULE ${GUDHI_MODULES})
endif()
diff --git a/src/common/doc/header.html b/src/common/doc/header.html
index a6f3ed9c..94e641b2 100644
--- a/src/common/doc/header.html
+++ b/src/common/doc/header.html
@@ -9,7 +9,7 @@
<!--BEGIN PROJECT_NAME--><title>$projectname: $title</title><!--END PROJECT_NAME-->
<!--BEGIN !PROJECT_NAME--><title>$title</title><!--END !PROJECT_NAME-->
<!-- GUDHI website css for header BEGIN -->
-<link rel="stylesheet" type="text/css" href="http://gudhi.gforge.inria.fr/assets/css/styles_feeling_responsive.css" />
+<link rel="stylesheet" type="text/css" href="http://pages.saclay.inria.fr/vincent.rouvreau/gudhi/gudhi-doc-2.0.0/assets/css/styles_feeling_responsive.css" />
<!-- GUDHI website css for header END -->
<link href="$relpath^tabs.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="$relpath^jquery.js"></script>
@@ -25,6 +25,13 @@ $extrastylesheet
<!-- GUDHI website header BEGIN -->
<div id="navigation" class="sticky">
<nav class="top-bar" role="navigation" data-topbar>
+ <ul class="title-area">
+ <li class="name">
+ <h1 class="show-for-small-only"><a href="http://gudhi.gforge.inria.fr" class="icon-tree"> GUDHI C++ library</a></h1>
+ </li>
+ <!-- Remove the class "menu-icon" to get rid of menu icon. Take out "Menu" to just have icon alone -->
+ <li class="toggle-topbar menu-icon"><a href="#"><span>Navigation</span></a></li>
+ </ul>
<section class="top-bar-section">
<ul class="right">
<li class="divider"></li>
@@ -37,7 +44,7 @@ $extrastylesheet
<a href="#">Project</a>
<ul class="dropdown">
<li><a href="http://gudhi.gforge.inria.fr/people/">People</a></li>
- <li><a href="http://gudhi.gforge.inria.fr/getinvolved/">Get involved</a></li>
+ <li><a href="http://gudhi.gforge.inria.fr/keepintouch/">Keep in touch</a></li>
<li><a href="http://gudhi.gforge.inria.fr/partners/">Partners and Funding</a></li>
<li><a href="http://gudhi.gforge.inria.fr/relatedprojects/">Related projects</a></li>
<li><a href="http://gudhi.gforge.inria.fr/theyaretalkingaboutus/">They are talking about us</a></li>
@@ -49,11 +56,18 @@ $extrastylesheet
<ul class="dropdown">
<li><a href="http://gudhi.gforge.inria.fr/licensing/">Licensing</a></li>
<li><a href="https://gforge.inria.fr/frs/?group_id=3865" target="_blank">Get the sources</a></li>
- <li><a href="http://gudhi.gforge.inria.fr/doc/latest/installation.html">Installation manual</a></li>
</ul>
</li>
<li class="divider"></li>
- <li><a href="http://gudhi.gforge.inria.fr/doc/latest/">Documentation</a></li>
+ <li class="has-dropdown">
+ <a href="#">Documentation</a>
+ <ul class="dropdown">
+ <li><a href="http://gudhi.gforge.inria.fr/doc/latest/">C++ documentation</a></li>
+ <li><a href="http://gudhi.gforge.inria.fr/doc/latest/installation.html">C++ installation manual</a></li>
+ <li><a href="http://gudhi.gforge.inria.fr/cython/latest/">Cython documentation</a></li>
+ <li><a href="http://gudhi.gforge.inria.fr/cython/latest/installation.html">Cython installation manual</a></li>
+ </ul>
+ </li>
<li class="divider"></li>
<li><a href="http://gudhi.gforge.inria.fr/interfaces/">Interfaces</a></li>
<li class="divider"></li>
@@ -62,7 +76,7 @@ $extrastylesheet
</nav>
</div><!-- /#navigation -->
<!-- GUDHI website header BEGIN -->
-
+
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<!--BEGIN TITLEAREA-->
diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h
index 21cf6925..e6f29fa8 100644
--- a/src/common/doc/main_page.h
+++ b/src/common/doc/main_page.h
@@ -3,7 +3,9 @@
* \image html "Gudhi_banner.png" "" width=20cm
*
* \section Introduction Introduction
- * The Gudhi library (Geometry Understanding in Higher Dimensions) is a generic open source C++ library for
+ * The GUDHI library (Geometry Understanding in Higher Dimensions) is a generic open source
+ * <a class="el" target="_blank" href="http://gudhi.gforge.inria.fr/doc/latest/">C++ library</a>, with a
+ * <a class="el" target="_blank" href="http://gudhi.gforge.inria.fr/cython/latest/">Cython interface</a>, for
* Computational Topology and Topological Data Analysis
* (<a class="el" target="_blank" href="https://en.wikipedia.org/wiki/Topological_data_analysis">TDA</a>).
* The GUDHI library intends to help the development of new algorithmic solutions in TDA and their transfer to
@@ -28,6 +30,7 @@
<b>Author:</b> Vincent Rouvreau<br>
<b>Introduced in:</b> GUDHI 1.3.0<br>
<b>Copyright:</b> GPL v3<br>
+ <b>Requires:</b> \ref cgal &ge; 4.7.0 and \ref eigen3
</td>
<td width="75%">
Alpha_complex is a simplicial complex constructed from the finite cells of a Delaunay Triangulation.<br>
@@ -55,6 +58,24 @@
<b>User manual:</b> \ref cubical_complex - <b>Reference manual:</b> Gudhi::cubical_complex::Bitmap_cubical_complex
</td>
</tr>
+ \subsection RipsComplexDataStructure Rips complex
+ \image html "rips_complex_representation.png" "Rips complex representation"
+<table border="0">
+ <tr>
+ <td width="25%">
+ <b>Author:</b> Cl&eacute;ment Maria, Pawel Dlotko, Vincent Rouvreau<br>
+ <b>Introduced in:</b> GUDHI 2.0.0<br>
+ <b>Copyright:</b> GPL v3<br>
+ </td>
+ <td width="75%">
+     Rips_complex is a simplicial complex constructed from a one-skeleton graph.<br>
+     The filtration value of each edge is computed from a user-given distance function, and edges are only
+     inserted as long as their filtration value does not exceed a user-given threshold.<br>
+ This complex can be built from a point cloud and a distance function, or from a distance matrix.<br>
+ <b>User manual:</b> \ref rips_complex - <b>Reference manual:</b> Gudhi::rips_complex::Rips_complex
+ </td>
+ </tr>
+</table>
</table>
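As a companion to the table above, a sketch of the point-cloud construction path (not part of the patch): it assumes the Rips_complex constructor taking a point range, a threshold and a distance function, and the create_complex(complex, dim_max) member described in the Rips user manual; the coordinates, the threshold 2.0 and the expansion dimension 2 are illustrative choices.

#include <gudhi/Rips_complex.h>
#include <gudhi/Simplex_tree.h>

#include <cmath>
#include <cstddef>
#include <vector>

int main() {
  using Point = std::vector<double>;
  using Simplex_tree = Gudhi::Simplex_tree<>;
  using Filtration_value = Simplex_tree::Filtration_value;
  using Rips_complex = Gudhi::rips_complex::Rips_complex<Filtration_value>;

  std::vector<Point> points = {{0., 0.}, {1., 0.}, {0., 1.}, {1., 1.}};
  // Hand-written Euclidean distance, so the sketch does not depend on a particular distance functor.
  auto distance = [](Point const& a, Point const& b) {
    double s = 0.;
    for (std::size_t i = 0; i < a.size(); ++i) s += (a[i] - b[i]) * (a[i] - b[i]);
    return std::sqrt(s);
  };

  Rips_complex rips(points, 2. /* threshold on edge length */, distance);
  Simplex_tree stree;
  rips.create_complex(stree, 2 /* maximal dimension of the expansion */);
  return 0;
}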
\subsection SimplexTreeDataStructure Simplex tree
\image html "Simplex_tree_representation.png" "Simplex tree representation"
@@ -93,6 +114,26 @@
</td>
</tr>
</table>
+ \subsection TangentialComplexDataStructure Tangential complex
+ \image html "tc_examples.png" "Tangential complex representation"
+<table border="0">
+ <tr>
+ <td width="25%">
+ <b>Author:</b> Cl&eacute;ment Jamin<br>
+ <b>Introduced in:</b> GUDHI 2.0.0<br>
+ <b>Copyright:</b> GPL v3<br>
+ <b>Requires:</b> \ref cgal &ge; 4.8.1 and \ref eigen3
+ </td>
+ <td width="75%">
+ A Tangential Delaunay complex is a <a target="_blank" href="https://en.wikipedia.org/wiki/Simplicial_complex">simplicial complex</a>
+ designed to reconstruct a \f$ k \f$-dimensional manifold embedded in \f$ d \f$-dimensional Euclidean space.
+ The input is a point sample coming from an unknown manifold.
+ The running time depends only linearly on the extrinsic dimension \f$ d \f$
+ and exponentially on the intrinsic dimension \f$ k \f$.<br>
+ <b>User manual:</b> \ref tangential_complex - <b>Reference manual:</b> Gudhi::tangential_complex::Tangential_complex
+ </td>
+ </tr>
+</table>
\subsection WitnessComplexDataStructure Witness complex
\image html "Witness_complex_representation.png" "Witness complex representation"
<table border="0">
@@ -101,6 +142,7 @@
<b>Author:</b> Siargey Kachanovich<br>
<b>Introduced in:</b> GUDHI 1.3.0<br>
<b>Copyright:</b> GPL v3<br>
+ <b>Euclidean version requires:</b> \ref cgal &ge; 4.6.0 and \ref eigen3
</td>
<td width="75%">
Witness complex \f$ Wit(W,L) \f$ is a simplicial complex defined on two sets of points in \f$\mathbb{R}^D\f$.
@@ -111,6 +153,26 @@
</table>
\section Toolbox Toolbox
+ \subsection BottleneckDistanceToolbox Bottleneck distance
+ \image html "perturb_pd.png" "Bottleneck distance is the length of the longest edge"
+<table border="0">
+ <tr>
+ <td width="25%">
+ <b>Author:</b> Fran&ccedil;ois Godi<br>
+ <b>Introduced in:</b> GUDHI 2.0.0<br>
+ <b>Copyright:</b> GPL v3<br>
+ <b>Requires:</b> \ref cgal &ge; 4.8.1 and \ref eigen3
+ </td>
+ <td width="75%">
+    Bottleneck distance measures the similarity between two persistence diagrams.
+    It is the shortest distance b for which there exists a perfect matching between
+    the points of the two diagrams (plus all the diagonal points) such that
+    any pair of matched points is at distance at most b.
+ <br>
+ <b>User manual:</b> \ref bottleneck_distance
+ </td>
+ </tr>
+</table>
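A minimal sketch of computing the distance between two diagrams given as ranges of (birth, death) pairs. The header path and the bottleneck_distance call are assumptions taken from the \ref bottleneck_distance user manual; the diagram values are made up.

#include <gudhi/Bottleneck.h>

#include <iostream>
#include <utility>
#include <vector>

int main() {
  std::vector<std::pair<double, double>> diag1 = {{2.7, 3.7}, {9.6, 14.}, {34.2, 34.974}};
  std::vector<std::pair<double, double>> diag2 = {{2.8, 4.45}, {9.5, 14.1}};
  // Shortest b such that a perfect matching (diagonal points included) moves no point by more than b.
  double b = Gudhi::persistence_diagram::bottleneck_distance(diag1, diag2);
  std::cout << "Bottleneck distance = " << b << std::endl;
  return 0;
}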
\subsection ContractionToolbox Contraction
\image html "sphere_contraction_representation.png" "Sphere contraction example"
<table border="0">
@@ -153,11 +215,11 @@
</table>
*/
-/*! \page installation Gudhi installation
+/*! \page installation GUDHI installation
* \tableofcontents
- * As Gudhi is a header only library, there is no need to install the library.
+ * As GUDHI is a header only library, there is no need to install the library.
*
- * Examples of Gudhi headers inclusion can be found in \ref demos.
+ * Examples of GUDHI headers inclusion can be found in \ref demos.
*
* \section compiling Compiling
* The library uses c++11 and requires <a target="_blank" href="http://www.boost.org/">Boost</a> with version 1.48.0 or
@@ -176,39 +238,54 @@ make \endverbatim
* To test your build, run the following command in a terminal:
* \verbatim make test \endverbatim
*
+ * \subsection documentationgeneration Documentation
+ * To generate the documentation, <a target="_blank" href="http://www.doxygen.org/">Doxygen</a> is required.
+ * Run the following command in a terminal:
+\verbatim
+make doxygen
+# Documentation will be generated in the folder YYYY-MM-DD-hh-mm-ss_GUDHI_X.Y.Z/doc/html/
+# You can customize the directory name by calling `cmake -DUSER_VERSION_DIR=/my/custom/folder`
+\endverbatim
+ *
* \section optionallibrary Optional third-party library
- * \subsection gmp GMP:
+ * \subsection gmp GMP
* The multi-field persistent homology algorithm requires GMP which is a free library for arbitrary-precision
* arithmetic, operating on signed integers, rational numbers, and floating point numbers.
*
* The following example requires the <a target="_blank" href="http://gmplib.org/">GNU Multiple Precision Arithmetic
* Library</a> (GMP) and will not be built if GMP is not installed:
- * \li <a href="_persistent_cohomology_2performance_rips_persistence_8cpp-example.html">
- * Persistent_cohomology/performance_rips_persistence.cpp</a>
* \li <a href="_persistent_cohomology_2rips_multifield_persistence_8cpp-example.html">
* Persistent_cohomology/rips_multifield_persistence.cpp</a>
*
* Having GMP version 4.2 or higher installed is recommended.
*
- * \subsection cgal CGAL:
- * The \ref alpha_complex data structure and few examples requires CGAL, which is a C++ library which provides easy
- * access to efficient and reliable geometric algorithms.
+ * \subsection cgal CGAL
+ * The \ref alpha_complex data structure, \ref bottleneck_distance, and a few examples require CGAL, which is a C++
+ * library that provides easy access to efficient and reliable geometric algorithms.
+ *
+ * \note There is no need to install CGAL: you can just run <CODE>cmake . && make</CODE> in the CGAL directory (or
+ * even <CODE>cmake -DCGAL_HEADER_ONLY=ON .</CODE> for CGAL version &ge; 4.8.0); thereafter you will be able to
+ * compile GUDHI by calling <CODE>cmake -DCGAL_DIR=/your/path/to/CGAL-X.Y .. && make</CODE>
*
- * Having CGAL version 4.4 or higher installed is recommended. The procedure to install this library according to
+ * Having CGAL version 4.4.0 or higher installed is recommended. The procedure to install this library according to
* your operating system is detailed here http://doc.cgal.org/latest/Manual/installation.html
*
* The following examples require the <a target="_blank" href="http://www.cgal.org/">Computational Geometry Algorithms
* Library</a> (CGAL \cite cgal:eb-15b) and will not be built if CGAL is not installed:
* \li <a href="_persistent_cohomology_2alpha_complex_3d_persistence_8cpp-example.html">
* Persistent_cohomology/alpha_complex_3d_persistence.cpp</a>
- * \li <a href="_simplex_tree_2simplex_tree_from_alpha_shapes_3_8cpp-example.html">
- * Simplex_tree/simplex_tree_from_alpha_shapes_3.cpp</a>
+ * \li <a href="_persistent_cohomology_2exact_alpha_complex_3d_persistence_8cpp-example.html">
+ * Persistent_cohomology/exact_alpha_complex_3d_persistence.cpp</a>
+ * \li <a href="_persistent_cohomology_2weighted_alpha_complex_3d_persistence_8cpp-example.html">
+ * Persistent_cohomology/weighted_alpha_complex_3d_persistence.cpp</a>
+ * \li <a href="_simplex_tree_2example_alpha_shapes_3_simplex_tree_from_off_file_8cpp-example.html">
+ * Simplex_tree/example_alpha_shapes_3_simplex_tree_from_off_file.cpp</a>
*
- * The following example requires CGAL version &ge; 4.6:
+ * The following example requires CGAL version &ge; 4.6.0:
* \li <a href="_witness_complex_2witness_complex_sphere_8cpp-example.html">
* Witness_complex/witness_complex_sphere.cpp</a>
*
- * The following example requires CGAL version &ge; 4.7:
+ * The following example requires CGAL version &ge; 4.7.0:
* \li <a href="_alpha_complex_2_alpha_complex_from_off_8cpp-example.html">
* Alpha_complex/Alpha_complex_from_off.cpp</a>
* \li <a href="_alpha_complex_2_alpha_complex_from_points_8cpp-example.html">
@@ -220,7 +297,29 @@ make \endverbatim
* \li <a href="_persistent_cohomology_2custom_persistence_sort_8cpp-example.html">
* Persistent_cohomology/custom_persistence_sort.cpp</a>
*
- * \subsection eigen3 Eigen3:
+ * The following example requires CGAL version &ge; 4.8.1:
+ * \li <a href="_bottleneck_distance_2alpha_rips_persistence_bottleneck_distance_8cpp-example.html">
+ * Bottleneck_distance/alpha_rips_persistence_bottleneck_distance.cpp</a>
+ * \li <a href="_bottleneck_distance_2bottleneck_basic_example_8cpp-example.html">
+ * Bottleneck_distance/bottleneck_basic_example.cpp</a>
+ * \li <a href="_bottleneck_distance_2bottleneck_read_file_example_8cpp-example.html">
+ * Bottleneck_distance/bottleneck_read_file_example.cpp</a>
+ * \li <a href="_spatial_searching_2example_spatial_searching_8cpp-example.html">
+ * Spatial_searching/example_spatial_searching.cpp</a>
+ * \li <a href="_subsampling_2example_choose_n_farthest_points_8cpp-example.html">
+ * Subsampling/example_choose_n_farthest_points.cpp</a>
+ * \li <a href="_subsampling_2example_custom_kernel_8cpp-example.html">
+ * Subsampling/example_custom_kernel.cpp</a>
+ * \li <a href="_subsampling_2example_pick_n_random_points_8cpp-example.html">
+ * Subsampling/example_pick_n_random_points.cpp</a>
+ * \li <a href="_subsampling_2example_sparsify_point_set_8cpp-example.html">
+ * Subsampling/example_sparsify_point_set.cpp</a>
+ * \li <a href="_tangential_complex_2example_basic_8cpp-example.html">
+ * Tangential_complex/example_basic.cpp</a>
+ * \li <a href="_tangential_complex_2example_with_perturb_8cpp-example.html">
+ * Tangential_complex/example_with_perturb.cpp</a>
+ *
+ * \subsection eigen3 Eigen3
+ * The \ref alpha_complex data structure and a few examples require
 * <a target="_blank" href="http://eigen.tuxfamily.org/">Eigen3</a>, a C++ template library for linear algebra:
* matrices, vectors, numerical solvers, and related algorithms.
@@ -228,9 +327,9 @@ make \endverbatim
 * The following examples require <a target="_blank" href="http://eigen.tuxfamily.org/">Eigen3</a> and will not be
 * built if Eigen3 is not installed:
* \li <a href="_alpha_complex_2_alpha_complex_from_off_8cpp-example.html">
- * Alpha_complex/Alpha_complex_from_off.cpp</a> (requires also Eigen3)
+ * Alpha_complex/Alpha_complex_from_off.cpp</a>
* \li <a href="_alpha_complex_2_alpha_complex_from_points_8cpp-example.html">
- * Alpha_complex/Alpha_complex_from_points.cpp</a> (requires also Eigen3)
+ * Alpha_complex/Alpha_complex_from_points.cpp</a>
* \li <a href="_persistent_cohomology_2alpha_complex_persistence_8cpp-example.html">
* Persistent_cohomology/alpha_complex_persistence.cpp</a>
* \li <a href="_persistent_cohomology_2periodic_alpha_complex_3d_persistence_8cpp-example.html">
@@ -238,7 +337,7 @@ make \endverbatim
* \li <a href="_persistent_cohomology_2custom_persistence_sort_8cpp-example.html">
* Persistent_cohomology/custom_persistence_sort.cpp</a>
*
- * \subsection tbb Threading Building Blocks:
+ * \subsection tbb Threading Building Blocks
* <a target="_blank" href="https://www.threadingbuildingblocks.org/">Intel&reg; TBB</a> lets you easily write parallel
* C++ programs that take full advantage of multicore performance, that are portable and composable, and that have
* future-proof scalability.
@@ -262,8 +361,8 @@ make \endverbatim
* Persistent_cohomology/alpha_complex_persistence.cpp</a>
* \li <a href="_simplex_tree_2simple_simplex_tree_8cpp-example.html">
* Simplex_tree/simple_simplex_tree.cpp</a>
- * \li <a href="_simplex_tree_2simplex_tree_from_alpha_shapes_3_8cpp-example.html">
- * Simplex_tree/simplex_tree_from_alpha_shapes_3.cpp</a>
+ * \li <a href="_simplex_tree_2example_alpha_shapes_3_simplex_tree_from_off_file_8cpp-example.html">
+ * Simplex_tree/example_alpha_shapes_3_simplex_tree_from_off_file.cpp</a>
* \li <a href="_simplex_tree_2simplex_tree_from_cliques_of_graph_8cpp-example.html">
* Simplex_tree/simplex_tree_from_cliques_of_graph.cpp</a>
* \li <a href="_persistent_cohomology_2alpha_complex_3d_persistence_8cpp-example.html">
@@ -272,28 +371,38 @@ make \endverbatim
* Persistent_cohomology/alpha_complex_persistence.cpp</a>
* \li <a href="_persistent_cohomology_2rips_persistence_via_boundary_matrix_8cpp-example.html">
* Persistent_cohomology/rips_persistence_via_boundary_matrix.cpp</a>
- * \li <a href="_persistent_cohomology_2performance_rips_persistence_8cpp-example.html">
- * Persistent_cohomology/performance_rips_persistence.cpp</a>
* \li <a href="_persistent_cohomology_2persistence_from_file_8cpp-example.html">
* Persistent_cohomology/persistence_from_file.cpp</a>
* \li <a href="_persistent_cohomology_2persistence_from_simple_simplex_tree_8cpp-example.html">
* Persistent_cohomology/persistence_from_simple_simplex_tree.cpp</a>
* \li <a href="_persistent_cohomology_2plain_homology_8cpp-example.html">
* Persistent_cohomology/plain_homology.cpp</a>
+ * \li <a href="_persistent_cohomology_2rips_distance_matrix_persistence_8cpp-example.html">
+ * Persistent_cohomology/rips_distance_matrix_persistence.cpp</a>
* \li <a href="_persistent_cohomology_2rips_multifield_persistence_8cpp-example.html">
* Persistent_cohomology/rips_multifield_persistence.cpp</a>
* \li <a href="_persistent_cohomology_2rips_persistence_8cpp-example.html">
* Persistent_cohomology/rips_persistence.cpp</a>
+ * \li <a href="_persistent_cohomology_2rips_persistence_step_by_step_8cpp-example.html">
+ * Persistent_cohomology/rips_persistence_step_by_step.cpp</a>
+ * \li <a href="_persistent_cohomology_2exact_alpha_complex_3d_persistence_8cpp-example.html">
+ * Persistent_cohomology/exact_alpha_complex_3d_persistence.cpp</a>
+ * \li <a href="_persistent_cohomology_2weighted_alpha_complex_3d_persistence_8cpp-example.html">
+ * Persistent_cohomology/weighted_alpha_complex_3d_persistence.cpp</a>
* \li <a href="_persistent_cohomology_2periodic_alpha_complex_3d_persistence_8cpp-example.html">
* Persistent_cohomology/periodic_alpha_complex_3d_persistence.cpp</a>
* \li <a href="_persistent_cohomology_2custom_persistence_sort_8cpp-example.html">
* Persistent_cohomology/custom_persistence_sort.cpp</a>
+ * \li <a href="_rips_complex_2example_one_skeleton_rips_from_points_8cpp-example.html">
+ * Rips_complex/example_one_skeleton_rips_from_points.cpp</a>
+ * \li <a href="_rips_complex_2example_rips_complex_from_off_file_8cpp-example.html">
+ * Rips_complex/example_rips_complex_from_off_file.cpp</a>
*
* \section Contributions Bug reports and contributions
* Please help us improving the quality of the GUDHI library. You may report bugs or suggestions to:
* \verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim
*
- * Gudhi is open to external contributions. If you want to join our development team, please contact us.
+ * GUDHI is open to external contributions. If you want to join our development team, please contact us.
*
*/
@@ -308,10 +417,13 @@ make \endverbatim
* \verbinclude biblio/how_to_cite_gudhi.bib
*/
-// List of Gudhi examples - Doxygen needs at least a file tag to analyse comments
+// List of GUDHI examples - Doxygen needs at least a file tag to analyse comments
/*! @file Examples
* @example Alpha_complex/Alpha_complex_from_off.cpp
* @example Alpha_complex/Alpha_complex_from_points.cpp
+ * @example Bottleneck_distance/alpha_rips_persistence_bottleneck_distance.cpp
+ * @example Bottleneck_distance/bottleneck_basic_example.cpp
+ * @example Bottleneck_distance/bottleneck_read_file_example.cpp
* @example Bitmap_cubical_complex/Bitmap_cubical_complex.cpp
* @example Bitmap_cubical_complex/Bitmap_cubical_complex_periodic_boundary_conditions.cpp
* @example Bitmap_cubical_complex/Random_bitmap_cubical_complex.cpp
@@ -322,22 +434,38 @@ make \endverbatim
* @example Persistent_cohomology/alpha_complex_3d_persistence.cpp
* @example Persistent_cohomology/alpha_complex_persistence.cpp
* @example Persistent_cohomology/rips_persistence_via_boundary_matrix.cpp
- * @example Persistent_cohomology/performance_rips_persistence.cpp
+ * @example Persistent_cohomology/exact_alpha_complex_3d_persistence.cpp
+ * @example Persistent_cohomology/weighted_alpha_complex_3d_persistence.cpp
* @example Persistent_cohomology/periodic_alpha_complex_3d_persistence.cpp
* @example Persistent_cohomology/persistence_from_file.cpp
* @example Persistent_cohomology/persistence_from_simple_simplex_tree.cpp
* @example Persistent_cohomology/plain_homology.cpp
* @example Persistent_cohomology/rips_multifield_persistence.cpp
+ * @example Persistent_cohomology/rips_distance_matrix_persistence.cpp
* @example Persistent_cohomology/rips_persistence.cpp
* @example Persistent_cohomology/custom_persistence_sort.cpp
+ * @example Persistent_cohomology/rips_persistence_step_by_step.cpp
+ * @example Rips_complex/example_one_skeleton_rips_from_points.cpp
+ * @example Rips_complex/example_rips_complex_from_off_file.cpp
* @example Simplex_tree/mini_simplex_tree.cpp
* @example Simplex_tree/simple_simplex_tree.cpp
- * @example Simplex_tree/simplex_tree_from_alpha_shapes_3.cpp
+ * @example Simplex_tree/example_alpha_shapes_3_simplex_tree_from_off_file.cpp
* @example Simplex_tree/simplex_tree_from_cliques_of_graph.cpp
* @example Skeleton_blocker/Skeleton_blocker_from_simplices.cpp
* @example Skeleton_blocker/Skeleton_blocker_iteration.cpp
* @example Skeleton_blocker/Skeleton_blocker_link.cpp
- * @example Witness_complex/witness_complex_from_file.cpp
- * @example Witness_complex/witness_complex_sphere.cpp
+ * @example Spatial_searching/example_spatial_searching.cpp
+ * @example Subsampling/example_choose_n_farthest_points.cpp
+ * @example Subsampling/example_custom_kernel.cpp
+ * @example Subsampling/example_pick_n_random_points.cpp
+ * @example Subsampling/example_sparsify_point_set.cpp
+ * @example Tangential_complex/example_basic.cpp
+ * @example Tangential_complex/example_with_perturb.cpp
+ * @example Witness_complex/example_nearest_landmark_table.cpp
+ * @example Witness_complex/example_strong_witness_complex_off.cpp
+ * @example Witness_complex/example_strong_witness_persistence.cpp
+ * @example Witness_complex/example_witness_complex_off.cpp
+ * @example Witness_complex/example_witness_complex_persistence.cpp
+ * @example Witness_complex/example_witness_complex_sphere.cpp
*/
diff --git a/src/common/example/CMakeLists.txt b/src/common/example/CMakeLists.txt
index b0c6d69a..61c94391 100644
--- a/src/common/example/CMakeLists.txt
+++ b/src/common/example/CMakeLists.txt
@@ -10,12 +10,10 @@ if(CGAL_FOUND)
target_link_libraries(cgal_3D_off_reader ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY})
add_test(cgal_3D_off_reader ${CMAKE_CURRENT_BINARY_DIR}/cgal_3D_off_reader ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off)
- # need CGAL 4.7
- if (NOT CGAL_VERSION VERSION_LESS 4.7.0)
- if (EIGEN3_FOUND)
- add_executable ( cgal_off_reader example_CGAL_points_off_reader.cpp )
- target_link_libraries(cgal_off_reader ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY})
- add_test(cgal_off_reader ${CMAKE_CURRENT_BINARY_DIR}/cgal_off_reader ${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off)
- endif(EIGEN3_FOUND)
- endif (NOT CGAL_VERSION VERSION_LESS 4.7.0)
+  # need CGAL 4.7 and Eigen3
+ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
+ add_executable ( cgal_off_reader example_CGAL_points_off_reader.cpp )
+ target_link_libraries(cgal_off_reader ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY})
+ add_test(cgal_off_reader ${CMAKE_CURRENT_BINARY_DIR}/cgal_off_reader ${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off)
+ endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
endif()
diff --git a/src/common/example/example_CGAL_3D_points_off_reader.cpp b/src/common/example/example_CGAL_3D_points_off_reader.cpp
index d48bb17d..665b7a29 100644
--- a/src/common/example/example_CGAL_3D_points_off_reader.cpp
+++ b/src/common/example/example_CGAL_3D_points_off_reader.cpp
@@ -32,7 +32,7 @@ int main(int argc, char **argv) {
// Retrieve the triangulation
std::vector<Point_3> point_cloud = off_reader.get_point_cloud();
- int n {0};
+ int n {};
for (auto point : point_cloud) {
++n;
std::cout << "Point[" << n << "] = (" << point[0] << ", " << point[1] << ", " << point[2] << ")\n";
diff --git a/src/common/example/example_CGAL_points_off_reader.cpp b/src/common/example/example_CGAL_points_off_reader.cpp
index 4522174a..8c6a6b54 100644
--- a/src/common/example/example_CGAL_points_off_reader.cpp
+++ b/src/common/example/example_CGAL_points_off_reader.cpp
@@ -34,7 +34,7 @@ int main(int argc, char **argv) {
// Retrieve the triangulation
std::vector<Point_d> point_cloud = off_reader.get_point_cloud();
- int n {0};
+ int n {};
for (auto point : point_cloud) {
std::cout << "Point[" << n << "] = ";
for (std::size_t i {0}; i < point.size(); i++)
diff --git a/src/common/include/gudhi/Clock.h b/src/common/include/gudhi/Clock.h
index 04c6ffb9..77f196ca 100644
--- a/src/common/include/gudhi/Clock.h
+++ b/src/common/include/gudhi/Clock.h
@@ -27,47 +27,55 @@
#include <string>
+namespace Gudhi {
+
class Clock {
public:
- Clock() : end_called(false) {
- startTime = boost::posix_time::microsec_clock::local_time();
- }
-
- Clock(const std::string& msg_) {
- end_called = false;
- begin();
- msg = msg_;
- }
+ // Construct and start the timer
+ Clock(const std::string& msg_ = std::string())
+ : startTime(boost::posix_time::microsec_clock::local_time()),
+ end_called(false),
+ msg(msg_) { }
+ // Restart the timer
void begin() const {
end_called = false;
startTime = boost::posix_time::microsec_clock::local_time();
}
+ // Stop the timer
void end() const {
end_called = true;
endTime = boost::posix_time::microsec_clock::local_time();
}
+ std::string message() const {
+ return msg;
+ }
+
+ // Print current value to std::cout
void print() const {
std::cout << *this << std::endl;
}
friend std::ostream& operator<<(std::ostream& stream, const Clock& clock) {
- if (!clock.end_called)
- clock.end();
+ if (!clock.msg.empty())
+ stream << clock.msg << ": ";
- if (!clock.end_called) {
- stream << "end not called";
- } else {
- stream << clock.msg << ":" << clock.num_seconds() << "s";
- }
+ stream << clock.num_seconds() << "s";
return stream;
}
+ // Get the number of seconds between the timer start and:
+ // - the last call of end() if it was called
+ // - or now otherwise. In this case, the timer is not stopped.
double num_seconds() const {
- if (!end_called) return -1;
- return (endTime - startTime).total_milliseconds() / 1000.;
+ if (!end_called) {
+ auto end = boost::posix_time::microsec_clock::local_time();
+ return (end - startTime).total_milliseconds() / 1000.;
+ } else {
+ return (endTime - startTime).total_milliseconds() / 1000.;
+ }
}
private:
@@ -76,4 +84,6 @@ class Clock {
std::string msg;
};
-#endif // CLOCK_H_
+} // namespace Gudhi
+
+#endif // CLOCK_H_
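A minimal usage sketch of the reworked timer, relying only on the members visible in the diff above (constructor with an optional message, end(), message(), num_seconds(), operator<<).

#include <gudhi/Clock.h>

#include <iostream>

int main() {
  Gudhi::Clock clock("Some computation");  // construct and start the timer
  // ... do some work ...
  std::cout << clock << std::endl;  // timer not stopped yet: prints the elapsed time so far
  clock.end();                      // stop the timer
  std::cout << clock.message() << " took " << clock.num_seconds() << "s" << std::endl;
  return 0;
}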
diff --git a/src/common/include/gudhi/Debug_utils.h b/src/common/include/gudhi/Debug_utils.h
index 7573a9db..8ed3b7b3 100644
--- a/src/common/include/gudhi/Debug_utils.h
+++ b/src/common/include/gudhi/Debug_utils.h
@@ -33,8 +33,10 @@
// Could assert in release mode, but cmake sets NDEBUG (for "NO DEBUG") in this mode, means assert does nothing.
#ifdef GUDHI_DEBUG
#define GUDHI_CHECK(expression, excpt) if ((expression) == 0) throw excpt
+ #define GUDHI_CHECK_code(CODE) CODE
#else
#define GUDHI_CHECK(expression, excpt) (void) 0
+ #define GUDHI_CHECK_code(CODE)
#endif
#define PRINT(a) std::cerr << #a << ": " << (a) << " (DISP)" << std::endl
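A hypothetical illustration of the two macros above: the extra bookkeeping and the checks are only compiled when GUDHI_DEBUG is defined, and both macros expand to nothing in release mode.

#include <gudhi/Debug_utils.h>

#include <cstddef>
#include <stdexcept>
#include <vector>

// Hypothetical helper, for illustration only.
void insert_vertex(std::vector<int>& vertices, int v) {
  GUDHI_CHECK(v >= 0, std::invalid_argument("vertex handle must be non-negative"));
  GUDHI_CHECK_code(std::size_t size_before = vertices.size();)
  vertices.push_back(v);
  // size_before only exists in debug mode, but so does the expansion of this check.
  GUDHI_CHECK(vertices.size() == size_before + 1, std::logic_error("insertion failed"));
}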
diff --git a/src/common/include/gudhi/console_color.h b/src/common/include/gudhi/console_color.h
new file mode 100644
index 00000000..c4671da3
--- /dev/null
+++ b/src/common/include/gudhi/console_color.h
@@ -0,0 +1,97 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Clement Jamin
+ *
+ * Copyright (C) 2016 INRIA Sophia-Antipolis (France)
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef CONSOLE_COLOR_H_
+#define CONSOLE_COLOR_H_
+
+#include <iostream>
+
+#if defined(WIN32)
+#include <windows.h>
+#endif
+
+inline std::ostream& blue(std::ostream &s) {
+#if defined(WIN32)
+ HANDLE hStdout = GetStdHandle(STD_OUTPUT_HANDLE);
+ SetConsoleTextAttribute(hStdout,
+ FOREGROUND_BLUE | FOREGROUND_GREEN | FOREGROUND_INTENSITY);
+#else
+ s << "\x1b[0;34m";
+#endif
+ return s;
+}
+
+inline std::ostream& red(std::ostream &s) {
+#if defined(WIN32)
+ HANDLE hStdout = GetStdHandle(STD_OUTPUT_HANDLE);
+ SetConsoleTextAttribute(hStdout, FOREGROUND_RED | FOREGROUND_INTENSITY);
+#else
+ s << "\x1b[0;31m";
+#endif
+ return s;
+}
+
+inline std::ostream& green(std::ostream &s) {
+#if defined(WIN32)
+ HANDLE hStdout = GetStdHandle(STD_OUTPUT_HANDLE);
+ SetConsoleTextAttribute(hStdout, FOREGROUND_GREEN | FOREGROUND_INTENSITY);
+#else
+ s << "\x1b[0;32m";
+#endif
+ return s;
+}
+
+inline std::ostream& yellow(std::ostream &s) {
+#if defined(WIN32)
+ HANDLE hStdout = GetStdHandle(STD_OUTPUT_HANDLE);
+ SetConsoleTextAttribute(hStdout,
+ FOREGROUND_GREEN | FOREGROUND_RED | FOREGROUND_INTENSITY);
+#else
+ s << "\x1b[0;33m";
+#endif
+ return s;
+}
+
+inline std::ostream& white(std::ostream &s) {
+#if defined(WIN32)
+ HANDLE hStdout = GetStdHandle(STD_OUTPUT_HANDLE);
+ SetConsoleTextAttribute(hStdout,
+ FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE);
+#else
+ s << "\x1b[0;37m";
+#endif
+ return s;
+}
+
+inline std::ostream& black_on_white(std::ostream &s) {
+#if defined(WIN32)
+ HANDLE hStdout = GetStdHandle(STD_OUTPUT_HANDLE);
+ SetConsoleTextAttribute(hStdout,
+ BACKGROUND_RED | BACKGROUND_GREEN | BACKGROUND_BLUE);
+#else
+ s << "\x1b[0;33m";
+#endif
+ return s;
+}
+
+
+#endif // CONSOLE_COLOR_H_
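A short usage sketch: the functions above are ordinary std::ostream manipulators, so they can be chained directly in stream expressions.

#include <gudhi/console_color.h>

#include <iostream>

int main() {
  std::cout << green << "[PASSED] " << white << "all tests" << std::endl;
  std::cout << red << "[FAILED] " << white << "one test" << std::endl;
  return 0;
}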
diff --git a/src/common/include/gudhi/distance_functions.h b/src/common/include/gudhi/distance_functions.h
index cd518581..22747637 100644
--- a/src/common/include/gudhi/distance_functions.h
+++ b/src/common/include/gudhi/distance_functions.h
@@ -4,7 +4,7 @@
*
* Author(s): Clément Maria
*
- * Copyright (C) 2014 INRIA Sophia Antipolis-Méditerranée (France)
+ * Copyright (C) 2014 INRIA
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -24,20 +24,28 @@
#define DISTANCE_FUNCTIONS_H_
#include <cmath> // for std::sqrt
+#include <type_traits> // for std::decay
+#include <iterator> // for std::begin, std::end
-/* Compute the Euclidean distance between two Points given
- * by a range of coordinates. The points are assumed to have
- * the same dimension. */
-template< typename Point >
-double euclidean_distance(Point &p1, Point &p2) {
- double dist = 0.;
- auto it1 = p1.begin();
- auto it2 = p2.begin();
- for (; it1 != p1.end(); ++it1, ++it2) {
- double tmp = *it1 - *it2;
- dist += tmp*tmp;
+/** @file
+ * @brief Global distance functions
+ */
+
+/** @brief Compute the Euclidean distance between two Points given by a range of coordinates. The points are assumed to
+ * have the same dimension. */
+class Euclidean_distance {
+ public:
+ template< typename Point >
+ auto operator()(const Point& p1, const Point& p2) const -> typename std::decay<decltype(*std::begin(p1))>::type {
+ auto it1 = p1.begin();
+ auto it2 = p2.begin();
+ typename Point::value_type dist = 0.;
+ for (; it1 != p1.end(); ++it1, ++it2) {
+ typename Point::value_type tmp = (*it1) - (*it2);
+ dist += tmp*tmp;
+ }
+ return std::sqrt(dist);
}
- return std::sqrt(dist);
-}
+};
#endif // DISTANCE_FUNCTIONS_H_
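A short usage sketch of the Euclidean_distance functor defined above; any point type providing begin()/end() and a numeric value_type should do.

#include <gudhi/distance_functions.h>

#include <iostream>
#include <vector>

int main() {
  std::vector<double> p1 = {1., 0., 0.};
  std::vector<double> p2 = {0., 1., 0.};
  // The functor is stateless, so a temporary instance can be used directly.
  double d = Euclidean_distance()(p1, p2);  // sqrt(2)
  std::cout << d << std::endl;
  return 0;
}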
diff --git a/src/common/include/gudhi/graph_simplicial_complex.h b/src/common/include/gudhi/graph_simplicial_complex.h
index 042ef516..5fe7c826 100644
--- a/src/common/include/gudhi/graph_simplicial_complex.h
+++ b/src/common/include/gudhi/graph_simplicial_complex.h
@@ -4,7 +4,7 @@
*
* Author(s): Clément Maria
*
- * Copyright (C) 2014 INRIA Sophia Antipolis-Méditerranée (France)
+ * Copyright (C) 2014 INRIA
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -39,61 +39,4 @@ struct vertex_filtration_t {
typedef boost::vertex_property_tag kind;
};
-typedef int Vertex_handle;
-typedef double Filtration_value;
-typedef boost::adjacency_list < boost::vecS, boost::vecS, boost::undirectedS
-, boost::property < vertex_filtration_t, Filtration_value >
-, boost::property < edge_filtration_t, Filtration_value >
-> Graph_t;
-typedef std::pair< Vertex_handle, Vertex_handle > Edge_t;
-
-/** \brief Output the proximity graph of the points.
- *
- * If points contains n elements, the proximity graph is the graph
- * with n vertices, and an edge [u,v] iff the distance function between
- * points u and v is smaller than threshold.
- *
- * The type PointCloud furnishes .begin() and .end() methods, that return
- * iterators with value_type Point.
- */
-template< typename PointCloud
-, typename Point >
-Graph_t compute_proximity_graph(PointCloud &points
- , Filtration_value threshold
- , Filtration_value distance(Point p1, Point p2)) {
- std::vector< Edge_t > edges;
- std::vector< Filtration_value > edges_fil;
- std::map< Vertex_handle, Filtration_value > vertices;
-
- Vertex_handle idx_u, idx_v;
- Filtration_value fil;
- idx_u = 0;
- for (auto it_u = points.begin(); it_u != points.end(); ++it_u) {
- idx_v = idx_u + 1;
- for (auto it_v = it_u + 1; it_v != points.end(); ++it_v, ++idx_v) {
- fil = distance(*it_u, *it_v);
- if (fil <= threshold) {
- edges.emplace_back(idx_u, idx_v);
- edges_fil.push_back(fil);
- }
- }
- ++idx_u;
- }
-
- Graph_t skel_graph(edges.begin()
- , edges.end()
- , edges_fil.begin()
- , idx_u); // number of points labeled from 0 to idx_u-1
-
- auto vertex_prop = boost::get(vertex_filtration_t(), skel_graph);
-
- boost::graph_traits<Graph_t>::vertex_iterator vi, vi_end;
- for (std::tie(vi, vi_end) = boost::vertices(skel_graph);
- vi != vi_end; ++vi) {
- boost::put(vertex_prop, *vi, 0.);
- }
-
- return skel_graph;
-}
-
#endif // GRAPH_SIMPLICIAL_COMPLEX_H_
diff --git a/src/common/include/gudhi/random_point_generators.h b/src/common/include/gudhi/random_point_generators.h
new file mode 100644
index 00000000..2ec465ef
--- /dev/null
+++ b/src/common/include/gudhi/random_point_generators.h
@@ -0,0 +1,474 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Clement Jamin
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef RANDOM_POINT_GENERATORS_H_
+#define RANDOM_POINT_GENERATORS_H_
+
+#include <CGAL/number_utils.h>
+#include <CGAL/Random.h>
+#include <CGAL/point_generators_d.h>
+
+#include <vector> // for vector<>
+
+namespace Gudhi {
+
+///////////////////////////////////////////////////////////////////////////////
+// Note: All these functions have been tested with the CGAL::Epick_d kernel
+///////////////////////////////////////////////////////////////////////////////
+
+// construct_point: dim 2
+
+template <typename Kernel>
+typename Kernel::Point_d construct_point(const Kernel &k,
+ typename Kernel::FT x1, typename Kernel::FT x2) {
+ typename Kernel::FT tab[2];
+ tab[0] = x1;
+ tab[1] = x2;
+ return k.construct_point_d_object()(2, &tab[0], &tab[2]);
+}
+
+// construct_point: dim 3
+
+template <typename Kernel>
+typename Kernel::Point_d construct_point(const Kernel &k,
+ typename Kernel::FT x1, typename Kernel::FT x2, typename Kernel::FT x3) {
+ typename Kernel::FT tab[3];
+ tab[0] = x1;
+ tab[1] = x2;
+ tab[2] = x3;
+ return k.construct_point_d_object()(3, &tab[0], &tab[3]);
+}
+
+// construct_point: dim 4
+
+template <typename Kernel>
+typename Kernel::Point_d construct_point(const Kernel &k,
+ typename Kernel::FT x1, typename Kernel::FT x2, typename Kernel::FT x3,
+ typename Kernel::FT x4) {
+ typename Kernel::FT tab[4];
+ tab[0] = x1;
+ tab[1] = x2;
+ tab[2] = x3;
+ tab[3] = x4;
+ return k.construct_point_d_object()(4, &tab[0], &tab[4]);
+}
+
+// construct_point: dim 5
+
+template <typename Kernel>
+typename Kernel::Point_d construct_point(const Kernel &k,
+ typename Kernel::FT x1, typename Kernel::FT x2, typename Kernel::FT x3,
+ typename Kernel::FT x4, typename Kernel::FT x5) {
+ typename Kernel::FT tab[5];
+ tab[0] = x1;
+ tab[1] = x2;
+ tab[2] = x3;
+ tab[3] = x4;
+ tab[4] = x5;
+ return k.construct_point_d_object()(5, &tab[0], &tab[5]);
+}
+
+// construct_point: dim 6
+
+template <typename Kernel>
+typename Kernel::Point_d construct_point(const Kernel &k,
+ typename Kernel::FT x1, typename Kernel::FT x2, typename Kernel::FT x3,
+ typename Kernel::FT x4, typename Kernel::FT x5, typename Kernel::FT x6) {
+ typename Kernel::FT tab[6];
+ tab[0] = x1;
+ tab[1] = x2;
+ tab[2] = x3;
+ tab[3] = x4;
+ tab[4] = x5;
+ tab[5] = x6;
+ return k.construct_point_d_object()(6, &tab[0], &tab[6]);
+}
+
+template <typename Kernel>
+std::vector<typename Kernel::Point_d> generate_points_on_plane(std::size_t num_points, int intrinsic_dim,
+ int ambient_dim,
+ double coord_min = -5., double coord_max = 5.) {
+ typedef typename Kernel::Point_d Point;
+ typedef typename Kernel::FT FT;
+ Kernel k;
+ CGAL::Random rng;
+ std::vector<Point> points;
+ points.reserve(num_points);
+ for (std::size_t i = 0; i < num_points;) {
+ std::vector<FT> pt(ambient_dim, FT(0));
+ for (int j = 0; j < intrinsic_dim; ++j)
+ pt[j] = rng.get_double(coord_min, coord_max);
+
+ Point p = k.construct_point_d_object()(ambient_dim, pt.begin(), pt.end());
+ points.push_back(p);
+ ++i;
+ }
+ return points;
+}
+
+template <typename Kernel>
+std::vector<typename Kernel::Point_d> generate_points_on_moment_curve(std::size_t num_points, int dim,
+ typename Kernel::FT min_x,
+ typename Kernel::FT max_x) {
+ typedef typename Kernel::Point_d Point;
+ typedef typename Kernel::FT FT;
+ Kernel k;
+ CGAL::Random rng;
+ std::vector<Point> points;
+ points.reserve(num_points);
+ for (std::size_t i = 0; i < num_points;) {
+ FT x = rng.get_double(min_x, max_x);
+ std::vector<FT> coords;
+ coords.reserve(dim);
+ for (int p = 1; p <= dim; ++p)
+ coords.push_back(std::pow(CGAL::to_double(x), p));
+ Point p = k.construct_point_d_object()(
+ dim, coords.begin(), coords.end());
+ points.push_back(p);
+ ++i;
+ }
+ return points;
+}
+
+
+// R = big radius, r = small radius
+template <typename Kernel/*, typename TC_basis*/>
+std::vector<typename Kernel::Point_d> generate_points_on_torus_3D(std::size_t num_points, double R, double r,
+ bool uniform = false) {
+ typedef typename Kernel::Point_d Point;
+ typedef typename Kernel::FT FT;
+ Kernel k;
+ CGAL::Random rng;
+
+ // if uniform
+ std::size_t num_lines = (std::size_t)sqrt(num_points);
+
+ std::vector<Point> points;
+ points.reserve(num_points);
+ for (std::size_t i = 0; i < num_points;) {
+ FT u, v;
+ if (uniform) {
+ std::size_t k1 = i / num_lines;
+ std::size_t k2 = i % num_lines;
+ u = 6.2832 * k1 / num_lines;
+ v = 6.2832 * k2 / num_lines;
+ } else {
+ u = rng.get_double(0, 6.2832);
+ v = rng.get_double(0, 6.2832);
+ }
+ Point p = construct_point(k,
+ (R + r * std::cos(u)) * std::cos(v),
+ (R + r * std::cos(u)) * std::sin(v),
+ r * std::sin(u));
+ points.push_back(p);
+ ++i;
+ }
+ return points;
+}
+
+// "Private" function used by generate_points_on_torus_d
+template <typename Kernel, typename OutputIterator>
+static void generate_uniform_points_on_torus_d(const Kernel &k, int dim, std::size_t num_slices,
+ OutputIterator out,
+ double radius_noise_percentage = 0.,
+ std::vector<typename Kernel::FT> current_point = std::vector<typename Kernel::FT>()) {
+ CGAL::Random rng;
+ int point_size = static_cast<int>(current_point.size());
+ if (point_size == 2 * dim) {
+ *out++ = k.construct_point_d_object()(point_size, current_point.begin(), current_point.end());
+ } else {
+ for (std::size_t slice_idx = 0; slice_idx < num_slices; ++slice_idx) {
+ double radius_noise_ratio = 1.;
+ if (radius_noise_percentage > 0.) {
+ radius_noise_ratio = rng.get_double(
+ (100. - radius_noise_percentage) / 100.,
+ (100. + radius_noise_percentage) / 100.);
+ }
+ std::vector<typename Kernel::FT> cp2 = current_point;
+ double alpha = 6.2832 * slice_idx / num_slices;
+ cp2.push_back(radius_noise_ratio * std::cos(alpha));
+ cp2.push_back(radius_noise_ratio * std::sin(alpha));
+ generate_uniform_points_on_torus_d(
+ k, dim, num_slices, out, radius_noise_percentage, cp2);
+ }
+ }
+}
+
+template <typename Kernel>
+std::vector<typename Kernel::Point_d> generate_points_on_torus_d(std::size_t num_points, int dim, bool uniform = false,
+ double radius_noise_percentage = 0.) {
+ typedef typename Kernel::Point_d Point;
+ typedef typename Kernel::FT FT;
+ Kernel k;
+ CGAL::Random rng;
+
+ std::vector<Point> points;
+ points.reserve(num_points);
+ if (uniform) {
+ std::size_t num_slices = (std::size_t)std::pow(num_points, 1. / dim);
+ generate_uniform_points_on_torus_d(
+ k, dim, num_slices, std::back_inserter(points), radius_noise_percentage);
+ } else {
+ for (std::size_t i = 0; i < num_points;) {
+ double radius_noise_ratio = 1.;
+ if (radius_noise_percentage > 0.) {
+ radius_noise_ratio = rng.get_double(
+ (100. - radius_noise_percentage) / 100.,
+ (100. + radius_noise_percentage) / 100.);
+ }
+ std::vector<typename Kernel::FT> pt;
+ pt.reserve(dim * 2);
+ for (int curdim = 0; curdim < dim; ++curdim) {
+ FT alpha = rng.get_double(0, 6.2832);
+ pt.push_back(radius_noise_ratio * std::cos(alpha));
+ pt.push_back(radius_noise_ratio * std::sin(alpha));
+ }
+
+ Point p = k.construct_point_d_object()(pt.begin(), pt.end());
+ points.push_back(p);
+ ++i;
+ }
+ }
+ return points;
+}
+
+template <typename Kernel>
+std::vector<typename Kernel::Point_d> generate_points_on_sphere_d(std::size_t num_points, int dim, double radius,
+ double radius_noise_percentage = 0.) {
+ typedef typename Kernel::Point_d Point;
+ Kernel k;
+ CGAL::Random rng;
+ CGAL::Random_points_on_sphere_d<Point> generator(dim, radius);
+ std::vector<Point> points;
+ points.reserve(num_points);
+ for (std::size_t i = 0; i < num_points;) {
+ Point p = *generator++;
+ if (radius_noise_percentage > 0.) {
+ double radius_noise_ratio = rng.get_double(
+ (100. - radius_noise_percentage) / 100.,
+ (100. + radius_noise_percentage) / 100.);
+
+ typename Kernel::Point_to_vector_d k_pt_to_vec =
+ k.point_to_vector_d_object();
+ typename Kernel::Vector_to_point_d k_vec_to_pt =
+ k.vector_to_point_d_object();
+ typename Kernel::Scaled_vector_d k_scaled_vec =
+ k.scaled_vector_d_object();
+ p = k_vec_to_pt(k_scaled_vec(k_pt_to_vec(p), radius_noise_ratio));
+ }
+ points.push_back(p);
+ ++i;
+ }
+ return points;
+}
+
+template <typename Kernel>
+std::vector<typename Kernel::Point_d> generate_points_on_two_spheres_d(std::size_t num_points, int dim, double radius,
+ double distance_between_centers,
+ double radius_noise_percentage = 0.) {
+ typedef typename Kernel::FT FT;
+ typedef typename Kernel::Point_d Point;
+ typedef typename Kernel::Vector_d Vector;
+ Kernel k;
+ CGAL::Random rng;
+ CGAL::Random_points_on_sphere_d<Point> generator(dim, radius);
+ std::vector<Point> points;
+ points.reserve(num_points);
+
+ std::vector<FT> t(dim, FT(0));
+ t[0] = distance_between_centers;
+ Vector c1_to_c2(t.begin(), t.end());
+
+ for (std::size_t i = 0; i < num_points;) {
+ Point p = *generator++;
+ if (radius_noise_percentage > 0.) {
+ double radius_noise_ratio = rng.get_double(
+ (100. - radius_noise_percentage) / 100.,
+ (100. + radius_noise_percentage) / 100.);
+
+ typename Kernel::Point_to_vector_d k_pt_to_vec =
+ k.point_to_vector_d_object();
+ typename Kernel::Vector_to_point_d k_vec_to_pt =
+ k.vector_to_point_d_object();
+ typename Kernel::Scaled_vector_d k_scaled_vec =
+ k.scaled_vector_d_object();
+ p = k_vec_to_pt(k_scaled_vec(k_pt_to_vec(p), radius_noise_ratio));
+ }
+
+ typename Kernel::Translated_point_d k_transl =
+ k.translated_point_d_object();
+ Point p2 = k_transl(p, c1_to_c2);
+ points.push_back(p);
+ points.push_back(p2);
+ i += 2;
+ }
+ return points;
+}
+
+// Product of a 3-sphere and a circle => d = 3 / D = 5
+
+template <typename Kernel>
+std::vector<typename Kernel::Point_d> generate_points_on_3sphere_and_circle(std::size_t num_points,
+ double sphere_radius) {
+ typedef typename Kernel::FT FT;
+ typedef typename Kernel::Point_d Point;
+ Kernel k;
+ CGAL::Random rng;
+ CGAL::Random_points_on_sphere_d<Point> generator(3, sphere_radius);
+ std::vector<Point> points;
+ points.reserve(num_points);
+
+ typename Kernel::Compute_coordinate_d k_coord =
+ k.compute_coordinate_d_object();
+ for (std::size_t i = 0; i < num_points;) {
+ Point p_sphere = *generator++; // First 3 coords
+
+ FT alpha = rng.get_double(0, 6.2832);
+ std::vector<FT> pt(5);
+ pt[0] = k_coord(p_sphere, 0);
+ pt[1] = k_coord(p_sphere, 1);
+ pt[2] = k_coord(p_sphere, 2);
+ pt[3] = std::cos(alpha);
+ pt[4] = std::sin(alpha);
+ Point p(pt.begin(), pt.end());
+ points.push_back(p);
+ ++i;
+ }
+ return points;
+}
+
+// a = big radius, b = small radius
+template <typename Kernel>
+std::vector<typename Kernel::Point_d> generate_points_on_klein_bottle_3D(std::size_t num_points, double a, double b,
+ bool uniform = false) {
+ typedef typename Kernel::Point_d Point;
+ typedef typename Kernel::FT FT;
+ Kernel k;
+ CGAL::Random rng;
+
+ // if uniform
+ std::size_t num_lines = (std::size_t)sqrt(num_points);
+
+ std::vector<Point> points;
+ points.reserve(num_points);
+ for (std::size_t i = 0; i < num_points;) {
+ FT u, v;
+ if (uniform) {
+ std::size_t k1 = i / num_lines;
+ std::size_t k2 = i % num_lines;
+ u = 6.2832 * k1 / num_lines;
+ v = 6.2832 * k2 / num_lines;
+ } else {
+ u = rng.get_double(0, 6.2832);
+ v = rng.get_double(0, 6.2832);
+ }
+ double tmp = cos(u / 2) * sin(v) - sin(u / 2) * sin(2. * v);
+ Point p = construct_point(k,
+ (a + b * tmp) * cos(u),
+ (a + b * tmp) * sin(u),
+ b * (sin(u / 2) * sin(v) + cos(u / 2) * sin(2. * v)));
+ points.push_back(p);
+ ++i;
+ }
+ return points;
+}
+
+// a = big radius, b = small radius
+template <typename Kernel>
+std::vector<typename Kernel::Point_d> generate_points_on_klein_bottle_4D(std::size_t num_points, double a, double b,
+ double noise = 0., bool uniform = false) {
+ typedef typename Kernel::Point_d Point;
+ typedef typename Kernel::FT FT;
+ Kernel k;
+ CGAL::Random rng;
+
+ // if uniform
+ std::size_t num_lines = (std::size_t)sqrt(num_points);
+
+ std::vector<Point> points;
+ points.reserve(num_points);
+ for (std::size_t i = 0; i < num_points;) {
+ FT u, v;
+ if (uniform) {
+ std::size_t k1 = i / num_lines;
+ std::size_t k2 = i % num_lines;
+ u = 6.2832 * k1 / num_lines;
+ v = 6.2832 * k2 / num_lines;
+ } else {
+ u = rng.get_double(0, 6.2832);
+ v = rng.get_double(0, 6.2832);
+ }
+ Point p = construct_point(k,
+ (a + b * cos(v)) * cos(u) + (noise == 0. ? 0. : rng.get_double(0, noise)),
+ (a + b * cos(v)) * sin(u) + (noise == 0. ? 0. : rng.get_double(0, noise)),
+ b * sin(v) * cos(u / 2) + (noise == 0. ? 0. : rng.get_double(0, noise)),
+ b * sin(v) * sin(u / 2) + (noise == 0. ? 0. : rng.get_double(0, noise)));
+ points.push_back(p);
+ ++i;
+ }
+ return points;
+}
+
+
+// a = big radius, b = small radius
+
+template <typename Kernel>
+std::vector<typename Kernel::Point_d>
+generate_points_on_klein_bottle_variant_5D(
+ std::size_t num_points, double a, double b, bool uniform = false) {
+ typedef typename Kernel::Point_d Point;
+ typedef typename Kernel::FT FT;
+ Kernel k;
+ CGAL::Random rng;
+
+ // if uniform
+ std::size_t num_lines = (std::size_t)sqrt(num_points);
+
+ std::vector<Point> points;
+ points.reserve(num_points);
+ for (std::size_t i = 0; i < num_points;) {
+ FT u, v;
+ if (uniform) {
+ std::size_t k1 = i / num_lines;
+ std::size_t k2 = i % num_lines;
+ u = 6.2832 * k1 / num_lines;
+ v = 6.2832 * k2 / num_lines;
+ } else {
+ u = rng.get_double(0, 6.2832);
+ v = rng.get_double(0, 6.2832);
+ }
+ FT x1 = (a + b * cos(v)) * cos(u);
+ FT x2 = (a + b * cos(v)) * sin(u);
+ FT x3 = b * sin(v) * cos(u / 2);
+ FT x4 = b * sin(v) * sin(u / 2);
+ FT x5 = x1 + x2 + x3 + x4;
+
+ Point p = construct_point(k, x1, x2, x3, x4, x5);
+ points.push_back(p);
+ ++i;
+ }
+ return points;
+}
+
+} // namespace Gudhi
+
+#endif // RANDOM_POINT_GENERATORS_H_
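A minimal usage sketch under the Epick_d assumption stated in the note above; the kernel choice and the parameter values are illustrative only.

#include <gudhi/random_point_generators.h>

#include <CGAL/Epick_d.h>

#include <iostream>
#include <vector>

int main() {
  using Kernel = CGAL::Epick_d<CGAL::Dynamic_dimension_tag>;
  using Point = Kernel::Point_d;
  // 100 random points on a sphere of radius 1. (the dimension argument is forwarded to
  // CGAL::Random_points_on_sphere_d, as in the implementation above).
  std::vector<Point> sphere = Gudhi::generate_points_on_sphere_d<Kernel>(100, 3, 1.);
  // 100 random points on a torus in R^3 with big radius 2 and small radius 1.
  std::vector<Point> torus = Gudhi::generate_points_on_torus_3D<Kernel>(100, 2., 1.);
  std::cout << sphere.size() + torus.size() << " points generated" << std::endl;
  return 0;
}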
diff --git a/src/common/include/gudhi/reader_utils.h b/src/common/include/gudhi/reader_utils.h
index 899f9df6..97a87edd 100644
--- a/src/common/include/gudhi/reader_utils.h
+++ b/src/common/include/gudhi/reader_utils.h
@@ -2,9 +2,9 @@
* (Geometric Understanding in Higher Dimensions) is a generic C++
* library for computational topology.
*
- * Author(s): Clément Maria
+ * Author(s): Clement Maria, Pawel Dlotko
*
- * Copyright (C) 2014 INRIA Sophia Antipolis-Méditerranée (France)
+ * Copyright (C) 2014 INRIA
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -30,18 +30,25 @@
#include <iostream>
#include <fstream>
#include <map>
-#include <limits> // for numeric_limits<>
+#include <limits> // for numeric_limits
#include <string>
#include <vector>
+#include <utility> // for pair
+
+// Keep this file tag for Doxygen to parse the code, otherwise, functions are not documented.
+// It is required for global functions and variables.
+
+/** @file
+ * @brief This file includes common file readers for GUDHI
+ */
/**
- * \brief Read a set of points to turn it
- * into a vector< vector<double> > by filling points
+ * @brief Read a set of points to turn it into a vector< vector<double> > by filling points.
*
- * File format: 1 point per line
- * X11 X12 ... X1d
- * X21 X22 ... X2d
- * etc
+ * File format: 1 point per line<br>
+ * X11 X12 ... X1d<br>
+ * X21 X22 ... X2d<br>
+ * etc<br>
*/
inline void read_points(std::string file_name, std::vector< std::vector< double > > & points) {
std::ifstream in_file(file_name.c_str(), std::ios::in);
@@ -66,23 +73,29 @@ inline void read_points(std::string file_name, std::vector< std::vector< double
}
/**
- * \brief Read a graph from a file.
+ * @brief Read a graph from a file.
+ *
+ * \tparam Graph_t Type for the return graph. Must be constructible from iterators on pairs of Vertex_handle
+ * \tparam Filtration_value Type for the value of the read filtration
+ * \tparam Vertex_handle Type for the value of the read vertices
*
- * File format: 1 simplex per line
- * Dim1 X11 X12 ... X1d Fil1
- * Dim2 X21 X22 ... X2d Fil2
- * etc
+ * File format: 1 simplex per line<br>
+ * Dim1 X11 X12 ... X1d Fil1<br>
+ * Dim2 X21 X22 ... X2d Fil2<br>
+ * etc<br>
*
* The vertices must be labeled from 0 to n-1.
* Every simplex must appear exactly once.
* Simplices of dimension more than 1 are ignored.
*/
-inline Graph_t read_graph(std::string file_name) {
+template< typename Graph_t, typename Filtration_value, typename Vertex_handle >
+Graph_t read_graph(std::string file_name) {
std::ifstream in_(file_name.c_str(), std::ios::in);
if (!in_.is_open()) {
std::cerr << "Unable to open file " << file_name << std::endl;
}
+ typedef std::pair< Vertex_handle, Vertex_handle > Edge_t;
std::vector< Edge_t > edges;
std::vector< Filtration_value > edges_fil;
std::map< Vertex_handle, Filtration_value > vertices;
@@ -130,7 +143,7 @@ inline Graph_t read_graph(std::string file_name) {
Graph_t skel_graph(edges.begin(), edges.end(), edges_fil.begin(), vertices.size());
auto vertex_prop = boost::get(vertex_filtration_t(), skel_graph);
- boost::graph_traits<Graph_t>::vertex_iterator vi, vi_end;
+ typename boost::graph_traits<Graph_t>::vertex_iterator vi, vi_end;
auto v_it = vertices.begin();
for (std::tie(vi, vi_end) = boost::vertices(skel_graph); vi != vi_end; ++vi, ++v_it) {
boost::put(vertex_prop, *vi, v_it->second);
@@ -140,12 +153,12 @@ inline Graph_t read_graph(std::string file_name) {
}
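A minimal sketch of calling the now-templated read_graph. Graph_t mirrors the Boost adjacency_list typedef that this changeset removes from graph_simplicial_complex.h, and the input file name is hypothetical.

#include <gudhi/graph_simplicial_complex.h>  // for vertex_filtration_t / edge_filtration_t
#include <gudhi/reader_utils.h>

#include <boost/graph/adjacency_list.hpp>

using Vertex_handle = int;
using Filtration_value = double;
using Graph_t = boost::adjacency_list<boost::vecS, boost::vecS, boost::undirectedS,
    boost::property<vertex_filtration_t, Filtration_value>,
    boost::property<edge_filtration_t, Filtration_value> >;

int main() {
  Graph_t one_skeleton = read_graph<Graph_t, Filtration_value, Vertex_handle>("one_skeleton.fil");
  return 0;
}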
/**
- * \brief Read a face from a file.
+ * @brief Read a face from a file.
*
- * File format: 1 simplex per line
- * Dim1 X11 X12 ... X1d Fil1
- * Dim2 X21 X22 ... X2d Fil2
- * etc
+ * File format: 1 simplex per line<br>
+ * Dim1 X11 X12 ... X1d Fil1<br>
+ * Dim2 X21 X22 ... X2d Fil2<br>
+ * etc<br>
*
* The vertices must be labeled from 0 to n-1.
* Every simplex must appear exactly once.
@@ -166,18 +179,16 @@ bool read_simplex(std::istream & in_, std::vector< Vertex_handle > & simplex, Fi
}
/**
- * \brief Read a hasse simplex from a file.
- *
- * File format: 1 simplex per line
- * Dim1 k11 k12 ... k1Dim1 Fil1
- * Dim2 k21 k22 ... k2Dim2 Fil2
- * etc
- *
- * The key of a simplex is its position in the filtration order
- * and also the number of its row in the file.
- * Dimi ki1 ki2 ... kiDimi Fili means that the ith simplex in the
- * filtration has dimension Dimi, filtration value fil1 and simplices with
- * key ki1 ... kiDimi in its boundary.*/
+ * @brief Read a hasse simplex from a file.
+ *
+ * File format: 1 simplex per line<br>
+ * Dim1 k11 k12 ... k1Dim1 Fil1<br>
+ * Dim2 k21 k22 ... k2Dim2 Fil2<br>
+ * etc<br>
+ *
+ * The key of a simplex is its position in the filtration order and also the number of its row in the file.
+ * Dimi ki1 ki2 ... kiDimi Fili means that the ith simplex in the filtration has dimension Dimi, filtration value
+ * Fili and simplices with keys ki1 ... kiDimi in its boundary.*/
template< typename Simplex_key, typename Filtration_value >
bool read_hasse_simplex(std::istream & in_, std::vector< Simplex_key > & boundary, Filtration_value & fil) {
int dim;
@@ -195,4 +206,93 @@ bool read_hasse_simplex(std::istream & in_, std::vector< Simplex_key > & boundar
return true;
}
+/**
+ * @brief Read a lower triangular distance matrix from a csv file. We assume that the .csv file stores the whole
+ * (square) matrix.
+ *
+ * @author Pawel Dlotko
+ *
+ * Square matrix file format:<br>
+ * 0;D12;...;D1j<br>
+ * D21;0;...;D2j<br>
+ * ...<br>
+ * Dj1;Dj2;...;0<br>
+ *
+ * Lower triangular matrix file format:<br>
+ * 0<br>
+ * D21;<br>
+ * D31;D32;<br>
+ * ...<br>
+ * Dj1;Dj2;...;Dj(j-1);<br>
+ *
+ **/
+template< typename Filtration_value >
+std::vector< std::vector< Filtration_value > > read_lower_triangular_matrix_from_csv_file(const std::string& filename,
+ const char separator = ';') {
+#ifdef DEBUG_TRACES
+ std::cout << "Using procedure read_lower_triangular_matrix_from_csv_file \n";
+#endif // DEBUG_TRACES
+ std::vector< std::vector< Filtration_value > > result;
+ std::ifstream in;
+ in.open(filename.c_str());
+ if (!in.is_open()) {
+ return result;
+ }
+
+ std::string line;
+
+  // the first line is empty, so we ignore it:
+ std::getline(in, line);
+ std::vector< Filtration_value > values_in_this_line;
+ result.push_back(values_in_this_line);
+
+ int number_of_line = 0;
+
+ // first, read the file line by line to a string:
+ while (std::getline(in, line)) {
+ // if line is empty, break
+ if (line.size() == 0)
+ break;
+
+ // if the last element of a string is comma:
+ if (line[ line.size() - 1 ] == separator) {
+ // then shrink the string by one
+ line.pop_back();
+ }
+
+ // replace all commas with spaces
+ std::replace(line.begin(), line.end(), separator, ' ');
+
+ // put the new line to a stream
+ std::istringstream iss(line);
+ // and now read the doubles.
+
+ int number_of_entry = 0;
+ std::vector< Filtration_value > values_in_this_line;
+ while (iss.good()) {
+ double entry;
+ iss >> entry;
+ if (number_of_entry <= number_of_line) {
+ values_in_this_line.push_back(entry);
+ }
+ ++number_of_entry;
+ }
+    if (!values_in_this_line.empty()) result.push_back(values_in_this_line);
+ ++number_of_line;
+ }
+ in.close();
+
+#ifdef DEBUG_TRACES
+ std::cerr << "Here is the matrix we read : \n";
+ for (size_t i = 0; i != result.size(); ++i) {
+ for (size_t j = 0; j != result[i].size(); ++j) {
+ std::cerr << result[i][j] << " ";
+ }
+ std::cerr << std::endl;
+ }
+#endif // DEBUG_TRACES
+
+ return result;
+} // read_lower_triangular_matrix_from_csv_file
+
#endif // READER_UTILS_H_
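A minimal usage sketch of the CSV reader above; the file name is hypothetical and the separator matches the default declared in the signature.

#include <gudhi/reader_utils.h>

#include <iostream>
#include <vector>

int main() {
  std::vector<std::vector<double>> distances =
      read_lower_triangular_matrix_from_csv_file<double>("distance_matrix.csv", ';');
  std::cout << "Read " << distances.size() << " rows" << std::endl;
  return 0;
}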
diff --git a/src/common/include/gudhi_patches/Bottleneck_distance_CGAL_patches.txt b/src/common/include/gudhi_patches/Bottleneck_distance_CGAL_patches.txt
new file mode 100644
index 00000000..a588d113
--- /dev/null
+++ b/src/common/include/gudhi_patches/Bottleneck_distance_CGAL_patches.txt
@@ -0,0 +1,3 @@
+CGAL/Kd_tree.h
+CGAL/Kd_tree_node.h
+CGAL/Orthogonal_incremental_neighbor_search.h
diff --git a/src/common/include/gudhi_patches/CGAL/Convex_hull.h b/src/common/include/gudhi_patches/CGAL/Convex_hull.h
new file mode 100644
index 00000000..a8f91bf8
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/Convex_hull.h
@@ -0,0 +1,56 @@
+// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France).
+// All rights reserved.
+//
+// This file is part of CGAL (www.cgal.org).
+// You can redistribute it and/or modify it under the terms of the GNU
+// General Public License as published by the Free Software Foundation,
+// either version 3 of the License, or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Samuel Hornus
+
+/* RANDOM DESIGN IDEAS:
+- Use a policy tag to choose for incremental with inserts only or
+ incremental with removals and inserts.
+ In the first case: use Triangulation for storage.
+ In the second case: use Delaunay !
+ In this second case, we must keeps the points that are inserted in the hull,
+ as they may become part of the boundary later on, when some points are removed.
+- Constructor with range argument uses quickhull.
+*/
+
+#ifndef CGAL_CONVEX_HULL_H
+#define CGAL_CONVEX_HULL_H
+
+namespace CGAL {
+
+template < class CHTraits, class TDS_ = Default >
+class Convex_hull
+{
+ typedef typename Maximal_dimension<typename CHTraits::Point_d>::type
+ Maximal_dimension_;
+ typedef typename Default::Get<TDS_, Triangulation_data_structure
+ < Maximal_dimension_,
+ Triangulation_vertex<CHTraits>,
+ Triangulation_full_cell<CHTraits> >
+ >::type TDS;
+ typedef Convex_hull<CHTraits, TDS_> Self;
+
+ typedef typename CHTraits::Coaffine_orientation_d
+ Coaffine_orientation_d;
+ typedef typename CHTraits::Orientation_d Orientation_d;
+
+public:
+};
+
+} //namespace CGAL
+
+#endif // CGAL_CONVEX_HULL_H
diff --git a/src/common/include/gudhi_patches/CGAL/Delaunay_triangulation.h b/src/common/include/gudhi_patches/CGAL/Delaunay_triangulation.h
new file mode 100644
index 00000000..071cd184
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/Delaunay_triangulation.h
@@ -0,0 +1,933 @@
+// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France).
+// All rights reserved.
+//
+// This file is part of CGAL (www.cgal.org).
+// You can redistribute it and/or modify it under the terms of the GNU
+// General Public License as published by the Free Software Foundation,
+// either version 3 of the License, or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Samuel Hornus
+
+#ifndef CGAL_DELAUNAY_COMPLEX_H
+#define CGAL_DELAUNAY_COMPLEX_H
+
+#include <CGAL/tss.h>
+#include <CGAL/Triangulation.h>
+#include <CGAL/Dimension.h>
+#include <CGAL/Default.h>
+
+#include <boost/iterator/transform_iterator.hpp>
+
+#include <algorithm>
+
+namespace CGAL {
+
+template< typename DCTraits, typename _TDS = Default >
+class Delaunay_triangulation
+: public Triangulation<DCTraits,
+ typename Default::Get<_TDS, Triangulation_data_structure<
+ typename DCTraits::Dimension,
+ Triangulation_vertex<DCTraits>,
+ Triangulation_full_cell<DCTraits> >
+ >::type >
+{
+ typedef typename DCTraits::Dimension Maximal_dimension_;
+ typedef typename Default::Get<_TDS, Triangulation_data_structure<
+ Maximal_dimension_,
+ Triangulation_vertex<DCTraits>,
+ Triangulation_full_cell<DCTraits> >
+ >::type TDS;
+ typedef Triangulation<DCTraits, TDS> Base;
+ typedef Delaunay_triangulation<DCTraits, _TDS> Self;
+
+ typedef typename DCTraits::Side_of_oriented_sphere_d
+ Side_of_oriented_sphere_d;
+ typedef typename DCTraits::Orientation_d Orientation_d;
+
+public: // PUBLIC NESTED TYPES
+
+ typedef DCTraits Geom_traits;
+ typedef typename Base::Triangulation_ds Triangulation_ds;
+
+ typedef typename Base::Vertex Vertex;
+ typedef typename Base::Full_cell Full_cell;
+ typedef typename Base::Facet Facet;
+ typedef typename Base::Face Face;
+
+ typedef typename Base::Maximal_dimension Maximal_dimension;
+ typedef typename DCTraits::Point_d Point;
+ typedef typename DCTraits::Point_d Point_d;
+
+ typedef typename Base::Vertex_handle Vertex_handle;
+ typedef typename Base::Vertex_iterator Vertex_iterator;
+ typedef typename Base::Vertex_const_handle Vertex_const_handle;
+ typedef typename Base::Vertex_const_iterator Vertex_const_iterator;
+
+ typedef typename Base::Full_cell_handle Full_cell_handle;
+ typedef typename Base::Full_cell_iterator Full_cell_iterator;
+ typedef typename Base::Full_cell_const_handle Full_cell_const_handle;
+ typedef typename Base::Full_cell_const_iterator Full_cell_const_iterator;
+ typedef typename Base::Finite_full_cell_const_iterator
+ Finite_full_cell_const_iterator;
+
+ typedef typename Base::size_type size_type;
+ typedef typename Base::difference_type difference_type;
+
+ typedef typename Base::Locate_type Locate_type;
+
+ //Tag to distinguish triangulations with weighted_points
+ typedef Tag_false Weighted_tag;
+
+protected: // DATA MEMBERS
+
+
+public:
+
+ using typename Base::Rotor;
+ using Base::maximal_dimension;
+ using Base::are_incident_full_cells_valid;
+ using Base::coaffine_orientation_predicate;
+ using Base::reset_flat_orientation;
+ using Base::current_dimension;
+ //using Base::star;
+ //using Base::incident_full_cells;
+ using Base::geom_traits;
+ using Base::index_of_covertex;
+ //using Base::index_of_second_covertex;
+ using Base::infinite_vertex;
+ using Base::rotate_rotor;
+ using Base::insert_in_hole;
+ using Base::insert_outside_convex_hull_1;
+ using Base::is_infinite;
+ using Base::locate;
+ using Base::points_begin;
+ using Base::set_neighbors;
+ using Base::new_full_cell;
+ using Base::number_of_vertices;
+ using Base::orientation;
+ using Base::tds;
+ using Base::reorient_full_cells;
+ using Base::full_cell;
+ using Base::full_cells_begin;
+ using Base::full_cells_end;
+ using Base::finite_full_cells_begin;
+ using Base::finite_full_cells_end;
+ using Base::vertices_begin;
+ using Base::vertices_end;
+ // using Base::
+
+private:
+ //*** Side_of_oriented_subsphere_d ***
+ typedef typename Base::Flat_orientation_d Flat_orientation_d;
+ typedef typename Base::Construct_flat_orientation_d Construct_flat_orientation_d;
+ typedef typename DCTraits::In_flat_side_of_oriented_sphere_d In_flat_side_of_oriented_sphere_d;
+ // Wrapper
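+  // (constructs the flat orientation lazily on the first call and caches it in
+  // the boost::optional pointed to by fop)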
+ struct Side_of_oriented_subsphere_d
+ {
+ boost::optional<Flat_orientation_d>* fop;
+ Construct_flat_orientation_d cfo;
+ In_flat_side_of_oriented_sphere_d ifsoos;
+
+ Side_of_oriented_subsphere_d(
+ boost::optional<Flat_orientation_d>& x,
+ Construct_flat_orientation_d const&y,
+ In_flat_side_of_oriented_sphere_d const&z)
+ : fop(&x), cfo(y), ifsoos(z) {}
+
+ template<class Iter>
+ CGAL::Orientation operator()(Iter a, Iter b, const Point & p)const
+ {
+ if(!*fop)
+ *fop=cfo(a,b);
+ return ifsoos(fop->get(),a,b,p);
+ }
+ };
+public:
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - CREATION / CONSTRUCTORS
+
+ Delaunay_triangulation(int dim, const Geom_traits &k = Geom_traits())
+ : Base(dim, k)
+ {
+ }
+
+  // With this constructor,
+  // the user can specify a Flat_orientation_d object to be used for
+  // orienting simplices of a specific dimension
+  // (= preset_flat_orientation_.first).
+  // It is used by the dark triangulations created by DT::remove.
+ Delaunay_triangulation(
+ int dim,
+ const std::pair<int, const Flat_orientation_d *> &preset_flat_orientation,
+ const Geom_traits &k = Geom_traits())
+ : Base(dim, preset_flat_orientation, k)
+ {
+ }
+
+ ~Delaunay_triangulation() {}
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ACCESS
+
+ // Not Documented
+ Side_of_oriented_subsphere_d side_of_oriented_subsphere_predicate() const
+ {
+ return Side_of_oriented_subsphere_d (
+ flat_orientation_,
+ geom_traits().construct_flat_orientation_d_object(),
+ geom_traits().in_flat_side_of_oriented_sphere_d_object()
+ );
+ }
+
+
+ // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - REMOVALS
+
+ Full_cell_handle remove(Vertex_handle);
+ Full_cell_handle remove(const Point & p, Full_cell_handle hint = Full_cell_handle())
+ {
+ Locate_type lt;
+ Face f(maximal_dimension());
+ Facet ft;
+ Full_cell_handle s = locate(p, lt, f, ft, hint);
+ if( Base::ON_VERTEX == lt )
+ {
+ return remove(s->vertex(f.index(0)));
+ }
+ return Full_cell_handle();
+ }
+
+ template< typename ForwardIterator >
+ void remove(ForwardIterator start, ForwardIterator end)
+ {
+ while( start != end )
+ remove(*start++);
+ }
+
+ // Not documented
+ void remove_decrease_dimension(Vertex_handle);
+
+ // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - INSERTIONS
+
+ template< typename ForwardIterator >
+ size_type insert(ForwardIterator start, ForwardIterator end)
+ {
+ size_type n = number_of_vertices();
+ std::vector<Point> points(start, end);
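+    // Spatially sort the points so that consecutive insertions land close to
+    // each other and the previous full cell can be reused as a location hint.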
+ spatial_sort(points.begin(), points.end(), geom_traits());
+ Full_cell_handle hint;
+ for( typename std::vector<Point>::const_iterator p = points.begin(); p != points.end(); ++p )
+ {
+ hint = insert(*p, hint)->full_cell();
+ }
+ return number_of_vertices() - n;
+ }
+ Vertex_handle insert(const Point &, Locate_type, const Face &, const Facet &, Full_cell_handle);
+ Vertex_handle insert(const Point & p, Full_cell_handle start = Full_cell_handle())
+ {
+ Locate_type lt;
+ Face f(maximal_dimension());
+ Facet ft;
+ Full_cell_handle s = locate(p, lt, f, ft, start);
+ return insert(p, lt, f, ft, s);
+ }
+ Vertex_handle insert(const Point & p, Vertex_handle hint)
+ {
+ CGAL_assertion( Vertex_handle() != hint );
+ return insert(p, hint->full_cell());
+ }
+ Vertex_handle insert_outside_affine_hull(const Point &);
+ Vertex_handle insert_in_conflicting_cell(const Point &, Full_cell_handle);
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - GATHERING CONFLICTING SIMPLICES
+
+ bool is_in_conflict(const Point &, Full_cell_const_handle) const;
+ template< class OrientationPredicate >
+ Oriented_side perturbed_side_of_positive_sphere(const Point &,
+ Full_cell_const_handle, const OrientationPredicate &) const;
+
+ template< typename OutputIterator >
+ Facet compute_conflict_zone(const Point &, Full_cell_handle, OutputIterator) const;
+
+ template < typename OrientationPredicate, typename SideOfOrientedSpherePredicate >
+ class Conflict_predicate
+ {
+ const Self & dc_;
+ const Point & p_;
+ OrientationPredicate ori_;
+ SideOfOrientedSpherePredicate side_of_s_;
+ int cur_dim_;
+ public:
+ Conflict_predicate(
+ const Self & dc,
+ const Point & p,
+ const OrientationPredicate & ori,
+ const SideOfOrientedSpherePredicate & side)
+ : dc_(dc), p_(p), ori_(ori), side_of_s_(side), cur_dim_(dc.current_dimension()) {}
+
+ inline
+ bool operator()(Full_cell_const_handle s) const
+ {
+ bool ok;
+ if( ! dc_.is_infinite(s) )
+ {
+ Oriented_side side = side_of_s_(dc_.points_begin(s), dc_.points_begin(s) + cur_dim_ + 1, p_);
+ if( ON_POSITIVE_SIDE == side )
+ ok = true;
+ else if( ON_NEGATIVE_SIDE == side )
+ ok = false;
+ else
+ ok = ON_POSITIVE_SIDE == dc_.perturbed_side_of_positive_sphere<OrientationPredicate>(p_, s, ori_);
+ }
+ else
+ {
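+      // Infinite full cell: substitute p for the infinite vertex and test the
+      // orientation of the resulting simplex; in the degenerate case, defer to
+      // the neighbor opposite the infinite vertex.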
+ typedef typename Full_cell::Vertex_handle_const_iterator VHCI;
+ typedef Substitute_point_in_vertex_iterator<VHCI> F;
+ F spivi(dc_.infinite_vertex(), &p_);
+
+ Orientation o = ori_(
+ boost::make_transform_iterator(s->vertices_begin(), spivi),
+ boost::make_transform_iterator(s->vertices_begin() + cur_dim_ + 1,
+ spivi));
+
+ if( POSITIVE == o )
+ ok = true;
+ else if( o == NEGATIVE )
+ ok = false;
+ else
+ ok = (*this)(s->neighbor( s->index( dc_.infinite_vertex() ) ));
+ }
+ return ok;
+ }
+ };
+
+ template < typename ConflictPredicate >
+ class Conflict_traversal_predicate
+ {
+ const Self & dc_;
+ const ConflictPredicate & pred_;
+ public:
+ Conflict_traversal_predicate(const Self & dc, const ConflictPredicate & pred)
+ : dc_(dc), pred_(pred)
+ {}
+ inline
+ bool operator()(const Facet & f) const
+ {
+ return pred_(dc_.full_cell(f)->neighbor(dc_.index_of_covertex(f)));
+ }
+ };
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - VALIDITY
+
+ bool is_valid(bool verbose = false, int level = 0) const;
+
+private:
+ // Some internal types to shorten notation
+ typedef typename Base::Coaffine_orientation_d Coaffine_orientation_d;
+ using Base::flat_orientation_;
+ typedef Conflict_predicate<Coaffine_orientation_d, Side_of_oriented_subsphere_d>
+ Conflict_pred_in_subspace;
+ typedef Conflict_predicate<Orientation_d, Side_of_oriented_sphere_d>
+ Conflict_pred_in_fullspace;
+ typedef Conflict_traversal_predicate<Conflict_pred_in_subspace>
+ Conflict_traversal_pred_in_subspace;
+ typedef Conflict_traversal_predicate<Conflict_pred_in_fullspace>
+ Conflict_traversal_pred_in_fullspace;
+};
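+
+// Illustrative usage sketch (not part of the original header; the kernel chosen
+// below is an assumption -- any model of the traits providing the required
+// predicates works):
+//   typedef CGAL::Epick_d<CGAL::Dynamic_dimension_tag> K;
+//   CGAL::Delaunay_triangulation<K> dt(4);        // ambient dimension 4
+//   std::vector<K::Point_d> points = ...;         // fill with 4-dimensional points
+//   dt.insert(points.begin(), points.end());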
+
+// = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
+// FUNCTIONS THAT ARE MEMBER METHODS:
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - REMOVALS
+
+template< typename DCTraits, typename TDS >
+typename Delaunay_triangulation<DCTraits, TDS>::Full_cell_handle
+Delaunay_triangulation<DCTraits, TDS>
+::remove( Vertex_handle v )
+{
+ CGAL_precondition( ! is_infinite(v) );
+ CGAL_expensive_precondition( is_vertex(v) );
+
+ // THE CASE cur_dim == 0
+ if( 0 == current_dimension() )
+ {
+ remove_decrease_dimension(v);
+ return Full_cell_handle();
+ }
+ else if( 1 == current_dimension() )
+ { // THE CASE cur_dim == 1
+ if( 2 == number_of_vertices() )
+ {
+ remove_decrease_dimension(v);
+ return Full_cell_handle();
+ }
+ Full_cell_handle left = v->full_cell();
+ if( 0 == left->index(v) )
+ left = left->neighbor(1);
+ CGAL_assertion( 1 == left->index(v) );
+ Full_cell_handle right = left->neighbor(0);
+
+ tds().associate_vertex_with_full_cell(left, 1, right->vertex(1));
+ set_neighbors(left, 0, right->neighbor(0), right->mirror_index(0));
+
+ tds().delete_vertex(v);
+ tds().delete_full_cell(right);
+ return left;
+ }
+
+ // THE CASE cur_dim >= 2
+ // Gather the finite vertices sharing an edge with |v|
+ typedef typename Base::template Full_cell_set<Full_cell_handle> Simplices;
+ Simplices simps;
+ std::back_insert_iterator<Simplices> out(simps);
+ tds().incident_full_cells(v, out);
+ typedef std::set<Vertex_handle> Vertex_set;
+ Vertex_set verts;
+ Vertex_handle vh;
+ for( typename Simplices::iterator it = simps.begin(); it != simps.end(); ++it )
+ for( int i = 0; i <= current_dimension(); ++i )
+ {
+ vh = (*it)->vertex(i);
+ if( is_infinite(vh) )
+ continue;
+ if( vh == v )
+ continue;
+ verts.insert(vh);
+ }
+
+ // After gathering finite neighboring vertices, create their Dark Delaunay triangulation
+ typedef Triangulation_vertex<Geom_traits, Vertex_handle> Dark_vertex_base;
+ typedef Triangulation_full_cell<Geom_traits,
+ internal::Triangulation::Dark_full_cell_data<Self> > Dark_full_cell_base;
+ typedef Triangulation_data_structure<Maximal_dimension, Dark_vertex_base, Dark_full_cell_base> Dark_tds;
+ typedef Delaunay_triangulation<DCTraits, Dark_tds> Dark_triangulation;
+ typedef typename Dark_triangulation::Face Dark_face;
+ typedef typename Dark_triangulation::Facet Dark_facet;
+ typedef typename Dark_triangulation::Vertex_handle Dark_v_handle;
+ typedef typename Dark_triangulation::Full_cell_handle Dark_s_handle;
+
+ // If flat_orientation_ is defined, we give it the Dark triangulation
+ // so that the orientation it uses for "current_dimension()"-simplices is
+ // coherent with the global triangulation
+ Dark_triangulation dark_side(
+ maximal_dimension(),
+ flat_orientation_ ?
+ std::pair<int, const Flat_orientation_d *>(current_dimension(), flat_orientation_.get_ptr())
+ : std::pair<int, const Flat_orientation_d *>((std::numeric_limits<int>::max)(), (Flat_orientation_d*) NULL) );
+
+ Dark_s_handle dark_s;
+ Dark_v_handle dark_v;
+ typedef std::map<Vertex_handle, Dark_v_handle> Vertex_map;
+ Vertex_map light_to_dark;
+ typename Vertex_set::iterator vit = verts.begin();
+ while( vit != verts.end() )
+ {
+ dark_v = dark_side.insert((*vit)->point(), dark_s);
+ dark_s = dark_v->full_cell();
+ dark_v->data() = *vit;
+ light_to_dark[*vit] = dark_v;
+ ++vit;
+ }
+
+ if( dark_side.current_dimension() != current_dimension() )
+ {
+ CGAL_assertion( dark_side.current_dimension() + 1 == current_dimension() );
+    // Here, the finite neighbors of |v| span an affine subspace of
+    // dimension one less than the current dimension. Two cases are possible:
+ if( (size_type)(verts.size() + 1) == number_of_vertices() )
+ {
+ remove_decrease_dimension(v);
+ return Full_cell_handle();
+ }
+ else
+ { // |v| is strictly outside the convex hull of the rest of the points. This is an
+ // easy case: first, modify the finite full_cells, then, delete the infinite ones.
+ // We don't even need the Dark triangulation.
+ Simplices infinite_simps;
+ {
+ Simplices finite_simps;
+ for( typename Simplices::iterator it = simps.begin(); it != simps.end(); ++it )
+ if( is_infinite(*it) )
+ infinite_simps.push_back(*it);
+ else
+ finite_simps.push_back(*it);
+ simps.swap(finite_simps);
+ } // now, simps only contains finite simplices
+ // First, modify the finite full_cells:
+ for( typename Simplices::iterator it = simps.begin(); it != simps.end(); ++it )
+ {
+ int v_idx = (*it)->index(v);
+ tds().associate_vertex_with_full_cell(*it, v_idx, infinite_vertex());
+ }
+ // Make the handles to infinite full cells searchable
+ infinite_simps.make_searchable();
+ // Then, modify the neighboring relation
+ for( typename Simplices::iterator it = simps.begin(); it != simps.end(); ++it )
+ {
+ for( int i = 0; i <= current_dimension(); ++i )
+ {
+ if (is_infinite((*it)->vertex(i)))
+ continue;
+ (*it)->vertex(i)->set_full_cell(*it);
+ Full_cell_handle n = (*it)->neighbor(i);
+ // Was |n| a finite full cell prior to removing |v| ?
+ if( ! infinite_simps.contains(n) )
+ continue;
+ int n_idx = n->index(v);
+ set_neighbors(*it, i, n->neighbor(n_idx), n->neighbor(n_idx)->index(n));
+ }
+ }
+ Full_cell_handle ret_s;
+ // Then, we delete the infinite full_cells
+ for( typename Simplices::iterator it = infinite_simps.begin(); it != infinite_simps.end(); ++it )
+ tds().delete_full_cell(*it);
+ tds().delete_vertex(v);
+ return simps.front();
+ }
+ }
+ else // From here on, dark_side.current_dimension() == current_dimension()
+ {
+ dark_side.infinite_vertex()->data() = infinite_vertex();
+ light_to_dark[infinite_vertex()] = dark_side.infinite_vertex();
+ }
+
+ // Now, compute the conflict zone of v->point() in
+ // the dark side. This is precisely the set of full_cells
+ // that we have to glue back into the light side.
+ Dark_face dark_f(dark_side.maximal_dimension());
+ Dark_facet dark_ft;
+ typename Dark_triangulation::Locate_type lt;
+ dark_s = dark_side.locate(v->point(), lt, dark_f, dark_ft);
+ CGAL_assertion( lt != Dark_triangulation::ON_VERTEX
+ && lt != Dark_triangulation::OUTSIDE_AFFINE_HULL );
+
+ // |ret_s| is the full_cell that we return
+ Dark_s_handle dark_ret_s = dark_s;
+ Full_cell_handle ret_s;
+
+ typedef typename Base::template Full_cell_set<Dark_s_handle> Dark_full_cells;
+ Dark_full_cells conflict_zone;
+ std::back_insert_iterator<Dark_full_cells> dark_out(conflict_zone);
+
+ dark_ft = dark_side.compute_conflict_zone(v->point(), dark_s, dark_out);
+ // Make the dark simplices in the conflict zone searchable
+ conflict_zone.make_searchable();
+
+ // THE FOLLOWING SHOULD MAYBE GO IN TDS.
+ // Here is the plan:
+ // 1. Pick any Facet from boundary of the light zone
+ // 2. Find corresponding Facet on boundary of dark zone
+ // 3. stitch.
+
+  // 1. Build a facet on the boundary of the light zone:
+ Full_cell_handle light_s = *simps.begin();
+ Facet light_ft(light_s, light_s->index(v));
+
+ // 2. Find corresponding Dark_facet on boundary of the dark zone
+ Dark_full_cells dark_incident_s;
+ for( int i = 0; i <= current_dimension(); ++i )
+ {
+ if( index_of_covertex(light_ft) == i )
+ continue;
+ Dark_v_handle dark_v = light_to_dark[full_cell(light_ft)->vertex(i)];
+ dark_incident_s.clear();
+ dark_out = std::back_inserter(dark_incident_s);
+ dark_side.tds().incident_full_cells(dark_v, dark_out);
+ for( typename Dark_full_cells::iterator it = dark_incident_s.begin(); it != dark_incident_s.end(); ++it )
+ {
+ (*it)->data().count_ += 1;
+ }
+ }
+
+ for( typename Dark_full_cells::iterator it = dark_incident_s.begin(); it != dark_incident_s.end(); ++it )
+ {
+ if( current_dimension() != (*it)->data().count_ )
+ continue;
+ if( ! conflict_zone.contains(*it) )
+ continue;
+ // We found a full_cell incident to the dark facet corresponding to the light facet |light_ft|
+ int ft_idx = 0;
+ while( light_s->has_vertex( (*it)->vertex(ft_idx)->data() ) )
+ ++ft_idx;
+ dark_ft = Dark_facet(*it, ft_idx);
+ break;
+ }
+  // Pre-3. Now, we are ready to traverse both boundaries and do the stitching.
+
+ // But first, we create the new full_cells in the light triangulation,
+ // with as much adjacency information as possible.
+
+ // Create new full_cells with vertices
+ for( typename Dark_full_cells::iterator it = conflict_zone.begin(); it != conflict_zone.end(); ++it )
+ {
+ Full_cell_handle new_s = new_full_cell();
+ (*it)->data().light_copy_ = new_s;
+ for( int i = 0; i <= current_dimension(); ++i )
+ tds().associate_vertex_with_full_cell(new_s, i, (*it)->vertex(i)->data());
+ if( dark_ret_s == *it )
+ ret_s = new_s;
+ }
+
+ // Setup adjacencies inside the hole
+ for( typename Dark_full_cells::iterator it = conflict_zone.begin(); it != conflict_zone.end(); ++it )
+ {
+ Full_cell_handle new_s = (*it)->data().light_copy_;
+ for( int i = 0; i <= current_dimension(); ++i )
+ if( conflict_zone.contains((*it)->neighbor(i)) )
+ tds().set_neighbors(new_s, i, (*it)->neighbor(i)->data().light_copy_, (*it)->mirror_index(i));
+ }
+
+ // 3. Stitch
+ simps.make_searchable();
+ typedef std::queue<std::pair<Facet, Dark_facet> > Queue;
+ Queue q;
+ q.push(std::make_pair(light_ft, dark_ft));
+ dark_s = dark_side.full_cell(dark_ft);
+ int dark_i = dark_side.index_of_covertex(dark_ft);
+ // mark dark_ft as visited:
+ // TODO try by marking with Dark_v_handle (vertex)
+ dark_s->neighbor(dark_i)->set_neighbor(dark_s->mirror_index(dark_i), Dark_s_handle());
+ while( ! q.empty() )
+ {
+ std::pair<Facet, Dark_facet> p = q.front();
+ q.pop();
+ light_ft = p.first;
+ dark_ft = p.second;
+ light_s = full_cell(light_ft);
+ int light_i = index_of_covertex(light_ft);
+ dark_s = dark_side.full_cell(dark_ft);
+ int dark_i = dark_side.index_of_covertex(dark_ft);
+ Full_cell_handle light_n = light_s->neighbor(light_i);
+ set_neighbors(dark_s->data().light_copy_, dark_i, light_n, light_s->mirror_index(light_i));
+ for( int di = 0; di <= current_dimension(); ++di )
+ {
+ if( di == dark_i )
+ continue;
+ int li = light_s->index(dark_s->vertex(di)->data());
+ Rotor light_r(light_s, li, light_i);
+ typename Dark_triangulation::Rotor dark_r(dark_s, di, dark_i);
+
+ while (simps.contains(cpp11::get<0>(light_r)->neighbor(cpp11::get<1>(light_r))))
+ light_r = rotate_rotor(light_r);
+
+ while (conflict_zone.contains(cpp11::get<0>(dark_r)->neighbor(cpp11::get<1>(dark_r))))
+ dark_r = dark_side.rotate_rotor(dark_r);
+
+ Dark_s_handle dark_ns = cpp11::get<0>(dark_r);
+ int dark_ni = cpp11::get<1>(dark_r);
+ Full_cell_handle light_ns = cpp11::get<0>(light_r);
+ int light_ni = cpp11::get<1>(light_r);
+ // mark dark_r as visited:
+ // TODO try by marking with Dark_v_handle (vertex)
+ Dark_s_handle outside = dark_ns->neighbor(dark_ni);
+ Dark_v_handle mirror = dark_ns->mirror_vertex(dark_ni, current_dimension());
+ int dn = outside->index(mirror);
+ if( Dark_s_handle() == outside->neighbor(dn) )
+ continue;
+ outside->set_neighbor(dn, Dark_s_handle());
+ q.push(std::make_pair(Facet(light_ns, light_ni), Dark_facet(dark_ns, dark_ni)));
+ }
+ }
+ tds().delete_full_cells(simps.begin(), simps.end());
+ tds().delete_vertex(v);
+ return ret_s;
+}
+
+template< typename DCTraits, typename TDS >
+void
+Delaunay_triangulation<DCTraits, TDS>
+::remove_decrease_dimension(Vertex_handle v)
+{
+ CGAL_precondition( current_dimension() >= 0 );
+ tds().remove_decrease_dimension(v, infinite_vertex());
+ // reset the predicates:
+ reset_flat_orientation();
+ if( 1 <= current_dimension() )
+ {
+ Full_cell_handle inf_v_cell = infinite_vertex()->full_cell();
+ int inf_v_index = inf_v_cell->index(infinite_vertex());
+ Full_cell_handle s = inf_v_cell->neighbor(inf_v_index);
+ Orientation o = orientation(s);
+ CGAL_assertion( ZERO != o );
+ if( NEGATIVE == o )
+ reorient_full_cells();
+ }
+}
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - INSERTIONS
+
+template< typename DCTraits, typename TDS >
+typename Delaunay_triangulation<DCTraits, TDS>::Vertex_handle
+Delaunay_triangulation<DCTraits, TDS>
+::insert(const Point & p, Locate_type lt, const Face & f, const Facet &, Full_cell_handle s)
+{
+ switch( lt )
+ {
+ case Base::OUTSIDE_AFFINE_HULL:
+ return insert_outside_affine_hull(p);
+ break;
+ case Base::ON_VERTEX:
+ {
+ Vertex_handle v = s->vertex(f.index(0));
+ v->set_point(p);
+ return v;
+ break;
+ }
+ default:
+ if( 1 == current_dimension() )
+ {
+ if( Base::OUTSIDE_CONVEX_HULL == lt )
+ {
+ return insert_outside_convex_hull_1(p, s);
+ }
+ Vertex_handle v = tds().insert_in_full_cell(s);
+ v->set_point(p);
+ return v;
+ }
+ else
+ return insert_in_conflicting_cell(p, s);
+ break;
+ }
+}
+
+/*
+[Undocumented function]
+
+Inserts the point `p` in the Delaunay triangulation. Returns a handle to the
+(possibly newly created) vertex at that position.
+\pre The point `p`
+must lie outside the affine hull of the Delaunay triangulation. This implies that
+`dt`.`current_dimension()` must be less than `dt`.`maximal_dimension()`.
+*/
+template< typename DCTraits, typename TDS >
+typename Delaunay_triangulation<DCTraits, TDS>::Vertex_handle
+Delaunay_triangulation<DCTraits, TDS>
+::insert_outside_affine_hull(const Point & p)
+{
+ // we don't use Base::insert_outside_affine_hull(...) because here, we
+ // also need to reset the side_of_oriented_subsphere functor.
+ CGAL_precondition( current_dimension() < maximal_dimension() );
+ Vertex_handle v = tds().insert_increase_dimension(infinite_vertex());
+ // reset the predicates:
+ reset_flat_orientation();
+ v->set_point(p);
+ if( current_dimension() >= 1 )
+ {
+ Full_cell_handle inf_v_cell = infinite_vertex()->full_cell();
+ int inf_v_index = inf_v_cell->index(infinite_vertex());
+ Full_cell_handle s = inf_v_cell->neighbor(inf_v_index);
+ Orientation o = orientation(s);
+ CGAL_assertion( ZERO != o );
+ if( NEGATIVE == o )
+ reorient_full_cells();
+
+    // We just inserted the second finite point and the infinite
+    // cell on the right is currently (inf_v, v), but we want it to be
+    // (v, inf_v) to be consistent with the rest of the cells.
+ if (current_dimension() == 1)
+ {
+ // Is "inf_v_cell" the right infinite cell?
+ // Then inf_v_index should be 1
+ if (inf_v_cell->neighbor(inf_v_index)->index(inf_v_cell) == 0
+ && inf_v_index == 0)
+ {
+ inf_v_cell->swap_vertices(
+ current_dimension() - 1, current_dimension());
+ }
+ // Otherwise, let's find the right infinite cell
+ else
+ {
+ inf_v_cell = inf_v_cell->neighbor((inf_v_index + 1) % 2);
+ inf_v_index = inf_v_cell->index(infinite_vertex());
+ // Is "inf_v_cell" the right infinite cell?
+ // Then inf_v_index should be 1
+ if (inf_v_cell->neighbor(inf_v_index)->index(inf_v_cell) == 0
+ && inf_v_index == 0)
+ {
+ inf_v_cell->swap_vertices(
+ current_dimension() - 1, current_dimension());
+ }
+ }
+ }
+ }
+ return v;
+}
+
+/*!
+[Undocumented function]
+
+Inserts the point `p` in the Delaunay triangulation. Returns a handle to the
+(possibly newly created) vertex at that position.
+\pre The point `p` must be in conflict with the full cell `c`.
+*/
+template< typename DCTraits, typename TDS >
+typename Delaunay_triangulation<DCTraits, TDS>::Vertex_handle
+Delaunay_triangulation<DCTraits, TDS>
+::insert_in_conflicting_cell(const Point & p, Full_cell_handle s)
+{
+ CGAL_precondition(is_in_conflict(p, s));
+
+ // for storing conflicting full_cells.
+ typedef std::vector<Full_cell_handle> Full_cell_h_vector;
+ CGAL_STATIC_THREAD_LOCAL_VARIABLE(Full_cell_h_vector,cs,0);
+ cs.clear();
+
+ std::back_insert_iterator<Full_cell_h_vector> out(cs);
+ Facet ft = compute_conflict_zone(p, s, out);
+ return insert_in_hole(p, cs.begin(), cs.end(), ft);
+}
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - GATHERING CONFLICTING SIMPLICES
+
+// NOT DOCUMENTED
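+// Symbolic perturbation: the points of |s| together with |p| are ranked in a
+// canonical order; the highest-ranked point is dropped in turn until an
+// orientation test on the remaining points is non-zero, and that sign
+// (with the appropriate adjustment) decides the side.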
+template< typename DCTraits, typename TDS >
+template< typename OrientationPred >
+Oriented_side
+Delaunay_triangulation<DCTraits, TDS>
+::perturbed_side_of_positive_sphere(const Point & p, Full_cell_const_handle s,
+ const OrientationPred & ori) const
+{
+ CGAL_precondition_msg( ! is_infinite(s), "full cell must be finite");
+ CGAL_expensive_precondition( POSITIVE == orientation(s) );
+ typedef std::vector<const Point *> Points;
+ Points points(current_dimension() + 2);
+ int i(0);
+ for( ; i <= current_dimension(); ++i )
+ points[i] = &(s->vertex(i)->point());
+ points[i] = &p;
+ std::sort(points.begin(), points.end(),
+ internal::Triangulation::Compare_points_for_perturbation<Self>(*this));
+ typename Points::const_reverse_iterator cut_pt = points.rbegin();
+ Points test_points;
+ while( cut_pt != points.rend() )
+ {
+ if( &p == *cut_pt )
+ // because the full_cell "s" is assumed to be positively oriented
+ return ON_NEGATIVE_SIDE; // we consider |p| to lie outside the sphere
+ test_points.clear();
+ typename Base::Point_const_iterator spit = points_begin(s);
+ int adjust_sign = -1;
+ for( i = 0; i < current_dimension(); ++i )
+ {
+ if( &(*spit) == *cut_pt )
+ {
+ ++spit;
+ adjust_sign = (((current_dimension() + i) % 2) == 0) ? -1 : +1;
+ }
+ test_points.push_back(&(*spit));
+ ++spit;
+ }
+ test_points.push_back(&p);
+
+ typedef typename CGAL::Iterator_project<typename Points::iterator,
+ internal::Triangulation::Point_from_pointer<Self>,
+ const Point &, const Point *> Point_pointer_iterator;
+
+ Orientation ori_value = ori(
+ Point_pointer_iterator(test_points.begin()),
+ Point_pointer_iterator(test_points.end()));
+
+ if( ZERO != ori_value )
+ return Oriented_side( - adjust_sign * ori_value );
+
+ ++cut_pt;
+ }
+ CGAL_assertion(false); // we should never reach here
+ return ON_NEGATIVE_SIDE;
+}
+
+template< typename DCTraits, typename TDS >
+bool
+Delaunay_triangulation<DCTraits, TDS>
+::is_in_conflict(const Point & p, Full_cell_const_handle s) const
+{
+ CGAL_precondition( 2 <= current_dimension() );
+ if( current_dimension() < maximal_dimension() )
+ {
+ Conflict_pred_in_subspace c(*this, p, coaffine_orientation_predicate(), side_of_oriented_subsphere_predicate());
+ return c(s);
+ }
+ else
+ {
+ Orientation_d ori = geom_traits().orientation_d_object();
+ Side_of_oriented_sphere_d side = geom_traits().side_of_oriented_sphere_d_object();
+ Conflict_pred_in_fullspace c(*this, p, ori, side);
+ return c(s);
+ }
+}
+
+template< typename DCTraits, typename TDS >
+template< typename OutputIterator >
+typename Delaunay_triangulation<DCTraits, TDS>::Facet
+Delaunay_triangulation<DCTraits, TDS>
+::compute_conflict_zone(const Point & p, Full_cell_handle s, OutputIterator out) const
+{
+ CGAL_precondition( 2 <= current_dimension() );
+ if( current_dimension() < maximal_dimension() )
+ {
+ Conflict_pred_in_subspace c(*this, p, coaffine_orientation_predicate(), side_of_oriented_subsphere_predicate());
+ Conflict_traversal_pred_in_subspace tp(*this, c);
+ return tds().gather_full_cells(s, tp, out);
+ }
+ else
+ {
+ Orientation_d ori = geom_traits().orientation_d_object();
+ Side_of_oriented_sphere_d side = geom_traits().side_of_oriented_sphere_d_object();
+ Conflict_pred_in_fullspace c(*this, p, ori, side);
+ Conflict_traversal_pred_in_fullspace tp(*this, c);
+ return tds().gather_full_cells(s, tp, out);
+ }
+}
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - VALIDITY
+
+template< typename DCTraits, typename TDS >
+bool
+Delaunay_triangulation<DCTraits, TDS>
+::is_valid(bool verbose, int level) const
+{
+ if (!Base::is_valid(verbose, level))
+ return false;
+
+ int dim = current_dimension();
+ if (dim == maximal_dimension())
+ {
+ for (Finite_full_cell_const_iterator cit = this->finite_full_cells_begin() ;
+ cit != this->finite_full_cells_end() ; ++cit )
+ {
+ Full_cell_const_handle ch = cit.base();
+ for(int i = 0; i < dim+1 ; ++i )
+ {
+ // If the i-th neighbor is not an infinite cell
+ Vertex_handle opposite_vh =
+ ch->neighbor(i)->vertex(ch->neighbor(i)->index(ch));
+ if (!is_infinite(opposite_vh))
+ {
+ Side_of_oriented_sphere_d side =
+ geom_traits().side_of_oriented_sphere_d_object();
+ if (side(Point_const_iterator(ch->vertices_begin()),
+ Point_const_iterator(ch->vertices_end()),
+ opposite_vh->point()) == ON_BOUNDED_SIDE)
+ {
+ if (verbose)
+ CGAL_warning_msg(false, "Non-empty sphere");
+ return false;
+ }
+ }
+ }
+ }
+ }
+ return true;
+}
+
+
+} //namespace CGAL
+
+#endif // CGAL_DELAUNAY_COMPLEX_H
diff --git a/src/common/include/gudhi_patches/CGAL/Epeck_d.h b/src/common/include/gudhi_patches/CGAL/Epeck_d.h
new file mode 100644
index 00000000..52bce84c
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/Epeck_d.h
@@ -0,0 +1,53 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_EPECK_D_H
+#define CGAL_EPECK_D_H
+#include <CGAL/NewKernel_d/Cartesian_base.h>
+#include <CGAL/NewKernel_d/Wrapper/Cartesian_wrap.h>
+#include <CGAL/NewKernel_d/Kernel_d_interface.h>
+#include <CGAL/internal/Exact_type_selector.h>
+
+
+namespace CGAL {
+#define CGAL_BASE \
+ Cartesian_base_d<internal::Exact_field_selector<double>::Type, Dim>
+template<class Dim>
+struct Epeck_d_help1
+: CGAL_BASE
+{
+ CGAL_CONSTEXPR Epeck_d_help1(){}
+ CGAL_CONSTEXPR Epeck_d_help1(int d):CGAL_BASE(d){}
+};
+#undef CGAL_BASE
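+// Epeck_d<Dim>: d-dimensional kernel with exact predicates and exact
+// constructions, obtained by wrapping the exact Cartesian base above in the
+// d-dimensional kernel interface. Dim is a dimension tag such as
+// CGAL::Dimension_tag<3> or CGAL::Dynamic_dimension_tag; the int constructor
+// is meant for the dynamic-dimension case.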
+#define CGAL_BASE \
+ Kernel_d_interface< \
+ Cartesian_wrap< \
+ Epeck_d_help1<Dim>, \
+ Epeck_d<Dim> > >
+template<class Dim>
+struct Epeck_d
+: CGAL_BASE
+{
+ CGAL_CONSTEXPR Epeck_d(){}
+ CGAL_CONSTEXPR Epeck_d(int d):CGAL_BASE(d){}
+};
+#undef CGAL_BASE
+}
+#endif
diff --git a/src/common/include/gudhi_patches/CGAL/Epick_d.h b/src/common/include/gudhi_patches/CGAL/Epick_d.h
new file mode 100644
index 00000000..64438539
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/Epick_d.h
@@ -0,0 +1,71 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_EPICK_D_H
+#define CGAL_EPICK_D_H
+#include <CGAL/NewKernel_d/Cartesian_base.h>
+#include <CGAL/NewKernel_d/Cartesian_static_filters.h>
+#include <CGAL/NewKernel_d/Cartesian_filter_K.h>
+#include <CGAL/NewKernel_d/Wrapper/Cartesian_wrap.h>
+#include <CGAL/NewKernel_d/Kernel_d_interface.h>
+#include <CGAL/internal/Exact_type_selector.h>
+#include <CGAL/Interval_nt.h>
+#include <CGAL/NewKernel_d/Types/Weighted_point.h>
+
+
+namespace CGAL {
+#define CGAL_BASE \
+ Cartesian_filter_K< \
+ Cartesian_base_d<double, Dim>, \
+ Cartesian_base_d<Interval_nt_advanced, Dim>, \
+ Cartesian_base_d<internal::Exact_field_selector<double>::Type, Dim> \
+ >
+template<class Dim>
+struct Epick_d_help1
+: CGAL_BASE
+{
+ CGAL_CONSTEXPR Epick_d_help1(){}
+ CGAL_CONSTEXPR Epick_d_help1(int d):CGAL_BASE(d){}
+};
+#undef CGAL_BASE
+#define CGAL_BASE \
+ Cartesian_static_filters<Dim,Epick_d_help1<Dim>,Epick_d_help2<Dim> >
+template<class Dim>
+struct Epick_d_help2
+: CGAL_BASE
+{
+ CGAL_CONSTEXPR Epick_d_help2(){}
+ CGAL_CONSTEXPR Epick_d_help2(int d):CGAL_BASE(d){}
+};
+#undef CGAL_BASE
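+// Epick_d<Dim>: d-dimensional kernel with exact predicates and inexact
+// constructions. Coordinates are stored as double; each predicate is first
+// evaluated with interval arithmetic and falls back to the exact number type
+// when the filter fails, with static filters layered on top.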
+#define CGAL_BASE \
+ Kernel_d_interface< \
+ Cartesian_wrap< \
+ Epick_d_help2<Dim>, \
+ Epick_d<Dim> > >
+template<class Dim>
+struct Epick_d
+: CGAL_BASE
+{
+ CGAL_CONSTEXPR Epick_d(){}
+ CGAL_CONSTEXPR Epick_d(int d):CGAL_BASE(d){}
+};
+#undef CGAL_BASE
+}
+#endif
diff --git a/src/common/include/gudhi_patches/CGAL/IO/Triangulation_off_ostream.h b/src/common/include/gudhi_patches/CGAL/IO/Triangulation_off_ostream.h
new file mode 100644
index 00000000..701f0820
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/IO/Triangulation_off_ostream.h
@@ -0,0 +1,320 @@
+// Copyright (c) 2014 INRIA Sophia-Antipolis (France).
+// All rights reserved.
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL: $
+// $Id: $
+//
+// Author(s) : Clement Jamin
+
+
+#ifndef CGAL_TRIANGULATION_IO_H
+#define CGAL_TRIANGULATION_IO_H
+
+#include <CGAL/Epick_d.h>
+#include <CGAL/Triangulation.h>
+#include <sstream>
+#include <iostream>
+
+namespace CGAL {
+
+namespace Triangulation_IO
+{
+// TODO: test if the stream is binary or text?
+template<typename Traits, typename P>
+int
+output_point(std::ostream & os, const Traits &traits, const P & p)
+{
+ typedef typename Traits::Compute_coordinate_d Ccd;
+ const Ccd ccd = traits.compute_coordinate_d_object();
+ const int dim = traits.point_dimension_d_object()(p);
+ if (dim > 0)
+ {
+ os << ccd(p, 0);
+ for (int i = 1 ; i < dim ; ++i)
+ os << " " << CGAL::to_double(ccd(p, i));
+ }
+ return dim;
+}
+
+// TODO: test if the stream is binary or text?
+template<typename Traits, typename P>
+int
+output_weighted_point(std::ostream & os, const Traits &traits, const P & p,
+ bool output_weight = true)
+{
+ typedef typename Traits::Compute_coordinate_d Ccd;
+ typename Traits::Construct_point_d cp =
+ traits.construct_point_d_object();
+ typename Traits::Compute_weight_d pt_weight = traits.compute_weight_d_object();
+ const Ccd ccd = traits.compute_coordinate_d_object();
+ const int dim = traits.point_dimension_d_object()(p);
+ if (dim > 0)
+ {
+ output_point(os, traits, p);
+ if (output_weight)
+ os << " " << pt_weight(p);
+ }
+ return dim;
+}
+
+// TODO: test if the stream is binary or text?
+template<typename Traits, typename FCH>
+void
+output_full_cell(std::ostream & os, const Traits &traits, const FCH & fch,
+ bool output_weights = false)
+{
+ typename FCH::value_type::Vertex_handle_iterator vit = fch->vertices_begin();
+ for( ; vit != fch->vertices_end(); ++vit )
+ {
+ int dim;
+ if (output_weights)
+ dim = output_weighted_point(os, traits, (*vit)->point());
+ else
+ dim = output_point(os, traits, (*vit)->point());
+ if (dim > 0)
+ os << std::endl;
+ }
+}
+
+// TODO: test if the stream is binary or text?
+/*template<typename Traits, typename P>
+void
+input_point(std::istream & is, const Traits &traits, P & p)
+{
+ typedef typename Traits::FT FT;
+ std::vector<FT> coords;
+
+ std::string line;
+ for(;;)
+ {
+ if (!std::getline(is, line))
+ return is;
+ if (line != "")
+ break;
+ }
+ std::stringstream line_sstr(line);
+ FT temp;
+ while (line_sstr >> temp)
+ coords.push_back(temp);
+
+ p = traits.construct_point_d_object()(coords.begin(), coords.end());
+}*/
+
+} // namespace Triangulation_IO
+
+///////////////////////////////////////////////////////////////
+// TODO: replace these operator>> by an "input_point" function
+///////////////////////////////////////////////////////////////
+
+// TODO: test if the stream is binary or text?
+template<typename K>
+std::istream &
+operator>>(std::istream &is, typename Wrap::Point_d<K> & p)
+{
+ typedef typename Wrap::Point_d<K> P;
+ typedef typename K::FT FT;
+ std::vector<FT> coords;
+
+ std::string line;
+ for(;;)
+ {
+ if (!std::getline(is, line))
+ return is;
+ if (line != "")
+ break;
+ }
+ std::stringstream line_sstr(line);
+ FT temp;
+ while (line_sstr >> temp)
+ coords.push_back(temp);
+
+ p = P(coords.begin(), coords.end());
+ return is;
+}
+
+// TODO: test if the stream is binary or text?
+template<typename K>
+std::istream &
+operator>>(std::istream &is, typename Wrap::Weighted_point_d<K> & wp)
+{
+ typedef typename Wrap::Point_d<K> P;
+ typedef typename Wrap::Weighted_point_d<K> WP;
+ typedef typename K::FT FT;
+
+ std::string line;
+ for(;;)
+ {
+ if (!std::getline(is, line))
+ return is;
+ if (line != "")
+ break;
+ }
+ std::stringstream line_sstr(line);
+ FT temp;
+ std::vector<FT> coords;
+ while (line_sstr >> temp)
+ coords.push_back(temp);
+
+ typename std::vector<FT>::iterator last = coords.end() - 1;
+ P p = P(coords.begin(), last);
+ wp = WP(p, *last);
+
+ return is;
+}
+
+// TODO: test if the stream is binary or text?
+template<typename K>
+std::istream &
+operator>>(std::istream &is, typename Wrap::Vector_d<K> & v)
+{
+ typedef typename Wrap::Vector_d<K> V;
+ typedef typename K::FT FT;
+ std::vector<FT> coords;
+
+ std::string line;
+ for (;;)
+ {
+ if (!std::getline(is, line))
+ return is;
+ if (line != "")
+ break;
+ }
+ std::stringstream line_sstr(line);
+ FT temp;
+ while (line_sstr >> temp)
+ coords.push_back(temp);
+
+ v = V(coords.begin(), coords.end());
+ return is;
+}
+
+template < class GT, class TDS >
+std::ostream &
+export_triangulation_to_off(std::ostream & os,
+ const Triangulation<GT,TDS> & tr,
+ bool in_3D_export_surface_only = false)
+{
+ typedef Triangulation<GT,TDS> Tr;
+ typedef typename Tr::Vertex_const_handle Vertex_handle;
+ typedef typename Tr::Finite_vertex_const_iterator Finite_vertex_iterator;
+ typedef typename Tr::Finite_full_cell_const_iterator Finite_full_cell_iterator;
+ typedef typename Tr::Full_cell_const_iterator Full_cell_iterator;
+ typedef typename Tr::Full_cell Full_cell;
+ typedef typename Full_cell::Vertex_handle_const_iterator Full_cell_vertex_iterator;
+
+ if (tr.maximal_dimension() < 2 || tr.maximal_dimension() > 3)
+ {
+    std::cerr << "Warning: export_triangulation_to_off => maximal dimension should be 2 or 3.";
+    os << "Warning: export_triangulation_to_off => maximal dimension should be 2 or 3.";
+ return os;
+ }
+
+ size_t n = tr.number_of_vertices();
+
+ std::stringstream output;
+
+ // write the vertices
+ std::map<Vertex_handle, int> index_of_vertex;
+ int i = 0;
+ for(Finite_vertex_iterator it = tr.finite_vertices_begin();
+ it != tr.finite_vertices_end(); ++it, ++i)
+ {
+ Triangulation_IO::output_point(output, tr.geom_traits(), it->point());
+ if (tr.maximal_dimension() == 2)
+ output << " 0";
+ output << std::endl;
+ index_of_vertex[it.base()] = i;
+ }
+ CGAL_assertion( i == n );
+
+ size_t number_of_triangles = 0;
+ if (tr.maximal_dimension() == 2)
+ {
+ for (Finite_full_cell_iterator fch = tr.finite_full_cells_begin() ;
+ fch != tr.finite_full_cells_end() ; ++fch)
+ {
+ output << "3 ";
+ for (Full_cell_vertex_iterator vit = fch->vertices_begin() ;
+ vit != fch->vertices_end() ; ++vit)
+ {
+ output << index_of_vertex[*vit] << " ";
+ }
+ output << std::endl;
+ ++number_of_triangles;
+ }
+ }
+ else if (tr.maximal_dimension() == 3)
+ {
+ if (in_3D_export_surface_only)
+ {
+ // Parse boundary facets
+ for (Full_cell_iterator fch = tr.full_cells_begin() ;
+ fch != tr.full_cells_end() ; ++fch)
+ {
+ if (tr.is_infinite(fch))
+ {
+ output << "3 ";
+ for (Full_cell_vertex_iterator vit = fch->vertices_begin() ;
+ vit != fch->vertices_end() ; ++vit)
+ {
+ if (!tr.is_infinite(*vit))
+ output << index_of_vertex[*vit] << " ";
+ }
+ output << std::endl;
+ ++number_of_triangles;
+ }
+ }
+ }
+ else
+ {
+ // Parse finite cells
+ for (Finite_full_cell_iterator fch = tr.finite_full_cells_begin() ;
+ fch != tr.finite_full_cells_end() ; ++fch)
+ {
+ output << "3 "
+ << index_of_vertex[fch->vertex(0)] << " "
+ << index_of_vertex[fch->vertex(1)] << " "
+ << index_of_vertex[fch->vertex(2)]
+ << std::endl;
+ output << "3 "
+ << index_of_vertex[fch->vertex(0)] << " "
+ << index_of_vertex[fch->vertex(2)] << " "
+ << index_of_vertex[fch->vertex(3)]
+ << std::endl;
+ output << "3 "
+ << index_of_vertex[fch->vertex(1)] << " "
+ << index_of_vertex[fch->vertex(2)] << " "
+ << index_of_vertex[fch->vertex(3)]
+ << std::endl;
+ output << "3 "
+ << index_of_vertex[fch->vertex(0)] << " "
+ << index_of_vertex[fch->vertex(1)] << " "
+ << index_of_vertex[fch->vertex(3)]
+ << std::endl;
+ number_of_triangles += 4;
+ }
+ }
+ }
+
+ os << "OFF \n"
+ << n << " "
+ << number_of_triangles << " 0\n"
+ << output.str();
+
+ return os;
+}
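+
+// Illustrative call (assumption, not part of the original header): for a
+// triangulation tr of maximal dimension 2 or 3,
+//   std::ofstream off_stream("triangulation.off");
+//   export_triangulation_to_off(off_stream, tr);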
+
+} //namespace CGAL
+
+#endif // CGAL_TRIANGULATION_IO_H
diff --git a/src/common/include/gudhi_patches/CGAL/Kd_tree.h b/src/common/include/gudhi_patches/CGAL/Kd_tree.h
new file mode 100644
index 00000000..f085b0da
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/Kd_tree.h
@@ -0,0 +1,582 @@
+// Copyright (c) 2002,2011,2014 Utrecht University (The Netherlands), Max-Planck-Institute Saarbruecken (Germany).
+// All rights reserved.
+//
+// This file is part of CGAL (www.cgal.org).
+// You can redistribute it and/or modify it under the terms of the GNU
+// General Public License as published by the Free Software Foundation,
+// either version 3 of the License, or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Hans Tangelder (<hanst@cs.uu.nl>),
+// : Waqar Khan <wkhan@mpi-inf.mpg.de>
+
+#ifndef CGAL_KD_TREE_H
+#define CGAL_KD_TREE_H
+
+#include "Kd_tree_node.h"
+
+#include <CGAL/basic.h>
+#include <CGAL/assertions.h>
+#include <vector>
+
+#include <CGAL/algorithm.h>
+#include <CGAL/internal/Get_dimension_tag.h>
+#include <CGAL/Search_traits.h>
+
+
+#include <deque>
+#include <boost/container/deque.hpp>
+#include <boost/optional.hpp>
+
+#ifdef CGAL_HAS_THREADS
+#include <CGAL/mutex.h>
+#endif
+
+namespace CGAL {
+
+//template <class SearchTraits, class Splitter_=Median_of_rectangle<SearchTraits>, class UseExtendedNode = Tag_true >
+template <class SearchTraits, class Splitter_=Sliding_midpoint<SearchTraits>, class UseExtendedNode = Tag_true >
+class Kd_tree {
+
+public:
+ typedef SearchTraits Traits;
+ typedef Splitter_ Splitter;
+ typedef typename SearchTraits::Point_d Point_d;
+ typedef typename Splitter::Container Point_container;
+
+ typedef typename SearchTraits::FT FT;
+ typedef Kd_tree_node<SearchTraits, Splitter, UseExtendedNode > Node;
+ typedef Kd_tree_leaf_node<SearchTraits, Splitter, UseExtendedNode > Leaf_node;
+ typedef Kd_tree_internal_node<SearchTraits, Splitter, UseExtendedNode > Internal_node;
+ typedef Kd_tree<SearchTraits, Splitter> Tree;
+ typedef Kd_tree<SearchTraits, Splitter,UseExtendedNode> Self;
+
+ typedef Node* Node_handle;
+ typedef const Node* Node_const_handle;
+ typedef Leaf_node* Leaf_node_handle;
+ typedef const Leaf_node* Leaf_node_const_handle;
+ typedef Internal_node* Internal_node_handle;
+ typedef const Internal_node* Internal_node_const_handle;
+ typedef typename std::vector<const Point_d*>::const_iterator Point_d_iterator;
+ typedef typename std::vector<const Point_d*>::const_iterator Point_d_const_iterator;
+ typedef typename Splitter::Separator Separator;
+ typedef typename std::vector<Point_d>::const_iterator iterator;
+ typedef typename std::vector<Point_d>::const_iterator const_iterator;
+
+ typedef typename std::vector<Point_d>::size_type size_type;
+
+ typedef typename internal::Get_dimension_tag<SearchTraits>::Dimension D;
+
+private:
+ SearchTraits traits_;
+ Splitter split;
+
+
+  // workaround for https://svn.boost.org/trac/boost/ticket/9332
+#if (_MSC_VER == 1800) && (BOOST_VERSION == 105500)
+ std::deque<Internal_node> internal_nodes;
+ std::deque<Leaf_node> leaf_nodes;
+#else
+ boost::container::deque<Internal_node> internal_nodes;
+ boost::container::deque<Leaf_node> leaf_nodes;
+#endif
+
+ Node_handle tree_root;
+
+ Kd_tree_rectangle<FT,D>* bbox;
+ std::vector<Point_d> pts;
+
+ // Instead of storing the points in arrays in the Kd_tree_node
+ // we put all the data in a vector in the Kd_tree.
+ // and we only store an iterator range in the Kd_tree_node.
+ //
+ std::vector<const Point_d*> data;
+
+
+ #ifdef CGAL_HAS_THREADS
+ mutable CGAL_MUTEX building_mutex;//mutex used to protect const calls inducing build()
+ #endif
+ bool built_;
+ bool removed_;
+
+  // copy constructor kept private: the tree is not meant to be copied
+ Kd_tree(const Tree& tree)
+ : traits_(tree.traits_),built_(tree.built_)
+ {};
+
+
+ // Instead of the recursive construction of the tree in the class Kd_tree_node
+ // we do this in the tree class. The advantage is that we then can optimize
+ // the allocation of the nodes.
+
+ // The leaf node
+ Node_handle
+ create_leaf_node(Point_container& c)
+ {
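+    // c iterates over the pointers stored in 'data'; the leaf keeps the
+    // corresponding range as an iterator into 'pts' at the same offset.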
+ Leaf_node node(true , static_cast<unsigned int>(c.size()));
+ std::ptrdiff_t tmp = c.begin() - data.begin();
+ node.data = pts.begin() + tmp;
+
+ leaf_nodes.push_back(node);
+ Leaf_node_handle nh = &leaf_nodes.back();
+
+
+ return nh;
+ }
+
+
+ // The internal node
+
+ Node_handle
+ create_internal_node(Point_container& c, const Tag_true&)
+ {
+ return create_internal_node_use_extension(c);
+ }
+
+ Node_handle
+ create_internal_node(Point_container& c, const Tag_false&)
+ {
+ return create_internal_node(c);
+ }
+
+
+
+  // TODO: Similar to the leaf_init function above, a part of the code should be
+  // moved to the class Kd_tree_node.
+  // It is not proper yet, but the goal was to see whether there is
+  // a potential performance gain through the Compact_container.
+ Node_handle
+ create_internal_node_use_extension(Point_container& c)
+ {
+ Internal_node node(false);
+ internal_nodes.push_back(node);
+ Internal_node_handle nh = &internal_nodes.back();
+
+ Separator sep;
+ Point_container c_low(c.dimension(),traits_);
+ split(sep, c, c_low);
+ nh->set_separator(sep);
+
+ int cd = nh->cutting_dimension();
+ if(!c_low.empty()){
+ nh->lower_low_val = c_low.tight_bounding_box().min_coord(cd);
+ nh->lower_high_val = c_low.tight_bounding_box().max_coord(cd);
+ }
+ else{
+ nh->lower_low_val = nh->cutting_value();
+ nh->lower_high_val = nh->cutting_value();
+ }
+ if(!c.empty()){
+ nh->upper_low_val = c.tight_bounding_box().min_coord(cd);
+ nh->upper_high_val = c.tight_bounding_box().max_coord(cd);
+ }
+ else{
+ nh->upper_low_val = nh->cutting_value();
+ nh->upper_high_val = nh->cutting_value();
+ }
+
+ CGAL_assertion(nh->cutting_value() >= nh->lower_low_val);
+ CGAL_assertion(nh->cutting_value() <= nh->upper_high_val);
+
+ if (c_low.size() > split.bucket_size()){
+ nh->lower_ch = create_internal_node_use_extension(c_low);
+ }else{
+ nh->lower_ch = create_leaf_node(c_low);
+ }
+ if (c.size() > split.bucket_size()){
+ nh->upper_ch = create_internal_node_use_extension(c);
+ }else{
+ nh->upper_ch = create_leaf_node(c);
+ }
+
+
+
+
+ return nh;
+ }
+
+
+  // Note also that I duplicated the code to get rid of the if's for
+  // the boolean use_extension, which is constant during the construction.
+ Node_handle
+ create_internal_node(Point_container& c)
+ {
+ Internal_node node(false);
+ internal_nodes.push_back(node);
+ Internal_node_handle nh = &internal_nodes.back();
+ Separator sep;
+
+ Point_container c_low(c.dimension(),traits_);
+ split(sep, c, c_low);
+ nh->set_separator(sep);
+
+ if (c_low.size() > split.bucket_size()){
+ nh->lower_ch = create_internal_node(c_low);
+ }else{
+ nh->lower_ch = create_leaf_node(c_low);
+ }
+ if (c.size() > split.bucket_size()){
+ nh->upper_ch = create_internal_node(c);
+ }else{
+ nh->upper_ch = create_leaf_node(c);
+ }
+
+
+
+ return nh;
+ }
+
+
+
+public:
+
+ Kd_tree(Splitter s = Splitter(),const SearchTraits traits=SearchTraits())
+ : traits_(traits),split(s), built_(false), removed_(false)
+ {}
+
+ template <class InputIterator>
+ Kd_tree(InputIterator first, InputIterator beyond,
+ Splitter s = Splitter(),const SearchTraits traits=SearchTraits())
+ : traits_(traits),split(s), built_(false), removed_(false)
+ {
+ pts.insert(pts.end(), first, beyond);
+ }
+
+ bool empty() const {
+ return pts.empty();
+ }
+
+ void
+ build()
+ {
+    // This function is not meant to be called when a tree already exists; one
+    // must call invalidate_built() first.
+ CGAL_assertion(!is_built());
+ CGAL_assertion(!removed_);
+ const Point_d& p = *pts.begin();
+ typename SearchTraits::Construct_cartesian_const_iterator_d ccci=traits_.construct_cartesian_const_iterator_d_object();
+ int dim = static_cast<int>(std::distance(ccci(p), ccci(p,0)));
+
+ data.reserve(pts.size());
+ for(unsigned int i = 0; i < pts.size(); i++){
+ data.push_back(&pts[i]);
+ }
+ Point_container c(dim, data.begin(), data.end(),traits_);
+ bbox = new Kd_tree_rectangle<FT,D>(c.bounding_box());
+ if (c.size() <= split.bucket_size()){
+ tree_root = create_leaf_node(c);
+ }else {
+ tree_root = create_internal_node(c, UseExtendedNode());
+ }
+
+ //Reorder vector for spatial locality
+ std::vector<Point_d> ptstmp;
+ ptstmp.resize(pts.size());
+ for (std::size_t i = 0; i < pts.size(); ++i){
+ ptstmp[i] = *data[i];
+ }
+ for(std::size_t i = 0; i < leaf_nodes.size(); ++i){
+ std::ptrdiff_t tmp = leaf_nodes[i].begin() - pts.begin();
+ leaf_nodes[i].data = ptstmp.begin() + tmp;
+ }
+ pts.swap(ptstmp);
+
+ data.clear();
+
+ built_ = true;
+ }
+
+private:
+  // any call to this function is for the moment not thread-safe
+ void const_build() const {
+ #ifdef CGAL_HAS_THREADS
+    // this ensures that build() will be called only once
+ CGAL_SCOPED_LOCK(building_mutex);
+ if(!is_built())
+ #endif
+ const_cast<Self*>(this)->build(); //THIS IS NOT THREADSAFE
+ }
+public:
+
+ bool is_built() const
+ {
+ return built_;
+ }
+
+ void invalidate_built()
+ {
+ if(removed_){
+ // Walk the tree to collect the remaining points.
+ // Writing directly to pts would likely work, but better be safe.
+ std::vector<Point_d> ptstmp;
+ //ptstmp.resize(root()->num_items());
+ root()->tree_items(std::back_inserter(ptstmp));
+ pts.swap(ptstmp);
+ removed_=false;
+ CGAL_assertion(is_built()); // the rest of the cleanup must happen
+ }
+ if(is_built()){
+ internal_nodes.clear();
+ leaf_nodes.clear();
+ data.clear();
+ delete bbox;
+ built_ = false;
+ }
+ }
+
+ void clear()
+ {
+ invalidate_built();
+ pts.clear();
+ removed_ = false;
+ }
+
+ void
+ insert(const Point_d& p)
+ {
+ invalidate_built();
+ pts.push_back(p);
+ }
+
+ template <class InputIterator>
+ void
+ insert(InputIterator first, InputIterator beyond)
+ {
+ invalidate_built();
+ pts.insert(pts.end(),first, beyond);
+ }
+
+private:
+ struct Equal_by_coordinates {
+ SearchTraits const* traits;
+ Point_d const* pp;
+ bool operator()(Point_d const&q) const {
+ typename SearchTraits::Construct_cartesian_const_iterator_d ccci=traits->construct_cartesian_const_iterator_d_object();
+ return std::equal(ccci(*pp), ccci(*pp,0), ccci(q));
+ }
+ };
+ Equal_by_coordinates equal_by_coordinates(Point_d const&p){
+ Equal_by_coordinates ret = { &traits(), &p };
+ return ret;
+ }
+
+public:
+ void
+ remove(const Point_d& p)
+ {
+ remove(p, equal_by_coordinates(p));
+ }
+
+ template<class Equal>
+ void
+ remove(const Point_d& p, Equal const& equal_to_p)
+ {
+#if 0
+ // This code could have quadratic runtime.
+ if (!is_built()) {
+ std::vector<Point_d>::iterator pi = std::find(pts.begin(), pts.end(), p);
+ // Precondition: the point must be there.
+ CGAL_assertion (pi != pts.end());
+ pts.erase(pi);
+ return;
+ }
+#endif
+ bool success = remove_(p, 0, false, 0, false, root(), equal_to_p);
+ CGAL_assertion(success);
+
+    // Do not set the flag if the tree has been cleared.
+ if(is_built())
+ removed_ |= success;
+ }
+private:
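+  // remove_ walks down the tree to the leaf holding p, erases p from that
+  // leaf and, when the leaf becomes empty, splices its sibling subtree into
+  // the grandparent (or promotes it to the root, or clears the tree entirely).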
+ template<class Equal>
+ bool remove_(const Point_d& p,
+ Internal_node_handle grandparent, bool parent_islower,
+ Internal_node_handle parent, bool islower,
+ Node_handle node, Equal const& equal_to_p) {
+ // Recurse to locate the point
+ if (!node->is_leaf()) {
+ Internal_node_handle newparent = static_cast<Internal_node_handle>(node);
+ // FIXME: This should be if(x<y) remove low; else remove up;
+ if (traits().construct_cartesian_const_iterator_d_object()(p)[newparent->cutting_dimension()] <= newparent->cutting_value()) {
+ if (remove_(p, parent, islower, newparent, true, newparent->lower(), equal_to_p))
+ return true;
+ }
+ //if (traits().construct_cartesian_const_iterator_d_object()(p)[newparent->cutting_dimension()] >= newparent->cutting_value())
+ return remove_(p, parent, islower, newparent, false, newparent->upper(), equal_to_p);
+
+ CGAL_assertion(false); // Point was not found
+ }
+
+ // Actual removal
+ Leaf_node_handle lnode = static_cast<Leaf_node_handle>(node);
+ if (lnode->size() > 1) {
+ iterator pi = std::find_if(lnode->begin(), lnode->end(), equal_to_p);
+ // FIXME: we should ensure this never happens
+ if (pi == lnode->end()) return false;
+ iterator lasti = lnode->end() - 1;
+ if (pi != lasti) {
+ // Hack to get a non-const iterator
+ std::iter_swap(pts.begin()+(pi-pts.begin()), pts.begin()+(lasti-pts.begin()));
+ }
+ lnode->drop_last_point();
+ } else if (!equal_to_p(*lnode->begin())) {
+ // FIXME: we should ensure this never happens
+ return false;
+ } else if (grandparent) {
+ Node_handle brother = islower ? parent->upper() : parent->lower();
+ if (parent_islower)
+ grandparent->set_lower(brother);
+ else
+ grandparent->set_upper(brother);
+ } else if (parent) {
+ tree_root = islower ? parent->upper() : parent->lower();
+ } else {
+ clear();
+ }
+ return true;
+ }
+
+public:
+  // For efficiency: reserve the size of the points vector in advance (if the number of points is already known).
+ void reserve(size_t size)
+ {
+ pts.reserve(size);
+ }
+
+ //Get the capacity of the underlying points vector.
+ size_t capacity()
+ {
+ return pts.capacity();
+ }
+
+
+ template <class OutputIterator, class FuzzyQueryItem>
+ OutputIterator
+ search(OutputIterator it, const FuzzyQueryItem& q) const
+ {
+ if(! pts.empty()){
+
+ if(! is_built()){
+ const_build();
+ }
+ Kd_tree_rectangle<FT,D> b(*bbox);
+ return tree_root->search(it,q,b);
+ }
+ return it;
+ }
+
+
+ template <class FuzzyQueryItem>
+ boost::optional<Point_d>
+ search_any_point(const FuzzyQueryItem& q) const
+ {
+ if(! pts.empty()){
+
+ if(! is_built()){
+ const_build();
+ }
+ Kd_tree_rectangle<FT,D> b(*bbox);
+ return tree_root->search_any_point(q,b);
+ }
+ return boost::none;
+ }
+
+
+ ~Kd_tree() {
+ if(is_built()){
+ delete bbox;
+ }
+ }
+
+
+ const SearchTraits&
+ traits() const
+ {
+ return traits_;
+ }
+
+ Node_const_handle
+ root() const
+ {
+ if(! is_built()){
+ const_build();
+ }
+ return tree_root;
+ }
+
+ Node_handle
+ root()
+ {
+ if(! is_built()){
+ build();
+ }
+ return tree_root;
+ }
+
+ void
+ print() const
+ {
+ if(! is_built()){
+ const_build();
+ }
+ root()->print();
+ }
+
+ const Kd_tree_rectangle<FT,D>&
+ bounding_box() const
+ {
+ if(! is_built()){
+ const_build();
+ }
+ return *bbox;
+ }
+
+ const_iterator
+ begin() const
+ {
+ return pts.begin();
+ }
+
+ const_iterator
+ end() const
+ {
+ return pts.end();
+ }
+
+ size_type
+ size() const
+ {
+ return pts.size();
+ }
+
+ // Print statistics of the tree.
+ std::ostream&
+ statistics(std::ostream& s) const
+ {
+ if(! is_built()){
+ const_build();
+ }
+ s << "Tree statistics:" << std::endl;
+ s << "Number of items stored: "
+ << root()->num_items() << std::endl;
+ s << "Number of nodes: "
+ << root()->num_nodes() << std::endl;
+ s << " Tree depth: " << root()->depth() << std::endl;
+ return s;
+ }
+
+
+};
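+
+// Illustrative usage sketch (assumption; a concrete SearchTraits model such as
+// CGAL::Search_traits_d<Kernel> has to be chosen for Traits):
+//   CGAL::Kd_tree<Traits> tree(points.begin(), points.end());
+//   tree.build();                       // optional: queries also build lazily
+//   std::list<Traits::Point_d> result;
+//   tree.search(std::back_inserter(result), fuzzy_query_item);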
+
+} // namespace CGAL
+
+#endif // CGAL_KD_TREE_H
diff --git a/src/common/include/gudhi_patches/CGAL/Kd_tree_node.h b/src/common/include/gudhi_patches/CGAL/Kd_tree_node.h
new file mode 100644
index 00000000..909ee260
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/Kd_tree_node.h
@@ -0,0 +1,586 @@
+// Copyright (c) 2002,2011 Utrecht University (The Netherlands).
+// All rights reserved.
+//
+// This file is part of CGAL (www.cgal.org).
+// You can redistribute it and/or modify it under the terms of the GNU
+// General Public License as published by the Free Software Foundation,
+// either version 3 of the License, or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+//
+// Authors : Hans Tangelder (<hanst@cs.uu.nl>)
+
+#ifndef CGAL_KD_TREE_NODE_H
+#define CGAL_KD_TREE_NODE_H
+
+#include "CGAL/Splitters.h"
+
+#include <CGAL/Compact_container.h>
+#include <boost/cstdint.hpp>
+
+namespace CGAL {
+
+ template <class SearchTraits, class Splitter, class UseExtendedNode>
+ class Kd_tree;
+
+ template < class TreeTraits, class Splitter, class UseExtendedNode >
+ class Kd_tree_node {
+
+ friend class Kd_tree<TreeTraits,Splitter,UseExtendedNode>;
+
+ typedef typename Kd_tree<TreeTraits,Splitter,UseExtendedNode>::Node_handle Node_handle;
+ typedef typename Kd_tree<TreeTraits,Splitter,UseExtendedNode>::Node_const_handle Node_const_handle;
+ typedef typename Kd_tree<TreeTraits,Splitter,UseExtendedNode>::Internal_node_handle Internal_node_handle;
+ typedef typename Kd_tree<TreeTraits,Splitter,UseExtendedNode>::Internal_node_const_handle Internal_node_const_handle;
+ typedef typename Kd_tree<TreeTraits,Splitter,UseExtendedNode>::Leaf_node_handle Leaf_node_handle;
+ typedef typename Kd_tree<TreeTraits,Splitter,UseExtendedNode>::Leaf_node_const_handle Leaf_node_const_handle;
+ typedef typename TreeTraits::Point_d Point_d;
+
+ typedef typename TreeTraits::FT FT;
+ typedef typename Kd_tree<TreeTraits,Splitter,UseExtendedNode>::Separator Separator;
+ typedef typename Kd_tree<TreeTraits,Splitter,UseExtendedNode>::Point_d_iterator Point_d_iterator;
+ typedef typename Kd_tree<TreeTraits,Splitter,UseExtendedNode>::iterator iterator;
+ typedef typename Kd_tree<TreeTraits,Splitter,UseExtendedNode>::D D;
+
+ bool leaf;
+
+ public :
+ Kd_tree_node(bool leaf_)
+ :leaf(leaf_){}
+
+ bool is_leaf() const{
+ return leaf;
+ }
+
+ std::size_t
+ num_items() const
+ {
+ if (is_leaf()){
+ Leaf_node_const_handle node =
+ static_cast<Leaf_node_const_handle>(this);
+ return node->size();
+ }
+ else {
+ Internal_node_const_handle node =
+ static_cast<Internal_node_const_handle>(this);
+ return node->lower()->num_items() + node->upper()->num_items();
+ }
+ }
+
+ std::size_t
+ num_nodes() const
+ {
+ if (is_leaf()) return 1;
+ else {
+ Internal_node_const_handle node =
+ static_cast<Internal_node_const_handle>(this);
+ return node->lower()->num_nodes() + node->upper()->num_nodes();
+ }
+ }
+
+ int
+ depth(const int current_max_depth) const
+ {
+ if (is_leaf()){
+ return current_max_depth;
+ }
+ else {
+ Internal_node_const_handle node =
+ static_cast<Internal_node_const_handle>(this);
+ return
+ (std::max)( node->lower()->depth(current_max_depth + 1),
+ node->upper()->depth(current_max_depth + 1));
+ }
+ }
+
+ int
+ depth() const
+ {
+ return depth(1);
+ }
+
+ template <class OutputIterator>
+ OutputIterator
+ tree_items(OutputIterator it) const {
+ if (is_leaf()) {
+ Leaf_node_const_handle node =
+ static_cast<Leaf_node_const_handle>(this);
+ if (node->size()>0)
+ for (iterator i=node->begin(); i != node->end(); i++)
+ {*it=*i; ++it;}
+ }
+ else {
+ Internal_node_const_handle node =
+ static_cast<Internal_node_const_handle>(this);
+ it=node->lower()->tree_items(it);
+ it=node->upper()->tree_items(it);
+ }
+ return it;
+ }
+
+
+ boost::optional<Point_d>
+ any_tree_item() const {
+ boost::optional<Point_d> result = boost::none;
+ if (is_leaf()) {
+ Leaf_node_const_handle node =
+ static_cast<Leaf_node_const_handle>(this);
+ if (node->size()>0){
+ return boost::make_optional(*(node->begin()));
+ }
+ }
+ else {
+ Internal_node_const_handle node =
+ static_cast<Internal_node_const_handle>(this);
+ result = node->lower()->any_tree_item();
+ if(! result){
+ result = node->upper()->any_tree_item();
+ }
+ }
+ return result;
+ }
+
+
+ void
+ indent(int d) const
+ {
+ for(int i = 0; i < d; i++){
+ std::cout << " ";
+ }
+ }
+
+
+ void
+ print(int d = 0) const
+ {
+ if (is_leaf()) {
+ Leaf_node_const_handle node =
+ static_cast<Leaf_node_const_handle>(this);
+ indent(d);
+ std::cout << "leaf" << std::endl;
+ if (node->size()>0)
+ for (iterator i=node->begin(); i != node->end(); i++)
+ {indent(d);std::cout << *i << std::endl;}
+ }
+ else {
+ Internal_node_const_handle node =
+ static_cast<Internal_node_const_handle>(this);
+ indent(d);
+ std::cout << "lower tree" << std::endl;
+ node->lower()->print(d+1);
+ indent(d);
+ std::cout << "separator: dim = " << node->cutting_dimension() << " val = " << node->cutting_value() << std::endl;
+ indent(d);
+ std::cout << "upper tree" << std::endl;
+ node->upper()->print(d+1);
+ }
+ }
+
+
+ template <class OutputIterator, class FuzzyQueryItem>
+ OutputIterator
+ search(OutputIterator it, const FuzzyQueryItem& q,
+ Kd_tree_rectangle<FT,D>& b) const
+ {
+ if (is_leaf()) {
+ Leaf_node_const_handle node =
+ static_cast<Leaf_node_const_handle>(this);
+ if (node->size()>0)
+ for (iterator i=node->begin(); i != node->end(); i++)
+ if (q.contains(*i))
+ {*it++=*i;}
+ }
+ else {
+ Internal_node_const_handle node =
+ static_cast<Internal_node_const_handle>(this);
+ // after splitting b denotes the lower part of b
+ Kd_tree_rectangle<FT,D> b_upper(b);
+ b.split(b_upper, node->cutting_dimension(),
+ node->cutting_value());
+
+ if (q.outer_range_contains(b))
+ it=node->lower()->tree_items(it);
+ else
+ if (q.inner_range_intersects(b))
+ it=node->lower()->search(it,q,b);
+ if (q.outer_range_contains(b_upper))
+ it=node->upper()->tree_items(it);
+ else
+ if (q.inner_range_intersects(b_upper))
+ it=node->upper()->search(it,q,b_upper);
+ }

+ return it;
+ }
+
+
+ template <class FuzzyQueryItem>
+ boost::optional<Point_d>
+ search_any_point(const FuzzyQueryItem& q,
+ Kd_tree_rectangle<FT,D>& b) const
+ {
+ boost::optional<Point_d> result = boost::none;
+ if (is_leaf()) {
+ Leaf_node_const_handle node =
+ static_cast<Leaf_node_const_handle>(this);
+ if (node->size()>0)
+ for (iterator i=node->begin(); i != node->end(); i++)
+ if (q.contains(*i))
+ { result = *i; break; }
+ }
+ else {
+ Internal_node_const_handle node =
+ static_cast<Internal_node_const_handle>(this);
+ // after splitting b denotes the lower part of b
+ Kd_tree_rectangle<FT,D> b_upper(b);
+ b.split(b_upper, node->cutting_dimension(),
+ node->cutting_value());
+
+ if (q.outer_range_contains(b)){
+ result = node->lower()->any_tree_item();
+ }else{
+ if (q.inner_range_intersects(b)){
+ result = node->lower()->search_any_point(q,b);
+ }
+ }
+ if(result){
+ return result;
+ }
+ if (q.outer_range_contains(b_upper)){
+ result = node->upper()->any_tree_item();
+ }else{
+ if (q.inner_range_intersects(b_upper))
+ result = node->upper()->search_any_point(q,b_upper);
+ }
+ }
+ return result;
+ }
+
+ };
+
+
+ template < class TreeTraits, class Splitter, class UseExtendedNode >
+ class Kd_tree_leaf_node : public Kd_tree_node< TreeTraits, Splitter, UseExtendedNode >{
+
+ friend class Kd_tree<TreeTraits,Splitter,UseExtendedNode>;
+
+ typedef typename Kd_tree<TreeTraits,Splitter,UseExtendedNode>::iterator iterator;
+ typedef Kd_tree_node< TreeTraits, Splitter, UseExtendedNode> Base;
+ typedef typename TreeTraits::Point_d Point_d;
+
+ private:
+
+ // private variables for leaf nodes
+ boost::int32_t n; // denotes number of items in a leaf node
+ iterator data; // iterator to data in leaf node
+
+
+ public:
+
+ // default constructor
+ Kd_tree_leaf_node()
+ {}
+
+ Kd_tree_leaf_node(bool leaf_ )
+ : Base(leaf_)
+ {}
+
+ Kd_tree_leaf_node(bool leaf_,unsigned int n_ )
+ : Base(leaf_), n(n_)
+ {}
+
+ // members for all nodes
+
+ // members for leaf nodes only
+ inline
+ unsigned int
+ size() const
+ {
+ return n;
+ }
+
+ inline
+ iterator
+ begin() const
+ {
+ return data;
+ }
+
+ inline
+ iterator
+ end() const
+ {
+ return data + n;
+ }
+
+ inline
+ void
+ drop_last_point()
+ {
+ --n;
+ }
+
+ }; //leaf node
+
+
+
+ template < class TreeTraits, class Splitter, class UseExtendedNode>
+ class Kd_tree_internal_node : public Kd_tree_node< TreeTraits, Splitter, UseExtendedNode >{
+
+ friend class Kd_tree<TreeTraits,Splitter,UseExtendedNode>;
+
+ typedef Kd_tree_node< TreeTraits, Splitter, UseExtendedNode> Base;
+ typedef typename Kd_tree<TreeTraits,Splitter,UseExtendedNode>::Node_handle Node_handle;
+ typedef typename Kd_tree<TreeTraits,Splitter,UseExtendedNode>::Node_const_handle Node_const_handle;
+
+ typedef typename TreeTraits::FT FT;
+ typedef typename Kd_tree<TreeTraits,Splitter,UseExtendedNode>::Separator Separator;
+
+ private:
+
+ // private variables for internal nodes
+ boost::int32_t cut_dim;
+ FT cut_val;
+ Node_handle lower_ch, upper_ch;
+
+
+ // private variables for extended internal nodes
+ FT upper_low_val;
+ FT upper_high_val;
+ FT lower_low_val;
+ FT lower_high_val;
+
+
+ public:
+
+ // default constructor
+ Kd_tree_internal_node()
+ {}
+
+ Kd_tree_internal_node(bool leaf_)
+ : Base(leaf_)
+ {}
+
+
+ // members for internal node and extended internal node
+
+ inline
+ Node_const_handle
+ lower() const
+ {
+ return lower_ch;
+ }
+
+ inline
+ Node_const_handle
+ upper() const
+ {
+ return upper_ch;
+ }
+
+ inline
+ Node_handle
+ lower()
+ {
+ return lower_ch;
+ }
+
+ inline
+ Node_handle
+ upper()
+ {
+ return upper_ch;
+ }
+
+ inline
+ void
+ set_lower(Node_handle nh)
+ {
+ lower_ch = nh;
+ }
+
+ inline
+ void
+ set_upper(Node_handle nh)
+ {
+ upper_ch = nh;
+ }
+
+ // inline Separator& separator() {return sep; }
+ // use instead
+ inline
+ void set_separator(Separator& sep){
+ cut_dim = sep.cutting_dimension();
+ cut_val = sep.cutting_value();
+ }
+
+ inline
+ FT
+ cutting_value() const
+ {
+ return cut_val;
+ }
+
+ inline
+ int
+ cutting_dimension() const
+ {
+ return cut_dim;
+ }
+
+ // members for extended internal node only
+ inline
+ FT
+ upper_low_value() const
+ {
+ return upper_low_val;
+ }
+
+ inline
+ FT
+ upper_high_value() const
+ {
+ return upper_high_val;
+ }
+
+ inline
+ FT
+ lower_low_value() const
+ {
+ return lower_low_val;
+ }
+
+ inline
+ FT
+ lower_high_value() const
+ {
+ return lower_high_val;
+ }
+
+ /*Separator&
+ separator()
+ {
+ return Separator(cutting_dimension,cutting_value);
+ }*/
+
+
+ };//internal node
+
+ template < class TreeTraits, class Splitter>
+ class Kd_tree_internal_node<TreeTraits,Splitter,Tag_false> : public Kd_tree_node< TreeTraits, Splitter, Tag_false >{
+
+ friend class Kd_tree<TreeTraits,Splitter,Tag_false>;
+
+ typedef Kd_tree_node< TreeTraits, Splitter, Tag_false> Base;
+ typedef typename Kd_tree<TreeTraits,Splitter,Tag_false>::Node_handle Node_handle;
+ typedef typename Kd_tree<TreeTraits,Splitter,Tag_false>::Node_const_handle Node_const_handle;
+
+ typedef typename TreeTraits::FT FT;
+ typedef typename Kd_tree<TreeTraits,Splitter,Tag_false>::Separator Separator;
+
+ private:
+
+ // private variables for internal nodes
+ boost::uint8_t cut_dim;
+ FT cut_val;
+
+ Node_handle lower_ch, upper_ch;
+
+ public:
+
+ // default constructor
+ Kd_tree_internal_node()
+ {}
+
+ Kd_tree_internal_node(bool leaf_)
+ : Base(leaf_)
+ {}
+
+
+ // members for internal node and extended internal node
+
+ inline
+ Node_const_handle
+ lower() const
+ {
+ return lower_ch;
+ }
+
+ inline
+ Node_const_handle
+ upper() const
+ {
+ return upper_ch;
+ }
+
+ inline
+ Node_handle
+ lower()
+ {
+ return lower_ch;
+ }
+
+ inline
+ Node_handle
+ upper()
+ {
+ return upper_ch;
+ }
+
+ inline
+ void
+ set_lower(Node_handle nh)
+ {
+ lower_ch = nh;
+ }
+
+ inline
+ void
+ set_upper(Node_handle nh)
+ {
+ upper_ch = nh;
+ }
+
+ // inline Separator& separator() {return sep; }
+ // use instead
+
+ inline
+ void set_separator(Separator& sep){
+ cut_dim = sep.cutting_dimension();
+ cut_val = sep.cutting_value();
+ }
+
+ inline
+ FT
+ cutting_value() const
+ {
+ return cut_val;
+ }
+
+ inline
+ int
+ cutting_dimension() const
+ {
+ return cut_dim;
+ }
+
+ /* Separator&
+ separator()
+ {
+ return Separator(cutting_dimension,cutting_value);
+ }*/
+
+
+ };//internal node
+
+
+
+} // namespace CGAL
+#endif // CGAL_KD_TREE_NODE_H
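Because the node-level search_any_point() above stops at the first matching point, the corresponding tree-level call is a cheap emptiness test for a query region. A minimal sketch under the same assumptions as the previous one (standard CGAL searching headers, this patched Kd_tree):

#include <CGAL/Simple_cartesian.h>
#include <CGAL/Search_traits_2.h>
#include <CGAL/Kd_tree.h>
#include <CGAL/Fuzzy_sphere.h>
#include <boost/optional.hpp>
#include <iostream>

typedef CGAL::Simple_cartesian<double> K;
typedef CGAL::Search_traits_2<K>       Traits;
typedef CGAL::Kd_tree<Traits>          Tree;
typedef CGAL::Fuzzy_sphere<Traits>     Sphere;

int main() {
  Tree tree;
  tree.insert(K::Point_2(3, 4));

  // boost::none means no stored point lies inside the query sphere.
  boost::optional<K::Point_2> hit = tree.search_any_point(Sphere(K::Point_2(0, 0), 1.0));
  std::cout << (hit ? "non-empty" : "empty") << '\n';  // prints: empty
}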
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_LA_base.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_LA_base.h
new file mode 100644
index 00000000..c13a9801
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_LA_base.h
@@ -0,0 +1,177 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_KERNEL_D_CARTESIAN_LA_BASE_H
+#define CGAL_KERNEL_D_CARTESIAN_LA_BASE_H
+
+#include <CGAL/basic.h>
+#include <CGAL/Origin.h>
+#include <boost/type_traits/integral_constant.hpp>
+#include <CGAL/representation_tags.h>
+#include <CGAL/NewKernel_d/functor_tags.h>
+#include <CGAL/Uncertain.h>
+#include <CGAL/typeset.h>
+#include <CGAL/NewKernel_d/Dimension_base.h>
+#include <CGAL/NewKernel_d/Cartesian_LA_functors.h>
+#include <CGAL/NewKernel_d/Vector/array.h>
+#include <CGAL/NewKernel_d/Vector/vector.h>
+#include <CGAL/NewKernel_d/Vector/mix.h>
+#ifdef CGAL_EIGEN3_ENABLED
+#include <CGAL/NewKernel_d/LA_eigen/LA.h>
+#else
+#error Eigen3 is required
+#endif
+
+namespace CGAL {
+
+template < typename FT_, typename Dim_,
+#if 1
+ typename Vec_=Mix_vector<Array_vector<FT_, Dim_>,
+ Vector_vector<FT_, Dim_>,
+ FT_, Dim_>,
+#elif 0
+ typename Vec_=Array_vector<FT_, Dim_>,
+#elif 0
+ typename Vec_=Vector_vector<FT_, Dim_>,
+#else
+ // Dangerous because of alignment. Ok on x86_64 without AVX.
+ typename Vec_=LA_eigen<FT_, Dim_>,
+#endif
+ typename LA_=LA_eigen<FT_,Dim_> >
+ /* Default LA to Vec or to LA_eigen? */
+struct Cartesian_LA_base_d : public Dimension_base<Dim_>
+{
+ typedef Cartesian_LA_base_d<FT_,Dim_> Self;
+ typedef Cartesian_tag Rep_tag;
+ typedef Cartesian_tag Kernel_tag;
+ typedef Dim_ Default_ambient_dimension;
+ typedef Dim_ Max_ambient_dimension;
+ typedef Dim_ Dimension;
+ typedef LA_ LA;
+ template <class> struct Ambient_dimension { typedef Dim_ type; };
+
+ typedef Vec_ LA_vector;
+ typedef typename LA_vector::Vector Point;
+ typedef typename LA_vector::Vector Vector;
+ typedef typename LA_vector::Vector Vector_;
+ typedef typename LA_vector::Construct_vector Constructor;
+ typedef typename LA_vector::Vector_const_iterator Point_cartesian_const_iterator;
+ typedef typename LA_vector::Vector_const_iterator Vector_cartesian_const_iterator;
+
+ template<class, class=void> struct Type {};
+ template<class D> struct Type< Point_tag, D> { typedef Vector_ type; };
+ template<class D> struct Type<Vector_tag, D> { typedef Vector_ type; };
+ template<class D> struct Type< FT_tag, D> { typedef FT_ type; };
+ template<class D> struct Type< RT_tag, D> { typedef FT_ type; };
+
+ typedef typeset<Point_tag>
+ ::add<Vector_tag>::type
+ // FIXME: These have nothing to do here.
+ ::add<Segment_tag>::type
+ ::add<Hyperplane_tag>::type
+ ::add<Sphere_tag>::type
+ ::add<Weighted_point_tag>::type
+ Object_list;
+
+ typedef typeset< Point_cartesian_const_iterator_tag>::type
+ ::add<Vector_cartesian_const_iterator_tag>::type
+ Iterator_list;
+
+ template<class, class=void, class=boost::integral_constant<int,0> > struct Functor {
+ typedef Null_functor type;
+ };
+ template<class D> struct Functor<Construct_ttag<Vector_tag>,D> {
+ typedef CartesianDVectorBase::Construct_LA_vector<Self,Null_vector> type;
+ };
+ template<class D> struct Functor<Construct_ttag<Point_tag>,D> {
+ typedef CartesianDVectorBase::Construct_LA_vector<Self,Origin> type;
+ };
+ template<class D> struct Functor<Construct_ttag<Point_cartesian_const_iterator_tag>,D> {
+ typedef CartesianDVectorBase::Construct_cartesian_const_iterator<Self> type;
+ };
+ template<class D> struct Functor<Construct_ttag<Vector_cartesian_const_iterator_tag>,D> {
+ typedef CartesianDVectorBase::Construct_cartesian_const_iterator<Self> type;
+ };
+ template<class D> struct Functor<Sum_of_vectors_tag,D,
+ boost::integral_constant<int,!LA_vector::template Property<Has_vector_plus_minus_tag>::value> > {
+ typedef CartesianDVectorBase::Sum_of_vectors<Self> type;
+ };
+ template<class D> struct Functor<Difference_of_vectors_tag,D,
+ boost::integral_constant<int,!LA_vector::template Property<Has_vector_plus_minus_tag>::value> > {
+ typedef CartesianDVectorBase::Difference_of_vectors<Self> type;
+ };
+ template<class D> struct Functor<Opposite_vector_tag,D,
+ boost::integral_constant<int,!LA_vector::template Property<Has_vector_plus_minus_tag>::value> > {
+ typedef CartesianDVectorBase::Opposite_vector<Self> type;
+ };
+ template<class D> struct Functor<Midpoint_tag,D,
+ boost::integral_constant<int,
+ !LA_vector::template Property<Has_vector_plus_minus_tag>::value
+ || !LA_vector::template Property<Has_vector_scalar_ops_tag>::value> > {
+ typedef CartesianDVectorBase::Midpoint<Self> type;
+ };
+ template<class D> struct Functor<Compute_point_cartesian_coordinate_tag,D> {
+ typedef CartesianDVectorBase::Compute_cartesian_coordinate<Self> type;
+ };
+ template<class D> struct Functor<Compute_vector_cartesian_coordinate_tag,D> {
+ typedef CartesianDVectorBase::Compute_cartesian_coordinate<Self> type;
+ };
+ template<class D> struct Functor<Point_dimension_tag,D> {
+ typedef CartesianDVectorBase::PV_dimension<Self> type;
+ };
+ template<class D> struct Functor<Vector_dimension_tag,D> {
+ typedef CartesianDVectorBase::PV_dimension<Self> type;
+ };
+ template<class D> struct Functor<Orientation_of_vectors_tag,D,
+ boost::integral_constant<int,!LA_vector::template Property<Has_determinant_of_iterator_to_vectors_tag>::value> > {
+ typedef CartesianDVectorBase::Orientation_of_vectors<Self> type;
+ };
+ template<class D> struct Functor<Orientation_of_points_tag,D,
+ boost::integral_constant<int,!LA_vector::template Property<Has_determinant_of_iterator_to_points_tag>::value> > {
+ typedef CartesianDVectorBase::Orientation_of_points<Self> type;
+ };
+ template<class D> struct Functor<Scalar_product_tag,D,
+ boost::integral_constant<int,!LA_vector::template Property<Has_dot_product_tag>::value> > {
+ typedef CartesianDVectorBase::Scalar_product<Self> type;
+ };
+ template<class D> struct Functor<Squared_distance_to_origin_tag,D,
+ boost::integral_constant<int,!LA_vector::template Property<Stores_squared_norm_tag>::value> > {
+ typedef CartesianDVectorBase::Squared_distance_to_origin_stored<Self> type;
+ };
+ // Use integral_constant<int,2> in case of failure, to distinguish from the previous one.
+ template<class D> struct Functor<Squared_distance_to_origin_tag,D,
+ boost::integral_constant<int,
+ (LA_vector::template Property<Stores_squared_norm_tag>::value
+ || !LA_vector::template Property<Has_dot_product_tag>::value)*2> > {
+ typedef CartesianDVectorBase::Squared_distance_to_origin_via_dotprod<Self> type;
+ };
+ template<class D> struct Functor<Point_to_vector_tag,D> {
+ typedef CartesianDVectorBase::Identity_functor<Self> type;
+ };
+ template<class D> struct Functor<Vector_to_point_tag,D> {
+ typedef CartesianDVectorBase::Identity_functor<Self> type;
+ };
+
+ CGAL_CONSTEXPR Cartesian_LA_base_d(){}
+ CGAL_CONSTEXPR Cartesian_LA_base_d(int d):Dimension_base<Dim_>(d){}
+};
+
+} //namespace CGAL
+
+#endif // CGAL_KERNEL_D_CARTESIAN_LA_BASE_H
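The nested Functor templates above are switched on and off through their third parameter: each partial specialization is written against integral_constant<int, !Property>, so it matches the default integral_constant<int, 0> exactly when the LA backend has the required capability; otherwise the primary template yields Null_functor and a more generic kernel layer supplies the operation. A minimal model of this selection pattern in plain C++ (illustrative only, not CGAL code):

#include <type_traits>

struct Null_functor {};  // no backend support: another kernel layer must provide it
struct LA_sum {};        // backend-provided a+b functor

template <bool HasPlus>
struct LA_model { static const bool has_vector_plus = HasPlus; };

// Primary template: always queried with the default integral_constant<int, 0>.
template <class LA, class = std::integral_constant<int, 0> >
struct Sum_functor { typedef Null_functor type; };

// Matches only when the condition evaluates to 0, i.e. the capability is present.
template <class LA>
struct Sum_functor<LA, std::integral_constant<int, !LA::has_vector_plus> > {
  typedef LA_sum type;
};

static_assert(std::is_same<Sum_functor<LA_model<true> >::type,  LA_sum>::value, "");
static_assert(std::is_same<Sum_functor<LA_model<false> >::type, Null_functor>::value, "");

int main() {}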
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_LA_functors.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_LA_functors.h
new file mode 100644
index 00000000..871c463a
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_LA_functors.h
@@ -0,0 +1,344 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_CARTESIAN_LA_FUNCTORS_H
+#define CGAL_CARTESIAN_LA_FUNCTORS_H
+
+#include <CGAL/NewKernel_d/utils.h>
+#include <CGAL/is_iterator.h>
+#include <CGAL/argument_swaps.h>
+#include <CGAL/Kernel/Return_base_tag.h>
+#include <CGAL/transforming_iterator.h>
+#include <CGAL/NewKernel_d/store_kernel.h>
+#include <CGAL/Dimension.h>
+
+namespace CGAL {
+namespace CartesianDVectorBase {
+#ifndef CGAL_CXX11
+namespace internal {
+template<class R_,class Dim_> struct Construct_LA_vector_ {
+ struct Never_use {};
+ void operator()(Never_use)const;
+};
+#define CGAL_CODE(Z,N,_) template<class R> struct Construct_LA_vector_<R,Dimension_tag<N> > { \
+ typedef typename R::Constructor Constructor; \
+ typedef typename Get_type<R, RT_tag>::type RT; \
+ typedef typename R::Vector_ result_type; \
+ result_type operator() \
+ (BOOST_PP_ENUM_PARAMS(N,RT const& t)) const { \
+ return typename Constructor::Values()(BOOST_PP_ENUM_PARAMS(N,t)); \
+ } \
+ result_type operator() \
+ (BOOST_PP_ENUM_PARAMS(BOOST_PP_INC(N),RT const& t)) const { \
+ return typename Constructor::Values_divide()(t##N,BOOST_PP_ENUM_PARAMS(N,t)); \
+ } \
+ };
+BOOST_PP_REPEAT_FROM_TO(2, 11, CGAL_CODE, _ )
+#undef CGAL_CODE
+}
+#endif
+
+template<class R_,class Zero_> struct Construct_LA_vector
+: private Store_kernel<R_>
+#ifndef CGAL_CXX11
+, public internal::Construct_LA_vector_<R_,typename R_::Default_ambient_dimension>
+#endif
+{
+ //CGAL_FUNCTOR_INIT_IGNORE(Construct_LA_vector)
+ CGAL_FUNCTOR_INIT_STORE(Construct_LA_vector)
+ typedef R_ R;
+ typedef typename R::Constructor Constructor;
+ typedef typename Get_type<R, RT_tag>::type RT;
+ typedef typename Get_type<R, FT_tag>::type FT;
+ typedef typename R::Vector_ result_type;
+ typedef typename R_::Default_ambient_dimension Dimension;
+ result_type operator()(int d)const{
+ CGAL_assertion(check_dimension_eq(d,this->kernel().dimension()));
+ return typename Constructor::Dimension()(d);
+ }
+ result_type operator()()const{
+ return typename Constructor::Dimension()((std::max)(0,this->kernel().dimension()));
+ }
+ result_type operator()(int d, Zero_ const&)const{
+ CGAL_assertion(check_dimension_eq(d,this->kernel().dimension()));
+ return typename Constructor::Dimension()(d);
+ }
+ result_type operator()(Zero_ const&)const{
+ // Makes no sense for an unknown dimension.
+ return typename Constructor::Dimension()(this->kernel().dimension());
+ }
+ result_type operator()(result_type const& v)const{
+ return v;
+ }
+#ifdef CGAL_CXX11
+ result_type operator()(result_type&& v)const{
+ return std::move(v);
+ }
+#endif
+#ifdef CGAL_CXX11
+ template<class...U>
+ typename std::enable_if<Constructible_from_each<RT,U...>::value &&
+ boost::is_same<Dimension_tag<sizeof...(U)>, Dimension>::value,
+ result_type>::type
+ operator()(U&&...u)const{
+ return typename Constructor::Values()(std::forward<U>(u)...);
+ }
+ //template<class...U,class=typename std::enable_if<Constructible_from_each<RT,U...>::value>::type,class=typename std::enable_if<(sizeof...(U)==static_dim+1)>::type,class=void>
+ template<class...U>
+ typename std::enable_if<Constructible_from_each<RT,U...>::value &&
+ boost::is_same<Dimension_tag<sizeof...(U)-1>, Dimension>::value,
+ result_type>::type
+ operator()(U&&...u)const{
+ return Apply_to_last_then_rest()(typename Constructor::Values_divide(),std::forward<U>(u)...);
+ }
+#else
+ using internal::Construct_LA_vector_<R_,typename R::Default_ambient_dimension>::operator();
+#endif
+ template<class Iter> inline
+ typename boost::enable_if<is_iterator_type<Iter,std::forward_iterator_tag>,result_type>::type operator()
+ (Iter f,Iter g,Cartesian_tag t)const
+ {
+ return this->operator()((int)std::distance(f,g),f,g,t);
+ }
+ template<class Iter> inline
+ typename boost::enable_if<is_iterator_type<Iter,std::forward_iterator_tag>,result_type>::type operator()
+ (int d,Iter f,Iter g,Cartesian_tag)const
+ {
+ CGAL_assertion(d==std::distance(f,g));
+ CGAL_assertion(check_dimension_eq(d,this->kernel().dimension()));
+ return typename Constructor::Iterator()(d,f,g);
+ }
+ template<class Iter> inline
+ typename boost::enable_if<is_iterator_type<Iter,std::bidirectional_iterator_tag>,result_type>::type operator()
+ (Iter f,Iter g,Homogeneous_tag)const
+ {
+ --g;
+ return this->operator()((int)std::distance(f,g),f,g,*g);
+ }
+ template<class Iter> inline
+ typename boost::enable_if<is_iterator_type<Iter,std::bidirectional_iterator_tag>,result_type>::type operator()
+ (int d,Iter f,Iter g,Homogeneous_tag)const
+ {
+ --g;
+ return this->operator()(d,f,g,*g);
+ }
+ template<class Iter> inline
+ typename boost::enable_if<is_iterator_type<Iter,std::forward_iterator_tag>,result_type>::type operator()
+ (Iter f,Iter g)const
+ {
+ // Shouldn't it try comparing dist(f,g) to the dimension if it is known?
+ return this->operator()(f,g,typename R::Rep_tag());
+ }
+ template<class Iter> inline
+ typename boost::enable_if<is_iterator_type<Iter,std::forward_iterator_tag>,result_type>::type operator()
+ (int d,Iter f,Iter g)const
+ {
+ return this->operator()(d,f,g,typename R::Rep_tag());
+ }
+
+ // Last homogeneous coordinate given separately
+ template<class Iter,class NT> inline
+ typename boost::enable_if<is_iterator_type<Iter,std::forward_iterator_tag>,result_type>::type operator()
+ (int d,Iter f,Iter g,NT const&l)const
+ {
+ CGAL_assertion(d==std::distance(f,g));
+ CGAL_assertion(check_dimension_eq(d,this->kernel().dimension()));
+ // RT? better be safe for now
+ return typename Constructor::Iterator()(d,CGAL::make_transforming_iterator(f,Divide<FT,NT>(l)),CGAL::make_transforming_iterator(g,Divide<FT,NT>(l)));
+ }
+ template<class Iter,class NT> inline
+ typename boost::enable_if<is_iterator_type<Iter,std::forward_iterator_tag>,result_type>::type operator()
+ (Iter f,Iter g,NT const&l)const
+ {
+ return this->operator()((int)std::distance(f,g),f,g,l);
+ }
+};
+
+template<class R_> struct Compute_cartesian_coordinate {
+ CGAL_FUNCTOR_INIT_IGNORE(Compute_cartesian_coordinate)
+ typedef R_ R;
+ typedef typename Get_type<R, RT_tag>::type RT;
+ typedef typename R::Vector_ first_argument_type;
+ typedef int second_argument_type;
+ typedef Tag_true Is_exact;
+#ifdef CGAL_CXX11
+ typedef decltype(std::declval<const first_argument_type>()[0]) result_type;
+#else
+ typedef RT const& result_type;
+ // RT const& doesn't work with some LA (Eigen2 for instance) so we
+ // should use plain RT or find a way to detect this.
+#endif
+
+ result_type operator()(first_argument_type const& v,int i)const{
+ return v[i];
+ }
+};
+
+template<class R_> struct Construct_cartesian_const_iterator {
+ CGAL_FUNCTOR_INIT_IGNORE(Construct_cartesian_const_iterator)
+ typedef R_ R;
+ typedef typename R::Vector_ argument_type;
+ typedef typename R::LA_vector S_;
+ typedef typename R::Point_cartesian_const_iterator result_type;
+ // same as Vector
+ typedef Tag_true Is_exact;
+
+ result_type operator()(argument_type const& v,Begin_tag)const{
+ return S_::vector_begin(v);
+ }
+ result_type operator()(argument_type const& v,End_tag)const{
+ return S_::vector_end(v);
+ }
+};
+
+template<class R_> struct Midpoint {
+ CGAL_FUNCTOR_INIT_IGNORE(Midpoint)
+ typedef R_ R;
+ typedef typename Get_type<R, Point_tag>::type first_argument_type;
+ typedef typename Get_type<R, Point_tag>::type second_argument_type;
+ typedef typename Get_type<R, Point_tag>::type result_type;
+
+ result_type operator()(result_type const& a, result_type const& b)const{
+ return (a+b)/2;
+ }
+};
+
+template<class R_> struct Sum_of_vectors {
+ CGAL_FUNCTOR_INIT_IGNORE(Sum_of_vectors)
+ typedef R_ R;
+ typedef typename Get_type<R, Vector_tag>::type first_argument_type;
+ typedef typename Get_type<R, Vector_tag>::type second_argument_type;
+ typedef typename Get_type<R, Vector_tag>::type result_type;
+
+ result_type operator()(result_type const& a, result_type const& b)const{
+ return a+b;
+ }
+};
+
+template<class R_> struct Difference_of_vectors {
+ CGAL_FUNCTOR_INIT_IGNORE(Difference_of_vectors)
+ typedef R_ R;
+ typedef typename Get_type<R, Vector_tag>::type first_argument_type;
+ typedef typename Get_type<R, Vector_tag>::type second_argument_type;
+ typedef typename Get_type<R, Vector_tag>::type result_type;
+
+ result_type operator()(result_type const& a, result_type const& b)const{
+ return a-b;
+ }
+};
+
+template<class R_> struct Opposite_vector {
+ CGAL_FUNCTOR_INIT_IGNORE(Opposite_vector)
+ typedef R_ R;
+ typedef typename Get_type<R, Vector_tag>::type result_type;
+ typedef typename Get_type<R, Vector_tag>::type argument_type;
+
+ result_type operator()(result_type const& v)const{
+ return -v;
+ }
+};
+
+template<class R_> struct Scalar_product {
+ CGAL_FUNCTOR_INIT_IGNORE(Scalar_product)
+ typedef R_ R;
+ typedef typename R::LA_vector LA;
+ typedef typename Get_type<R, RT_tag>::type result_type;
+ typedef typename Get_type<R, Vector_tag>::type first_argument_type;
+ typedef typename Get_type<R, Vector_tag>::type second_argument_type;
+
+ result_type operator()(first_argument_type const& a, second_argument_type const& b)const{
+ return LA::dot_product(a,b);
+ }
+};
+
+template<class R_> struct Squared_distance_to_origin_stored {
+ CGAL_FUNCTOR_INIT_IGNORE(Squared_distance_to_origin_stored)
+ typedef R_ R;
+ typedef typename R::LA_vector LA;
+ typedef typename Get_type<R, RT_tag>::type result_type;
+ typedef typename Get_type<R, Point_tag>::type argument_type;
+
+ result_type operator()(argument_type const& a)const{
+ return LA::squared_norm(a);
+ }
+};
+
+template<class R_> struct Squared_distance_to_origin_via_dotprod {
+ CGAL_FUNCTOR_INIT_IGNORE(Squared_distance_to_origin_via_dotprod)
+ typedef R_ R;
+ typedef typename R::LA_vector LA;
+ typedef typename Get_type<R, RT_tag>::type result_type;
+ typedef typename Get_type<R, Point_tag>::type argument_type;
+
+ result_type operator()(argument_type const& a)const{
+ return LA::dot_product(a,a);
+ }
+};
+
+template<class R_> struct Orientation_of_vectors {
+ CGAL_FUNCTOR_INIT_IGNORE(Orientation_of_vectors)
+ typedef R_ R;
+ typedef typename R::Vector_cartesian_const_iterator first_argument_type;
+ typedef typename R::Vector_cartesian_const_iterator second_argument_type;
+ typedef typename Get_type<R, Orientation_tag>::type result_type;
+ typedef typename R::LA_vector LA;
+
+ template<class Iter>
+ result_type operator()(Iter const& f, Iter const& e) const {
+ return LA::determinant_of_iterators_to_vectors(f,e);
+ }
+};
+
+template<class R_> struct Orientation_of_points {
+ CGAL_FUNCTOR_INIT_IGNORE(Orientation_of_points)
+ typedef R_ R;
+ typedef typename R::Point_cartesian_const_iterator first_argument_type;
+ typedef typename R::Point_cartesian_const_iterator second_argument_type;
+ typedef typename Get_type<R, Orientation_tag>::type result_type;
+ typedef typename R::LA_vector LA;
+
+ template<class Iter>
+ result_type operator()(Iter const& f, Iter const& e) const {
+ return LA::determinant_of_iterators_to_points(f,e);
+ }
+};
+
+template<class R_> struct PV_dimension {
+ CGAL_FUNCTOR_INIT_IGNORE(PV_dimension)
+ typedef R_ R;
+ typedef typename R::Vector_ argument_type;
+ typedef int result_type;
+ typedef typename R::LA_vector LA;
+ typedef Tag_true Is_exact;
+
+ template<class T>
+ result_type operator()(T const& v) const {
+ return LA::size_of_vector(v);
+ }
+};
+
+template<class R_> struct Identity_functor {
+ CGAL_FUNCTOR_INIT_IGNORE(Identity_functor)
+ template<class T>
+ T const& operator()(T const&t) const { return t; }
+};
+
+}
+} // namespace CGAL
+#endif // CGAL_CARTESIAN_LA_FUNCTORS_H
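Construct_LA_vector above accepts either exactly Dimension arguments (plain Cartesian coordinates, Constructor::Values) or Dimension + 1 arguments, in which case the trailing value is a homogeneous divisor (Constructor::Values_divide, and Divide<FT,NT> in the iterator overloads). A small illustrative sketch of that convention with a hypothetical 2D helper (not CGAL code):

#include <array>
#include <iostream>

// 2D case: (x, y, w) denotes the Cartesian point (x / w, y / w).
std::array<double, 2> from_homogeneous(double x, double y, double w) {
  return {{ x / w, y / w }};
}

int main() {
  std::array<double, 2> p = from_homogeneous(2.0, 4.0, 2.0);
  std::cout << p[0] << ' ' << p[1] << '\n';  // prints: 1 2
}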
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_base.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_base.h
new file mode 100644
index 00000000..641bf8ae
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_base.h
@@ -0,0 +1,40 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_KERNEL_D_CARTESIAN_BASE_H
+#define CGAL_KERNEL_D_CARTESIAN_BASE_H
+
+#include <CGAL/basic.h>
+#include <CGAL/NewKernel_d/Cartesian_complete.h>
+#include <CGAL/NewKernel_d/Cartesian_LA_base.h>
+
+namespace CGAL {
+#define CGAL_BASE \
+ Cartesian_LA_base_d< FT_, Dim_ >
+template < typename FT_, typename Dim_, typename Derived_=Default>
+struct Cartesian_base_d : public CGAL_BASE
+{
+ CGAL_CONSTEXPR Cartesian_base_d(){}
+ CGAL_CONSTEXPR Cartesian_base_d(int d):CGAL_BASE(d){}
+};
+#undef CGAL_BASE
+
+} //namespace CGAL
+
+#endif // CGAL_KERNEL_D_CARTESIAN_BASE_H
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_change_FT.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_change_FT.h
new file mode 100644
index 00000000..e09c72d0
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_change_FT.h
@@ -0,0 +1,117 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_KERNEL_D_CARTESIAN_CHANGE_FT_H
+#define CGAL_KERNEL_D_CARTESIAN_CHANGE_FT_H
+
+#include <CGAL/basic.h>
+#include <CGAL/NT_converter.h>
+#include <CGAL/transforming_iterator.h>
+#include <CGAL/NewKernel_d/Cartesian_complete.h>
+
+namespace CGAL {
+
+template < typename Base_, typename FT_, typename LA_=CGAL::LA_eigen<FT_,typename Base_::Default_ambient_dimension> >
+struct Cartesian_change_FT_base : public
+ Base_
+{
+ CGAL_CONSTEXPR Cartesian_change_FT_base(){}
+ CGAL_CONSTEXPR Cartesian_change_FT_base(int d):Base_(d){}
+
+ typedef Cartesian_change_FT_base Self;
+ typedef Base_ Kernel_base;
+ typedef LA_ LA;
+
+ template <class T, class D=void> struct Type : Inherit_type<Base_, T> {};
+ template <class D> struct Type <FT_tag, D> { typedef FT_ type; };
+ template <class D> struct Type <RT_tag, D> { typedef FT_ type; };
+
+ typedef NT_converter<typename Get_type<Kernel_base, FT_tag>::type,FT_> FT_converter;
+ typedef transforming_iterator<FT_converter,typename Kernel_base::Point_cartesian_const_iterator> Point_cartesian_const_iterator;
+ typedef transforming_iterator<FT_converter,typename Kernel_base::Vector_cartesian_const_iterator> Vector_cartesian_const_iterator;
+ //FIXME: use Iterator_list!
+ /*
+ template<class T,bool=CGAL_BOOSTD is_same<typename iterator_tag_traits<T>::value_tag,FT_tag>::value>
+ struct Iterator : Get_type<Kernel_base,T> {};
+ template<class T> struct Iterator<T,true> {
+ typedef transforming_iterator<FT_converter,typename Get_type<Kernel_base,T>::type> type;
+ };
+ */
+
+ template<class Tag_,class Type_>
+ struct Construct_cartesian_const_iterator_ {
+ typedef typename Get_functor<Kernel_base, Tag_>::type Functor_base;
+ Construct_cartesian_const_iterator_(){}
+ Construct_cartesian_const_iterator_(Self const&r):f(r){}
+ Functor_base f;
+ typedef Type_ result_type;
+ template<class T>
+ result_type operator()(T const& v, Begin_tag)const{
+ return make_transforming_iterator(f(v,Begin_tag()),FT_converter());
+ }
+ template<class T>
+ result_type operator()(T const& v, End_tag)const{
+ return make_transforming_iterator(f(v,End_tag()),FT_converter());
+ }
+ };
+ typedef Construct_cartesian_const_iterator_<Construct_ttag<Point_cartesian_const_iterator_tag>,Point_cartesian_const_iterator> Construct_point_cartesian_const_iterator;
+ typedef Construct_cartesian_const_iterator_<Construct_ttag<Vector_cartesian_const_iterator_tag>,Vector_cartesian_const_iterator> Construct_vector_cartesian_const_iterator;
+
+ template<class Tag_>
+ struct Compute_cartesian_coordinate {
+ typedef typename Get_functor<Kernel_base, Tag_>::type Functor_base;
+ Compute_cartesian_coordinate(){}
+ Compute_cartesian_coordinate(Self const&r):f(r){}
+ Functor_base f;
+ typedef FT_ result_type;
+ template<class Obj_>
+ result_type operator()(Obj_ const& v,int i)const{
+ return FT_converter()(f(v,i));
+ }
+ };
+
+ template<class T,class U=void,class=typename Get_functor_category<Cartesian_change_FT_base,T>::type> struct Functor :
+ Inherit_functor<Kernel_base,T,U> { };
+ template<class T,class U> struct Functor<T,U,Compute_tag> { };
+ template<class T,class U> struct Functor<T,U,Predicate_tag> { };
+ template<class D> struct Functor<Compute_point_cartesian_coordinate_tag,D,Compute_tag> {
+ typedef Compute_cartesian_coordinate<Compute_point_cartesian_coordinate_tag> type;
+ };
+ template<class D> struct Functor<Compute_vector_cartesian_coordinate_tag,D,Compute_tag> {
+ typedef Compute_cartesian_coordinate<Compute_vector_cartesian_coordinate_tag> type;
+ };
+ template<class D> struct Functor<Construct_ttag<Point_cartesian_const_iterator_tag>,D,Construct_iterator_tag> {
+ typedef Construct_point_cartesian_const_iterator type;
+ };
+ template<class D> struct Functor<Construct_ttag<Vector_cartesian_const_iterator_tag>,D,Construct_iterator_tag> {
+ typedef Construct_vector_cartesian_const_iterator type;
+ };
+};
+
+template < typename Base_, typename FT_>
+struct Cartesian_change_FT : public
+ Cartesian_change_FT_base<Base_,FT_>
+{
+ CGAL_CONSTEXPR Cartesian_change_FT(){}
+ CGAL_CONSTEXPR Cartesian_change_FT(int d):Cartesian_change_FT_base<Base_,FT_>(d){}
+};
+
+} //namespace CGAL
+
+#endif // CGAL_KERNEL_D_CARTESIAN_CHANGE_FT_H
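Cartesian_change_FT_base above never copies coordinates: it wraps the base kernel's coordinate iterators in a transforming iterator that applies an NT_converter on each dereference. A minimal model of that idea using boost::transform_iterator in place of the CGAL-internal transforming_iterator (illustrative only):

#include <boost/iterator/transform_iterator.hpp>
#include <iostream>
#include <vector>

// Stand-in for NT_converter<double, long double>.
struct To_long_double {
  typedef long double result_type;
  long double operator()(double x) const { return static_cast<long double>(x); }
};

int main() {
  std::vector<double> stored = {0.25, 0.5, 0.75};  // base-kernel coordinates

  // Each dereference converts on the fly; nothing is copied up front.
  auto first = boost::make_transform_iterator(stored.begin(), To_long_double());
  auto last  = boost::make_transform_iterator(stored.end(), To_long_double());
  for (; first != last; ++first)
    std::cout << *first << ' ';
  std::cout << '\n';
}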
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_complete.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_complete.h
new file mode 100644
index 00000000..ef8921db
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_complete.h
@@ -0,0 +1,33 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_KERNEL_D_CARTESIAN_COMPLETE_H
+#define CGAL_KERNEL_D_CARTESIAN_COMPLETE_H
+
+#include <CGAL/NewKernel_d/function_objects_cartesian.h>
+#include <CGAL/NewKernel_d/Cartesian_per_dimension.h>
+#include <CGAL/NewKernel_d/Types/Segment.h>
+#include <CGAL/NewKernel_d/Types/Sphere.h>
+#include <CGAL/NewKernel_d/Types/Hyperplane.h>
+#include <CGAL/NewKernel_d/Types/Aff_transformation.h>
+#include <CGAL/NewKernel_d/Types/Line.h>
+#include <CGAL/NewKernel_d/Types/Ray.h>
+#include <CGAL/NewKernel_d/Types/Iso_box.h>
+
+#endif // CGAL_KERNEL_D_CARTESIAN_COMPLETE_H
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_filter_K.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_filter_K.h
new file mode 100644
index 00000000..179e97bf
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_filter_K.h
@@ -0,0 +1,79 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_KERNEL_D_CARTESIAN_FILTER_K_H
+#define CGAL_KERNEL_D_CARTESIAN_FILTER_K_H
+
+#include <CGAL/basic.h>
+#include <CGAL/NewKernel_d/KernelD_converter.h>
+#include <CGAL/NewKernel_d/Filtered_predicate2.h>
+#include <boost/mpl/if.hpp>
+#include <boost/mpl/and.hpp>
+
+namespace CGAL {
+
+template < typename Base_, typename AK_, typename EK_ >
+struct Cartesian_filter_K : public Base_,
+ private Store_kernel<AK_>, private Store_kernel2<EK_>
+{
+ CGAL_CONSTEXPR Cartesian_filter_K(){}
+ CGAL_CONSTEXPR Cartesian_filter_K(int d):Base_(d){}
+ //FIXME: or do we want an instance of AK and EK belonging to this kernel,
+ //instead of a reference to external ones?
+ CGAL_CONSTEXPR Cartesian_filter_K(AK_ const&a,EK_ const&b):Base_(),Store_kernel<AK_>(a),Store_kernel2<EK_>(b){}
+ CGAL_CONSTEXPR Cartesian_filter_K(int d,AK_ const&a,EK_ const&b):Base_(d),Store_kernel<AK_>(a),Store_kernel2<EK_>(b){}
+ typedef Base_ Kernel_base;
+ typedef AK_ AK;
+ typedef EK_ EK;
+ typedef typename Store_kernel<AK_>::reference_type AK_rt;
+ AK_rt approximate_kernel()const{return this->kernel();}
+ typedef typename Store_kernel2<EK_>::reference2_type EK_rt;
+ EK_rt exact_kernel()const{return this->kernel2();}
+
+ // MSVC is too dumb to perform the empty base optimization.
+ typedef boost::mpl::and_<
+ internal::Do_not_store_kernel<Kernel_base>,
+ internal::Do_not_store_kernel<AK>,
+ internal::Do_not_store_kernel<EK> > Do_not_store_kernel;
+
+ //TODO: C2A/C2E could be able to convert *this into this->kernel() or this->kernel2().
+ typedef KernelD_converter<Kernel_base,AK> C2A;
+ typedef KernelD_converter<Kernel_base,EK> C2E;
+
+ // fix the types
+ // TODO: only fix some types, based on some criterion?
+ template<class T> struct Type : Get_type<Kernel_base,T> {};
+
+ template<class T,class D=void,class=typename Get_functor_category<Cartesian_filter_K,T>::type> struct Functor :
+ Inherit_functor<Kernel_base,T,D> {};
+ template<class T,class D> struct Functor<T,D,Predicate_tag> {
+ typedef typename Get_functor<AK, T>::type AP;
+ typedef typename Get_functor<EK, T>::type EP;
+ typedef Filtered_predicate2<EP,AP,C2E,C2A> type;
+ };
+// TODO:
+// template<class T> struct Functor<T,No_filter_tag,Predicate_tag> :
+// Kernel_base::template Functor<T,No_filter_tag> {};
+// TODO:
+// detect when Less_cartesian_coordinate doesn't need filtering
+};
+
+} //namespace CGAL
+
+#endif // CGAL_KERNEL_D_CARTESIAN_FILTER_K_H
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_filter_NT.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_filter_NT.h
new file mode 100644
index 00000000..c390a55c
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_filter_NT.h
@@ -0,0 +1,93 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_KERNEL_D_CARTESIAN_FILTER_NT_H
+#define CGAL_KERNEL_D_CARTESIAN_FILTER_NT_H
+
+#include <CGAL/basic.h>
+#include <CGAL/NewKernel_d/Cartesian_change_FT.h>
+#include <CGAL/internal/Exact_type_selector.h>
+
+namespace CGAL {
+
+template < typename Base_ >
+struct Cartesian_filter_NT : public Base_
+{
+ CGAL_CONSTEXPR Cartesian_filter_NT(){}
+ CGAL_CONSTEXPR Cartesian_filter_NT(int d):Base_(d){}
+ typedef Base_ Kernel_base;
+ typedef Cartesian_change_FT<Kernel_base,Interval_nt_advanced> K1;
+ typedef typename internal::Exact_field_selector<typename Get_type<Kernel_base, FT_tag>::type>::Type Exact_nt;
+ typedef Cartesian_change_FT<Kernel_base,Exact_nt> K2;
+
+ template<class T,class D=void,class=typename Get_functor_category<Cartesian_filter_NT,T>::type> struct Functor :
+ Inherit_functor<Kernel_base,T,D> {};
+ template<class T,class D> struct Functor<T,D,Predicate_tag> {
+ struct type {
+ //TODO: use compression (derive from a compressed_pair?)
+ typedef typename Get_functor<K1, T>::type P1; P1 p1;
+ typedef typename Get_functor<K2, T>::type P2; P2 p2;
+ typedef typename P2::result_type result_type;
+ type(){}
+ type(Cartesian_filter_NT const&k):p1(reinterpret_cast<K1 const&>(k)),p2(reinterpret_cast<K2 const&>(k)){}
+ //FIXME: if predicate's constructor takes a kernel as argument, how do we translate that? reinterpret_cast is really ugly and possibly unsafe.
+
+#ifdef CGAL_CXX11
+ template<class...U> result_type operator()(U&&...u)const{
+ {
+ Protect_FPU_rounding<true> p;
+ try {
+ typename P1::result_type res=p1(u...); // don't forward as u may be reused
+ if(is_certain(res)) return get_certain(res);
+ } catch (Uncertain_conversion_exception) {}
+ }
+ return p2(std::forward<U>(u)...);
+ }
+#else
+ result_type operator()()const{ // does it make sense to have 0 argument?
+ {
+ Protect_FPU_rounding<true> p;
+ try {
+ typename P1::result_type res=p1();
+ if(is_certain(res)) return get_certain(res);
+ } catch (Uncertain_conversion_exception) {}
+ }
+ return p2();
+ }
+#define CGAL_CODE(Z,N,_) template<BOOST_PP_ENUM_PARAMS(N,class T)> result_type operator()(BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t))const{ \
+ { \
+ Protect_FPU_rounding<true> p; \
+ try { \
+ typename P1::result_type res=p1(BOOST_PP_ENUM_PARAMS(N,t)); \
+ if(is_certain(res)) return get_certain(res); \
+ } catch (Uncertain_conversion_exception) {} \
+ } \
+ return p2(BOOST_PP_ENUM_PARAMS(N,t)); \
+ }
+ BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_)
+#undef CGAL_CODE
+
+#endif
+ };
+ };
+};
+
+} //namespace CGAL
+
+#endif // CGAL_KERNEL_D_CARTESIAN_FILTER_NT_H
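The predicate wrapper above is the usual arithmetic filter: evaluate P1 with interval arithmetic (Interval_nt_advanced, under Protect_FPU_rounding) and rerun the query with the exact number type (P2) only when the interval answer is uncertain. A minimal standalone model of that control flow (not CGAL code; the epsilon tolerance stands in for interval uncertainty):

#include <iostream>
#include <stdexcept>

struct Uncertain : std::runtime_error {
  Uncertain() : std::runtime_error("uncertain") {}
};

// Stand-in for the approximate predicate P1: fast, but may refuse to answer.
int approx_compare(double a, double b) {
  const double eps = 1e-9;             // models the interval width
  if (a - b >  eps) return  1;
  if (a - b < -eps) return -1;
  throw Uncertain();                   // interval straddles zero: inconclusive
}

// Stand-in for the exact predicate P2: always conclusive, slower in practice.
int exact_compare(double a, double b) { return (a > b) - (a < b); }

int filtered_compare(double a, double b) {
  try { return approx_compare(a, b); }
  catch (const Uncertain&) { return exact_compare(a, b); }
}

int main() {
  std::cout << filtered_compare(2.0, 1.0) << ' '   // decided by the approximate step
            << filtered_compare(1.0, 1.0) << '\n'; // falls back to the exact step
}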
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_per_dimension.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_per_dimension.h
new file mode 100644
index 00000000..179f7319
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_per_dimension.h
@@ -0,0 +1,33 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_KD_CARTESIAN_PER_DIM_H
+#define CGAL_KD_CARTESIAN_PER_DIM_H
+#include <CGAL/NewKernel_d/functor_tags.h>
+#include <CGAL/Dimension.h>
+#include <CGAL/predicates/sign_of_determinant.h>
+
+// Should probably disappear.
+
+namespace CGAL {
+template <class Dim_, class R_, class Derived_>
+struct Cartesian_per_dimension : public R_ {};
+}
+
+#endif
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_static_filters.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_static_filters.h
new file mode 100644
index 00000000..693e962a
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_static_filters.h
@@ -0,0 +1,95 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_KD_CARTESIAN_STATIC_FILTERS_H
+#define CGAL_KD_CARTESIAN_STATIC_FILTERS_H
+#include <CGAL/NewKernel_d/functor_tags.h>
+#include <CGAL/Dimension.h>
+#include <CGAL/internal/Static_filters/tools.h> // bug, should be included by the next one
+#include <CGAL/internal/Static_filters/Orientation_2.h>
+#include <boost/mpl/if.hpp>
+
+namespace CGAL {
+namespace SFA { // static filter adapter
+// Note that this would be quite a bit simpler without stateful kernels
+template <class Base_,class R_> struct Orientation_of_points_2 : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Orientation_of_points_2)
+ typedef typename Get_type<R_, Point_tag>::type Point;
+ typedef typename Get_type<R_, Orientation_tag>::type result_type;
+ typedef typename Get_type<R_, FT_tag>::type FT;
+ typedef typename Get_functor<R_, Compute_point_cartesian_coordinate_tag>::type CC;
+ typedef typename Get_functor<Base_, Orientation_of_points_tag>::type Orientation_base;
+ // TODO: Move this out for easy reuse
+ struct Adapter {
+ struct Point_2 {
+ R_ const&r; CC const&c; Point const& p;
+ Point_2(R_ const&r_, CC const&c_, Point const&p_):r(r_),c(c_),p(p_){}
+ // use result_of instead?
+ typename CC::result_type x()const{return c(p,0);}
+ typename CC::result_type y()const{return c(p,1);}
+ };
+ struct Vector_2 {};
+ struct Circle_2 {};
+ struct Orientation_2 {
+ typedef typename Orientation_of_points_2::result_type result_type;
+ result_type operator()(Point_2 const&A, Point_2 const&B, Point_2 const&C)const{
+ Point const* t[3]={&A.p,&B.p,&C.p};
+ return Orientation_base(A.r)(make_transforming_iterator<Dereference_functor>(t+0),make_transforming_iterator<Dereference_functor>(t+3));
+ }
+ };
+ };
+ template<class Iter> result_type operator()(Iter f, Iter CGAL_assertion_code(e))const{
+ CC c(this->kernel());
+ Point const& A=*f;
+ Point const& B=*++f;
+ Point const& C=*++f;
+ CGAL_assertion(++f==e);
+ typedef typename Adapter::Point_2 P;
+ return typename internal::Static_filters_predicates::Orientation_2<Adapter>()(P(this->kernel(),c,A),P(this->kernel(),c,B),P(this->kernel(),c,C));
+ }
+};
+}
+
+template <class Dim_ /* should be implicit */, class R_, class Derived_=Default>
+struct Cartesian_static_filters : public R_ {
+ CGAL_CONSTEXPR Cartesian_static_filters(){}
+ CGAL_CONSTEXPR Cartesian_static_filters(int d):R_(d){}
+};
+
+template <class R_, class Derived_>
+struct Cartesian_static_filters<Dimension_tag<2>, R_, Derived_> : public R_ {
+ CGAL_CONSTEXPR Cartesian_static_filters(){}
+ CGAL_CONSTEXPR Cartesian_static_filters(int d):R_(d){}
+ typedef Cartesian_static_filters<Dimension_tag<2>, R_, Derived_> Self;
+ typedef typename Default::Get<Derived_,Self>::type Derived;
+ template <class T, class=void> struct Functor : Inherit_functor<R_, T> {};
+ template <class D> struct Functor <Orientation_of_points_tag,D> {
+ typedef
+ //typename boost::mpl::if_ <
+ //boost::is_same<D,No_filter_tag>,
+ //typename Get_functor<R_, Orientation_of_points_tag>::type,
+ SFA::Orientation_of_points_2<R_,Derived>
+ // >::type
+ type;
+ };
+};
+
+}
+
+#endif
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Coaffine.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Coaffine.h
new file mode 100644
index 00000000..43015d24
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Coaffine.h
@@ -0,0 +1,330 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_KD_COAFFINE_H
+#define CGAL_KD_COAFFINE_H
+#include <vector>
+#include <algorithm>
+#include <iterator>
+#include <CGAL/Dimension.h>
+#include <CGAL/NewKernel_d/functor_tags.h>
+
+namespace CGAL {
+namespace CartesianDKernelFunctors {
+struct Flat_orientation {
+ std::vector<int> proj;
+ std::vector<int> rest;
+ bool reverse;
+};
+
+// For debugging purposes
+inline std::ostream& operator<< (std::ostream& o, Flat_orientation const& f) {
+ o << "Proj: ";
+ for(std::vector<int>::const_iterator i=f.proj.begin();
+ i!=f.proj.end(); ++i)
+ o << *i << ' ';
+ o << "\nRest: ";
+ for(std::vector<int>::const_iterator i=f.rest.begin();
+ i!=f.rest.end(); ++i)
+ o << *i << ' ';
+ o << "\nInv: " << f.reverse;
+ return o << '\n';
+}
+
+namespace internal {
+namespace coaffine {
+template<class Mat>
+inline void debug_matrix(std::ostream& o, Mat const&mat) {
+ for(int i=0;i<mat.rows();++i){
+ for(int j=0;j<mat.cols();++j){
+ o<<mat(i,j)<<' ';
+ }
+ o<<'\n';
+ }
+}
+}
+}
+
+template<class R_> struct Construct_flat_orientation : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Construct_flat_orientation)
+ typedef R_ R;
+ typedef typename Get_type<R, FT_tag>::type FT;
+ typedef typename Get_type<R, Point_tag>::type Point;
+ typedef typename Increment_dimension<typename R::Max_ambient_dimension>::type Dplusone;
+ typedef typename R::LA::template Rebind_dimension<Dynamic_dimension_tag,Dplusone>::Other LA;
+ typedef typename LA::Square_matrix Matrix;
+ typedef typename Get_functor<R, Compute_point_cartesian_coordinate_tag>::type CCC;
+ typedef typename Get_functor<R, Point_dimension_tag>::type PD;
+ typedef Flat_orientation result_type;
+
+ // This implementation is going to suck. Maybe we should push the
+ // functionality into LA. And we should check (in debug mode) that
+ // the points are affinely independent.
+ template<class Iter>
+ result_type operator()(Iter f, Iter e)const{
+ Iter f_save = f;
+ PD pd (this->kernel());
+ CCC ccc (this->kernel());
+ int dim = pd(*f);
+ Matrix coord (dim+1, dim+1); // use distance(f,e)? This matrix doesn't need to be square.
+ int col = 0;
+ Flat_orientation o;
+ std::vector<int>& proj=o.proj;
+ std::vector<int>& rest=o.rest; rest.reserve(dim+1);
+ for(int i=0; i<dim+1; ++i) rest.push_back(i);
+ for( ; f != e ; ++col, ++f ) {
+ //std::cerr << "(*f)[0]=" << (*f)[0] << std::endl;
+ Point const&p=*f;
+ // use a coordinate iterator instead?
+ for(int i=0; i<dim; ++i) coord(col, i) = ccc(p, i);
+ coord(col,dim)=1;
+ int d = (int)proj.size()+1;
+ Matrix m (d, d);
+ // Fill the matrix with what we already have
+ for(int i=0; i<d; ++i)
+ for(int j=0; j<d-1; ++j)
+ m(i,j) = coord(i, proj[j]);
+ // Try to complete with any other coordinate
+ // TODO: iterate on rest by the end, or use a (forward_)list.
+ for(std::vector<int>::iterator it=rest.begin();;++it) {
+ CGAL_assertion(it!=rest.end());
+ for(int i=0; i<d; ++i) m(i,d-1) = coord(i, *it);
+ if(LA::sign_of_determinant(m)!=0) {
+ proj.push_back(*it);
+ rest.erase(it);
+ break;
+ }
+ }
+ }
+ std::sort(proj.begin(),proj.end());
+ typename Get_functor<R, In_flat_orientation_tag>::type ifo(this->kernel());
+ o.reverse = false;
+ o.reverse = ifo(o, f_save, e) != CGAL::POSITIVE;
+ return o;
+ }
+};
+
+template<class R_> struct Contained_in_affine_hull : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Contained_in_affine_hull)
+ typedef R_ R;
+ typedef typename Get_type<R, FT_tag>::type FT;
+ typedef typename Get_type<R, Point_tag>::type Point;
+ typedef typename Get_type<R, Bool_tag>::type result_type;
+ typedef typename Get_functor<R, Compute_point_cartesian_coordinate_tag>::type CCC;
+ typedef typename Get_functor<R, Point_dimension_tag>::type PD;
+ //typedef typename Increment_dimension<typename R::Default_ambient_dimension>::type D1;
+ //typedef typename Increment_dimension<typename R::Max_ambient_dimension>::type D2;
+ //typedef typename R::LA::template Rebind_dimension<D1,D2>::Other LA;
+ typedef typename Increment_dimension<typename R::Max_ambient_dimension>::type Dplusone;
+ typedef typename R::LA::template Rebind_dimension<Dynamic_dimension_tag,Dplusone>::Other LA;
+ typedef typename LA::Square_matrix Matrix;
+
+ // mostly copied from Construct_flat_orientation. TODO: dedup this code or use LA.
+ template<class Iter>
+ result_type operator()(Iter f, Iter e, Point const&x) const {
+ // FIXME: are the points in (f,e) required to be affinely independent?
+ PD pd (this->kernel());
+ CCC ccc (this->kernel());
+ int dim=pd(*f);
+ Matrix coord (dim+1, dim+1); // use distance
+ int col = 0;
+ std::vector<int> proj;
+ std::vector<int> rest; rest.reserve(dim+1);
+ for(int i=0; i<dim+1; ++i) rest.push_back(i);
+ for( ; f != e ; ++col, ++f ) {
+ Point const&p=*f;
+ for(int i=0; i<dim; ++i) coord(col, i) = ccc(p, i);
+ coord(col,dim)=1;
+ int d = (int)proj.size()+1;
+ Matrix m (d, d);
+ for(int i=0; i<d; ++i)
+ for(int j=0; j<d-1; ++j)
+ m(i,j) = coord(i, proj[j]);
+ for(std::vector<int>::iterator it=rest.begin();it!=rest.end();++it) {
+ for(int i=0; i<d; ++i) m(i,d-1) = coord(i, *it);
+ if(LA::sign_of_determinant(m)!=0) {
+ proj.push_back(*it);
+ rest.erase(it);
+ break;
+ }
+ }
+ }
+ for(int i=0; i<dim; ++i) coord(col, i) = ccc(x, i);
+ coord(col,dim)=1;
+ int d = (int)proj.size()+1;
+ Matrix m (d, d);
+ for(int i=0; i<d; ++i)
+ for(int j=0; j<d-1; ++j)
+ m(i,j) = coord(i, proj[j]);
+ for(std::vector<int>::iterator it=rest.begin();it!=rest.end();++it) {
+ for(int i=0; i<d; ++i) m(i,d-1) = coord(i, *it);
+ if(LA::sign_of_determinant(m)!=0) return false;
+ }
+ return true;
+ }
+};
+
+template<class R_> struct In_flat_orientation : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(In_flat_orientation)
+ typedef R_ R;
+ typedef typename Get_type<R, FT_tag>::type FT;
+ typedef typename Get_type<R, Point_tag>::type Point;
+ typedef typename Get_type<R, Orientation_tag>::type result_type;
+ typedef typename Increment_dimension<typename R::Default_ambient_dimension>::type D1;
+ typedef typename Increment_dimension<typename R::Max_ambient_dimension>::type D2;
+ typedef typename R::LA::template Rebind_dimension<D1,D2>::Other LA;
+ typedef typename LA::Square_matrix Matrix;
+
+ template<class Iter>
+ result_type operator()(Flat_orientation const&o, Iter f, Iter e) const {
+ // TODO: work in the projection instead of the ambient space.
+ typename Get_functor<R, Compute_point_cartesian_coordinate_tag>::type c(this->kernel());
+ typename Get_functor<R, Point_dimension_tag>::type pd(this->kernel());
+ int d=pd(*f);
+ Matrix m(d+1,d+1);
+ int i=0;
+ for(;f!=e;++f,++i) {
+ Point const& p=*f;
+ m(i,0)=1;
+ for(int j=0;j<d;++j){
+ m(i,j+1)=c(p,j);
+ }
+ }
+ for(std::vector<int>::const_iterator it = o.rest.begin(); it != o.rest.end() /* i<d+1 */; ++i, ++it) {
+ m(i,0)=1;
+ for(int j=0;j<d;++j){
+ m(i,j+1)=0; // unneeded if the matrix is initialized to 0
+ }
+ if(*it != d) m(i,1+*it)=1;
+ }
+
+ result_type ret = LA::sign_of_determinant(CGAL_MOVE(m));
+ if(o.reverse) ret=-ret;
+ return ret;
+ }
+};
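+// Usage sketch: how the flat-orientation functors are meant to cooperate.
+// Assuming a d-dimensional kernel K that exposes them through
+// Kernel_d_interface (see Kernel_d_interface.h later in this patch); `k` is a
+// kernel instance and `points` / `simplex` are placeholder ranges of affinely
+// independent points spanning the same flat.
+//
+//   typename K::Construct_flat_orientation_d cfo = k.construct_flat_orientation_d_object();
+//   typename K::In_flat_orientation_d ifo = k.in_flat_orientation_d_object();
+//   typename K::Flat_orientation_d fo = cfo(points.begin(), points.end());
+//   // Later, orient another tuple of points lying in the same flat:
+//   CGAL::Orientation o = ifo(fo, simplex.begin(), simplex.end());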
+
+template<class R_> struct In_flat_side_of_oriented_sphere : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(In_flat_side_of_oriented_sphere)
+ typedef R_ R;
+ typedef typename Get_type<R, FT_tag>::type FT;
+ typedef typename Get_type<R, Point_tag>::type Point;
+ typedef typename Get_type<R, Orientation_tag>::type result_type;
+ typedef typename Increment_dimension<typename R::Default_ambient_dimension,2>::type D1;
+ typedef typename Increment_dimension<typename R::Max_ambient_dimension,2>::type D2;
+ typedef typename R::LA::template Rebind_dimension<D1,D2>::Other LA;
+ typedef typename LA::Square_matrix Matrix;
+
+ template<class Iter>
+ result_type operator()(Flat_orientation const&o, Iter f, Iter e, Point const&x) const {
+ // TODO: can't work in the projection, but we should at least remove the row of 1s.
+ typename Get_functor<R, Compute_point_cartesian_coordinate_tag>::type c(this->kernel());
+ typename Get_functor<R, Point_dimension_tag>::type pd(this->kernel());
+ int d=pd(*f);
+ Matrix m(d+2,d+2);
+ int i=0;
+ for(;f!=e;++f,++i) {
+ Point const& p=*f;
+ m(i,0)=1;
+ m(i,d+1)=0;
+ for(int j=0;j<d;++j){
+ m(i,j+1)=c(p,j);
+ m(i,d+1)+=CGAL_NTS square(m(i,j+1));
+ }
+ }
+ for(std::vector<int>::const_iterator it = o.rest.begin(); it != o.rest.end() /* i<d+1 */; ++i, ++it) {
+ m(i,0)=1;
+ for(int j=0;j<d;++j){
+ m(i,j+1)=0; // unneeded if the matrix is initialized to 0
+ }
+ if(*it != d) m(i,d+1)=m(i,1+*it)=1;
+ else m(i,d+1)=0;
+ }
+ m(d+1,0)=1;
+ m(d+1,d+1)=0;
+ for(int j=0;j<d;++j){
+ m(d+1,j+1)=c(x,j);
+ m(d+1,d+1)+=CGAL_NTS square(m(d+1,j+1));
+ }
+
+ result_type ret = -LA::sign_of_determinant(CGAL_MOVE(m));
+ if(o.reverse) ret=-ret;
+ return ret;
+ }
+};
+
+template<class R_> struct In_flat_power_side_of_power_sphere_raw : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(In_flat_power_side_of_power_sphere_raw)
+ typedef R_ R;
+ typedef typename Get_type<R, FT_tag>::type FT;
+ typedef typename Get_type<R, Point_tag>::type Point;
+ typedef typename Get_type<R, Orientation_tag>::type result_type;
+ typedef typename Increment_dimension<typename R::Default_ambient_dimension,2>::type D1;
+ typedef typename Increment_dimension<typename R::Max_ambient_dimension,2>::type D2;
+ typedef typename R::LA::template Rebind_dimension<D1,D2>::Other LA;
+ typedef typename LA::Square_matrix Matrix;
+
+ template<class Iter, class IterW, class Wt>
+ result_type operator()(Flat_orientation const&o, Iter f, Iter e, IterW fw, Point const&x, Wt const&w) const {
+ // TODO: can't work in the projection, but we should at least remove the row of 1s.
+ typename Get_functor<R, Compute_point_cartesian_coordinate_tag>::type c(this->kernel());
+ typename Get_functor<R, Point_dimension_tag>::type pd(this->kernel());
+ int d=pd(*f);
+ Matrix m(d+2,d+2);
+ int i=0;
+ for(;f!=e;++f,++fw,++i) {
+ Point const& p=*f;
+ m(i,0)=1;
+ m(i,d+1)=-*fw;
+ for(int j=0;j<d;++j){
+ m(i,j+1)=c(p,j);
+ m(i,d+1)+=CGAL_NTS square(m(i,j+1));
+ }
+ }
+ for(std::vector<int>::const_iterator it = o.rest.begin(); it != o.rest.end() /* i<d+1 */; ++i, ++it) {
+ m(i,0)=1;
+ for(int j=0;j<d;++j){
+ m(i,j+1)=0; // unneeded if the matrix is initialized to 0
+ }
+ if(*it != d) m(i,d+1)=m(i,1+*it)=1;
+ else m(i,d+1)=0;
+ }
+ m(d+1,0)=1;
+ m(d+1,d+1)=-w;
+ for(int j=0;j<d;++j){
+ m(d+1,j+1)=c(x,j);
+ m(d+1,d+1)+=CGAL_NTS square(m(d+1,j+1));
+ }
+
+ result_type ret = -LA::sign_of_determinant(CGAL_MOVE(m));
+ if(o.reverse) ret=-ret;
+ return ret;
+ }
+};
+
+
+}
+CGAL_KD_DEFAULT_TYPE(Flat_orientation_tag,(CGAL::CartesianDKernelFunctors::Flat_orientation),(),());
+CGAL_KD_DEFAULT_FUNCTOR(In_flat_orientation_tag,(CartesianDKernelFunctors::In_flat_orientation<K>),(Point_tag),(Compute_point_cartesian_coordinate_tag,Point_dimension_tag));
+CGAL_KD_DEFAULT_FUNCTOR(In_flat_side_of_oriented_sphere_tag,(CartesianDKernelFunctors::In_flat_side_of_oriented_sphere<K>),(Point_tag),(Compute_point_cartesian_coordinate_tag,Point_dimension_tag));
+CGAL_KD_DEFAULT_FUNCTOR(In_flat_power_side_of_power_sphere_raw_tag,(CartesianDKernelFunctors::In_flat_power_side_of_power_sphere_raw<K>),(Point_tag),(Compute_point_cartesian_coordinate_tag,Point_dimension_tag));
+CGAL_KD_DEFAULT_FUNCTOR(Construct_flat_orientation_tag,(CartesianDKernelFunctors::Construct_flat_orientation<K>),(Point_tag),(Compute_point_cartesian_coordinate_tag,Point_dimension_tag,In_flat_orientation_tag));
+CGAL_KD_DEFAULT_FUNCTOR(Contained_in_affine_hull_tag,(CartesianDKernelFunctors::Contained_in_affine_hull<K>),(Point_tag),(Compute_point_cartesian_coordinate_tag,Point_dimension_tag));
+}
+#endif
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Define_kernel_types.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Define_kernel_types.h
new file mode 100644
index 00000000..6a40515b
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Define_kernel_types.h
@@ -0,0 +1,50 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_DEFINE_KERNEL_TYPES_H
+#define CGAL_DEFINE_KERNEL_TYPES_H
+#include <CGAL/config.h>
+#include <CGAL/NewKernel_d/functor_tags.h>
+#include <CGAL/typeset.h>
+#ifdef CGAL_CXX11
+#include <type_traits>
+#else
+#include <boost/type_traits.hpp>
+#endif
+
+namespace CGAL {
+ namespace internal {
+ template<class K,class Tag_,bool=iterator_tag_traits<Tag_>::is_iterator>
+ struct Type_or_iter : K::template Type<Tag_> {};
+ template<class K,class Tag_>
+ struct Type_or_iter<K, Tag_, true> : K::template Iterator<Tag_> {};
+ }
+ template<class K, class Base=K, class List=typename typeset_union<typename K::Object_list,typename K::Iterator_list>::type> struct Define_kernel_types;
+ template<class K, class Base>
+ struct Define_kernel_types <K, Base, typeset<> > : Base {};
+ template<class K>
+ struct Define_kernel_types <K, void, typeset<> > {};
+ template<class K, class Base, class List>
+ struct Define_kernel_types :
+ Typedef_tag_type<typename List::head,
+ typename internal::Type_or_iter<K,typename List::head>::type,
+ Define_kernel_types<K, Base, typename List::tail>
+ > {};
+}
+#endif
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Dimension_base.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Dimension_base.h
new file mode 100644
index 00000000..be875e63
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Dimension_base.h
@@ -0,0 +1,49 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_KD_DIMENSION_BASE_h
+#define CGAL_KD_DIMENSION_BASE_h
+#include <CGAL/Dimension.h>
+#include <CGAL/assertions.h>
+#include <CGAL/NewKernel_d/utils.h>
+namespace CGAL {
+struct Store_dimension_base {
+ //TODO: add some assertions
+ Store_dimension_base(int dim=UNKNOWN_DIMENSION):dim_(dim){}
+ int dimension()const{return dim_;}
+ void set_dimension(int dim){dim_=dim;}
+ private:
+ int dim_;
+};
+template<class=Dynamic_dimension_tag>
+struct Dimension_base {
+ Dimension_base(int = UNKNOWN_DIMENSION){}
+ int dimension() const { return UNKNOWN_DIMENSION; }
+ void set_dimension(int) {}
+};
+template<int dim_>
+struct Dimension_base<Dimension_tag<dim_> > {
+ Dimension_base(){}
+ Dimension_base(int CGAL_assertion_code(dim)){CGAL_assertion(dim_==dim);}
+ int dimension()const{return dim_;}
+ void set_dimension(int dim){CGAL_assertion(dim_==dim);}
+};
+}
+#endif
+
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Filtered_predicate2.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Filtered_predicate2.h
new file mode 100644
index 00000000..1a6a67bc
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Filtered_predicate2.h
@@ -0,0 +1,137 @@
+// Copyright (c) 2001-2005 INRIA Sophia-Antipolis (France).
+// All rights reserved.
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+//
+// Author(s) : Sylvain Pion
+
+#ifndef CGAL_FILTERED_PREDICATE2_H
+#define CGAL_FILTERED_PREDICATE2_H
+
+#include <string>
+#include <CGAL/config.h>
+#include <CGAL/Interval_nt.h>
+#include <CGAL/Uncertain.h>
+#include <CGAL/Profile_counter.h>
+#include <CGAL/NewKernel_d/store_kernel.h>
+#include <boost/preprocessor.hpp>
+
+namespace CGAL {
+
+// This template class is a wrapper that implements the filtering for any
+// predicate (dynamic filters with IA).
+
+// TODO :
+// - each predicate in the default kernel should define a tag that says if it
+// wants to be filtered or not (=> all homogeneous predicate define this
+// tag). We could even test-suite that automatically. It makes a strong
+// new requirement on the kernel though...
+// Could be done with a traits mechanism ?
+// A default template could use the current IA, but other tags or whatever
+// could specify no filtering at all, or static filtering...
+// - same thing for constructions => virtual operator() ?
+// - similarly, constructions should have a tag saying if they can throw or
+// not, or we let all this up to the compiler optimizer to figure out ?
+// - Some caching could be done at the Point_2 level.
+
+
+template <class EP, class AP, class C2E, class C2A, bool Protection = true>
+class Filtered_predicate2
+{
+//TODO: pack (at least use a tuple)
+//FIXME: is it better to store those, or just store enough to recreate them
+//(i.e. possibly references to the kernels)?
+ EP ep;
+ AP ap;
+ C2E c2e;
+ C2A c2a;
+
+ typedef typename AP::result_type Ares;
+
+public:
+
+ typedef AP Approximate_predicate;
+ typedef EP Exact_predicate;
+ typedef C2E To_exact_converter;
+ typedef C2A To_approximate_converter;
+
+ // FIXME: should use result_of, see emails by Nico
+ typedef typename EP::result_type result_type;
+ // AP::result_type must be convertible to EP::result_type.
+
+ Filtered_predicate2()
+ {}
+
+ template <class K>
+ Filtered_predicate2(const K& k)
+ : ep(k.exact_kernel()), ap(k.approximate_kernel()), c2e(k,k.exact_kernel()), c2a(k,k.approximate_kernel())
+ {}
+
+#ifdef CGAL_CXX11
+ template <typename... Args>
+ result_type
+ operator()(Args&&... args) const
+ {
+ CGAL_BRANCH_PROFILER(std::string(" failures/calls to : ") + std::string(CGAL_PRETTY_FUNCTION), tmp);
+ // Protection is outside the try block as VC8 has the CGAL_CFG_FPU_ROUNDING_MODE_UNWINDING_VC_BUG
+ {
+ Protect_FPU_rounding<Protection> p;
+ try
+ {
+ // No forward here, the arguments may still be needed
+ Ares res = ap(c2a(args)...);
+ if (is_certain(res))
+ return get_certain(res);
+ }
+ catch (Uncertain_conversion_exception&) {}
+ }
+ CGAL_BRANCH_PROFILER_BRANCH(tmp);
+ Protect_FPU_rounding<!Protection> p(CGAL_FE_TONEAREST);
+ return ep(c2e(std::forward<Args>(args))...);
+ }
+#else
+
+#define CGAL_VAR(Z,N,C) C(a##N)
+#define CGAL_CODE(Z,N,_) \
+ template <BOOST_PP_ENUM_PARAMS(N,class A)> \
+ result_type \
+ operator()(BOOST_PP_ENUM_BINARY_PARAMS(N, A, const& a)) const \
+ { \
+ CGAL_BRANCH_PROFILER(std::string(" failures/calls to : ") + std::string(CGAL_PRETTY_FUNCTION), tmp); \
+ { \
+ Protect_FPU_rounding<Protection> p; \
+ try \
+ { \
+ Ares res = ap(BOOST_PP_ENUM(N,CGAL_VAR,c2a)); \
+ if (is_certain(res)) \
+ return get_certain(res); \
+ } \
+ catch (Uncertain_conversion_exception&) {} \
+ } \
+ CGAL_BRANCH_PROFILER_BRANCH(tmp); \
+ Protect_FPU_rounding<!Protection> p(CGAL_FE_TONEAREST); \
+ return ep(BOOST_PP_ENUM(N,CGAL_VAR,c2e)); \
+ }
+ BOOST_PP_REPEAT_FROM_TO(1, 10, CGAL_CODE, _ )
+#undef CGAL_CODE
+#undef CGAL_VAR
+
+#endif
+};
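+// Usage sketch: Filtered_predicate2 is meant to be instantiated with an exact
+// predicate, an approximate (interval) predicate and two converters. EP, AP,
+// C2E, C2A, `kernel` and the points p, q, r below are placeholders for types
+// provided by a filtered kernel.
+//
+//   typedef Filtered_predicate2<EP, AP, C2E, C2A> Orientation_pred;
+//   Orientation_pred orient(kernel);
+//   CGAL::Orientation o = orient(p, q, r); // tries AP under FPU protection,
+//                                          // falls back to EP on uncertainty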
+
+} //namespace CGAL
+
+#endif // CGAL_FILTERED_PREDICATE2_H
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/KernelD_converter.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/KernelD_converter.h
new file mode 100644
index 00000000..a8896976
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/KernelD_converter.h
@@ -0,0 +1,199 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_KERNEL_D_CARTESIAN_CONVERTER_H
+#define CGAL_KERNEL_D_CARTESIAN_CONVERTER_H
+
+#include <CGAL/basic.h>
+#include <CGAL/tuple.h>
+#include <CGAL/typeset.h>
+#include <CGAL/Object.h>
+#include <CGAL/Origin.h>
+#include <CGAL/NT_converter.h>
+#include <CGAL/NewKernel_d/functor_tags.h>
+#include <CGAL/Kernel/mpl.h>
+#include <CGAL/is_iterator.h>
+#include <CGAL/transforming_iterator.h>
+#include <boost/utility/enable_if.hpp>
+#include <boost/mpl/if.hpp>
+#include <CGAL/NewKernel_d/store_kernel.h>
+#include <CGAL/NewKernel_d/Kernel_object_converter.h>
+
+namespace CGAL {
+namespace internal {
+// Reverses order, but that shouldn't matter.
+template<class K,class T> struct Map_taglist_to_typelist :
+ Map_taglist_to_typelist<K,typename T::tail>::type
+ ::template add<typename Get_type<K, typename T::head>::type>
+{};
+template<class K> struct Map_taglist_to_typelist<K,typeset<> > : typeset<> {};
+}
+
+template<class List = typeset<> >
+struct Object_converter {
+ typedef Object result_type;
+ template<class F>
+ result_type operator()(Object const& o, F const& f) const {
+ typedef typename List::head H;
+ if (H const* ptr = object_cast<H>(&o))
+ return make_object(f(*ptr));
+ else
+ return Object_converter<typename List::tail>()(o,f);
+ }
+};
+template<>
+struct Object_converter <typeset<> > {
+ typedef Object result_type;
+ template<class F>
+ result_type operator()(Object const&,F const&)const {
+ CGAL_error_msg("KernelD_converter is unable to determine what is wrapped in the Object");
+ return Object();
+ }
+};
+
+
+ //TODO: special case when K1==K2 (or they are very close?)
+template<class Final_, class K1, class K2, class List>
+class KernelD_converter_
+: public KernelD_converter_<Final_,K1,K2,typename List::tail>
+{
+ typedef typename List::head Tag_;
+ typedef typename List::tail Rest;
+ typedef KernelD_converter_<Final_,K1,K2,Rest> Base;
+ typedef typename Get_type<K1,Tag_>::type K1_Obj;
+ typedef typename Get_type<K2,Tag_>::type K2_Obj;
+ typedef typename Get_functor<K1, Convert_ttag<Tag_> >::type K1_Conv;
+ typedef KO_converter<Tag_,K1,K2> KOC;
+ typedef CGAL_BOOSTD is_same<K1_Conv, Null_functor> no_converter;
+ typedef typename internal::Map_taglist_to_typelist<K1,Rest>::type::template contains<K1_Obj> duplicate;
+
+ // Disable the conversion in some cases:
+ struct Do_not_use{};
+
+ // Explicit calls to boost::mpl functions to avoid parenthesis
+ // warning on some versions of GCC
+ typedef typename boost::mpl::if_ <
+ // If Point==Vector, keep only one conversion
+ boost::mpl::or_<boost::mpl::bool_<duplicate::value>,
+ // For iterator objects, the default is make_transforming_iterator
+ boost::mpl::bool_<(iterator_tag_traits<Tag_>::is_iterator && no_converter::value)> >,
+ Do_not_use,K1_Obj>::type argument_type;
+ //typedef typename KOC::argument_type K1_Obj;
+ //typedef typename KOC::result_type K2_Obj;
+ public:
+ using Base::operator(); // don't use directly, just make it accessible to the next level
+ K2_Obj helper(K1_Obj const& o,CGAL_BOOSTD true_type)const{
+ return KOC()(this->myself().kernel(),this->myself().kernel2(),this->myself(),o);
+ }
+ K2_Obj helper(K1_Obj const& o,CGAL_BOOSTD false_type)const{
+ return K1_Conv(this->myself().kernel())(this->myself().kernel2(),this->myself(),o);
+ }
+ K2_Obj operator()(argument_type const& o)const{
+ return helper(o,no_converter());
+ }
+ template<class X,int=0> struct result:Base::template result<X>{};
+ template<int i> struct result<Final_(argument_type),i> {typedef K2_Obj type;};
+};
+
+template<class Final_, class K1, class K2>
+class KernelD_converter_<Final_,K1,K2,typeset<> > {
+ public:
+ struct Do_not_use2{};
+ void operator()(Do_not_use2)const{}
+ template<class T> struct result;
+ Final_& myself(){return *static_cast<Final_*>(this);}
+ Final_ const& myself()const{return *static_cast<Final_ const*>(this);}
+};
+
+
+// TODO: use the intersection of Kn::Object_list.
+template<class K1, class K2, class List_=
+typename typeset_intersection<typename K1::Object_list, typename K2::Object_list>::type
+//typeset<Point_tag>::add<Vector_tag>::type/*::add<Segment_tag>::type*/
+> class KernelD_converter
+ : public Store_kernel<K1>, public Store_kernel2<K2>,
+ public KernelD_converter_<KernelD_converter<K1,K2,List_>,K1,K2,List_>
+{
+ typedef KernelD_converter Self;
+ typedef Self Final_;
+ typedef KernelD_converter_<Self,K1,K2,List_> Base;
+ typedef typename Get_type<K1, FT_tag>::type FT1;
+ typedef typename Get_type<K2, FT_tag>::type FT2;
+ typedef NT_converter<FT1, FT2> NTc;
+ NTc c; // TODO: compressed storage as this is likely empty and the converter gets passed around (and stored in iterators)
+
+ public:
+ KernelD_converter(){}
+ KernelD_converter(K1 const&a,K2 const&b):Store_kernel<K1>(a),Store_kernel2<K2>(b){}
+
+ // For boost::result_of, used in transforming_iterator
+ template<class T,int i=is_iterator<T>::value?42:0> struct result:Base::template result<T>{};
+ template<class T> struct result<Final_(T),42> {
+ typedef transforming_iterator<Final_,T> type;
+ };
+ template<int i> struct result<Final_(K1),i>{typedef K2 type;};
+ template<int i> struct result<Final_(int),i>{typedef int type;};
+ // Ideally the next 2 would come with Point_tag and Vector_tag, but that's hard...
+ template<int i> struct result<Final_(Origin),i>{typedef Origin type;};
+ template<int i> struct result<Final_(Null_vector),i>{typedef Null_vector type;};
+ template<int i> struct result<Final_(Object),i>{typedef Object type;};
+ template<int i> struct result<Final_(FT1),i>{typedef FT2 type;};
+
+ using Base::operator();
+ typename Store_kernel2<K2>::reference2_type operator()(K1 const&)const{return this->kernel2();}
+ int operator()(int i)const{return i;}
+ Origin operator()(Origin const&o)const{return o;}
+ Null_vector operator()(Null_vector const&v)const{return v;}
+ FT2 operator()(FT1 const&x)const{return c(x);}
+ //RT2 operator()(typename First_if_different<RT1,FT1>::Type const&x)const{return cr(x);}
+
+ typename Get_type<K2, Flat_orientation_tag>::type const&
+ operator()(typename Get_type<K1, Flat_orientation_tag>::type const&o)const
+ { return o; } // Both kernels should have the same Flat_orientation type; returning a reference should warn if not.
+
+ template<class It>
+ transforming_iterator<Final_,typename boost::enable_if<is_iterator<It>,It>::type>
+ operator()(It const& it) const {
+ return make_transforming_iterator(it,*this);
+ }
+
+ template<class T>
+ //TODO: use decltype in C++11 instead of result
+ std::vector<typename result<Final_(T)>::type>
+ operator()(const std::vector<T>& v) const {
+ return std::vector<typename result<Final_(T)>::type>(operator()(v.begin()),operator()(v.end()));
+ }
+
+ //TODO: convert std::list and other containers?
+
+ Object
+ operator()(const Object &obj) const
+ {
+ typedef typename internal::Map_taglist_to_typelist<K1,List_>::type Possibilities;
+ //TODO: add Empty, vector<Point>, etc to the list.
+ return Object_converter<Possibilities>()(obj,*this);
+ }
+
+ //TODO: convert boost::variant
+
+};
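+// Usage sketch: converting objects between two d-dimensional kernels K1 and
+// K2 (placeholders) that share part of their Object_list.
+//
+//   K1 k1; K2 k2;
+//   KernelD_converter<K1, K2> conv(k1, k2);
+//   typename Get_type<K2, Point_tag>::type q = conv(p); // p is a K1 point
+//   typename Get_type<K2, FT_tag>::type y = conv(x);    // x is a K1 field value
+//
+// Iterators are wrapped in a transforming_iterator, std::vector is converted
+// element-wise, and CGAL::Object is dispatched through Object_converter above.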
+
+} //namespace CGAL
+
+#endif // CGAL_KERNEL_D_CARTESIAN_CONVERTER_H
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_2_interface.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_2_interface.h
new file mode 100644
index 00000000..fa30dff0
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_2_interface.h
@@ -0,0 +1,104 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_KD_KERNEL_2_INTERFACE_H
+#define CGAL_KD_KERNEL_2_INTERFACE_H
+
+#include <CGAL/NewKernel_d/functor_tags.h>
+#include <CGAL/transforming_iterator.h>
+#include <CGAL/NewKernel_d/utils.h>
+#include <CGAL/tuple.h>
+
+
+namespace CGAL {
+template <class Base_> struct Kernel_2_interface : public Base_ {
+ typedef Base_ Base;
+ typedef Kernel_2_interface<Base> Kernel;
+ typedef typename Get_type<Base, RT_tag>::type RT;
+ typedef typename Get_type<Base, FT_tag>::type FT;
+ typedef typename Get_type<Base, Bool_tag>::type Boolean;
+ typedef typename Get_type<Base, Sign_tag>::type Sign;
+ typedef typename Get_type<Base, Comparison_result_tag>::type Comparison_result;
+ typedef typename Get_type<Base, Orientation_tag>::type Orientation;
+ typedef typename Get_type<Base, Oriented_side_tag>::type Oriented_side;
+ typedef typename Get_type<Base, Bounded_side_tag>::type Bounded_side;
+ typedef typename Get_type<Base, Angle_tag>::type Angle;
+ typedef typename Get_type<Base, Point_tag>::type Point_2;
+ typedef typename Get_type<Base, Vector_tag>::type Vector_2;
+ typedef typename Get_type<Base, Segment_tag>::type Segment_2;
+ typedef cpp0x::tuple<Point_2,Point_2,Point_2> Triangle_2; // triangulation insists...
+ template <class T,int i> struct Help_2p_i {
+ typedef typename Get_functor<Base, T>::type LT;
+ typedef typename LT::result_type result_type;
+ LT lt;
+ Help_2p_i(Kernel const&k):lt(k){}
+ result_type operator()(Point_2 const&a, Point_2 const&b) {
+ return lt(a,b,i);
+ }
+ };
+ typedef Help_2p_i<Less_point_cartesian_coordinate_tag,0> Less_x_2;
+ typedef Help_2p_i<Less_point_cartesian_coordinate_tag,1> Less_y_2;
+ typedef Help_2p_i<Compare_point_cartesian_coordinate_tag,0> Compare_x_2;
+ typedef Help_2p_i<Compare_point_cartesian_coordinate_tag,1> Compare_y_2;
+ struct Compare_distance_2 {
+ typedef typename Get_functor<Base, Compare_distance_tag>::type CD;
+ typedef typename CD::result_type result_type;
+ CD cd;
+ Compare_distance_2(Kernel const&k):cd(k){}
+ result_type operator()(Point_2 const&a, Point_2 const&b, Point_2 const&c) {
+ return cd(a,b,c);
+ }
+ result_type operator()(Point_2 const&a, Point_2 const&b, Point_2 const&c, Point_2 const&d) {
+ return cd(a,b,c,d);
+ }
+ };
+ struct Orientation_2 {
+ typedef typename Get_functor<Base, Orientation_of_points_tag>::type O;
+ typedef typename O::result_type result_type;
+ O o;
+ Orientation_2(Kernel const&k):o(k){}
+ result_type operator()(Point_2 const&a, Point_2 const&b, Point_2 const&c) {
+ //return o(a,b,c);
+ Point_2 const* t[3]={&a,&b,&c};
+ return o(make_transforming_iterator<Dereference_functor>(t+0),make_transforming_iterator<Dereference_functor>(t+3));
+
+ }
+ };
+ struct Side_of_oriented_circle_2 {
+ typedef typename Get_functor<Base, Side_of_oriented_sphere_tag>::type SOS;
+ typedef typename SOS::result_type result_type;
+ SOS sos;
+ Side_of_oriented_circle_2(Kernel const&k):sos(k){}
+ result_type operator()(Point_2 const&a, Point_2 const&b, Point_2 const&c, Point_2 const&d) {
+ //return sos(a,b,c,d);
+ Point_2 const* t[4]={&a,&b,&c,&d};
+ return sos(make_transforming_iterator<Dereference_functor>(t+0),make_transforming_iterator<Dereference_functor>(t+4));
+ }
+ };
+ Less_x_2 less_x_2_object()const{ return Less_x_2(*this); }
+ Less_y_2 less_y_2_object()const{ return Less_y_2(*this); }
+ Compare_x_2 compare_x_2_object()const{ return Compare_x_2(*this); }
+ Compare_y_2 compare_y_2_object()const{ return Compare_y_2(*this); }
+ Compare_distance_2 compare_distance_2_object()const{ return Compare_distance_2(*this); }
+ Orientation_2 orientation_2_object()const{ return Orientation_2(*this); }
+ Side_of_oriented_circle_2 side_of_oriented_circle_2_object()const{ return Side_of_oriented_circle_2(*this); }
+};
+}
+
+#endif
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_3_interface.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_3_interface.h
new file mode 100644
index 00000000..96076aa8
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_3_interface.h
@@ -0,0 +1,102 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_KD_KERNEL_3_INTERFACE_H
+#define CGAL_KD_KERNEL_3_INTERFACE_H
+
+#include <CGAL/NewKernel_d/functor_tags.h>
+#include <CGAL/transforming_iterator.h>
+#include <CGAL/NewKernel_d/utils.h>
+#include <CGAL/tuple.h>
+
+
+namespace CGAL {
+template <class Base_> struct Kernel_3_interface : public Base_ {
+ typedef Base_ Base;
+ typedef Kernel_3_interface<Base> Kernel;
+ typedef typename Get_type<Base, RT_tag>::type RT;
+ typedef typename Get_type<Base, FT_tag>::type FT;
+ typedef typename Get_type<Base, Bool_tag>::type Boolean;
+ typedef typename Get_type<Base, Sign_tag>::type Sign;
+ typedef typename Get_type<Base, Comparison_result_tag>::type Comparison_result;
+ typedef typename Get_type<Base, Orientation_tag>::type Orientation;
+ typedef typename Get_type<Base, Oriented_side_tag>::type Oriented_side;
+ typedef typename Get_type<Base, Bounded_side_tag>::type Bounded_side;
+ typedef typename Get_type<Base, Angle_tag>::type Angle;
+ typedef typename Get_type<Base, Point_tag>::type Point_3;
+ typedef typename Get_type<Base, Vector_tag>::type Vector_3;
+ typedef typename Get_type<Base, Segment_tag>::type Segment_3;
+ typedef cpp0x::tuple<Point_3,Point_3,Point_3> Triangle_3; // placeholder
+ typedef cpp0x::tuple<Point_3,Point_3,Point_3,Point_3> Tetrahedron_3; // placeholder
+ struct Compare_xyz_3 {
+ typedef typename Get_functor<Base, Compare_lexicographically_tag>::type CL;
+ typedef typename CL::result_type result_type;
+ CL cl;
+ Compare_xyz_3(Kernel const&k):cl(k){}
+ result_type operator()(Point_3 const&a, Point_3 const&b) {
+ return cl(a,b);
+ }
+ };
+ struct Compare_distance_3 {
+ typedef typename Get_functor<Base, Compare_distance_tag>::type CD;
+ typedef typename CD::result_type result_type;
+ CD cd;
+ Compare_distance_3(Kernel const&k):cd(k){}
+ result_type operator()(Point_3 const&a, Point_3 const&b, Point_3 const&c) {
+ return cd(a,b,c);
+ }
+ result_type operator()(Point_3 const&a, Point_3 const&b, Point_3 const&c, Point_3 const&d) {
+ return cd(a,b,c,d);
+ }
+ };
+ struct Orientation_3 {
+ typedef typename Get_functor<Base, Orientation_of_points_tag>::type O;
+ typedef typename O::result_type result_type;
+ O o;
+ Orientation_3(Kernel const&k):o(k){}
+ result_type operator()(Point_3 const&a, Point_3 const&b, Point_3 const&c, Point_3 const&d) {
+ //return o(a,b,c,d);
+ Point_3 const* t[4]={&a,&b,&c,&d};
+ return o(make_transforming_iterator<Dereference_functor>(t+0),make_transforming_iterator<Dereference_functor>(t+4));
+
+ }
+ };
+ struct Side_of_oriented_sphere_3 {
+ typedef typename Get_functor<Base, Side_of_oriented_sphere_tag>::type SOS;
+ typedef typename SOS::result_type result_type;
+ SOS sos;
+ Side_of_oriented_sphere_3(Kernel const&k):sos(k){}
+ result_type operator()(Point_3 const&a, Point_3 const&b, Point_3 const&c, Point_3 const&d, Point_3 const&e) {
+ //return sos(a,b,c,d);
+ Point_3 const* t[5]={&a,&b,&c,&d,&e};
+ return sos(make_transforming_iterator<Dereference_functor>(t+0),make_transforming_iterator<Dereference_functor>(t+5));
+ }
+ };
+
+ // I don't have the Coplanar predicates (yet)
+
+
+ Compare_xyz_3 compare_xyz_3_object()const{ return Compare_xyz_3(*this); }
+ Compare_distance_3 compare_distance_3_object()const{ return Compare_distance_3(*this); }
+ Orientation_3 orientation_3_object()const{ return Orientation_3(*this); }
+ Side_of_oriented_sphere_3 side_of_oriented_sphere_3_object()const{ return Side_of_oriented_sphere_3(*this); }
+};
+}
+
+#endif
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_d_interface.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_d_interface.h
new file mode 100644
index 00000000..dd888005
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_d_interface.h
@@ -0,0 +1,298 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_KD_KERNEL_D_INTERFACE_H
+#define CGAL_KD_KERNEL_D_INTERFACE_H
+
+#include <CGAL/NewKernel_d/functor_tags.h>
+#include <CGAL/transforming_iterator.h>
+#include <CGAL/NewKernel_d/utils.h>
+#include <CGAL/tuple.h>
+
+
+namespace CGAL {
+template <class Base_> struct Kernel_d_interface : public Base_ {
+ CGAL_CONSTEXPR Kernel_d_interface(){}
+ CGAL_CONSTEXPR Kernel_d_interface(int d):Base_(d){}
+
+ typedef Base_ Base;
+ typedef Kernel_d_interface<Base> Kernel;
+ typedef Base_ R_; // for the macros
+ typedef typename Get_type<Base, RT_tag>::type RT;
+ typedef typename Get_type<Base, FT_tag>::type FT;
+ typedef typename Get_type<Base, Bool_tag>::type Boolean;
+ typedef typename Get_type<Base, Sign_tag>::type Sign;
+ typedef typename Get_type<Base, Comparison_result_tag>::type Comparison_result;
+ typedef typename Get_type<Base, Orientation_tag>::type Orientation;
+ typedef typename Get_type<Base, Oriented_side_tag>::type Oriented_side;
+ typedef typename Get_type<Base, Bounded_side_tag>::type Bounded_side;
+ typedef typename Get_type<Base, Angle_tag>::type Angle;
+ typedef typename Get_type<Base, Flat_orientation_tag>::type Flat_orientation_d;
+ typedef typename Get_type<Base, Point_tag>::type Point_d;
+ typedef typename Get_type<Base, Vector_tag>::type Vector_d;
+ typedef typename Get_type<Base, Segment_tag>::type Segment_d;
+ typedef typename Get_type<Base, Sphere_tag>::type Sphere_d;
+ typedef typename Get_type<Base, Hyperplane_tag>::type Hyperplane_d;
+ typedef Vector_d Direction_d;
+ typedef typename Get_type<Base, Line_tag>::type Line_d;
+ typedef typename Get_type<Base, Ray_tag>::type Ray_d;
+ typedef typename Get_type<Base, Iso_box_tag>::type Iso_box_d;
+ typedef typename Get_type<Base, Aff_transformation_tag>::type Aff_transformation_d;
+ typedef typename Get_type<Base, Weighted_point_tag>::type Weighted_point_d;
+ typedef typename Get_functor<Base, Compute_point_cartesian_coordinate_tag>::type Compute_coordinate_d;
+ typedef typename Get_functor<Base, Compare_lexicographically_tag>::type Compare_lexicographically_d;
+ typedef typename Get_functor<Base, Equal_points_tag>::type Equal_d;
+ typedef typename Get_functor<Base, Less_lexicographically_tag>::type Less_lexicographically_d;
+ typedef typename Get_functor<Base, Less_or_equal_lexicographically_tag>::type Less_or_equal_lexicographically_d;
+ // FIXME: and vectors?
+ typedef typename Get_functor<Base, Orientation_of_points_tag>::type Orientation_d;
+ typedef typename Get_functor<Base, Less_point_cartesian_coordinate_tag>::type Less_coordinate_d;
+ typedef typename Get_functor<Base, Point_dimension_tag>::type Point_dimension_d;
+ typedef typename Get_functor<Base, Side_of_oriented_sphere_tag>::type Side_of_oriented_sphere_d;
+ typedef typename Get_functor<Base, Power_side_of_power_sphere_tag>::type Power_side_of_power_sphere_d;
+ typedef typename Get_functor<Base, Power_center_tag>::type Power_center_d;
+ typedef typename Get_functor<Base, Power_distance_tag>::type Power_distance_d;
+ typedef typename Get_functor<Base, Contained_in_affine_hull_tag>::type Contained_in_affine_hull_d;
+ typedef typename Get_functor<Base, Construct_flat_orientation_tag>::type Construct_flat_orientation_d;
+ typedef typename Get_functor<Base, In_flat_orientation_tag>::type In_flat_orientation_d;
+ typedef typename Get_functor<Base, In_flat_side_of_oriented_sphere_tag>::type In_flat_side_of_oriented_sphere_d;
+ typedef typename Get_functor<Base, In_flat_power_side_of_power_sphere_tag>::type In_flat_power_side_of_power_sphere_d;
+ typedef typename Get_functor<Base, Point_to_vector_tag>::type Point_to_vector_d;
+ typedef typename Get_functor<Base, Vector_to_point_tag>::type Vector_to_point_d;
+ typedef typename Get_functor<Base, Translated_point_tag>::type Translated_point_d;
+ typedef typename Get_functor<Base, Scaled_vector_tag>::type Scaled_vector_d;
+ typedef typename Get_functor<Base, Difference_of_vectors_tag>::type Difference_of_vectors_d;
+ typedef typename Get_functor<Base, Difference_of_points_tag>::type Difference_of_points_d;
+ //typedef typename Get_functor<Base, Construct_ttag<Point_tag> >::type Construct_point_d;
+ struct Construct_point_d : private Store_kernel<Kernel> {
+ typedef Kernel R_; // for the macro
+ CGAL_FUNCTOR_INIT_STORE(Construct_point_d)
+ typedef typename Get_functor<Base, Construct_ttag<Point_tag> >::type CP;
+ typedef Point_d result_type;
+ Point_d operator()(Weighted_point_d const&wp)const{
+ return typename Get_functor<Base, Point_drop_weight_tag>::type(this->kernel())(wp);
+ }
+#ifdef CGAL_CXX11
+ Point_d operator()(Weighted_point_d &wp)const{
+ return typename Get_functor<Base, Point_drop_weight_tag>::type(this->kernel())(wp);
+ }
+ Point_d operator()(Weighted_point_d &&wp)const{
+ return typename Get_functor<Base, Point_drop_weight_tag>::type(this->kernel())(std::move(wp));
+ }
+ Point_d operator()(Weighted_point_d const&&wp)const{
+ return typename Get_functor<Base, Point_drop_weight_tag>::type(this->kernel())(std::move(wp));
+ }
+ template<class...T>
+# if __cplusplus >= 201402L
+ decltype(auto)
+# else
+ Point_d
+# endif
+ operator()(T&&...t)const{
+ return CP(this->kernel())(std::forward<T>(t)...);
+ //return CP(this->kernel())(t...);
+ }
+#else
+# define CGAL_CODE(Z,N,_) template<BOOST_PP_ENUM_PARAMS(N,class T)> \
+ Point_d operator()(BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t))const{ \
+ return CP(this->kernel())(BOOST_PP_ENUM_PARAMS(N,t)); \
+ }
+ BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_)
+# undef CGAL_CODE
+ Point_d operator()()const{
+ return CP(this->kernel())();
+ }
+#endif
+ };
+ typedef typename Get_functor<Base, Construct_ttag<Vector_tag> >::type Construct_vector_d;
+ typedef typename Get_functor<Base, Construct_ttag<Segment_tag> >::type Construct_segment_d;
+ typedef typename Get_functor<Base, Construct_ttag<Sphere_tag> >::type Construct_sphere_d;
+ typedef typename Get_functor<Base, Construct_ttag<Hyperplane_tag> >::type Construct_hyperplane_d;
+ typedef Construct_vector_d Construct_direction_d;
+ typedef typename Get_functor<Base, Construct_ttag<Line_tag> >::type Construct_line_d;
+ typedef typename Get_functor<Base, Construct_ttag<Ray_tag> >::type Construct_ray_d;
+ typedef typename Get_functor<Base, Construct_ttag<Iso_box_tag> >::type Construct_iso_box_d;
+ typedef typename Get_functor<Base, Construct_ttag<Aff_transformation_tag> >::type Construct_aff_transformation_d;
+ typedef typename Get_functor<Base, Construct_ttag<Weighted_point_tag> >::type Construct_weighted_point_d;
+ typedef typename Get_functor<Base, Midpoint_tag>::type Midpoint_d;
+ struct Component_accessor_d : private Store_kernel<Kernel> {
+ typedef Kernel R_; // for the macro
+ CGAL_FUNCTOR_INIT_STORE(Component_accessor_d)
+ int dimension(Point_d const&p){
+ return this->kernel().point_dimension_d_object()(p);
+ }
+ FT cartesian(Point_d const&p, int i){
+ return this->kernel().compute_coordinate_d_object()(p,i);
+ }
+ RT homogeneous(Point_d const&p, int i){
+ if (i == dimension(p))
+ return 1;
+ return cartesian(p, i);
+ }
+ };
+ struct Construct_cartesian_const_iterator_d : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Construct_cartesian_const_iterator_d)
+ typedef typename Get_functor<Base, Construct_ttag<Point_cartesian_const_iterator_tag> >::type CPI;
+ typedef typename Get_functor<Base, Construct_ttag<Vector_cartesian_const_iterator_tag> >::type CVI;
+ // FIXME: The following sometimes breaks compilation. The typedef below forces
+ // instantiation of this, which forces Point_d, which itself (in the wrapper)
+ // needs the derived kernel to tell it what the base kernel is, and that's a
+ // cycle. The exact circumstances are not clear: g++ and clang++ are ok in both
+ // C++03 and C++11; it is only clang in C++11 without CGAL_CXX11 that breaks.
+ // For now, rely on result_type.
+ //typedef typename CGAL::decay<typename boost::result_of<CPI(Point_d,CGAL::Begin_tag)>::type>::type result_type;
+ typedef typename CGAL::decay<typename CPI::result_type>::type result_type;
+ // Kernel_d requires a common iterator type for points and vectors
+ // TODO: provide this mixed functor in preKernel?
+ //CGAL_static_assertion((boost::is_same<typename CGAL::decay<typename boost::result_of<CVI(Vector_d,CGAL::Begin_tag)>::type>::type, result_type>::value));
+ CGAL_static_assertion((boost::is_same<typename CGAL::decay<typename CVI::result_type>::type, result_type>::value));
+ template <class Tag_>
+ result_type operator()(Point_d const&p, Tag_ t)const{
+ return CPI(this->kernel())(p,t);
+ }
+ template <class Tag_>
+ result_type operator()(typename First_if_different<Vector_d,Point_d>::Type const&v, Tag_ t)const{
+ return CVI(this->kernel())(v,t);
+ }
+
+ template <class Obj>
+ result_type operator()(Obj const&o)const{
+ return operator()(o, Begin_tag());
+ }
+ result_type operator()(Point_d const&p, int)const{
+ return operator()(p, End_tag());
+ }
+ result_type operator()(typename First_if_different<Vector_d,Point_d>::Type const&v, int)const{
+ return operator()(v, End_tag());
+ }
+ };
+ struct Compute_squared_radius_d : private Store_kernel<Kernel> {
+ typedef Kernel R_; // for the macro
+ CGAL_FUNCTOR_INIT_STORE(Compute_squared_radius_d)
+ typedef FT result_type;
+ template<class S> FT operator()(CGAL_FORWARDABLE(S) s)const{
+ return typename Get_functor<Base, Squared_radius_tag>::type(this->kernel())(CGAL_FORWARD(S,s));
+ }
+ template<class I> FT operator()(I b, I e)const{
+ return typename Get_functor<Base, Squared_circumradius_tag>::type(this->kernel())(b,e);
+ }
+ };
+ typedef typename Construct_cartesian_const_iterator_d::result_type Cartesian_const_iterator_d;
+ typedef typename Get_functor<Base, Squared_distance_tag>::type Squared_distance_d;
+ typedef typename Get_functor<Base, Squared_length_tag>::type Squared_length_d;
+ typedef typename Get_functor<Base, Scalar_product_tag>::type Scalar_product_d;
+ typedef typename Get_functor<Base, Affine_rank_tag>::type Affine_rank_d;
+ typedef typename Get_functor<Base, Affinely_independent_tag>::type Affinely_independent_d;
+ typedef typename Get_functor<Base, Contained_in_linear_hull_tag>::type Contained_in_linear_hull_d;
+ typedef typename Get_functor<Base, Contained_in_simplex_tag>::type Contained_in_simplex_d;
+ typedef typename Get_functor<Base, Has_on_positive_side_tag>::type Has_on_positive_side_d;
+ typedef typename Get_functor<Base, Linear_rank_tag>::type Linear_rank_d;
+ typedef typename Get_functor<Base, Linearly_independent_tag>::type Linearly_independent_d;
+ typedef typename Get_functor<Base, Oriented_side_tag>::type Oriented_side_d;
+ typedef typename Get_functor<Base, Side_of_bounded_circumsphere_tag>::type Side_of_bounded_sphere_d;
+
+ typedef typename Get_functor<Base, Center_of_sphere_tag>::type Center_of_sphere_d;
+ typedef Center_of_sphere_d Construct_center_d; // RangeSearchTraits
+ typedef typename Get_functor<Base, Construct_circumcenter_tag>::type Construct_circumcenter_d;
+ typedef typename Get_functor<Base, Value_at_tag>::type Value_at_d;
+ typedef typename Get_functor<Base, Point_of_sphere_tag>::type Point_of_sphere_d;
+ typedef typename Get_functor<Base, Orthogonal_vector_tag>::type Orthogonal_vector_d;
+ typedef typename Get_functor<Base, Linear_base_tag>::type Linear_base_d;
+ typedef typename Get_functor<Base, Construct_min_vertex_tag>::type Construct_min_vertex_d;
+ typedef typename Get_functor<Base, Construct_max_vertex_tag>::type Construct_max_vertex_d;
+
+ typedef typename Get_functor<Base, Point_weight_tag>::type Compute_weight_d;
+ typedef typename Get_functor<Base, Point_drop_weight_tag>::type Point_drop_weight_d;
+
+ //TODO:
+ //typedef ??? Intersect_d;
+
+
+ Compute_coordinate_d compute_coordinate_d_object()const{ return Compute_coordinate_d(*this); }
+ Has_on_positive_side_d has_on_positive_side_d_object()const{ return Has_on_positive_side_d(*this); }
+ Compare_lexicographically_d compare_lexicographically_d_object()const{ return Compare_lexicographically_d(*this); }
+ Equal_d equal_d_object()const{ return Equal_d(*this); }
+ Less_lexicographically_d less_lexicographically_d_object()const{ return Less_lexicographically_d(*this); }
+ Less_or_equal_lexicographically_d less_or_equal_lexicographically_d_object()const{ return Less_or_equal_lexicographically_d(*this); }
+ Less_coordinate_d less_coordinate_d_object()const{ return Less_coordinate_d(*this); }
+ Orientation_d orientation_d_object()const{ return Orientation_d(*this); }
+ Oriented_side_d oriented_side_d_object()const{ return Oriented_side_d(*this); }
+ Point_dimension_d point_dimension_d_object()const{ return Point_dimension_d(*this); }
+ Point_of_sphere_d point_of_sphere_d_object()const{ return Point_of_sphere_d(*this); }
+ Side_of_oriented_sphere_d side_of_oriented_sphere_d_object()const{ return Side_of_oriented_sphere_d(*this); }
+ Power_side_of_power_sphere_d power_side_of_power_sphere_d_object()const{ return Power_side_of_power_sphere_d(*this); }
+ Power_center_d power_center_d_object()const{ return Power_center_d(*this); }
+ Power_distance_d power_distance_d_object()const{ return Power_distance_d(*this); }
+ Side_of_bounded_sphere_d side_of_bounded_sphere_d_object()const{ return Side_of_bounded_sphere_d(*this); }
+ Contained_in_affine_hull_d contained_in_affine_hull_d_object()const{ return Contained_in_affine_hull_d(*this); }
+ Contained_in_linear_hull_d contained_in_linear_hull_d_object()const{ return Contained_in_linear_hull_d(*this); }
+ Contained_in_simplex_d contained_in_simplex_d_object()const{ return Contained_in_simplex_d(*this); }
+ Construct_flat_orientation_d construct_flat_orientation_d_object()const{ return Construct_flat_orientation_d(*this); }
+ In_flat_orientation_d in_flat_orientation_d_object()const{ return In_flat_orientation_d(*this); }
+ In_flat_side_of_oriented_sphere_d in_flat_side_of_oriented_sphere_d_object()const{ return In_flat_side_of_oriented_sphere_d(*this); }
+ In_flat_power_side_of_power_sphere_d in_flat_power_side_of_power_sphere_d_object()const{ return In_flat_power_side_of_power_sphere_d(*this); }
+ Point_to_vector_d point_to_vector_d_object()const{ return Point_to_vector_d(*this); }
+ Vector_to_point_d vector_to_point_d_object()const{ return Vector_to_point_d(*this); }
+ Translated_point_d translated_point_d_object()const{ return Translated_point_d(*this); }
+ Scaled_vector_d scaled_vector_d_object()const{ return Scaled_vector_d(*this); }
+ Difference_of_vectors_d difference_of_vectors_d_object()const{ return Difference_of_vectors_d(*this); }
+ Difference_of_points_d difference_of_points_d_object()const{ return Difference_of_points_d(*this); }
+ Affine_rank_d affine_rank_d_object()const{ return Affine_rank_d(*this); }
+ Affinely_independent_d affinely_independent_d_object()const{ return Affinely_independent_d(*this); }
+ Linear_base_d linear_base_d_object()const{ return Linear_base_d(*this); }
+ Linear_rank_d linear_rank_d_object()const{ return Linear_rank_d(*this); }
+ Linearly_independent_d linearly_independent_d_object()const{ return Linearly_independent_d(*this); }
+ Midpoint_d midpoint_d_object()const{ return Midpoint_d(*this); }
+ Value_at_d value_at_d_object()const{ return Value_at_d(*this); }
+ /// Intersect_d intersect_d_object()const{ return Intersect_d(*this); }
+ Component_accessor_d component_accessor_d_object()const{ return Component_accessor_d(*this); }
+ Orthogonal_vector_d orthogonal_vector_d_object()const{ return Orthogonal_vector_d(*this); }
+ Construct_cartesian_const_iterator_d construct_cartesian_const_iterator_d_object()const{ return Construct_cartesian_const_iterator_d(*this); }
+ Construct_point_d construct_point_d_object()const{ return Construct_point_d(*this); }
+ Construct_vector_d construct_vector_d_object()const{ return Construct_vector_d(*this); }
+ Construct_segment_d construct_segment_d_object()const{ return Construct_segment_d(*this); }
+ Construct_sphere_d construct_sphere_d_object()const{ return Construct_sphere_d(*this); }
+ Construct_hyperplane_d construct_hyperplane_d_object()const{ return Construct_hyperplane_d(*this); }
+ Compute_squared_radius_d compute_squared_radius_d_object()const{ return Compute_squared_radius_d(*this); }
+ Squared_distance_d squared_distance_d_object()const{ return Squared_distance_d(*this); }
+ Squared_length_d squared_length_d_object()const{ return Squared_length_d(*this); }
+ Scalar_product_d scalar_product_d_object()const{ return Scalar_product_d(*this); }
+ Center_of_sphere_d center_of_sphere_d_object()const{ return Center_of_sphere_d(*this); }
+ Construct_circumcenter_d construct_circumcenter_d_object()const{ return Construct_circumcenter_d(*this); }
+ Construct_direction_d construct_direction_d_object()const{ return Construct_direction_d(*this); }
+ Construct_line_d construct_line_d_object()const{ return Construct_line_d(*this); }
+ Construct_ray_d construct_ray_d_object()const{ return Construct_ray_d(*this); }
+ Construct_iso_box_d construct_iso_box_d_object()const{ return Construct_iso_box_d(*this); }
+ Construct_aff_transformation_d construct_aff_transformation_d_object()const{ return Construct_aff_transformation_d(*this); }
+ Construct_min_vertex_d construct_min_vertex_d_object()const{ return Construct_min_vertex_d(*this); }
+ Construct_max_vertex_d construct_max_vertex_d_object()const{ return Construct_max_vertex_d(*this); }
+ Construct_weighted_point_d construct_weighted_point_d_object()const{ return Construct_weighted_point_d(*this); }
+
+ Compute_weight_d compute_weight_d_object()const{ return Compute_weight_d(*this); }
+ Point_drop_weight_d point_drop_weight_d_object()const{ return Point_drop_weight_d(*this); }
+
+ // Dummies for those required functors missing a concept.
+ typedef Null_functor Position_on_line_d;
+ Position_on_line_d position_on_line_d_object()const{return Null_functor();}
+ typedef Null_functor Barycentric_coordinates_d;
+ Barycentric_coordinates_d barycentric_coordinates_d_object()const{return Null_functor();}
+
+ /* Not provided because they don't make sense here:
+ Lift_to_paraboloid_d
+ Project_along_d_axis_d
+ */
+};
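+// Usage sketch: Kernel_d_interface re-exposes the tag-based functors of the
+// base kernel under the classic Kernel_d names. B, `dim`, `coords` and `pts`
+// are placeholders; the *_object() accessors are the ones defined above.
+//
+//   typedef Kernel_d_interface<B> K;
+//   K k(dim);
+//   K::Point_d p = k.construct_point_d_object()(dim, coords.begin(), coords.end());
+//   CGAL::Orientation o = k.orientation_d_object()(pts.begin(), pts.end());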
+}
+
+#endif // CGAL_KD_KERNEL_D_INTERFACE_H
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_object_converter.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_object_converter.h
new file mode 100644
index 00000000..99918ed2
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_object_converter.h
@@ -0,0 +1,134 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_KD_KO_CONVERTER_H
+#define CGAL_KD_KO_CONVERTER_H
+#include <CGAL/NewKernel_d/utils.h>
+#include <CGAL/NewKernel_d/functor_tags.h>
+#include <CGAL/Kernel/mpl.h> // First_if_different
+#include <CGAL/Dimension.h>
+namespace CGAL {
+template <class Tag_, class K1, class K2> struct KO_converter;
+//TODO: It would probably be better if this was a Misc Functor in K1.
+// This way K1 could chose how it wants to present its points (sparse
+// iterator?) and derived classes would inherit it.
+
+namespace internal {
+template <class D /*=Dynamic_dimension_tag*/, class K1, class K2>
+struct Point_converter_help {
+ typedef typename Get_type<K1, Point_tag>::type argument_type;
+ typedef typename Get_type<K2, Point_tag>::type result_type;
+ template <class C>
+ result_type operator()(K1 const& k1, K2 const& k2, C const& conv, argument_type const& p) const {
+ typename Get_functor<K1, Construct_ttag<Point_cartesian_const_iterator_tag> >::type i(k1);
+ typename Get_functor<K2, Construct_ttag<Point_tag> >::type cp(k2);
+ return cp(conv(i(p,Begin_tag())),conv(i(p,End_tag())));
+ }
+};
+#ifdef CGAL_CXX11
+// This doesn't seem so useful, the compiler should be able to handle
+// the iterators just as efficiently.
+template <int d, class K1, class K2>
+struct Point_converter_help<Dimension_tag<d>,K1,K2> {
+ typedef typename Get_type<K1, Point_tag>::type argument_type;
+ typedef typename Get_type<K2, Point_tag>::type result_type;
+ template <class C,int...I>
+ result_type help(Indices<I...>, K1 const& k1, K2 const& k2, C const& conv, argument_type const& p) const {
+ typename Get_functor<K1, Compute_point_cartesian_coordinate_tag>::type cc(k1);
+ typename Get_functor<K2, Construct_ttag<Point_tag> >::type cp(k2);
+ return cp(conv(cc(p,I))...);
+ }
+ template <class C>
+ result_type operator()(K1 const& k1, K2 const& k2, C const& conv, argument_type const& p) const {
+ return help(typename N_increasing_indices<d>::type(),k1,k2,conv,p);
+ }
+};
+#endif
+}
+template <class K1, class K2> struct KO_converter<Point_tag,K1,K2>
+: internal::Point_converter_help<typename K1::Default_ambient_dimension,K1,K2>
+{};
+
+template <class K1, class K2> struct KO_converter<Vector_tag,K1,K2>{
+ typedef typename Get_type<K1, Vector_tag>::type K1_Vector;
+
+ // Disabling is now done in KernelD_converter
+ // // can't use vector without at least a placeholder point because of this
+ // typedef typename K1:: Point K1_Point;
+ // typedef typename First_if_different<K1_Vector,K1_Point>::Type argument_type;
+
+ typedef K1_Vector argument_type;
+ typedef typename Get_type<K2, Vector_tag>::type result_type;
+ template <class C>
+ result_type operator()(K1 const& k1, K2 const& k2, C const& conv, argument_type const& v) const {
+ typename Get_functor<K1, Construct_ttag<Vector_cartesian_const_iterator_tag> >::type i(k1);
+ typename Get_functor<K2, Construct_ttag<Vector_tag> >::type cp(k2);
+ return cp(conv(i(v,Begin_tag())),conv(i(v,End_tag())));
+ }
+};
+
+template <class K1, class K2> struct KO_converter<Segment_tag,K1,K2>{
+ typedef typename Get_type<K1, Segment_tag>::type argument_type;
+ typedef typename Get_type<K2, Segment_tag>::type result_type;
+ template <class C>
+ result_type operator()(K1 const& k1, K2 const& k2, C const& conv, argument_type const& s) const {
+ typename Get_functor<K1, Segment_extremity_tag>::type f(k1);
+ typename Get_functor<K2, Construct_ttag<Segment_tag> >::type cs(k2);
+ return cs(conv(f(s,0)),conv(f(s,1)));
+ }
+};
+
+template <class K1, class K2> struct KO_converter<Hyperplane_tag,K1,K2>{
+ typedef typename Get_type<K1, Hyperplane_tag>::type argument_type;
+ typedef typename Get_type<K2, Hyperplane_tag>::type result_type;
+ template <class C>
+ result_type operator()(K1 const& k1, K2 const& k2, C const& conv, argument_type const& h) const {
+ typename Get_functor<K1, Orthogonal_vector_tag>::type ov(k1);
+ typename Get_functor<K1, Hyperplane_translation_tag>::type ht(k1);
+ typename Get_functor<K2, Construct_ttag<Hyperplane_tag> >::type ch(k2);
+ return ch(conv(ov(h)),conv(ht(h)));
+ }
+};
+
+template <class K1, class K2> struct KO_converter<Sphere_tag,K1,K2>{
+ typedef typename Get_type<K1, Sphere_tag>::type argument_type;
+ typedef typename Get_type<K2, Sphere_tag>::type result_type;
+ template <class C>
+ result_type operator()(K1 const& k1, K2 const& k2, C const& conv, argument_type const& s) const {
+ typename Get_functor<K1, Center_of_sphere_tag>::type cos(k1);
+ typename Get_functor<K1, Squared_radius_tag>::type sr(k1);
+ typename Get_functor<K2, Construct_ttag<Sphere_tag> >::type cs(k2);
+ return cs(conv(cos(s)),conv(sr(s)));
+ }
+};
+
+template <class K1, class K2> struct KO_converter<Weighted_point_tag,K1,K2>{
+ typedef typename Get_type<K1, Weighted_point_tag>::type argument_type;
+ typedef typename Get_type<K2, Weighted_point_tag>::type result_type;
+ template <class C>
+ result_type operator()(K1 const& k1, K2 const& k2, C const& conv, argument_type const& s) const {
+ typename Get_functor<K1, Point_drop_weight_tag>::type pdw(k1);
+ typename Get_functor<K1, Point_weight_tag>::type pw(k1);
+ typename Get_functor<K2, Construct_ttag<Weighted_point_tag> >::type cwp(k2);
+ return cwp(conv(pdw(s)),conv(pw(s)));
+ }
+};
+
+}
+#endif
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/LA_eigen/LA.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/LA_eigen/LA.h
new file mode 100644
index 00000000..ddbdc37b
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/LA_eigen/LA.h
@@ -0,0 +1,175 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_LA_EIGEN_H
+#define CGAL_LA_EIGEN_H
+#include <CGAL/config.h>
+#ifndef CGAL_EIGEN3_ENABLED
+#error Requires Eigen
+#endif
+#include <boost/type_traits/is_arithmetic.hpp>
+#include <boost/utility/enable_if.hpp>
+#include <CGAL/Dimension.h>
+#include <Eigen/Dense>
+#include <CGAL/NewKernel_d/LA_eigen/constructors.h>
+#include <CGAL/iterator_from_indices.h>
+
+namespace CGAL {
+
+//FIXME: where could we use Matrix_base instead of Matrix?
+// Dim_ real dimension
+// Max_dim_ upper bound on the dimension
+template<class NT_,class Dim_,class Max_dim_=Dim_> struct LA_eigen {
+ typedef NT_ NT;
+ typedef Dim_ Dimension;
+ typedef Max_dim_ Max_dimension;
+ enum { dimension = Eigen_dimension<Dimension>::value };
+ enum { max_dimension = Eigen_dimension<Max_dimension>::value };
+ template< class D2, class D3=D2 >
+ struct Rebind_dimension {
+ typedef LA_eigen< NT, D2, D3 > Other;
+ };
+ template<class,class=void> struct Property : boost::false_type {};
+ template<class D> struct Property<Has_vector_plus_minus_tag,D> : boost::true_type {};
+ template<class D> struct Property<Has_vector_scalar_ops_tag,D> : boost::true_type {};
+ template<class D> struct Property<Has_dot_product_tag,D> : boost::true_type {};
+
+ typedef Eigen::Matrix<NT,Eigen_dimension<Dim_>::value,1,Eigen::ColMajor|Eigen::AutoAlign,Eigen_dimension<Max_dim_>::value,1> Vector;
+ typedef Eigen::Matrix<NT,Eigen::Dynamic,1> Dynamic_vector;
+ typedef Construct_eigen<Vector> Construct_vector;
+
+#if (EIGEN_WORLD_VERSION>=3)
+ typedef NT const* Vector_const_iterator;
+#else
+ typedef Iterator_from_indices<const type,const NT
+#ifndef CGAL_CXX11
+ ,NT
+#endif
+ > Vector_const_iterator;
+#endif
+
+ template<class Vec_>static Vector_const_iterator vector_begin(Vec_ const&a){
+#if (EIGEN_WORLD_VERSION>=3)
+ return &a[0];
+#else
+ return Vector_const_iterator(a,0);
+#endif
+ }
+
+ template<class Vec_>static Vector_const_iterator vector_end(Vec_ const&a){
+#if (EIGEN_WORLD_VERSION>=3)
+ // FIXME: Isn't that dangerous if a is an expression and not a concrete vector?
+ return &a[0]+a.size();
+#else
+ return Vector_const_iterator(a,a.size());
+#endif
+ }
+
+ typedef Eigen::Matrix<NT,dimension,dimension,Eigen::ColMajor|Eigen::AutoAlign,max_dimension,max_dimension> Square_matrix;
+ typedef Eigen::Matrix<NT,dimension,Eigen::Dynamic,Eigen::ColMajor|Eigen::AutoAlign,max_dimension,Eigen::Dynamic> Dynamic_matrix;
+ //TODO: don't pass on the values of Max_* for an expensive NT
+ // typedef ... Constructor
+ // typedef ... Accessor
+#if 0
+ private:
+ template <class T> class Canonicalize_vector {
+ typedef typename Dimension_eigen<T::SizeAtCompileTime>::type S1;
+ typedef typename Dimension_eigen<T::MaxSizeAtCompileTime>::type S2;
+ public:
+ typedef typename Vector<S1,S2>::type type;
+ };
+ public:
+#endif
+
+ template<class Vec_>static int size_of_vector(Vec_ const&v){
+ return (int)v.size();
+ }
+
+ template<class Vec_>static NT dot_product(Vec_ const&a,Vec_ const&b){
+ return a.dot(b);
+ }
+
+ template<class Vec_> static int rows(Vec_ const&v) {
+ return (int)v.rows();
+ }
+ template<class Vec_> static int columns(Vec_ const&v) {
+ return (int)v.cols();
+ }
+
+ template<class Mat_> static NT determinant(Mat_ const&m,bool=false){
+ return m.determinant();
+ }
+
+ template<class Mat_> static typename
+ Same_uncertainty_nt<CGAL::Sign, NT>::type
+ sign_of_determinant(Mat_ const&m,bool=false)
+ {
+ return CGAL::sign(m.determinant());
+ }
+
+ template<class Mat_> static int rank(Mat_ const&m){
+ // return m.rank();
+ // This one uses sqrt so cannot be used with Gmpq
+ // TODO: use different algo for different NT?
+ // Eigen::ColPivHouseholderQR<Mat_> decomp(m);
+ Eigen::FullPivLU<Mat_> decomp(m);
+ // decomp.setThreshold(0);
+ return static_cast<int>(decomp.rank());
+ }
+
+ // m*a==b
+ template<class DV, class DM, class V>
+ static void solve(DV&a, DM const&m, V const& b){
+ //a = m.colPivHouseholderQr().solve(b);
+ a = m.fullPivLu().solve(b);
+ }
+ template<class DV, class DM, class V>
+ static bool solve_and_check(DV&a, DM const&m, V const& b){
+ //a = m.colPivHouseholderQr().solve(b);
+ a = m.fullPivLu().solve(b);
+ return b.isApprox(m*a);
+ }
+
+ static Dynamic_matrix basis(Dynamic_matrix const&m){
+ return m.fullPivLu().image(m);
+ }
+
+ template<class Vec1,class Vec2> static Vector homogeneous_add(Vec1 const&a,Vec2 const&b){
+ //TODO: use compile-time size when available
+ int d=a.size();
+ Vector v(d);
+ v << b[d-1]*a.topRows(d-1)+a[d-1]*b.topRows(d-1), a[d-1]*b[d-1];
+ return v;
+ }
+
+ template<class Vec1,class Vec2> static Vector homogeneous_sub(Vec1 const&a,Vec2 const&b){
+ int d=a.size();
+ Vector v(d);
+ v << b[d-1]*a.topRows(d-1)-a[d-1]*b.topRows(d-1), a[d-1]*b[d-1];
+ return v;
+ }
+
+ template<class Vec1,class Vec2> static std::pair<NT,NT> homogeneous_dot_product(Vec1 const&a,Vec2 const&b){
+ int d=a.size();
+ return make_pair(a.topRows(d-1).dot(b.topRows(d-1)), a[d-1]*b[d-1]);
+ }
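+ // Reading the last coordinate as the homogeneous weight, i.e. (x, x_w)
+ // standing for the Cartesian x/x_w, the three helpers above compute
+ //   a/a_w + b/b_w  = (b_w*a + a_w*b) / (a_w*b_w)   (homogeneous_add)
+ //   a/a_w - b/b_w  = (b_w*a - a_w*b) / (a_w*b_w)   (homogeneous_sub)
+ //   <a/a_w, b/b_w> = <a, b> / (a_w*b_w)            (dot product, returned as
+ //                                                   a numerator/denominator pair)
+ // where a, b denote the first d-1 coordinates and a_w, b_w the last one.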
+
+};
+}
+#endif
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/LA_eigen/constructors.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/LA_eigen/constructors.h
new file mode 100644
index 00000000..3636996f
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/LA_eigen/constructors.h
@@ -0,0 +1,162 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_LA_EIGEN_CONSTRUCTORS_H
+#define CGAL_LA_EIGEN_CONSTRUCTORS_H
+#include <CGAL/config.h>
+
+#if defined(BOOST_MSVC)
+# pragma warning(push)
+# pragma warning(disable:4003) // not enough actual parameters for macro 'BOOST_PP_EXPAND_I'
+ // http://lists.boost.org/boost-users/2014/11/83291.php
+#endif
+
+#ifndef CGAL_EIGEN3_ENABLED
+#error Requires Eigen
+#endif
+#include <boost/type_traits/is_arithmetic.hpp>
+#include <boost/utility/enable_if.hpp>
+#include <CGAL/Dimension.h>
+#include <Eigen/Dense>
+#include <CGAL/iterator_from_indices.h>
+#include <CGAL/NewKernel_d/utils.h>
+#include <boost/preprocessor/repetition.hpp>
+#include <boost/preprocessor/repetition/enum.hpp>
+#include <boost/preprocessor/repetition/enum_params.hpp>
+
+namespace CGAL {
+ template <class Vector_> struct Construct_eigen {
+ typedef Vector_ result_type;
+ typedef typename Vector_::Scalar NT;
+
+ private:
+ static void check_dim(int CGAL_assertion_code(d)){
+ CGAL_assertion_code(int m = result_type::MaxSizeAtCompileTime;)
+ CGAL_assertion((m == Eigen::Dynamic) || (d <= m));
+ }
+ public:
+
+ struct Dimension {
+ // Initialize with NaN if possible?
+ result_type operator()(int d) const {
+ check_dim(d);
+ return result_type(d);
+ }
+ };
+
+ struct Iterator {
+ template<typename Iter>
+ result_type operator()(int d,Iter const& f,Iter const& e) const {
+ check_dim(d);
+ CGAL_assertion(d==std::distance(f,e));
+ result_type a(d);
+ // TODO: check the right way to do this
+ std::copy(f,e,&a[0]);
+ return a;
+ }
+ };
+
+#if 0
+ struct Iterator_add_one {
+ template<typename Iter>
+ result_type operator()(int d,Iter const& f,Iter const& e) const {
+ check_dim(d);
+ CGAL_assertion(d==std::distance(f,e)+1);
+ result_type a(d);
+ std::copy(f,e,&a[0]);
+ a[d-1]=1;
+ return a;
+ }
+ };
+#endif
+
+ struct Iterator_and_last {
+ template<typename Iter,typename T>
+ result_type operator()(int d,Iter const& f,Iter const& e,CGAL_FORWARDABLE(T) t) const {
+ check_dim(d);
+ CGAL_assertion(d==std::distance(f,e)+1);
+ result_type a(d);
+ std::copy(f,e,&a[0]);
+ a[d-1]=CGAL_FORWARD(T,t);
+ return a;
+ }
+ };
+
+#ifdef CGAL_CXX11
+ struct Initializer_list {
+ // Fix T==NT?
+ template<class T>
+ result_type operator()(std::initializer_list<T> l) const {
+ return Iterator()(l.size(),l.begin(),l.end());
+ }
+ };
+#endif
+
+ struct Values {
+#ifdef CGAL_CXX11
+ // TODO avoid going through Initializer_list which may cause extra copies. Possibly use forward_as_tuple.
+ template<class...U>
+ result_type operator()(U&&...u) const {
+ check_dim(sizeof...(U)); // TODO: use static_assert
+ return Initializer_list()({forward_safe<NT,U>(u)...});
+ }
+#else
+
+#define CGAL_CODE(Z,N,_) result_type operator()(BOOST_PP_ENUM_PARAMS(N,NT const& t)) const { \
+ check_dim(N); \
+ result_type a(N); \
+ a << BOOST_PP_ENUM_PARAMS(N,t); \
+ return a; \
+}
+BOOST_PP_REPEAT_FROM_TO(1, 11, CGAL_CODE, _ )
+#undef CGAL_CODE
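+// For reference, BOOST_PP_REPEAT_FROM_TO(1, 11, ...) generates the overloads
+// for 1 to 10 coordinates; e.g. the N==2 instance expands to roughly
+//   result_type operator()(NT const& t0, NT const& t1) const {
+//     check_dim(2); result_type a(2); a << t0, t1; return a;
+//   }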
+
+#endif
+ };
+
+ struct Values_divide {
+#ifdef CGAL_CXX11
+ template<class H,class...U>
+ result_type operator()(H const&h,U&&...u) const {
+ check_dim(sizeof...(U)); // TODO: use static_assert
+ return Initializer_list()({Rational_traits<NT>().make_rational(std::forward<U>(u),h)...});
+ }
+#else
+
+#define CGAL_VAR(Z,N,_) ( Rational_traits<NT>().make_rational( t##N ,h) )
+#define CGAL_CODE(Z,N,_) template <class H> result_type \
+ operator()(H const&h, BOOST_PP_ENUM_PARAMS(N,NT const& t)) const { \
+ check_dim(N); \
+ result_type a(N); \
+ a << BOOST_PP_ENUM(N,CGAL_VAR,); \
+ return a; \
+ }
+ BOOST_PP_REPEAT_FROM_TO(1, 11, CGAL_CODE, _ )
+#undef CGAL_CODE
+#undef CGAL_VAR
+
+#endif
+ };
+ };
+}
+#if defined(BOOST_MSVC)
+# pragma warning(pop)
+#endif
+
+#endif
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Lazy_cartesian.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Lazy_cartesian.h
new file mode 100644
index 00000000..9ecc2b63
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Lazy_cartesian.h
@@ -0,0 +1,188 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_KERNEL_D_LAZY_CARTESIAN_H
+#define CGAL_KERNEL_D_LAZY_CARTESIAN_H
+
+#include <CGAL/basic.h>
+#include <CGAL/algorithm.h>
+#include <CGAL/Lazy.h>
+#include <CGAL/Default.h>
+#include <CGAL/NewKernel_d/Filtered_predicate2.h>
+#include <CGAL/iterator_from_indices.h>
+#include <CGAL/NewKernel_d/Define_kernel_types.h>
+
+namespace CGAL {
+
+template<class K,class T>
+struct Nth_iterator_element : private Store_kernel<K> {
+ Nth_iterator_element(){}
+ Nth_iterator_element(K const&k):Store_kernel<K>(k){}
+ typedef typename Get_type<K, typename iterator_tag_traits<T>::value_tag>::type result_type;
+ template<class U> result_type operator()(CGAL_FORWARDABLE(U) u, int i) const {
+ typename Get_functor<K, Construct_ttag<T> >::type ci(this->kernel());
+ return *cpp0x::next(ci(CGAL_FORWARD(U,u),Begin_tag()),i);
+ }
+};
+ //typedef typename Functor<typename iterator_tag_traits<T>::nth_element>::type nth_elem;
+template<class K, class T, bool = iterator_tag_traits<T>::has_nth_element>
+struct Select_nth_element_functor {
+ typedef Nth_iterator_element<K, T> type;
+};
+template<class K, class T>
+struct Select_nth_element_functor <K, T, true> :
+ Get_functor<K, typename iterator_tag_traits<T>::nth_element> {};
+
+namespace internal {
+ template<class A,class B,class C,bool/*is_NT=false*/>
+ struct Lazy_construction_maybe_nt {
+ typedef Lazy_construction<A,B,C> type;
+ };
+ template<class A,class B,class C>
+ struct Lazy_construction_maybe_nt<A,B,C,true> {
+ typedef Lazy_construction_nt<A,B,C> type;
+ };
+}
+
+template <class EK_, class AK_, class E2A_, class Kernel_>
+struct Lazy_cartesian_types
+{
+ typedef typename typeset_intersection<
+ typename AK_::Object_list,
+ typename EK_::Object_list
+ >::type Object_list;
+
+ typedef typename typeset_intersection<
+ typename AK_::Iterator_list,
+ typename EK_::Iterator_list
+ >::type Iterator_list;
+
+ template <class T,class=typename Get_type_category<Kernel_,T>::type> struct Type {};
+ template <class T> struct Type<T,Object_tag> {
+ typedef Lazy<
+ typename Get_type<AK_,T>::type,
+ typename Get_type<EK_,T>::type,
+ typename Get_type<EK_, FT_tag>::type,
+ E2A_> type;
+ };
+ template <class T> struct Type<T,Number_tag> {
+ typedef CGAL::Lazy_exact_nt<typename Get_type<EK_,T>::type> type;
+ };
+
+ template <class T> struct Iterator {
+ typedef typename iterator_tag_traits<T>::value_tag Vt;
+ typedef typename Type<Vt>::type V;
+ typedef typename Select_nth_element_functor<AK_,T>::type AF;
+ typedef typename Select_nth_element_functor<EK_,T>::type EF;
+
+ typedef typename internal::Lazy_construction_maybe_nt<
+ Kernel_, AF, EF, is_NT_tag<Vt>::value
+ >::type nth_elem;
+
+ typedef Iterator_from_indices<
+ const typename Type<typename iterator_tag_traits<T>::container>::type,
+ const V, V, nth_elem
+ > type;
+ };
+};
+
+template <class EK_, class AK_, class E2A_/*, class Kernel_=Default*/>
+struct Lazy_cartesian : Dimension_base<typename EK_::Default_ambient_dimension>,
+ Lazy_cartesian_types<EK_,AK_,E2A_,Lazy_cartesian<EK_,AK_,E2A_> >
+{
+ //CGAL_CONSTEXPR Lazy_cartesian(){}
+ //CGAL_CONSTEXPR Lazy_cartesian(int d):Base_(d){}
+
+ //TODO: Do we want to store an AK and an EK? Or just references?
+ //FIXME: references would be better I guess.
+ //TODO: In any case, make sure that we don't end up storing this kernel for
+ //nothing (it is not empty but references empty kernels or something)
+ AK_ ak; EK_ ek;
+ AK_ const& approximate_kernel()const{return ak;}
+ EK_ const& exact_kernel()const{return ek;}
+
+ typedef Lazy_cartesian Self;
+ typedef Lazy_cartesian_types<EK_,AK_,E2A_,Self> Base;
+ //typedef typename Default::Get<Kernel_,Self>::type Kernel;
+ typedef Self Kernel;
+ typedef AK_ Approximate_kernel;
+ typedef EK_ Exact_kernel;
+ typedef E2A_ E2A;
+ typedef Approx_converter<Kernel, Approximate_kernel> C2A;
+ typedef Exact_converter<Kernel, Exact_kernel> C2E;
+
+ typedef typename Exact_kernel::Rep_tag Rep_tag;
+ typedef typename Exact_kernel::Kernel_tag Kernel_tag;
+ typedef typename Exact_kernel::Default_ambient_dimension Default_ambient_dimension;
+ typedef typename Exact_kernel::Max_ambient_dimension Max_ambient_dimension;
+ //typedef typename Exact_kernel::Flat_orientation Flat_orientation;
+ // Check that Approximate_kernel agrees with all that...
+
+ template<class T,class D=void,class=typename Get_functor_category<Lazy_cartesian,T,D>::type> struct Functor {
+ typedef Null_functor type;
+ };
+ //FIXME: what do we do with D here?
+ template<class T,class D> struct Functor<T,D,Predicate_tag> {
+ typedef typename Get_functor<Approximate_kernel, T>::type FA;
+ typedef typename Get_functor<Exact_kernel, T>::type FE;
+ typedef Filtered_predicate2<FE,FA,C2E,C2A> type;
+ };
+ template<class T,class D> struct Functor<T,D,Compute_tag> {
+ typedef typename Get_functor<Approximate_kernel, T>::type FA;
+ typedef typename Get_functor<Exact_kernel, T>::type FE;
+ typedef Lazy_construction_nt<Kernel,FA,FE> type;
+ };
+ template<class T,class D> struct Functor<T,D,Construct_tag> {
+ typedef typename Get_functor<Approximate_kernel, T>::type FA;
+ typedef typename Get_functor<Exact_kernel, T>::type FE;
+ typedef Lazy_construction<Kernel,FA,FE> type;
+ };
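+ // In other words, a functor requested from this kernel is chosen by
+ // category: predicates are wrapped in Filtered_predicate2 (presumably
+ // evaluating the approximate version first and falling back to the exact
+ // one when the filter fails), computations yield lazy numbers and
+ // constructions yield lazy objects, each pairing the corresponding
+ // Approximate_kernel and Exact_kernel functors.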
+
+ //typedef typename Iterator<Point_cartesian_const_iterator_tag>::type Point_cartesian_const_iterator;
+ //typedef typename Iterator<Vector_cartesian_const_iterator_tag>::type Vector_cartesian_const_iterator;
+
+ template<class U>
+ struct Construct_iter : private Store_kernel<Kernel> {
+ Construct_iter(){}
+ Construct_iter(Kernel const&k):Store_kernel<Kernel>(k){}
+ //FIXME: pass the kernel to the functor in the iterator
+ typedef U result_type;
+ template<class T>
+ result_type operator()(T const& t,Begin_tag)const{
+ return result_type(t,0,this->kernel());
+ }
+ template<class T>
+ result_type operator()(T const& t,End_tag)const{
+ return result_type(t,Self().dimension(),this->kernel());
+ }
+ };
+ template<class T,class D> struct Functor<T,D,Construct_iterator_tag> {
+ typedef Construct_iter<typename Base::template Iterator<typename map_result_tag<T>::type>::type> type;
+ };
+
+
+ //TODO: what about other functors of the Misc category?
+ // for Point_dimension, we should apply it to the approximate point
+ // for printing, we should??? just not do printing this way?
+};
+
+
+} //namespace CGAL
+
+#endif // CGAL_KERNEL_D_LAZY_CARTESIAN_H
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Aff_transformation.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Aff_transformation.h
new file mode 100644
index 00000000..6d9f070f
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Aff_transformation.h
@@ -0,0 +1,59 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_KD_TYPE_AFF_TRANSFORMATION_H
+#define CGAL_KD_TYPE_AFF_TRANSFORMATION_H
+#include <CGAL/config.h>
+#include <CGAL/NewKernel_d/store_kernel.h>
+#include <boost/preprocessor/repetition.hpp>
+
+// Dummy, that's all the Kernel_d concept requires, so a useful class will wait.
+
+namespace CGAL {
+template<class R_>
+struct Aff_transformation {
+ typedef R_ R;
+};
+namespace CartesianDKernelFunctors {
+template<class R_> struct Construct_aff_transformation {
+ CGAL_FUNCTOR_INIT_IGNORE(Construct_aff_transformation)
+ typedef R_ R;
+ typedef typename Get_type<R, Aff_transformation_tag>::type result_type;
+#ifdef CGAL_CXX11
+ template<class...T>
+ result_type operator()(T&&...)const{return result_type();}
+#else
+ result_type operator()()const{
+ return result_type();
+ }
+#define CGAL_CODE(Z,N,_) template<BOOST_PP_ENUM_PARAMS(N,class U)> \
+ result_type operator()(BOOST_PP_ENUM_BINARY_PARAMS(N,U,const& BOOST_PP_INTERCEPT))const{ \
+ return result_type(); \
+ }
+ BOOST_PP_REPEAT_FROM_TO(1, 9, CGAL_CODE, _ )
+#undef CGAL_CODE
+
+#endif
+};
+}
+CGAL_KD_DEFAULT_TYPE(Aff_transformation_tag,(CGAL::Aff_transformation<K>),(),());
+CGAL_KD_DEFAULT_FUNCTOR(Construct_ttag<Aff_transformation_tag>,(CartesianDKernelFunctors::Construct_aff_transformation<K>),(Aff_transformation_tag),());
+
+}
+#endif
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Hyperplane.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Hyperplane.h
new file mode 100644
index 00000000..14e35b01
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Hyperplane.h
@@ -0,0 +1,159 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_KD_TYPE_HYPERPLANE_H
+#define CGAL_KD_TYPE_HYPERPLANE_H
+#include <CGAL/enum.h>
+#include <CGAL/number_utils.h>
+#include <CGAL/NewKernel_d/store_kernel.h>
+#include <boost/iterator/transform_iterator.hpp>
+#include <boost/iterator/counting_iterator.hpp>
+namespace CGAL {
+template <class R_> class Hyperplane {
+ typedef typename Get_type<R_, FT_tag>::type FT_;
+ typedef typename Get_type<R_, Vector_tag>::type Vector_;
+ Vector_ v_;
+ FT_ s_;
+
+ public:
+ Hyperplane(Vector_ const&v, FT_ const&s): v_(v), s_(s) {}
+ // TODO: Add a piecewise constructor?
+
+ Vector_ const& orthogonal_vector()const{return v_;}
+ FT_ translation()const{return s_;}
+};
+namespace CartesianDKernelFunctors {
+template <class R_> struct Construct_hyperplane : Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Construct_hyperplane)
+ typedef typename Get_type<R_, Hyperplane_tag>::type result_type;
+ typedef typename Get_type<R_, Point_tag>::type Point;
+ typedef typename Get_type<R_, Vector_tag>::type Vector;
+ typedef typename Get_type<R_, FT_tag>::type FT;
+ private:
+ struct One {
+ typedef int result_type;
+ template<class T>int const& operator()(T const&)const{
+ static const int one = 1;
+ return one;
+ }
+ };
+ public:
+
+ result_type operator()(Vector const&a, FT const&b)const{
+ return result_type(a,b);
+ }
+ // Not really needed
+ result_type operator()()const{
+ typename Get_functor<R_, Construct_ttag<Vector_tag> >::type cv(this->kernel());
+ return result_type(cv(),0);
+ }
+
+ template <class Iter>
+ result_type through(Iter f, Iter e)const{
+ typedef typename R_::LA LA;
+ typedef typename R_::Default_ambient_dimension D1;
+ typedef typename R_::Max_ambient_dimension D2;
+ typedef typename Increment_dimension<D1>::type D1i;
+ typedef typename Increment_dimension<D2>::type D2i;
+
+ typedef Eigen::Matrix<FT, Eigen_dimension<D1>::value, Eigen_dimension<D1i>::value,
+ Eigen::ColMajor|Eigen::AutoAlign, Eigen_dimension<D2>::value, Eigen_dimension<D2i>::value> Matrix;
+ typedef Eigen::Matrix<FT, Eigen_dimension<D1i>::value, 1,
+ Eigen::ColMajor|Eigen::AutoAlign, Eigen_dimension<D2i>::value, 1> Vec;
+ typename Get_functor<R_, Compute_point_cartesian_coordinate_tag>::type c(this->kernel());
+ typename Get_functor<R_, Construct_ttag<Vector_tag> >::type cv(this->kernel());
+ typename Get_functor<R_, Point_dimension_tag>::type pd(this->kernel());
+
+ Point const& p0=*f;
+ int d = pd(p0);
+ Matrix m(d,d+1);
+ for(int j=0;j<d;++j)
+ m(0,j)=c(p0,j);
+ // Write the point coordinates in lines.
+ int i;
+ for (i=1; ++f!=e; ++i) {
+ Point const& p=*f;
+ for(int j=0;j<d;++j)
+ m(i,j)=c(p,j);
+ }
+ CGAL_assertion (i == d);
+ for(i=0;i<d;++i)
+ m(i,d)=-1;
+ Eigen::FullPivLU<Matrix> lu(m);
+ Vec res = lu.kernel().col(0);
+ return this->operator()(cv(d,LA::vector_begin(res),LA::vector_end(res)-1),res(d));
+ }
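+ // Each row of m is (p_0, ..., p_{d-1}, -1), so a kernel vector (v, s) of m
+ // satisfies v.p == s for every input point p. For instance in 2D, the
+ // points (1,0) and (0,1) give m = [1 0 -1; 0 1 -1], whose kernel is spanned
+ // by (1,1,1): the constructed hyperplane is the line x + y = 1.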
+ template <class Iter>
+ result_type operator()(Iter f, Iter e, Point const&p, CGAL::Oriented_side s=ON_ORIENTED_BOUNDARY)const{
+ result_type ret = through(f, e);
+ // I don't really like using ON_ORIENTED_BOUNDARY to mean that we don't care; we might as well not pass 'p' at all.
+ if (s == ON_ORIENTED_BOUNDARY)
+ return ret;
+ typename Get_functor<R_, Oriented_side_tag>::type os(this->kernel());
+ CGAL::Oriented_side o = os(ret, p);
+ if (o == ON_ORIENTED_BOUNDARY || o == s)
+ return ret;
+ typename Get_functor<R_, Opposite_vector_tag>::type ov(this->kernel());
+ typename Get_functor<R_, Construct_ttag<Vector_tag> >::type cv(this->kernel());
+ return this->operator()(ov(ret.orthogonal_vector()), -ret.translation());
+ }
+};
+template <class R_> struct Orthogonal_vector {
+ CGAL_FUNCTOR_INIT_IGNORE(Orthogonal_vector)
+ typedef typename Get_type<R_, Hyperplane_tag>::type Hyperplane;
+ typedef typename Get_type<R_, Vector_tag>::type const& result_type;
+ result_type operator()(Hyperplane const&s)const{
+ return s.orthogonal_vector();
+ }
+};
+template <class R_> struct Hyperplane_translation {
+ CGAL_FUNCTOR_INIT_IGNORE(Hyperplane_translation)
+ typedef typename Get_type<R_, Hyperplane_tag>::type Hyperplane;
+ typedef typename Get_type<R_, FT_tag>::type result_type;
+ // TODO: Is_exact?
+ result_type operator()(Hyperplane const&s)const{
+ return s.translation();
+ }
+};
+template <class R_> struct Value_at : Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Value_at)
+ typedef typename Get_type<R_, Hyperplane_tag>::type Hyperplane;
+ typedef typename Get_type<R_, Vector_tag>::type Vector;
+ typedef typename Get_type<R_, Point_tag>::type Point;
+ typedef typename Get_type<R_, FT_tag>::type FT;
+ typedef FT result_type;
+ typedef typename Get_functor<R_, Scalar_product_tag>::type Dot;
+ typedef typename Get_functor<R_, Point_to_vector_tag>::type P2V;
+ result_type operator()(Hyperplane const&h, Point const&p)const{
+ Dot dot(this->kernel());
+ P2V p2v(this->kernel());
+ return dot(h.orthogonal_vector(),p2v(p));
+ // Use Orthogonal_vector to make it generic?
+ // Copy the code from Scalar_product to avoid p2v?
+ }
+};
+}
+//TODO: Add a condition that the hyperplane type is the one from this file.
+CGAL_KD_DEFAULT_TYPE(Hyperplane_tag,(CGAL::Hyperplane<K>),(Vector_tag),());
+CGAL_KD_DEFAULT_FUNCTOR(Construct_ttag<Hyperplane_tag>,(CartesianDKernelFunctors::Construct_hyperplane<K>),(Vector_tag,Hyperplane_tag),(Opposite_vector_tag,Oriented_side_tag));
+CGAL_KD_DEFAULT_FUNCTOR(Orthogonal_vector_tag,(CartesianDKernelFunctors::Orthogonal_vector<K>),(Vector_tag,Hyperplane_tag),());
+CGAL_KD_DEFAULT_FUNCTOR(Hyperplane_translation_tag,(CartesianDKernelFunctors::Hyperplane_translation<K>),(Hyperplane_tag),());
+CGAL_KD_DEFAULT_FUNCTOR(Value_at_tag,(CartesianDKernelFunctors::Value_at<K>),(Point_tag,Vector_tag,Hyperplane_tag),(Scalar_product_tag,Point_to_vector_tag));
+} // namespace CGAL
+#endif
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Iso_box.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Iso_box.h
new file mode 100644
index 00000000..d053f351
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Iso_box.h
@@ -0,0 +1,88 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_KERNELD_TYPES_ISO_BOX_H
+#define CGAL_KERNELD_TYPES_ISO_BOX_H
+#include <utility>
+#include <CGAL/basic.h>
+#include <CGAL/NewKernel_d/functor_tags.h>
+#include <CGAL/Kernel/mpl.h>
+#include <CGAL/transforming_pair_iterator.h>
+namespace CGAL {
+template <class R_> class Iso_box {
+ typedef typename Get_type<R_, FT_tag>::type FT_;
+ typedef typename Get_type<R_, Point_tag>::type Point_;
+ typedef std::pair<Point_,Point_> Data_;
+ Data_ data;
+ public:
+ Iso_box(){}
+ Iso_box(Point_ const&a, Point_ const&b): data(a,b) {}
+ Point_ min BOOST_PREVENT_MACRO_SUBSTITUTION ()const{
+ return data.first;
+ }
+ Point_ max BOOST_PREVENT_MACRO_SUBSTITUTION ()const{
+ return data.second;
+ }
+};
+namespace CartesianDKernelFunctors {
+ template <class R_> struct Construct_iso_box : Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Construct_iso_box)
+ typedef typename Get_type<R_, Iso_box_tag>::type result_type;
+ typedef typename Get_type<R_, RT_tag>::type RT;
+ typedef typename Get_type<R_, Point_tag>::type Point;
+ typedef typename Get_functor<R_, Construct_ttag<Point_tag> >::type Cp_;
+ typedef typename Get_functor<R_, Construct_ttag<Point_cartesian_const_iterator_tag> >::type Ci_;
+ result_type operator()(Point const&a, Point const&b)const{
+ Cp_ cp(this->kernel());
+ Ci_ ci(this->kernel());
+ return result_type(cp(
+ make_transforming_pair_iterator(ci(a,Begin_tag()), ci(b,Begin_tag()), Min<RT>()),
+ make_transforming_pair_iterator(ci(a,End_tag()), ci(b,End_tag()), Min<RT>())),
+ cp(
+ make_transforming_pair_iterator(ci(a,Begin_tag()), ci(b,Begin_tag()), Max<RT>()),
+ make_transforming_pair_iterator(ci(a,End_tag()), ci(b,End_tag()), Max<RT>())));
+ }
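+ // The two corners passed to cp are built coordinate-wise, taking
+ // min(a_i, b_i) for the first point and max(a_i, b_i) for the second, so
+ // the result is a valid iso-box whatever the relative position of a and b.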
+ };
+
+ template <class R_> struct Construct_min_vertex {
+ CGAL_FUNCTOR_INIT_IGNORE(Construct_min_vertex)
+ typedef typename Get_type<R_, Iso_box_tag>::type argument_type;
+ //TODO: make result_type a reference
+ typedef typename Get_type<R_, Point_tag>::type result_type;
+ result_type operator()(argument_type const&b)const{
+ return b.min BOOST_PREVENT_MACRO_SUBSTITUTION ();
+ }
+ };
+ template <class R_> struct Construct_max_vertex {
+ CGAL_FUNCTOR_INIT_IGNORE(Construct_max_vertex)
+ typedef typename Get_type<R_, Iso_box_tag>::type argument_type;
+ typedef typename Get_type<R_, Point_tag>::type result_type;
+ result_type operator()(argument_type const&b)const{
+ return b.max BOOST_PREVENT_MACRO_SUBSTITUTION ();
+ }
+ };
+}
+//TODO (other types as well) only enable these functors if the Iso_box type is the one defined in this file...
+CGAL_KD_DEFAULT_TYPE(Iso_box_tag,(CGAL::Iso_box<K>),(Point_tag),());
+CGAL_KD_DEFAULT_FUNCTOR(Construct_ttag<Iso_box_tag>,(CartesianDKernelFunctors::Construct_iso_box<K>),(Iso_box_tag,Point_tag),(Construct_ttag<Point_cartesian_const_iterator_tag>,Construct_ttag<Point_tag>));
+CGAL_KD_DEFAULT_FUNCTOR(Construct_min_vertex_tag,(CartesianDKernelFunctors::Construct_min_vertex<K>),(Iso_box_tag),());
+CGAL_KD_DEFAULT_FUNCTOR(Construct_max_vertex_tag,(CartesianDKernelFunctors::Construct_max_vertex<K>),(Iso_box_tag),());
+} // namespace CGAL
+
+#endif // CGAL_KERNELD_TYPES_ISO_BOX_H
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Line.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Line.h
new file mode 100644
index 00000000..6a09571c
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Line.h
@@ -0,0 +1,66 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_KERNELD_TYPES_LINE_H
+#define CGAL_KERNELD_TYPES_LINE_H
+#include <utility>
+#include <CGAL/NewKernel_d/functor_tags.h>
+#include <CGAL/Kernel/mpl.h>
+namespace CGAL {
+template <class R_> class Line {
+ typedef typename Get_type<R_, FT_tag>::type FT_;
+ typedef typename Get_type<R_, Point_tag>::type Point_;
+ typedef std::pair<Point_,Point_> Data_;
+ Data_ data;
+ public:
+ Line(){}
+ Line(Point_ const&a, Point_ const&b): data(a,b) {}
+ Point_ point(int i)const{
+ if(i==0) return data.first;
+ if(i==1) return data.second;
+ throw "not implemented";
+ }
+ Line opposite()const{
+ return Line(data.second,data.first);
+ }
+};
+namespace CartesianDKernelFunctors {
+ template <class R_> struct Construct_line : Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Construct_line)
+ typedef typename Get_type<R_, Line_tag>::type result_type;
+ typedef typename Get_type<R_, Point_tag>::type Point;
+ typedef typename Get_type<R_, Vector_tag>::type Vector;
+ typedef typename Get_functor<R_, Translated_point_tag>::type Tp_;
+ //typedef typename Get_functor<R_, Difference_of_points_tag>::type Dp_;
+ //typedef typename Get_functor<R_, Scaled_vector_tag>::type Sv_;
+ result_type operator()(Point const&a, Point const&b)const{
+ return result_type(a,b);
+ }
+ result_type operator()(Point const&a, typename First_if_different<Vector,Point>::Type const&b)const{
+ Tp_ tp(this->kernel());
+ return result_type(a,tp(a,b));
+ }
+ };
+}
+CGAL_KD_DEFAULT_TYPE(Line_tag,(CGAL::Line<K>),(Point_tag),());
+CGAL_KD_DEFAULT_FUNCTOR(Construct_ttag<Line_tag>,(CartesianDKernelFunctors::Construct_line<K>),(Line_tag,Point_tag,Vector_tag),(Translated_point_tag));
+
+} // namespace CGAL
+
+#endif // CGAL_KERNELD_TYPES_LINE_H
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Ray.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Ray.h
new file mode 100644
index 00000000..be845e76
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Ray.h
@@ -0,0 +1,66 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_KERNELD_TYPES_RAY_H
+#define CGAL_KERNELD_TYPES_RAY_H
+#include <utility>
+#include <CGAL/NewKernel_d/functor_tags.h>
+#include <CGAL/Kernel/mpl.h>
+namespace CGAL {
+template <class R_> class Ray {
+ typedef typename Get_type<R_, FT_tag>::type FT_;
+ typedef typename Get_type<R_, Point_tag>::type Point_;
+ typedef typename Get_type<R_, Vector_tag>::type Vector_;
+ typedef std::pair<Point_,Vector_> Data_;
+ Data_ data;
+ public:
+ Ray(){}
+ Ray(Point_ const&a, Vector_ const&b): data(a,b) {}
+ Point_ source()const{
+ return data.first;
+ }
+ // FIXME: return a R_::Direction?
+ Vector_ direction()const{
+ return data.second;
+ }
+};
+namespace CartesianDKernelFunctors {
+ template <class R_> struct Construct_ray : Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Construct_ray)
+ typedef typename Get_type<R_, Ray_tag>::type result_type;
+ typedef typename Get_type<R_, Point_tag>::type Point;
+ typedef typename Get_type<R_, Vector_tag>::type Vector;
+ typedef typename Get_functor<R_, Difference_of_points_tag>::type Dp_;
+ //typedef typename Get_functor<R_, Translated_point_tag>::type Tp_;
+ //typedef typename Get_functor<R_, Scaled_vector_tag>::type Sv_;
+ result_type operator()(Point const&a, Vector const&b)const{
+ return result_type(a,b);
+ }
+ result_type operator()(Point const&a, typename First_if_different<Point,Vector>::Type const&b)const{
+ Dp_ dp(this->kernel());
+ return result_type(a,dp(b,a));
+ }
+ };
+}
+CGAL_KD_DEFAULT_TYPE(Ray_tag,(CGAL::Ray<K>),(Point_tag,Vector_tag),());
+CGAL_KD_DEFAULT_FUNCTOR(Construct_ttag<Ray_tag>,(CartesianDKernelFunctors::Construct_ray<K>),(Point_tag,Ray_tag,Vector_tag),(Difference_of_points_tag));
+
+} // namespace CGAL
+
+#endif // CGAL_KERNELD_TYPES_RAY_H
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Segment.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Segment.h
new file mode 100644
index 00000000..38361c2b
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Segment.h
@@ -0,0 +1,121 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_KERNELD_SEGMENTD_H
+#define CGAL_KERNELD_SEGMENTD_H
+#include <CGAL/config.h>
+#include <utility>
+#include <CGAL/NewKernel_d/functor_tags.h>
+namespace CGAL {
+template <class R_> class Segment {
+ typedef typename Get_type<R_, FT_tag>::type FT_;
+ typedef typename Get_type<R_, Point_tag>::type Point_;
+ //typedef typename R_::Vector Vector_;
+ //typedef typename Get_functor<R_, Construct_ttag<Vector_tag> >::type Cv_;
+// typedef typename R_::Squared_distance Csd_;
+ typedef std::pair<Point_,Point_> Data_;
+ Data_ data;
+ public:
+ //typedef Segmentd<R_> Segment;
+#ifdef CGAL_CXX11
+ //FIXME: don't forward directly; piecewise_construct should call the point construction functor (I guess? or is it unnecessary?)
+ template<class...U,class=typename std::enable_if<!std::is_same<std::tuple<typename std::decay<U>::type...>,std::tuple<Segment>>::value>::type>
+ Segment(U&&...u):data(std::forward<U>(u)...){}
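+ // The enable_if rejects the case where the single argument itself decays to
+ // Segment, so copy and move construction still use the implicitly declared
+ // constructors rather than this forwarding one.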
+#else
+ Segment(){}
+ Segment(Point_ const&a, Point_ const&b): data(a,b) {}
+ //template<class A,class T1,class T2>
+ //Segment(A const&,T1 const&t1,T2 const&t2)
+#endif
+ Point_ source()const{return data.first;}
+ Point_ target()const{return data.second;}
+ Point_ operator[](int i)const{
+ if((i%2)==0)
+ return source();
+ else
+ return target();
+ }
+ Segment opposite()const{
+ return Segment(target(),source());
+ }
+ //Vector_ vector()const{
+ // return Cv_()(data.first,data.second);
+ //}
+// FT_ squared_length()const{
+// return Csd_()(data.first,data.second);
+// }
+};
+
+namespace CartesianDKernelFunctors {
+
+template<class R_> struct Construct_segment : Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Construct_segment)
+ typedef R_ R;
+ typedef typename Get_type<R_, Point_tag>::type Point;
+ typedef typename Get_type<R_, Segment_tag>::type Segment;
+ typedef typename Get_functor<R_, Construct_ttag<Point_tag> >::type CP;
+ typedef Segment result_type;
+ result_type operator()(Point const&a, Point const&b)const{
+ return result_type(a,b);
+ }
+ // Not really needed, especially since it forces us to store the kernel
+ result_type operator()()const{
+ Point p = typename Get_functor<R_, Construct_ttag<Point_tag> >::type (this->kernel()) ();
+ return result_type (p, p);
+ }
+ // T should only be std::piecewise_construct_t, but we shouldn't fail if it doesn't exist.
+ template<class T,class U,class V>
+ result_type operator()(CGAL_FORWARDABLE(T),CGAL_FORWARDABLE(U) u,CGAL_FORWARDABLE(V) v)const{
+ CP cp(this->kernel());
+ result_type r = {{
+ call_on_tuple_elements<Point>(cp, CGAL_FORWARD(U,u)),
+ call_on_tuple_elements<Point>(cp, CGAL_FORWARD(V,v)) }};
+ return r;
+ }
+};
+
+// This should be part of Construct_point, according to Kernel_23 conventions
+template<class R_> struct Segment_extremity {
+ CGAL_FUNCTOR_INIT_IGNORE(Segment_extremity)
+ typedef R_ R;
+ typedef typename Get_type<R_, Point_tag>::type Point;
+ typedef typename Get_type<R_, Segment_tag>::type Segment;
+ typedef Point result_type;
+ result_type operator()(Segment const&s, int i)const{
+ if(i==0) return s.source();
+ CGAL_assertion(i==1);
+ return s.target();
+ }
+#ifdef CGAL_CXX11
+ result_type operator()(Segment &&s, int i)const{
+ if(i==0) return std::move(s.source());
+ CGAL_assertion(i==1);
+ return std::move(s.target());
+ }
+#endif
+};
+} // CartesianDKernelFunctors
+
+CGAL_KD_DEFAULT_TYPE(Segment_tag,(CGAL::Segment<K>),(Point_tag),());
+CGAL_KD_DEFAULT_FUNCTOR(Construct_ttag<Segment_tag>,(CartesianDKernelFunctors::Construct_segment<K>),(Segment_tag,Point_tag),(Construct_ttag<Point_tag>));
+CGAL_KD_DEFAULT_FUNCTOR(Segment_extremity_tag,(CartesianDKernelFunctors::Segment_extremity<K>),(Segment_tag,Point_tag),());
+
+} // namespace CGAL
+
+#endif // CGAL_KERNELD_SEGMENTD_H
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Sphere.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Sphere.h
new file mode 100644
index 00000000..114410b4
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Sphere.h
@@ -0,0 +1,132 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_KD_TYPE_SPHERE_H
+#define CGAL_KD_TYPE_SPHERE_H
+#include <CGAL/NewKernel_d/store_kernel.h>
+#include <boost/iterator/counting_iterator.hpp>
+namespace CGAL {
+template <class R_> class Sphere {
+ typedef typename Get_type<R_, FT_tag>::type FT_;
+ typedef typename Get_type<R_, Point_tag>::type Point_;
+ Point_ c_;
+ FT_ r2_;
+
+ public:
+ Sphere(Point_ const&p, FT_ const&r2): c_(p), r2_(r2) {}
+ // TODO: Add a piecewise constructor?
+
+ Point_ const& center()const{return c_;}
+ FT_ const& squared_radius()const{return r2_;}
+};
+
+namespace CartesianDKernelFunctors {
+template <class R_> struct Construct_sphere : Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Construct_sphere)
+ typedef typename Get_type<R_, Sphere_tag>::type result_type;
+ typedef typename Get_type<R_, Point_tag>::type Point;
+ typedef typename Get_type<R_, FT_tag>::type FT;
+ result_type operator()(Point const&a, FT const&b)const{
+ return result_type(a,b);
+ }
+ // Not really needed
+ result_type operator()()const{
+ typename Get_functor<R_, Construct_ttag<Point_tag> >::type cp(this->kernel());
+ return result_type(cp(),0);
+ }
+ template <class Iter>
+ result_type operator()(Iter f, Iter e)const{
+ typename Get_functor<R_, Construct_circumcenter_tag>::type cc(this->kernel());
+ typename Get_functor<R_, Squared_distance_tag>::type sd(this->kernel());
+
+ // It should be possible to avoid copying the center by moving this code to a constructor.
+ Point center = cc(f, e);
+ FT const& r2 = sd(center, *f);
+ return this->operator()(CGAL_MOVE(center), r2);
+ }
+};
+
+template <class R_> struct Center_of_sphere : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Center_of_sphere)
+ typedef typename Get_type<R_, Sphere_tag>::type Sphere;
+ // No reference because of the second overload
+ typedef typename Get_type<R_, Point_tag>::type result_type;
+
+ result_type const& operator()(Sphere const&s)const{
+ return s.center();
+ }
+
+ template<class Iter>
+ result_type operator()(Iter b, Iter e)const{
+ typename Get_functor<R_, Construct_ttag<Sphere_tag> >::type cs(this->kernel());
+ return operator()(cs(b,e)); // computes the radius needlessly
+ }
+};
+
+template <class R_> struct Squared_radius {
+ CGAL_FUNCTOR_INIT_IGNORE(Squared_radius)
+ typedef typename Get_type<R_, Sphere_tag>::type Sphere;
+ typedef typename Get_type<R_, FT_tag>::type const& result_type;
+ // TODO: Is_exact?
+ result_type operator()(Sphere const&s)const{
+ return s.squared_radius();
+ }
+};
+
+// FIXME: Move it to the generic functors, using the two above and conditional to the existence of sqrt(FT)
+template<class R_> struct Point_of_sphere : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Point_of_sphere)
+ typedef R_ R;
+ typedef typename Get_type<R, FT_tag>::type FT;
+ typedef typename Get_type<R, RT_tag>::type RT;
+ typedef typename Get_type<R, Point_tag>::type Point;
+ typedef typename Get_type<R, Sphere_tag>::type Sphere;
+ typedef typename Get_functor<R, Construct_ttag<Point_tag> >::type CP;
+ typedef typename Get_functor<R, Construct_ttag<Point_cartesian_const_iterator_tag> >::type CI;
+ typedef typename Get_functor<R, Point_dimension_tag>::type PD;
+ typedef Point result_type;
+ typedef Sphere first_argument_type;
+ typedef int second_argument_type;
+ struct Trans : std::binary_function<FT,int,FT> {
+ FT const& r_; int idx; bool sgn;
+ Trans (int n, FT const& r, bool b) : r_(r), idx(n), sgn(b) {}
+ FT operator()(FT const&x, int i)const{
+ return (i == idx) ? sgn ? x + r_ : x - r_ : x;
+ }
+ };
+ result_type operator()(Sphere const&s, int i)const{
+ CI ci(this->kernel());
+ PD pd(this->kernel());
+ typedef boost::counting_iterator<int,std::random_access_iterator_tag> Count;
+ Point const&c = s.center();
+ int d=pd(c);
+ bool last = (i == d);
+ FT r = sqrt(s.squared_radius());
+ Trans t(last ? 0 : i, r, !last);
+ return CP(this->kernel())(make_transforming_pair_iterator(ci(c,Begin_tag()),Count(0),t),make_transforming_pair_iterator(ci(c,End_tag()),Count(d),t));
+ }
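+ // Trans shifts exactly one Cartesian coordinate of the center by the
+ // radius: operator()(s, i) returns center + r*e_i for 0 <= i < d and
+ // center - r*e_0 for i == d, giving d+1 points on the sphere (distinct as
+ // long as the radius is nonzero). Note that this needs sqrt() on FT.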
+};
+}
+CGAL_KD_DEFAULT_TYPE(Sphere_tag,(CGAL::Sphere<K>),(Point_tag),());
+CGAL_KD_DEFAULT_FUNCTOR(Construct_ttag<Sphere_tag>,(CartesianDKernelFunctors::Construct_sphere<K>),(Sphere_tag,Point_tag),(Construct_ttag<Point_tag>,Compute_point_cartesian_coordinate_tag,Squared_distance_tag,Squared_distance_to_origin_tag,Point_dimension_tag));
+CGAL_KD_DEFAULT_FUNCTOR(Center_of_sphere_tag,(CartesianDKernelFunctors::Center_of_sphere<K>),(Sphere_tag,Point_tag),(Construct_ttag<Sphere_tag>));
+CGAL_KD_DEFAULT_FUNCTOR(Squared_radius_tag,(CartesianDKernelFunctors::Squared_radius<K>),(Sphere_tag),());
+CGAL_KD_DEFAULT_FUNCTOR(Point_of_sphere_tag,(CartesianDKernelFunctors::Point_of_sphere<K>),(Sphere_tag,Point_tag),(Construct_ttag<Point_tag>, Construct_ttag<Point_cartesian_const_iterator_tag>));
+} // namespace CGAL
+#endif
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Weighted_point.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Weighted_point.h
new file mode 100644
index 00000000..1caf8701
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Weighted_point.h
@@ -0,0 +1,205 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_KD_TYPE_WP_H
+#define CGAL_KD_TYPE_WP_H
+#include <CGAL/NewKernel_d/store_kernel.h>
+#include <boost/iterator/counting_iterator.hpp>
+namespace CGAL {
+namespace KerD {
+template <class R_> class Weighted_point {
+ typedef typename Get_type<R_, FT_tag>::type FT_;
+ typedef typename Get_type<R_, Point_tag>::type Point_;
+ Point_ c_;
+ FT_ w_;
+
+ public:
+ Weighted_point(Point_ const&p, FT_ const&w): c_(p), w_(w) {}
+ // TODO: Add a piecewise constructor?
+
+ Point_ const& point()const{return c_;}
+ FT_ const& weight()const{return w_;}
+};
+}
+
+namespace CartesianDKernelFunctors {
+template <class R_> struct Construct_weighted_point : Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Construct_weighted_point)
+ typedef typename Get_type<R_, Weighted_point_tag>::type result_type;
+ typedef typename Get_type<R_, Point_tag>::type Point;
+ typedef typename Get_type<R_, FT_tag>::type FT;
+ result_type operator()(Point const&a, FT const&b)const{
+ return result_type(a,b);
+ }
+ // Not really needed
+ result_type operator()()const{
+ typename Get_functor<R_, Construct_ttag<Point_tag> >::type cp(this->kernel());
+ return result_type(cp(),0);
+ }
+};
+
+template <class R_> struct Point_drop_weight {
+ CGAL_FUNCTOR_INIT_IGNORE(Point_drop_weight)
+ typedef typename Get_type<R_, Weighted_point_tag>::type argument_type;
+ typedef typename Get_type<R_, Point_tag>::type const& result_type;
+ // Returning a reference is fragile
+
+ result_type operator()(argument_type const&s)const{
+ return s.point();
+ }
+};
+
+template <class R_> struct Point_weight {
+ CGAL_FUNCTOR_INIT_IGNORE(Point_weight)
+ typedef typename Get_type<R_, Weighted_point_tag>::type argument_type;
+ typedef typename Get_type<R_, FT_tag>::type result_type;
+
+ result_type operator()(argument_type const&s)const{
+ return s.weight();
+ }
+};
+
+template <class R_> struct Power_distance : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Power_distance)
+ typedef typename Get_type<R_, Weighted_point_tag>::type first_argument_type;
+ typedef first_argument_type second_argument_type;
+ typedef typename Get_type<R_, FT_tag>::type result_type;
+
+ result_type operator()(first_argument_type const&a, second_argument_type const&b)const{
+ typename Get_functor<R_, Point_drop_weight_tag>::type pdw(this->kernel());
+ typename Get_functor<R_, Point_weight_tag>::type pw(this->kernel());
+ typename Get_functor<R_, Squared_distance_tag>::type sd(this->kernel());
+ return sd(pdw(a),pdw(b))-pw(a)-pw(b);
+ }
+};
+template <class R_> struct Power_distance_to_point : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Power_distance_to_point)
+ typedef typename Get_type<R_, Weighted_point_tag>::type first_argument_type;
+ typedef typename Get_type<R_, Point_tag>::type second_argument_type;
+ typedef typename Get_type<R_, FT_tag>::type result_type;
+
+ result_type operator()(first_argument_type const&a, second_argument_type const&b)const{
+ typename Get_functor<R_, Point_drop_weight_tag>::type pdw(this->kernel());
+ typename Get_functor<R_, Point_weight_tag>::type pw(this->kernel());
+ typename Get_functor<R_, Squared_distance_tag>::type sd(this->kernel());
+ return sd(pdw(a),b)-pw(a);
+ }
+};
+
+template<class R_> struct Power_side_of_power_sphere : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Power_side_of_power_sphere)
+ typedef R_ R;
+ typedef typename Get_type<R, Oriented_side_tag>::type result_type;
+
+ template<class Iter, class Pt>
+ result_type operator()(Iter const& f, Iter const& e, Pt const& p0) const {
+ typename Get_functor<R, Power_side_of_power_sphere_raw_tag>::type ptr(this->kernel());
+ typename Get_functor<R, Point_drop_weight_tag>::type pdw(this->kernel());
+ typename Get_functor<R, Point_weight_tag>::type pw(this->kernel());
+ return ptr (
+ make_transforming_iterator (f, pdw),
+ make_transforming_iterator (e, pdw),
+ make_transforming_iterator (f, pw),
+ pdw (p0),
+ pw (p0));
+ }
+};
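+// Both this predicate and the in-flat variant below strip the weights on the
+// fly: make_transforming_iterator(f, pdw) walks the bare points while
+// make_transforming_iterator(f, pw) walks the matching weights, so the *_raw
+// predicate only ever sees plain points together with a parallel weight range.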
+
+template<class R_> struct In_flat_power_side_of_power_sphere : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(In_flat_power_side_of_power_sphere)
+ typedef R_ R;
+ typedef typename Get_type<R, Oriented_side_tag>::type result_type;
+
+ template<class Fo, class Iter, class Pt>
+ result_type operator()(Fo const& fo, Iter const& f, Iter const& e, Pt const& p0) const {
+ typename Get_functor<R, In_flat_power_side_of_power_sphere_raw_tag>::type ptr(this->kernel());
+ typename Get_functor<R, Point_drop_weight_tag>::type pdw(this->kernel());
+ typename Get_functor<R, Point_weight_tag>::type pw(this->kernel());
+ return ptr (
+ fo,
+ make_transforming_iterator (f, pdw),
+ make_transforming_iterator (e, pdw),
+ make_transforming_iterator (f, pw),
+ pdw (p0),
+ pw (p0));
+ }
+};
+
+// Construct a point at (weighted) distance 0 from all the input
+template <class R_> struct Power_center : Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Power_center)
+ typedef typename Get_type<R_, Weighted_point_tag>::type WPoint;
+ typedef WPoint result_type;
+ typedef typename Get_type<R_, Point_tag>::type Point;
+ typedef typename Get_type<R_, FT_tag>::type FT;
+ template <class Iter>
+ result_type operator()(Iter f, Iter e)const{
+ // 2*(x-y).c == (x^2-wx^2)-(y^2-wy^2)
+ typedef typename R_::LA LA;
+ typedef typename LA::Square_matrix Matrix;
+ typedef typename LA::Vector Vec;
+ typedef typename LA::Construct_vector CVec;
+ typename Get_functor<R_, Compute_point_cartesian_coordinate_tag>::type c(this->kernel());
+ typename Get_functor<R_, Construct_ttag<Point_tag> >::type cp(this->kernel());
+ typename Get_functor<R_, Point_dimension_tag>::type pd(this->kernel());
+ typename Get_functor<R_, Squared_distance_to_origin_tag>::type sdo(this->kernel());
+ typename Get_functor<R_, Power_distance_to_point_tag>::type pdp(this->kernel());
+ typename Get_functor<R_, Point_drop_weight_tag>::type pdw(this->kernel());
+ typename Get_functor<R_, Point_weight_tag>::type pw(this->kernel());
+ typename Get_functor<R_, Construct_ttag<Weighted_point_tag> >::type cwp(this->kernel());
+
+ WPoint const& wp0 = *f;
+ Point const& p0 = pdw(wp0);
+ int d = pd(p0);
+ FT const& n0 = sdo(p0) - pw(wp0);
+ Matrix m(d,d);
+ Vec b = typename CVec::Dimension()(d);
+ // Write the point coordinates in lines.
+ int i;
+ for(i=0; ++f!=e; ++i) {
+ WPoint const& wp=*f;
+ Point const& p=pdw(wp);
+ FT const& np = sdo(p) - pw(wp);
+ for(int j=0;j<d;++j) {
+ m(i,j)=2*(c(p,j)-c(p0,j));
+ b[i] = np - n0;
+ }
+ }
+ CGAL_assertion (i == d);
+ Vec res = typename CVec::Dimension()(d);
+ //std::cout << "Mat: " << m << "\n Vec: " << one << std::endl;
+ LA::solve(res, CGAL_MOVE(m), CGAL_MOVE(b));
+ //std::cout << "Sol: " << res << std::endl;
+ Point center = cp(d,LA::vector_begin(res),LA::vector_end(res));
+ FT const& r2 = pdp (wp0, center);
+ return cwp(CGAL_MOVE(center), r2);
+ }
+};
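+// The center computed above solves the linear system
+//   2*(p_i - p_0) . c = (|p_i|^2 - w_i) - (|p_0|^2 - w_0),  i = 1..d,
+// so every input weighted point has the same power distance to c; that common
+// value is returned as the weight of the constructed weighted point.
+// Illustrative usage sketch (assuming a kernel K instantiating the functors
+// registered below, an instance k of K, and a range wpts of d+1 weighted points):
+//   typename Get_functor<K, Power_center_tag>::type power_center(k);
+//   typename Get_type<K, Weighted_point_tag>::type c = power_center(wpts.begin(), wpts.end());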
+}
+CGAL_KD_DEFAULT_TYPE(Weighted_point_tag,(CGAL::KerD::Weighted_point<K>),(Point_tag),());
+CGAL_KD_DEFAULT_FUNCTOR(Construct_ttag<Weighted_point_tag>,(CartesianDKernelFunctors::Construct_weighted_point<K>),(Weighted_point_tag,Point_tag),());
+CGAL_KD_DEFAULT_FUNCTOR(Point_drop_weight_tag,(CartesianDKernelFunctors::Point_drop_weight<K>),(Weighted_point_tag,Point_tag),());
+CGAL_KD_DEFAULT_FUNCTOR(Point_weight_tag,(CartesianDKernelFunctors::Point_weight<K>),(Weighted_point_tag,Point_tag),());
+CGAL_KD_DEFAULT_FUNCTOR(Power_side_of_power_sphere_tag,(CartesianDKernelFunctors::Power_side_of_power_sphere<K>),(Weighted_point_tag),(Power_side_of_power_sphere_raw_tag,Point_drop_weight_tag,Point_weight_tag));
+CGAL_KD_DEFAULT_FUNCTOR(In_flat_power_side_of_power_sphere_tag,(CartesianDKernelFunctors::In_flat_power_side_of_power_sphere<K>),(Weighted_point_tag),(In_flat_power_side_of_power_sphere_raw_tag,Point_drop_weight_tag,Point_weight_tag));
+CGAL_KD_DEFAULT_FUNCTOR(Power_distance_tag,(CartesianDKernelFunctors::Power_distance<K>),(Weighted_point_tag,Point_tag),(Squared_distance_tag,Point_drop_weight_tag,Point_weight_tag));
+CGAL_KD_DEFAULT_FUNCTOR(Power_distance_to_point_tag,(CartesianDKernelFunctors::Power_distance_to_point<K>),(Weighted_point_tag,Point_tag),(Squared_distance_tag,Point_drop_weight_tag,Point_weight_tag));
+CGAL_KD_DEFAULT_FUNCTOR(Power_center_tag,(CartesianDKernelFunctors::Power_center<K>),(Weighted_point_tag,Point_tag),(Compute_point_cartesian_coordinate_tag,Construct_ttag<Point_tag>,Construct_ttag<Weighted_point_tag>,Point_dimension_tag,Squared_distance_to_origin_tag,Point_drop_weight_tag,Point_weight_tag,Power_distance_to_point_tag));
+} // namespace CGAL
+#endif
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/array.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/array.h
new file mode 100644
index 00000000..0ad9bb36
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/array.h
@@ -0,0 +1,165 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_VECTOR_ARRAY_H
+#define CGAL_VECTOR_ARRAY_H
+#include <boost/type_traits/is_arithmetic.hpp>
+#include <boost/utility/enable_if.hpp>
+#include <CGAL/Dimension.h>
+#include <CGAL/NewKernel_d/utils.h>
+#include <CGAL/array.h>
+#include <boost/preprocessor/repetition.hpp>
+#include <boost/preprocessor/repetition/enum.hpp>
+
+#include <CGAL/NewKernel_d/Vector/determinant_of_points_from_vectors.h>
+#include <CGAL/NewKernel_d/Vector/determinant_of_vectors_small_dim.h>
+#include <CGAL/NewKernel_d/Vector/determinant_of_iterator_to_points_from_iterator_to_vectors.h>
+#include <CGAL/NewKernel_d/Vector/determinant_of_iterator_to_points_from_points.h>
+#include <CGAL/NewKernel_d/Vector/determinant_of_iterator_to_vectors_from_vectors.h>
+
+
+
+namespace CGAL {
+
+// May not be safe to use with dim!=max_dim.
+// In that case, we should store the real dim next to the array.
+template<class NT_,class Dim_,class Max_dim_=Dim_> struct Array_vector {
+ typedef NT_ NT;
+ typedef Dim_ Dimension;
+ typedef Max_dim_ Max_dimension;
+ template< class D2, class D3=D2 >
+ struct Rebind_dimension {
+ typedef Array_vector< NT, D2, D3 > Other;
+ };
+ template<class> struct Property : boost::false_type {};
+
+ static const unsigned d_=Max_dim_::value;
+ CGAL_static_assertion(d_ != (unsigned)UNKNOWN_DIMENSION);
+
+ typedef cpp0x::array<NT,d_> Vector;
+ struct Construct_vector {
+ struct Dimension {
+ // Initialize with NaN if possible?
+ Vector operator()(unsigned CGAL_assertion_code(d)) const {
+ CGAL_assertion(d<=d_);
+ return Vector();
+ }
+ };
+
+ struct Iterator {
+ template<typename Iter>
+ Vector operator()(unsigned CGAL_assertion_code(d),Iter const& f,Iter const& e) const {
+ CGAL_assertion(d==(unsigned) std::distance(f,e));
+ CGAL_assertion(d<=d_);
+ //TODO: optimize for forward iterators
+ Vector a;
+ std::copy(f,e,a.begin());
+ return a;
+ }
+ };
+
+#if 0
+ struct Iterator_add_one {
+ template<typename Iter>
+ Vector operator()(unsigned d,Iter const& f,Iter const& e) const {
+ CGAL_assertion(d==std::distance(f,e)+1);
+ CGAL_assertion(d<=d_);
+ //TODO: optimize
+ Vector a;
+ std::copy(f,e,a.begin());
+ a.back()=1;
+ return a;
+ }
+ };
+#endif
+
+ struct Iterator_and_last {
+ template<typename Iter,typename T>
+ Vector operator()(unsigned CGAL_assertion_code(d),Iter const& f,Iter const& e,CGAL_FORWARDABLE(T) t) const {
+ CGAL_assertion(d==std::distance(f,e)+1);
+ CGAL_assertion(d<=d_);
+ //TODO: optimize for forward iterators
+ Vector a;
+ std::copy(f,e,a.begin());
+ a.back()=CGAL_FORWARD(T,t);
+ return a;
+ }
+ };
+
+ struct Values {
+#ifdef CGAL_CXX11
+ template<class...U>
+ Vector operator()(U&&...u) const {
+ static_assert(sizeof...(U)<=d_,"too many arguments");
+ Vector a={{forward_safe<NT,U>(u)...}};
+ return a;
+ }
+#else
+
+#define CGAL_CODE(Z,N,_) Vector operator()(BOOST_PP_ENUM_PARAMS(N,NT const& t)) const { \
+ CGAL_assertion(N<=d_); \
+ Vector a={{BOOST_PP_ENUM_PARAMS(N,t)}}; \
+ return a; \
+}
+BOOST_PP_REPEAT_FROM_TO(1, 11, CGAL_CODE, _ )
+#undef CGAL_CODE
+
+#endif
+ };
+
+ struct Values_divide {
+#ifdef CGAL_CXX11
+ template<class H,class...U>
+ Vector operator()(H const& h,U&&...u) const {
+ static_assert(sizeof...(U)<=d_,"too many arguments");
+ Vector a={{Rational_traits<NT>().make_rational(std::forward<U>(u),h)...}};
+ return a;
+ }
+#else
+
+#define CGAL_VAR(Z,N,_) Rational_traits<NT>().make_rational( t##N , h)
+#define CGAL_CODE(Z,N,_) template <class H> Vector \
+ operator()(H const&h, BOOST_PP_ENUM_PARAMS(N,NT const& t)) const { \
+ CGAL_assertion(N<=d_); \
+ Vector a={{BOOST_PP_ENUM(N,CGAL_VAR,_)}}; \
+ return a; \
+ }
+ BOOST_PP_REPEAT_FROM_TO(1, 11, CGAL_CODE, _ )
+#undef CGAL_CODE
+#undef CGAL_VAR
+
+#endif
+ };
+ };
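+  // Illustrative usage sketch (assuming NT = double and fixed dimension 3):
+  //   typedef Array_vector<double, Dimension_tag<3> > V;
+  //   V::Construct_vector::Values make;
+  //   V::Vector v = make(1.0, 2.0, 3.0); // a cpp0x::array<double, 3>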
+
+ typedef NT const* Vector_const_iterator;
+ static Vector_const_iterator vector_begin(Vector const&a){
+ return &a[0];
+ }
+ static Vector_const_iterator vector_end(Vector const&a){
+ return &a[0]+d_; // Don't know the real size
+ }
+ static unsigned size_of_vector(Vector const&){
+ return d_; // Don't know the real size
+ }
+
+};
+
+}
+#endif
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/avx4.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/avx4.h
new file mode 100644
index 00000000..954a3c1b
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/avx4.h
@@ -0,0 +1,213 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_VECTOR_AVX4_H
+#define CGAL_VECTOR_AVX4_H
+
+#if !defined __AVX__ || (__GNUC__ * 100 + __GNUC_MINOR__ < 408)
+#error Requires AVX and gcc 4.8+
+#endif
+#include <x86intrin.h>
+
+#include <CGAL/NewKernel_d/functor_tags.h>
+#include <CGAL/Dimension.h>
+#include <CGAL/enum.h> // CGAL::Sign
+#include <CGAL/number_utils.h> // CGAL::sign
+
+
+
+namespace CGAL {
+
+ struct Avx_vector_4 {
+ typedef double NT;
+ typedef Dimension_tag<4> Dimension;
+ typedef Dimension_tag<4> Max_dimension;
+ // No Rebind_dimension, this is a building block
+ template<class,bool=true> struct Property : boost::false_type {};
+ template<bool b> struct Property<Has_vector_plus_minus_tag,b>
+ : boost::true_type {};
+ /* MAYBE?
+ template<bool b> struct Property<Has_vector_scalar_ops_tag,b>
+ : boost::true_type {};
+ */
+ template<bool b> struct Property<Has_determinant_of_vectors_tag,b>
+ : boost::true_type {};
+ template<bool b> struct Property<Has_dot_product_tag,b>
+ : boost::true_type {};
+ template<bool b> struct Property<Has_determinant_of_vectors_omit_last_tag,b>
+ : boost::true_type {};
+
+ typedef __m256d Vector;
+ struct Construct_vector {
+ struct Dimension {
+ // Initialize with NaN?
+ Vector operator()(unsigned d) const {
+ CGAL_assertion(d==4);
+ return Vector();
+ }
+ };
+
+ struct Iterator {
+ template<typename Iter>
+ Vector operator()(unsigned d,Iter const& f,Iter const& e) const {
+ CGAL_assertion(d==4);
+ double x0 = *f;
+ double x1 = *++f;
+ double x2 = *++f;
+ double x3 = *++f;
+ CGAL_assertion(++f==e);
+ Vector a = { x0, x1, x2, x3 };
+ return a;
+ }
+ };
+
+ struct Iterator_and_last {
+ template<typename Iter,typename T>
+ Vector operator()(unsigned d,Iter const& f,Iter const& e,double t) const {
+ CGAL_assertion(d==4);
+ double x0 = *f;
+ double x1 = *++f;
+ double x2 = *++f;
+ CGAL_assertion(++f==e);
+ Vector a = { x0, x1, x2, t };
+ return a;
+ }
+ };
+
+ struct Values {
+ Vector operator()(double a,double b,double c,double d) const {
+ Vector r = { a, b, c, d };
+ return r;
+ }
+ };
+
+ struct Values_divide {
+ Vector operator()(double h,double a,double b,double c,double d) const {
+ // {a,b,c,d}/{h,h,h,h} should be roughly the same
+ Vector r = { a/h, b/h, c/h, d/h };
+ return r;
+ }
+ };
+ };
+
+ public:
+ typedef double const* Vector_const_iterator;
+ static inline Vector_const_iterator vector_begin(Vector const&a){
+ return (Vector_const_iterator)(&a);
+ }
+ static inline Vector_const_iterator vector_end(Vector const&a){
+ return (Vector_const_iterator)(&a)+4;
+ }
+ static inline unsigned size_of_vector(Vector){
+ return 4;
+ }
+ static inline double dot_product(__m256d x, __m256d y){
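+    // p = {x0*y0, x1*y1, x2*y2, x3*y3}; _mm256_hadd_pd(p,p) yields
+    // {p0+p1, p0+p1, p2+p3, p2+p3}, so z[0]+z[2] is the full sum.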
+ __m256d p=x*y;
+ __m256d z=_mm256_hadd_pd(p,p);
+ return z[0]+z[2];
+ }
+ private:
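+  // Lane permutations used below: avx_sym swaps the two 128-bit halves
+  // ({x2,x3,x0,x1}), avx_left rotates lanes left ({x1,x2,x3,x0}) and
+  // avx_right rotates lanes right ({x3,x0,x1,x2}).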
+ static inline __m256d avx_sym(__m256d x){
+#if 0
+ return __builtin_shuffle(x,(__m256i){2,3,0,1});
+#else
+ return _mm256_permute2f128_pd(x,x,1);
+#endif
+ }
+ static inline __m256d avx_left(__m256d x){
+#if 0
+ return __builtin_shuffle(x,(__m256i){1,2,3,0});
+#else
+#ifdef __AVX2__
+ return _mm256_permute4x64_pd(x,1+2*4+3*16+0*64);
+#else
+ __m256d s = _mm256_permute2f128_pd(x,x,1);
+ return _mm256_shuffle_pd(x,s,5);
+#endif
+#endif
+ }
+ static inline __m256d avx_right(__m256d x){
+#if 0
+ return __builtin_shuffle(x,(__m256i){3,0,1,2});
+#else
+#ifdef __AVX2__
+ return _mm256_permute4x64_pd(x,3+0*4+1*16+2*64);
+#else
+ __m256d s = _mm256_permute2f128_pd(x,x,1);
+ return _mm256_shuffle_pd(s,x,5);
+#endif
+#endif
+ }
+ static inline double avx_altprod(__m256d x, __m256d y){
+ __m256d p=x*y;
+ __m256d z=_mm256_hsub_pd(p,p);
+ return z[0]+z[2];
+ }
+ public:
+ static double
+ determinant_of_vectors(Vector a, Vector b, Vector c, Vector d) {
+ __m256d x=a*avx_left(b)-avx_left(a)*b;
+ __m256d yy=a*avx_sym(b);
+ __m256d y=yy-avx_sym(yy);
+ __m256d z0=x*avx_sym(c);
+ __m256d z1=avx_left(x)*c;
+ __m256d z2=y*avx_left(c);
+ __m256d z=z0+z1-z2;
+ return avx_altprod(z,avx_right(d));
+ }
+ static CGAL::Sign
+ sign_of_determinant_of_vectors(Vector a, Vector b, Vector c, Vector d) {
+ return CGAL::sign(determinant_of_vectors(a,b,c,d));
+ }
+
+ private:
+ static inline __m256d avx3_right(__m256d x){
+#if 0
+ return __builtin_shuffle(x,(__m256i){2,0,1,3}); // can replace 3 with anything
+#else
+#ifdef __AVX2__
+ return _mm256_permute4x64_pd(x,2+0*4+1*16+3*64);
+#else
+ __m256d s = _mm256_permute2f128_pd(x,x,1);
+ return _mm256_shuffle_pd(s,x,12);
+#endif
+#endif
+ }
+ public:
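+  // Dot product of the first three lanes only: x0*y0 + x1*y1 + x2*y2.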
+ static inline double dot_product_omit_last(__m256d x, __m256d y){
+ __m256d p=x*y;
+ __m128d q=_mm256_extractf128_pd(p,0);
+ double z=_mm_hadd_pd(q,q)[0];
+ return z+p[2];
+ }
+ // Note: without AVX2, is it faster than the scalar computation?
+ static double
+ determinant_of_vectors_omit_last(Vector a, Vector b, Vector c) {
+ __m256d x=a*avx3_right(b)-avx3_right(a)*b;
+ return dot_product_omit_last(c,avx3_right(x));
+ }
+ static CGAL::Sign
+ sign_of_determinant_of_vectors_omit_last(Vector a, Vector b, Vector c) {
+ return CGAL::sign(determinant_of_vectors_omit_last(a,b,c));
+ }
+
+ };
+
+}
+#endif
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_iterator_to_points_from_iterator_to_vectors.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_iterator_to_points_from_iterator_to_vectors.h
new file mode 100644
index 00000000..b8efbe28
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_iterator_to_points_from_iterator_to_vectors.h
@@ -0,0 +1,76 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_VECTOR_DET_ITER_PTS_ITER_VEC_H
+#define CGAL_VECTOR_DET_ITER_PTS_ITER_VEC_H
+#include <functional>
+#include <CGAL/transforming_iterator.h>
+#include <CGAL/NewKernel_d/functor_tags.h>
+#include <CGAL/Dimension.h>
+
+namespace CGAL {
+
+template <class LA, class Dim_=typename LA::Dimension,
+ class Max_dim_=typename LA::Max_dimension,
+ bool = LA::template Property<Has_determinant_of_iterator_to_points_tag>::value,
+ bool = LA::template Property<Has_determinant_of_iterator_to_vectors_tag>::value>
+struct Add_determinant_of_iterator_to_points_from_iterator_to_vectors : LA {
+ template< class D2, class D3=D2 >
+ struct Rebind_dimension {
+ typedef typename LA::template Rebind_dimension<D2,D3> LA2;
+ typedef Add_determinant_of_iterator_to_points_from_iterator_to_vectors<LA2> Other;
+ };
+};
+
+template <class LA, class Dim_,class Max_dim_>
+struct Add_determinant_of_iterator_to_points_from_iterator_to_vectors
+<LA, Dim_, Max_dim_, false, true> : LA {
+ typedef typename LA::NT NT;
+ typedef typename LA::Vector Vector;
+ template< class D2, class D3=D2 >
+ struct Rebind_dimension {
+ typedef typename LA::template Rebind_dimension<D2,D3> LA2;
+ typedef Add_determinant_of_iterator_to_points_from_iterator_to_vectors<LA2> Other;
+ };
+ template<class P,class=void> struct Property : LA::template Property<P> {};
+ template<class D> struct Property<Has_determinant_of_iterator_to_points_tag, D> :
+ boost::true_type {};
+
+ // TODO: use std::minus, boost::bind, etc
+ template<class T> struct Minus_fixed {
+ T const& a;
+ Minus_fixed(T const&a_):a(a_){}
+ T operator()(T const&b)const{return b-a;}
+ };
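+  // det(p_0, ..., p_d) is evaluated as det(p_1 - p_0, ..., p_d - p_0); the
+  // subtraction of the first point is done on the fly by transforming_iterator.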
+ template<class Iter>
+ static NT determinant_of_iterator_to_points(Iter const&first, Iter const&end){
+ Vector const&a=*first; ++first;
+ Minus_fixed<Vector> f(a);
+ return LA::determinant_of_iterator_to_vectors(make_transforming_iterator(first,f),make_transforming_iterator(end,f));
+ }
+ template<class Iter>
+ static Sign sign_of_determinant_of_iterator_to_points(Iter const&first, Iter const&end){
+ Vector const&a=*first; ++first;
+ Minus_fixed<Vector> f(a);
+ return LA::sign_of_determinant_of_iterator_to_vectors(make_transforming_iterator(first,f),make_transforming_iterator(end,f));
+ }
+};
+
+}
+#endif
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_iterator_to_points_from_points.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_iterator_to_points_from_points.h
new file mode 100644
index 00000000..71a31d81
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_iterator_to_points_from_points.h
@@ -0,0 +1,211 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_VECTOR_DET_ITER_PTS_PTS_H
+#define CGAL_VECTOR_DET_ITER_PTS_PTS_H
+#include <CGAL/NewKernel_d/functor_tags.h>
+#include <CGAL/Dimension.h>
+
+namespace CGAL {
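+// Unrolls an iterator range of d+1 points into the d+1 arguments of
+// determinant_of_points, for each small fixed dimension d handled below.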
+
+template <class LA, class Dim_=typename LA::Dimension,
+ class Max_dim_=typename LA::Max_dimension,
+ bool = LA::template Property<Has_determinant_of_iterator_to_points_tag>::value,
+ bool = LA::template Property<Has_determinant_of_points_tag>::value>
+struct Add_determinant_of_iterator_to_points_from_points : LA {
+ template< class D2, class D3=D2 >
+ struct Rebind_dimension {
+ typedef typename LA::template Rebind_dimension<D2,D3> LA2;
+ typedef Add_determinant_of_iterator_to_points_from_points<LA2> Other;
+ };
+};
+
+//FIXME: Use variadics and boost so it works in any dimension.
+template <class LA, class Max_dim_>
+struct Add_determinant_of_iterator_to_points_from_points
+<LA, Dimension_tag<2>, Max_dim_, false, true> : LA {
+ typedef typename LA::NT NT;
+ typedef typename LA::Vector Vector;
+ template< class D2, class D3=D2 >
+ struct Rebind_dimension {
+ typedef typename LA::template Rebind_dimension<D2,D3> LA2;
+ typedef Add_determinant_of_iterator_to_points_from_points<LA2> Other;
+ };
+ template<class P,class=void> struct Property : LA::template Property<P> {};
+ template<class D> struct Property<Has_determinant_of_iterator_to_points_tag, D> :
+ boost::true_type {};
+
+ template<class Iter>
+ static NT determinant_of_iterator_to_points(Iter const&first, Iter const&end){
+ Vector const&a=*first; ++first;
+ Vector const&b=*first; ++first;
+ Vector const&c=*first; CGAL_assertion(++first==end);
+ return LA::determinant_of_points(a,b,c);
+ }
+ template<class Iter>
+ static Sign sign_of_determinant_of_iterator_to_points(Iter const&first, Iter const&end){
+ Vector const&a=*first; ++first;
+ Vector const&b=*first; ++first;
+ Vector const&c=*first; CGAL_assertion(++first==end);
+ return LA::sign_of_determinant_of_points(a,b,c);
+ }
+};
+
+template <class LA, class Max_dim_>
+struct Add_determinant_of_iterator_to_points_from_points
+<LA, Dimension_tag<3>, Max_dim_, false, true> : LA {
+ typedef typename LA::NT NT;
+ typedef typename LA::Vector Vector;
+ template< class D2, class D3=D2 >
+ struct Rebind_dimension {
+ typedef typename LA::template Rebind_dimension<D2,D3> LA2;
+ typedef Add_determinant_of_iterator_to_points_from_points<LA2> Other;
+ };
+ template<class P,class=void> struct Property : LA::template Property<P> {};
+ template<class D> struct Property<Has_determinant_of_iterator_to_points_tag, D> :
+ boost::true_type {};
+
+ template<class Iter>
+ static NT determinant_of_iterator_to_points(Iter const&first, Iter const&end){
+ Vector const&a=*first; ++first;
+ Vector const&b=*first; ++first;
+ Vector const&c=*first; ++first;
+ Vector const&d=*first; CGAL_assertion(++first==end);
+ return LA::determinant_of_points(a,b,c,d);
+ }
+ template<class Iter>
+ static Sign sign_of_determinant_of_iterator_to_points(Iter const&first, Iter const&end){
+ Vector const&a=*first; ++first;
+ Vector const&b=*first; ++first;
+ Vector const&c=*first; ++first;
+ Vector const&d=*first; CGAL_assertion(++first==end);
+ return LA::sign_of_determinant_of_points(a,b,c,d);
+ }
+};
+
+template <class LA, class Max_dim_>
+struct Add_determinant_of_iterator_to_points_from_points
+<LA, Dimension_tag<4>, Max_dim_, false, true> : LA {
+ typedef typename LA::NT NT;
+ typedef typename LA::Vector Vector;
+ template< class D2, class D3=D2 >
+ struct Rebind_dimension {
+ typedef typename LA::template Rebind_dimension<D2,D3> LA2;
+ typedef Add_determinant_of_iterator_to_points_from_points<LA2> Other;
+ };
+ template<class P,class=void> struct Property : LA::template Property<P> {};
+ template<class D> struct Property<Has_determinant_of_iterator_to_points_tag, D> :
+ boost::true_type {};
+
+ template<class Iter>
+ static NT determinant_of_iterator_to_points(Iter const&first, Iter const&end){
+ Vector const&a=*first; ++first;
+ Vector const&b=*first; ++first;
+ Vector const&c=*first; ++first;
+ Vector const&d=*first; ++first;
+ Vector const&e=*first; CGAL_assertion(++first==end);
+ return LA::determinant_of_points(a,b,c,d,e);
+ }
+ template<class Iter>
+ static Sign sign_of_determinant_of_iterator_to_points(Iter const&first, Iter const&end){
+ Vector const&a=*first; ++first;
+ Vector const&b=*first; ++first;
+ Vector const&c=*first; ++first;
+ Vector const&d=*first; ++first;
+ Vector const&e=*first; CGAL_assertion(++first==end);
+ return LA::sign_of_determinant_of_points(a,b,c,d,e);
+ }
+};
+
+template <class LA, class Max_dim_>
+struct Add_determinant_of_iterator_to_points_from_points
+<LA, Dimension_tag<5>, Max_dim_, false, true> : LA {
+ typedef typename LA::NT NT;
+ typedef typename LA::Vector Vector;
+ template< class D2, class D3=D2 >
+ struct Rebind_dimension {
+ typedef typename LA::template Rebind_dimension<D2,D3> LA2;
+ typedef Add_determinant_of_iterator_to_points_from_points<LA2> Other;
+ };
+ template<class P,class=void> struct Property : LA::template Property<P> {};
+ template<class D> struct Property<Has_determinant_of_iterator_to_points_tag, D> :
+ boost::true_type {};
+
+ template<class Iter>
+ static NT determinant_of_iterator_to_points(Iter const&first, Iter const&end){
+ Vector const&a=*first; ++first;
+ Vector const&b=*first; ++first;
+ Vector const&c=*first; ++first;
+ Vector const&d=*first; ++first;
+ Vector const&e=*first; ++first;
+ Vector const&f=*first; CGAL_assertion(++first==end);
+ return LA::determinant_of_points(a,b,c,d,e,f);
+ }
+ template<class Iter>
+ static Sign sign_of_determinant_of_iterator_to_points(Iter const&first, Iter const&end){
+ Vector const&a=*first; ++first;
+ Vector const&b=*first; ++first;
+ Vector const&c=*first; ++first;
+ Vector const&d=*first; ++first;
+ Vector const&e=*first; ++first;
+ Vector const&f=*first; CGAL_assertion(++first==end);
+ return LA::sign_of_determinant_of_points(a,b,c,d,e,f);
+ }
+};
+
+template <class LA, class Max_dim_>
+struct Add_determinant_of_iterator_to_points_from_points
+<LA, Dimension_tag<6>, Max_dim_, false, true> : LA {
+ typedef typename LA::NT NT;
+ typedef typename LA::Vector Vector;
+ template< class D2, class D3=D2 >
+ struct Rebind_dimension {
+ typedef typename LA::template Rebind_dimension<D2,D3> LA2;
+ typedef Add_determinant_of_iterator_to_points_from_points<LA2> Other;
+ };
+ template<class P,class=void> struct Property : LA::template Property<P> {};
+ template<class D> struct Property<Has_determinant_of_iterator_to_points_tag, D> :
+ boost::true_type {};
+
+ template<class Iter>
+ static NT determinant_of_iterator_to_points(Iter const&first, Iter const&end){
+ Vector const&a=*first; ++first;
+ Vector const&b=*first; ++first;
+ Vector const&c=*first; ++first;
+ Vector const&d=*first; ++first;
+ Vector const&e=*first; ++first;
+ Vector const&f=*first; ++first;
+ Vector const&g=*first; CGAL_assertion(++first==end);
+ return LA::determinant_of_points(a,b,c,d,e,f,g);
+ }
+ template<class Iter>
+ static Sign sign_of_determinant_of_iterator_to_points(Iter const&first, Iter const&end){
+ Vector const&a=*first; ++first;
+ Vector const&b=*first; ++first;
+ Vector const&c=*first; ++first;
+ Vector const&d=*first; ++first;
+ Vector const&e=*first; ++first;
+ Vector const&f=*first; ++first;
+ Vector const&g=*first; CGAL_assertion(++first==end);
+ return LA::sign_of_determinant_of_points(a,b,c,d,e,f,g);
+ }
+};
+
+}
+#endif
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_iterator_to_vectors_from_vectors.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_iterator_to_vectors_from_vectors.h
new file mode 100644
index 00000000..f096d6c7
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_iterator_to_vectors_from_vectors.h
@@ -0,0 +1,201 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_VECTOR_DET_ITER_VEC_VEC_H
+#define CGAL_VECTOR_DET_ITER_VEC_VEC_H
+#include <CGAL/NewKernel_d/functor_tags.h>
+#include <CGAL/Dimension.h>
+
+namespace CGAL {
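+// Unrolls an iterator range of d vectors into the d arguments of
+// determinant_of_vectors, for each small fixed dimension d handled below.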
+
+template <class LA, class Dim_=typename LA::Dimension,
+ class Max_dim_=typename LA::Max_dimension,
+ bool = LA::template Property<Has_determinant_of_iterator_to_vectors_tag>::value,
+ bool = LA::template Property<Has_determinant_of_vectors_tag>::value>
+struct Add_determinant_of_iterator_to_vectors_from_vectors : LA {
+ template< class D2, class D3=D2 >
+ struct Rebind_dimension {
+ typedef typename LA::template Rebind_dimension<D2,D3> LA2;
+ typedef Add_determinant_of_iterator_to_vectors_from_vectors<LA2> Other;
+ };
+};
+
+//FIXME: Use variadics and boost so it works in any dimension.
+template <class LA, class Max_dim_>
+struct Add_determinant_of_iterator_to_vectors_from_vectors
+<LA, Dimension_tag<2>, Max_dim_, false, true> : LA {
+ typedef typename LA::NT NT;
+ typedef typename LA::Vector Vector;
+ template< class D2, class D3=D2 >
+ struct Rebind_dimension {
+ typedef typename LA::template Rebind_dimension<D2,D3> LA2;
+ typedef Add_determinant_of_iterator_to_vectors_from_vectors<LA2> Other;
+ };
+ template<class P,class=void> struct Property : LA::template Property<P> {};
+ template<class D> struct Property<Has_determinant_of_iterator_to_vectors_tag, D> :
+ boost::true_type {};
+
+ template<class Iter>
+ static NT determinant_of_iterator_to_vectors(Iter const&first, Iter const&end){
+ Vector const&a=*first; ++first;
+ Vector const&b=*first; CGAL_assertion(++first==end);
+ return LA::determinant_of_vectors(a,b);
+ }
+ template<class Iter>
+ static Sign sign_of_determinant_of_iterator_to_vectors(Iter const&first, Iter const&end){
+ Vector const&a=*first; ++first;
+ Vector const&b=*first; CGAL_assertion(++first==end);
+ return LA::sign_of_determinant_of_vectors(a,b);
+ }
+};
+
+template <class LA, class Max_dim_>
+struct Add_determinant_of_iterator_to_vectors_from_vectors
+<LA, Dimension_tag<3>, Max_dim_, false, true> : LA {
+ typedef typename LA::NT NT;
+ typedef typename LA::Vector Vector;
+ template< class D2, class D3=D2 >
+ struct Rebind_dimension {
+ typedef typename LA::template Rebind_dimension<D2,D3> LA2;
+ typedef Add_determinant_of_iterator_to_vectors_from_vectors<LA2> Other;
+ };
+ template<class P,class=void> struct Property : LA::template Property<P> {};
+ template<class D> struct Property<Has_determinant_of_iterator_to_vectors_tag, D> :
+ boost::true_type {};
+
+ template<class Iter>
+ static NT determinant_of_iterator_to_vectors(Iter const&first, Iter const&end){
+ Vector const&a=*first; ++first;
+ Vector const&b=*first; ++first;
+ Vector const&c=*first; CGAL_assertion(++first==end);
+ return LA::determinant_of_vectors(a,b,c);
+ }
+ template<class Iter>
+ static Sign sign_of_determinant_of_iterator_to_vectors(Iter const&first, Iter const&end){
+ Vector const&a=*first; ++first;
+ Vector const&b=*first; ++first;
+ Vector const&c=*first; CGAL_assertion(++first==end);
+ return LA::sign_of_determinant_of_vectors(a,b,c);
+ }
+};
+
+template <class LA, class Max_dim_>
+struct Add_determinant_of_iterator_to_vectors_from_vectors
+<LA, Dimension_tag<4>, Max_dim_, false, true> : LA {
+ typedef typename LA::NT NT;
+ typedef typename LA::Vector Vector;
+ template< class D2, class D3=D2 >
+ struct Rebind_dimension {
+ typedef typename LA::template Rebind_dimension<D2,D3> LA2;
+ typedef Add_determinant_of_iterator_to_vectors_from_vectors<LA2> Other;
+ };
+ template<class P,class=void> struct Property : LA::template Property<P> {};
+ template<class D> struct Property<Has_determinant_of_iterator_to_vectors_tag, D> :
+ boost::true_type {};
+
+ template<class Iter>
+ static NT determinant_of_iterator_to_vectors(Iter const&first, Iter const&end){
+ Vector const&a=*first; ++first;
+ Vector const&b=*first; ++first;
+ Vector const&c=*first; ++first;
+ Vector const&d=*first; CGAL_assertion(++first==end);
+ return LA::determinant_of_vectors(a,b,c,d);
+ }
+ template<class Iter>
+ static Sign sign_of_determinant_of_iterator_to_vectors(Iter const&first, Iter const&end){
+ Vector const&a=*first; ++first;
+ Vector const&b=*first; ++first;
+ Vector const&c=*first; ++first;
+ Vector const&d=*first; CGAL_assertion(++first==end);
+ return LA::sign_of_determinant_of_vectors(a,b,c,d);
+ }
+};
+
+template <class LA, class Max_dim_>
+struct Add_determinant_of_iterator_to_vectors_from_vectors
+<LA, Dimension_tag<5>, Max_dim_, false, true> : LA {
+ typedef typename LA::NT NT;
+ typedef typename LA::Vector Vector;
+ template< class D2, class D3=D2 >
+ struct Rebind_dimension {
+ typedef typename LA::template Rebind_dimension<D2,D3> LA2;
+ typedef Add_determinant_of_iterator_to_vectors_from_vectors<LA2> Other;
+ };
+ template<class P,class=void> struct Property : LA::template Property<P> {};
+ template<class D> struct Property<Has_determinant_of_iterator_to_vectors_tag, D> :
+ boost::true_type {};
+
+ template<class Iter>
+ static NT determinant_of_iterator_to_vectors(Iter const&first, Iter const&end){
+ Vector const&a=*first; ++first;
+ Vector const&b=*first; ++first;
+ Vector const&c=*first; ++first;
+ Vector const&d=*first; ++first;
+ Vector const&e=*first; CGAL_assertion(++first==end);
+ return LA::determinant_of_vectors(a,b,c,d,e);
+ }
+ template<class Iter>
+ static Sign sign_of_determinant_of_iterator_to_vectors(Iter const&first, Iter const&end){
+ Vector const&a=*first; ++first;
+ Vector const&b=*first; ++first;
+ Vector const&c=*first; ++first;
+ Vector const&d=*first; ++first;
+ Vector const&e=*first; CGAL_assertion(++first==end);
+ return LA::sign_of_determinant_of_vectors(a,b,c,d,e);
+ }
+};
+
+template <class LA, class Max_dim_>
+struct Add_determinant_of_iterator_to_vectors_from_vectors
+<LA, Dimension_tag<6>, Max_dim_, false, true> : LA {
+ typedef typename LA::NT NT;
+ typedef typename LA::Vector Vector;
+ template< class D2, class D3=D2 >
+ struct Rebind_dimension {
+ typedef typename LA::template Rebind_dimension<D2,D3> LA2;
+ typedef Add_determinant_of_iterator_to_vectors_from_vectors<LA2> Other;
+ };
+ template<class P,class=void> struct Property : LA::template Property<P> {};
+ template<class D> struct Property<Has_determinant_of_iterator_to_vectors_tag, D> :
+ boost::true_type {};
+
+ template<class Iter>
+ static NT determinant_of_iterator_to_vectors(Iter const&first, Iter const&end){
+ Vector const&a=*first; ++first;
+ Vector const&b=*first; ++first;
+ Vector const&c=*first; ++first;
+ Vector const&d=*first; ++first;
+ Vector const&e=*first; ++first;
+ Vector const&f=*first; CGAL_assertion(++first==end);
+ return LA::determinant_of_vectors(a,b,c,d,e,f);
+ }
+ template<class Iter>
+ static Sign sign_of_determinant_of_iterator_to_vectors(Iter const&first, Iter const&end){
+ Vector const&a=*first; ++first;
+ Vector const&b=*first; ++first;
+ Vector const&c=*first; ++first;
+ Vector const&d=*first; ++first;
+ Vector const&e=*first; ++first;
+ Vector const&f=*first; CGAL_assertion(++first==end);
+ return LA::sign_of_determinant_of_vectors(a,b,c,d,e,f);
+ }
+};
+
+}
+#endif
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_points_from_vectors.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_points_from_vectors.h
new file mode 100644
index 00000000..7ddb73c3
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_points_from_vectors.h
@@ -0,0 +1,164 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_VECTOR_DETPTS_H
+#define CGAL_VECTOR_DETPTS_H
+#include <CGAL/NewKernel_d/functor_tags.h>
+#include <CGAL/Dimension.h>
+
+namespace CGAL {
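+// Expresses det(p_0, ..., p_d) as det(p_1 - p_0, ..., p_d - p_0), reusing the
+// vector determinant and vector subtraction, for each small fixed dimension.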
+
+template <class LA, class Dim_=typename LA::Dimension,
+ class Max_dim_=typename LA::Max_dimension,
+ bool = LA::template Property<Has_determinant_of_points_tag>::value,
+ bool = LA::template Property<Has_determinant_of_vectors_tag>::value
+ && LA::template Property<Has_vector_plus_minus_tag>::value>
+struct Add_determinant_of_points_from_vectors_and_minus : LA {
+ template< class D2, class D3=D2 >
+ struct Rebind_dimension {
+ typedef typename LA::template Rebind_dimension<D2,D3> LA2;
+ typedef Add_determinant_of_points_from_vectors_and_minus<LA2> Other;
+ };
+};
+
+//FIXME: Use variadics and boost so it works in any dimension.
+template <class LA, class Max_dim_>
+struct Add_determinant_of_points_from_vectors_and_minus
+<LA, Dimension_tag<2>, Max_dim_, false, true> : LA {
+ typedef typename LA::NT NT;
+ typedef typename LA::Vector Vector;
+ template< class D2, class D3=D2 >
+ struct Rebind_dimension {
+ typedef typename LA::template Rebind_dimension<D2,D3> LA2;
+ typedef Add_determinant_of_points_from_vectors_and_minus<LA2> Other;
+ };
+ template<class P,class=void> struct Property : LA::template Property<P> {};
+ template<class D> struct Property<Has_determinant_of_points_tag, D> :
+ boost::true_type {};
+
+ static NT determinant_of_points(Vector const&a, Vector const&b,
+ Vector const&c){
+ return LA::determinant_of_vectors(b-a,c-a);
+ }
+ static Sign sign_of_determinant_of_points(Vector const&a, Vector const&b,
+ Vector const&c){
+ return LA::sign_of_determinant_of_vectors(b-a,c-a);
+ }
+};
+
+template <class LA, class Max_dim_>
+struct Add_determinant_of_points_from_vectors_and_minus
+<LA, Dimension_tag<3>, Max_dim_, false, true> : LA {
+ typedef typename LA::NT NT;
+ typedef typename LA::Vector Vector;
+ template< class D2, class D3=D2 >
+ struct Rebind_dimension {
+ typedef typename LA::template Rebind_dimension<D2,D3> LA2;
+ typedef Add_determinant_of_points_from_vectors_and_minus<LA2> Other;
+ };
+ template<class P,class=void> struct Property : LA::template Property<P> {};
+ template<class D> struct Property<Has_determinant_of_points_tag, D> :
+ boost::true_type {};
+
+ static NT determinant_of_points(Vector const&a, Vector const&b,
+ Vector const&c, Vector const&d){
+ return LA::determinant_of_vectors(b-a,c-a,d-a);
+ }
+ static Sign sign_of_determinant_of_points(Vector const&a, Vector const&b,
+ Vector const&c, Vector const&d){
+ return LA::sign_of_determinant_of_vectors(b-a,c-a,d-a);
+ }
+};
+
+template <class LA, class Max_dim_>
+struct Add_determinant_of_points_from_vectors_and_minus
+<LA, Dimension_tag<4>, Max_dim_, false, true> : LA {
+ typedef typename LA::NT NT;
+ typedef typename LA::Vector Vector;
+ template< class D2, class D3=D2 >
+ struct Rebind_dimension {
+ typedef typename LA::template Rebind_dimension<D2,D3> LA2;
+ typedef Add_determinant_of_points_from_vectors_and_minus<LA2> Other;
+ };
+ template<class P,class=void> struct Property : LA::template Property<P> {};
+ template<class D> struct Property<Has_determinant_of_points_tag, D> :
+ boost::true_type {};
+
+ static NT determinant_of_points(Vector const&a, Vector const&b,
+ Vector const&c, Vector const&d, Vector const&e){
+ return LA::determinant_of_vectors(b-a,c-a,d-a,e-a);
+ }
+ static Sign sign_of_determinant_of_points(Vector const&a, Vector const&b,
+ Vector const&c, Vector const&d, Vector const&e){
+ return LA::sign_of_determinant_of_vectors(b-a,c-a,d-a,e-a);
+ }
+};
+
+template <class LA, class Max_dim_>
+struct Add_determinant_of_points_from_vectors_and_minus
+<LA, Dimension_tag<5>, Max_dim_, false, true> : LA {
+ typedef typename LA::NT NT;
+ typedef typename LA::Vector Vector;
+ template< class D2, class D3=D2 >
+ struct Rebind_dimension {
+ typedef typename LA::template Rebind_dimension<D2,D3> LA2;
+ typedef Add_determinant_of_points_from_vectors_and_minus<LA2> Other;
+ };
+ template<class P,class=void> struct Property : LA::template Property<P> {};
+ template<class D> struct Property<Has_determinant_of_points_tag, D> :
+ boost::true_type {};
+
+ static NT determinant_of_points(Vector const&a, Vector const&b,
+ Vector const&c, Vector const&d, Vector const&e, Vector const&f){
+ return LA::determinant_of_vectors(b-a,c-a,d-a,e-a,f-a);
+ }
+ static Sign sign_of_determinant_of_points(Vector const&a, Vector const&b,
+ Vector const&c, Vector const&d, Vector const&e, Vector const&f){
+ return LA::sign_of_determinant_of_vectors(b-a,c-a,d-a,e-a,f-a);
+ }
+};
+
+template <class LA, class Max_dim_>
+struct Add_determinant_of_points_from_vectors_and_minus
+<LA, Dimension_tag<6>, Max_dim_, false, true> : LA {
+ typedef typename LA::NT NT;
+ typedef typename LA::Vector Vector;
+ template< class D2, class D3=D2 >
+ struct Rebind_dimension {
+ typedef typename LA::template Rebind_dimension<D2,D3> LA2;
+ typedef Add_determinant_of_points_from_vectors_and_minus<LA2> Other;
+ };
+ template<class P,class=void> struct Property : LA::template Property<P> {};
+ template<class D> struct Property<Has_determinant_of_points_tag, D> :
+ boost::true_type {};
+
+ static NT determinant_of_points(Vector const&a, Vector const&b,
+ Vector const&c, Vector const&d, Vector const&e, Vector const&f,
+ Vector const&g){
+ return LA::determinant_of_vectors(b-a,c-a,d-a,e-a,f-a,g-a);
+ }
+ static Sign sign_of_determinant_of_points(Vector const&a, Vector const&b,
+ Vector const&c, Vector const&d, Vector const&e, Vector const&f,
+ Vector const&g){
+ return LA::sign_of_determinant_of_vectors(b-a,c-a,d-a,e-a,f-a,g-a);
+ }
+};
+
+}
+#endif
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_vectors_small_dim.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_vectors_small_dim.h
new file mode 100644
index 00000000..64eafe69
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_vectors_small_dim.h
@@ -0,0 +1,58 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_VECTOR_DETVEC_SMALL_H
+#define CGAL_VECTOR_DETVEC_SMALL_H
+#include <CGAL/NewKernel_d/functor_tags.h>
+#include <CGAL/Dimension.h>
+#include <CGAL/determinant_of_vectors.h>
+
+#define CGAL_ALLOWED_INCLUSION 1
+
+#define CGAL_CLASS Add_determinant_of_vectors_small_dim
+#define CGAL_TAG Has_determinant_of_vectors_tag
+#define CGAL_FUNC determinant_of_vectors
+#define CGAL_SIGN_FUNC sign_of_determinant_of_vectors
+#define CGAL_SHIFT 0
+
+#include <CGAL/NewKernel_d/Vector/determinant_of_vectors_small_dim_internal.h>
+
+#undef CGAL_CLASS
+#undef CGAL_TAG
+#undef CGAL_FUNC
+#undef CGAL_SIGN_FUNC
+#undef CGAL_SHIFT
+
+#define CGAL_CLASS Add_determinant_of_vectors_omit_last_small_dim
+#define CGAL_TAG Has_determinant_of_vectors_omit_last_tag
+#define CGAL_FUNC determinant_of_vectors_omit_last
+#define CGAL_SIGN_FUNC sign_of_determinant_of_vectors_omit_last
+#define CGAL_SHIFT 1
+
+#include <CGAL/NewKernel_d/Vector/determinant_of_vectors_small_dim_internal.h>
+
+#undef CGAL_CLASS
+#undef CGAL_TAG
+#undef CGAL_FUNC
+#undef CGAL_SIGN_FUNC
+#undef CGAL_SHIFT
+
+#undef CGAL_ALLOWED_INCLUSION
+
+#endif
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_vectors_small_dim_internal.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_vectors_small_dim_internal.h
new file mode 100644
index 00000000..b4856742
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_vectors_small_dim_internal.h
@@ -0,0 +1,164 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_ALLOWED_INCLUSION
+#error Must not include this header directly
+#endif
+#if !defined(CGAL_TAG) \
+ || ! defined(CGAL_CLASS) \
+ || ! defined(CGAL_FUNC) \
+ || ! defined(CGAL_SIGN_FUNC) \
+ || ! defined(CGAL_SHIFT)
+
+#error Forgot one macro
+#endif
+
+namespace CGAL {
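+// This header is included twice by determinant_of_vectors_small_dim.h: once
+// with CGAL_SHIFT == 0 for the plain determinants and once with
+// CGAL_SHIFT == 1 for the omit-last variants.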
+
+template <class LA, class Dim_=typename LA::Dimension,
+ class Max_dim_=typename LA::Max_dimension,
+ bool=LA::template Property<CGAL_TAG>::value>
+struct CGAL_CLASS : LA {
+ template< class D2, class D3=D2 >
+ struct Rebind_dimension {
+ typedef typename LA::template Rebind_dimension<D2,D3> LA2;
+ typedef CGAL_CLASS<LA2> Other;
+ };
+};
+
+template <class LA, class Max_dim_>
+struct CGAL_CLASS
+<LA, Dimension_tag<2+CGAL_SHIFT>, Max_dim_, false> : LA {
+ typedef typename LA::NT NT;
+ typedef typename LA::Vector Vector;
+ template< class D2, class D3=D2 >
+ struct Rebind_dimension {
+ typedef typename LA::template Rebind_dimension<D2,D3> LA2;
+ typedef CGAL_CLASS<LA2> Other;
+ };
+ template<class P,class=void> struct Property : LA::template Property<P> {};
+ template<class D> struct Property<CGAL_TAG, D> :
+ boost::true_type {};
+
+ static NT CGAL_FUNC(Vector const&a, Vector const&b){
+ return CGAL::determinant_of_vectors<NT>(a,b);
+ }
+ template <class V1, class V2>
+ static Sign CGAL_SIGN_FUNC(V1 const&a, V2 const&b){
+ return CGAL::sign_of_determinant_of_vectors<NT>(a,b);
+ }
+};
+
+template <class LA, class Max_dim_>
+struct CGAL_CLASS
+<LA, Dimension_tag<3+CGAL_SHIFT>, Max_dim_, false> : LA {
+ typedef typename LA::NT NT;
+ typedef typename LA::Vector Vector;
+ template< class D2, class D3=D2 >
+ struct Rebind_dimension {
+ typedef typename LA::template Rebind_dimension<D2,D3> LA2;
+ typedef CGAL_CLASS<LA2> Other;
+ };
+ template<class P,class=void> struct Property : LA::template Property<P> {};
+ template<class D> struct Property<CGAL_TAG, D> :
+ boost::true_type {};
+
+ static NT CGAL_FUNC(Vector const&a, Vector const&b,
+ Vector const&c){
+ return CGAL::determinant_of_vectors<NT>(a,b,c);
+ }
+ static Sign CGAL_SIGN_FUNC(Vector const&a, Vector const&b,
+ Vector const&c){
+ return CGAL::sign_of_determinant_of_vectors<NT>(a,b,c);
+ }
+};
+
+template <class LA, class Max_dim_>
+struct CGAL_CLASS
+<LA, Dimension_tag<4+CGAL_SHIFT>, Max_dim_, false> : LA {
+ typedef typename LA::NT NT;
+ typedef typename LA::Vector Vector;
+ template< class D2, class D3=D2 >
+ struct Rebind_dimension {
+ typedef typename LA::template Rebind_dimension<D2,D3> LA2;
+ typedef CGAL_CLASS<LA2> Other;
+ };
+ template<class P,class=void> struct Property : LA::template Property<P> {};
+ template<class D> struct Property<CGAL_TAG, D> :
+ boost::true_type {};
+
+ static NT CGAL_FUNC(Vector const&a, Vector const&b,
+ Vector const&c, Vector const&d){
+ return CGAL::determinant_of_vectors<NT>(a,b,c,d);
+ }
+ static Sign CGAL_SIGN_FUNC(Vector const&a, Vector const&b,
+ Vector const&c, Vector const&d){
+ return CGAL::sign_of_determinant_of_vectors<NT>(a,b,c,d);
+ }
+};
+
+template <class LA, class Max_dim_>
+struct CGAL_CLASS
+<LA, Dimension_tag<5+CGAL_SHIFT>, Max_dim_, false> : LA {
+ typedef typename LA::NT NT;
+ typedef typename LA::Vector Vector;
+ template< class D2, class D3=D2 >
+ struct Rebind_dimension {
+ typedef typename LA::template Rebind_dimension<D2,D3> LA2;
+ typedef CGAL_CLASS<LA2> Other;
+ };
+ template<class P,class=void> struct Property : LA::template Property<P> {};
+ template<class D> struct Property<CGAL_TAG, D> :
+ boost::true_type {};
+
+ static NT CGAL_FUNC(Vector const&a, Vector const&b,
+ Vector const&c, Vector const&d, Vector const&e){
+ return CGAL::determinant_of_vectors<NT>(a,b,c,d,e);
+ }
+ static Sign CGAL_SIGN_FUNC(Vector const&a, Vector const&b,
+ Vector const&c, Vector const&d, Vector const&e){
+ return CGAL::sign_of_determinant_of_vectors<NT>(a,b,c,d,e);
+ }
+};
+
+template <class LA, class Max_dim_>
+struct CGAL_CLASS
+<LA, Dimension_tag<6+CGAL_SHIFT>, Max_dim_, false> : LA {
+ typedef typename LA::NT NT;
+ typedef typename LA::Vector Vector;
+ template< class D2, class D3=D2 >
+ struct Rebind_dimension {
+ typedef typename LA::template Rebind_dimension<D2,D3> LA2;
+ typedef CGAL_CLASS<LA2> Other;
+ };
+ template<class P,class=void> struct Property : LA::template Property<P> {};
+ template<class D> struct Property<CGAL_TAG, D> :
+ boost::true_type {};
+
+ static NT CGAL_FUNC(Vector const&a, Vector const&b,
+ Vector const&c, Vector const&d, Vector const&e, Vector const&f){
+ return CGAL::determinant_of_vectors<NT>(a,b,c,d,e,f);
+ }
+ static Sign CGAL_SIGN_FUNC(Vector const&a, Vector const&b,
+ Vector const&c, Vector const&d, Vector const&e, Vector const&f){
+ return CGAL::sign_of_determinant_of_vectors<NT>(a,b,c,d,e,f);
+ }
+};
+
+}
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/mix.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/mix.h
new file mode 100644
index 00000000..d4cfeeb1
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/mix.h
@@ -0,0 +1,46 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_KD_MIX_VECTOR_H
+#define CGAL_KD_MIX_VECTOR_H
+#include <CGAL/Dimension.h>
+namespace CGAL {
+
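+// Mix_vector dispatches between two vector back-ends: the Static_ one when the
+// dimension is a compile-time Dimension_tag<d>, the Dynamic_ one otherwise.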
+template <class Static_, class Dynamic_, class NT_ ,class Dim_, class Max_dim_ = Dim_>
+struct Mix_vector
+: Dynamic_::template Rebind_dimension<Dim_, Max_dim_>::Other
+{
+ template <class D2, class D3 = D2>
+ struct Rebind_dimension {
+ typedef Mix_vector<Static_, Dynamic_, NT_, D2, D3> Other;
+ };
+};
+
+template <class Static_, class Dynamic_, class NT_, int d, class Max_dim_>
+struct Mix_vector<Static_, Dynamic_, NT_, Dimension_tag<d>, Max_dim_>
+: Static_::template Rebind_dimension<Dimension_tag<d>, Max_dim_>::Other
+{
+ template <class D2, class D3 = D2>
+ struct Rebind_dimension {
+ typedef Mix_vector<Static_, Dynamic_, NT_, D2, D3> Other;
+ };
+};
+}
+#endif
+
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/sse2.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/sse2.h
new file mode 100644
index 00000000..2a75385c
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/sse2.h
@@ -0,0 +1,145 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_VECTOR_SSE2_H
+#define CGAL_VECTOR_SSE2_H
+
+// Check what needs adapting for clang, intel and microsoft
+#if !defined __SSE2__ || (__GNUC__ * 100 + __GNUC_MINOR__ < 408)
+#error Requires SSE2 and gcc 4.8+
+#endif
+#include <x86intrin.h> // FIXME: other platforms call it differently
+
+#include <CGAL/NewKernel_d/functor_tags.h>
+#include <CGAL/Dimension.h>
+#include <CGAL/enum.h> // CGAL::Sign
+#include <CGAL/number_utils.h> // CGAL::sign
+
+
+
+namespace CGAL {
+
+ struct Sse_vector_2 {
+ typedef double NT;
+ typedef Dimension_tag<2> Dimension;
+ typedef Dimension_tag<2> Max_dimension;
+ // No Rebind_dimension, this is a building block
+ template<class,bool=true> struct Property : boost::false_type {};
+ template<bool b> struct Property<Has_vector_plus_minus_tag,b>
+ : boost::true_type {};
+ /* MAYBE?
+ template<bool b> struct Property<Has_vector_scalar_ops_tag,b>
+ : boost::true_type {};
+ */
+ template<bool b> struct Property<Has_determinant_of_vectors_tag,b>
+ : boost::true_type {};
+ template<bool b> struct Property<Has_dot_product_tag,b>
+ : boost::true_type {};
+
+ typedef __m128d Vector;
+ struct Construct_vector {
+ struct Dimension {
+ // Initialize with NaN?
+ Vector operator()(unsigned d) const {
+ CGAL_assertion(d==2);
+ return Vector();
+ }
+ };
+
+ struct Iterator {
+ template<typename Iter>
+ Vector operator()(unsigned d,Iter const& f,Iter const& e) const {
+ CGAL_assertion(d==2);
+ double x0 = *f;
+ double x1 = *++f;
+ CGAL_assertion(++f==e);
+ Vector a = { x0, x1 };
+ return a;
+ }
+ };
+
+ struct Iterator_and_last {
+ template<typename Iter,typename T>
+ Vector operator()(unsigned d,Iter const& f,Iter const& e,double t) const {
+ CGAL_assertion(d==2);
+ Vector a = { *f, t };
+ CGAL_assertion(++f==e);
+ return a;
+ }
+ };
+
+ struct Values {
+ Vector operator()(double a,double b) const {
+ Vector r = { a, b };
+ return r;
+ }
+ };
+
+ struct Values_divide {
+ Vector operator()(double h,double a,double b) const {
+ // {a,b}/{h,h} is probably slower
+ Vector r = { a/h, b/h };
+ return r;
+ }
+ };
+ };
+
+ typedef double const* Vector_const_iterator;
+ static inline Vector_const_iterator vector_begin(Vector const&a){
+ return (Vector_const_iterator)(&a);
+ }
+ static inline Vector_const_iterator vector_end(Vector const&a){
+ return (Vector_const_iterator)(&a)+2;
+ }
+ static inline unsigned size_of_vector(Vector){
+ return 2;
+ }
+ public:
+
+ static double determinant_of_vectors(Vector a, Vector b) {
+ __m128d c = _mm_shuffle_pd (b, b, 1); // b1, b0
+ __m128d d = a * c; // a0*b1, a1*b0
+#ifdef __SSE3__
+ __m128d e = _mm_hsub_pd (d, d);
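+    // e = {d0 - d1, d0 - d1} = {a0*b1 - a1*b0, a0*b1 - a1*b0}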
+ return e[0];
+#else
+ return d[0]-d[1];
+#endif
+ }
+ static CGAL::Sign sign_of_determinant_of_vectors(Vector a, Vector b) {
+ return CGAL::sign(determinant_of_vectors(a,b));
+ }
+
+ static double dot_product(Vector a,Vector b){
+#ifdef __SSE4_1__
+ return _mm_dp_pd (a, b, 1+16+32)[0];
+#else
+ __m128d p = a * b;
+#if defined __SSE3__
+ __m128d s = _mm_hadd_pd (p, p);
+ return s[0];
+#else
+ return p[0]+p[1];
+#endif
+#endif
+  }
+ };
+
+}
+#endif
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/v2int.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/v2int.h
new file mode 100644
index 00000000..b85a3734
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/v2int.h
@@ -0,0 +1,181 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_VECTOR_2INT_H
+#define CGAL_VECTOR_2INT_H
+
+#include <stdint.h>
+#include <cmath>
+#include <CGAL/array.h>
+#include <CGAL/Dimension.h>
+#include <CGAL/enum.h>
+#include <CGAL/number_utils.h>
+#include <CGAL/NT_converter.h>
+#include <CGAL/transforming_iterator.h>
+#include <CGAL/determinant_of_vectors.h>
+#include <CGAL/NewKernel_d/functor_tags.h>
+
+
+// What are the pros and cons of having NT be int vs double?
+
+namespace CGAL {
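+  // Each property class below fixes the exposed number type NT, the storage
+  // type NT1, and the wider types NT1b/NT2/NT2b, chosen so that the differences
+  // and products in the 2x2 determinants further down stay within range.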
+ struct Vector_2_int_prop1 {
+ typedef double NT; // try lying a bit
+ typedef int32_t NT1; // what is really stored
+ typedef int32_t NT1b; // slightly longer
+ typedef int_fast64_t NT2; // longer type for computations
+ typedef int_fast64_t NT2b; // slightly longer
+ bool check_limits(int32_t x){return std::abs(x)<(1<<30);}
+ // TODO: find nice bounds
+ };
+#ifdef __SIZEOF_INT128__
+ struct Vector_2_int_prop2 {
+ typedef double NT;
+ typedef int32_t NT1;
+ typedef int_fast64_t NT1b;
+ typedef int_fast64_t NT2;
+ typedef __int128 NT2b;
+ bool check_limits(int32_t){return true;}
+ // take a template/int64_t input and still check the limits?
+ };
+ struct Vector_2_int_prop3 {
+ typedef long double NT;
+ typedef int64_t NT1;
+ typedef int64_t NT1b;
+ typedef __int128 NT2;
+ typedef __int128 NT2b;
+ enum { has_limit=true };
+ bool check_limits(int32_t x){return std::abs(x)<(1L<<62);}
+ // TODO: find nice bounds
+ };
+#endif
+
+ template<class Prop=Vector_2_int_prop1>
+ struct Vector_2_int : Prop {
+ using typename Prop::NT;
+ using typename Prop::NT1;
+ using typename Prop::NT1b;
+ using typename Prop::NT2;
+ using typename Prop::NT2b;
+ using Prop::check_limits;
+
+ typedef Dimension_tag<2> Dimension;
+ typedef Dimension_tag<2> Max_dimension;
+ // No Rebind_dimension, this is a building block
+ template<class,bool=true> struct Property : boost::false_type {};
+ //template<bool b> struct Property<Has_vector_plus_minus_tag,b>
+ // : boost::true_type {};
+ template<bool b> struct Property<Has_determinant_of_vectors_tag,b>
+ : boost::true_type {};
+ //template<bool b> struct Property<Has_determinant_of_points_tag,b>
+ // : boost::true_type {};
+ // Advertise somehow that the sign_of_determinant* are exact?
+
+ typedef cpp0x::array<NT1,2> Vector;
+ struct Construct_vector {
+ struct Dimension {
+ Vector operator()(unsigned d) const {
+ CGAL_assertion(d==2);
+ return Vector();
+ }
+ };
+
+ // TODO (for all constructors): check that input fits in NT1...
+ struct Iterator {
+ template<typename Iter>
+ Vector operator()(unsigned d,Iter const& f,Iter const& e) const {
+ CGAL_assertion(d==2);
+ NT1 x0 = *f;
+ NT1 x1 = *++f;
+ CGAL_assertion (++f == e);
+ CGAL_assertion (check_limits(x0) && check_limits(x1));
+ Vector a = { x0, x1 };
+ return a;
+ }
+ };
+
+ struct Iterator_and_last {
+ template<typename Iter,typename T>
+ Vector operator()(unsigned d,Iter const& f,Iter const& e,double t) const {
+ CGAL_assertion(d==2);
+ NT1 x = *f;
+ CGAL_assertion (++f == e);
+ CGAL_assertion (check_limits(x) && check_limits(t));
+ Vector a = { x, t };
+ return a;
+ }
+ };
+
+ struct Values {
+ Vector operator()(NT1 a,NT1 b) const {
+ CGAL_assertion (check_limits(a) && check_limits(b));
+ Vector r = { a, b };
+ return r;
+ }
+ };
+
+ /*
+ // Maybe safer not to provide it
+ struct Values_divide {
+ Vector operator()(double h,double a,double b) const {
+ Vector r = { a/h, b/h };
+ return r;
+ }
+ };
+ */
+ };
+
+ // Since we lie about NT, be consistent about it
+ typedef transforming_iterator<NT_converter<NT1,NT>,NT1 const*> Vector_const_iterator;
+ static inline Vector_const_iterator vector_begin(Vector const&a){
+ return Vector_const_iterator(a.begin());
+ }
+ static inline Vector_const_iterator vector_end(Vector const&a){
+ return Vector_const_iterator(a.end());
+ }
+ static inline unsigned size_of_vector(Vector){
+ return 2;
+ }
+
+  // For an unsigned NT1, check what changes would be needed.
+ // return NT or NT2?
+ static NT determinant_of_vectors(Vector a, Vector b) {
+ return CGAL::determinant_of_vectors<NT2>(a,b);
+ }
+ static CGAL::Sign sign_of_determinant_of_vectors(Vector a, Vector b) {
+ return CGAL::sign_of_determinant_of_vectors<NT2>(a,b);
+ }
+
+ static NT determinant_of_points(Vector a, Vector b, Vector c) {
+ // could be faster to convert to NT directly
+ NT1b a0=a[0]; NT1b a1=a[1];
+ NT1b x0=b[0]-a0; NT1b x1=b[1]-a1;
+ NT1b y0=c[0]-a0; NT1b y1=c[1]-a1;
+ return CGAL::determinant<NT>(x0,x1,y0,y1);
+ }
+ static CGAL::Sign sign_of_determinant_of_points(Vector a, Vector b, Vector c) {
+ NT1b a0=a[0]; NT1b a1=a[1];
+ NT1b x0=b[0]-a0; NT1b x1=b[1]-a1;
+ NT2b y0=c[0]-a0; NT2b y1=c[1]-a1;
+ return CGAL::compare(x0*y1,x1*y0);
+ }
+ };
+
+}
+#endif
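
The exactness argument behind sign_of_determinant_of_points above is worth spelling out (a standalone sketch, not part of the patch): with coordinates stored as NT1 and bounded by check_limits, the differences fit in NT1b/NT2b and the two products compared by CGAL::compare cannot overflow. The sketch below uses plain int32_t/int64_t, matching the Vector_2_int_prop1 bound (|coordinate| < 2^30).

    #include <array>
    #include <cstdint>
    // Exact orientation of 2D integer points: |coords| < 2^30 implies
    // |differences| < 2^31 and |products| < 2^62, so int64_t never overflows.
    inline int sign_of_det_of_points(std::array<int32_t, 2> a,
                                     std::array<int32_t, 2> b,
                                     std::array<int32_t, 2> c) {
      int64_t x0 = int64_t(b[0]) - a[0], x1 = int64_t(b[1]) - a[1];
      int64_t y0 = int64_t(c[0]) - a[0], y1 = int64_t(c[1]) - a[1];
      int64_t det = x0 * y1 - x1 * y0;
      return (det > 0) - (det < 0);
    }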
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/vector.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/vector.h
new file mode 100644
index 00000000..f9cc4e3c
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/vector.h
@@ -0,0 +1,167 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_VECTOR_VECTOR_H
+#define CGAL_VECTOR_VECTOR_H
+#include <boost/type_traits/is_arithmetic.hpp>
+#include <boost/utility/enable_if.hpp>
+#include <CGAL/Dimension.h>
+#include <CGAL/NewKernel_d/utils.h>
+#include <vector>
+#include <boost/preprocessor/repetition.hpp>
+#include <boost/preprocessor/repetition/enum.hpp>
+namespace CGAL {
+
+//Derive from a class that doesn't depend on Dim, or still use Dim for checking?
+template<class NT_,class Dim_,class Max_dim_=Dim_> struct Vector_vector {
+ typedef NT_ NT;
+ typedef Dim_ Dimension;
+ typedef Max_dim_ Max_dimension;
+ typedef std::vector<NT> Vector;
+ template< class D2, class D3=D2 >
+ struct Rebind_dimension {
+ typedef Vector_vector< NT, D2, D3 > Other;
+ };
+ template<class> struct Property : boost::false_type {};
+
+ struct Construct_vector {
+ struct Dimension {
+ Vector operator()(int d) const {
+ return Vector(d);
+ }
+ };
+
+ struct Iterator {
+ template<typename Iter>
+ Vector operator()(int CGAL_assertion_code(d),Iter const& f,Iter const& e) const {
+ CGAL_assertion(d==std::distance(f,e));
+ return Vector(f,e);
+ }
+ };
+
+ // unneeded thanks to Iterator_and_last?
+#if 0
+ struct Iterator_add_one {
+ template<typename Iter>
+ Vector operator()(int CGAL_assertion_code(d),Iter const& f,Iter const& e) const {
+ CGAL_assertion(d==std::distance(f,e)+1);
+ Vector a;
+ a.reserve(d+1);
+ a.insert(a.end(),f,e);
+ a.push_back(1);
+ return a;
+ }
+ };
+#endif
+
+ struct Iterator_and_last {
+ template<typename Iter,typename T>
+ Vector operator()(int d,Iter const& f,Iter const& e,CGAL_FORWARDABLE(T) t) const {
+ CGAL_assertion(d==std::distance(f,e)+1);
+ Vector a;
+ a.reserve(d+1);
+ a.insert(a.end(),f,e);
+ a.push_back(CGAL_FORWARD(T,t));
+ return a;
+ }
+ };
+
+ // useless, use a transform_iterator?
+#if 0
+ struct Iterator_and_last_divide {
+ template<typename Iter,typename T>
+ Vector operator()(int d,Iter f,Iter const& e,T const&t) const {
+ CGAL_assertion(d==std::distance(f,e)+1);
+ Vector a;
+ a.reserve(d+1);
+ for(;f!=e;++f){
+ a.push_back(*f/t);
+ }
+ return a;
+ }
+ };
+#endif
+
+ struct Values {
+#ifdef CGAL_CXX11
+ template<class...U>
+ Vector operator()(U&&...u) const {
+ //TODO: check the right number of {}, g++ accepts one and two
+ Vector a={forward_safe<NT,U>(u)...};
+ return a;
+ }
+#else
+
+#define CGAL_VAR(Z,N,_) a.push_back(t##N);
+#define CGAL_CODE(Z,N,_) Vector operator()(BOOST_PP_ENUM_PARAMS(N,NT const& t)) const { \
+ Vector a; \
+ a.reserve(N); \
+ BOOST_PP_REPEAT(N,CGAL_VAR,) \
+ return a; \
+}
+BOOST_PP_REPEAT_FROM_TO(1, 11, CGAL_CODE, _ )
+#undef CGAL_CODE
+#undef CGAL_VAR
+
+#endif
+ };
+
+ struct Values_divide {
+#ifdef CGAL_CXX11
+ template<class H,class...U>
+ Vector operator()(H const&h,U&&...u) const {
+ //TODO: do we want to cast at some point?
+ //e.g. to avoid 1/2 in integers
+ // ==> use Rational_traits<NT>().make_rational(x,y) ?
+ Vector a={Rational_traits<NT>().make_rational(std::forward<U>(u),h)...};
+ return a;
+ }
+#else
+
+#define CGAL_VAR(Z,N,_) a.push_back(Rational_traits<NT>().make_rational( t##N ,h));
+#define CGAL_CODE(Z,N,_) template<class H> Vector \
+ operator()(H const&h, BOOST_PP_ENUM_PARAMS(N,NT const& t)) const { \
+ Vector a; \
+ a.reserve(N); \
+ BOOST_PP_REPEAT(N,CGAL_VAR,) \
+ return a; \
+ }
+ BOOST_PP_REPEAT_FROM_TO(1, 11, CGAL_CODE, _ )
+#undef CGAL_CODE
+#undef CGAL_VAR
+
+#endif
+ };
+ };
+ typedef typename Vector::const_iterator Vector_const_iterator;
+ static Vector_const_iterator vector_begin(Vector const&a){
+ return a.begin();
+ }
+ static Vector_const_iterator vector_end(Vector const&a){
+ return a.end();
+ }
+ static int size_of_vector(Vector const&a){
+ return (int)a.size();
+ }
+};
+
+
+}
+#endif
+
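For readers unfamiliar with the Boost.PP idiom used in Construct_vector::Values and Values_divide above: BOOST_PP_REPEAT_FROM_TO(1, 11, CGAL_CODE, _) generates one operator() overload per arity from 1 to 10, which the C++11 branch replaces with a single variadic template. A hand-written equivalent of the N == 3 expansion, using std::vector<double> in place of Vector (a sketch, not part of the patch):

    #include <vector>
    std::vector<double> values_of_three(double t0, double t1, double t2) {
      std::vector<double> a;
      a.reserve(3);
      a.push_back(t0);  // CGAL_VAR expands to one push_back per argument
      a.push_back(t1);
      a.push_back(t2);
      return a;
    }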
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Cartesian_wrap.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Cartesian_wrap.h
new file mode 100644
index 00000000..44e9aa96
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Cartesian_wrap.h
@@ -0,0 +1,305 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_KERNEL_D_CARTESIAN_WRAP_H
+#define CGAL_KERNEL_D_CARTESIAN_WRAP_H
+
+#include <CGAL/basic.h>
+#include <CGAL/is_iterator.h>
+
+#if defined(BOOST_MSVC)
+# pragma warning(push)
+# pragma warning(disable:4003) // not enough actual parameters for macro 'BOOST_PP_EXPAND_I'
+ // http://lists.boost.org/boost-users/2014/11/83291.php
+#endif
+#include <CGAL/NewKernel_d/Wrapper/Point_d.h>
+#include <CGAL/NewKernel_d/Wrapper/Vector_d.h>
+#include <CGAL/NewKernel_d/Wrapper/Segment_d.h>
+#include <CGAL/NewKernel_d/Wrapper/Sphere_d.h>
+#include <CGAL/NewKernel_d/Wrapper/Hyperplane_d.h>
+#include <CGAL/NewKernel_d/Wrapper/Weighted_point_d.h>
+
+#include <CGAL/NewKernel_d/Wrapper/Ref_count_obj.h>
+
+#include <boost/mpl/or.hpp>
+#include <boost/mpl/contains.hpp>
+#include <boost/mpl/vector.hpp>
+
+//TODO: do we want to store the kernel ref in the Object wrappers? It would allow for additions and operator[] and things like that to work, but objects would still need to be created by functors.
+
+namespace CGAL {
+namespace internal {
+BOOST_MPL_HAS_XXX_TRAIT_DEF(Is_wrapper)
+template<class T,bool=has_Is_wrapper<T>::value> struct Is_wrapper {
+ enum { value=false };
+ typedef Tag_false type;
+};
+template<class T> struct Is_wrapper<T,true> {
+ typedef typename T::Is_wrapper type;
+ enum { value=type::value };
+};
+
+template<class T,bool=is_iterator_type<T,std::input_iterator_tag>::value> struct Is_wrapper_iterator {
+ enum { value=false };
+ typedef Tag_false type;
+};
+template<class T> struct Is_wrapper_iterator<T,true> :
+ Is_wrapper<typename std::iterator_traits<typename CGAL::decay<T>::type>::value_type>
+{ };
+
+struct Forward_rep {
+//TODO: make a good C++0X version with perfect forwarding
+//#ifdef CGAL_CXX11
+//template <class T,class=typename std::enable_if<!Is_wrapper<typename std::decay<T>::type>::value&&!Is_wrapper_iterator<typename std::decay<T>::type>::value>::type>
+//T&& operator()(typename std::remove_reference<T>::type&& t) const {return static_cast<T&&>(t);};
+//template <class T,class=typename std::enable_if<!Is_wrapper<typename std::decay<T>::type>::value&&!Is_wrapper_iterator<typename std::decay<T>::type>::value>::type>
+//T&& operator()(typename std::remove_reference<T>::type& t) const {return static_cast<T&&>(t);};
+//
+//template <class T,class=typename std::enable_if<Is_wrapper<typename std::decay<T>::type>::value>::type>
+//typename Type_copy_cvref<T,typename std::decay<T>::type::Rep>::type&&
+//operator()(T&& t) const {
+// return static_cast<typename Type_copy_cvref<T,typename std::decay<T>::type::Rep>::type&&>(t.rep());
+//};
+//
+//template <class T,class=typename std::enable_if<Is_wrapper_iterator<typename std::decay<T>::type>::value>::type>
+//transforming_iterator<Forward_rep,typename std::decay<T>::type>
+//operator()(T&& t) const {
+// return make_transforming_iterator(std::forward<T>(t),Forward_rep());
+//};
+//#else
+template <class T,bool=Is_wrapper<T>::value,bool=Is_wrapper_iterator<T>::value> struct result_;
+template <class T> struct result_<T,false,false>{typedef T const& type;};
+template <class T> struct result_<T,true,false>{typedef typename decay<T>::type::Rep const& type;};
+template <class T> struct result_<T,false,true>{typedef transforming_iterator<Forward_rep,typename decay<T>::type> type;};
+template<class> struct result;
+template<class T> struct result<Forward_rep(T)> : result_<T> {};
+
+template <class T> typename boost::disable_if<boost::mpl::or_<Is_wrapper<T>,Is_wrapper_iterator<T> >,T>::type const& operator()(T const& t) const {return t;}
+template <class T> typename boost::disable_if<boost::mpl::or_<Is_wrapper<T>,Is_wrapper_iterator<T> >,T>::type& operator()(T& t) const {return t;}
+
+template <class T> typename T::Rep const& operator()(T const& t, typename boost::enable_if<Is_wrapper<T> >::type* = 0) const {return t.rep();}
+
+template <class T> transforming_iterator<Forward_rep,typename boost::enable_if<Is_wrapper_iterator<T>,T>::type> operator()(T const& t) const {return make_transforming_iterator(t,Forward_rep());}
+//#endif
+};
+}
+
+template <class B, class K, class T, bool = Provides_type<B, T>::value>
+struct Map_wrapping_type : Get_type<B, T> {};
+#define CGAL_REGISTER_OBJECT_WRAPPER(X) \
+ template <class B, class K> \
+ struct Map_wrapping_type <B, K, X##_tag, true> { \
+ typedef Wrap::X##_d<K> type; \
+ }
+CGAL_REGISTER_OBJECT_WRAPPER(Point);
+CGAL_REGISTER_OBJECT_WRAPPER(Vector);
+CGAL_REGISTER_OBJECT_WRAPPER(Segment);
+CGAL_REGISTER_OBJECT_WRAPPER(Sphere);
+CGAL_REGISTER_OBJECT_WRAPPER(Hyperplane);
+CGAL_REGISTER_OBJECT_WRAPPER(Weighted_point);
+#undef CGAL_REGISTER_OBJECT_WRAPPER
+
+// Note: this tends to be an all or nothing thing currently, wrapping
+// only some types breaks, probably because we don't check whether the
+// return type is indeed wrapped.
+template < typename Base_ , typename Derived_ = Default >
+struct Cartesian_wrap : public Base_
+{
+ CGAL_CONSTEXPR Cartesian_wrap(){}
+ CGAL_CONSTEXPR Cartesian_wrap(int d):Base_(d){}
+ typedef Base_ Kernel_base;
+ typedef Cartesian_wrap Self;
+ // TODO: pass the 2 types Self and Derived to the wrappers, they can use Self for most purposes and Derived only for Kernel_traits' typedef R.
+ typedef typename Default::Get<Derived_, Self>::type Derived;
+ // FIXME: The list doesn't belong here.
+ typedef boost::mpl::vector<Point_tag,Segment_tag,Sphere_tag,Vector_tag,Hyperplane_tag> Wrapped_list;
+
+ template <class T>
+ struct Type : Map_wrapping_type<Base_, Derived, T> {};
+
+ //Translate the arguments
+ template <class T, class D = void,
+ class=typename Get_functor_category<Derived,T>::type,
+ bool=Provides_functor<Kernel_base, T>::value,
+ bool=boost::mpl::contains<Wrapped_list,typename map_result_tag<T>::type>::type::value>
+ struct Functor {
+ typedef typename Get_functor<Kernel_base, T>::type B;
+ struct type {
+ B b;
+ type(){}
+ type(Self const&k):b(k){}
+ typedef typename B::result_type result_type;
+#ifdef CGAL_CXX11
+ template<class...U> result_type operator()(U&&...u)const{
+ return b(internal::Forward_rep()(u)...);
+ }
+#else
+#define CGAL_VAR(Z,N,_) internal::Forward_rep()(u##N)
+#define CGAL_CODE(Z,N,_) template<BOOST_PP_ENUM_PARAMS(N,class U)> result_type \
+ operator()(BOOST_PP_ENUM_BINARY_PARAMS(N,U,const&u))const{ \
+ return b(BOOST_PP_ENUM(N,CGAL_VAR,)); \
+ }
+ BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_)
+#undef CGAL_CODE
+#undef CGAL_VAR
+// In case the last argument needs to be non-const. Fragile...
+#define CGAL_VAR(Z,N,_) internal::Forward_rep()(u##N)
+#define CGAL_CODE(Z,N,_) template<BOOST_PP_ENUM_PARAMS(N,class U),class V> result_type \
+ operator()(BOOST_PP_ENUM_BINARY_PARAMS(N,U,const&u),V&v)const{ \
+ return b(BOOST_PP_ENUM(N,CGAL_VAR,),internal::Forward_rep()(v)); \
+ }
+ BOOST_PP_REPEAT_FROM_TO(1,8,CGAL_CODE,_)
+#undef CGAL_CODE
+#undef CGAL_VAR
+#endif
+ };
+ };
+
+ // Preserve the difference between Null_functor and nothing.
+ template <class T, class D, class C, bool b>
+ struct Functor <T, D, C, false, b>
+ : Get_functor <Kernel_base, T> {};
+
+ //Translate both the arguments and the result
+ //TODO: Check Is_wrapper instead of relying on map_result_tag?
+ template<class T,class D> struct Functor<T,D,Construct_tag,true,true> {
+ typedef typename Get_functor<Kernel_base, T>::type B;
+ struct type {
+ B b;
+ type(){}
+ type(Self const&k):b(k){}
+ typedef typename map_result_tag<T>::type result_tag;
+ // FIXME: Self or Derived?
+ typedef typename Get_type<Self,result_tag>::type result_type;
+#ifdef CGAL_CXX11
+ template<class...U> result_type operator()(U&&...u)const{
+ return result_type(Eval_functor(),b,internal::Forward_rep()(u)...);
+ }
+#else
+#define CGAL_VAR(Z,N,_) internal::Forward_rep()(u##N)
+#define CGAL_CODE(Z,N,_) template<BOOST_PP_ENUM_PARAMS(N,class U)> result_type \
+ operator()(BOOST_PP_ENUM_BINARY_PARAMS(N,U,const&u))const{ \
+ return result_type(Eval_functor(),b,BOOST_PP_ENUM(N,CGAL_VAR,)); \
+ }
+ BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_)
+#undef CGAL_CODE
+#undef CGAL_VAR
+#endif
+ };
+ };
+
+};
+
+template < typename Base_ >
+struct Cartesian_refcount : public Base_
+{
+ CGAL_CONSTEXPR Cartesian_refcount(){}
+ CGAL_CONSTEXPR Cartesian_refcount(int d):Base_(d){}
+ typedef Base_ Kernel_base;
+ typedef Cartesian_refcount Self;
+
+ // FIXME: Use object_list, or a list passed as argument, or anything
+ // automatic.
+ template <class T, class=void> struct Type : Get_type<Base_, T> {};
+#define CGAL_Kernel_obj(X,Y) \
+ template <class D> struct Type<X##_tag, D> { typedef Ref_count_obj<Cartesian_refcount, X##_tag> type; };
+
+ CGAL_Kernel_obj(Point,point)
+ CGAL_Kernel_obj(Vector,vector)
+#undef CGAL_Kernel_obj
+
+ template<class T> struct Dispatch {
+ //typedef typename map_functor_type<T>::type f_t;
+ typedef typename map_result_tag<T>::type r_t;
+ enum {
+ is_nul = boost::is_same<typename Get_functor<Kernel_base, T>::type,Null_functor>::value,
+ ret_rcobj = boost::is_same<r_t,Point_tag>::value || boost::is_same<r_t,Vector_tag>::value
+ };
+ };
+
+ //Translate the arguments
+ template<class T,class D=void,bool=Dispatch<T>::is_nul,bool=Dispatch<T>::ret_rcobj> struct Functor {
+ typedef typename Get_functor<Kernel_base, T>::type B;
+ struct type {
+ B b;
+ type(){}
+ type(Self const&k):b(k){}
+ typedef typename B::result_type result_type;
+#ifdef CGAL_CXX11
+ template<class...U> result_type operator()(U&&...u)const{
+ return b(internal::Forward_rep()(u)...);
+ }
+#else
+ result_type operator()()const{
+ return b();
+ }
+#define CGAL_VAR(Z,N,_) internal::Forward_rep()(u##N)
+#define CGAL_CODE(Z,N,_) template<BOOST_PP_ENUM_PARAMS(N,class U)> result_type \
+ operator()(BOOST_PP_ENUM_BINARY_PARAMS(N,U,const&u))const{ \
+ return b(BOOST_PP_ENUM(N,CGAL_VAR,)); \
+ }
+ BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_)
+#undef CGAL_CODE
+#undef CGAL_VAR
+#endif
+ };
+ };
+
+ //Translate both the arguments and the result
+ template<class T,class D,bool b> struct Functor<T,D,true,b> {
+ typedef Null_functor type;
+ };
+
+ template<class T,class D> struct Functor<T,D,false,true> {
+ typedef typename Get_functor<Kernel_base, T>::type B;
+ struct type {
+ B b;
+ type(){}
+ type(Self const&k):b(k){}
+ typedef typename map_result_tag<T>::type result_tag;
+ typedef typename Get_type<Self,result_tag>::type result_type;
+#ifdef CGAL_CXX11
+ template<class...U> result_type operator()(U&&...u)const{
+ return result_type(Eval_functor(),b,internal::Forward_rep()(u)...);
+ }
+#else
+ result_type operator()()const{
+ return result_type(Eval_functor(),b);
+ }
+#define CGAL_VAR(Z,N,_) internal::Forward_rep()(u##N)
+#define CGAL_CODE(Z,N,_) template<BOOST_PP_ENUM_PARAMS(N,class U)> result_type \
+ operator()(BOOST_PP_ENUM_BINARY_PARAMS(N,U,const&u))const{ \
+ return result_type(Eval_functor(),b,BOOST_PP_ENUM(N,CGAL_VAR,)); \
+ }
+ BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_)
+#undef CGAL_CODE
+#undef CGAL_VAR
+#endif
+ };
+ };
+
+};
+
+} //namespace CGAL
+
+#if defined(BOOST_MSVC)
+# pragma warning(pop)
+#endif
+
+#endif // CGAL_KERNEL_D_CARTESIAN_WRAP_H
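
The core idea of Cartesian_wrap::Functor above is "unwrap the arguments, call the base functor, re-wrap construction results": internal::Forward_rep maps a wrapper object (or an iterator over wrappers) to its Rep, and the Construct_tag specialization rebuilds a wrapped result through Eval_functor. A deliberately simplified sketch with hypothetical types, not the actual CGAL machinery:

    // Hypothetical stand-ins for a wrapped type and a base-kernel functor.
    struct Base_rep { double x, y; };
    struct Wrapped {
      Base_rep r;
      const Base_rep& rep() const { return r; }
    };
    struct Base_midpoint {
      Base_rep operator()(const Base_rep& a, const Base_rep& b) const {
        return Base_rep{ (a.x + b.x) / 2, (a.y + b.y) / 2 };
      }
    };
    // What the wrapping functor does, stripped of all the tag dispatch.
    struct Wrapped_midpoint {
      Base_midpoint b;
      Wrapped operator()(const Wrapped& p, const Wrapped& q) const {
        return Wrapped{ b(p.rep(), q.rep()) };  // unwrap, call, re-wrap
      }
    };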
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Hyperplane_d.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Hyperplane_d.h
new file mode 100644
index 00000000..54fd50bd
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Hyperplane_d.h
@@ -0,0 +1,131 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_WRAPPER_HYPERPLANE_D_H
+#define CGAL_WRAPPER_HYPERPLANE_D_H
+
+#include <CGAL/representation_tags.h>
+#include <CGAL/assertions.h>
+#include <boost/type_traits.hpp>
+#include <CGAL/Kernel/Return_base_tag.h>
+#include <CGAL/Dimension.h>
+#ifndef CGAL_CXX11
+#include <boost/preprocessor/repetition.hpp>
+#endif
+#include <boost/utility/result_of.hpp>
+
+namespace CGAL {
+namespace Wrap {
+
+template <class R_>
+class Hyperplane_d : public Get_type<typename R_::Kernel_base, Hyperplane_tag>::type
+{
+ typedef typename Get_type<R_, FT_tag>::type FT_;
+ typedef typename R_::Kernel_base Kbase;
+ typedef typename Get_type<R_, Vector_tag>::type Vector_;
+ typedef typename Get_functor<Kbase, Construct_ttag<Hyperplane_tag> >::type CHBase;
+ typedef typename Get_functor<Kbase, Orthogonal_vector_tag>::type OVBase;
+ typedef typename Get_functor<Kbase, Hyperplane_translation_tag>::type HTBase;
+
+ typedef Hyperplane_d Self;
+ CGAL_static_assertion((boost::is_same<Self, typename Get_type<R_, Hyperplane_tag>::type>::value));
+
+public:
+
+ typedef Tag_true Is_wrapper;
+ typedef typename R_::Default_ambient_dimension Ambient_dimension;
+ typedef typename Increment_dimension<Ambient_dimension,-1>::type Feature_dimension;
+
+ typedef typename Get_type<Kbase, Hyperplane_tag>::type Rep;
+
+ const Rep& rep() const
+ {
+ return *this;
+ }
+
+ Rep& rep()
+ {
+ return *this;
+ }
+
+ typedef R_ R;
+
+#ifdef CGAL_CXX11
+ template<class...U,class=typename std::enable_if<!std::is_same<std::tuple<typename std::decay<U>::type...>,std::tuple<Hyperplane_d> >::value>::type> explicit Hyperplane_d(U&&...u)
+ : Rep(CHBase()(std::forward<U>(u)...)){}
+
+// // called from Construct_point_d
+// template<class...U> explicit Point_d(Eval_functor&&,U&&...u)
+// : Rep(Eval_functor(), std::forward<U>(u)...){}
+ template<class F,class...U> explicit Hyperplane_d(Eval_functor&&,F&&f,U&&...u)
+ : Rep(std::forward<F>(f)(std::forward<U>(u)...)){}
+
+#if 0
+ // the new standard may make this necessary
+ Point_d(Point_d const&)=default;
+ Point_d(Point_d &);//=default;
+ Point_d(Point_d &&)=default;
+#endif
+
+ // try not to use these
+ Hyperplane_d(Rep const& v) : Rep(v) {}
+ Hyperplane_d(Rep& v) : Rep(static_cast<Rep const&>(v)) {}
+ Hyperplane_d(Rep&& v) : Rep(std::move(v)) {}
+
+#else
+
+ Hyperplane_d() : Rep(CHBase()()) {}
+
+ Hyperplane_d(Rep const& v) : Rep(v) {} // try not to use it
+
+#define CGAL_CODE(Z,N,_) template<BOOST_PP_ENUM_PARAMS(N,class T)> \
+ explicit Hyperplane_d(BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \
+ : Rep(CHBase()( \
+ BOOST_PP_ENUM_PARAMS(N,t))) {} \
+ \
+ template<class F,BOOST_PP_ENUM_PARAMS(N,class T)> \
+ Hyperplane_d(Eval_functor,F const& f,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \
+ : Rep(f(BOOST_PP_ENUM_PARAMS(N,t))) {}
+ /*
+ template<BOOST_PP_ENUM_PARAMS(N,class T)> \
+ Point_d(Eval_functor,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \
+ : Rep(Eval_functor(), BOOST_PP_ENUM_PARAMS(N,t)) {}
+ */
+
+ BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_)
+#undef CGAL_CODE
+
+#endif
+
+ //TODO: if OVBase returns a reference to a base vector, cast it to a
+ //reference to a wrapper vector. Ugly but should be safe.
+ Vector_ orthogonal_vector()const{
+ return Vector_(Eval_functor(),OVBase(),rep());
+ }
+ FT_ translation()const{
+ return HTBase()(rep());
+ }
+
+
+};
+
+} //namespace Wrap
+} //namespace CGAL
+
+#endif // CGAL_WRAPPER_HYPERPLANE_D_H
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Point_d.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Point_d.h
new file mode 100644
index 00000000..0718c947
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Point_d.h
@@ -0,0 +1,284 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_WRAPPER_POINT_D_H
+#define CGAL_WRAPPER_POINT_D_H
+
+#include <ostream>
+#include <CGAL/Origin.h>
+#include <CGAL/Kernel/mpl.h>
+#include <CGAL/representation_tags.h>
+#include <CGAL/assertions.h>
+#include <boost/type_traits.hpp>
+#include <CGAL/Kernel/Return_base_tag.h>
+#include <CGAL/Dimension.h>
+#ifndef CGAL_CXX11
+#include <boost/preprocessor/repetition.hpp>
+#endif
+#include <boost/utility/result_of.hpp>
+
+namespace CGAL {
+namespace Wrap {
+
+template <class R_>
+class Point_d : public Get_type<typename R_::Kernel_base, Point_tag>::type
+ // Deriving won't work if the point is just a __m256d.
+ // Test boost/std::is_class for instance
+{
+ typedef typename Get_type<R_, RT_tag>::type RT_;
+ typedef typename Get_type<R_, FT_tag>::type FT_;
+ typedef typename R_::Kernel_base Kbase;
+ typedef typename Get_type<R_, Vector_tag>::type Vector_;
+ typedef typename Get_functor<Kbase, Construct_ttag<Point_tag> >::type CPBase;
+ typedef typename Get_functor<Kbase, Compute_point_cartesian_coordinate_tag>::type CCBase;
+ typedef typename Get_functor<Kbase, Construct_ttag<Point_cartesian_const_iterator_tag> >::type CPI;
+
+
+ typedef Point_d Self;
+ CGAL_static_assertion((boost::is_same<Self, typename Get_type<R_, Point_tag>::type>::value));
+
+public:
+
+ typedef Tag_true Is_wrapper;
+ typedef typename R_::Default_ambient_dimension Ambient_dimension;
+ typedef Dimension_tag<0> Feature_dimension;
+
+ typedef typename Get_type<Kbase, Point_tag>::type Rep;
+ //typedef typename CGAL::decay<typename boost::result_of<CPI(Rep,Begin_tag)>::type>::type Cartesian_const_iterator;
+
+ const Rep& rep() const
+ {
+ return *this;
+ }
+
+ Rep& rep()
+ {
+ return *this;
+ }
+
+ typedef R_ R;
+
+#ifdef CGAL_CXX11
+ template<class...U,class=typename std::enable_if<!std::is_same<std::tuple<typename std::decay<U>::type...>,std::tuple<Point_d> >::value>::type> explicit Point_d(U&&...u)
+ : Rep(CPBase()(std::forward<U>(u)...)){}
+
+// // called from Construct_point_d
+// template<class...U> explicit Point_d(Eval_functor&&,U&&...u)
+// : Rep(Eval_functor(), std::forward<U>(u)...){}
+ template<class F,class...U> explicit Point_d(Eval_functor&&,F&&f,U&&...u)
+ : Rep(std::forward<F>(f)(std::forward<U>(u)...)){}
+
+#if 0
+ // the new standard may make this necessary
+ Point_d(Point_d const&)=default;
+ Point_d(Point_d &);//=default;
+ Point_d(Point_d &&)=default;
+#endif
+
+ // try not to use these
+ Point_d(Rep const& v) : Rep(v) {}
+ Point_d(Rep& v) : Rep(static_cast<Rep const&>(v)) {}
+ Point_d(Rep&& v) : Rep(std::move(v)) {}
+
+ // this one should be implicit
+ Point_d(Origin const& v)
+ : Rep(CPBase()(v)) {}
+ Point_d(Origin& v)
+ : Rep(CPBase()(v)) {}
+ Point_d(Origin&& v)
+ : Rep(CPBase()(std::move(v))) {}
+
+#else
+
+ Point_d() : Rep(CPBase()()) {}
+
+ Point_d(Rep const& v) : Rep(v) {} // try not to use it
+
+#define CGAL_CODE(Z,N,_) template<BOOST_PP_ENUM_PARAMS(N,class T)> \
+ explicit Point_d(BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \
+ : Rep(CPBase()( \
+ BOOST_PP_ENUM_PARAMS(N,t))) {} \
+ \
+ template<class F,BOOST_PP_ENUM_PARAMS(N,class T)> \
+ Point_d(Eval_functor,F const& f,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \
+ : Rep(f(BOOST_PP_ENUM_PARAMS(N,t))) {}
+ /*
+ template<BOOST_PP_ENUM_PARAMS(N,class T)> \
+ Point_d(Eval_functor,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \
+ : Rep(Eval_functor(), BOOST_PP_ENUM_PARAMS(N,t)) {}
+ */
+
+ BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_)
+#undef CGAL_CODE
+
+ // this one should be implicit
+ Point_d(Origin const& o)
+ : Rep(CPBase()(o)) {}
+
+#endif
+
+ typename boost::result_of<CCBase(Rep,int)>::type cartesian(int i)const{
+ return CCBase()(rep(),i);
+ }
+ typename boost::result_of<CCBase(Rep,int)>::type operator[](int i)const{
+ return CCBase()(rep(),i);
+ }
+
+ typename boost::result_of<CPI(Rep,Begin_tag)>::type cartesian_begin()const{
+ return CPI()(rep(),Begin_tag());
+ }
+
+ typename boost::result_of<CPI(Rep,End_tag)>::type cartesian_end()const{
+ return CPI()(rep(),End_tag());
+ }
+
+ int dimension() const {
+ typedef typename Get_functor<Kbase, Point_dimension_tag>::type PDBase;
+ return PDBase()(rep());
+ }
+
+ /*
+ Direction_d direction() const
+ {
+ return R().construct_direction_d_object()(*this);
+ }
+
+ Vector_d transform(const Aff_transformation_d &t) const
+ {
+ return t.transform(*this);
+ }
+
+ Vector_d operator/(const RT& c) const
+ {
+ return R().construct_divided_vector_d_object()(*this,c);
+ }
+
+ Vector_d operator/(const typename First_if_different<FT_,RT>::Type & c) const
+ {
+ return R().construct_divided_vector_d_object()(*this,c);
+ }
+
+ typename Qualified_result_of<typename R::Compute_x_3, Vector_3>::type
+ x() const
+ {
+ return R().compute_x_3_object()(*this);
+ }
+
+ typename Qualified_result_of<typename R::Compute_y_3, Vector_3>::type
+ y() const
+ {
+ return R().compute_y_3_object()(*this);
+ }
+
+ typename Qualified_result_of<typename R::Compute_z_3, Vector_3>::type
+ z() const
+ {
+ return R().compute_z_3_object()(*this);
+ }
+
+ typename Qualified_result_of<typename R::Compute_hx_3, Vector_3>::type
+ hx() const
+ {
+ return R().compute_hx_3_object()(*this);
+ }
+
+ typename Qualified_result_of<typename R::Compute_hy_3, Vector_3>::type
+ hy() const
+ {
+ return R().compute_hy_3_object()(*this);
+ }
+
+ typename Qualified_result_of<typename R::Compute_hz_3, Vector_3>::type
+ hz() const
+ {
+ return R().compute_hz_3_object()(*this);
+ }
+
+ typename Qualified_result_of<typename R::Compute_hw_3, Vector_3>::type
+ hw() const
+ {
+ return R().compute_hw_3_object()(*this);
+ }
+
+ typename Qualified_result_of<typename R::Compute_x_3, Vector_3>::type
+ cartesian(int i) const
+ {
+ CGAL_kernel_precondition( (i == 0) || (i == 1) || (i == 2) );
+ if (i==0) return x();
+ if (i==1) return y();
+ return z();
+ }
+
+ typename Qualified_result_of<typename R::Compute_hw_3, Vector_3>::type
+ homogeneous(int i) const
+ {
+ CGAL_kernel_precondition( (i >= 0) || (i <= 3) );
+ if (i==0) return hx();
+ if (i==1) return hy();
+ if (i==2) return hz();
+ return hw();
+ }
+
+ typename Qualified_result_of<typename R::Compute_squared_length_3, Vector_3>::type
+ squared_length() const
+ {
+ return R().compute_squared_length_3_object()(*this);
+ }
+*/
+};
+#if 0
+template <class R_> Point_d<R_>::Point_d(Point_d &)=default;
+#endif
+
+//TODO: IO
+
+template <class R_>
+std::ostream& operator <<(std::ostream& os, const Point_d<R_>& p)
+{
+ typedef typename R_::Kernel_base Kbase;
+ typedef typename Get_functor<Kbase, Construct_ttag<Point_cartesian_const_iterator_tag> >::type CPI;
+ // Should just be "auto"...
+ typename CGAL::decay<typename boost::result_of<
+ CPI(typename Point_d<R_>::Rep,Begin_tag)
+ >::type>::type
+ b = p.cartesian_begin(),
+ e = p.cartesian_end();
+ os << p.dimension();
+ for(; b != e; ++b){
+ os << " " << *b;
+ }
+ return os;
+}
+
+//template <class R_>
+//Vector_d<R_> operator+(const Vector_d<R_>& v,const Vector_d<R_>& w) const
+//{
+// return typename R::template Construct<Sum_of_vectors_tag>::type()(v,w);
+//}
+//
+//template <class R_>
+//Vector_d<R_> operator-(const Vector_d<R_>& v,const Vector_d<R_>& w) const
+//{
+// return typename R::template Construct<Difference_of_vectors_tag>::type()(v,w);
+//}
+
+} //namespace Wrap
+} //namespace CGAL
+
+#endif // CGAL_WRAPPER_POINT_D_H
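
In practice this Point_d wrapper is reached through a full kernel such as CGAL::Epick_d, which is how GUDHI's Alpha complex uses these patched headers; the typedef below is only meant to illustrate the forwarding described above and may need adjusting to the local setup.

    #include <CGAL/Epick_d.h>
    #include <iostream>
    int main() {
      typedef CGAL::Epick_d<CGAL::Dimension_tag<3> > K;
      K::Point_d p(1.0, 2.0, 3.0);  // forwarded to Construct_ttag<Point_tag>
      std::cout << p[0] << "\n";    // Compute_point_cartesian_coordinate_tag
      std::cout << p << "\n";       // operator<< above: dimension, then coordinates
      return 0;
    }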
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Ref_count_obj.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Ref_count_obj.h
new file mode 100644
index 00000000..f33e14c0
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Ref_count_obj.h
@@ -0,0 +1,120 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_WRAPPER_REF_COUNT_OBJ_H
+#define CGAL_WRAPPER_REF_COUNT_OBJ_H
+
+#include <CGAL/Origin.h>
+#include <CGAL/Handle_for.h>
+#include <CGAL/Kernel/mpl.h>
+#include <CGAL/representation_tags.h>
+#include <CGAL/assertions.h>
+#include <boost/type_traits.hpp>
+#include <CGAL/Kernel/Return_base_tag.h>
+#include <CGAL/Dimension.h>
+#ifndef CGAL_CXX11
+#include <boost/preprocessor/repetition.hpp>
+#endif
+#include <boost/utility/result_of.hpp>
+
+// no need for a fancy interface here, people can use the Point_d wrapper on
+// top.
+
+namespace CGAL {
+
+template <class R_, class Tag_>
+class Ref_count_obj
+{
+ typedef typename R_::Kernel_base Kbase;
+ typedef typename Get_functor<Kbase, Construct_ttag<Tag_> >::type CBase;
+
+ typedef Ref_count_obj Self;
+ CGAL_static_assertion((boost::is_same<Self, typename Get_type<R_, Tag_>::type>::value));
+
+public:
+ typedef R_ R;
+
+ typedef Tag_true Is_wrapper;
+ typedef typename R_::Default_ambient_dimension Ambient_dimension;
+ //typedef Dimension_tag<0> Feature_dimension;
+
+ typedef typename Get_type<Kbase, Tag_>::type Rep;
+ typedef Handle_for<Rep> Data;
+
+private:
+ Data data;
+public:
+
+ const Rep& rep() const
+ {
+ return CGAL::get_pointee_or_identity(data);
+ }
+
+#ifdef CGAL_CXX11
+ template<class...U,class=typename std::enable_if<!std::is_same<std::tuple<typename std::decay<U>::type...>,std::tuple<Ref_count_obj> >::value>::type> explicit Ref_count_obj(U&&...u)
+ : data(Eval_functor(),CBase(),std::forward<U>(u)...){}
+
+ template<class F,class...U> explicit Ref_count_obj(Eval_functor&&,F&&f,U&&...u)
+ : data(Eval_functor(),std::forward<F>(f),std::forward<U>(u)...){}
+
+ // try not to use these
+ Ref_count_obj(Rep const& v) : data(v) {}
+ Ref_count_obj(Rep& v) : data(static_cast<Rep const&>(v)) {}
+ Ref_count_obj(Rep&& v) : data(std::move(v)) {}
+
+ // Do we really need this for point?
+// // this one should be implicit
+// Ref_count_obj(Origin const& v)
+// : data(Eval_functor(),CBase(),v) {}
+// Ref_count_obj(Origin& v)
+// : data(Eval_functor(),CBase(),v) {}
+// Ref_count_obj(Origin&& v)
+// : data(Eval_functor(),CBase(),std::move(v)) {}
+
+#else
+
+ Ref_count_obj() : data(Eval_functor(),CBase()) {}
+
+ Ref_count_obj(Rep const& v) : data(v) {} // try not to use it
+
+#define CGAL_CODE(Z,N,_) template<BOOST_PP_ENUM_PARAMS(N,class T)> \
+ explicit Ref_count_obj(BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \
+ : data(Eval_functor(),CBase(),BOOST_PP_ENUM_PARAMS(N,t)) {} \
+ \
+ template<class F,BOOST_PP_ENUM_PARAMS(N,class T)> \
+ Ref_count_obj(Eval_functor,F const& f,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \
+ : data(Eval_functor(),f,BOOST_PP_ENUM_PARAMS(N,t)) {}
+
+ BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_)
+#undef CGAL_CODE
+ template<class F>
+ Ref_count_obj(Eval_functor,F const& f)
+ : data(Eval_functor(),f) {}
+
+// // this one should be implicit
+// Ref_count_obj(Origin const& o)
+// : data(Eval_functor(),CBase(),o) {}
+
+#endif
+
+};
+
+} //namespace CGAL
+
+#endif
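
Ref_count_obj stores its Rep inside CGAL::Handle_for, so copies of the wrapper share a single, reference-counted representation instead of duplicating it. A rough analogy in standard C++ (a sketch, not the actual Handle_for semantics):

    #include <memory>
    struct Rep3 { double c[3]; };
    struct Refcounted_point {
      // Shared, read-only representation; copying the wrapper only bumps a count.
      std::shared_ptr<const Rep3> data;
      explicit Refcounted_point(const Rep3& r) : data(std::make_shared<const Rep3>(r)) {}
      const Rep3& rep() const { return *data; }
    };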
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Segment_d.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Segment_d.h
new file mode 100644
index 00000000..bfb20a77
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Segment_d.h
@@ -0,0 +1,133 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_WRAPPER_SEGMENT_D_H
+#define CGAL_WRAPPER_SEGMENT_D_H
+
+#include <CGAL/Origin.h>
+#include <CGAL/Kernel/mpl.h>
+#include <CGAL/representation_tags.h>
+#include <CGAL/assertions.h>
+#include <boost/type_traits.hpp>
+#include <CGAL/Kernel/Return_base_tag.h>
+#include <CGAL/Dimension.h>
+#ifndef CGAL_CXX11
+#include <boost/preprocessor/repetition.hpp>
+#endif
+#include <boost/utility/result_of.hpp>
+
+namespace CGAL {
+namespace Wrap {
+
+template <class R_>
+class Segment_d : public Get_type<typename R_::Kernel_base, Segment_tag>::type
+{
+ typedef typename Get_type<R_, RT_tag>::type RT_;
+ typedef typename Get_type<R_, FT_tag>::type FT_;
+ typedef typename R_::Kernel_base Kbase;
+ typedef typename Get_type<R_, Point_tag>::type Point_;
+ typedef typename Get_functor<Kbase, Construct_ttag<Point_tag> >::type CPBase;
+ typedef typename Get_functor<Kbase, Construct_ttag<Segment_tag> >::type CSBase;
+ typedef typename Get_functor<Kbase, Segment_extremity_tag>::type CSEBase;
+
+ typedef Segment_d Self;
+ CGAL_static_assertion((boost::is_same<Self, typename Get_type<R_, Segment_tag>::type>::value));
+
+public:
+
+ typedef Tag_true Is_wrapper;
+ typedef typename R_::Default_ambient_dimension Ambient_dimension;
+ typedef Dimension_tag<1> Feature_dimension;
+
+ typedef typename Get_type<Kbase, Segment_tag>::type Rep;
+
+ const Rep& rep() const
+ {
+ return *this;
+ }
+
+ Rep& rep()
+ {
+ return *this;
+ }
+
+ typedef R_ R;
+
+#ifdef CGAL_CXX11
+ template<class...U,class=typename std::enable_if<!std::is_same<std::tuple<typename std::decay<U>::type...>,std::tuple<Segment_d> >::value>::type> explicit Segment_d(U&&...u)
+ : Rep(CSBase()(std::forward<U>(u)...)){}
+
+// // called from Construct_point_d
+// template<class...U> explicit Point_d(Eval_functor&&,U&&...u)
+// : Rep(Eval_functor(), std::forward<U>(u)...){}
+ template<class F,class...U> explicit Segment_d(Eval_functor&&,F&&f,U&&...u)
+ : Rep(std::forward<F>(f)(std::forward<U>(u)...)){}
+
+#if 0
+ // the new standard may make this necessary
+ Point_d(Point_d const&)=default;
+ Point_d(Point_d &);//=default;
+ Point_d(Point_d &&)=default;
+#endif
+
+ // try not to use these
+ Segment_d(Rep const& v) : Rep(v) {}
+ Segment_d(Rep& v) : Rep(static_cast<Rep const&>(v)) {}
+ Segment_d(Rep&& v) : Rep(std::move(v)) {}
+
+#else
+
+ Segment_d() : Rep(CSBase()()) {}
+
+ Segment_d(Rep const& v) : Rep(v) {} // try not to use it
+
+#define CGAL_CODE(Z,N,_) template<BOOST_PP_ENUM_PARAMS(N,class T)> \
+ explicit Segment_d(BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \
+ : Rep(CSBase()( \
+ BOOST_PP_ENUM_PARAMS(N,t))) {} \
+ \
+ template<class F,BOOST_PP_ENUM_PARAMS(N,class T)> \
+ Segment_d(Eval_functor,F const& f,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \
+ : Rep(f(BOOST_PP_ENUM_PARAMS(N,t))) {}
+ /*
+ template<BOOST_PP_ENUM_PARAMS(N,class T)> \
+ Point_d(Eval_functor,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \
+ : Rep(Eval_functor(), BOOST_PP_ENUM_PARAMS(N,t)) {}
+ */
+
+ BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_)
+#undef CGAL_CODE
+
+#endif
+
+ //TODO: if CSEBase returns a reference to a base point, cast it to a
+ //reference to a wrapper point. Ugly but should be safe.
+ Point_ source()const{
+ return Point_(Eval_functor(),CSEBase(),rep(),0);
+ }
+ Point_ target()const{
+ return Point_(Eval_functor(),CSEBase(),rep(),1);
+ }
+
+};
+
+} //namespace Wrap
+} //namespace CGAL
+
+#endif // CGAL_WRAPPER_SEGMENT_D_H
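
The source()/target() accessors above show the re-wrapping pattern used throughout these wrappers: the base kernel's Segment_extremity functor yields a base point for index 0 or 1, and Point_(Eval_functor(), ...) adopts it without an extra conversion. A minimal sketch of the same idea with hypothetical types:

    struct Base_point { double x, y; };
    struct Base_segment { Base_point ends[2]; };
    struct Extremity {
      Base_point operator()(const Base_segment& s, int i) const { return s.ends[i]; }
    };
    struct Segment_view {
      Base_segment rep;
      Base_point source() const { return Extremity()(rep, 0); }
      Base_point target() const { return Extremity()(rep, 1); }
    };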
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Sphere_d.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Sphere_d.h
new file mode 100644
index 00000000..87f0c66e
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Sphere_d.h
@@ -0,0 +1,130 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_WRAPPER_SPHERE_D_H
+#define CGAL_WRAPPER_SPHERE_D_H
+
+#include <CGAL/representation_tags.h>
+#include <CGAL/assertions.h>
+#include <boost/type_traits.hpp>
+#include <CGAL/Kernel/Return_base_tag.h>
+#include <CGAL/Dimension.h>
+#ifndef CGAL_CXX11
+#include <boost/preprocessor/repetition.hpp>
+#endif
+#include <boost/utility/result_of.hpp>
+
+namespace CGAL {
+namespace Wrap {
+
+template <class R_>
+class Sphere_d : public Get_type<typename R_::Kernel_base, Sphere_tag>::type
+{
+ typedef typename Get_type<R_, FT_tag>::type FT_;
+ typedef typename R_::Kernel_base Kbase;
+ typedef typename Get_type<R_, Point_tag>::type Point_;
+ typedef typename Get_functor<Kbase, Construct_ttag<Sphere_tag> >::type CSBase;
+ typedef typename Get_functor<Kbase, Center_of_sphere_tag>::type COSBase;
+ typedef typename Get_functor<Kbase, Squared_radius_tag>::type SRBase;
+
+ typedef Sphere_d Self;
+ CGAL_static_assertion((boost::is_same<Self, typename Get_type<R_, Sphere_tag>::type>::value));
+
+public:
+
+ typedef Tag_true Is_wrapper;
+ typedef typename R_::Default_ambient_dimension Ambient_dimension;
+ typedef typename Increment_dimension<Ambient_dimension,-1>::type Feature_dimension;
+
+ typedef typename Get_type<Kbase, Sphere_tag>::type Rep;
+
+ const Rep& rep() const
+ {
+ return *this;
+ }
+
+ Rep& rep()
+ {
+ return *this;
+ }
+
+ typedef R_ R;
+
+#ifdef CGAL_CXX11
+ template<class...U,class=typename std::enable_if<!std::is_same<std::tuple<typename std::decay<U>::type...>,std::tuple<Sphere_d> >::value>::type> explicit Sphere_d(U&&...u)
+ : Rep(CSBase()(std::forward<U>(u)...)){}
+
+// // called from Construct_point_d
+// template<class...U> explicit Point_d(Eval_functor&&,U&&...u)
+// : Rep(Eval_functor(), std::forward<U>(u)...){}
+ template<class F,class...U> explicit Sphere_d(Eval_functor&&,F&&f,U&&...u)
+ : Rep(std::forward<F>(f)(std::forward<U>(u)...)){}
+
+#if 0
+ // the new standard may make this necessary
+ Point_d(Point_d const&)=default;
+ Point_d(Point_d &);//=default;
+ Point_d(Point_d &&)=default;
+#endif
+
+ // try not to use these
+ Sphere_d(Rep const& v) : Rep(v) {}
+ Sphere_d(Rep& v) : Rep(static_cast<Rep const&>(v)) {}
+ Sphere_d(Rep&& v) : Rep(std::move(v)) {}
+
+#else
+
+ Sphere_d() : Rep(CSBase()()) {}
+
+ Sphere_d(Rep const& v) : Rep(v) {} // try not to use it
+
+#define CGAL_CODE(Z,N,_) template<BOOST_PP_ENUM_PARAMS(N,class T)> \
+ explicit Sphere_d(BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \
+ : Rep(CSBase()( \
+ BOOST_PP_ENUM_PARAMS(N,t))) {} \
+ \
+ template<class F,BOOST_PP_ENUM_PARAMS(N,class T)> \
+ Sphere_d(Eval_functor,F const& f,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \
+ : Rep(f(BOOST_PP_ENUM_PARAMS(N,t))) {}
+ /*
+ template<BOOST_PP_ENUM_PARAMS(N,class T)> \
+ Point_d(Eval_functor,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \
+ : Rep(Eval_functor(), BOOST_PP_ENUM_PARAMS(N,t)) {}
+ */
+
+ BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_)
+#undef CGAL_CODE
+
+#endif
+
+ //TODO: if COSBase returns a reference to a base point, cast it to a
+ //reference to a wrapper point. Ugly but should be safe.
+ Point_ center()const{
+ return Point_(Eval_functor(),COSBase(),rep());
+ }
+ FT_ squared_radius()const{
+ return SRBase()(rep());
+ }
+
+};
+
+} //namespace Wrap
+} //namespace CGAL
+
+#endif // CGAL_WRAPPER_SPHERE_D_H
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Vector_d.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Vector_d.h
new file mode 100644
index 00000000..b7d1f0d0
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Vector_d.h
@@ -0,0 +1,266 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_WRAPPER_VECTOR_D_H
+#define CGAL_WRAPPER_VECTOR_D_H
+
+#include <CGAL/Origin.h>
+#include <CGAL/Kernel/mpl.h>
+#include <CGAL/representation_tags.h>
+#include <CGAL/assertions.h>
+#include <boost/type_traits.hpp>
+#include <CGAL/Kernel/Return_base_tag.h>
+#include <CGAL/Dimension.h>
+#ifndef CGAL_CXX11
+#include <boost/preprocessor/repetition.hpp>
+#endif
+#include <boost/utility/result_of.hpp>
+
+namespace CGAL {
+namespace Wrap {
+
+template <class R_>
+class Vector_d : public Get_type<typename R_::Kernel_base, Vector_tag>::type
+{
+ typedef typename Get_type<R_, RT_tag>::type RT_;
+ typedef typename Get_type<R_, FT_tag>::type FT_;
+ typedef typename R_::Kernel_base Kbase;
+ typedef typename Get_type<R_, Point_tag>::type Point_;
+ typedef typename Get_functor<Kbase, Construct_ttag<Vector_tag> >::type CVBase;
+ typedef typename Get_functor<Kbase, Compute_vector_cartesian_coordinate_tag>::type CCBase;
+ typedef typename Get_functor<Kbase, Construct_ttag<Vector_cartesian_const_iterator_tag> >::type CVI;
+ typedef typename Get_functor<Kbase, Squared_length_tag>::type SLBase;
+
+ typedef Vector_d Self;
+ CGAL_static_assertion((boost::is_same<Self, typename Get_type<R_, Vector_tag>::type>::value));
+
+public:
+
+ typedef Tag_true Is_wrapper;
+ typedef typename R_::Default_ambient_dimension Ambient_dimension;
+ typedef Dimension_tag<0> Feature_dimension;
+
+ //typedef typename R_::Vector_cartesian_const_iterator Cartesian_const_iterator;
+ typedef typename Get_type<Kbase, Vector_tag>::type Rep;
+
+ const Rep& rep() const
+ {
+ return *this;
+ }
+
+ Rep& rep()
+ {
+ return *this;
+ }
+
+ typedef R_ R;
+
+#ifdef CGAL_CXX11
+ template<class...U,class=typename std::enable_if<!std::is_same<std::tuple<typename std::decay<U>::type...>,std::tuple<Vector_d> >::value>::type> explicit Vector_d(U&&...u)
+ : Rep(CVBase()(std::forward<U>(u)...)){}
+
+// // called from Construct_vector_d
+// template<class...U> explicit Vector_d(Eval_functor&&,U&&...u)
+// : Rep(Eval_functor(), std::forward<U>(u)...){}
+ template<class F,class...U> explicit Vector_d(Eval_functor&&,F&&f,U&&...u)
+ : Rep(std::forward<F>(f)(std::forward<U>(u)...)){}
+
+#if 0
+ // the new standard may make this necessary
+ Vector_d(Vector_d const&)=default;
+ Vector_d(Vector_d &);//=default;
+ Vector_d(Vector_d &&)=default;
+#endif
+
+ // try not to use these
+ Vector_d(Rep const& v) : Rep(v) {}
+ Vector_d(Rep& v) : Rep(static_cast<Rep const&>(v)) {}
+ Vector_d(Rep&& v) : Rep(std::move(v)) {}
+
+ // this one should be implicit
+ Vector_d(Null_vector const& v)
+ : Rep(CVBase()(v)) {}
+ Vector_d(Null_vector& v)
+ : Rep(CVBase()(v)) {}
+ Vector_d(Null_vector&& v)
+ : Rep(CVBase()(std::move(v))) {}
+
+#else
+
+ Vector_d() : Rep(CVBase()()) {}
+
+ Vector_d(Rep const& v) : Rep(v) {} // try not to use it
+
+#define CGAL_CODE(Z,N,_) template<BOOST_PP_ENUM_PARAMS(N,class T)> \
+ explicit Vector_d(BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \
+ : Rep(CVBase()( \
+ BOOST_PP_ENUM_PARAMS(N,t))) {} \
+ \
+ template<class F,BOOST_PP_ENUM_PARAMS(N,class T)> \
+ Vector_d(Eval_functor,F const& f,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \
+ : Rep(f(BOOST_PP_ENUM_PARAMS(N,t))) {}
+ /*
+ template<BOOST_PP_ENUM_PARAMS(N,class T)> \
+ Vector_d(Eval_functor,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \
+ : Rep(Eval_functor(), BOOST_PP_ENUM_PARAMS(N,t)) {}
+ */
+
+ BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_)
+#undef CGAL_CODE
+
+ // this one should be implicit
+ Vector_d(Null_vector const& v)
+ : Rep(CVBase()(v)) {}
+
+#endif
+
+ typename boost::result_of<CCBase(Rep,int)>::type cartesian(int i)const{
+ return CCBase()(rep(),i);
+ }
+
+ typename boost::result_of<CCBase(Rep,int)>::type operator[](int i)const{
+ return CCBase()(rep(),i);
+ }
+
+ typename boost::result_of<CVI(Rep,Begin_tag)>::type cartesian_begin()const{
+ return CVI()(rep(),Begin_tag());
+ }
+
+ typename boost::result_of<CVI(Rep,End_tag)>::type cartesian_end()const{
+ return CVI()(rep(),End_tag());
+ }
+
+ Vector_d operator-() const
+ {
+ return typename Get_functor<R, Opposite_vector_tag>::type()(*this);
+ }
+
+ /*
+ Direction_d direction() const
+ {
+ return R().construct_direction_d_object()(*this);
+ }
+
+ Vector_d transform(const Aff_transformation_d &t) const
+ {
+ return t.transform(*this);
+ }
+
+ Vector_d operator/(const RT& c) const
+ {
+ return R().construct_divided_vector_d_object()(*this,c);
+ }
+
+ Vector_d operator/(const typename First_if_different<FT_,RT>::Type & c) const
+ {
+ return R().construct_divided_vector_d_object()(*this,c);
+ }
+
+ typename Qualified_result_of<typename R::Compute_x_3, Vector_3>::type
+ x() const
+ {
+ return R().compute_x_3_object()(*this);
+ }
+
+ typename Qualified_result_of<typename R::Compute_y_3, Vector_3>::type
+ y() const
+ {
+ return R().compute_y_3_object()(*this);
+ }
+
+ typename Qualified_result_of<typename R::Compute_z_3, Vector_3>::type
+ z() const
+ {
+ return R().compute_z_3_object()(*this);
+ }
+
+ typename Qualified_result_of<typename R::Compute_hx_3, Vector_3>::type
+ hx() const
+ {
+ return R().compute_hx_3_object()(*this);
+ }
+
+ typename Qualified_result_of<typename R::Compute_hy_3, Vector_3>::type
+ hy() const
+ {
+ return R().compute_hy_3_object()(*this);
+ }
+
+ typename Qualified_result_of<typename R::Compute_hz_3, Vector_3>::type
+ hz() const
+ {
+ return R().compute_hz_3_object()(*this);
+ }
+
+ typename Qualified_result_of<typename R::Compute_hw_3, Vector_3>::type
+ hw() const
+ {
+ return R().compute_hw_3_object()(*this);
+ }
+
+ typename Qualified_result_of<typename R::Compute_x_3, Vector_3>::type
+ cartesian(int i) const
+ {
+ CGAL_kernel_precondition( (i == 0) || (i == 1) || (i == 2) );
+ if (i==0) return x();
+ if (i==1) return y();
+ return z();
+ }
+
+ typename Qualified_result_of<typename R::Compute_hw_3, Vector_3>::type
+ homogeneous(int i) const
+ {
+ CGAL_kernel_precondition( (i >= 0) || (i <= 3) );
+ if (i==0) return hx();
+ if (i==1) return hy();
+ if (i==2) return hz();
+ return hw();
+ }
+
+ int dimension() const // bad idea?
+ {
+ return rep.dimension();
+ }
+*/
+ typename boost::result_of<SLBase(Rep)>::type squared_length()const{
+ return SLBase()(rep());
+ }
+};
+#if 0
+template <class R_> Vector_d<R_>::Vector_d(Vector_d &)=default;
+#endif
+
+//TODO: IO
+
+template <class R_>
+Vector_d<R_> operator+(const Vector_d<R_>& v,const Vector_d<R_>& w)
+{
+ return typename Get_functor<R_, Sum_of_vectors_tag>::type()(v,w);
+}
+
+template <class R_>
+Vector_d<R_> operator-(const Vector_d<R_>& v,const Vector_d<R_>& w)
+{
+ return typename Get_functor<R_, Difference_of_vectors_tag>::type()(v,w);
+}
+
+} //namespace Wrap
+} //namespace CGAL
+
+#endif // CGAL_WRAPPER_VECTOR_D_H
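
The free operator+ and operator- defined above keep vector arithmetic kernel-driven: they dispatch to whatever functors the kernel registers under Sum_of_vectors_tag and Difference_of_vectors_tag rather than touching coordinates directly. A usage sketch, with an illustrative kernel typedef that may need adjusting:

    // typedef CGAL::Epick_d<CGAL::Dimension_tag<2> > K;
    // K::Vector_d v(1.0, 0.0), w(0.0, 1.0);
    // K::Vector_d s = v + w;            // Get_functor<K, Sum_of_vectors_tag>
    // double l = s.squared_length();    // Squared_length_tag functor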
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Weighted_point_d.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Weighted_point_d.h
new file mode 100644
index 00000000..877eea21
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Weighted_point_d.h
@@ -0,0 +1,129 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_WRAPPER_WEIGHTED_POINT_D_H
+#define CGAL_WRAPPER_WEIGHTED_POINT_D_H
+
+#include <CGAL/representation_tags.h>
+#include <boost/static_assert.hpp>
+#include <boost/type_traits.hpp>
+#include <CGAL/Kernel/Return_base_tag.h>
+#include <CGAL/Dimension.h>
+#ifndef CGAL_CXX11
+#include <boost/preprocessor/repetition.hpp>
+#endif
+#include <boost/utility/result_of.hpp>
+
+namespace CGAL {
+namespace Wrap {
+
+template <class R_>
+class Weighted_point_d : public Get_type<typename R_::Kernel_base, Weighted_point_tag>::type
+{
+ typedef typename Get_type<R_, FT_tag>::type FT_;
+ typedef typename R_::Kernel_base Kbase;
+ typedef typename Get_type<R_, Point_tag>::type Point_;
+ typedef typename Get_functor<Kbase, Construct_ttag<Weighted_point_tag> >::type CWPBase;
+ typedef typename Get_functor<Kbase, Point_drop_weight_tag>::type PDWBase;
+ typedef typename Get_functor<Kbase, Point_weight_tag>::type PWBase;
+
+ typedef Weighted_point_d Self;
+ BOOST_STATIC_ASSERT((boost::is_same<Self, typename Get_type<R_, Weighted_point_tag>::type>::value));
+
+public:
+
+ typedef Tag_true Is_wrapper;
+ typedef typename R_::Default_ambient_dimension Ambient_dimension;
+ typedef Dimension_tag<0> Feature_dimension;
+
+ typedef typename Get_type<Kbase, Weighted_point_tag>::type Rep;
+
+ const Rep& rep() const
+ {
+ return *this;
+ }
+
+ Rep& rep()
+ {
+ return *this;
+ }
+
+ typedef R_ R;
+
+#ifdef CGAL_CXX11
+ template<class...U,class=typename std::enable_if<!std::is_same<std::tuple<typename std::decay<U>::type...>,std::tuple<Weighted_point_d> >::value>::type> explicit Weighted_point_d(U&&...u)
+ : Rep(CWPBase()(std::forward<U>(u)...)){}
+
+// // called from Construct_point_d
+// template<class...U> explicit Point_d(Eval_functor&&,U&&...u)
+// : Rep(Eval_functor(), std::forward<U>(u)...){}
+ template<class F,class...U> explicit Weighted_point_d(Eval_functor&&,F&&f,U&&...u)
+ : Rep(std::forward<F>(f)(std::forward<U>(u)...)){}
+
+#if 0
+ // the new standard may make this necessary
+ Point_d(Point_d const&)=default;
+ Point_d(Point_d &);//=default;
+ Point_d(Point_d &&)=default;
+#endif
+
+ // try not to use these
+ Weighted_point_d(Rep const& v) : Rep(v) {}
+ Weighted_point_d(Rep& v) : Rep(static_cast<Rep const&>(v)) {}
+ Weighted_point_d(Rep&& v) : Rep(std::move(v)) {}
+
+#else
+
+ Weighted_point_d() : Rep(CWPBase()()) {}
+
+ Weighted_point_d(Rep const& v) : Rep(v) {} // try not to use it
+
+#define CGAL_CODE(Z,N,_) template<BOOST_PP_ENUM_PARAMS(N,class T)> \
+ explicit Weighted_point_d(BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \
+ : Rep(CWPBase()( \
+ BOOST_PP_ENUM_PARAMS(N,t))) {} \
+ \
+ template<class F,BOOST_PP_ENUM_PARAMS(N,class T)> \
+ Weighted_point_d(Eval_functor,F const& f,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \
+ : Rep(f(BOOST_PP_ENUM_PARAMS(N,t))) {}
+ /*
+ template<BOOST_PP_ENUM_PARAMS(N,class T)> \
+ Point_d(Eval_functor,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \
+ : Rep(Eval_functor(), BOOST_PP_ENUM_PARAMS(N,t)) {}
+ */
+
+ BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_)
+#undef CGAL_CODE
+
+#endif
+
+ //TODO: use references?
+ Point_ point()const{
+ return Point_(Eval_functor(),PDWBase(),rep());
+ }
+ FT_ weight()const{
+ return PWBase()(rep());
+ }
+
+};
+
+} //namespace Wrap
+} //namespace CGAL
+
+#endif // CGAL_WRAPPER_WEIGHTED_POINT_D_H
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/function_objects_cartesian.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/function_objects_cartesian.h
new file mode 100644
index 00000000..5a132ad2
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/function_objects_cartesian.h
@@ -0,0 +1,1355 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_KERNEL_D_FUNCTION_OBJECTS_CARTESIAN_H
+#define CGAL_KERNEL_D_FUNCTION_OBJECTS_CARTESIAN_H
+
+#include <CGAL/NewKernel_d/utils.h>
+#include <CGAL/Dimension.h>
+#include <CGAL/Uncertain.h>
+#include <CGAL/NewKernel_d/store_kernel.h>
+#include <CGAL/is_iterator.h>
+#include <CGAL/iterator_from_indices.h>
+#include <CGAL/number_utils.h>
+#include <CGAL/Kernel/Return_base_tag.h>
+#include <CGAL/transforming_iterator.h>
+#include <CGAL/transforming_pair_iterator.h>
+#include <CGAL/NewKernel_d/functor_tags.h>
+#include <CGAL/NewKernel_d/functor_properties.h>
+#include <CGAL/predicates/sign_of_determinant.h>
+#include <functional>
+#ifdef CGAL_CXX11
+#include <initializer_list>
+#endif
+
+namespace CGAL {
+namespace CartesianDKernelFunctors {
+namespace internal {
+template<class,int> struct Dimension_at_most { enum { value = false }; };
+template<int a,int b> struct Dimension_at_most<Dimension_tag<a>,b> {
+ enum { value = (a <= b) };
+};
+}
+
+template<class R_,class D_=typename R_::Default_ambient_dimension,bool=internal::Dimension_at_most<D_,6>::value> struct Orientation_of_points : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Orientation_of_points)
+ typedef R_ R;
+ typedef typename Get_type<R, Point_tag>::type Point;
+ typedef typename Get_type<R, Orientation_tag>::type result_type;
+ typedef typename R::LA::Square_matrix Matrix;
+
+ template<class Iter>
+ result_type operator()(Iter f, Iter e)const{
+ typename Get_functor<R, Compute_point_cartesian_coordinate_tag>::type c(this->kernel());
+ typename Get_functor<R, Point_dimension_tag>::type pd(this->kernel());
+ Point const& p0=*f++;
+ int d=pd(p0);
+ Matrix m(d,d);
+ // FIXME: this writes the coordinates as matrix rows? Check the other uses in this file; this may be wrong for some of them.
+ for(int i=0;f!=e;++f,++i) {
+ Point const& p=*f;
+ for(int j=0;j<d;++j){
+ m(i,j)=c(p,j)-c(p0,j);
+ // should we cache the coordinates of p0 in case they are computed?
+ }
+ }
+ return R::LA::sign_of_determinant(CGAL_MOVE(m));
+ }
+
+#ifdef CGAL_CXX11
+ // Since the dimension is at least 2, there are at least 3 points and no ambiguity with iterators.
+ // template <class...U,class=typename std::enable_if<std::is_same<Dimension_tag<sizeof...(U)-1>,typename R::Default_ambient_dimension>::value>::type>
+ template <class...U,class=typename std::enable_if<(sizeof...(U)>=3)>::type>
+ result_type operator()(U&&...u) const {
+ return operator()({std::forward<U>(u)...});
+ }
+
+ template <class P>
+ result_type operator()(std::initializer_list<P> l) const {
+ return operator()(l.begin(),l.end());
+ }
+#else
+ //should we make it template to avoid instantiation for wrong dim?
+ //or iterate outside the class?
+#define CGAL_VAR(Z,J,I) m(I,J)=c(p##I,J)-c(x,J);
+#define CGAL_VAR2(Z,I,N) BOOST_PP_REPEAT(N,CGAL_VAR,I)
+#define CGAL_CODE(Z,N,_) \
+ result_type operator()(Point const&x, BOOST_PP_ENUM_PARAMS(N,Point const&p)) const { \
+ typename Get_functor<R, Compute_point_cartesian_coordinate_tag>::type c(this->kernel()); \
+ Matrix m(N,N); \
+ BOOST_PP_REPEAT(N,CGAL_VAR2,N) \
+ return R::LA::sign_of_determinant(CGAL_MOVE(m)); \
+ }
+
+BOOST_PP_REPEAT_FROM_TO(7, 10, CGAL_CODE, _ )
+ // No need to do it for <=6, since that uses a different code path
+#undef CGAL_CODE
+#undef CGAL_VAR2
+#undef CGAL_VAR
+#endif
+};
+
+#ifdef CGAL_CXX11
+template<class R_,int d> struct Orientation_of_points<R_,Dimension_tag<d>,true> : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Orientation_of_points)
+ typedef R_ R;
+ typedef typename Get_type<R, RT_tag>::type RT;
+ typedef typename Get_type<R, Point_tag>::type Point;
+ typedef typename Get_type<R, Orientation_tag>::type result_type;
+ template<class>struct Help;
+ template<int...I>struct Help<Indices<I...> > {
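+ // Expands the d*d determinant entries c(p_{I/d}, I%d) - c(x, I%d) into a single sign_of_determinant call.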
+ template<class C,class P,class T> result_type operator()(C const&c,P const&x,T&&t)const{
+ return sign_of_determinant<RT>(c(std::get<I/d>(t),I%d)-c(x,I%d)...);
+ }
+ };
+ template<class P0,class...P> result_type operator()(P0 const&x,P&&...p)const{
+ static_assert(d==sizeof...(P),"Wrong number of arguments");
+ typename Get_functor<R, Compute_point_cartesian_coordinate_tag>::type c(this->kernel());
+ return Help<typename N_increasing_indices<d*d>::type>()(c,x,std::forward_as_tuple(std::forward<P>(p)...));
+ }
+
+
+ template<int N,class Iter,class...U> result_type help2(Dimension_tag<N>, Iter f, Iter const&e, U&&...u)const{
+ auto const&p=*f;
+ return help2(Dimension_tag<N-1>(),++f,e,std::forward<U>(u)...,p);
+ }
+ template<class Iter,class...U> result_type help2(Dimension_tag<0>, Iter CGAL_assertion_code(f), Iter const& CGAL_assertion_code(e), U&&...u)const{
+ CGAL_assertion(f==e);
+ return operator()(std::forward<U>(u)...);
+ }
+ template<class Iter>
+ result_type operator()(Iter f, Iter e)const{
+ return help2(Dimension_tag<d+1>(),f,e);
+ }
+};
+#else
+#define CGAL_VAR(Z,J,I) c(p##I,J)-x##J
+#define CGAL_VAR2(Z,I,N) BOOST_PP_ENUM(N,CGAL_VAR,I)
+#define CGAL_VAR3(Z,N,_) Point const&p##N=*++f;
+#define CGAL_VAR4(Z,N,_) RT const&x##N=c(x,N);
+#define CGAL_CODE(Z,N,_) \
+template<class R_> struct Orientation_of_points<R_,Dimension_tag<N>,true> : private Store_kernel<R_> { \
+ CGAL_FUNCTOR_INIT_STORE(Orientation_of_points) \
+ typedef R_ R; \
+ typedef typename Get_type<R, RT_tag>::type RT; \
+ typedef typename Get_type<R, Point_tag>::type Point; \
+ typedef typename Get_type<R, Orientation_tag>::type result_type; \
+ result_type operator()(Point const&x, BOOST_PP_ENUM_PARAMS(N,Point const&p)) const { \
+ typename Get_functor<R, Compute_point_cartesian_coordinate_tag>::type c(this->kernel()); \
+ BOOST_PP_REPEAT(N,CGAL_VAR4,) \
+ return sign_of_determinant<RT>(BOOST_PP_ENUM(N,CGAL_VAR2,N)); \
+ } \
+ template<class Iter> \
+ result_type operator()(Iter f, Iter CGAL_assertion_code(e))const{ \
+ Point const&x=*f; \
+ BOOST_PP_REPEAT(N,CGAL_VAR3,) \
+ CGAL_assertion(++f==e); \
+ return operator()(x,BOOST_PP_ENUM_PARAMS(N,p)); \
+ } \
+};
+
+ BOOST_PP_REPEAT_FROM_TO(2, 7, CGAL_CODE, _ )
+#undef CGAL_CODE
+#undef CGAL_VAR4
+#undef CGAL_VAR3
+#undef CGAL_VAR2
+#undef CGAL_VAR
+
+#endif
+
+template<class R_> struct Orientation_of_points<R_,Dimension_tag<1>,true> : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Orientation_of_points)
+ typedef R_ R;
+ typedef typename Get_type<R, RT_tag>::type RT;
+ typedef typename Get_type<R, Point_tag>::type Point;
+ typedef typename Get_type<R, Orientation_tag>::type result_type;
+ result_type operator()(Point const&x, Point const&y) const {
+ typename Get_functor<R, Compute_point_cartesian_coordinate_tag>::type c(this->kernel());
+ // No sign_of_determinant(RT) :-(
+ return CGAL::compare(c(y,0),c(x,0));
+ }
+ template<class Iter>
+ result_type operator()(Iter f, Iter CGAL_assertion_code(e))const{
+ Point const&x=*f;
+ Point const&y=*++f;
+ CGAL_assertion(++f==e);
+ return operator()(x,y);
+ }
+};
+}
+
+CGAL_KD_DEFAULT_FUNCTOR(Orientation_of_points_tag,(CartesianDKernelFunctors::Orientation_of_points<K>),(Point_tag),(Point_dimension_tag,Compute_point_cartesian_coordinate_tag));
+
+namespace CartesianDKernelFunctors {
+template<class R_> struct Orientation_of_vectors : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Orientation_of_vectors)
+ typedef R_ R;
+ typedef typename Get_type<R, Vector_tag>::type Vector;
+ typedef typename Get_type<R, Orientation_tag>::type result_type;
+ typedef typename R::LA::Square_matrix Matrix;
+
+ template<class Iter>
+ result_type operator()(Iter f, Iter e)const{
+ typename Get_functor<R, Compute_vector_cartesian_coordinate_tag>::type c(this->kernel());
+ typename Get_functor<R, Point_dimension_tag>::type vd(this->kernel());
+ // FIXME: Uh? Using it on a vector ?!
+ Vector const& v0=*f;
+ int d=vd(v0);
+ Matrix m(d,d);
+ for(int j=0;j<d;++j){
+ m(0,j)=c(v0,j);
+ }
+ for(int i=1;++f!=e;++i) {
+ Vector const& v=*f;
+ for(int j=0;j<d;++j){
+ m(i,j)=c(v,j);
+ }
+ }
+ return R::LA::sign_of_determinant(CGAL_MOVE(m));
+ }
+
+#ifdef CGAL_CXX11
+ template <class...U,class=typename std::enable_if<(sizeof...(U)>=3)>::type>
+ result_type operator()(U&&...u) const {
+ return operator()({std::forward<U>(u)...});
+ }
+
+ template <class V>
+ result_type operator()(std::initializer_list<V> l) const {
+ return operator()(l.begin(),l.end());
+ }
+#else
+ //TODO
+#endif
+};
+}
+
+CGAL_KD_DEFAULT_FUNCTOR(Orientation_of_vectors_tag,(CartesianDKernelFunctors::Orientation_of_vectors<K>),(Vector_tag),(Point_dimension_tag,Compute_vector_cartesian_coordinate_tag));
+
+namespace CartesianDKernelFunctors {
+template<class R_> struct Linear_rank : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Linear_rank)
+ typedef R_ R;
+ typedef typename Get_type<R, Vector_tag>::type Vector;
+ // Computing a sensible Uncertain<int> is not worth it
+ typedef int result_type;
+ typedef typename R::LA::Dynamic_matrix Matrix;
+
+ template<class Iter>
+ result_type operator()(Iter f, Iter e)const{
+ typename Get_functor<R, Compute_vector_cartesian_coordinate_tag>::type c(this->kernel());
+ typename Get_functor<R, Point_dimension_tag>::type vd(this->kernel());
+ std::ptrdiff_t n=std::distance(f,e);
+ if (n==0) return 0;
+ Vector const& v0 = *f;
+ // FIXME: Uh? Using it on a vector ?!
+ int d=vd(v0);
+ Matrix m(d,n);
+ for(int j=0;j<d;++j){
+ m(j,0)=c(v0,j);
+ }
+ for(int i=1; ++f!=e; ++i){
+ Vector const& v = *f;
+ for(int j=0;j<d;++j){
+ m(j,i)=c(v,j);
+ }
+ }
+ return R::LA::rank(CGAL_MOVE(m));
+ }
+};
+}
+
+CGAL_KD_DEFAULT_FUNCTOR(Linear_rank_tag,(CartesianDKernelFunctors::Linear_rank<K>),(Vector_tag),(Point_dimension_tag,Compute_vector_cartesian_coordinate_tag));
+
+namespace CartesianDKernelFunctors {
+template<class R_> struct Linearly_independent : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Linearly_independent)
+ typedef R_ R;
+ typedef typename Get_type<R, Bool_tag>::type result_type;
+
+ template<class Iter>
+ result_type operator()(Iter f, Iter e)const{
+ typename Get_functor<R, Point_dimension_tag>::type vd(this->kernel());
+ std::ptrdiff_t n=std::distance(f,e);
+ // FIXME: Uh? Using it on a vector ?!
+ int d=vd(*f);
+ if (n>d) return false;
+ typename Get_functor<R, Linear_rank_tag>::type lr(this->kernel());
+ return lr(f,e) == n;
+ }
+};
+}
+
+CGAL_KD_DEFAULT_FUNCTOR(Linearly_independent_tag,(CartesianDKernelFunctors::Linearly_independent<K>),(Vector_tag),(Point_dimension_tag,Linear_rank_tag));
+
+namespace CartesianDKernelFunctors {
+template<class R_> struct Contained_in_linear_hull : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Contained_in_linear_hull)
+ typedef R_ R;
+ typedef typename Get_type<R, Vector_tag>::type Vector;
+ // Computing a sensible Uncertain<bool> is not worth it
+ typedef bool result_type;
+ typedef typename R::LA::Dynamic_matrix Matrix;
+
+ template<class Iter,class V>
+ result_type operator()(Iter f, Iter e,V const&w)const{
+ typename Get_functor<R, Compute_vector_cartesian_coordinate_tag>::type c(this->kernel());
+ typename Get_functor<R, Point_dimension_tag>::type vd(this->kernel());
+ std::ptrdiff_t n=std::distance(f,e);
+ if (n==0) return false;
+ // FIXME: Uh? Using it on a vector ?!
+ int d=vd(w);
+ Matrix m(d,n+1);
+ for(int i=0; f!=e; ++f,++i){
+ Vector const& v = *f;
+ for(int j=0;j<d;++j){
+ m(j,i)=c(v,j);
+ }
+ }
+ for(int j=0;j<d;++j){
+ m(j,n)=c(w,j);
+ }
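+ // w lies in the linear hull iff appending it as an extra column does not increase the rank.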
+ int r1 = R::LA::rank(m);
+ // FIXME: Don't use eigen directly, go through an interface in LA...
+ m.conservativeResize(Eigen::NoChange, n);
+ int r2 = R::LA::rank(CGAL_MOVE(m));
+ return r1 == r2;
+ // TODO: This is very very far from optimal...
+ }
+};
+}
+
+CGAL_KD_DEFAULT_FUNCTOR(Contained_in_linear_hull_tag,(CartesianDKernelFunctors::Contained_in_linear_hull<K>),(Vector_tag),(Point_dimension_tag,Compute_vector_cartesian_coordinate_tag));
+
+namespace CartesianDKernelFunctors {
+template<class R_> struct Affine_rank : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Affine_rank)
+ typedef R_ R;
+ typedef typename Get_type<R, Point_tag>::type Point;
+ // Computing a sensible Uncertain<int> is not worth it
+ typedef int result_type;
+ typedef typename R::LA::Dynamic_matrix Matrix;
+
+ template<class Iter>
+ result_type operator()(Iter f, Iter e)const{
+ typename Get_functor<R, Compute_point_cartesian_coordinate_tag>::type c(this->kernel());
+ typename Get_functor<R, Point_dimension_tag>::type pd(this->kernel());
+ int n=(int)std::distance(f,e);
+ if (--n<=0) return n;
+ Point const& p0 = *f;
+ int d=pd(p0);
+ Matrix m(d,n);
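+ // Each column holds a difference vector p_i - p0; the affine rank is the linear rank of these columns.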
+ for(int i=0; ++f!=e; ++i){
+ Point const& p = *f;
+ for(int j=0;j<d;++j){
+ m(j,i)=c(p,j)-c(p0,j);
+ // TODO: cache p0[j] in case it is computed?
+ }
+ }
+ return R::LA::rank(CGAL_MOVE(m));
+ }
+};
+}
+
+CGAL_KD_DEFAULT_FUNCTOR(Affine_rank_tag,(CartesianDKernelFunctors::Affine_rank<K>),(Point_tag),(Point_dimension_tag,Compute_point_cartesian_coordinate_tag));
+
+namespace CartesianDKernelFunctors {
+template<class R_> struct Affinely_independent : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Affinely_independent)
+ typedef R_ R;
+ typedef typename Get_type<R, Bool_tag>::type result_type;
+
+ template<class Iter>
+ result_type operator()(Iter f, Iter e)const{
+ typename Get_functor<R, Point_dimension_tag>::type pd(this->kernel());
+ std::ptrdiff_t n=std::distance(f,e);
+ int d=pd(*f);
+ if (--n>d) return false;
+ typename Get_functor<R, Affine_rank_tag>::type ar(this->kernel());
+ return ar(f,e) == n;
+ }
+};
+}
+
+CGAL_KD_DEFAULT_FUNCTOR(Affinely_independent_tag,(CartesianDKernelFunctors::Affinely_independent<K>),(Point_tag),(Point_dimension_tag,Affine_rank_tag));
+
+namespace CartesianDKernelFunctors {
+template<class R_> struct Contained_in_simplex : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Contained_in_simplex)
+ typedef R_ R;
+ typedef typename Get_type<R, Point_tag>::type Point;
+ // Computing a sensible Uncertain<*> is not worth it
+ // typedef typename Get_type<R, Boolean_tag>::type result_type;
+ typedef bool result_type;
+ typedef typename Increment_dimension<typename R::Default_ambient_dimension>::type D1;
+ typedef typename Increment_dimension<typename R::Max_ambient_dimension>::type D2;
+ typedef typename R::LA::template Rebind_dimension<D1,D2>::Other LA;
+ typedef typename LA::Dynamic_matrix Matrix;
+ typedef typename LA::Dynamic_vector DynVec;
+ typedef typename LA::Vector Vec;
+
+ template<class Iter, class P>
+ result_type operator()(Iter f, Iter e, P const&q)const{
+ typename Get_functor<R, Compute_point_cartesian_coordinate_tag>::type c(this->kernel());
+ typename Get_functor<R, Point_dimension_tag>::type pd(this->kernel());
+ std::ptrdiff_t n=std::distance(f,e);
+ if (n==0) return false;
+ int d=pd(q);
+ Matrix m(d+1,n);
+ DynVec a(n);
+ // FIXME: Should use the proper vector constructor (Iterator_and_last)
+ Vec b(d+1);
+ for(int j=0;j<d;++j) b[j]=c(q,j);
+ b[d]=1;
+
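+ // Each column of m is a simplex vertex with a homogeneous 1 appended; solving m*a = b yields the barycentric coordinates of q.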
+ for(int i=0; f!=e; ++i,++f){
+ Point const& p = *f;
+ for(int j=0;j<d;++j){
+ m(j,i)=c(p,j);
+ }
+ m(d,i)=1;
+ }
+ // If the simplex has full dimension, a solution always exists; only the signs need to be checked.
+ if (n == d+1)
+ LA::solve(a,CGAL_MOVE(m),CGAL_MOVE(b));
+ else if (!LA::solve_and_check(a,CGAL_MOVE(m),CGAL_MOVE(b)))
+ return false;
+ for(int i=0;i<n;++i){
+ if (a[i]<0) return false;
+ }
+ return true;
+ }
+};
+}
+
+CGAL_KD_DEFAULT_FUNCTOR(Contained_in_simplex_tag,(CartesianDKernelFunctors::Contained_in_simplex<K>),(Point_tag),(Point_dimension_tag,Compute_point_cartesian_coordinate_tag));
+
+namespace CartesianDKernelFunctors {
+ namespace internal {
+ template<class Ref_>
+ struct Matrix_col_access {
+ typedef Ref_ result_type;
+ int col;
+ Matrix_col_access(int r):col(r){}
+ template<class Mat> Ref_ operator()(Mat const& m, std::ptrdiff_t row)const{
+ return m(row,col);
+ }
+ };
+ }
+template<class R_> struct Linear_base : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Linear_base)
+ typedef R_ R;
+ typedef typename Get_type<R, Vector_tag>::type Vector;
+ typedef typename Get_type<R, FT_tag>::type FT;
+ typedef void result_type;
+ typedef typename R::LA::Dynamic_matrix Matrix;
+
+ template<class Iter, class Oter>
+ result_type operator()(Iter f, Iter e, Oter&o)const{
+ typename Get_functor<R, Compute_vector_cartesian_coordinate_tag>::type c(this->kernel());
+ typename Get_functor<R, Point_dimension_tag>::type vd(this->kernel());
+ typename Get_functor<R, Construct_ttag<Vector_tag> >::type cv(this->kernel());
+ std::ptrdiff_t n=std::distance(f,e);
+ if (n==0) return;
+ Vector const& v0 = *f;
+ // FIXME: Uh? Using it on a vector ?!
+ int d=vd(v0);
+ Matrix m(d,n);
+ for(int j=0;j<d;++j){
+ m(0,j)=c(v0,j);
+ }
+ for(int i=1; ++f!=e; ++i){
+ Vector const& v = *f;
+ for(int j=0;j<d;++j){
+ m(i,j)=c(v,j);
+ }
+ }
+ Matrix b = R::LA::basis(CGAL_MOVE(m));
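+ // Each column of the basis matrix becomes one output vector, read through a column iterator.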
+ for(int i=0; i < R::LA::columns(b); ++i){
+ //*o++ = Vector(b.col(i));
+ typedef
+#ifdef CGAL_CXX11
+ decltype(std::declval<const Matrix>()(0,0))
+#else
+ FT
+#endif
+ Ref;
+ typedef Iterator_from_indices<Matrix, FT, Ref,
+ internal::Matrix_col_access<Ref> > IFI;
+ *o++ = cv(IFI(b,0,i),IFI(b,d,i));
+ }
+ }
+};
+}
+
+CGAL_KD_DEFAULT_FUNCTOR(Linear_base_tag,(CartesianDKernelFunctors::Linear_base<K>),(Vector_tag),(Point_dimension_tag,Compute_vector_cartesian_coordinate_tag));
+
+#if 0
+namespace CartesianDKernelFunctors {
+template<class R_,bool=boost::is_same<typename R_::Point,typename R_::Vector>::value> struct Orientation : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Orientation)
+ typedef R_ R;
+ typedef typename Get_type<R, Vector_tag>::type Vector;
+ typedef typename Get_type<R, Point_tag>::type Point;
+ typedef typename Get_type<R, Orientation_tag>::type result_type;
+ typedef typename Get_functor<R, Orientation_of_points_tag>::type OP;
+ typedef typename Get_functor<R, Orientation_of_vectors_tag>::type OV;
+
+ //FIXME!!!
+ //when Point and Vector are distinct types, the dispatch should be made
+ //in a way that doesn't instantiate a conversion from Point to Vector
+ template<class Iter>
+ result_type operator()(Iter const&f, Iter const& e)const{
+ typename Get_functor<R, Point_dimension_tag>::type pd(this->kernel());
+ typename std::iterator_traits<Iter>::difference_type d=std::distance(f,e);
+ int dim=pd(*f); // BAD
+ if(d==dim) return OV(this->kernel())(f,e);
+ CGAL_assertion(d==dim+1);
+ return OP(this->kernel())(f,e);
+ }
+ //TODO: version that takes objects directly instead of iterators
+};
+
+template<class R_> struct Orientation<R_,false> : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Orientation)
+ typedef R_ R;
+ typedef typename Get_type<R, Vector_tag>::type Vector;
+ typedef typename Get_type<R, Point_tag>::type Point;
+ typedef typename Get_type<R, Orientation_tag>::type result_type;
+ typedef typename Get_functor<R, Orientation_of_points_tag>::type OP;
+ typedef typename Get_functor<R, Orientation_of_vectors_tag>::type OV;
+ typedef typename R::LA::Square_matrix Matrix;
+
+ //FIXME!!!
+ //when Point and Vector are distinct types, the dispatch should be made
+ //in a way that doesn't instantiate a conversion from Point to Vector
+ template<class Iter>
+ typename boost::enable_if<is_iterator_to<Iter,Point>,result_type>::type
+ operator()(Iter const&f, Iter const& e)const{
+ return OP(this->kernel())(f,e);
+ }
+ template<class Iter>
+ typename boost::enable_if<is_iterator_to<Iter,Vector>,result_type>::type
+ operator()(Iter const&f, Iter const& e)const{
+ return OV(this->kernel())(f,e);
+ }
+ //TODO: version that takes objects directly instead of iterators
+};
+}
+#endif
+
+namespace CartesianDKernelFunctors {
+template<class R_> struct Power_side_of_power_sphere_raw : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Power_side_of_power_sphere_raw)
+ typedef R_ R;
+ typedef typename Get_type<R, RT_tag>::type RT;
+ typedef typename Get_type<R, FT_tag>::type FT;
+ typedef typename Get_type<R, Point_tag>::type Point;
+ typedef typename Get_type<R, Oriented_side_tag>::type result_type;
+ typedef typename Increment_dimension<typename R::Default_ambient_dimension>::type D1;
+ typedef typename Increment_dimension<typename R::Max_ambient_dimension>::type D2;
+ typedef typename R::LA::template Rebind_dimension<D1,D2>::Other LA;
+ typedef typename LA::Square_matrix Matrix;
+
+ template<class IterP, class IterW, class Pt, class Wt>
+ result_type operator()(IterP f, IterP const& e, IterW fw, Pt const& p0, Wt const& w0) const {
+ typedef typename Get_functor<R, Squared_distance_to_origin_tag>::type Sqdo;
+ typename Get_functor<R, Compute_point_cartesian_coordinate_tag>::type c(this->kernel());
+ typename Get_functor<R, Point_dimension_tag>::type pd(this->kernel());
+
+ int d=pd(p0);
+ Matrix m(d+1,d+1);
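+ // Row i holds the coordinates of p_i relative to p0, plus a lifted column encoding the power (weighted) distance; the result is the sign of this (d+1)x(d+1) determinant, negated below for odd d.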
+ if(CGAL::Is_stored<Sqdo>::value) {
+ Sqdo sqdo(this->kernel());
+ FT const& h0 = sqdo(p0) - w0;
+ for(int i=0;f!=e;++f,++fw,++i) {
+ Point const& p=*f;
+ for(int j=0;j<d;++j){
+ RT const& x=c(p,j);
+ m(i,j)=x-c(p0,j);
+ }
+ m(i,d) = sqdo(p) - *fw - h0;
+ }
+ } else {
+ for(int i=0;f!=e;++f,++fw,++i) {
+ Point const& p=*f;
+ m(i,d) = w0 - *fw;
+ for(int j=0;j<d;++j){
+ RT const& x=c(p,j);
+ m(i,j)=x-c(p0,j);
+ m(i,d)+=CGAL::square(m(i,j));
+ }
+ }
+ }
+ if(d%2)
+ return -LA::sign_of_determinant(CGAL_MOVE(m));
+ else
+ return LA::sign_of_determinant(CGAL_MOVE(m));
+ }
+};
+}
+
+CGAL_KD_DEFAULT_FUNCTOR(Power_side_of_power_sphere_raw_tag,(CartesianDKernelFunctors::Power_side_of_power_sphere_raw<K>),(Point_tag),(Point_dimension_tag,Squared_distance_to_origin_tag,Compute_point_cartesian_coordinate_tag));
+
+// TODO: make Side_of_oriented_sphere call Power_side_of_power_sphere_raw
+namespace CartesianDKernelFunctors {
+template<class R_> struct Side_of_oriented_sphere : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Side_of_oriented_sphere)
+ typedef R_ R;
+ typedef typename Get_type<R, RT_tag>::type RT;
+ typedef typename Get_type<R, Point_tag>::type Point;
+ typedef typename Get_type<R, Oriented_side_tag>::type result_type;
+ typedef typename Increment_dimension<typename R::Default_ambient_dimension>::type D1;
+ typedef typename Increment_dimension<typename R::Max_ambient_dimension>::type D2;
+ typedef typename R::LA::template Rebind_dimension<D1,D2>::Other LA;
+ typedef typename LA::Square_matrix Matrix;
+
+ template<class Iter>
+ result_type operator()(Iter f, Iter const& e)const{
+ Point const& p0=*f++; // *--e ?
+ return this->operator()(f,e,p0);
+ }
+
+ template<class Iter>
+ result_type operator()(Iter f, Iter const& e, Point const& p0) const {
+ typedef typename Get_functor<R, Squared_distance_to_origin_tag>::type Sqdo;
+ typename Get_functor<R, Compute_point_cartesian_coordinate_tag>::type c(this->kernel());
+ typename Get_functor<R, Point_dimension_tag>::type pd(this->kernel());
+
+ int d=pd(p0);
+ Matrix m(d+1,d+1);
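+ // Row i holds p_i - p0; the last column is |p_i|^2 - |p0|^2 when squared distances are cached, or |p_i - p0|^2 otherwise (both give the same determinant sign).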
+ if(CGAL::Is_stored<Sqdo>::value) {
+ Sqdo sqdo(this->kernel());
+ for(int i=0;f!=e;++f,++i) {
+ Point const& p=*f;
+ for(int j=0;j<d;++j){
+ RT const& x=c(p,j);
+ m(i,j)=x-c(p0,j);
+ }
+ m(i,d) = sqdo(p) - sqdo(p0);
+ }
+ } else {
+ for(int i=0;f!=e;++f,++i) {
+ Point const& p=*f;
+ m(i,d) = 0;
+ for(int j=0;j<d;++j){
+ RT const& x=c(p,j);
+ m(i,j)=x-c(p0,j);
+ m(i,d)+=CGAL::square(m(i,j));
+ }
+ }
+ }
+ if(d%2)
+ return -LA::sign_of_determinant(CGAL_MOVE(m));
+ else
+ return LA::sign_of_determinant(CGAL_MOVE(m));
+ }
+
+#ifdef CGAL_CXX11
+ template <class...U,class=typename std::enable_if<(sizeof...(U)>=4)>::type>
+ result_type operator()(U&&...u) const {
+ return operator()({std::forward<U>(u)...});
+ }
+
+ template <class P>
+ result_type operator()(std::initializer_list<P> l) const {
+ return operator()(l.begin(),l.end());
+ }
+#else
+ //TODO
+#endif
+};
+}
+
+CGAL_KD_DEFAULT_FUNCTOR(Side_of_oriented_sphere_tag,(CartesianDKernelFunctors::Side_of_oriented_sphere<K>),(Point_tag),(Point_dimension_tag,Squared_distance_to_origin_tag,Compute_point_cartesian_coordinate_tag));
+
+namespace CartesianDKernelFunctors {
+template <class R_> struct Construct_circumcenter : Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Construct_circumcenter)
+ typedef typename Get_type<R_, Point_tag>::type Point;
+ typedef Point result_type;
+ typedef typename Get_type<R_, FT_tag>::type FT;
+ template <class Iter>
+ result_type operator()(Iter f, Iter e)const{
+ typedef typename Get_type<R_, Point_tag>::type Point;
+ typedef typename R_::LA LA;
+ typename Get_functor<R_, Compute_point_cartesian_coordinate_tag>::type c(this->kernel());
+ typename Get_functor<R_, Construct_ttag<Point_tag> >::type cp(this->kernel());
+ typename Get_functor<R_, Point_dimension_tag>::type pd(this->kernel());
+ typename Get_functor<R_, Squared_distance_to_origin_tag>::type sdo(this->kernel());
+
+ Point const& p0=*f;
+ int d = pd(p0);
+ if (d+1 == std::distance(f,e))
+ {
+ // 2*(x-y).c == x^2-y^2
+ typedef typename LA::Square_matrix Matrix;
+ typedef typename LA::Vector Vec;
+ typedef typename LA::Construct_vector CVec;
+ FT const& n0 = sdo(p0);
+ Matrix m(d,d);
+ Vec b = typename CVec::Dimension()(d);
+ // Write the point coordinates as matrix rows.
+ int i;
+ for(i=0; ++f!=e; ++i) {
+ Point const& p=*f;
+ for(int j=0;j<d;++j) {
+ m(i,j)=2*(c(p,j)-c(p0,j));
+ b[i] = sdo(p) - n0;
+ }
+ }
+ CGAL_assertion (i == d);
+ Vec res = typename CVec::Dimension()(d);
+ //std::cout << "Mat: " << m << "\n Vec: " << one << std::endl;
+ LA::solve(res, CGAL_MOVE(m), CGAL_MOVE(b));
+ //std::cout << "Sol: " << res << std::endl;
+ return cp(d,LA::vector_begin(res),LA::vector_end(res));
+ }
+ else
+ {
+ /*
+ * Matrix P=(p1, p2, ...) (each point as a column)
+ * Matrix Q=2*t(p2-p1,p3-p1, ...) (each vector as a line)
+ * Matrix M: QP, adding a line of 1 at the top
+ * Vector B: (1, p2^2-p1^2, p3^2-p1^2, ...)
+ * Solve ML=B, the center of the sphere is PL
+ *
+ * It would likely be faster to write P then transpose, multiply,
+ * etc instead of doing it by hand.
+ */
+ // TODO: check for degenerate cases?
+
+ typedef typename R_::Max_ambient_dimension D2;
+ typedef typename R_::LA::template Rebind_dimension<Dynamic_dimension_tag,D2>::Other LAd;
+ typedef typename LAd::Square_matrix Matrix;
+ typedef typename LAd::Vector Vec;
+ typename Get_functor<R_, Scalar_product_tag>::type sp(this->kernel());
+ int k=static_cast<int>(std::distance(f,e));
+ Matrix m(k,k);
+ Vec b(k);
+ Vec l(k);
+ int j,i=0;
+ for(Iter f2=f;f2!=e;++f2,++i){
+ b(i)=m(i,i)=sdo(*f2);
+ j=0;
+ for(Iter f3=f;f3!=e;++f3,++j){
+ m(j,i)=m(i,j)=sp(*f2,*f3);
+ }
+ }
+ for(i=1;i<k;++i){
+ b(i)-=b(0);
+ for(j=0;j<k;++j){
+ m(i,j)=2*(m(i,j)-m(0,j));
+ }
+ }
+ for(j=0;j<k;++j) m(0,j)=1;
+ b(0)=1;
+
+ LAd::solve(l,CGAL_MOVE(m),CGAL_MOVE(b));
+
+ typename LA::Vector center=typename LA::Construct_vector::Dimension()(d);
+ for(i=0;i<d;++i) center(i)=0;
+ j=0;
+ for(Iter f2=f;f2!=e;++f2,++j){
+ for(i=0;i<d;++i){
+ center(i)+=l(j)*c(*f2,i);
+ }
+ }
+
+ return cp(LA::vector_begin(center),LA::vector_end(center));
+ }
+ }
+};
+}
+
+CGAL_KD_DEFAULT_FUNCTOR(Construct_circumcenter_tag,(CartesianDKernelFunctors::Construct_circumcenter<K>),(Point_tag),(Construct_ttag<Point_tag>,Compute_point_cartesian_coordinate_tag,Scalar_product_tag,Squared_distance_to_origin_tag,Point_dimension_tag));
+
+namespace CartesianDKernelFunctors {
+template <class R_> struct Squared_circumradius : Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Squared_circumradius)
+ typedef typename Get_type<R_, FT_tag>::type result_type;
+ template <class Iter>
+ result_type operator()(Iter f, Iter e)const{
+ typename Get_functor<R_, Construct_circumcenter_tag>::type cc(this->kernel());
+ typename Get_functor<R_, Squared_distance_tag>::type sd(this->kernel());
+ return sd(cc(f, e), *f);
+ }
+};
+}
+
+CGAL_KD_DEFAULT_FUNCTOR(Squared_circumradius_tag,(CartesianDKernelFunctors::Squared_circumradius<K>),(Point_tag),(Construct_circumcenter_tag,Squared_distance_tag));
+
+namespace CartesianDKernelFunctors {
+// TODO: implement it directly, it should be at least as fast as Side_of_oriented_sphere.
+template<class R_> struct Side_of_bounded_sphere : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Side_of_bounded_sphere)
+ typedef R_ R;
+ typedef typename Get_type<R, Point_tag>::type Point;
+ typedef typename Get_type<R, Bounded_side_tag>::type result_type;
+
+ template<class Iter>
+ result_type operator()(Iter f, Iter const& e) const {
+ Point const& p0 = *f++; // *--e ?
+ typename Get_functor<R, Point_dimension_tag>::type pd(this->kernel());
+ //FIXME: Doesn't work for non-full dimension.
+ CGAL_assertion (std::distance(f,e) == pd(p0)+1);
+ return operator() (f, e, p0);
+ }
+
+ template<class Iter>
+ result_type operator()(Iter const& f, Iter const& e, Point const& p0) const {
+ typename Get_functor<R, Side_of_oriented_sphere_tag>::type sos (this->kernel());
+ typename Get_functor<R, Orientation_of_points_tag>::type op (this->kernel());
+ // enum_cast is not very generic, but since this function isn't supposed to remain like this...
+ return enum_cast<Bounded_side> (sos (f, e, p0) * op (f, e));
+ }
+
+#ifdef CGAL_CXX11
+ template <class...U,class=typename std::enable_if<(sizeof...(U)>=4)>::type>
+ result_type operator()(U&&...u) const {
+ return operator()({std::forward<U>(u)...});
+ }
+
+ template <class P>
+ result_type operator()(std::initializer_list<P> l) const {
+ return operator()(l.begin(),l.end());
+ }
+#else
+ //TODO
+#endif
+};
+}
+
+CGAL_KD_DEFAULT_FUNCTOR(Side_of_bounded_sphere_tag,(CartesianDKernelFunctors::Side_of_bounded_sphere<K>),(Point_tag),(Side_of_oriented_sphere_tag,Orientation_of_points_tag));
+
+namespace CartesianDKernelFunctors {
+template<class R_> struct Side_of_bounded_circumsphere : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Side_of_bounded_circumsphere)
+ typedef typename Get_type<R_, Bounded_side_tag>::type result_type;
+
+ template<class Iter, class P>
+ result_type operator()(Iter f, Iter const& e, P const& p0) const {
+ // TODO: Special case when the dimension is full.
+ typename Get_functor<R_, Construct_circumcenter_tag>::type cc(this->kernel());
+ typename Get_functor<R_, Compare_distance_tag>::type cd(this->kernel());
+
+ return enum_cast<Bounded_side>(cd(cc(f, e), *f, p0));
+ }
+};
+}
+
+CGAL_KD_DEFAULT_FUNCTOR(Side_of_bounded_circumsphere_tag,(CartesianDKernelFunctors::Side_of_bounded_circumsphere<K>),(Point_tag),(Squared_distance_tag,Construct_circumcenter_tag));
+
+namespace CartesianDKernelFunctors {
+template<class R_> struct Point_to_vector : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Point_to_vector)
+ typedef R_ R;
+ typedef typename Get_type<R, RT_tag>::type RT;
+ typedef typename Get_type<R, Vector_tag>::type Vector;
+ typedef typename Get_type<R, Point_tag>::type Point;
+ typedef typename Get_functor<R, Construct_ttag<Vector_tag> >::type CV;
+ typedef typename Get_functor<R, Construct_ttag<Point_cartesian_const_iterator_tag> >::type CI;
+ typedef Vector result_type;
+ typedef Point argument_type;
+ result_type operator()(argument_type const&v)const{
+ CI ci(this->kernel());
+ return CV(this->kernel())(ci(v,Begin_tag()),ci(v,End_tag()));
+ }
+};
+}
+
+CGAL_KD_DEFAULT_FUNCTOR(Point_to_vector_tag,(CartesianDKernelFunctors::Point_to_vector<K>),(Point_tag,Vector_tag),(Construct_ttag<Vector_tag>, Construct_ttag<Point_cartesian_const_iterator_tag>));
+
+namespace CartesianDKernelFunctors {
+template<class R_> struct Vector_to_point : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Vector_to_point)
+ typedef R_ R;
+ typedef typename Get_type<R, RT_tag>::type RT;
+ typedef typename Get_type<R, Vector_tag>::type Vector;
+ typedef typename Get_type<R, Point_tag>::type Point;
+ typedef typename Get_functor<R, Construct_ttag<Point_tag> >::type CP;
+ typedef typename Get_functor<R, Construct_ttag<Vector_cartesian_const_iterator_tag> >::type CI;
+ typedef Point result_type;
+ typedef Vector argument_type;
+ result_type operator()(argument_type const&v)const{
+ CI ci(this->kernel());
+ return CP(this->kernel())(ci(v,Begin_tag()),ci(v,End_tag()));
+ }
+};
+}
+
+CGAL_KD_DEFAULT_FUNCTOR(Vector_to_point_tag,(CartesianDKernelFunctors::Vector_to_point<K>),(Point_tag,Vector_tag),(Construct_ttag<Point_tag>, Construct_ttag<Vector_cartesian_const_iterator_tag>));
+
+namespace CartesianDKernelFunctors {
+template<class R_> struct Opposite_vector : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Opposite_vector)
+ typedef R_ R;
+ typedef typename Get_type<R, RT_tag>::type RT;
+ typedef typename Get_type<R, Vector_tag>::type Vector;
+ typedef typename Get_functor<R, Construct_ttag<Vector_tag> >::type CV;
+ typedef typename Get_functor<R, Construct_ttag<Vector_cartesian_const_iterator_tag> >::type CI;
+ typedef Vector result_type;
+ typedef Vector argument_type;
+ result_type operator()(Vector const&v)const{
+ CI ci(this->kernel());
+ return CV(this->kernel())(make_transforming_iterator(ci(v,Begin_tag()),std::negate<RT>()),make_transforming_iterator(ci(v,End_tag()),std::negate<RT>()));
+ }
+};
+}
+
+CGAL_KD_DEFAULT_FUNCTOR(Opposite_vector_tag,(CartesianDKernelFunctors::Opposite_vector<K>),(Vector_tag),(Construct_ttag<Vector_tag>, Construct_ttag<Vector_cartesian_const_iterator_tag>));
+
+namespace CartesianDKernelFunctors {
+template<class R_> struct Scaled_vector : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Scaled_vector)
+ typedef R_ R;
+ typedef typename Get_type<R, FT_tag>::type FT;
+ typedef typename Get_type<R, Vector_tag>::type Vector;
+ typedef typename Get_functor<R, Construct_ttag<Vector_tag> >::type CV;
+ typedef typename Get_functor<R, Construct_ttag<Vector_cartesian_const_iterator_tag> >::type CI;
+ typedef Vector result_type;
+ typedef Vector first_argument_type;
+ typedef FT second_argument_type;
+ result_type operator()(Vector const&v,FT const& s)const{
+ CI ci(this->kernel());
+ return CV(this->kernel())(make_transforming_iterator(ci(v,Begin_tag()),Scale<FT>(s)),make_transforming_iterator(ci(v,End_tag()),Scale<FT>(s)));
+ }
+};
+}
+
+CGAL_KD_DEFAULT_FUNCTOR(Scaled_vector_tag,(CartesianDKernelFunctors::Scaled_vector<K>),(Vector_tag),(Construct_ttag<Vector_tag>, Construct_ttag<Vector_cartesian_const_iterator_tag>));
+
+namespace CartesianDKernelFunctors {
+template<class R_> struct Sum_of_vectors : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Sum_of_vectors)
+ typedef R_ R;
+ typedef typename Get_type<R, RT_tag>::type RT;
+ typedef typename Get_type<R, Vector_tag>::type Vector;
+ typedef typename Get_functor<R, Construct_ttag<Vector_tag> >::type CV;
+ typedef typename Get_functor<R, Construct_ttag<Vector_cartesian_const_iterator_tag> >::type CI;
+ typedef Vector result_type;
+ typedef Vector first_argument_type;
+ typedef Vector second_argument_type;
+ result_type operator()(Vector const&a, Vector const&b)const{
+ CI ci(this->kernel());
+ return CV(this->kernel())(make_transforming_pair_iterator(ci(a,Begin_tag()),ci(b,Begin_tag()),std::plus<RT>()),make_transforming_pair_iterator(ci(a,End_tag()),ci(b,End_tag()),std::plus<RT>()));
+ }
+};
+}
+
+CGAL_KD_DEFAULT_FUNCTOR(Sum_of_vectors_tag,(CartesianDKernelFunctors::Sum_of_vectors<K>),(Vector_tag),(Construct_ttag<Vector_tag>, Construct_ttag<Vector_cartesian_const_iterator_tag>));
+
+namespace CartesianDKernelFunctors {
+template<class R_> struct Difference_of_vectors : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Difference_of_vectors)
+ typedef R_ R;
+ typedef typename Get_type<R, RT_tag>::type RT;
+ typedef typename Get_type<R, Vector_tag>::type Vector;
+ typedef typename Get_functor<R, Construct_ttag<Vector_tag> >::type CV;
+ typedef typename Get_functor<R, Construct_ttag<Vector_cartesian_const_iterator_tag> >::type CI;
+ typedef Vector result_type;
+ typedef Vector first_argument_type;
+ typedef Vector second_argument_type;
+ result_type operator()(Vector const&a, Vector const&b)const{
+ CI ci(this->kernel());
+ return CV(this->kernel())(make_transforming_pair_iterator(ci(a,Begin_tag()),ci(b,Begin_tag()),std::minus<RT>()),make_transforming_pair_iterator(ci(a,End_tag()),ci(b,End_tag()),std::minus<RT>()));
+ }
+};
+}
+
+CGAL_KD_DEFAULT_FUNCTOR(Difference_of_vectors_tag,(CartesianDKernelFunctors::Difference_of_vectors<K>),(Vector_tag),(Construct_ttag<Vector_tag>, Construct_ttag<Vector_cartesian_const_iterator_tag>));
+
+namespace CartesianDKernelFunctors {
+template<class R_> struct Translated_point : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Translated_point)
+ typedef R_ R;
+ typedef typename Get_type<R, RT_tag>::type RT;
+ typedef typename Get_type<R, Vector_tag>::type Vector;
+ typedef typename Get_type<R, Point_tag>::type Point;
+ typedef typename Get_functor<R, Construct_ttag<Point_tag> >::type CP;
+ typedef typename Get_functor<R, Construct_ttag<Vector_cartesian_const_iterator_tag> >::type CVI;
+ typedef typename Get_functor<R, Construct_ttag<Point_cartesian_const_iterator_tag> >::type CPI;
+ typedef Point result_type;
+ typedef Point first_argument_type;
+ typedef Vector second_argument_type;
+ result_type operator()(Point const&a, Vector const&b)const{
+ CVI cvi(this->kernel());
+ CPI cpi(this->kernel());
+ return CP(this->kernel())(make_transforming_pair_iterator(cpi(a,Begin_tag()),cvi(b,Begin_tag()),std::plus<RT>()),make_transforming_pair_iterator(cpi(a,End_tag()),cvi(b,End_tag()),std::plus<RT>()));
+ }
+};
+}
+
+CGAL_KD_DEFAULT_FUNCTOR(Translated_point_tag,(CartesianDKernelFunctors::Translated_point<K>),(Point_tag, Vector_tag),(Construct_ttag<Point_tag>, Construct_ttag<Vector_cartesian_const_iterator_tag>, Construct_ttag<Point_cartesian_const_iterator_tag>));
+
+namespace CartesianDKernelFunctors {
+template<class R_> struct Difference_of_points : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Difference_of_points)
+ typedef R_ R;
+ typedef typename Get_type<R, RT_tag>::type RT;
+ typedef typename Get_type<R, Point_tag>::type Point;
+ typedef typename Get_type<R, Vector_tag>::type Vector;
+ typedef typename Get_functor<R, Construct_ttag<Vector_tag> >::type CV;
+ typedef typename Get_functor<R, Construct_ttag<Point_cartesian_const_iterator_tag> >::type CI;
+ typedef Vector result_type;
+ typedef Point first_argument_type;
+ typedef Point second_argument_type;
+ result_type operator()(Point const&a, Point const&b)const{
+ CI ci(this->kernel());
+ return CV(this->kernel())(make_transforming_pair_iterator(ci(a,Begin_tag()),ci(b,Begin_tag()),std::minus<RT>()),make_transforming_pair_iterator(ci(a,End_tag()),ci(b,End_tag()),std::minus<RT>()));
+ }
+};
+}
+
+CGAL_KD_DEFAULT_FUNCTOR(Difference_of_points_tag,(CartesianDKernelFunctors::Difference_of_points<K>),(Point_tag, Vector_tag),(Construct_ttag<Vector_tag>, Construct_ttag<Point_cartesian_const_iterator_tag>));
+
+namespace CartesianDKernelFunctors {
+template<class R_> struct Midpoint : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Midpoint)
+ typedef R_ R;
+ typedef typename Get_type<R, FT_tag>::type FT;
+ typedef typename Get_type<R, RT_tag>::type RT;
+ typedef typename Get_type<R, Point_tag>::type Point;
+ typedef typename Get_functor<R, Construct_ttag<Point_tag> >::type CP;
+ typedef typename Get_functor<R, Construct_ttag<Point_cartesian_const_iterator_tag> >::type CI;
+ typedef Point result_type;
+ typedef Point first_argument_type;
+ typedef Point second_argument_type;
+ // There is a division, but it will be cast to RT afterwards anyway, so maybe we could use RT.
+ struct Average : std::binary_function<FT,RT,FT> {
+ FT operator()(FT const&a, RT const&b)const{
+ return (a+b)/2;
+ }
+ };
+ result_type operator()(Point const&a, Point const&b)const{
+ CI ci(this->kernel());
+ //Divide<FT,int> half(2);
+ //return CP(this->kernel())(make_transforming_iterator(make_transforming_pair_iterator(ci.begin(a),ci.begin(b),std::plus<FT>()),half),make_transforming_iterator(make_transforming_pair_iterator(ci.end(a),ci.end(b),std::plus<FT>()),half));
+ return CP(this->kernel())(make_transforming_pair_iterator(ci(a,Begin_tag()),ci(b,Begin_tag()),Average()),make_transforming_pair_iterator(ci(a,End_tag()),ci(b,End_tag()),Average()));
+ }
+};
+}
+
+CGAL_KD_DEFAULT_FUNCTOR(Midpoint_tag,(CartesianDKernelFunctors::Midpoint<K>),(Point_tag),(Construct_ttag<Point_tag>, Construct_ttag<Point_cartesian_const_iterator_tag>));
+
+namespace CartesianDKernelFunctors {
+template<class R_> struct Squared_length : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Squared_length)
+ typedef R_ R;
+ typedef typename Get_type<R, RT_tag>::type RT;
+ typedef typename Get_type<R, Vector_tag>::type Vector;
+ typedef typename Get_functor<R, Construct_ttag<Vector_cartesian_const_iterator_tag> >::type CI;
+ typedef RT result_type;
+ typedef Vector argument_type;
+ result_type operator()(Vector const&a)const{
+ CI ci(this->kernel());
+ typename Algebraic_structure_traits<RT>::Square f;
+ // TODO: avoid this RT(0)+...
+ return std::accumulate(make_transforming_iterator(ci(a,Begin_tag()),f),make_transforming_iterator(ci(a,End_tag()),f),RT(0));
+ }
+};
+}
+
+CGAL_KD_DEFAULT_FUNCTOR(Squared_length_tag,(CartesianDKernelFunctors::Squared_length<K>),(Vector_tag),(Construct_ttag<Vector_cartesian_const_iterator_tag>));
+
+namespace CartesianDKernelFunctors {
+template<class R_> struct Squared_distance_to_origin : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Squared_distance_to_origin)
+ typedef R_ R;
+ typedef typename Get_type<R, RT_tag>::type RT;
+ typedef typename Get_type<R, Point_tag>::type Point;
+ typedef typename Get_functor<R, Construct_ttag<Point_cartesian_const_iterator_tag> >::type CI;
+ typedef RT result_type;
+ typedef Point argument_type;
+ result_type operator()(Point const&a)const{
+ CI ci(this->kernel());
+ typename Algebraic_structure_traits<RT>::Square f;
+ // TODO: avoid this RT(0)+...
+ return std::accumulate(make_transforming_iterator(ci(a,Begin_tag()),f),make_transforming_iterator(ci(a,End_tag()),f),RT(0));
+ }
+};
+}
+
+CGAL_KD_DEFAULT_FUNCTOR(Squared_distance_to_origin_tag,(CartesianDKernelFunctors::Squared_distance_to_origin<K>),(Point_tag),(Construct_ttag<Point_cartesian_const_iterator_tag>));
+
+namespace CartesianDKernelFunctors {
+template<class R_> struct Squared_distance : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Squared_distance)
+ typedef R_ R;
+ typedef typename Get_type<R, RT_tag>::type RT;
+ typedef typename Get_type<R, Point_tag>::type Point;
+ typedef typename Get_functor<R, Construct_ttag<Point_cartesian_const_iterator_tag> >::type CI;
+ typedef RT result_type;
+ typedef Point first_argument_type;
+ typedef Point second_argument_type;
+ struct Sq_diff : std::binary_function<RT,RT,RT> {
+ RT operator()(RT const&a, RT const&b)const{
+ return CGAL::square(a-b);
+ }
+ };
+ result_type operator()(Point const&a, Point const&b)const{
+ CI ci(this->kernel());
+ Sq_diff f;
+ // TODO: avoid this RT(0)+...
+ return std::accumulate(make_transforming_pair_iterator(ci(a,Begin_tag()),ci(b,Begin_tag()),f),make_transforming_pair_iterator(ci(a,End_tag()),ci(b,End_tag()),f),RT(0));
+ }
+};
+}
+
+CGAL_KD_DEFAULT_FUNCTOR(Squared_distance_tag,(CartesianDKernelFunctors::Squared_distance<K>),(Point_tag),(Construct_ttag<Point_cartesian_const_iterator_tag>));
+
+namespace CartesianDKernelFunctors {
+template<class R_> struct Scalar_product : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Scalar_product)
+ typedef R_ R;
+ typedef typename Get_type<R, RT_tag>::type RT;
+ typedef typename Get_type<R, Vector_tag>::type Vector;
+ typedef typename Get_functor<R, Construct_ttag<Vector_cartesian_const_iterator_tag> >::type CI;
+ typedef RT result_type;
+ typedef Vector first_argument_type;
+ typedef Vector second_argument_type;
+ result_type operator()(Vector const&a, Vector const&b)const{
+ CI ci(this->kernel());
+ std::multiplies<RT> f;
+ // TODO: avoid this RT(0)+...
+ return std::accumulate(
+ make_transforming_pair_iterator(ci(a,Begin_tag()),ci(b,Begin_tag()),f),
+ make_transforming_pair_iterator(ci(a, End_tag()),ci(b, End_tag()),f),
+ RT(0));
+ }
+};
+}
+
+CGAL_KD_DEFAULT_FUNCTOR(Scalar_product_tag,(CartesianDKernelFunctors::Scalar_product<K>),(Vector_tag),(Construct_ttag<Vector_cartesian_const_iterator_tag>));
+
+namespace CartesianDKernelFunctors {
+template<class R_> struct Compare_distance : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Compare_distance)
+ typedef R_ R;
+ typedef typename Get_type<R, Point_tag>::type Point;
+ typedef typename Get_functor<R, Squared_distance_tag>::type CSD;
+ typedef typename Get_type<R, Comparison_result_tag>::type result_type;
+ typedef Point first_argument_type;
+ typedef Point second_argument_type;
+ typedef Point third_argument_type; // why am I doing this already?
+ typedef Point fourth_argument_type;
+ result_type operator()(Point const&a, Point const&b, Point const&c)const{
+ CSD csd(this->kernel());
+ return CGAL_NTS compare(csd(a,b),csd(a,c));
+ }
+ result_type operator()(Point const&a, Point const&b, Point const&c, Point const&d)const{
+ CSD csd(this->kernel());
+ return CGAL_NTS compare(csd(a,b),csd(c,d));
+ }
+};
+}
+
+CGAL_KD_DEFAULT_FUNCTOR(Compare_distance_tag,(CartesianDKernelFunctors::Compare_distance<K>),(Point_tag),(Squared_distance_tag));
+
+namespace CartesianDKernelFunctors {
+template<class R_> struct Less_point_cartesian_coordinate : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Less_point_cartesian_coordinate)
+ typedef R_ R;
+ typedef typename Get_type<R, Bool_tag>::type result_type;
+ typedef typename Get_functor<R, Compute_point_cartesian_coordinate_tag>::type Cc;
+ // TODO: This is_exact thing should be reengineered.
+ // the goal is to have a way to tell: don't filter this
+ typedef typename CGAL::Is_exact<Cc> Is_exact;
+
+ template<class V,class W,class I>
+ result_type operator()(V const&a, W const&b, I i)const{
+ Cc c(this->kernel());
+ return c(a,i)<c(b,i);
+ }
+};
+}
+
+CGAL_KD_DEFAULT_FUNCTOR(Less_point_cartesian_coordinate_tag,(CartesianDKernelFunctors::Less_point_cartesian_coordinate<K>),(),(Compute_point_cartesian_coordinate_tag));
+
+namespace CartesianDKernelFunctors {
+template<class R_> struct Compare_point_cartesian_coordinate : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Compare_point_cartesian_coordinate)
+ typedef R_ R;
+ typedef typename Get_type<R, Comparison_result_tag>::type result_type;
+ typedef typename Get_functor<R, Compute_point_cartesian_coordinate_tag>::type Cc;
+ // TODO: This is_exact thing should be reengineered.
+ // the goal is to have a way to tell: don't filter this
+ typedef typename CGAL::Is_exact<Cc> Is_exact;
+
+ template<class V,class W,class I>
+ result_type operator()(V const&a, W const&b, I i)const{
+ Cc c(this->kernel());
+ return CGAL_NTS compare(c(a,i),c(b,i));
+ }
+};
+}
+
+CGAL_KD_DEFAULT_FUNCTOR(Compare_point_cartesian_coordinate_tag,(CartesianDKernelFunctors::Compare_point_cartesian_coordinate<K>),(),(Compute_point_cartesian_coordinate_tag));
+
+namespace CartesianDKernelFunctors {
+template<class R_> struct Compare_lexicographically : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Compare_lexicographically)
+ typedef R_ R;
+ typedef typename Get_type<R, Comparison_result_tag>::type result_type;
+ typedef typename Get_functor<R, Construct_ttag<Point_cartesian_const_iterator_tag> >::type CI;
+ // TODO: This is_exact thing should be reengineered.
+ // the goal is to have a way to tell: don't filter this
+ typedef typename CGAL::Is_exact<CI> Is_exact;
+
+ template<class V,class W>
+ result_type operator()(V const&a, W const&b)const{
+ CI c(this->kernel());
+#ifdef CGAL_CXX11
+ auto
+#else
+ typename CI::result_type
+#endif
+ a_begin=c(a,Begin_tag()),
+ b_begin=c(b,Begin_tag()),
+ a_end=c(a,End_tag());
+ result_type res;
+ // can't we do slightly better for Uncertain<*> ?
+ // after res=...; if(is_uncertain(res))return indeterminate<result_type>();
+ do res=CGAL_NTS compare(*a_begin++,*b_begin++);
+ while(a_begin!=a_end && res==EQUAL);
+ return res;
+ }
+};
+}
+
+CGAL_KD_DEFAULT_FUNCTOR(Compare_lexicographically_tag,(CartesianDKernelFunctors::Compare_lexicographically<K>),(),(Construct_ttag<Point_cartesian_const_iterator_tag>));
+
+namespace CartesianDKernelFunctors {
+template<class R_> struct Less_lexicographically : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Less_lexicographically)
+ typedef R_ R;
+ typedef typename Get_type<R, Bool_tag>::type result_type;
+ typedef typename Get_functor<R, Compare_lexicographically_tag>::type CL;
+ typedef typename CGAL::Is_exact<CL> Is_exact;
+
+ template <class V, class W>
+ result_type operator() (V const&a, W const&b) const {
+ CL c (this->kernel());
+ return c(a,b) < 0;
+ }
+};
+}
+
+CGAL_KD_DEFAULT_FUNCTOR(Less_lexicographically_tag,(CartesianDKernelFunctors::Less_lexicographically<K>),(),(Compare_lexicographically_tag));
+
+namespace CartesianDKernelFunctors {
+template<class R_> struct Less_or_equal_lexicographically : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Less_or_equal_lexicographically)
+ typedef R_ R;
+ typedef typename Get_type<R, Bool_tag>::type result_type;
+ typedef typename Get_functor<R, Compare_lexicographically_tag>::type CL;
+ typedef typename CGAL::Is_exact<CL> Is_exact;
+
+ template <class V, class W>
+ result_type operator() (V const&a, W const&b) const {
+ CL c (this->kernel());
+ return c(a,b) <= 0;
+ }
+};
+}
+
+CGAL_KD_DEFAULT_FUNCTOR(Less_or_equal_lexicographically_tag,(CartesianDKernelFunctors::Less_or_equal_lexicographically<K>),(),(Compare_lexicographically_tag));
+
+namespace CartesianDKernelFunctors {
+template<class R_> struct Equal_points : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Equal_points)
+ typedef R_ R;
+ typedef typename Get_type<R, Bool_tag>::type result_type;
+ typedef typename Get_functor<R, Construct_ttag<Point_cartesian_const_iterator_tag> >::type CI;
+ // TODO: This is_exact thing should be reengineered.
+ // the goal is to have a way to tell: don't filter this
+ typedef typename CGAL::Is_exact<CI> Is_exact;
+
+ template<class V,class W>
+ result_type operator()(V const&a, W const&b)const{
+ CI c(this->kernel());
+#ifdef CGAL_CXX11
+ auto
+#else
+ typename CI::result_type
+#endif
+ a_begin=c(a,Begin_tag()),
+ b_begin=c(b,Begin_tag()),
+ a_end=c(a,End_tag());
+ result_type res = true;
+ // Is using CGAL::possibly for Uncertain really an optimization?
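+ // Bitwise & (rather than &&) keeps this valid when result_type is an Uncertain<bool>.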
+ do res = res & (*a_begin++ == *b_begin++);
+ while(a_begin!=a_end && possibly(res));
+ return res;
+ }
+};
+}
+
+CGAL_KD_DEFAULT_FUNCTOR(Equal_points_tag,(CartesianDKernelFunctors::Equal_points<K>),(),(Construct_ttag<Point_cartesian_const_iterator_tag>));
+
+namespace CartesianDKernelFunctors {
+template<class R_> struct Oriented_side : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Oriented_side)
+ typedef R_ R;
+ typedef typename Get_type<R, Oriented_side_tag>::type result_type;
+ typedef typename Get_type<R, Point_tag>::type Point;
+ typedef typename Get_type<R, Hyperplane_tag>::type Hyperplane;
+ typedef typename Get_type<R, Sphere_tag>::type Sphere;
+ typedef typename Get_functor<R, Value_at_tag>::type VA;
+ typedef typename Get_functor<R, Hyperplane_translation_tag>::type HT;
+ typedef typename Get_functor<R, Squared_distance_tag>::type SD;
+ typedef typename Get_functor<R, Squared_radius_tag>::type SR;
+ typedef typename Get_functor<R, Center_of_sphere_tag>::type CS;
+
+ result_type operator()(Hyperplane const&h, Point const&p)const{
+ HT ht(this->kernel());
+ VA va(this->kernel());
+ return CGAL::compare(va(h,p),ht(h));
+ }
+ result_type operator()(Sphere const&s, Point const&p)const{
+ SD sd(this->kernel());
+ SR sr(this->kernel());
+ CS cs(this->kernel());
+ return CGAL::compare(sd(cs(s),p),sr(s));
+ }
+};
+}
+
+CGAL_KD_DEFAULT_FUNCTOR(Oriented_side_tag,(CartesianDKernelFunctors::Oriented_side<K>),(Point_tag,Sphere_tag,Hyperplane_tag),(Value_at_tag,Hyperplane_translation_tag,Squared_distance_tag,Squared_radius_tag,Center_of_sphere_tag));
+
+namespace CartesianDKernelFunctors {
+template<class R_> struct Has_on_positive_side : private Store_kernel<R_> {
+ CGAL_FUNCTOR_INIT_STORE(Has_on_positive_side)
+ typedef R_ R;
+ typedef typename Get_type<R, Bool_tag>::type result_type;
+ typedef typename Get_functor<R, Oriented_side_tag>::type OS;
+
+ template <class Obj, class Pt>
+ result_type operator()(Obj const&o, Pt const&p)const{
+ OS os(this->kernel());
+ return os(o,p) == ON_POSITIVE_SIDE;
+ }
+};
+}
+
+CGAL_KD_DEFAULT_FUNCTOR(Has_on_positive_side_tag,(CartesianDKernelFunctors::Has_on_positive_side<K>),(),(Oriented_side_tag));
+
+}
+#include <CGAL/NewKernel_d/Coaffine.h>
+#endif // CGAL_KERNEL_D_FUNCTION_OBJECTS_CARTESIAN_H
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/functor_properties.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/functor_properties.h
new file mode 100644
index 00000000..c25c4e2b
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/functor_properties.h
@@ -0,0 +1,40 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_EXACTNESS_H
+#define CGAL_EXACTNESS_H
+#include <boost/mpl/has_xxx.hpp>
+#include <CGAL/tags.h>
+namespace CGAL {
+
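+// Generates a trait (e.g. Is_exact<T>) that inherits from T::Is_exact when T provides it and from boost::false_type otherwise.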
+#define CGAL_STRAWBERRY(Is_pretty) \
+ namespace internal { \
+ BOOST_MPL_HAS_XXX_TRAIT_DEF(Is_pretty) \
+ } \
+ template<class T,bool=internal::has_##Is_pretty<T>::value> \
+ struct Is_pretty : boost::false_type {}; \
+ template<class T> \
+ struct Is_pretty<T,true> : T::Is_pretty {}
+
+CGAL_STRAWBERRY(Is_exact);
+CGAL_STRAWBERRY(Is_fast);
+CGAL_STRAWBERRY(Is_stored);
+#undef CGAL_STRAWBERRY
+}
+#endif // CGAL_EXACTNESS_H
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/functor_tags.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/functor_tags.h
new file mode 100644
index 00000000..b8e17886
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/functor_tags.h
@@ -0,0 +1,363 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_FUNCTOR_TAGS_H
+#define CGAL_FUNCTOR_TAGS_H
+#include <CGAL/tags.h> // for Null_tag
+#include <CGAL/NewKernel_d/utils.h>
+#ifdef CGAL_CXX11
+#include <type_traits>
+#include <utility>
+#endif
+#include <boost/type_traits.hpp>
+#include <boost/mpl/has_xxx.hpp>
+#include <boost/mpl/not.hpp>
+#include <boost/mpl/if.hpp>
+#include <boost/mpl/vector.hpp>
+#include <boost/mpl/empty.hpp>
+#include <boost/mpl/front.hpp>
+#include <boost/mpl/pop_front.hpp>
+namespace CGAL {
+
+ // Find a better place for this later
+
+ template <class K, class T, class=void> struct Get_type
+ : K::template Type<T> {};
+ template <class K, class F, class O=void, class=void> struct Get_functor
+ : K::template Functor<F, O> {};
+#ifdef CGAL_CXX11
+ template <class K, class T> using Type = typename Get_type<K, T>::type;
+ template <class K, class T> using Functor = typename Get_functor<K, T>::type;
+#endif
+
+ class Null_type {~Null_type();}; // no such object should be created
+
+ // To construct iterators
+ struct Begin_tag {};
+ struct End_tag {};
+
+ // Functor category
+ struct Predicate_tag {};
+ struct Construct_tag {};
+ struct Construct_iterator_tag {};
+ struct Compute_tag {};
+ struct Misc_tag {};
+
+ struct No_filter_tag {};
+
+ template<class>struct Construct_ttag {};
+ template<class>struct Convert_ttag {};
+
+ template <class K, class F, class=void, class=void> struct Get_functor_category { typedef Misc_tag type; };
+ template<class Tg, class Obj, class Base> struct Typedef_tag_type;
+ //template<class Kernel, class Tg> struct Read_tag_type {};
+
+ template<class Kernel, class Tg>
+ struct Provides_type
+ : Has_type_different_from<Get_type<Kernel, Tg>, Null_type> {};
+
+ template<class Kernel, class Tg, class O=void>
+ struct Provides_functor
+ : Has_type_different_from<Get_functor<Kernel, Tg, O>, Null_functor> {};
+
+ template<class K, class List, bool=boost::mpl::empty<List>::type::value>
+ struct Provides_functors : boost::mpl::and_ <
+ Provides_functor<K, typename boost::mpl::front<List>::type>,
+ Provides_functors<K, typename boost::mpl::pop_front<List>::type> > {};
+ template<class K, class List>
+ struct Provides_functors<K, List, true> : boost::true_type {};
+
+ template<class K, class List, bool=boost::mpl::empty<List>::type::value>
+ struct Provides_types : boost::mpl::and_ <
+ Provides_type<K, typename boost::mpl::front<List>::type>,
+ Provides_types<K, typename boost::mpl::pop_front<List>::type> > {};
+ template<class K, class List>
+ struct Provides_types<K, List, true> : boost::true_type {};
+
+ namespace internal { BOOST_MPL_HAS_XXX_TRAIT_NAMED_DEF(has_Type,template Type<Null_tag>,false) }
+ template<class Kernel, class Tg,
+ bool = internal::has_Type<Kernel>::value /* false */>
+ struct Provides_type_i : boost::false_type {};
+ template<class Kernel, class Tg>
+ struct Provides_type_i <Kernel, Tg, true>
+ : Has_type_different_from<typename Kernel::template Type<Tg>, Null_type> {};
+
+ //// This version does not like Functor<T,bool=false>
+ //namespace internal { BOOST_MPL_HAS_XXX_TEMPLATE_NAMED_DEF(has_Functor,Functor,false) }
+ // This version lets us use non-type template parameters, but fails with older EDG-based compilers (Intel 14).
+ namespace internal { BOOST_MPL_HAS_XXX_TRAIT_NAMED_DEF(has_Functor,template Functor<Null_tag>,false) }
+
+ template<class Kernel, class Tg, class O=void,
+ bool = internal::has_Functor<Kernel>::value /* false */>
+ struct Provides_functor_i : boost::false_type {};
+ template<class Kernel, class Tg, class O>
+ struct Provides_functor_i <Kernel, Tg, O, true>
+ : Has_type_different_from<typename Kernel::template Functor<Tg, O>, Null_functor> {};
+
+ // TODO: Refine this a bit.
+ template <class K, class T, class D=void,
+ //bool=Provides_functor<K,T>::value,
+ //bool=Provides_functor_i<K,T>::value,
+ bool = internal::has_Functor<K>::value
+ >
+ struct Inherit_functor : K::template Functor<T> {};
+ template <class K, class T, class D>
+ struct Inherit_functor <K, T, D, false> {};
+
+ template <class K, class T, bool=internal::has_Type<K>::value>
+ struct Inherit_type : K::template Type<T> {};
+ template <class K, class T>
+ struct Inherit_type <K, T, false> {};
+
+ struct Number_tag {};
+ struct Discrete_tag {};
+ struct Object_tag {};
+ template <class K, class T, class=void> struct Get_type_category {
+ // The lazy kernel uses it too eagerly,
+ // so it currently needs a default.
+ typedef Null_tag type;
+ };
+
+#define CGAL_DECL_OBJ_(X,Y) \
+ template<class Obj,class Base> \
+ struct Typedef_tag_type<X##_tag, Obj, Base> : Base { typedef Obj X; }; \
+ template<class K, class D> \
+ struct Get_type_category <K, X##_tag, D> { typedef Y##_tag type; }
+#define CGAL_DECL_OBJ(X,Y) struct X##_tag {}; \
+ CGAL_DECL_OBJ_(X,Y)
+
+ //namespace has_object { BOOST_MPL_HAS_XXX_TRAIT_DEF(X) }
+ //template<class Kernel>
+ //struct Provides_tag_type<Kernel, X##_tag> : has_object::has_##X<Kernel> {};
+ //template<class Kernel>
+ //struct Read_tag_type<Kernel, X##_tag> { typedef typename Kernel::X type; }
+
+ // Not exactly objects, but the extras can't hurt.
+ CGAL_DECL_OBJ(FT, Number);
+ CGAL_DECL_OBJ(RT, Number);
+
+ CGAL_DECL_OBJ(Bool, Discrete); // Boolean_tag is already taken, and is a template :-(
+ CGAL_DECL_OBJ(Comparison_result, Discrete);
+ CGAL_DECL_OBJ(Sign, Discrete);
+ CGAL_DECL_OBJ(Orientation, Discrete); // Note: duplicate with the functor tag!
+ CGAL_DECL_OBJ(Oriented_side, Discrete);
+ CGAL_DECL_OBJ(Bounded_side, Discrete);
+ CGAL_DECL_OBJ(Angle, Discrete);
+ CGAL_DECL_OBJ(Flat_orientation, Discrete);
+
+ CGAL_DECL_OBJ(Vector, Object);
+ CGAL_DECL_OBJ(Point, Object);
+ CGAL_DECL_OBJ(Segment, Object);
+ CGAL_DECL_OBJ(Sphere, Object);
+ CGAL_DECL_OBJ(Line, Object);
+ CGAL_DECL_OBJ(Direction, Object);
+ CGAL_DECL_OBJ(Hyperplane, Object);
+ CGAL_DECL_OBJ(Ray, Object);
+ CGAL_DECL_OBJ(Iso_box, Object);
+ CGAL_DECL_OBJ(Bbox, Object);
+ CGAL_DECL_OBJ(Aff_transformation, Object);
+ CGAL_DECL_OBJ(Weighted_point, Object);
+#undef CGAL_DECL_OBJ_
+#undef CGAL_DECL_OBJ
+
+// Intel fails with those, and they are not so useful.
+// CGAL_KD_DEFAULT_TYPE(RT_tag,(typename Get_type<K, FT_tag>::type),(),());
+// CGAL_KD_DEFAULT_TYPE(FT_tag,(CGAL::Quotient<typename Get_type<K, RT_tag>::type>),(),());
+
+#define CGAL_SMURF2(A,B) CGAL_KD_DEFAULT_TYPE(A##_tag,(typename Same_uncertainty_nt<B, typename Get_type<K,RT_tag>::type>::type),(RT_tag),())
+#define CGAL_SMURF1(A) CGAL_SMURF2(A,CGAL::A)
+ CGAL_SMURF2(Bool, bool);
+ CGAL_SMURF1(Sign);
+ CGAL_SMURF1(Comparison_result);
+ CGAL_SMURF1(Orientation);
+ CGAL_SMURF1(Oriented_side);
+ CGAL_SMURF1(Bounded_side);
+ CGAL_SMURF1(Angle);
+#undef CGAL_SMURF1
+#undef CGAL_SMURF2
+
+ // TODO: replace with Get_type_category
+ template<class> struct is_NT_tag { enum { value = false }; };
+ template<> struct is_NT_tag<FT_tag> { enum { value = true }; };
+ template<> struct is_NT_tag<RT_tag> { enum { value = true }; };
+
+ template<class> struct iterator_tag_traits {
+ enum { is_iterator = false, has_nth_element = false };
+ typedef Null_tag value_tag;
+ };
+
+#define CGAL_DECL_COMPUTE(X) struct X##_tag {}; \
+ template<class A,class B,class C>struct Get_functor_category<A,X##_tag,B,C>{typedef Compute_tag type;}
+ CGAL_DECL_COMPUTE(Compute_point_cartesian_coordinate);
+ CGAL_DECL_COMPUTE(Compute_vector_cartesian_coordinate);
+ CGAL_DECL_COMPUTE(Compute_homogeneous_coordinate);
+ CGAL_DECL_COMPUTE(Squared_distance);
+ CGAL_DECL_COMPUTE(Squared_distance_to_origin);
+ CGAL_DECL_COMPUTE(Squared_length);
+ CGAL_DECL_COMPUTE(Squared_radius);
+ CGAL_DECL_COMPUTE(Squared_circumradius);
+ CGAL_DECL_COMPUTE(Scalar_product);
+ CGAL_DECL_COMPUTE(Hyperplane_translation);
+ CGAL_DECL_COMPUTE(Value_at);
+ CGAL_DECL_COMPUTE(Point_weight);
+ CGAL_DECL_COMPUTE(Power_distance);
+ CGAL_DECL_COMPUTE(Power_distance_to_point);
+#undef CGAL_DECL_COMPUTE
+
+#define CGAL_DECL_ITER_OBJ(X,Y,Z,C) struct X##_tag {}; \
+ template<>struct iterator_tag_traits<X##_tag> { \
+ enum { is_iterator = true, has_nth_element = true }; \
+ typedef Y##_tag value_tag; \
+ typedef Z##_tag nth_element; \
+ typedef C##_tag container; \
+ }; \
+ template<class Obj,class Base> \
+ struct Typedef_tag_type<X##_tag, Obj, Base> : Base { typedef Obj X; }
+
+ //namespace has_object { BOOST_MPL_HAS_XXX_TRAIT_DEF(X) }
+ //template<class Kernel>
+ //struct Provides_tag_type<Kernel, X##_tag> : has_object::has_##X<Kernel> {};
+ //template<class Kernel>
+ //struct Read_tag_type<Kernel, X##_tag> { typedef typename Kernel::X type; }
+
+ CGAL_DECL_ITER_OBJ(Vector_cartesian_const_iterator, FT, Compute_vector_cartesian_coordinate, Vector);
+ CGAL_DECL_ITER_OBJ(Point_cartesian_const_iterator, FT, Compute_point_cartesian_coordinate, Point);
+#undef CGAL_DECL_ITER_OBJ
+
+ template<class>struct map_result_tag{typedef Null_type type;};
+ template<class T>struct map_result_tag<Construct_ttag<T> >{typedef T type;};
+
+ template<class A,class T,class B,class C>struct Get_functor_category<A,Construct_ttag<T>,B,C> :
+ boost::mpl::if_c<iterator_tag_traits<T>::is_iterator,
+ Construct_iterator_tag,
+ Construct_tag> {};
+
+ // Really?
+ template<class A,class T,class B,class C>struct Get_functor_category<A,Convert_ttag<T>,B,C>{typedef Misc_tag type;};
+
+#define CGAL_DECL_CONSTRUCT(X,Y) struct X##_tag {}; \
+ template<>struct map_result_tag<X##_tag>{typedef Y##_tag type;}; \
+ template<class A,class B,class C>struct Get_functor_category<A,X##_tag,B,C>{typedef Construct_tag type;}
+ CGAL_DECL_CONSTRUCT(Midpoint,Point);
+ CGAL_DECL_CONSTRUCT(Center_of_sphere,Point);
+ CGAL_DECL_CONSTRUCT(Point_of_sphere,Point);
+ CGAL_DECL_CONSTRUCT(Segment_extremity,Point);
+ CGAL_DECL_CONSTRUCT(Sum_of_vectors,Vector);
+ CGAL_DECL_CONSTRUCT(Difference_of_vectors,Vector);
+ CGAL_DECL_CONSTRUCT(Opposite_vector,Vector);
+ CGAL_DECL_CONSTRUCT(Scaled_vector,Vector);
+ CGAL_DECL_CONSTRUCT(Orthogonal_vector,Vector);
+ CGAL_DECL_CONSTRUCT(Difference_of_points,Vector);
+ CGAL_DECL_CONSTRUCT(Translated_point,Point);
+ CGAL_DECL_CONSTRUCT(Point_to_vector,Vector);
+ CGAL_DECL_CONSTRUCT(Vector_to_point,Point);
+ CGAL_DECL_CONSTRUCT(Construct_min_vertex,Point);
+ CGAL_DECL_CONSTRUCT(Construct_max_vertex,Point);
+ CGAL_DECL_CONSTRUCT(Construct_circumcenter,Point);
+ CGAL_DECL_CONSTRUCT(Point_drop_weight,Point);
+ CGAL_DECL_CONSTRUCT(Power_center,Weighted_point);
+#undef CGAL_DECL_CONSTRUCT
+#if 0
+#define CGAL_DECL_ITER_CONSTRUCT(X,Y) struct X##_tag {}; \
+ template<>struct map_result_tag<X##_tag>{typedef Y##_tag type;}; \
+ template<>struct map_functor_type<X##_tag>{typedef Construct_iterator_tag type;}
+ CGAL_DECL_ITER_CONSTRUCT(Construct_point_cartesian_const_iterator,Point_cartesian_const_iterator);
+ CGAL_DECL_ITER_CONSTRUCT(Construct_vector_cartesian_const_iterator,Vector_cartesian_const_iterator);
+#undef CGAL_DECL_ITER_CONSTRUCT
+#endif
+
+ //FIXME: choose a convention: prefix with Predicate_ ?
+#define CGAL_DECL_PREDICATE_(X) \
+ template<class A,class B,class C>struct Get_functor_category<A,X##_tag,B,C>{typedef Predicate_tag type;}
+#define CGAL_DECL_PREDICATE(X) struct X##_tag {}; \
+ CGAL_DECL_PREDICATE_(X)
+ CGAL_DECL_PREDICATE(Less_point_cartesian_coordinate);
+ CGAL_DECL_PREDICATE(Compare_point_cartesian_coordinate);
+ CGAL_DECL_PREDICATE(Compare_distance);
+ CGAL_DECL_PREDICATE(Compare_lexicographically);
+ CGAL_DECL_PREDICATE(Less_lexicographically);
+ CGAL_DECL_PREDICATE(Less_or_equal_lexicographically);
+ CGAL_DECL_PREDICATE(Equal_points);
+ CGAL_DECL_PREDICATE(Has_on_positive_side);
+ CGAL_DECL_PREDICATE_(Orientation); // duplicate with the type
+ CGAL_DECL_PREDICATE_(Oriented_side); // duplicate with the type
+ CGAL_DECL_PREDICATE(Orientation_of_points);
+ CGAL_DECL_PREDICATE(Orientation_of_vectors);
+ CGAL_DECL_PREDICATE(Side_of_oriented_sphere);
+ CGAL_DECL_PREDICATE(Side_of_bounded_sphere);
+ CGAL_DECL_PREDICATE(Side_of_bounded_circumsphere);
+ CGAL_DECL_PREDICATE(Contained_in_affine_hull);
+ CGAL_DECL_PREDICATE(In_flat_orientation);
+ CGAL_DECL_PREDICATE(In_flat_side_of_oriented_sphere);
+ CGAL_DECL_PREDICATE(Construct_flat_orientation); // Making it a predicate is a questionable choice, it should be possible to let it be a construction for some implementations. Not sure how to do that... TODO
+ CGAL_DECL_PREDICATE(Linear_rank);
+ CGAL_DECL_PREDICATE(Affine_rank);
+ CGAL_DECL_PREDICATE(Linearly_independent);
+ CGAL_DECL_PREDICATE(Affinely_independent);
+ CGAL_DECL_PREDICATE(Contained_in_linear_hull);
+ CGAL_DECL_PREDICATE(Contained_in_simplex);
+ CGAL_DECL_PREDICATE(Power_side_of_power_sphere_raw);
+ CGAL_DECL_PREDICATE(Power_side_of_power_sphere);
+ CGAL_DECL_PREDICATE(In_flat_power_side_of_power_sphere_raw);
+ CGAL_DECL_PREDICATE(In_flat_power_side_of_power_sphere);
+#undef CGAL_DECL_PREDICATE
+
+#define CGAL_DECL_MISC(X) struct X##_tag {}; \
+ template<class A,class B,class C>struct Get_functor_category<A,X##_tag,B,C>{typedef Misc_tag type;}
+ //TODO: split into _begin and _end ?
+ //CGAL_DECL_MISC(Construct_point_cartesian_const_iterator);
+ //CGAL_DECL_MISC(Construct_vector_cartesian_const_iterator);
+ CGAL_DECL_MISC(Point_dimension);
+ CGAL_DECL_MISC(Vector_dimension);
+ CGAL_DECL_MISC(Linear_base); // Find a more appropriate category?
+#undef CGAL_DECL_MISC
+
+
+ // Properties for LA
+ struct Has_extra_dimension_tag {};
+ struct Has_vector_plus_minus_tag {};
+ struct Has_vector_scalar_ops_tag {};
+ struct Has_dot_product_tag {};
+ struct Has_determinant_of_vectors_tag {};
+ struct Has_determinant_of_points_tag {};
+ struct Has_determinant_of_iterator_to_vectors_tag {};
+ struct Has_determinant_of_iterator_to_points_tag {};
+ struct Has_determinant_of_vectors_omit_last_tag {};
+ struct Stores_squared_norm_tag {};
+
+ template<class> struct Preserved_by_non_linear_extra_coordinate
+ : boost::false_type {};
+ template<> struct Preserved_by_non_linear_extra_coordinate
+ <Has_extra_dimension_tag> : boost::true_type {};
+ template<> struct Preserved_by_non_linear_extra_coordinate
+ <Has_determinant_of_vectors_tag> : boost::true_type {};
+ template<> struct Preserved_by_non_linear_extra_coordinate
+ <Has_determinant_of_points_tag> : boost::true_type {};
+ template<> struct Preserved_by_non_linear_extra_coordinate
+ <Has_determinant_of_iterator_to_vectors_tag> : boost::true_type {};
+ template<> struct Preserved_by_non_linear_extra_coordinate
+ <Has_determinant_of_iterator_to_points_tag> : boost::true_type {};
+ template<> struct Preserved_by_non_linear_extra_coordinate
+ <Has_determinant_of_vectors_omit_last_tag> : boost::true_type {};
+
+ // Kernel properties
+ struct Point_stores_squared_distance_to_origin_tag {};
+
+}
+#endif // CGAL_FUNCTOR_TAGS_H
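The tag machinery above is easier to read with a toy example. The sketch below shows the intended lookup pattern: a kernel publishes its types through a nested Type<Tag> template and its functors through Functor<Tag, Option>, while Get_type / Get_functor (simplified here, without the extra defaulted parameter used by the CGAL_KD_DEFAULT_* macros) merely forward to them. Toy_kernel, Toy_point and the Midpoint functor are hypothetical.

#include <iostream>

struct Point_tag {};     // stands in for the tags declared above
struct Midpoint_tag {};

struct Toy_point { double x; };

struct Toy_kernel {
  // Types are published through a nested Type<Tag> template...
  template <class Tag, class = void> struct Type {};
  template <class D> struct Type<Point_tag, D> { typedef Toy_point type; };

  // ...and functors through Functor<Tag, Option>.
  template <class Tag, class O = void, class = void> struct Functor {};
  template <class O, class D> struct Functor<Midpoint_tag, O, D> {
    struct type {
      Toy_point operator()(Toy_point a, Toy_point b) const {
        Toy_point r = { (a.x + b.x) / 2 };
        return r;
      }
    };
  };
};

// Simplified versions of the Get_type / Get_functor entry points above.
template <class K, class Tag> struct Get_type : K::template Type<Tag> {};
template <class K, class Tag, class O = void>
struct Get_functor : K::template Functor<Tag, O> {};

int main() {
  Get_type<Toy_kernel, Point_tag>::type p = {1.0}, q = {3.0};
  Get_functor<Toy_kernel, Midpoint_tag>::type mid;
  std::cout << mid(p, q).x << '\n';  // 2
}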
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/static_int.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/static_int.h
new file mode 100644
index 00000000..21858804
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/static_int.h
@@ -0,0 +1,61 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_STATIC_INT_H
+#define CGAL_STATIC_INT_H
+#include <CGAL/constant.h>
+
+namespace CGAL {
+template <class NT> struct static_zero {
+ operator NT() const { return constant<NT,0>(); }
+};
+template <class NT> struct static_one {
+ operator NT() const { return constant<NT,1>(); }
+};
+
+template <class NT> static_zero<NT> operator-(static_zero<NT>) { return static_zero<NT>(); }
+
+template <class NT> NT operator+(NT const& x, static_zero<NT>) { return x; }
+template <class NT> NT operator+(static_zero<NT>, NT const& x) { return x; }
+template <class NT> static_zero<NT> operator+(static_zero<NT>, static_zero<NT>) { return static_zero<NT>(); }
+template <class NT> static_one<NT> operator+(static_zero<NT>, static_one<NT>) { return static_one<NT>(); }
+template <class NT> static_one<NT> operator+(static_one<NT>, static_zero<NT>) { return static_one<NT>(); }
+
+template <class NT> NT operator-(NT const& x, static_zero<NT>) { return x; }
+template <class NT> NT operator-(static_zero<NT>, NT const& x) { return -x; }
+template <class NT> static_zero<NT> operator-(static_zero<NT>, static_zero<NT>) { return static_zero<NT>(); }
+template <class NT> static_zero<NT> operator-(static_one<NT>, static_one<NT>) { return static_zero<NT>(); }
+template <class NT> static_one<NT> operator-(static_one<NT>, static_zero<NT>) { return static_one<NT>(); }
+
+template <class NT> NT operator*(NT const& x, static_one<NT>) { return x; }
+template <class NT> NT operator*(static_one<NT>, NT const& x) { return x; }
+template <class NT> static_zero<NT> operator*(NT const&, static_zero<NT>) { return static_zero<NT>(); }
+template <class NT> static_zero<NT> operator*(static_zero<NT>, NT const&) { return static_zero<NT>(); }
+template <class NT> static_zero<NT> operator*(static_zero<NT>, static_zero<NT>) { return static_zero<NT>(); }
+template <class NT> static_one<NT> operator*(static_one<NT>, static_one<NT>) { return static_one<NT>(); }
+template <class NT> static_zero<NT> operator*(static_zero<NT>, static_one<NT>) { return static_zero<NT>(); }
+template <class NT> static_zero<NT> operator*(static_one<NT>, static_zero<NT>) { return static_zero<NT>(); }
+
+template <class NT> NT operator/(NT const& x, static_one<NT>) { return x; }
+template <class NT> static_zero<NT> operator/(static_zero<NT>, NT const&) { return static_zero<NT>(); }
+template <class NT> static_zero<NT> operator/(static_zero<NT>, static_one<NT>) { return static_zero<NT>(); }
+template <class NT> static_one<NT> operator/(static_one<NT>, static_one<NT>) { return static_one<NT>(); }
+
+}
+#endif // CGAL_STATIC_INT_H
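A standalone sketch of how the static_zero / static_one overloads above compose: arithmetic with a statically-known 0 or 1 either returns the other operand untouched or propagates another static constant, so no runtime work is generated. Only the few overloads needed for the example are reproduced, and NT(0) stands in for CGAL's constant<NT,i>() helper.

#include <iostream>

template <class NT> struct static_zero { operator NT() const { return NT(0); } };
template <class NT> struct static_one  { operator NT() const { return NT(1); } };

// Adding a known zero just returns the other operand.
template <class NT> NT operator+(NT const& x, static_zero<NT>) { return x; }
template <class NT> NT operator+(static_zero<NT>, NT const& x) { return x; }
// Multiplying by a known zero yields a zero of static type, so the
// simplification keeps propagating through larger expressions.
template <class NT> static_zero<NT> operator*(NT const&, static_zero<NT>) { return static_zero<NT>(); }

int main() {
  double x = 2.5;
  double y = x + static_zero<double>();        // no addition performed
  double z = (x * static_zero<double>()) + x;  // the zero propagates; z == x
  std::cout << y << ' ' << z << '\n';          // 2.5 2.5
}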
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/store_kernel.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/store_kernel.h
new file mode 100644
index 00000000..253e1282
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/store_kernel.h
@@ -0,0 +1,104 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_STORE_KERNEL_H
+#define CGAL_STORE_KERNEL_H
+
+#include <CGAL/assertions.h>
+#include <boost/type_traits/is_empty.hpp>
+
+namespace CGAL {
+namespace internal {
+BOOST_MPL_HAS_XXX_TRAIT_DEF(Do_not_store_kernel)
+template<class T,bool=boost::is_empty<T>::value,bool=has_Do_not_store_kernel<T>::value> struct Do_not_store_kernel {
+ enum { value=false };
+ typedef Tag_false type;
+};
+template<class T> struct Do_not_store_kernel<T,true,false> {
+ enum { value=true };
+ typedef Tag_true type;
+};
+template<class T,bool b> struct Do_not_store_kernel<T,b,true> {
+ typedef typename T::Do_not_store_kernel type;
+ enum { value=type::value };
+};
+}
+
+template<class R_,bool=internal::Do_not_store_kernel<R_>::value>
+struct Store_kernel {
+ Store_kernel(){}
+ Store_kernel(R_ const&){}
+ enum { kernel_is_stored = false };
+ R_ kernel()const{return R_();}
+ typedef R_ reference_type;
+ void set_kernel(R_ const&){}
+};
+template<class R_>
+struct Store_kernel<R_,false> {
+ Store_kernel():rp(0){
+ CGAL_warning_msg(true,"I should know my kernel");
+ }
+ Store_kernel(R_ const& r):rp(&r){}
+ enum { kernel_is_stored = true };
+ R_ const& kernel()const{
+ CGAL_warning_msg(rp!=0,"I should know my kernel");
+ return *rp;
+ }
+ typedef R_ const& reference_type;
+ void set_kernel(R_ const&r){rp=&r;}
+ private:
+ R_ const* rp;
+};
+
+//For a second kernel. TODO: find something more elegant
+template<class R_,bool=internal::Do_not_store_kernel<R_>::value>
+struct Store_kernel2 {
+ Store_kernel2(){}
+ Store_kernel2(R_ const&){}
+ enum { kernel2_is_stored = false };
+ R_ kernel2()const{return R_();}
+ typedef R_ reference2_type;
+ void set_kernel2(R_ const&){}
+};
+template<class R_>
+struct Store_kernel2<R_,false> {
+ Store_kernel2(){
+ //CGAL_warning_msg(true,"I should know my kernel");
+ }
+ Store_kernel2(R_ const& r):rp(&r){}
+ enum { kernel2_is_stored = true };
+ R_ const& kernel2()const{
+    CGAL_warning_msg(rp!=0,"I should know my kernel");
+ return *rp;
+ }
+ typedef R_ const& reference2_type;
+ void set_kernel2(R_ const&r){rp=&r;}
+ private:
+ R_ const* rp;
+};
+}
+#define CGAL_BASE_INIT(X,Y) \
+ X():Y(){} \
+ X(R_ const&r):Y(r){}
+#define CGAL_FUNCTOR_INIT_STORE(X) CGAL_BASE_INIT(X,Store_kernel<R_>)
+#define CGAL_FUNCTOR_INIT_IGNORE(X) \
+ X(){} \
+ X(R_ const&){}
+
+#endif // CGAL_STORE_KERNEL_H
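The intent of Store_kernel is easier to see in isolation: an empty (stateless) kernel is recreated on demand and costs nothing to carry around, while a stateful kernel is kept by pointer. Below is a reduced sketch keyed on std::is_empty instead of the Do_not_store_kernel machinery above; both kernel types are hypothetical.

#include <iostream>
#include <type_traits>

template <class K, bool = std::is_empty<K>::value>
struct Store_kernel_sketch {              // empty kernel: recreate on demand
  Store_kernel_sketch() {}
  Store_kernel_sketch(K const&) {}
  enum { kernel_is_stored = false };
  K kernel() const { return K(); }
};
template <class K>
struct Store_kernel_sketch<K, false> {    // stateful kernel: keep a pointer
  Store_kernel_sketch(K const& k) : kp(&k) {}
  enum { kernel_is_stored = true };
  K const& kernel() const { return *kp; }
 private:
  K const* kp;
};

struct Stateless_kernel {};               // hypothetical
struct Scaled_kernel { double scale; };   // hypothetical

int main() {
  Scaled_kernel sk = {3.0};
  Store_kernel_sketch<Stateless_kernel> a;     // nothing stored
  Store_kernel_sketch<Scaled_kernel> b(sk);    // stores &sk
  std::cout << int(a.kernel_is_stored) << ' ' << int(b.kernel_is_stored)
            << ' ' << b.kernel().scale << '\n';  // 0 1 3
}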
diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/utils.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/utils.h
new file mode 100644
index 00000000..238a2230
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/utils.h
@@ -0,0 +1,306 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_MARCUTILS
+#define CGAL_MARCUTILS
+
+#include <CGAL/config.h>
+
+#if defined(BOOST_MSVC)
+# pragma warning(push)
+# pragma warning(disable:4003) // not enough actual parameters for macro 'BOOST_PP_EXPAND_I'
+ // http://lists.boost.org/boost-users/2014/11/83291.php
+#endif
+
+#ifdef CGAL_CXX11
+#include <type_traits>
+#include <utility>
+#define CGAL_FORWARDABLE(T) T&&
+#define CGAL_FORWARD(T,t) std::forward<T>(t)
+#define CGAL_MOVE(t) std::move(t)
+#define CGAL_CONSTEXPR constexpr
+#else
+#define CGAL_FORWARDABLE(T) T const&
+#define CGAL_FORWARD(T,t) t
+#define CGAL_MOVE(t) t
+#define CGAL_CONSTEXPR
+#endif
+#include <boost/utility/enable_if.hpp>
+#include <boost/preprocessor/repetition.hpp>
+#include <CGAL/Rational_traits.h>
+#include <CGAL/tuple.h>
+#include <boost/mpl/has_xxx.hpp>
+#include <boost/mpl/not.hpp>
+#include <boost/type_traits.hpp>
+
+#ifdef CGAL_CXX11
+#define CGAL_BOOSTD std::
+#else
+#define CGAL_BOOSTD boost::
+#endif
+
+namespace CGAL {
+namespace internal {
+ BOOST_MPL_HAS_XXX_TRAIT_DEF(type)
+}
+
+template <class T, class No, bool=internal::has_type<T>::value /*false*/>
+struct Has_type_different_from : boost::false_type {};
+template <class T, class No>
+struct Has_type_different_from <T, No, true>
+: boost::mpl::not_<boost::is_same<typename T::type, No> > {};
+
+
+ template <class T> struct Wrap_type { typedef T type; };
+
+ // tell a function f(a,b,c) that its real argument is a(b,c)
+ struct Eval_functor {};
+
+  // forget the first argument. Useful to make something dependent
+ // (and thus usable in SFINAE), although that's not a great design.
+ template<class A,class B> struct Second_arg {
+ typedef B type;
+ };
+
+ // like std::forward, except for basic types where it does a cast, to
+ // avoid issues with narrowing conversions
+#ifdef CGAL_CXX11
+ template<class T,class U,class V> inline
+ typename std::conditional<std::is_arithmetic<T>::value&&std::is_arithmetic<typename std::remove_reference<U>::type>::value,T,U&&>::type
+ forward_safe(V&& u) { return std::forward<U>(u); }
+#else
+ template<class T,class U> inline U const& forward_safe(U const& u) {
+ return u;
+ }
+#endif
+
+#ifdef CGAL_CXX11
+ template<class...> struct Constructible_from_each;
+ template<class To,class From1,class...From> struct Constructible_from_each<To,From1,From...>{
+ enum { value=std::is_convertible<From1,To>::value&&Constructible_from_each<To,From...>::value };
+ };
+ template<class To> struct Constructible_from_each<To>{
+ enum { value=true };
+ };
+#else
+// currently only used in C++0X code
+#endif
+
+ template<class T> struct Scale {
+#ifndef CGAL_CXX11
+ template<class> struct result;
+ template<class FT> struct result<Scale(FT)> {
+ typedef FT type;
+ };
+#endif
+ T const& scale;
+ Scale(T const& t):scale(t){}
+ template<class FT>
+#ifdef CGAL_CXX11
+ auto operator()(FT&& x)const->decltype(scale*std::forward<FT>(x))
+#else
+ FT operator()(FT const& x)const
+#endif
+ {
+ return scale*CGAL_FORWARD(FT,x);
+ }
+ };
+ template<class NT,class T> struct Divide {
+#if !defined(CGAL_CXX11) || !defined(BOOST_RESULT_OF_USE_DECLTYPE)
+ // requires boost > 1.44
+ // shouldn't be needed with C++0X
+ //template<class> struct result;
+ //template<class FT> struct result<Divide(FT)> {
+ // typedef FT type;
+ //};
+ typedef NT result_type;
+#endif
+ T const& scale;
+ Divide(T const& t):scale(t){}
+ template<class FT>
+#ifdef CGAL_CXX11
+ //FIXME: gcc complains for Gmpq
+ //auto operator()(FT&& x)const->decltype(Rational_traits<NT>().make_rational(std::forward<FT>(x),scale))
+ NT operator()(FT&& x)const
+#else
+ NT operator()(FT const& x)const
+#endif
+ {
+ return Rational_traits<NT>().
+ make_rational(CGAL_FORWARD(FT,x),scale);
+ }
+ };
+
+ template <class NT> struct has_cheap_constructor : boost::is_arithmetic<NT>{};
+ template <bool p> struct has_cheap_constructor<Interval_nt<p> > {
+ enum { value=true };
+ };
+
+ // like std::multiplies but allows mixing types
+  // in C++11 it doesn't need to be a template
+ template < class Ret >
+ struct multiplies {
+ template<class A,class B>
+#ifdef CGAL_CXX11
+ auto operator()(A&&a,B&&b)const->decltype(std::forward<A>(a)*std::forward<B>(b))
+#else
+ Ret operator()(A const& a, B const& b)const
+#endif
+ {
+ return CGAL_FORWARD(A,a)*CGAL_FORWARD(B,b);
+ }
+ };
+ template < class Ret >
+ struct division {
+ template<class A,class B>
+#ifdef CGAL_CXX11
+ auto operator()(A&&a,B&&b)const->decltype(std::forward<A>(a)/std::forward<B>(b))
+#else
+ Ret operator()(A const& a, B const& b)const
+#endif
+ {
+ return CGAL_FORWARD(A,a)/CGAL_FORWARD(B,b);
+ }
+ };
+
+#ifdef CGAL_CXX11
+ using std::decay;
+#else
+ template<class T> struct decay : boost::remove_cv<typename boost::decay<T>::type> {};
+#endif
+
+ template<class T,class U> struct Type_copy_ref { typedef U type; };
+ template<class T,class U> struct Type_copy_ref<T&,U> { typedef U& type; };
+#ifdef CGAL_CXX11
+ template<class T,class U> struct Type_copy_ref<T&&,U> { typedef U&& type; };
+#endif
+ template<class T,class U> struct Type_copy_cv { typedef U type; };
+ template<class T,class U> struct Type_copy_cv<T const,U> { typedef U const type; };
+ template<class T,class U> struct Type_copy_cv<T volatile,U> { typedef U volatile type; };
+ template<class T,class U> struct Type_copy_cv<T const volatile,U> { typedef U const volatile type; };
+
+ template<class T,class U> struct Type_copy_cvref :
+ Type_copy_ref<T,typename Type_copy_cv<typename boost::remove_reference<T>::type,U>::type> {};
+
+ struct Dereference_functor {
+ template<class> struct result{};
+ template<class It> struct result<Dereference_functor(It)> {
+ typedef typename std::iterator_traits<It>::reference type;
+ };
+ template<class It> typename result<Dereference_functor(It)>::type
+ operator()(It const&i)const{
+ return *i;
+ }
+ };
+
+#ifdef CGAL_CXX11
+ template<int...> struct Indices{};
+ template<class> struct Next_increasing_indices;
+ template<int...I> struct Next_increasing_indices<Indices<I...> > {
+ typedef Indices<I...,sizeof...(I)> type;
+ };
+ template<int N> struct N_increasing_indices {
+ typedef typename Next_increasing_indices<typename N_increasing_indices<N-1>::type>::type type;
+ };
+ template<> struct N_increasing_indices<0> { typedef Indices<> type; };
+ namespace internal {
+ template<class F,class...U,int...I> inline typename std::result_of<F&&(U...)>::type
+ do_call_on_tuple_elements(F&&f, std::tuple<U...>&&t, Indices<I...>&&) {
+ return f(std::get<I>(std::move(t))...);
+ }
+ } // internal
+ template<class/*result type, ignored*/,class F,class...U>
+ inline typename std::result_of<F&&(U...)>::type
+ call_on_tuple_elements(F&&f, std::tuple<U...>&&t) {
+ return internal::do_call_on_tuple_elements(std::forward<F>(f),std::move(t),
+ typename N_increasing_indices<sizeof...(U)>::type());
+ }
+#else
+#define CGAL_VAR(Z,N,_) cpp0x::get<N>(t)
+#define CGAL_CODE(Z,N,_) template<class Res, class F BOOST_PP_COMMA_IF(N) BOOST_PP_ENUM_PARAMS(N,class U)> \
+ inline Res call_on_tuple_elements(F const&f, \
+ cpp0x::tuple<BOOST_PP_ENUM_PARAMS(N,U)> const&t) { \
+ return f(BOOST_PP_ENUM(N,CGAL_VAR,)); \
+ }
+ template<class Res, class F>
+ inline Res call_on_tuple_elements(F const&f, cpp0x::tuple<>) {
+ return f();
+ }
+BOOST_PP_REPEAT_FROM_TO(1, 8, CGAL_CODE, _ )
+#undef CGAL_CODE
+#undef CGAL_VAR
+#endif
+
+ template<class A> struct Factory {
+ typedef A result_type;
+#ifdef CGAL_CXX11
+ template<class...U> result_type operator()(U&&...u)const{
+ return A(std::forward<U>(u)...);
+ }
+#else
+ result_type operator()()const{
+ return A();
+ }
+#define CGAL_CODE(Z,N,_) template<BOOST_PP_ENUM_PARAMS(N,class U)> \
+ result_type operator()(BOOST_PP_ENUM_BINARY_PARAMS(N,U,const&u))const{ \
+ return A(BOOST_PP_ENUM_PARAMS(N,u)); \
+ }
+BOOST_PP_REPEAT_FROM_TO(1, 8, CGAL_CODE, _ )
+#undef CGAL_CODE
+#endif
+ };
+}
+
+// TODO: make a Cartesian-only variant
+// WARNING: do not use the Req* parameters too much, they can cause circular instantiations and are only useful for dispatching.
+#define CGAL_STRIP_PAREN_(...) __VA_ARGS__
+#define CGAL_STRIP_PAREN(...) CGAL_STRIP_PAREN_ __VA_ARGS__
+// What to do with O? pass it down to other functors or drop it?
+#define CGAL_KD_DEFAULT_FUNCTOR(Tg,Name,ReqTyp,ReqFun) \
+ template <class K, class O> \
+ struct Get_functor<K, Tg, O, \
+ typename boost::mpl::if_c< \
+ Provides_functor_i<K, Tg, O>::value \
+ || !Provides_types<K, boost::mpl::vector<CGAL_STRIP_PAREN_ ReqTyp> >::value \
+ || !Provides_functors<K, boost::mpl::vector<CGAL_STRIP_PAREN_ ReqFun> >::value \
+ , int, void>::type> \
+ { \
+ typedef CGAL_STRIP_PAREN_ Name type; \
+ typedef K Bound_kernel; \
+ }
+
+// Not used yet, may need some changes.
+#define CGAL_KD_DEFAULT_TYPE(Tg,Name,ReqTyp,ReqFun) \
+ template <class K> \
+ struct Get_type<K, Tg, \
+ typename boost::mpl::if_c< \
+ Provides_type_i<K, Tg>::value \
+ || !Provides_types<K, boost::mpl::vector<CGAL_STRIP_PAREN_ ReqTyp> >::value \
+ || !Provides_functors<K, boost::mpl::vector<CGAL_STRIP_PAREN_ ReqFun> >::value \
+ , int, void>::type> \
+ { \
+ typedef CGAL_STRIP_PAREN_ Name type; \
+ typedef K Bound_kernel; \
+ }
+
+#if defined(BOOST_MSVC)
+# pragma warning(pop)
+#endif
+
+#endif // CGAL_MARCUTILS
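Has_type_different_from<T, No> above is true exactly when T has a nested ::type that differs from No; it is what Provides_type / Provides_functor in functor_tags.h build on. A standalone sketch of its behaviour, assuming only Boost; the three probe structs are hypothetical:

#include <boost/mpl/has_xxx.hpp>
#include <boost/mpl/not.hpp>
#include <boost/type_traits.hpp>
#include <iostream>

struct Null_type {};  // plays the role of CGAL's Null_type / Null_functor

namespace internal { BOOST_MPL_HAS_XXX_TRAIT_DEF(type) }

template <class T, class No, bool = internal::has_type<T>::value>
struct Has_type_different_from : boost::false_type {};
template <class T, class No>
struct Has_type_different_from<T, No, true>
  : boost::mpl::not_<boost::is_same<typename T::type, No> > {};

struct No_nested {};                             // no ::type at all      -> false
struct Nested_null { typedef Null_type type; };  // ::type is the "No"    -> false
struct Nested_real { typedef int type; };        // a genuine ::type      -> true

int main() {
  std::cout << Has_type_different_from<No_nested,   Null_type>::value
            << Has_type_different_from<Nested_null, Null_type>::value
            << Has_type_different_from<Nested_real, Null_type>::value
            << '\n';  // prints 001
}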
diff --git a/src/common/include/gudhi_patches/CGAL/Orthogonal_incremental_neighbor_search.h b/src/common/include/gudhi_patches/CGAL/Orthogonal_incremental_neighbor_search.h
new file mode 100644
index 00000000..e29ce14f
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/Orthogonal_incremental_neighbor_search.h
@@ -0,0 +1,620 @@
+// Copyright (c) 2002,2011 Utrecht University (The Netherlands).
+// All rights reserved.
+//
+// This file is part of CGAL (www.cgal.org).
+// You can redistribute it and/or modify it under the terms of the GNU
+// General Public License as published by the Free Software Foundation,
+// either version 3 of the License, or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+//
+// Author(s) : Hans Tangelder (<hanst@cs.uu.nl>)
+
+#ifndef CGAL_ORTHOGONAL_INCREMENTAL_NEIGHBOR_SEARCH
+#define CGAL_ORTHOGONAL_INCREMENTAL_NEIGHBOR_SEARCH
+
+#include <CGAL/Kd_tree.h>
+#include <cstring>
+#include <list>
+#include <queue>
+#include <memory>
+#include <CGAL/Euclidean_distance.h>
+#include <CGAL/tuple.h>
+
+namespace CGAL {
+
+ template <class SearchTraits,
+ class Distance_= typename internal::Spatial_searching_default_distance<SearchTraits>::type,
+ class Splitter_ = Sliding_midpoint<SearchTraits>,
+ class Tree_= Kd_tree<SearchTraits, Splitter_, Tag_true> >
+ class Orthogonal_incremental_neighbor_search {
+
+ public:
+ typedef Splitter_ Splitter;
+ typedef Tree_ Tree;
+ typedef Distance_ Distance;
+ typedef typename SearchTraits::Point_d Point_d;
+ typedef typename Distance::Query_item Query_item;
+ typedef typename SearchTraits::FT FT;
+ typedef typename Tree::Point_d_iterator Point_d_iterator;
+ typedef typename Tree::Node_const_handle Node_const_handle;
+
+ typedef std::pair<Point_d,FT> Point_with_transformed_distance;
+ typedef CGAL::cpp11::tuple<Node_const_handle,FT,std::vector<FT> > Node_with_distance;
+ typedef std::vector<Node_with_distance*> Node_with_distance_vector;
+ typedef std::vector<Point_with_transformed_distance*> Point_with_transformed_distance_vector;
+
+ template<class T>
+ struct Object_wrapper
+ {
+ T object;
+ Object_wrapper(const T& t):object(t){}
+ const T& operator* () const { return object; }
+ const T* operator-> () const { return &object; }
+ };
+
+ class Iterator_implementation {
+ SearchTraits traits;
+ public:
+
+ int number_of_neighbours_computed;
+ int number_of_internal_nodes_visited;
+ int number_of_leaf_nodes_visited;
+ int number_of_items_visited;
+
+ private:
+
+ typedef std::vector<FT> Distance_vector;
+
+ Distance_vector dists;
+
+ Distance Orthogonal_distance_instance;
+
+ FT multiplication_factor;
+
+ Query_item query_point;
+
+ FT distance_to_root;
+
+ bool search_nearest_neighbour;
+
+ FT rd;
+
+
+ class Priority_higher {
+ public:
+
+ bool search_nearest;
+
+ Priority_higher(bool search_the_nearest_neighbour)
+ : search_nearest(search_the_nearest_neighbour)
+ {}
+
+ //highest priority is smallest distance
+ bool
+ operator() (Node_with_distance* n1, Node_with_distance* n2) const
+ {
+ return (search_nearest) ? (CGAL::cpp11::get<1>(*n1) > CGAL::cpp11::get<1>(*n2)) : (CGAL::cpp11::get<1>(*n2) > CGAL::cpp11::get<1>(*n1));
+ }
+ };
+
+ class Distance_smaller {
+
+ public:
+
+ bool search_nearest;
+
+ Distance_smaller(bool search_the_nearest_neighbour)
+ : search_nearest(search_the_nearest_neighbour)
+ {}
+
+ //highest priority is smallest distance
+ bool operator() (Point_with_transformed_distance* p1, Point_with_transformed_distance* p2) const
+ {
+ return (search_nearest) ? (p1->second > p2->second) : (p2->second > p1->second);
+ }
+ };
+
+
+ std::priority_queue<Node_with_distance*, Node_with_distance_vector,
+ Priority_higher> PriorityQueue;
+
+ public:
+ std::priority_queue<Point_with_transformed_distance*, Point_with_transformed_distance_vector,
+ Distance_smaller> Item_PriorityQueue;
+
+
+ public:
+
+ int reference_count;
+
+
+
+ // constructor
+ Iterator_implementation(const Tree& tree,const Query_item& q, const Distance& tr,
+ FT Eps=FT(0.0), bool search_nearest=true)
+ : traits(tree.traits()),number_of_neighbours_computed(0), number_of_internal_nodes_visited(0),
+ number_of_leaf_nodes_visited(0), number_of_items_visited(0),
+ Orthogonal_distance_instance(tr), multiplication_factor(Orthogonal_distance_instance.transformed_distance(FT(1.0)+Eps)),
+ query_point(q), search_nearest_neighbour(search_nearest),
+ PriorityQueue(Priority_higher(search_nearest)), Item_PriorityQueue(Distance_smaller(search_nearest)),
+ reference_count(1)
+
+
+ {
+ if (tree.empty()) return;
+
+ typename SearchTraits::Construct_cartesian_const_iterator_d ccci=traits.construct_cartesian_const_iterator_d_object();
+ int dim = static_cast<int>(std::distance(ccci(q), ccci(q,0)));
+
+ dists.resize(dim);
+ for(int i=0 ; i<dim ; ++i){
+ dists[i] = 0;
+ }
+
+ if (search_nearest){
+ distance_to_root=
+ Orthogonal_distance_instance.min_distance_to_rectangle(q, tree.bounding_box(),dists);
+ Node_with_distance *The_Root = new Node_with_distance(tree.root(),
+ distance_to_root, dists);
+ PriorityQueue.push(The_Root);
+
+ // rd is the distance of the top of the priority queue to q
+ rd=CGAL::cpp11::get<1>(*The_Root);
+ Compute_the_next_nearest_neighbour();
+ }
+ else{
+ distance_to_root=
+ Orthogonal_distance_instance.max_distance_to_rectangle(q,
+ tree.bounding_box(), dists);
+ Node_with_distance *The_Root = new Node_with_distance(tree.root(),
+ distance_to_root, dists);
+ PriorityQueue.push(The_Root);
+
+ // rd is the distance of the top of the priority queue to q
+ rd=CGAL::cpp11::get<1>(*The_Root);
+ Compute_the_next_furthest_neighbour();
+ }
+
+
+ }
+
+ // * operator
+ const Point_with_transformed_distance&
+ operator* () const
+ {
+ return *(Item_PriorityQueue.top());
+ }
+
+ // prefix operator
+ Iterator_implementation&
+ operator++()
+ {
+ Delete_the_current_item_top();
+ if(search_nearest_neighbour)
+ Compute_the_next_nearest_neighbour();
+ else
+ Compute_the_next_furthest_neighbour();
+ return *this;
+ }
+
+ // postfix operator
+ Object_wrapper<Point_with_transformed_distance>
+ operator++(int)
+ {
+ Object_wrapper<Point_with_transformed_distance> result( *(Item_PriorityQueue.top()) );
+ ++*this;
+ return result;
+ }
+
+ // Print statistics of the general priority search process.
+ std::ostream&
+ statistics (std::ostream& s) const {
+ s << "Orthogonal priority search statistics:"
+ << std::endl;
+ s << "Number of internal nodes visited:"
+ << number_of_internal_nodes_visited << std::endl;
+ s << "Number of leaf nodes visited:"
+ << number_of_leaf_nodes_visited << std::endl;
+ s << "Number of items visited:"
+ << number_of_items_visited << std::endl;
+ s << "Number of neighbours computed:"
+ << number_of_neighbours_computed << std::endl;
+ return s;
+ }
+
+
+ //destructor
+ ~Iterator_implementation()
+ {
+ while (!PriorityQueue.empty()) {
+ Node_with_distance* The_top=PriorityQueue.top();
+ PriorityQueue.pop();
+ delete The_top;
+ }
+ while (!Item_PriorityQueue.empty()) {
+ Point_with_transformed_distance* The_top=Item_PriorityQueue.top();
+ Item_PriorityQueue.pop();
+ delete The_top;
+ }
+ }
+
+ private:
+
+ void
+ Delete_the_current_item_top()
+ {
+ Point_with_transformed_distance* The_item_top=Item_PriorityQueue.top();
+ Item_PriorityQueue.pop();
+ delete The_item_top;
+ }
+
+ void
+ Compute_the_next_nearest_neighbour()
+ {
+ // compute the next item
+ bool next_neighbour_found=false;
+ if (!(Item_PriorityQueue.empty())) {
+ next_neighbour_found=
+ (multiplication_factor*rd > Item_PriorityQueue.top()->second);
+ }
+ typename SearchTraits::Construct_cartesian_const_iterator_d construct_it=traits.construct_cartesian_const_iterator_d_object();
+ typename SearchTraits::Cartesian_const_iterator_d query_point_it = construct_it(query_point);
+ // otherwise browse the tree further
+ while ((!next_neighbour_found) && (!PriorityQueue.empty())) {
+ Node_with_distance* The_node_top=PriorityQueue.top();
+ Node_const_handle N= CGAL::cpp11::get<0>(*The_node_top);
+ dists = CGAL::cpp11::get<2>(*The_node_top);
+ PriorityQueue.pop();
+ delete The_node_top;
+ FT copy_rd=rd;
+ while (!(N->is_leaf())) { // compute new distance
+ typename Tree::Internal_node_const_handle node =
+ static_cast<typename Tree::Internal_node_const_handle>(N);
+ number_of_internal_nodes_visited++;
+ int new_cut_dim=node->cutting_dimension();
+ FT new_rd,dst = dists[new_cut_dim];
+ FT val = *(query_point_it + new_cut_dim);
+ FT diff1 = val - node->upper_low_value();
+ FT diff2 = val - node->lower_high_value();
+ if (diff1 + diff2 < FT(0.0)) {
+ new_rd=
+ Orthogonal_distance_instance.new_distance(copy_rd,dst,diff1,new_cut_dim);
+
+ CGAL_assertion(new_rd >= copy_rd);
+ dists[new_cut_dim] = diff1;
+ Node_with_distance *Upper_Child =
+ new Node_with_distance(node->upper(), new_rd, dists);
+ PriorityQueue.push(Upper_Child);
+ dists[new_cut_dim] = dst;
+ N=node->lower();
+
+ }
+ else { // compute new distance
+ new_rd=Orthogonal_distance_instance.new_distance(copy_rd,dst,diff2,new_cut_dim);
+ CGAL_assertion(new_rd >= copy_rd);
+ dists[new_cut_dim] = diff2;
+ Node_with_distance *Lower_Child =
+ new Node_with_distance(node->lower(), new_rd, dists);
+ PriorityQueue.push(Lower_Child);
+ dists[new_cut_dim] = dst;
+ N=node->upper();
+ }
+ }
+ // n is a leaf
+ typename Tree::Leaf_node_const_handle node =
+ static_cast<typename Tree::Leaf_node_const_handle>(N);
+ number_of_leaf_nodes_visited++;
+ if (node->size() > 0) {
+ for (typename Tree::iterator it=node->begin(); it != node->end(); it++) {
+ number_of_items_visited++;
+ FT distance_to_query_point=
+ Orthogonal_distance_instance.transformed_distance(query_point,*it);
+ Point_with_transformed_distance *NN_Candidate=
+ new Point_with_transformed_distance(*it,distance_to_query_point);
+ Item_PriorityQueue.push(NN_Candidate);
+ }
+ // old top of PriorityQueue has been processed,
+ // hence update rd
+
+ if (!(PriorityQueue.empty())) {
+ rd = CGAL::cpp11::get<1>(*PriorityQueue.top());
+ next_neighbour_found =
+ (multiplication_factor*rd >
+ Item_PriorityQueue.top()->second);
+ }
+ else // priority queue empty => last neighbour found
+ {
+ next_neighbour_found=true;
+ }
+
+ number_of_neighbours_computed++;
+ }
+ } // next_neighbour_found or priority queue is empty
+    // in the latter case also the item priority queue is empty
+ }
+
+
+ void
+ Compute_the_next_furthest_neighbour()
+ {
+ // compute the next item
+ bool next_neighbour_found=false;
+ if (!(Item_PriorityQueue.empty())) {
+ next_neighbour_found=
+ (rd < multiplication_factor*Item_PriorityQueue.top()->second);
+ }
+ typename SearchTraits::Construct_cartesian_const_iterator_d construct_it=traits.construct_cartesian_const_iterator_d_object();
+ typename SearchTraits::Cartesian_const_iterator_d query_point_it = construct_it(query_point);
+ // otherwise browse the tree further
+ while ((!next_neighbour_found) && (!PriorityQueue.empty())) {
+ Node_with_distance* The_node_top=PriorityQueue.top();
+ Node_const_handle N= CGAL::cpp11::get<0>(*The_node_top);
+ dists = CGAL::cpp11::get<2>(*The_node_top);
+ PriorityQueue.pop();
+ delete The_node_top;
+ FT copy_rd=rd;
+ while (!(N->is_leaf())) { // compute new distance
+ typename Tree::Internal_node_const_handle node =
+ static_cast<typename Tree::Internal_node_const_handle>(N);
+ number_of_internal_nodes_visited++;
+ int new_cut_dim=node->cutting_dimension();
+ FT new_rd,dst = dists[new_cut_dim];
+ FT val = *(query_point_it + new_cut_dim);
+ FT diff1 = val - node->upper_low_value();
+ FT diff2 = val - node->lower_high_value();
+ if (diff1 + diff2 < FT(0.0)) {
+ diff1 = val - node->upper_high_value();
+ new_rd=
+ Orthogonal_distance_instance.new_distance(copy_rd,dst,diff1,new_cut_dim);
+ Node_with_distance *Lower_Child =
+ new Node_with_distance(node->lower(), copy_rd, dists);
+ PriorityQueue.push(Lower_Child);
+ N=node->upper();
+ dists[new_cut_dim] = diff1;
+ copy_rd=new_rd;
+
+ }
+ else { // compute new distance
+ diff2 = val - node->lower_low_value();
+ new_rd=Orthogonal_distance_instance.new_distance(copy_rd,dst,diff2,new_cut_dim);
+ Node_with_distance *Upper_Child =
+ new Node_with_distance(node->upper(), copy_rd, dists);
+ PriorityQueue.push(Upper_Child);
+ N=node->lower();
+ dists[new_cut_dim] = diff2;
+ copy_rd=new_rd;
+ }
+ }
+ // n is a leaf
+ typename Tree::Leaf_node_const_handle node =
+ static_cast<typename Tree::Leaf_node_const_handle>(N);
+ number_of_leaf_nodes_visited++;
+ if (node->size() > 0) {
+ for (typename Tree::iterator it=node->begin(); it != node->end(); it++) {
+ number_of_items_visited++;
+ FT distance_to_query_point=
+ Orthogonal_distance_instance.transformed_distance(query_point,*it);
+ Point_with_transformed_distance *NN_Candidate=
+ new Point_with_transformed_distance(*it,distance_to_query_point);
+ Item_PriorityQueue.push(NN_Candidate);
+ }
+ // old top of PriorityQueue has been processed,
+ // hence update rd
+
+ if (!(PriorityQueue.empty())) {
+ rd = CGAL::cpp11::get<1>(*PriorityQueue.top());
+ next_neighbour_found =
+ (multiplication_factor*rd <
+ Item_PriorityQueue.top()->second);
+ }
+ else // priority queue empty => last neighbour found
+ {
+ next_neighbour_found=true;
+ }
+
+ number_of_neighbours_computed++;
+ }
+ } // next_neighbour_found or priority queue is empty
+    // in the latter case also the item priority queue is empty
+ }
+  }; // class Iterator_implementation
+
+
+
+
+
+
+
+
+
+ public:
+ class iterator;
+ typedef iterator const_iterator;
+
+ // constructor
+ Orthogonal_incremental_neighbor_search(const Tree& tree,
+ const Query_item& q, FT Eps = FT(0.0),
+ bool search_nearest=true, const Distance& tr=Distance())
+ : m_tree(tree),m_query(q),m_dist(tr),m_Eps(Eps),m_search_nearest(search_nearest)
+ {}
+
+ iterator
+ begin() const
+ {
+ return iterator(m_tree,m_query,m_dist,m_Eps,m_search_nearest);
+ }
+
+ iterator
+ end() const
+ {
+ return iterator();
+ }
+
+ std::ostream&
+ statistics(std::ostream& s)
+ {
+ begin()->statistics(s);
+ return s;
+ }
+
+
+
+
+ class iterator {
+
+ public:
+
+ typedef std::input_iterator_tag iterator_category;
+ typedef Point_with_transformed_distance value_type;
+ typedef Point_with_transformed_distance* pointer;
+ typedef const Point_with_transformed_distance& reference;
+ typedef std::size_t size_type;
+ typedef std::ptrdiff_t difference_type;
+ typedef int distance_type;
+
+ //class Iterator_implementation;
+ Iterator_implementation *Ptr_implementation;
+
+
+ public:
+
+ // default constructor
+ iterator()
+ : Ptr_implementation(0)
+ {}
+
+ int
+ the_number_of_items_visited()
+ {
+ return Ptr_implementation->number_of_items_visited;
+ }
+
+ // constructor
+ iterator(const Tree& tree,const Query_item& q, const Distance& tr=Distance(), FT eps=FT(0.0),
+ bool search_nearest=true)
+ : Ptr_implementation(new Iterator_implementation(tree, q, tr, eps, search_nearest))
+ {}
+
+ // copy constructor
+ iterator(const iterator& Iter)
+ {
+ Ptr_implementation = Iter.Ptr_implementation;
+ if (Ptr_implementation != 0) Ptr_implementation->reference_count++;
+ }
+
+ iterator& operator=(const iterator& Iter)
+ {
+ if (Ptr_implementation != Iter.Ptr_implementation){
+ if (Ptr_implementation != 0 && --(Ptr_implementation->reference_count)==0) {
+ delete Ptr_implementation;
+ }
+ Ptr_implementation = Iter.Ptr_implementation;
+ if (Ptr_implementation != 0) Ptr_implementation->reference_count++;
+ }
+ return *this;
+ }
+
+
+ const Point_with_transformed_distance&
+ operator* () const
+ {
+ return *(*Ptr_implementation);
+ }
+
+ // -> operator
+ const Point_with_transformed_distance*
+ operator-> () const
+ {
+ return &*(*Ptr_implementation);
+ }
+
+ // prefix operator
+ iterator&
+ operator++()
+ {
+ ++(*Ptr_implementation);
+ return *this;
+ }
+
+ // postfix operator
+ Object_wrapper<Point_with_transformed_distance>
+ operator++(int)
+ {
+ return (*Ptr_implementation)++;
+ }
+
+
+ bool
+ operator==(const iterator& It) const
+ {
+ if (
+ ((Ptr_implementation == 0) ||
+ Ptr_implementation->Item_PriorityQueue.empty()) &&
+ ((It.Ptr_implementation == 0) ||
+ It.Ptr_implementation->Item_PriorityQueue.empty())
+ )
+ return true;
+ // else
+ return (Ptr_implementation == It.Ptr_implementation);
+ }
+
+ bool
+ operator!=(const iterator& It) const
+ {
+ return !(*this == It);
+ }
+
+ std::ostream&
+ statistics (std::ostream& s)
+ {
+ Ptr_implementation->statistics(s);
+ return s;
+ }
+
+ ~iterator()
+ {
+ if (Ptr_implementation != 0) {
+ Ptr_implementation->reference_count--;
+ if (Ptr_implementation->reference_count==0) {
+ delete Ptr_implementation;
+ Ptr_implementation = 0;
+ }
+ }
+ }
+
+
+ }; // class iterator
+
+ //data members
+ const Tree& m_tree;
+ Query_item m_query;
+ Distance m_dist;
+ FT m_Eps;
+ bool m_search_nearest;
+ }; // class
+
+ template <class Traits, class Query_item, class Distance>
+ void swap (typename Orthogonal_incremental_neighbor_search<Traits,
+ Query_item, Distance>::iterator& x,
+ typename Orthogonal_incremental_neighbor_search<Traits,
+ Query_item, Distance>::iterator& y)
+ {
+ typename Orthogonal_incremental_neighbor_search<Traits,
+ Query_item, Distance>::iterator::Iterator_implementation
+ *tmp = x.Ptr_implementation;
+ x.Ptr_implementation = y.Ptr_implementation;
+ y.Ptr_implementation = tmp;
+ }
+
+} // namespace CGAL
+
+#endif // CGAL_ORTHOGONAL_INCREMENTAL_NEIGHBOR_SEARCH
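A minimal usage sketch of the class above, assuming a standard CGAL installation; the point coordinates are arbitrary. The iterator yields points lazily, closest first, each paired with its transformed (here squared Euclidean) distance to the query.

#include <CGAL/Simple_cartesian.h>
#include <CGAL/Search_traits_2.h>
#include <CGAL/Orthogonal_incremental_neighbor_search.h>
#include <iostream>

typedef CGAL::Simple_cartesian<double>                        Kernel;
typedef CGAL::Search_traits_2<Kernel>                         Traits;
typedef CGAL::Orthogonal_incremental_neighbor_search<Traits>  NN_search;
typedef NN_search::Tree                                       Tree;

int main() {
  Tree tree;
  tree.insert(Kernel::Point_2(0, 0));
  tree.insert(Kernel::Point_2(1, 1));
  tree.insert(Kernel::Point_2(5, 5));

  Kernel::Point_2 query(0.9, 0.9);
  NN_search search(tree, query);  // nearest-first browsing, default epsilon

  for (NN_search::iterator it = search.begin(); it != search.end(); ++it)
    std::cout << it->first << "  squared distance: " << it->second << "\n";
}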
diff --git a/src/common/include/gudhi_patches/CGAL/Regular_triangulation.h b/src/common/include/gudhi_patches/CGAL/Regular_triangulation.h
new file mode 100644
index 00000000..111c6ac9
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/Regular_triangulation.h
@@ -0,0 +1,1169 @@
+// Copyright (c) 2014 INRIA Sophia-Antipolis (France).
+// All rights reserved.
+//
+// This file is part of CGAL (www.cgal.org).
+// You can redistribute it and/or modify it under the terms of the GNU
+// General Public License as published by the Free Software Foundation,
+// either version 3 of the License, or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Clement Jamin
+
+#ifndef CGAL_REGULAR_TRIANGULATION_H
+#define CGAL_REGULAR_TRIANGULATION_H
+
+#include <CGAL/Triangulation.h>
+#include <CGAL/Dimension.h>
+#include <CGAL/Default.h>
+#include <CGAL/spatial_sort.h>
+#include <CGAL/Regular_triangulation_traits_adapter.h>
+
+#include <boost/property_map/function_property_map.hpp>
+
+namespace CGAL {
+
+template< typename Traits_, typename TDS_ = Default >
+class Regular_triangulation
+: public Triangulation<
+ Regular_triangulation_traits_adapter<Traits_>,
+ typename Default::Get<
+ TDS_,
+ Triangulation_data_structure<
+ typename Regular_triangulation_traits_adapter<Traits_>::Dimension,
+ Triangulation_vertex<Regular_triangulation_traits_adapter<Traits_> >,
+ Triangulation_full_cell<Regular_triangulation_traits_adapter<Traits_> >
+ >
+ >::type>
+{
+ typedef Regular_triangulation_traits_adapter<Traits_> RTTraits;
+ typedef typename RTTraits::Dimension Maximal_dimension_;
+ typedef typename Default::Get<
+ TDS_,
+ Triangulation_data_structure<
+ Maximal_dimension_,
+ Triangulation_vertex<RTTraits>,
+ Triangulation_full_cell<RTTraits>
+ > >::type TDS;
+ typedef Triangulation<RTTraits, TDS> Base;
+ typedef Regular_triangulation<Traits_, TDS_> Self;
+
+ typedef typename RTTraits::Orientation_d Orientation_d;
+ typedef typename RTTraits::Power_side_of_power_sphere_d Power_side_of_power_sphere_d;
+ typedef typename RTTraits::In_flat_power_side_of_power_sphere_d
+ In_flat_power_side_of_power_sphere_d;
+ typedef typename RTTraits::Flat_orientation_d Flat_orientation_d;
+ typedef typename RTTraits::Construct_flat_orientation_d Construct_flat_orientation_d;
+
+public: // PUBLIC NESTED TYPES
+
+ typedef RTTraits Geom_traits;
+ typedef typename Base::Triangulation_ds Triangulation_ds;
+
+ typedef typename Base::Vertex Vertex;
+ typedef typename Base::Full_cell Full_cell;
+ typedef typename Base::Facet Facet;
+ typedef typename Base::Face Face;
+
+ typedef Maximal_dimension_ Maximal_dimension;
+ typedef typename RTTraits::Bare_point_d Bare_point;
+ typedef typename RTTraits::Weighted_point_d Weighted_point;
+
+ typedef typename Base::Point_const_iterator Point_const_iterator;
+ typedef typename Base::Vertex_handle Vertex_handle;
+ typedef typename Base::Vertex_iterator Vertex_iterator;
+ typedef typename Base::Vertex_const_handle Vertex_const_handle;
+ typedef typename Base::Vertex_const_iterator Vertex_const_iterator;
+
+ typedef typename Base::Full_cell_handle Full_cell_handle;
+ typedef typename Base::Full_cell_iterator Full_cell_iterator;
+ typedef typename Base::Full_cell_const_handle Full_cell_const_handle;
+ typedef typename Base::Full_cell_const_iterator Full_cell_const_iterator;
+ typedef typename Base::Finite_full_cell_const_iterator
+ Finite_full_cell_const_iterator;
+
+ typedef typename Base::size_type size_type;
+ typedef typename Base::difference_type difference_type;
+
+ typedef typename Base::Locate_type Locate_type;
+
+ //Tag to distinguish Delaunay from Regular triangulations
+ typedef Tag_true Weighted_tag;
+
+protected: // DATA MEMBERS
+
+
+public:
+
+ using typename Base::Rotor;
+ using Base::maximal_dimension;
+ using Base::are_incident_full_cells_valid;
+ using Base::coaffine_orientation_predicate;
+ using Base::reset_flat_orientation;
+ using Base::current_dimension;
+ using Base::geom_traits;
+ using Base::index_of_covertex;
+ //using Base::index_of_second_covertex;
+ using Base::rotate_rotor;
+ using Base::infinite_vertex;
+ using Base::insert_in_hole;
+ using Base::is_infinite;
+ using Base::locate;
+ using Base::points_begin;
+ using Base::set_neighbors;
+ using Base::new_full_cell;
+ using Base::number_of_vertices;
+ using Base::orientation;
+ using Base::tds;
+ using Base::reorient_full_cells;
+ using Base::full_cell;
+ using Base::full_cells_begin;
+ using Base::full_cells_end;
+ using Base::finite_full_cells_begin;
+ using Base::finite_full_cells_end;
+ using Base::vertices_begin;
+ using Base::vertices_end;
+
+private:
+
+ // Wrapper
+ struct Power_side_of_power_sphere_for_non_maximal_dim_d
+ {
+ boost::optional<Flat_orientation_d>* fop;
+ Construct_flat_orientation_d cfo;
+ In_flat_power_side_of_power_sphere_d ifpt;
+
+ Power_side_of_power_sphere_for_non_maximal_dim_d(
+ boost::optional<Flat_orientation_d>& x,
+ Construct_flat_orientation_d const&y,
+ In_flat_power_side_of_power_sphere_d const&z)
+ : fop(&x), cfo(y), ifpt(z) {}
+
+ template<class Iter>
+ CGAL::Orientation operator()(Iter a, Iter b, const Weighted_point & p)const
+ {
+ if(!*fop)
+ *fop=cfo(a,b);
+ return ifpt(fop->get(),a,b,p);
+ }
+ };
+
+public:
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - CREATION / CONSTRUCTORS
+
+ Regular_triangulation(int dim, const Geom_traits &k = Geom_traits())
+ : Base(dim, k)
+ {
+ }
+
+ // With this constructor,
+ // the user can specify a Flat_orientation_d object to be used for
+ // orienting simplices of a specific dimension
+ // (= preset_flat_orientation_.first)
+  // It is used by the dark triangulations created by DT::remove
+ Regular_triangulation(
+ int dim,
+ const std::pair<int, const Flat_orientation_d *> &preset_flat_orientation,
+ const Geom_traits &k = Geom_traits())
+ : Base(dim, preset_flat_orientation, k)
+ {
+ }
+
+ ~Regular_triangulation() {}
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ACCESS
+
+ // Not Documented
+ Power_side_of_power_sphere_for_non_maximal_dim_d power_side_of_power_sphere_for_non_maximal_dim_predicate() const
+ {
+ return Power_side_of_power_sphere_for_non_maximal_dim_d (
+ flat_orientation_,
+ geom_traits().construct_flat_orientation_d_object(),
+ geom_traits().in_flat_power_side_of_power_sphere_d_object()
+ );
+ }
+
+
+ // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - REMOVALS
+
+ // Warning: these functions are not correct since they do not restore hidden
+ // vertices
+
+ Full_cell_handle remove(Vertex_handle);
+ Full_cell_handle remove(const Weighted_point & p, Full_cell_handle hint = Full_cell_handle())
+ {
+ Locate_type lt;
+ Face f(maximal_dimension());
+ Facet ft;
+ Full_cell_handle s = locate(p, lt, f, ft, hint);
+ if( Base::ON_VERTEX == lt )
+ {
+ return remove(s->vertex(f.index(0)));
+ }
+ return Full_cell_handle();
+ }
+
+ template< typename ForwardIterator >
+ void remove(ForwardIterator start, ForwardIterator end)
+ {
+ while( start != end )
+ remove(*start++);
+ }
+
+ // Not documented
+ void remove_decrease_dimension(Vertex_handle);
+
+ // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - INSERTIONS
+
+ template< typename ForwardIterator >
+ std::ptrdiff_t insert(ForwardIterator start, ForwardIterator end)
+ {
+ size_type n = number_of_vertices();
+ typedef std::vector<Weighted_point> WP_vec;
+ WP_vec points(start, end);
+
+ spatial_sort(points.begin(), points.end(), geom_traits());
+
+ Full_cell_handle hint;
+ for(typename WP_vec::const_iterator p = points.begin(); p != points.end(); ++p )
+ {
+ Locate_type lt;
+ Face f(maximal_dimension());
+ Facet ft;
+ Full_cell_handle c = locate (*p, lt, f, ft, hint);
+ Vertex_handle v = insert (*p, lt, f, ft, c);
+
+ hint = v == Vertex_handle() ? c : v->full_cell();
+ }
+ return number_of_vertices() - n;
+ }
+
+ Vertex_handle insert(const Weighted_point &,
+ Locate_type,
+ const Face &,
+ const Facet &,
+ Full_cell_handle);
+
+ Vertex_handle insert(const Weighted_point & p,
+ Full_cell_handle start = Full_cell_handle())
+ {
+ Locate_type lt;
+ Face f(maximal_dimension());
+ Facet ft;
+ Full_cell_handle s = locate(p, lt, f, ft, start);
+ return insert(p, lt, f, ft, s);
+ }
+
+ Vertex_handle insert(const Weighted_point & p, Vertex_handle hint)
+ {
+ CGAL_assertion( Vertex_handle() != hint );
+ return insert(p, hint->full_cell());
+ }
+
+ Vertex_handle insert_outside_affine_hull(const Weighted_point &);
+ Vertex_handle insert_in_conflicting_cell(
+ const Weighted_point &, Full_cell_handle,
+ Vertex_handle only_if_this_vertex_is_in_the_cz = Vertex_handle());
+
+ Vertex_handle insert_if_in_star(const Weighted_point &,
+ Vertex_handle,
+ Locate_type,
+ const Face &,
+ const Facet &,
+ Full_cell_handle);
+
+ Vertex_handle insert_if_in_star(
+ const Weighted_point & p, Vertex_handle star_center,
+ Full_cell_handle start = Full_cell_handle())
+ {
+ Locate_type lt;
+ Face f(maximal_dimension());
+ Facet ft;
+ Full_cell_handle s = locate(p, lt, f, ft, start);
+ return insert_if_in_star(p, star_center, lt, f, ft, s);
+ }
+
+ Vertex_handle insert_if_in_star(
+ const Weighted_point & p, Vertex_handle star_center,
+ Vertex_handle hint)
+ {
+ CGAL_assertion( Vertex_handle() != hint );
+ return insert_if_in_star(p, star_center, hint->full_cell());
+ }
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - GATHERING CONFLICTING SIMPLICES
+
+ bool is_in_conflict(const Weighted_point &, Full_cell_const_handle) const;
+
+ template< class OrientationPredicate >
+ Oriented_side perturbed_power_side_of_power_sphere(const Weighted_point &,
+ Full_cell_const_handle, const OrientationPredicate &) const;
+
+ template< typename OutputIterator >
+ Facet compute_conflict_zone(const Weighted_point &, Full_cell_handle, OutputIterator) const;
+
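+ // Descriptive note: this functor returns true iff a full cell is in
+ // conflict with the query point p_. For a finite cell, the power-sphere
+ // test is used, with a symbolic perturbation in the degenerate case. For an
+ // infinite cell, the infinite vertex is substituted by p_ and an
+ // orientation test is used; if that test is degenerate, the finite
+ // neighbor decides.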
+ template < typename OrientationPredicate, typename PowerTestPredicate >
+ class Conflict_predicate
+ {
+ const Self & rt_;
+ const Weighted_point & p_;
+ OrientationPredicate ori_;
+ PowerTestPredicate power_side_of_power_sphere_;
+ int cur_dim_;
+ public:
+ Conflict_predicate(
+ const Self & rt,
+ const Weighted_point & p,
+ const OrientationPredicate & ori,
+ const PowerTestPredicate & power_side_of_power_sphere)
+ : rt_(rt), p_(p), ori_(ori), power_side_of_power_sphere_(power_side_of_power_sphere), cur_dim_(rt.current_dimension()) {}
+
+ inline
+ bool operator()(Full_cell_const_handle s) const
+ {
+ bool ok;
+ if( ! rt_.is_infinite(s) )
+ {
+ Oriented_side power_side_of_power_sphere = power_side_of_power_sphere_(rt_.points_begin(s), rt_.points_begin(s) + cur_dim_ + 1, p_);
+ if( ON_POSITIVE_SIDE == power_side_of_power_sphere )
+ ok = true;
+ else if( ON_NEGATIVE_SIDE == power_side_of_power_sphere )
+ ok = false;
+ else
+ ok = ON_POSITIVE_SIDE == rt_.perturbed_power_side_of_power_sphere<OrientationPredicate>(p_, s, ori_);
+ }
+ else
+ {
+ typedef typename Full_cell::Vertex_handle_const_iterator VHCI;
+ typedef Substitute_point_in_vertex_iterator<VHCI> F;
+ F spivi(rt_.infinite_vertex(), &p_);
+
+ Orientation o = ori_(
+ boost::make_transform_iterator(s->vertices_begin(), spivi),
+ boost::make_transform_iterator(s->vertices_begin() + cur_dim_ + 1,
+ spivi));
+
+ if( POSITIVE == o )
+ ok = true;
+ else if( o == NEGATIVE )
+ ok = false;
+ else
+ ok = (*this)(s->neighbor( s->index( rt_.infinite_vertex() ) ));
+ }
+ return ok;
+ }
+ };
+
+ template < typename ConflictPredicate >
+ class Conflict_traversal_predicate
+ {
+ const Self & rt_;
+ const ConflictPredicate & pred_;
+ public:
+ Conflict_traversal_predicate(const Self & rt, const ConflictPredicate & pred)
+ : rt_(rt), pred_(pred)
+ {}
+ inline
+ bool operator()(const Facet & f) const
+ {
+ return pred_(rt_.full_cell(f)->neighbor(rt_.index_of_covertex(f)));
+ }
+ };
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - VALIDITY
+
+ bool is_valid(bool verbose = false, int level = 0) const;
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - MISC
+
+ std::size_t number_of_hidden_vertices() const
+ {
+ return m_hidden_points.size();
+ }
+
+private:
+
+ template<typename InputIterator>
+ bool
+ does_cell_range_contain_vertex(InputIterator cz_begin, InputIterator cz_end,
+ Vertex_handle vh) const
+ {
+ // Check all vertices
+ while(cz_begin != cz_end)
+ {
+ Full_cell_handle fch = *cz_begin;
+ for (int i = 0 ; i <= current_dimension() ; ++i)
+ {
+ if (fch->vertex(i) == vh)
+ return true;
+ }
+ ++cz_begin;
+ }
+ return false;
+ }
+
+ template<typename InputIterator, typename OutputIterator>
+ void
+ process_conflict_zone(InputIterator cz_begin, InputIterator cz_end,
+ OutputIterator vertices_out) const
+ {
+ // Get all vertices
+ while(cz_begin != cz_end)
+ {
+ Full_cell_handle fch = *cz_begin;
+ for (int i = 0 ; i <= current_dimension() ; ++i)
+ {
+ Vertex_handle vh = fch->vertex(i);
+ if (vh->full_cell() != Full_cell_handle())
+ {
+ (*vertices_out++) = vh;
+ vh->set_full_cell(Full_cell_handle());
+ }
+ }
+ ++cz_begin;
+ }
+ }
+
+
+ template<typename InputIterator>
+ void
+ process_cz_vertices_after_insertion(InputIterator vertices_begin,
+ InputIterator vertices_end)
+ {
+ // Get all vertices
+ while(vertices_begin != vertices_end)
+ {
+ Vertex_handle vh = *vertices_begin;
+ if (vh->full_cell() == Full_cell_handle())
+ {
+ m_hidden_points.push_back(vh->point());
+ tds().delete_vertex(vh);
+ }
+ ++vertices_begin;
+ }
+ }
+
+private:
+ // Some internal types to shorten notation
+ using typename Base::Coaffine_orientation_d;
+ using Base::flat_orientation_;
+ typedef Conflict_predicate<Coaffine_orientation_d, Power_side_of_power_sphere_for_non_maximal_dim_d>
+ Conflict_pred_in_subspace;
+ typedef Conflict_predicate<Orientation_d, Power_side_of_power_sphere_d>
+ Conflict_pred_in_fullspace;
+ typedef Conflict_traversal_predicate<Conflict_pred_in_subspace>
+ Conflict_traversal_pred_in_subspace;
+ typedef Conflict_traversal_predicate<Conflict_pred_in_fullspace>
+ Conflict_traversal_pred_in_fullspace;
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - MEMBER VARIABLES
+ std::vector<Weighted_point> m_hidden_points;
+
+}; // class Regular_triangulation
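+
+// A minimal usage sketch (illustrative only: the kernel and the typedefs
+// below are assumptions in the spirit of CGAL's dD Triangulations package,
+// they are not defined in this file):
+//
+//   #include <CGAL/Epick_d.h>
+//   #include <CGAL/Regular_triangulation.h>
+//
+//   typedef CGAL::Epick_d< CGAL::Dimension_tag<3> >   K;
+//   typedef CGAL::Regular_triangulation<K>            RT;
+//   typedef K::Weighted_point_d                       Weighted_point;
+//   typedef K::Point_d                                Bare_point;
+//
+//   RT rt(3);                                  // maximal dimension 3
+//   std::vector<Weighted_point> wps;
+//   wps.push_back(Weighted_point(Bare_point(0., 0., 0.), 2.));  // point, weight
+//   wps.push_back(Weighted_point(Bare_point(1., 0., 0.), 0.5));
+//   rt.insert(wps.begin(), wps.end());         // spatial-sort based range insertion
+//   std::size_t nb_hidden = rt.number_of_hidden_vertices();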
+
+
+// = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
+// FUNCTIONS THAT ARE MEMBER METHODS:
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - REMOVALS
+
+
+// Warning: this function is not correct since it does not restore hidden
+// vertices
+template< typename Traits, typename TDS >
+typename Regular_triangulation<Traits, TDS>::Full_cell_handle
+Regular_triangulation<Traits, TDS>
+::remove( Vertex_handle v )
+{
+ CGAL_precondition( ! is_infinite(v) );
+ CGAL_expensive_precondition( is_vertex(v) );
+
+ // THE CASE cur_dim == 0
+ if( 0 == current_dimension() )
+ {
+ remove_decrease_dimension(v);
+ return Full_cell_handle();
+ }
+ else if( 1 == current_dimension() )
+ { // THE CASE cur_dim == 1
+ if( 2 == number_of_vertices() )
+ {
+ remove_decrease_dimension(v);
+ return Full_cell_handle();
+ }
+ Full_cell_handle left = v->full_cell();
+ if( 0 == left->index(v) )
+ left = left->neighbor(1);
+ CGAL_assertion( 1 == left->index(v) );
+ Full_cell_handle right = left->neighbor(0);
+ tds().associate_vertex_with_full_cell(left, 1, right->vertex(1));
+ set_neighbors(left, 0, right->neighbor(0), right->mirror_index(0));
+ tds().delete_vertex(v);
+ tds().delete_full_cell(right);
+ return left;
+ }
+
+ // THE CASE cur_dim >= 2
+ // Gather the finite vertices sharing an edge with |v|
+ typedef typename Base::template Full_cell_set<Full_cell_handle> Simplices;
+ Simplices simps;
+ std::back_insert_iterator<Simplices> out(simps);
+ tds().incident_full_cells(v, out);
+ typedef std::set<Vertex_handle> Vertex_set;
+ Vertex_set verts;
+ Vertex_handle vh;
+ for( typename Simplices::iterator it = simps.begin(); it != simps.end(); ++it )
+ for( int i = 0; i <= current_dimension(); ++i )
+ {
+ vh = (*it)->vertex(i);
+ if( is_infinite(vh) )
+ continue;
+ if( vh == v )
+ continue;
+ verts.insert(vh);
+ }
+
+ // After gathering the finite neighboring vertices, build their dark regular triangulation
+ typedef Triangulation_vertex<Geom_traits, Vertex_handle> Dark_vertex_base;
+ typedef Triangulation_full_cell<
+ Geom_traits,
+ internal::Triangulation::Dark_full_cell_data<TDS> > Dark_full_cell_base;
+ typedef Triangulation_data_structure<Maximal_dimension,
+ Dark_vertex_base,
+ Dark_full_cell_base
+ > Dark_tds;
+ typedef Regular_triangulation<Traits, Dark_tds> Dark_triangulation;
+ typedef typename Dark_triangulation::Face Dark_face;
+ typedef typename Dark_triangulation::Facet Dark_facet;
+ typedef typename Dark_triangulation::Vertex_handle Dark_v_handle;
+ typedef typename Dark_triangulation::Full_cell_handle Dark_s_handle;
+
+ // If flat_orientation_ is defined, we give it the Dark triangulation
+ // so that the orientation it uses for "current_dimension()"-simplices is
+ // coherent with the global triangulation
+ Dark_triangulation dark_side(
+ maximal_dimension(),
+ flat_orientation_ ?
+ std::pair<int, const Flat_orientation_d *>(current_dimension(), flat_orientation_.get_ptr())
+ : std::pair<int, const Flat_orientation_d *>(std::numeric_limits<int>::max(), NULL) );
+
+ Dark_s_handle dark_s;
+ Dark_v_handle dark_v;
+ typedef std::map<Vertex_handle, Dark_v_handle> Vertex_map;
+ Vertex_map light_to_dark;
+ typename Vertex_set::iterator vit = verts.begin();
+ while( vit != verts.end() )
+ {
+ dark_v = dark_side.insert((*vit)->point(), dark_s);
+ dark_s = dark_v->full_cell();
+ dark_v->data() = *vit;
+ light_to_dark[*vit] = dark_v;
+ ++vit;
+ }
+
+ if( dark_side.current_dimension() != current_dimension() )
+ {
+ CGAL_assertion( dark_side.current_dimension() + 1 == current_dimension() );
+ // Here, the finite neighbors of |v| span an affine subspace of
+ // dimension one less than the current dimension. Two cases are possible:
+ if( (size_type)(verts.size() + 1) == number_of_vertices() )
+ {
+ remove_decrease_dimension(v);
+ return Full_cell_handle();
+ }
+ else
+ { // |v| is strictly outside the convex hull of the rest of the points. This is an
+ // easy case: first, modify the finite full_cells, then, delete the infinite ones.
+ // We don't even need the Dark triangulation.
+ Simplices infinite_simps;
+ {
+ Simplices finite_simps;
+ for( typename Simplices::iterator it = simps.begin(); it != simps.end(); ++it )
+ if( is_infinite(*it) )
+ infinite_simps.push_back(*it);
+ else
+ finite_simps.push_back(*it);
+ simps.swap(finite_simps);
+ } // now, simps only contains finite simplices
+ // First, modify the finite full_cells:
+ for( typename Simplices::iterator it = simps.begin(); it != simps.end(); ++it )
+ {
+ int v_idx = (*it)->index(v);
+ tds().associate_vertex_with_full_cell(*it, v_idx, infinite_vertex());
+ }
+ // Make the handles to infinite full cells searchable
+ infinite_simps.make_searchable();
+ // Then, modify the neighboring relation
+ for( typename Simplices::iterator it = simps.begin(); it != simps.end(); ++it )
+ {
+ for( int i = 0 ; i <= current_dimension(); ++i )
+ {
+ if (is_infinite((*it)->vertex(i)))
+ continue;
+ (*it)->vertex(i)->set_full_cell(*it);
+ Full_cell_handle n = (*it)->neighbor(i);
+ // Was |n| a finite full cell prior to removing |v| ?
+ if( ! infinite_simps.contains(n) )
+ continue;
+ int n_idx = n->index(v);
+ set_neighbors(*it, i, n->neighbor(n_idx), n->neighbor(n_idx)->index(n));
+ }
+ }
+ Full_cell_handle ret_s;
+ // Then, we delete the infinite full_cells
+ for( typename Simplices::iterator it = infinite_simps.begin(); it != infinite_simps.end(); ++it )
+ tds().delete_full_cell(*it);
+ tds().delete_vertex(v);
+ return simps.front();
+ }
+ }
+ else // From here on, dark_side.current_dimension() == current_dimension()
+ {
+ dark_side.infinite_vertex()->data() = infinite_vertex();
+ light_to_dark[infinite_vertex()] = dark_side.infinite_vertex();
+ }
+
+ // Now, compute the conflict zone of v->point() in
+ // the dark side. This is precisely the set of full_cells
+ // that we have to glue back into the light side.
+ Dark_face dark_f(dark_side.maximal_dimension());
+ Dark_facet dark_ft;
+ typename Dark_triangulation::Locate_type lt;
+ dark_s = dark_side.locate(v->point(), lt, dark_f, dark_ft);
+ CGAL_assertion( lt != Dark_triangulation::ON_VERTEX
+ && lt != Dark_triangulation::OUTSIDE_AFFINE_HULL );
+
+ // |ret_s| is the full_cell that we return
+ Dark_s_handle dark_ret_s = dark_s;
+ Full_cell_handle ret_s;
+
+ typedef typename Base::template Full_cell_set<Dark_s_handle> Dark_full_cells;
+ Dark_full_cells conflict_zone;
+ std::back_insert_iterator<Dark_full_cells> dark_out(conflict_zone);
+
+ dark_ft = dark_side.compute_conflict_zone(v->point(), dark_s, dark_out);
+ // Make the dark simplices in the conflict zone searchable
+ conflict_zone.make_searchable();
+
+ // THE FOLLOWING SHOULD MAYBE GO IN TDS.
+ // Here is the plan:
+ // 1. Pick any Facet from boundary of the light zone
+ // 2. Find corresponding Facet on boundary of dark zone
+ // 3. stitch.
+
+ // 1. Build a facet on the boundary of the light zone:
+ Full_cell_handle light_s = *simps.begin();
+ Facet light_ft(light_s, light_s->index(v));
+
+ // 2. Find corresponding Dark_facet on boundary of the dark zone
+ Dark_full_cells dark_incident_s;
+ for( int i = 0; i <= current_dimension(); ++i )
+ {
+ if( index_of_covertex(light_ft) == i )
+ continue;
+ Dark_v_handle dark_v = light_to_dark[full_cell(light_ft)->vertex(i)];
+ dark_incident_s.clear();
+ dark_out = std::back_inserter(dark_incident_s);
+ dark_side.tds().incident_full_cells(dark_v, dark_out);
+ for(typename Dark_full_cells::iterator it = dark_incident_s.begin();
+ it != dark_incident_s.end();
+ ++it)
+ {
+ (*it)->data().count_ += 1;
+ }
+ }
+
+ for( typename Dark_full_cells::iterator it = dark_incident_s.begin(); it != dark_incident_s.end(); ++it )
+ {
+ if( current_dimension() != (*it)->data().count_ )
+ continue;
+ if( ! conflict_zone.contains(*it) )
+ continue;
+ // We found a full_cell incident to the dark facet corresponding to the light facet |light_ft|
+ int ft_idx = 0;
+ while( light_s->has_vertex( (*it)->vertex(ft_idx)->data() ) )
+ ++ft_idx;
+ dark_ft = Dark_facet(*it, ft_idx);
+ break;
+ }
+ // Pre-3. Now, we are ready to traverse both boundaries and do the stitching.
+
+ // But first, we create the new full_cells in the light triangulation,
+ // with as much adjacency information as possible.
+
+ // Create new full_cells with vertices
+ for( typename Dark_full_cells::iterator it = conflict_zone.begin(); it != conflict_zone.end(); ++it )
+ {
+ Full_cell_handle new_s = new_full_cell();
+ (*it)->data().light_copy_ = new_s;
+ for( int i = 0; i <= current_dimension(); ++i )
+ tds().associate_vertex_with_full_cell(new_s, i, (*it)->vertex(i)->data());
+ if( dark_ret_s == *it )
+ ret_s = new_s;
+ }
+
+ // Setup adjacencies inside the hole
+ for( typename Dark_full_cells::iterator it = conflict_zone.begin(); it != conflict_zone.end(); ++it )
+ {
+ Full_cell_handle new_s = (*it)->data().light_copy_;
+ for( int i = 0; i <= current_dimension(); ++i )
+ if( conflict_zone.contains((*it)->neighbor(i)) )
+ tds().set_neighbors(new_s, i, (*it)->neighbor(i)->data().light_copy_, (*it)->mirror_index(i));
+ }
+
+ // 3. Stitch
+ simps.make_searchable();
+ typedef std::queue<std::pair<Facet, Dark_facet> > Queue;
+ Queue q;
+ q.push(std::make_pair(light_ft, dark_ft));
+ dark_s = dark_side.full_cell(dark_ft);
+ int dark_i = dark_side.index_of_covertex(dark_ft);
+ // mark dark_ft as visited:
+ // TODO try by marking with Dark_v_handle (vertex)
+ dark_s->neighbor(dark_i)->set_neighbor(dark_s->mirror_index(dark_i), Dark_s_handle());
+ while( ! q.empty() )
+ {
+ std::pair<Facet, Dark_facet> p = q.front();
+ q.pop();
+ light_ft = p.first;
+ dark_ft = p.second;
+ light_s = full_cell(light_ft);
+ int light_i = index_of_covertex(light_ft);
+ dark_s = dark_side.full_cell(dark_ft);
+ int dark_i = dark_side.index_of_covertex(dark_ft);
+ Full_cell_handle light_n = light_s->neighbor(light_i);
+ set_neighbors(dark_s->data().light_copy_, dark_i, light_n, light_s->mirror_index(light_i));
+ for( int di = 0; di <= current_dimension(); ++di )
+ {
+ if( di == dark_i )
+ continue;
+ int li = light_s->index(dark_s->vertex(di)->data());
+ Rotor light_r(light_s, li, light_i);
+ typename Dark_triangulation::Rotor dark_r(dark_s, di, dark_i);
+
+ while( simps.contains(cpp11::get<0>(light_r)->neighbor(cpp11::get<1>(light_r))) )
+ light_r = rotate_rotor(light_r);
+
+ while( conflict_zone.contains(cpp11::get<0>(dark_r)->neighbor(cpp11::get<1>(dark_r))) )
+ dark_r = dark_side.rotate_rotor(dark_r);
+
+ Dark_s_handle dark_ns = cpp11::get<0>(dark_r);
+ int dark_ni = cpp11::get<1>(dark_r);
+ Full_cell_handle light_ns = cpp11::get<0>(light_r);
+ int light_ni = cpp11::get<1>(light_r);
+ // mark dark_r as visited:
+ // TODO try by marking with Dark_v_handle (vertex)
+ Dark_s_handle outside = dark_ns->neighbor(dark_ni);
+ Dark_v_handle mirror = dark_ns->mirror_vertex(dark_ni, current_dimension());
+ int dn = outside->index(mirror);
+ if( Dark_s_handle() == outside->neighbor(dn) )
+ continue;
+ outside->set_neighbor(dn, Dark_s_handle());
+ q.push(std::make_pair(Facet(light_ns, light_ni), Dark_facet(dark_ns, dark_ni)));
+ }
+ }
+ tds().delete_full_cells(simps.begin(), simps.end());
+ tds().delete_vertex(v);
+ return ret_s;
+}
+
+template< typename Traits, typename TDS >
+void
+Regular_triangulation<Traits, TDS>
+::remove_decrease_dimension(Vertex_handle v)
+{
+ CGAL_precondition( current_dimension() >= 0 );
+ tds().remove_decrease_dimension(v, infinite_vertex());
+ // reset the predicates:
+ reset_flat_orientation();
+ if( 1 <= current_dimension() )
+ {
+ Full_cell_handle inf_v_cell = infinite_vertex()->full_cell();
+ int inf_v_index = inf_v_cell->index(infinite_vertex());
+ Full_cell_handle s = inf_v_cell->neighbor(inf_v_index);
+ Orientation o = orientation(s);
+ CGAL_assertion( ZERO != o );
+ if( NEGATIVE == o )
+ reorient_full_cells();
+ }
+}
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - INSERTIONS
+
+template< typename Traits, typename TDS >
+typename Regular_triangulation<Traits, TDS>::Vertex_handle
+Regular_triangulation<Traits, TDS>
+::insert(const Weighted_point & p, Locate_type lt, const Face & f, const Facet & ft, Full_cell_handle s)
+{
+ switch( lt )
+ {
+ case Base::OUTSIDE_AFFINE_HULL:
+ return insert_outside_affine_hull(p);
+ break;
+ case Base::ON_VERTEX:
+ {
+ Vertex_handle v = s->vertex(f.index(0));
+ typename RTTraits::Compute_weight_d pw =
+ geom_traits().compute_weight_d_object();
+
+ if (pw(p) == pw(v->point()))
+ return v;
+ // If dim == 0 and the new point has a bigger weight,
+ // we just replace the point, and the former point gets hidden
+ else if (current_dimension() == 0)
+ {
+ if (pw(p) > pw(v->point()))
+ {
+ m_hidden_points.push_back(v->point());
+ v->set_point(p);
+ return v;
+ }
+ // Otherwise, the new point is hidden
+ else
+ {
+ m_hidden_points.push_back(p);
+ return Vertex_handle();
+ }
+ }
+ // Otherwise, we apply the "normal" algorithm
+
+ // !NO break here!
+ }
+ default:
+ return insert_in_conflicting_cell(p, s);
+ }
+}
+
+/*
+Inserts the point `p` in the regular triangulation. Returns a handle to the
+newly created vertex at that position.
+\pre The point `p`
+must lie outside the affine hull of the regular triangulation. This implies that
+`rt`.`current_dimension()` must be smaller than `rt`.`maximal_dimension()`.
+*/
+template< typename Traits, typename TDS >
+typename Regular_triangulation<Traits, TDS>::Vertex_handle
+Regular_triangulation<Traits, TDS>
+::insert_outside_affine_hull(const Weighted_point & p)
+{
+ // we don't use Base::insert_outside_affine_hull(...) because here, we
+ // also need to reset the side_of_oriented_subsphere functor.
+ CGAL_precondition( current_dimension() < maximal_dimension() );
+ Vertex_handle v = tds().insert_increase_dimension(infinite_vertex());
+ // reset the predicates:
+ reset_flat_orientation();
+ v->set_point(p);
+ if( current_dimension() >= 1 )
+ {
+ Full_cell_handle inf_v_cell = infinite_vertex()->full_cell();
+ int inf_v_index = inf_v_cell->index(infinite_vertex());
+ Full_cell_handle s = inf_v_cell->neighbor(inf_v_index);
+ Orientation o = orientation(s);
+ CGAL_assertion( ZERO != o );
+ if( NEGATIVE == o )
+ reorient_full_cells();
+
+ // We just inserted the second finite point, and the right infinite
+ // cell looks like (inf_v, v), but we want it to be (v, inf_v) to stay
+ // consistent with the rest of the cells
+ if (current_dimension() == 1)
+ {
+ // Is "inf_v_cell" the right infinite cell? Then inf_v_index should be 1
+ if (inf_v_cell->neighbor(inf_v_index)->index(inf_v_cell) == 0
+ && inf_v_index == 0)
+ {
+ inf_v_cell->swap_vertices(current_dimension() - 1, current_dimension());
+ }
+ else
+ {
+ inf_v_cell = inf_v_cell->neighbor((inf_v_index + 1) % 2);
+ inf_v_index = inf_v_cell->index(infinite_vertex());
+ // Is "inf_v_cell" the right infinite cell? Then inf_v_index should be 1
+ if (inf_v_cell->neighbor(inf_v_index)->index(inf_v_cell) == 0
+ && inf_v_index == 0)
+ {
+ inf_v_cell->swap_vertices(current_dimension() - 1, current_dimension());
+ }
+ }
+ }
+ }
+ return v;
+}
+
+template< typename Traits, typename TDS >
+typename Regular_triangulation<Traits, TDS>::Vertex_handle
+Regular_triangulation<Traits, TDS>
+::insert_if_in_star(const Weighted_point & p,
+ Vertex_handle star_center,
+ Locate_type lt,
+ const Face & f,
+ const Facet & ft,
+ Full_cell_handle s)
+{
+ switch( lt )
+ {
+ case Base::OUTSIDE_AFFINE_HULL:
+ return insert_outside_affine_hull(p);
+ break;
+ case Base::ON_VERTEX:
+ {
+ Vertex_handle v = s->vertex(f.index(0));
+ typename RTTraits::Compute_weight_d pw =
+ geom_traits().compute_weight_d_object();
+ if (pw(p) == pw(v->point()))
+ return v;
+ // If dim == 0 and the new point has a bigger weight,
+ // we replace the point
+ else if (current_dimension() == 0)
+ {
+ if (pw(p) > pw(v->point()))
+ v->set_point(p);
+ else
+ return v;
+ }
+ // Otherwise, we apply the "normal" algorithm
+
+ // !NO break here!
+ }
+ default:
+ return insert_in_conflicting_cell(p, s, star_center);
+ }
+
+ return Vertex_handle();
+}
+
+/*
+[Undocumented function]
+
+Inserts the point `p` in the regular triangulation. `p` must be
+in conflict with the second parameter `c`, which is used as a
+starting point for `compute_conflict_zone`.
+The function is faster than the standard `insert` function since
+it does not need to call `locate`.
+
+If this insertion creates a vertex, this vertex is returned.
+
+If `p` coincides with an existing vertex and has a greater weight,
+then the existing weighted point becomes hidden and `p` replaces it as vertex
+of the triangulation.
+
+If `p` coincides with an already existing vertex (both point and
+weights being equal), then this vertex is returned and the triangulation
+remains unchanged.
+
+Otherwise if `p` does not appear as a vertex of the triangulation,
+then it is stored as a hidden point and this method returns the default
+constructed handle.
+
+\pre The point `p` must be in conflict with the full cell `c`.
+*/
+
+template< typename Traits, typename TDS >
+typename Regular_triangulation<Traits, TDS>::Vertex_handle
+Regular_triangulation<Traits, TDS>
+::insert_in_conflicting_cell(const Weighted_point & p,
+ Full_cell_handle s,
+ Vertex_handle only_if_this_vertex_is_in_the_cz)
+{
+ typedef std::vector<Full_cell_handle> Full_cell_h_vector;
+
+ bool in_conflict = is_in_conflict(p, s);
+
+ // If p is not in conflict with s, then p is hidden
+ // => we don't insert it
+ if (!in_conflict)
+ {
+ m_hidden_points.push_back(p);
+ return Vertex_handle();
+ }
+ else
+ {
+ Full_cell_h_vector cs; // for storing conflicting full_cells.
+ cs.reserve(64);
+ std::back_insert_iterator<Full_cell_h_vector> out(cs);
+ Facet ft = compute_conflict_zone(p, s, out);
+
+ // Check if the CZ contains "only_if_this_vertex_is_in_the_cz"
+ if (only_if_this_vertex_is_in_the_cz != Vertex_handle()
+ && !does_cell_range_contain_vertex(cs.begin(), cs.end(),
+ only_if_this_vertex_is_in_the_cz))
+ {
+ return Vertex_handle();
+ }
+
+ // Otherwise, proceed with the insertion
+ std::vector<Vertex_handle> cz_vertices;
+ cz_vertices.reserve(64);
+ process_conflict_zone(cs.begin(), cs.end(),
+ std::back_inserter(cz_vertices));
+
+ Vertex_handle ret = insert_in_hole(p, cs.begin(), cs.end(), ft);
+
+ process_cz_vertices_after_insertion(cz_vertices.begin(), cz_vertices.end());
+
+ return ret;
+ }
+}
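+
+// A minimal calling sketch for the function above (illustrative only; |rt|,
+// |wp| and |cell| are assumed to exist in the caller):
+//
+//   Vertex_handle v = rt.insert_in_conflicting_cell(wp, cell);
+//   if (v == Vertex_handle())
+//   { /* wp is hidden by heavier points: no vertex was created */ }
+//   else
+//   { /* v holds wp, possibly replacing a now-hidden coinciding vertex */ }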
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - GATHERING CONFLICTING SIMPLICES
+
+// NOT DOCUMENTED
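+// Descriptive note: ties (degenerate configurations) are broken by a symbolic
+// perturbation. The cell's vertices and the query point are sorted in a
+// global order; starting from the highest point, each one in turn is
+// conceptually removed and the sign of the resulting orientation decides the
+// side; if the query point itself is reached first, it is reported to lie
+// outside the power sphere.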
+template< typename Traits, typename TDS >
+template< typename OrientationPred >
+Oriented_side
+Regular_triangulation<Traits, TDS>
+::perturbed_power_side_of_power_sphere(const Weighted_point & p, Full_cell_const_handle s,
+ const OrientationPred & ori) const
+{
+ CGAL_precondition_msg( ! is_infinite(s), "full cell must be finite");
+ CGAL_expensive_precondition( POSITIVE == orientation(s) );
+ typedef std::vector<const Weighted_point *> Points;
+ Points points(current_dimension() + 2);
+ int i(0);
+ for( ; i <= current_dimension(); ++i )
+ points[i] = &(s->vertex(i)->point());
+ points[i] = &p;
+ std::sort(points.begin(), points.end(),
+ internal::Triangulation::Compare_points_for_perturbation<Self>(*this));
+ typename Points::const_reverse_iterator cut_pt = points.rbegin();
+ Points test_points;
+ while( cut_pt != points.rend() )
+ {
+ if( &p == *cut_pt )
+ // because the full_cell "s" is assumed to be positively oriented
+ return ON_NEGATIVE_SIDE; // we consider |p| to lie outside the sphere
+ test_points.clear();
+ Point_const_iterator spit = points_begin(s);
+ int adjust_sign = -1;
+ for( i = 0; i < current_dimension(); ++i )
+ {
+ if( &(*spit) == *cut_pt )
+ {
+ ++spit;
+ adjust_sign = (((current_dimension() + i) % 2) == 0) ? -1 : +1;
+ }
+ test_points.push_back(&(*spit));
+ ++spit;
+ }
+ test_points.push_back(&p);
+
+ typedef typename CGAL::Iterator_project<
+ typename Points::iterator,
+ internal::Triangulation::Point_from_pointer<Self>,
+ const Weighted_point &, const Weighted_point *
+ > Point_pointer_iterator;
+
+ Orientation ori_value = ori(
+ Point_pointer_iterator(test_points.begin()),
+ Point_pointer_iterator(test_points.end()));
+
+ if( ZERO != ori_value )
+ return Oriented_side( - adjust_sign * ori_value );
+
+ ++cut_pt;
+ }
+ CGAL_assertion(false); // we should never reach here
+ return ON_NEGATIVE_SIDE;
+}
+
+template< typename Traits, typename TDS >
+bool
+Regular_triangulation<Traits, TDS>
+::is_in_conflict(const Weighted_point & p, Full_cell_const_handle s) const
+{
+ CGAL_precondition( 1 <= current_dimension() );
+ if( current_dimension() < maximal_dimension() )
+ {
+ Conflict_pred_in_subspace c(
+ *this, p,
+ coaffine_orientation_predicate(),
+ power_side_of_power_sphere_for_non_maximal_dim_predicate());
+ return c(s);
+ }
+ else
+ {
+ Orientation_d ori = geom_traits().orientation_d_object();
+ Power_side_of_power_sphere_d side = geom_traits().power_side_of_power_sphere_d_object();
+ Conflict_pred_in_fullspace c(*this, p, ori, side);
+ return c(s);
+ }
+}
+
+template< typename Traits, typename TDS >
+template< typename OutputIterator >
+typename Regular_triangulation<Traits, TDS>::Facet
+Regular_triangulation<Traits, TDS>
+::compute_conflict_zone(const Weighted_point & p, Full_cell_handle s, OutputIterator out) const
+{
+ CGAL_precondition( 1 <= current_dimension() );
+ if( current_dimension() < maximal_dimension() )
+ {
+ Conflict_pred_in_subspace c(
+ *this, p,
+ coaffine_orientation_predicate(),
+ power_side_of_power_sphere_for_non_maximal_dim_predicate());
+ Conflict_traversal_pred_in_subspace tp(*this, c);
+ return tds().gather_full_cells(s, tp, out);
+ }
+ else
+ {
+ Orientation_d ori = geom_traits().orientation_d_object();
+ Power_side_of_power_sphere_d side = geom_traits().power_side_of_power_sphere_d_object();
+ Conflict_pred_in_fullspace c(*this, p, ori, side);
+ Conflict_traversal_pred_in_fullspace tp(*this, c);
+ return tds().gather_full_cells(s, tp, out);
+ }
+}
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - VALIDITY
+
+template< typename Traits, typename TDS >
+bool
+Regular_triangulation<Traits, TDS>
+::is_valid(bool verbose, int level) const
+{
+ if (!Base::is_valid(verbose, level))
+ return false;
+
+ int dim = current_dimension();
+ if (dim == maximal_dimension())
+ {
+ for (Finite_full_cell_const_iterator cit = finite_full_cells_begin() ;
+ cit != finite_full_cells_end() ; ++cit )
+ {
+ Full_cell_const_handle ch = cit.base();
+ for(int i = 0; i < dim+1 ; ++i )
+ {
+ // If the vertex of the i-th neighbor opposite to this cell is not the infinite vertex
+ Vertex_handle opposite_vh =
+ ch->neighbor(i)->vertex(ch->neighbor(i)->index(ch));
+ if (!is_infinite(opposite_vh))
+ {
+ Power_side_of_power_sphere_d side =
+ geom_traits().power_side_of_power_sphere_d_object();
+ if (side(Point_const_iterator(ch->vertices_begin()),
+ Point_const_iterator(ch->vertices_end()),
+ opposite_vh->point()) == ON_POSITIVE_SIDE)
+ {
+ if (verbose)
+ CGAL_warning_msg(false, "Non-empty sphere");
+ return false;
+ }
+ }
+ }
+ }
+ }
+ return true;
+}
+
+} //namespace CGAL
+
+#endif //CGAL_REGULAR_TRIANGULATION_H
diff --git a/src/common/include/gudhi_patches/CGAL/Regular_triangulation_traits_adapter.h b/src/common/include/gudhi_patches/CGAL/Regular_triangulation_traits_adapter.h
new file mode 100644
index 00000000..78bb95a6
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/Regular_triangulation_traits_adapter.h
@@ -0,0 +1,288 @@
+// Copyright (c) 2014 INRIA Sophia-Antipolis (France).
+// All rights reserved.
+//
+// This file is part of CGAL (www.cgal.org).
+// You can redistribute it and/or modify it under the terms of the GNU
+// General Public License as published by the Free Software Foundation,
+// either version 3 of the License, or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Clement Jamin
+
+#ifndef CGAL_REGULAR_TRIANGULATION_TRAITS_ADAPTER_H
+#define CGAL_REGULAR_TRIANGULATION_TRAITS_ADAPTER_H
+
+#include <CGAL/basic.h>
+
+#include <boost/iterator/transform_iterator.hpp>
+
+namespace CGAL {
+
+// Wrapper class to make a model of `RegularTriangulationTraits` easily usable
+// by the `Regular_triangulation` class. By using this class:
+// - Point_d (used by `Triangulation` and the TDS) becomes a weighted point
+// - Predicates and functors such as Less_coordinate_d or Orientation_d
+// can be called using weighted points instead of bare points (this is
+// needed because `Weighted_point_d` is not convertible to `Point_d`)
+// This way, `Triangulation` works perfectly well with weighted points.
+
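+// A minimal usage sketch (illustrative only; K below is an assumed model of
+// the d-dimensional kernel concept providing Weighted_point_d, e.g.
+// CGAL::Epick_d):
+//
+//   typedef CGAL::Epick_d< CGAL::Dimension_tag<2> >         K;
+//   typedef CGAL::Regular_triangulation_traits_adapter<K>   Traits;
+//
+//   Traits traits;
+//   Traits::Orientation_d ori = traits.orientation_d_object();
+//   // ori accepts ranges of *weighted* points; the adapter drops the
+//   // weights before forwarding to K::Orientation_d.
+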
+template <class K>
+class Regular_triangulation_traits_adapter
+ : public K
+{
+public:
+ typedef K Base;
+
+ // Required by TriangulationTraits
+ typedef typename K::Dimension Dimension;
+ typedef typename K::FT FT;
+ typedef typename K::Flat_orientation_d Flat_orientation_d;
+ typedef typename K::Weighted_point_d Point_d;
+
+ // Required by RegularTriangulationTraits
+ typedef typename K::Point_d Bare_point_d;
+ typedef typename K::Weighted_point_d Weighted_point_d;
+ typedef typename K::Construct_point_d Construct_point_d;
+ typedef typename K::Compute_weight_d Compute_weight_d;
+ typedef typename K::Power_side_of_power_sphere_d Power_side_of_power_sphere_d;
+ typedef typename K::In_flat_power_side_of_power_sphere_d
+ In_flat_power_side_of_power_sphere_d;
+
+ //===========================================================================
+ // Custom types
+ //===========================================================================
+
+ // Required by SpatialSortingTraits_d
+ class Less_coordinate_d
+ {
+ const K &m_kernel;
+
+ public:
+ typedef bool result_type;
+
+ Less_coordinate_d(const K &kernel)
+ : m_kernel(kernel) {}
+
+ result_type operator()(
+ Weighted_point_d const& p, Weighted_point_d const& q, int i) const
+ {
+ Construct_point_d cp = m_kernel.construct_point_d_object();
+ return m_kernel.less_coordinate_d_object() (cp(p), cp(q), i);
+ }
+ };
+
+ //===========================================================================
+
+ // Required by TriangulationTraits
+ class Orientation_d
+ {
+ const K &m_kernel;
+
+ public:
+ typedef Orientation result_type;
+
+ Orientation_d(const K &kernel)
+ : m_kernel(kernel) {}
+
+ template <typename ForwardIterator>
+ result_type operator()(ForwardIterator start, ForwardIterator end) const
+ {
+ Construct_point_d cp = m_kernel.construct_point_d_object();
+ return m_kernel.orientation_d_object() (
+ boost::make_transform_iterator(start, cp),
+ boost::make_transform_iterator(end, cp)
+ );
+ }
+ };
+
+ //===========================================================================
+
+ // Required by TriangulationTraits
+ class Construct_flat_orientation_d
+ {
+ const K &m_kernel;
+
+ public:
+ typedef Flat_orientation_d result_type;
+
+ Construct_flat_orientation_d(const K &kernel)
+ : m_kernel(kernel) {}
+
+ template <typename ForwardIterator>
+ result_type operator()(ForwardIterator start, ForwardIterator end) const
+ {
+ Construct_point_d cp = m_kernel.construct_point_d_object();
+ return m_kernel.construct_flat_orientation_d_object() (
+ boost::make_transform_iterator(start, cp),
+ boost::make_transform_iterator(end, cp)
+ );
+ }
+ };
+
+
+ //===========================================================================
+
+ // Required by TriangulationTraits
+ class In_flat_orientation_d
+ {
+ const K &m_kernel;
+
+ public:
+ typedef Orientation result_type;
+
+ In_flat_orientation_d(const K &kernel)
+ : m_kernel(kernel) {}
+
+ template <typename ForwardIterator>
+ result_type operator()(Flat_orientation_d orient,
+ ForwardIterator start, ForwardIterator end) const
+ {
+ Construct_point_d cp = m_kernel.construct_point_d_object();
+ return m_kernel.in_flat_orientation_d_object() (
+ orient,
+ boost::make_transform_iterator(start, cp),
+ boost::make_transform_iterator(end, cp)
+ );
+ }
+ };
+
+ //===========================================================================
+
+ // Required by TriangulationTraits
+ class Contained_in_affine_hull_d
+ {
+ const K &m_kernel;
+
+ public:
+ typedef bool result_type;
+
+ Contained_in_affine_hull_d(const K &kernel)
+ : m_kernel(kernel) {}
+
+ template <typename ForwardIterator>
+ result_type operator()(ForwardIterator start, ForwardIterator end,
+ const Weighted_point_d & p) const
+ {
+ Construct_point_d cp = m_kernel.construct_point_d_object();
+ return m_kernel.contained_in_affine_hull_d_object() (
+ boost::make_transform_iterator(start, cp),
+ boost::make_transform_iterator(end, cp),
+ cp(p)
+ );
+ }
+ };
+
+ //===========================================================================
+
+ // Required by TriangulationTraits
+ class Compare_lexicographically_d
+ {
+ const K &m_kernel;
+
+ public:
+ typedef Comparison_result result_type;
+
+ Compare_lexicographically_d(const K &kernel)
+ : m_kernel(kernel) {}
+
+ result_type operator()(
+ const Weighted_point_d & p, const Weighted_point_d & q) const
+ {
+ Construct_point_d cp = m_kernel.construct_point_d_object();
+ return m_kernel.compare_lexicographically_d_object()(cp(p), cp(q));
+ }
+ };
+
+ //===========================================================================
+
+ // Only for Triangulation_off_ostream.h (undocumented)
+ class Compute_coordinate_d
+ {
+ const K &m_kernel;
+
+ public:
+ typedef FT result_type;
+
+ Compute_coordinate_d(const K &kernel)
+ : m_kernel(kernel) {}
+
+ result_type operator()(
+ const Weighted_point_d & p, const int i) const
+ {
+ Construct_point_d cp = m_kernel.construct_point_d_object();
+ return m_kernel.compute_coordinate_d_object()(cp(p), i);
+ }
+ };
+
+ //===========================================================================
+
+ // To satisfy SpatialSortingTraits_d
+ // and also for Triangulation_off_ostream.h (undocumented)
+ class Point_dimension_d
+ {
+ const K &m_kernel;
+
+ public:
+ typedef int result_type;
+
+ Point_dimension_d(const K &kernel)
+ : m_kernel(kernel) {}
+
+ result_type operator()(
+ const Weighted_point_d & p) const
+ {
+ Construct_point_d cp = m_kernel.construct_point_d_object();
+ return m_kernel.point_dimension_d_object()(cp(p));
+ }
+ };
+
+ //===========================================================================
+ // Object creation
+ //===========================================================================
+
+ Less_coordinate_d less_coordinate_d_object() const
+ {
+ return Less_coordinate_d(*this);
+ }
+ Contained_in_affine_hull_d contained_in_affine_hull_d_object() const
+ {
+ return Contained_in_affine_hull_d(*this);
+ }
+ Orientation_d orientation_d_object() const
+ {
+ return Orientation_d(*this);
+ }
+ Construct_flat_orientation_d construct_flat_orientation_d_object() const
+ {
+ return Construct_flat_orientation_d(*this);
+ }
+ In_flat_orientation_d in_flat_orientation_d_object() const
+ {
+ return In_flat_orientation_d(*this);
+ }
+ Compare_lexicographically_d compare_lexicographically_d_object() const
+ {
+ return Compare_lexicographically_d(*this);
+ }
+ Compute_coordinate_d compute_coordinate_d_object() const
+ {
+ return Compute_coordinate_d(*this);
+ }
+ Point_dimension_d point_dimension_d_object() const
+ {
+ return Point_dimension_d(*this);
+ }
+};
+
+
+} //namespace CGAL
+
+#endif // CGAL_REGULAR_TRIANGULATION_TRAITS_ADAPTER_H
diff --git a/src/common/include/gudhi_patches/CGAL/TDS_full_cell_default_storage_policy.h b/src/common/include/gudhi_patches/CGAL/TDS_full_cell_default_storage_policy.h
new file mode 100644
index 00000000..9a6030e5
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/TDS_full_cell_default_storage_policy.h
@@ -0,0 +1,99 @@
+// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France).
+// All rights reserved.
+//
+// This file is part of CGAL (www.cgal.org).
+// You can redistribute it and/or modify it under the terms of the GNU
+// General Public License as published by the Free Software Foundation,
+// either version 3 of the License, or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Samuel Hornus
+
+#ifndef CGAL_TDS_FULL_CELL_DEFAULT_STORAGE_POLICY_H
+#define CGAL_TDS_FULL_CELL_DEFAULT_STORAGE_POLICY_H
+
+#include <CGAL/Dimension.h>
+#include <CGAL/Compact_container.h>
+#include <CGAL/internal/Static_or_dynamic_array.h>
+
+#include <boost/cstdint.hpp>
+
+namespace CGAL {
+
+// POLICY TAG
+
+struct TDS_full_cell_default_storage_policy {}; // stores no additional data. Uses XOR trick.
+
+template< typename V, typename S, typename D, typename StoragePolicy >
+struct TFC_data; // TFC = Triangulation Full Cell
+
+template< typename Vertex_handle, typename Full_cell_handle, typename Dimen >
+struct TFC_data< Vertex_handle, Full_cell_handle, Dimen, TDS_full_cell_default_storage_policy >
+{
+ typedef typename internal::Dimen_plus_one<Dimen>::type Dimen_plus;
+ typedef typename internal::S_or_D_array< Vertex_handle, Dimen_plus, true > Vertex_handle_array;
+ typedef typename internal::S_or_D_array< Full_cell_handle, Dimen_plus > Full_cell_handle_array;
+
+ Vertex_handle_array vertices_;
+ Full_cell_handle_array neighbors_;
+
+ TFC_data(const int dmax)
+ : vertices_(dmax+1), neighbors_(dmax+1)
+ {}
+ void* for_compact_container() const { return vertices_.for_compact_container(); }
+ void* & for_compact_container() { return vertices_.for_compact_container(); }
+ int dimension() const { return ( vertices_.size() - 1 ); }
+ void set_mirror_index(const int, const int) {}
+#ifdef BOOST_NO_INT64_T
+ typedef std::ptrdiff_t Xor_type;
+#else
+ typedef boost::int_least64_t Xor_type;
+#endif
+ Xor_type xor_of_vertices(const int cur_dim) const
+ {
+ Xor_type result(0);
+ for( int i = 0; i <= cur_dim; ++i )
+ result ^= reinterpret_cast<Xor_type>(&(*vertices_[i]));
+ return result;
+ }
+ // ASSUMES |*this| is indeed a neighbor of neighbor(i):
+ // NOT correct when the hole (in insert_in_hole) is doubly covered.
+ int mirror_index(const int i) const
+ {
+ int index = 0;
+ Full_cell_handle n = neighbors_[i];
+ Full_cell_handle o = n->neighbor(index);
+ while( &(o->combinatorics_) != this )
+ o = n->neighbor(++index);
+ return index;
+ }
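+ // Descriptive note on the XOR trick used by mirror_vertex() below: a cell
+ // and its i-th neighbor share every vertex except one on each side, so
+ // XOR-ing the vertex addresses of the two cells cancels the shared ones and
+ // leaves ptr(vertex of *this opposite the facet) ^ ptr(mirror vertex).
+ // XOR-ing once more with the address of vertices_[i] isolates the address
+ // of the mirror vertex, so no per-cell mirror index needs to be stored.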
+ Vertex_handle mirror_vertex(const int i, const int cur_dim) const
+ {
+ Xor_type opp_vertex = xor_of_vertices(cur_dim)
+ ^ neighbors_[i]->xor_of_vertices(cur_dim)
+ ^ reinterpret_cast<Xor_type>(&(*vertices_[i]));
+ Vertex_handle mirror;
+ typedef typename Vertex_handle::pointer pointer;
+ // mirror.set_pointer(reinterpret_cast<pointer>(opp_vertex));
+ mirror = Compact_container<typename Vertex_handle::value_type>
+ ::s_iterator_to(*(reinterpret_cast<pointer>(opp_vertex)));
+ return mirror;
+ }
+ void swap_vertices(const int d1, const int d2)
+ {
+ std::swap(vertices_[d1], vertices_[d2]);
+ std::swap(neighbors_[d1], neighbors_[d2]);
+ }
+};
+
+} //namespace CGAL
+
+#endif // CGAL_TDS_FULL_CELL_DEFAULT_STORAGE_POLICY_H
diff --git a/src/common/include/gudhi_patches/CGAL/TDS_full_cell_mirror_storage_policy.h b/src/common/include/gudhi_patches/CGAL/TDS_full_cell_mirror_storage_policy.h
new file mode 100644
index 00000000..095dfe68
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/TDS_full_cell_mirror_storage_policy.h
@@ -0,0 +1,71 @@
+// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France).
+// All rights reserved.
+//
+// This file is part of CGAL (www.cgal.org).
+// You can redistribute it and/or modify it under the terms of the GNU
+// General Public License as published by the Free Software Foundation,
+// either version 3 of the License, or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Samuel Hornus
+
+#ifndef CGAL_TDS_FULL_CELL_MIRROR_STORAGE_POLICY_H
+#define CGAL_TDS_FULL_CELL_MIRROR_STORAGE_POLICY_H
+
+#include <CGAL/TDS_full_cell_default_storage_policy.h>
+
+namespace CGAL {
+
+// POLICY TAGS
+
+struct TDS_full_cell_mirror_storage_policy {}; // Stores the mirror index of all vertices.
+
+template< typename Vertex_handle, typename Full_cell_handle, typename Maximal_dimension >
+struct TFC_data< Vertex_handle, Full_cell_handle, Maximal_dimension, TDS_full_cell_mirror_storage_policy >
+: public TFC_data< Vertex_handle, Full_cell_handle, Maximal_dimension, TDS_full_cell_default_storage_policy >
+{
+ typedef TFC_data< Vertex_handle, Full_cell_handle, Maximal_dimension, TDS_full_cell_default_storage_policy > Base;
+ typedef typename Base::Vertex_handle_array Vertex_handle_array;
+ typedef typename Base::Full_cell_handle_array Full_cell_handle_array;
+ typedef typename internal::S_or_D_array< int, typename Base::Dimen_plus > Int_array;
+
+private:
+ Int_array mirror_vertices_;
+
+public:
+ TFC_data(const int dmax)
+ : Base(dmax), mirror_vertices_(dmax+1)
+ {}
+
+ void set_mirror_index(const int i, const int index)
+ {
+ mirror_vertices_[i] = index;
+ }
+ int mirror_index(const int i) const
+ {
+ return mirror_vertices_[i];
+ }
+ Vertex_handle mirror_vertex(const int i, const int) const
+ {
+ return Base::neighbors_[i]->vertex(mirror_index(i));
+ }
+ void swap_vertices(const int d1, const int d2)
+ {
+ Base::swap_vertices(d1, d2);
+ std::swap(mirror_vertices_[d1], mirror_vertices_[d2]);
+ Base::neighbors_[d1]->set_mirror_index(mirror_vertices_[d1], d1);
+ Base::neighbors_[d2]->set_mirror_index(mirror_vertices_[d2], d2);
+ }
+};
+
+} //namespace CGAL
+
+#endif // CGAL_TDS_FULL_CELL_MIRROR_STORAGE_POLICY_H
diff --git a/src/common/include/gudhi_patches/CGAL/Triangulation.h b/src/common/include/gudhi_patches/CGAL/Triangulation.h
new file mode 100644
index 00000000..906df92e
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/Triangulation.h
@@ -0,0 +1,1424 @@
+// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France).
+// All rights reserved.
+//
+// This file is part of CGAL (www.cgal.org).
+// You can redistribute it and/or modify it under the terms of the GNU
+// General Public License as published by the Free Software Foundation,
+// either version 3 of the License, or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Samuel Hornus
+
+#ifndef CGAL_TRIANGULATION_H
+#define CGAL_TRIANGULATION_H
+
+#include <CGAL/internal/Triangulation/utilities.h>
+#include <CGAL/Triangulation_data_structure.h>
+#include <CGAL/Triangulation_full_cell.h>
+#include <CGAL/Triangulation_vertex.h>
+#include <CGAL/Iterator_project.h>
+#include <CGAL/spatial_sort.h>
+#include <CGAL/Dimension.h>
+#include <CGAL/iterator.h>
+#include <CGAL/Default.h>
+#include <CGAL/Random.h>
+
+#include <boost/iterator/filter_iterator.hpp>
+#include <boost/iterator/transform_iterator.hpp>
+
+namespace CGAL {
+
+// Functor used with boost::transform_iterator to iterate over vertex handles
+// while returning points when dereferenced. If the current vertex handle
+// vh == vh_where_point_should_be_substituted, it returns "subtitute_point",
+// otherwise it returns vh->point().
+template<class VertexHandleConstIter>
+class Substitute_point_in_vertex_iterator
+{
+ typedef typename std::iterator_traits<VertexHandleConstIter>::value_type Vertex_handle;
+ typedef typename Vertex_handle::value_type Vertex;
+ typedef typename Vertex::Point Point;
+
+public:
+ typedef Point const& result_type; // For result_of
+
+ Substitute_point_in_vertex_iterator(
+ Vertex_handle vh_where_point_should_be_substituted,
+ Point const *subtitute_point)
+ : vh_where_point_should_be_substituted_(vh_where_point_should_be_substituted)
+ , subtitute_point_(subtitute_point)
+ {}
+
+ result_type operator()(Vertex_handle vh) const
+ {
+ if (vh == vh_where_point_should_be_substituted_)
+ return *subtitute_point_;
+ else
+ return vh->point();
+ }
+
+private:
+ Vertex_handle vh_where_point_should_be_substituted_;
+ Point const *subtitute_point_;
+
+};
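+
+// A usage sketch (illustrative; |cell|, |inf_v| and |p| are assumed to exist
+// in the caller), mirroring how this functor is combined with
+// boost::make_transform_iterator elsewhere in these headers:
+//
+//   typedef Full_cell::Vertex_handle_const_iterator VHCI;
+//   Substitute_point_in_vertex_iterator<VHCI> subst(inf_v, &p);
+//   Orientation o = orientation(
+//     boost::make_transform_iterator(cell->vertices_begin(), subst),
+//     boost::make_transform_iterator(cell->vertices_end(),   subst));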
+
+
+template < class TriangulationTraits, class TDS_ = Default >
+class Triangulation
+{
+ typedef typename TriangulationTraits::Dimension Maximal_dimension_;
+ typedef typename Default::Get<TDS_, Triangulation_data_structure
+ < Maximal_dimension_,
+ Triangulation_vertex<TriangulationTraits>,
+ Triangulation_full_cell<TriangulationTraits> >
+ >::type TDS;
+ typedef Triangulation<TriangulationTraits, TDS_> Self;
+
+protected:
+ typedef typename TriangulationTraits::Flat_orientation_d Flat_orientation_d;
+ typedef typename TriangulationTraits::Construct_flat_orientation_d Construct_flat_orientation_d;
+ typedef typename TriangulationTraits::In_flat_orientation_d In_flat_orientation_d;
+
+ // Wrapper
+ struct Coaffine_orientation_d
+ {
+ boost::optional<Flat_orientation_d>* fop;
+ Construct_flat_orientation_d cfo;
+ In_flat_orientation_d ifo;
+
+ Coaffine_orientation_d(
+ boost::optional<Flat_orientation_d>& x,
+ Construct_flat_orientation_d const&y,
+ In_flat_orientation_d const&z)
+ : fop(&x), cfo(y), ifo(z) {}
+
+ template<class Iter>
+ CGAL::Orientation operator()(Iter a, Iter b) const
+ {
+ if (*fop)
+ return ifo(fop->get(),a,b);
+ *fop = cfo(a,b);
+ CGAL_assertion(ifo(fop->get(),a,b) == CGAL::POSITIVE);
+ return CGAL::POSITIVE;
+ }
+ };
+
+ void reset_flat_orientation()
+ {
+ if (current_dimension() == preset_flat_orientation_.first)
+ {
+ CGAL_assertion(preset_flat_orientation_.second != NULL);
+ flat_orientation_ = *preset_flat_orientation_.second;
+ }
+ else
+ flat_orientation_ = boost::none;
+ }
+
+ typedef typename TriangulationTraits::Orientation_d
+ Orientation_d;
+
+public:
+
+ typedef TriangulationTraits Geom_traits;
+ typedef TDS Triangulation_ds;
+
+ typedef typename TDS::Vertex Vertex;
+ typedef typename TDS::Full_cell Full_cell;
+ typedef typename TDS::Facet Facet;
+ typedef typename TDS::Face Face;
+
+ typedef Maximal_dimension_ Maximal_dimension;
+ typedef typename Geom_traits::Point_d Point;
+
+ typedef typename TDS::Vertex_handle Vertex_handle;
+ typedef typename TDS::Vertex_iterator Vertex_iterator;
+ typedef typename TDS::Vertex_const_handle Vertex_const_handle;
+ typedef typename TDS::Vertex_const_iterator Vertex_const_iterator;
+
+ typedef typename TDS::Full_cell_handle Full_cell_handle;
+ typedef typename TDS::Full_cell_iterator Full_cell_iterator;
+ typedef typename TDS::Full_cell_const_handle Full_cell_const_handle;
+ typedef typename TDS::Full_cell_const_iterator Full_cell_const_iterator;
+
+ typedef typename TDS::Facet_iterator Facet_iterator;
+
+ typedef typename TDS::size_type size_type;
+ typedef typename TDS::difference_type difference_type;
+
+ /// The type of location a new point is found lying on
+ enum Locate_type
+ {
+ ON_VERTEX = 0 // simplex of dimension 0
+ , IN_FACE = 1 // simplex of dimension in [ 1, |current_dimension()| - 2 ]
+ , IN_FACET = 2 // simplex of dimension |current_dimension()| - 1
+ , IN_FULL_CELL = 3 /// simplex of dimension |current_dimension()|
+ , OUTSIDE_CONVEX_HULL = 4
+ , OUTSIDE_AFFINE_HULL = 5
+ };
+
+ // Finite elements iterators
+
+ class Finiteness_predicate;
+
+ typedef boost::filter_iterator<Finiteness_predicate, Vertex_iterator>
+ Finite_vertex_iterator;
+ typedef boost::filter_iterator<Finiteness_predicate, Vertex_const_iterator>
+ Finite_vertex_const_iterator;
+ typedef boost::filter_iterator<Finiteness_predicate, Full_cell_iterator>
+ Finite_full_cell_iterator;
+ typedef boost::filter_iterator<Finiteness_predicate, Full_cell_const_iterator>
+ Finite_full_cell_const_iterator;
+ typedef boost::filter_iterator<Finiteness_predicate, Facet_iterator>
+ Finite_facet_iterator;
+
+protected: // DATA MEMBERS
+
+ Triangulation_ds tds_;
+ const Geom_traits kernel_;
+ Vertex_handle infinity_;
+ mutable std::vector<Oriented_side> orientations_;
+ mutable boost::optional<Flat_orientation_d> flat_orientation_;
+ // The user can specify a Flat_orientation_d object to be used for
+ // orienting simplices of a specific dimension
+ // (= preset_flat_orientation_.first)
+ // (preset_flat_orientation_.first = numeric_limits<int>::max() otherwise)
+ std::pair<int, const Flat_orientation_d *> preset_flat_orientation_;
+ // for stochastic walk in the locate() function:
+ mutable Random rng_;
+#ifdef CGAL_TRIANGULATION_STATISTICS
+ mutable unsigned long walk_size_;
+#endif
+
+protected: // HELPER FUNCTIONS
+
+ typedef CGAL::Iterator_project<
+ typename Full_cell::Vertex_handle_const_iterator,
+ internal::Triangulation::Point_from_vertex_handle<Vertex_handle, Point>
+ > Point_const_iterator;
+
+ Point_const_iterator points_begin(Full_cell_const_handle c) const
+ { return Point_const_iterator(c->vertices_begin()); }
+ Point_const_iterator points_end(Full_cell_const_handle c) const
+ { return Point_const_iterator(c->vertices_end()); }
+ Point_const_iterator points_begin(Full_cell_handle c) const
+ { return Point_const_iterator(c->vertices_begin()); }
+ Point_const_iterator points_end(Full_cell_handle c) const
+ { return Point_const_iterator(c->vertices_end()); }
+
+public:
+
+ // FACETS OPERATIONS
+
+ Full_cell_handle full_cell(const Facet & f) const
+ {
+ return tds().full_cell(f);
+ }
+
+ int index_of_covertex(const Facet & f) const
+ {
+ return tds().index_of_covertex(f);
+ }
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - UTILITIES
+
+ // A co-dimension 2 sub-simplex. called a Rotor because we can rotate
+ // the two "covertices" around the sub-simplex. Useful for traversing the
+ // boundary of a hole. NOT DOCUMENTED
+ typedef cpp11::tuple<Full_cell_handle, int, int> Rotor;
+
+ // Commented out because it was causing "internal compiler error" in MSVC
+ /*Full_cell_handle full_cell(const Rotor & r) const // NOT DOCUMENTED
+ {
+ return cpp11::get<0>(r);
+ }
+ int index_of_covertex(const Rotor & r) const // NOT DOCUMENTED
+ {
+ return cpp11::get<1>(r);
+ }
+ int index_of_second_covertex(const Rotor & r) const // NOT DOCUMENTED
+ {
+ return cpp11::get<2>(r);
+ }*/
+ Rotor rotate_rotor(Rotor & r) // NOT DOCUMENTED...
+ {
+ int opposite = cpp11::get<0>(r)->mirror_index(cpp11::get<1>(r));
+ Full_cell_handle s = cpp11::get<0>(r)->neighbor(cpp11::get<1>(r));
+ int new_second = s->index(cpp11::get<0>(r)->vertex(cpp11::get<2>(r)));
+ return Rotor(s, new_second, opposite);
+ }
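+
+ // Descriptive note: rotate_rotor() keeps the codimension-2 sub-simplex of
+ // the rotor fixed and moves to the neighbor opposite the first covertex;
+ // the old second covertex becomes the new first covertex and the mirror
+ // vertex becomes the new second covertex, so repeated calls walk around
+ // the full cells incident to that sub-simplex.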
+
+ // - - - - - - - - - - - - - - - - - - - - - - - - CREATION / CONSTRUCTORS
+
+ Triangulation(int dim, const Geom_traits &k = Geom_traits())
+ : tds_(dim)
+ , kernel_(k)
+ , infinity_()
+ , preset_flat_orientation_((std::numeric_limits<int>::max)(),
+ (Flat_orientation_d*) NULL)
+ , rng_((long)0)
+#ifdef CGAL_TRIANGULATION_STATISTICS
+ ,walk_size_(0)
+#endif
+ {
+ clear();
+ }
+
+ // With this constructor,
+ // the user can specify a Flat_orientation_d object to be used for
+ // orienting simplices of a specific dimension
+ // (= preset_flat_orientation_.first)
+ // It is used by the dark triangulations created by DT::remove
+ Triangulation(
+ int dim,
+ const std::pair<int, const Flat_orientation_d *> &preset_flat_orientation,
+ const Geom_traits k = Geom_traits())
+ : tds_(dim)
+ , kernel_(k)
+ , infinity_()
+ , preset_flat_orientation_(preset_flat_orientation)
+ , rng_((long)0)
+#ifdef CGAL_TRIANGULATION_STATISTICS
+ ,walk_size_(0)
+#endif
+ {
+ clear();
+ }
+
+ Triangulation(const Triangulation & t2)
+ : tds_(t2.tds_)
+ , kernel_(t2.kernel_)
+ , infinity_()
+ , preset_flat_orientation_((std::numeric_limits<int>::max)(),
+ (Flat_orientation_d*) NULL)
+ , rng_(t2.rng_)
+#ifdef CGAL_TRIANGULATION_STATISTICS
+ ,walk_size_(t2.walk_size_)
+#endif
+ {
+ // We find the vertex at infinity by scanning the vertices of both
+ // triangulations. This works because Compact_container guarantees that
+ // the vertices in the copy (*this) are stored in the same order as in
+ // the original triangulation (t2)
+ infinity_ = vertices_begin();
+ Vertex_const_iterator inf2 = t2.vertices_begin();
+ while( inf2 != t2.infinite_vertex() )
+ {
+ ++infinity_;
+ ++inf2;
+ }
+ // A full_cell has at most 1 + maximal_dimension() facets:
+ orientations_.resize(1 + maximal_dimension());
+ // Our coaffine orientation predicate HAS state member variables
+ reset_flat_orientation();
+ }
+
+ ~Triangulation() {}
+
+ // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ACCESS FUNCTIONS
+
+ /* These three functions are no longer needed since we do not use them anymore
+ in the Delaunay_triangulation::remove. *But*, they may reappear in the future
+ if we manage to pass the information that flags/TDS_data is available or not
+ for marking simplices in Delaunay_triangulation::remove. This would be useful
+ to make it a little faster, instead of binary searching if a simplex is marked
+ or not...
+ // NOT DOCUMENTED --
+ bool get_visited(Full_cell_handle s) const
+ {
+ return tds().get_visited(s);
+ }
+ // NOT DOCUMENTED --
+ bool get_visited(Full_cell_const_handle s) const
+ {
+ return tds().get_visited(s);
+ }
+
+ // NOT DOCUMENTED --
+ void set_visited(Full_cell_handle s, bool b) const
+ {
+ tds().set_visited(s, b);
+ } */
+
+ Coaffine_orientation_d coaffine_orientation_predicate() const
+ {
+ return Coaffine_orientation_d (
+ flat_orientation_,
+ geom_traits().construct_flat_orientation_d_object(),
+ geom_traits().in_flat_orientation_d_object()
+ );
+ }
+
+ const Triangulation_ds & tds() const
+ {
+ return tds_;
+ }
+
+ Triangulation_ds & tds()
+ {
+ return tds_;
+ }
+
+ const Geom_traits & geom_traits() const
+ {
+ return kernel_;
+ }
+
+ int maximal_dimension() const { return tds().maximal_dimension(); }
+ int current_dimension() const { return tds().current_dimension(); }
+
+ bool empty() const
+ {
+ return current_dimension() == -1;
+ }
+
+ size_type number_of_vertices() const
+ {
+ return tds().number_of_vertices() - 1;
+ }
+
+ size_type number_of_full_cells() const
+ {
+ return tds().number_of_full_cells();
+ }
+
+ Vertex_handle infinite_vertex() const
+ {
+ return infinity_;
+ }
+
+ Full_cell_handle infinite_full_cell() const
+ {
+ CGAL_assertion(infinite_vertex()->full_cell()->has_vertex(infinite_vertex()));
+ return infinite_vertex()->full_cell();
+ }
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - NON CONSTANT-TIME ACCESS FUNCTIONS
+
+ size_type number_of_finite_full_cells() const
+ {
+ Full_cell_const_iterator s = full_cells_begin();
+ size_type result = number_of_full_cells();
+ for( ; s != full_cells_end(); ++s )
+ {
+ if( is_infinite(s) )
+ --result;
+ }
+ return result;
+ }
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - TRAVERSAL
+
+ Vertex_iterator vertices_begin() { return tds().vertices_begin(); }
+ Vertex_iterator vertices_end() { return tds().vertices_end(); }
+
+ Vertex_const_iterator vertices_begin() const { return tds().vertices_begin(); }
+ Vertex_const_iterator vertices_end() const { return tds().vertices_end(); }
+
+ Finite_vertex_iterator finite_vertices_begin()
+ { return Finite_vertex_iterator(Finiteness_predicate(*this), vertices_begin(), vertices_end()); }
+ Finite_vertex_iterator finite_vertices_end()
+ { return Finite_vertex_iterator(Finiteness_predicate(*this), vertices_end(), vertices_end()); }
+ Finite_vertex_const_iterator finite_vertices_begin() const
+ { return Finite_vertex_const_iterator(Finiteness_predicate(*this), vertices_begin(), vertices_end()); }
+ Finite_vertex_const_iterator finite_vertices_end() const
+ { return Finite_vertex_const_iterator(Finiteness_predicate(*this), vertices_end(), vertices_end()); }
+
+ Full_cell_iterator full_cells_begin() { return tds().full_cells_begin(); }
+ Full_cell_iterator full_cells_end() { return tds().full_cells_end(); }
+
+ Full_cell_const_iterator full_cells_begin() const { return tds().full_cells_begin(); }
+ Full_cell_const_iterator full_cells_end() const { return tds().full_cells_end(); }
+
+ Finite_full_cell_iterator finite_full_cells_begin()
+ { return Finite_full_cell_iterator(Finiteness_predicate(*this), full_cells_begin(), full_cells_end()); }
+ Finite_full_cell_iterator finite_full_cells_end()
+ { return Finite_full_cell_iterator(Finiteness_predicate(*this), full_cells_end(), full_cells_end()); }
+ Finite_full_cell_const_iterator finite_full_cells_begin() const
+ { return Finite_full_cell_const_iterator(Finiteness_predicate(*this), full_cells_begin(), full_cells_end()); }
+ Finite_full_cell_const_iterator finite_full_cells_end() const
+ { return Finite_full_cell_const_iterator(Finiteness_predicate(*this), full_cells_end(), full_cells_end()); }
+
+ Facet_iterator facets_begin() { return tds().facets_begin(); }
+ Facet_iterator facets_end() { return tds().facets_end(); }
+ Facet_iterator finite_facets_begin()
+ { return Finite_facet_iterator(Finiteness_predicate(*this), facets_begin(), facets_end()); }
+ Facet_iterator finite_facets_end()
+ { return Finite_facet_iterator(Finiteness_predicate(*this), facets_end(), facets_end()); }
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - SOME PREDICATE FUNCTORS
+
+ class Finiteness_predicate
+ {
+ const Self & t_;
+ public:
+ Finiteness_predicate(const Self & t) : t_(t) {}
+ template < class T >
+ bool operator()(const T & t) const
+ {
+ return ! t_.is_infinite(t);
+ }
+ };
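+
+  /* Editor's note: illustrative sketch only, not part of the original header. The
+     finite_* accessors above are filtered iterators built from Finiteness_predicate,
+     so the infinite vertex is skipped automatically. Assuming a triangulation
+     instance `tri' whose vertices store points (hypothetical names):
+
+       for( Finite_vertex_const_iterator vit = tri.finite_vertices_begin();
+            vit != tri.finite_vertices_end(); ++vit )
+         std::cout << vit->point() << std::endl;
+  */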
+
+ class Point_equality_predicate
+ {
+ const Point & o_;
+ public:
+ Point_equality_predicate(const Point & o) : o_(o) {}
+ bool operator()(const Point & o) const { return (o == o_ );}
+ };
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - SIMPLE QUERIES
+/*
+ bool is_vertex(const Point & p, Vertex_handle & v, Full_cell_handle hint = Full_cell_handle()) const
+ {
+ Locate_type lt;
+ Face f(maximal_dimension());
+ Facet ft;
+ Full_cell_handle s = locate(p, lt, f, ft, hint);
+ if( ON_VERTEX == lt )
+ {
+ v = s->vertex(f.index(0));
+ return true;
+ }
+ return false;
+ }
+
+ bool is_vertex(Vertex_const_handle v) const
+ {
+ return tds().is_vertex(v);
+ }
+
+ bool is_full_cell(Full_cell_const_handle s) const
+ {
+ return tds().is_full_cell(s);
+ }
+*/
+
+ bool is_infinite(Vertex_const_handle v) const
+ {
+ CGAL_precondition(Vertex_const_handle() != v);
+ return (infinite_vertex() == v);
+ }
+
+ bool is_infinite(const Vertex & v) const /* internal use, not documented */
+ {
+ return (&(*infinite_vertex()) == &v);
+ }
+
+ bool is_infinite(Full_cell_const_handle s) const
+ {
+ CGAL_precondition(Full_cell_const_handle() != s);
+ return is_infinite(*s);
+ }
+ bool is_infinite(const Full_cell & s) const /* internal use, not documented */
+ {
+ for(int i = 0; i <= current_dimension(); ++i)
+ if( is_infinite(s.vertex(i)) )
+ return true;
+ return false;
+ }
+ bool is_infinite(const Facet & ft) const
+ {
+ Full_cell_const_handle s = full_cell(ft);
+ CGAL_precondition(s != Full_cell_const_handle());
+ if( is_infinite(s) )
+ return (s->vertex(index_of_covertex(ft)) != infinite_vertex());
+ return false;
+ }
+
+ bool is_infinite(const Face & f) const
+ {
+ Full_cell_const_handle s = f.full_cell();
+ CGAL_precondition(s != Full_cell_const_handle());
+ if( is_infinite(s) )
+ {
+ Vertex_handle v;
+ for( int i(0); i<= f.face_dimension(); ++i)
+ if ( is_infinite( f.vertex(i) )) return true;
+ }
+ return false;
+ }
+
+ // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ELEMENT GATHERING
+
+
+ template< typename OutputIterator >
+ OutputIterator incident_full_cells(const Face & f, OutputIterator out) const
+ {
+ return tds().incident_full_cells(f, out);
+ }
+ template< typename OutputIterator >
+ OutputIterator incident_full_cells(Vertex_const_handle v, OutputIterator out) const
+ {
+ return tds().incident_full_cells(v, out);
+ }
+ template< typename OutputIterator >
+ OutputIterator star(const Face & f, OutputIterator out) const
+ {
+ return tds().star(f, out);
+ }
+
+ template< typename OutputIterator >
+ OutputIterator incident_faces(Vertex_const_handle v, int d, OutputIterator out) const
+ {
+ return tds().incident_faces(v, d, out);
+ }
+ /*
+ template< typename OutputIterator, class Comparator >
+ OutputIterator incident_upper_faces( Vertex_const_handle v, int d,
+ OutputIterator out, Comparator cmp = Comparator())
+ {
+ return tds().incident_upper_faces(v, d, out, cmp);
+ }
+ template< typename OutputIterator >
+ OutputIterator incident_upper_faces( Vertex_const_handle v, int d,
+ OutputIterator out)
+ { // FIXME: uncomment this function, since it uses a comparator specific to
+ // *geometric* triangulation (taking infinite vertex into account)
+ internal::Triangulation::Compare_vertices_for_upper_face<Self> cmp(*this);
+ return tds().incident_upper_faces(v, d, out, cmp);
+ }
+ */
+ Orientation orientation(Full_cell_const_handle s, bool in_is_valid = false) const
+ {
+ if( ! in_is_valid )
+ CGAL_assertion( ! is_infinite(s) );
+ if( 0 == current_dimension() )
+ return POSITIVE;
+ if( current_dimension() == maximal_dimension() )
+ {
+ Orientation_d ori = geom_traits().orientation_d_object();
+ return ori(points_begin(s), points_begin(s) + 1 + current_dimension());
+ }
+ else
+ {
+ return coaffine_orientation_predicate()(points_begin(s), points_begin(s) + 1 + current_dimension());
+ }
+ }
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - UPDATE OPERATIONS
+
+ void clear()
+ {
+ tds_.clear();
+ infinity_ = tds().insert_increase_dimension();
+ // A full_cell has at most 1 + maximal_dimension() facets:
+ orientations_.resize(1 + maximal_dimension());
+    // Our coaffine orientation predicate HAS state member variables.
+ reset_flat_orientation();
+#ifdef CGAL_TRIANGULATION_STATISTICS
+ walk_size_ = 0;
+#endif
+ }
+
+ void set_current_dimension(int d)
+ {
+ tds().set_current_dimension(d);
+ }
+
+ Full_cell_handle new_full_cell()
+ {
+ return tds().new_full_cell();
+ }
+
+ Vertex_handle new_vertex()
+ {
+ return tds().new_vertex();
+ }
+
+ Vertex_handle new_vertex(const Point & p)
+ {
+ return tds().new_vertex(p);
+ }
+
+ void set_neighbors(Full_cell_handle s, int i, Full_cell_handle s1, int j)
+ {
+ tds().set_neighbors(s, i, s1, j);
+ }
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - VALIDITY
+
+ bool is_valid(bool = false, int = 0) const;
+ bool are_incident_full_cells_valid(Vertex_const_handle, bool = false, int = 0) const;
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - POINT LOCATION
+
+protected:
+ template< typename OrientationPredicate >
+ Full_cell_handle do_locate(const Point &, Locate_type &, Face &, Facet &,
+ Full_cell_handle start,
+ const OrientationPredicate & o) const;
+public:
+ Full_cell_handle locate(const Point &, Locate_type &, Face &, Facet &,
+ Full_cell_handle start = Full_cell_handle()) const;
+ Full_cell_handle locate(const Point &, Locate_type &, Face &, Facet &,
+ Vertex_handle) const;
+ Full_cell_handle locate(const Point & p, Full_cell_handle s = Full_cell_handle()) const;
+ Full_cell_handle locate(const Point & p, Vertex_handle v) const;
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - REMOVALS
+
+ Vertex_handle contract_face(const Point &, const Face &);
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - POINT INSERTION
+
+ template< typename ForwardIterator >
+ size_type insert(ForwardIterator start, ForwardIterator end)
+ {
+ size_type n = number_of_vertices();
+ std::vector<Point> points(start, end);
+ spatial_sort(points.begin(), points.end(), geom_traits());
+ Full_cell_handle hint = Full_cell_handle();
+ typename std::vector<Point>::const_iterator s = points.begin();
+ while( s != points.end() )
+ {
+ hint = insert(*s++, hint)->full_cell();
+ }
+ return number_of_vertices() - n;
+ }
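+
+  /* Editor's note: illustrative sketch only. The range insert above spatially sorts the
+     input once and then inserts each point with the previously created full cell as a
+     hint, which is why it is preferable to a loop of single insertions for large batches.
+     Assuming `pts' is a std::vector<Point> and `tri' a triangulation of suitable maximal
+     dimension (hypothetical names):
+
+       std::size_t n_new = tri.insert(pts.begin(), pts.end());
+       // n_new counts only newly created vertices: a point equal to an existing vertex
+       // falls into the ON_VERTEX case of insert() below and creates no new vertex.
+  */
+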
+ Vertex_handle insert(const Point &, Locate_type, const Face &, const Facet &, Full_cell_handle);
+ Vertex_handle insert(const Point &, Full_cell_handle start = Full_cell_handle());
+ Vertex_handle insert(const Point &, Vertex_handle);
+ template< typename ForwardIterator >
+ Vertex_handle insert_in_hole(const Point & p, ForwardIterator start, ForwardIterator end, const Facet & ft)
+ {
+ Emptyset_iterator out;
+ return insert_in_hole(p, start, end, ft, out);
+ }
+ template< typename ForwardIterator, typename OutputIterator >
+ Vertex_handle insert_in_hole(const Point & p, ForwardIterator start, ForwardIterator end, const Facet & ft,
+ OutputIterator out)
+ {
+ Vertex_handle v = tds().insert_in_hole(start, end, ft, out);
+ v->set_point(p);
+ return v;
+ }
+ Vertex_handle insert_in_face(const Point &, const Face &);
+ Vertex_handle insert_in_facet(const Point &, const Facet &);
+ Vertex_handle insert_in_full_cell(const Point &, Full_cell_handle);
+ Vertex_handle insert_outside_convex_hull_1(const Point &, Full_cell_handle);
+ Vertex_handle insert_outside_convex_hull(const Point &, Full_cell_handle);
+ Vertex_handle insert_outside_affine_hull(const Point &);
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - FACET-TRAVERSAL PREDICATES
+
+ template< typename OrientationPredicate >
+ class Outside_convex_hull_traversal_predicate
+ {
+ Triangulation & t_;
+ const Point & p_;
+ OrientationPredicate const& ori_;
+ int cur_dim_;
+ public:
+ Outside_convex_hull_traversal_predicate(Triangulation & t, const Point & p,
+ OrientationPredicate const& ori)
+ : t_(t), p_(p), ori_(ori), cur_dim_(t.current_dimension()) {}
+ // FUTURE change parameter to const reference
+ bool operator()(Facet f) const
+ {
+ Full_cell_handle s = t_.full_cell(f);
+ const int i = t_.index_of_covertex(f);
+ Full_cell_handle n = s->neighbor(i);
+ if( ! t_.is_infinite(n) )
+ return false;
+ int inf_v_index = n->index(t_.infinite_vertex());
+ n->vertex(inf_v_index)->set_point(p_);
+ bool ok = (POSITIVE == ori_(t_.points_begin(n), t_.points_begin(n) + cur_dim_ + 1));
+ return ok;
+ }
+ };
+
+ // make sure all full_cells have positive orientation
+ void reorient_full_cells();
+
+protected:
+ // This is used in the |remove(v)| member function to manage sets of Full_cell_handles
+ template< typename FCH >
+ struct Full_cell_set : public std::vector<FCH>
+ {
+ typedef std::vector<FCH> Base_set;
+ using Base_set::begin;
+ using Base_set::end;
+ void make_searchable()
+ { // sort the full cell handles
+ std::sort(begin(), end());
+ }
+ bool contains(const FCH & fch) const
+ {
+ return std::binary_search(begin(), end(), fch);
+ }
+ bool contains_1st_and_not_2nd(const FCH & fst, const FCH & snd) const
+ {
+ return ( ! contains(snd) ) && ( contains(fst) );
+ }
+ };
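+
+  /* Editor's note: illustrative sketch only. Full_cell_set is a plain std::vector of
+     handles plus a sort-then-binary-search membership test; the intended pattern
+     (hypothetical handles c1, c2) is:
+
+       Full_cell_set<Full_cell_handle> marked;
+       marked.push_back(c1);        // the vector interface is inherited
+       marked.make_searchable();    // sort once, before any contains() query
+       bool only_first = marked.contains_1st_and_not_2nd(c1, c2);
+  */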
+
+ void display_all_full_cells__debugging() const
+ {
+ std::cerr << "ALL FULL CELLS:" << std::endl;
+ for (Full_cell_const_iterator cit = full_cells_begin() ;
+ cit != full_cells_end() ; ++cit )
+ {
+ std::cerr << std::hex << &*cit << ": ";
+ for (int jj = 0 ; jj <= current_dimension() ; ++jj)
+ std::cerr << (is_infinite(cit->vertex(jj)) ? 0xFFFFFFFF : (unsigned int)&*cit->vertex(jj)) << " - ";
+ std::cerr << std::dec << std::endl;
+ }
+ std::cerr << std::endl;
+ }
+
+
+}; // Triangulation<...>
+
+// = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
+
+// CLASS MEMBER FUNCTIONS
+
+template < class TT, class TDS >
+void
+Triangulation<TT, TDS>
+::reorient_full_cells()
+{
+ if( current_dimension() < 1 )
+ return;
+
+ Full_cell_iterator sit = full_cells_begin();
+ Full_cell_iterator send = full_cells_end();
+ for ( ; sit != send ; ++sit)
+ {
+ if( ! (is_infinite(sit) && (1 == current_dimension())) )
+ {
+ sit->swap_vertices(current_dimension() - 1, current_dimension());
+ }
+ }
+}
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+// - - - - - - - - - - - - - - - - - - - - - - - - THE REMOVAL METHODS
+
+template < class TT, class TDS >
+typename Triangulation<TT, TDS>::Vertex_handle
+Triangulation<TT, TDS>
+::contract_face(const Point & p, const Face & f)
+{
+ CGAL_precondition( ! is_infinite(f) );
+ Vertex_handle v = tds().contract_face(f);
+ v->set_point(p);
+ CGAL_expensive_postcondition_msg(are_incident_full_cells_valid(v), "new point is not where it should be");
+ return v;
+}
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+// - - - - - - - - - - - - - - - - - - - - - - - - THE INSERTION METHODS
+
+template < class TT, class TDS >
+typename Triangulation<TT, TDS>::Vertex_handle
+Triangulation<TT, TDS>
+::insert(const Point & p, Locate_type lt, const Face & f, const Facet & ft, Full_cell_handle s)
+{
+ switch( lt )
+ {
+ case IN_FULL_CELL:
+ return insert_in_full_cell(p, s);
+ break;
+ case OUTSIDE_CONVEX_HULL:
+ return insert_outside_convex_hull(p, s);
+ break;
+ case OUTSIDE_AFFINE_HULL:
+ return insert_outside_affine_hull(p);
+ break;
+ case IN_FACET:
+ {
+ return insert_in_facet(p, ft);
+ break;
+ }
+ case IN_FACE:
+ return insert_in_face(p, f);
+ break;
+ case ON_VERTEX:
+ s->vertex(f.index(0))->set_point(p);
+ return s->vertex(f.index(0));
+ break;
+ }
+ CGAL_assertion(false);
+ return Vertex_handle();
+}
+
+template < class TT, class TDS >
+typename Triangulation<TT, TDS>::Vertex_handle
+Triangulation<TT, TDS>
+::insert(const Point & p, Full_cell_handle start)
+{
+ Locate_type lt;
+ Face f(maximal_dimension());
+ Facet ft;
+ Full_cell_handle s = locate(p, lt, f, ft, start);
+ return insert(p, lt, f, ft, s);
+}
+
+template < class TT, class TDS >
+typename Triangulation<TT, TDS>::Vertex_handle
+Triangulation<TT, TDS>
+::insert(const Point & p, Vertex_handle v)
+{
+ if( Vertex_handle() == v )
+ v = infinite_vertex();
+ return insert(p, v->full_cell());
+}
+
+template < class TT, class TDS >
+typename Triangulation<TT, TDS>::Vertex_handle
+Triangulation<TT, TDS>
+::insert_in_face(const Point & p, const Face & f)
+{
+ CGAL_precondition( ! is_infinite(f) );
+ Vertex_handle v = tds().insert_in_face(f);
+ v->set_point(p);
+ return v;
+}
+
+template < class TT, class TDS >
+typename Triangulation<TT, TDS>::Vertex_handle
+Triangulation<TT, TDS>
+::insert_in_facet(const Point & p, const Facet & ft)
+{
+ CGAL_precondition( ! is_infinite(ft) );
+ Vertex_handle v = tds().insert_in_facet(ft);
+ v->set_point(p);
+ return v;
+}
+
+template < class TT, class TDS >
+typename Triangulation<TT, TDS>::Vertex_handle
+Triangulation<TT, TDS>
+::insert_in_full_cell(const Point & p, Full_cell_handle s)
+{
+ CGAL_precondition( ! is_infinite(s) );
+ Vertex_handle v = tds().insert_in_full_cell(s);
+ v->set_point(p);
+ return v;
+}
+
+// NOT DOCUMENTED...
+template < class TT, class TDS >
+typename Triangulation<TT, TDS>::Vertex_handle
+Triangulation<TT, TDS>
+::insert_outside_convex_hull_1(const Point & p, Full_cell_handle s)
+{
+  // This is a special case for dimension 1 because, in that case, the infinite
+  // full_cell on the right is not correctly oriented (since its first vertex is
+  // the infinite one).
+ CGAL_precondition( is_infinite(s) );
+ CGAL_precondition( 1 == current_dimension() );
+ Vertex_handle v = tds().insert_in_full_cell(s);
+ v->set_point(p);
+ return v;
+}
+
+template < class TT, class TDS >
+typename Triangulation<TT, TDS>::Vertex_handle
+Triangulation<TT, TDS>
+::insert_outside_convex_hull(const Point & p, Full_cell_handle s)
+{
+ if( 1 == current_dimension() )
+ {
+ return insert_outside_convex_hull_1(p, s);
+ }
+ CGAL_precondition( is_infinite(s) );
+ CGAL_assertion( current_dimension() >= 2 );
+ std::vector<Full_cell_handle> simps;
+ simps.reserve(64);
+ std::back_insert_iterator<std::vector<Full_cell_handle> > out(simps);
+ if( current_dimension() < maximal_dimension() )
+ {
+ Coaffine_orientation_d ori = coaffine_orientation_predicate();
+ Outside_convex_hull_traversal_predicate<Coaffine_orientation_d>
+ ochtp(*this, p, ori);
+ tds().gather_full_cells(s, ochtp, out);
+ }
+ else
+ {
+ Orientation_d ori = geom_traits().orientation_d_object();
+ Outside_convex_hull_traversal_predicate<Orientation_d>
+ ochtp(*this, p, ori);
+ tds().gather_full_cells(s, ochtp, out);
+ }
+ int inf_v_index = s->index(infinite_vertex());
+ Vertex_handle v = insert_in_hole(
+ p, simps.begin(), simps.end(), Facet(s, inf_v_index));
+ return v;
+}
+
+template < class TT, class TDS >
+typename Triangulation<TT, TDS>::Vertex_handle
+Triangulation<TT, TDS>
+::insert_outside_affine_hull(const Point & p)
+{
+ CGAL_precondition( current_dimension() < maximal_dimension() );
+ Vertex_handle v = tds().insert_increase_dimension(infinite_vertex());
+ // reset the orientation predicate:
+ reset_flat_orientation();
+ v->set_point(p);
+ if( current_dimension() >= 1 )
+ {
+ Full_cell_handle inf_v_cell = infinite_vertex()->full_cell();
+ int inf_v_index = inf_v_cell->index(infinite_vertex());
+ Full_cell_handle s = inf_v_cell->neighbor(inf_v_index);
+ Orientation o = orientation(s);
+ CGAL_assertion( COPLANAR != o );
+ if( NEGATIVE == o )
+ reorient_full_cells();
+
+
+    // We just inserted the second finite point; the infinite cell on the right
+    // is now (inf_v, v), but we want it to be (v, inf_v) to stay consistent
+    // with the rest of the cells.
+ if (current_dimension() == 1)
+ {
+ // Is "inf_v_cell" the right infinite cell?
+ // Then inf_v_index should be 1
+ if (inf_v_cell->neighbor(inf_v_index)->index(inf_v_cell) == 0
+ && inf_v_index == 0)
+ {
+ inf_v_cell->swap_vertices(
+ current_dimension() - 1, current_dimension());
+ }
+ // Otherwise, let's find the right infinite cell
+ else
+ {
+ inf_v_cell = inf_v_cell->neighbor((inf_v_index + 1) % 2);
+ inf_v_index = inf_v_cell->index(infinite_vertex());
+ // Is "inf_v_cell" the right infinite cell?
+ // Then inf_v_index should be 1
+ if (inf_v_cell->neighbor(inf_v_index)->index(inf_v_cell) == 0
+ && inf_v_index == 0)
+ {
+ inf_v_cell->swap_vertices(
+ current_dimension() - 1, current_dimension());
+ }
+ }
+ }
+ }
+ return v;
+}
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+// - - - - - - - - - - - - - - - - - - - - THE MAIN LOCATE(...) FUNCTION
+
+template < class TT, class TDS >
+template< typename OrientationPredicate >
+typename Triangulation<TT, TDS>::Full_cell_handle
+Triangulation<TT, TDS>
+::do_locate(const Point & p, // query point
+ Locate_type & loc_type,// type of result (full_cell, face, vertex)
+ Face & face,// the face containing the query in its interior (when appropriate)
+ Facet & facet,// the facet containing the query in its interior (when appropriate)
+ Full_cell_handle start, // starting full_cell for the walk
+ OrientationPredicate const& orientation_pred
+ ) const
+{
+ const int cur_dim = current_dimension();
+
+ if( cur_dim == -1 )
+ {
+ loc_type = OUTSIDE_AFFINE_HULL;
+ return Full_cell_handle();
+ }
+ else if( cur_dim == 0 )
+ {
+ Vertex_handle vit = infinite_full_cell()->neighbor(0)->vertex(0);
+ if( EQUAL != geom_traits().compare_lexicographically_d_object()(p, vit->point()) )
+ {
+ loc_type = OUTSIDE_AFFINE_HULL;
+ return Full_cell_handle();
+ }
+ else
+ {
+ loc_type = ON_VERTEX;
+ face.set_full_cell(vit->full_cell());
+ face.set_index(0, 0);
+ return vit->full_cell();
+ }
+ }
+
+ Full_cell_handle s;
+
+ // if we don't know where to start, we start from any bounded full_cell
+ if( Full_cell_handle() == start )
+ {
+    // A const_cast hack that nobody should do, but it is difficult to work
+    // around here... TODO: work around it.
+ Full_cell_handle inf_c = const_cast<Self*>(this)->infinite_full_cell();
+ int inf_v_index = inf_c->index(infinite_vertex());
+ s = inf_c->neighbor(inf_v_index);
+ }
+ else
+ {
+ s = start;
+ if( is_infinite(s) )
+ {
+ int inf_v_index = s->index(infinite_vertex());
+ s = s->neighbor(inf_v_index);
+ }
+ }
+
+ // Check if query |p| is outside the affine hull
+ if( cur_dim < maximal_dimension() )
+ {
+ if( ! geom_traits().contained_in_affine_hull_d_object()(
+ points_begin(s),
+ points_begin(s) + current_dimension() + 1,
+ p) )
+ {
+ loc_type = OUTSIDE_AFFINE_HULL;
+ return Full_cell_handle();
+ }
+ }
+
+ // we remember the |previous|ly visited full_cell to avoid the evaluation
+ // of one |orientation| predicate
+ Full_cell_handle previous = Full_cell_handle();
+ bool full_cell_not_found = true;
+ while(full_cell_not_found) // we walk until we locate the query point |p|
+ {
+ #ifdef CGAL_TRIANGULATION_STATISTICS
+ ++walk_size_;
+ #endif
+ // For the remembering stochastic walk, we need to start trying
+ // with a random index:
+ int j, i = rng_.get_int(0, cur_dim);
+ // we check |p| against all the full_cell's hyperplanes in turn
+
+ for(j = 0; j <= cur_dim; ++j, i = (i + 1) % (cur_dim + 1) )
+ {
+ Full_cell_handle next = s->neighbor(i);
+ if( previous == next )
+ { // no need to compute the orientation, we already know it
+ orientations_[i] = POSITIVE;
+ continue; // go to next full_cell's facet
+ }
+
+ Substitute_point_in_vertex_iterator<
+ typename Full_cell::Vertex_handle_const_iterator>
+ spivi(s->vertex(i), &p);
+
+ orientations_[i] = orientation_pred(
+ boost::make_transform_iterator(s->vertices_begin(), spivi),
+ boost::make_transform_iterator(s->vertices_begin() + cur_dim + 1,
+ spivi));
+
+ if( orientations_[i] != NEGATIVE )
+ {
+ // from this facet's point of view, we are inside the
+ // full_cell or on its boundary, so we continue to next facet
+ continue;
+ }
+
+      // At this point, we know that we have to jump to the |next|
+      // full_cell because orientations_[i] == NEGATIVE.
+ previous = s;
+ s = next;
+ if( is_infinite(next) )
+ { // we have arrived OUTSIDE the convex hull of the triangulation,
+ // so we stop the search
+ full_cell_not_found = false;
+ loc_type = OUTSIDE_CONVEX_HULL;
+ face.set_full_cell(s);
+ }
+ break;
+ } // end of the 'for' loop
+ if( ( cur_dim + 1 ) == j ) // we found the full_cell containing |p|
+ full_cell_not_found = false;
+ }
+  // Here, we know which full_cell |p| lies in.
+ // We now check more precisely where |p| landed:
+ // vertex, facet, face or full_cell.
+ if( ! is_infinite(s) )
+ {
+ face.set_full_cell(s);
+ int num(0);
+ int verts(0);
+ for(int i = 0; i < cur_dim; ++i)
+ {
+ if( orientations_[i] == COPLANAR )
+ {
+ ++num;
+ facet = Facet(s, i);
+ }
+ else
+ face.set_index(verts++, i);
+ }
+ //-- We could put the if{}else{} below in the loop above, but then we would
+ // need to test if (verts < cur_dim) many times... we do it only once
+ // here:
+ if( orientations_[cur_dim] == COPLANAR )
+ {
+ ++num;
+ facet = Facet(s, cur_dim);
+ }
+ else if( verts < cur_dim )
+ face.set_index(verts, cur_dim);
+ //-- end of remark above //
+ if( 0 == num )
+ {
+ loc_type = IN_FULL_CELL;
+ face.clear();
+ }
+ else if( cur_dim == num )
+ loc_type = ON_VERTEX;
+ else if( 1 == num )
+ loc_type = IN_FACET;
+ else
+ loc_type = IN_FACE;
+ }
+ return s;
+}
+
+template < class TT, class TDS >
+typename Triangulation<TT, TDS>::Full_cell_handle
+Triangulation<TT, TDS>
+::locate( const Point & p, // query point
+ Locate_type & loc_type,// type of result (full_cell, face, vertex)
+ Face & face,// the face containing the query in its interior (when appropriate)
+ Facet & facet,// the facet containing the query in its interior (when appropriate)
+ Full_cell_handle start// starting full_cell for the walk
+ ) const
+{
+ if( current_dimension() == maximal_dimension() )
+ {
+ Orientation_d ori = geom_traits().orientation_d_object();
+ return do_locate(p, loc_type, face, facet, start, ori);
+ }
+ else
+ return do_locate(p, loc_type, face, facet, start, coaffine_orientation_predicate());
+}
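+
+/* Editor's note: illustrative sketch only, not part of the original header. A typical
+   point-location call and how to read its output, mirroring what insert(p, start) does
+   above. `T' abbreviates the Triangulation instantiation; `tri' and the query point `q'
+   are hypothetical:
+
+     T::Locate_type lt;
+     T::Face face(tri.maximal_dimension());
+     T::Facet facet;
+     T::Full_cell_handle c = tri.locate(q, lt, face, facet);
+     // lt is one of ON_VERTEX, IN_FACE, IN_FACET, IN_FULL_CELL,
+     // OUTSIDE_CONVEX_HULL or OUTSIDE_AFFINE_HULL; `face' and `facet' describe
+     // where q landed in the ON_VERTEX / IN_FACE / IN_FACET cases.
+*/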
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+// - - - - - - - - - - - - - - - - - - - - the locate(...) variants
+
+template < class TT, class TDS >
+typename Triangulation<TT, TDS>::Full_cell_handle
+Triangulation<TT, TDS>
+::locate( const Point & p,
+ Locate_type & loc_type,
+ Face & face,
+ Facet & facet,
+ Vertex_handle start) const
+{
+ if( Vertex_handle() == start )
+ start = infinite_vertex();
+ return locate(p, loc_type, face, facet, start->full_cell());
+}
+
+template < class TT, class TDS >
+typename Triangulation<TT, TDS>::Full_cell_handle
+Triangulation<TT, TDS>
+::locate(const Point & p, Full_cell_handle s) const
+{
+ Locate_type lt;
+ Face face(maximal_dimension());
+ Facet facet;
+ return locate(p, lt, face, facet, s);
+}
+
+template < class TT, class TDS >
+typename Triangulation<TT, TDS>::Full_cell_handle
+Triangulation<TT, TDS>
+::locate(const Point & p, Vertex_handle v) const
+{
+  if( Vertex_handle() == v ) // no hint given: start from the infinite vertex
+    v = infinite_vertex();
+ return this->locate(p, v->full_cell());
+}
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - VALIDITY
+
+template < class TT, class TDS >
+bool
+Triangulation<TT, TDS>
+::is_valid(bool verbose, int level) const
+{
+ if( ! tds().is_valid(verbose, level) )
+ return false;
+
+ Full_cell_const_iterator c;
+ if( current_dimension() < 0 )
+ return true;
+ Orientation o;
+ for( c = full_cells_begin(); c != full_cells_end(); ++c )
+ {
+ if( is_infinite(c) )
+ {
+ if( current_dimension() > 1 )
+ {
+ int i = c->index( infinite_vertex() );
+ Full_cell_handle n = c->neighbor(i);
+ infinite_vertex()->set_point(n->vertex(c->mirror_index(i))->point());
+ o = - orientation(c, true);
+ }
+ else
+ o = POSITIVE;
+ }
+ else
+ o = orientation(c, true);
+ if( NEGATIVE == o )
+ {
+ if( verbose ) CGAL_warning_msg(false, "full_cell is not correctly oriented");
+ return false;
+ }
+ if( COPLANAR == o )
+ {
+ if( verbose ) CGAL_warning_msg(false, "full_cell is flat");
+ return false;
+ }
+ }
+ return true;
+}
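+
+/* Editor's note: is_valid() is intended for debugging. With verbose == true it emits a
+   CGAL warning describing the first problem it finds (an invalid underlying TDS, or a
+   mis-oriented or flat full cell); a typical use after a batch of insertions is
+   CGAL_assertion( tri.is_valid(true) ), where `tri' is a hypothetical instance. */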
+
+template < class TT, class TDS >
+bool Triangulation<TT, TDS>::are_incident_full_cells_valid(Vertex_const_handle v, bool verbose, int) const
+{
+ if( current_dimension() <= 0 )
+ return true;
+ typedef std::vector<Full_cell_const_handle> Simps;
+ Simps simps;
+ simps.reserve(64);
+ std::back_insert_iterator<Simps> out(simps);
+ incident_full_cells(v, out);
+ typename Simps::const_iterator sit = simps.begin();
+ for( ; sit != simps.end(); ++sit )
+ {
+ if( is_infinite(*sit) )
+ continue;
+ Orientation o = orientation(*sit);
+ if( NEGATIVE == o )
+ {
+ if( verbose ) CGAL_warning_msg(false, "full_cell is not correctly oriented");
+ return false;
+ }
+ if( COPLANAR == o )
+ {
+ if( verbose ) CGAL_warning_msg(false, "full_cell is flat");
+ return false;
+ }
+ }
+ return true;
+}
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+
+// FUNCTIONS THAT ARE NOT MEMBER FUNCTIONS:
+
+template < class TT, class TDS >
+std::istream &
+operator>>(std::istream & is, Triangulation<TT, TDS> & tr)
+  // reads:
+  // - the current dimension
+ // - the number of finite vertices
+ // - the non combinatorial information on vertices (point, etc)
+ // - the number of full_cells
+ // - the full_cells by the indices of their vertices in the preceding list
+ // of vertices, plus the non combinatorial information on each full_cell
+ // - the neighbors of each full_cell by their index in the preceding list
+{
+ typedef Triangulation<TT, TDS> T;
+ typedef typename T::Vertex_handle Vertex_handle;
+
+ // read current dimension and number of vertices
+ size_t n;
+ int cd;
+ if( is_ascii(is) )
+ is >> cd >> n;
+ else
+ {
+ read(is, cd);
+ read(is, n, io_Read_write());
+ }
+
+ CGAL_assertion_msg( cd <= tr.maximal_dimension(), "input Triangulation has too high dimension");
+
+ tr.clear();
+ tr.set_current_dimension(cd);
+
+ if( n == 0 )
+ return is;
+
+ std::vector<Vertex_handle> vertices;
+ vertices.resize(n+1);
+ vertices[0] = tr.infinite_vertex();
+ is >> (*vertices[0]);
+
+ // read the vertices:
+ size_t i(1);
+ while( i <= n )
+ {
+ vertices[i] = tr.new_vertex();
+ is >> (*vertices[i]); // read a vertex
+ ++i;
+ }
+
+ // now, read the combinatorial information
+ return tr.tds().read_full_cells(is, vertices);
+}
+
+template < class TT, class TDS >
+std::ostream &
+operator<<(std::ostream & os, const Triangulation<TT, TDS> & tr)
+  // writes:
+  // - the current dimension
+ // - the number of finite vertices
+ // - the non combinatorial information on vertices (point, etc)
+ // - the number of full_cells
+ // - the full_cells by the indices of their vertices in the preceding list
+ // of vertices, plus the non combinatorial information on each full_cell
+ // - the neighbors of each full_cell by their index in the preceding list
+{
+ typedef Triangulation<TT, TDS> T;
+ typedef typename T::Vertex_const_handle Vertex_handle;
+ typedef typename T::Vertex_const_iterator Vertex_iterator;
+
+ // outputs dimensions and number of vertices
+ size_t n = tr.number_of_vertices();
+ if( is_ascii(os) )
+ os << tr.current_dimension() << std::endl << n << std::endl;
+ else
+ {
+ write(os, tr.current_dimension());
+ write(os, n, io_Read_write());
+ }
+
+ if( n == 0 )
+ return os;
+
+ size_t i(0);
+ // write the vertices
+ std::map<Vertex_handle, int> index_of_vertex;
+
+ // infinite vertex has index 0 (among all the vertices)
+ index_of_vertex[tr.infinite_vertex()] = i++;
+ os << *tr.infinite_vertex();
+ for( Vertex_iterator it = tr.vertices_begin(); it != tr.vertices_end(); ++it )
+ {
+ if( tr.is_infinite(it) )
+ continue;
+ os << *it; // write the vertex
+ index_of_vertex[it] = i++;
+ }
+ CGAL_assertion( i == n+1 );
+
+ // output the combinatorial information
+ return tr.tds().write_full_cells(os, index_of_vertex);
+}
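+
+/* Editor's note: illustrative sketch only. The two stream operators above round-trip:
+   operator<< writes the current dimension, the number of finite vertices, then the
+   vertices and full cells; operator>> rebuilds the triangulation from that data and
+   asserts that the stored dimension fits the reader's maximal dimension. Assuming
+   <fstream> is included and the triangulation can be constructed from its maximal
+   dimension (hypothetical file name and instances):
+
+     std::ofstream out("tri.txt");
+     out << tri;
+     out.close();
+
+     Triangulation tri2(tri.maximal_dimension());
+     std::ifstream in("tri.txt");
+     in >> tri2;
+*/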
+
+} //namespace CGAL
+
+#endif // CGAL_TRIANGULATION_H
diff --git a/src/common/include/gudhi_patches/CGAL/Triangulation_data_structure.h b/src/common/include/gudhi_patches/CGAL/Triangulation_data_structure.h
new file mode 100644
index 00000000..2493c712
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/Triangulation_data_structure.h
@@ -0,0 +1,1603 @@
+// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France).
+// All rights reserved.
+//
+// This file is part of CGAL (www.cgal.org).
+// You can redistribute it and/or modify it under the terms of the GNU
+// General Public License as published by the Free Software Foundation,
+// either version 3 of the License, or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Samuel Hornus
+
+#ifndef CGAL_TRIANGULATION_DATA_STRUCTURE_H
+#define CGAL_TRIANGULATION_DATA_STRUCTURE_H
+
+#include <CGAL/basic.h>
+#include <CGAL/Default.h>
+#include <CGAL/iterator.h>
+#include <CGAL/Compact_container.h>
+#include <CGAL/Triangulation_face.h>
+#include <CGAL/Triangulation_ds_vertex.h>
+#include <CGAL/Triangulation_ds_full_cell.h>
+#include <CGAL/internal/Combination_enumerator.h>
+#include <CGAL/internal/Triangulation/utilities.h>
+#include <CGAL/internal/Triangulation/Triangulation_ds_iterators.h>
+
+#include <algorithm>
+#include <vector>
+#include <queue>
+#include <set>
+
+namespace CGAL {
+
+template< class Dimen,
+ class Vb = Default,
+ class Fcb = Default >
+class Triangulation_data_structure
+{
+ typedef Triangulation_data_structure<Dimen, Vb, Fcb> Self;
+ typedef typename Default::Get<Vb, Triangulation_ds_vertex<> >::type V_base;
+ typedef typename Default::Get<Fcb, Triangulation_ds_full_cell<> >::type FC_base;
+
+public:
+ typedef typename V_base::template Rebind_TDS<Self>::Other Vertex; /* Concept */
+ typedef typename FC_base::template Rebind_TDS<Self>::Other Full_cell; /* Concept */
+
+ // Tools to change the Vertex and Cell types of the TDS.
+ template < typename Vb2 >
+ struct Rebind_vertex {
+ typedef Triangulation_data_structure<Dimen, Vb2, Fcb> Other;
+ };
+
+ template < typename Fcb2 >
+ struct Rebind_full_cell {
+ typedef Triangulation_data_structure<Dimen, Vb, Fcb2> Other;
+ };
+
+
+
+ // we want to store an object of this class in every Full_cell:
+ class Full_cell_data
+ {
+ unsigned char bits_;
+ public:
+ Full_cell_data() : bits_(0) {}
+ Full_cell_data(const Full_cell_data & fcd) : bits_(fcd.bits_) {}
+
+ void clear() { bits_ = 0; }
+ void mark_visited() { bits_ = 1; }
+ void clear_visited() { bits_ = 0; }
+
+ bool is_clear() const { return bits_ == 0; }
+ bool is_visited() const { return bits_ == 1; }
+    // WARNING: if we use more bits and several bits can be set at once,
+    // then make sure to use bitwise operations above, instead of direct
+    // assignment.
+ };
+
+protected:
+ typedef Compact_container<Vertex> Vertex_container;
+ typedef Compact_container<Full_cell> Full_cell_container;
+
+public:
+ typedef Dimen Maximal_dimension;
+
+ typedef typename Vertex_container::size_type size_type; /* Concept */
+ typedef typename Vertex_container::difference_type difference_type; /* Concept */
+
+ typedef typename Vertex_container::iterator Vertex_handle; /* Concept */
+ typedef typename Vertex_container::iterator Vertex_iterator; /* Concept */
+ typedef typename Vertex_container::const_iterator Vertex_const_handle;
+ typedef typename Vertex_container::const_iterator Vertex_const_iterator;
+
+ typedef typename Full_cell_container::iterator Full_cell_handle; /* Concept */
+ typedef typename Full_cell_container::iterator Full_cell_iterator; /* Concept */
+ typedef typename Full_cell_container::const_iterator Full_cell_const_handle;
+ typedef typename Full_cell_container::const_iterator Full_cell_const_iterator;
+
+ typedef internal::Triangulation::
+ Triangulation_ds_facet_iterator<Self> Facet_iterator; /* Concept */
+
+  /* The two types defined below, |Facet| and |Rotor|, are used when traversing
+     the boundary `B' of the union of a set of full cells. A |Rotor| makes it
+     easy to rotate around a co-dimension 2 sub-simplex while searching for
+     neighbors in `B' (see |rotate_rotor| and |insert_in_tagged_hole|). */
+
+ // A co-dimension 1 sub-simplex.
+ class Facet /* Concept */
+ {
+ Full_cell_handle full_cell_;
+ int index_of_covertex_;
+ public:
+ Facet() : full_cell_(), index_of_covertex_(0) {}
+ Facet(Full_cell_handle f, int i) : full_cell_(f), index_of_covertex_(i) {}
+ Full_cell_handle full_cell() const { return full_cell_; }
+ int index_of_covertex() const { return index_of_covertex_; }
+ };
+
+  // A co-dimension 2 sub-simplex, called a Rotor because we can rotate
+ // the two "covertices" around the sub-simplex. Useful for traversing the
+ // boundary of a hole. NOT DOCUMENTED
+ class Rotor : public Facet
+ {
+ int index_of_second_covertex_;
+ public:
+ Rotor() : Facet(), index_of_second_covertex_(0) {}
+ Rotor(Full_cell_handle f, int first, int second) : Facet(f, first), index_of_second_covertex_(second) {}
+ int index_of_second_covertex() const { return index_of_second_covertex_; }
+ };
+
+ typedef Triangulation_face<Self> Face; /* Concept */
+
+protected: // DATA MEMBERS
+
+ int dmax_, dcur_; // dimension of the current triangulation
+ Vertex_container vertices_; // list of all vertices
+ Full_cell_container full_cells_; // list of all full cells
+
+private:
+
+ void clean_dynamic_memory()
+ {
+ vertices_.clear();
+ full_cells_.clear();
+ }
+
+ template < class Dim_tag >
+ struct get_maximal_dimension
+ {
+ static int value(int D) { return D; }
+ };
+ // specialization
+ template < int D >
+ struct get_maximal_dimension<Dimension_tag<D> >
+ {
+ static int value(int) { return D; }
+ };
+
+public:
+ Triangulation_data_structure( int dim=0) /* Concept */
+ : dmax_(get_maximal_dimension<Dimen>::value(dim)), dcur_(-2),
+ vertices_(), full_cells_()
+ {
+ CGAL_assertion_msg(dmax_ > 0, "maximal dimension must be positive.");
+ }
+
+ ~Triangulation_data_structure()
+ {
+ clean_dynamic_memory();
+ }
+
+ Triangulation_data_structure(const Triangulation_data_structure & tds)
+ : dmax_(tds.dmax_), dcur_(tds.dcur_),
+ vertices_(tds.vertices_), full_cells_(tds.full_cells_)
+ {
+ typedef std::map<Vertex_const_handle, Vertex_handle> V_map;
+ typedef std::map<Full_cell_const_handle, Full_cell_handle> C_map;
+ V_map vmap;
+ C_map cmap;
+ Vertex_const_iterator vfrom = tds.vertices_begin();
+ Vertex_iterator vto = vertices_begin();
+ Full_cell_const_iterator cfrom = tds.full_cells_begin();
+ Full_cell_iterator cto = full_cells_begin();
+ while( vfrom != tds.vertices_end() )
+ vmap[vfrom++] = vto++;
+ while( cfrom != tds.full_cells_end() )
+ cmap[cfrom++] = cto++;
+ cto = full_cells_begin();
+ while( cto != full_cells_end() )
+ {
+ for( int i = 0; i <= (std::max)(0, current_dimension()); ++i )
+ {
+ associate_vertex_with_full_cell(cto, i, vmap[cto->vertex(i)]);
+ cto->set_neighbor(i, cmap[cto->neighbor(i)]);
+ }
+ ++cto;
+ }
+ }
+
+ // QUERIES
+
+protected:
+
+ bool check_range(int i) const
+ {
+ if( current_dimension() < 0 )
+ {
+ return (0 == i);
+ }
+ return ( (0 <= i) && (i <= current_dimension()) );
+ }
+
+public:
+
+  /* maximal_dimension() returns the dimension of the ambient space;
+     current_dimension() returns the dimension of the full cells currently
+     in the triangulation. */
+ int maximal_dimension() const { return dmax_; } /* Concept */
+ int current_dimension() const { return dcur_; } /* Concept */
+
+ size_type number_of_vertices() const /* Concept */
+ {
+ return this->vertices_.size();
+ }
+ size_type number_of_full_cells() const /* Concept */
+ {
+ return this->full_cells_.size();
+ }
+
+ bool empty() const /* Concept */
+ {
+ return current_dimension() == -2;
+ }
+
+ Vertex_container & vertices() { return vertices_; }
+ const Vertex_container & vertices() const { return vertices_; }
+ Full_cell_container & full_cells() { return full_cells_; }
+ const Full_cell_container & full_cells() const { return full_cells_; }
+
+ Vertex_handle vertex(Full_cell_handle s, int i) const /* Concept */
+ {
+ CGAL_precondition(s != Full_cell_handle() && check_range(i));
+ return s->vertex(i);
+ }
+
+ Vertex_const_handle vertex(Full_cell_const_handle s, int i) const /* Concept */
+ {
+ CGAL_precondition(s != Full_cell_handle() && check_range(i));
+ return s->vertex(i);
+ }
+
+ bool is_vertex(Vertex_const_handle v) const /* Concept */
+ {
+ if( Vertex_const_handle() == v )
+ return false;
+ Vertex_const_iterator vit = vertices_begin();
+ while( vit != vertices_end() && ( v != vit ) )
+ ++vit;
+ return v == vit;
+ }
+
+ bool is_full_cell(Full_cell_const_handle s) const /* Concept */
+ {
+ if( Full_cell_const_handle() == s )
+ return false;
+ Full_cell_const_iterator sit = full_cells_begin();
+ while( sit != full_cells_end() && ( s != sit ) )
+ ++sit;
+ return s == sit;
+ }
+
+ Full_cell_handle full_cell(Vertex_handle v) const /* Concept */
+ {
+ CGAL_precondition(v != Vertex_handle());
+ return v->full_cell();
+ }
+
+ Full_cell_const_handle full_cell(Vertex_const_handle v) const /* Concept */
+ {
+ CGAL_precondition(Vertex_const_handle() != v);
+ return v->full_cell();
+ }
+
+ Full_cell_handle neighbor(Full_cell_handle s, int i) const /* Concept */
+ {
+ CGAL_precondition(Full_cell_handle() != s && check_range(i));
+ return s->neighbor(i);
+ }
+
+ Full_cell_const_handle neighbor(Full_cell_const_handle s, int i) const/* Concept */
+ {
+ CGAL_precondition(Full_cell_const_handle() != s && check_range(i));
+ return s->neighbor(i);
+ }
+
+ int mirror_index(Full_cell_handle s, int i) const /* Concept */
+ {
+ CGAL_precondition(Full_cell_handle() != s && check_range(i));
+ return s->mirror_index(i);
+ }
+
+ int mirror_index(Full_cell_const_handle s, int i) const
+ {
+ CGAL_precondition(Full_cell_const_handle() != s && check_range(i)); /* Concept */
+ return s->mirror_index(i);
+ }
+
+ int mirror_vertex(Full_cell_handle s, int i) const /* Concept */
+ {
+ CGAL_precondition(Full_cell_handle() != s && check_range(i));
+ return s->mirror_vertex(i);
+ }
+
+ // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - FACETS OPERATIONS
+
+ // works for Face_ = Facet and Face_ = Rotor.
+ // NOT DOCUMENTED for the Rotor case...
+ template< typename Face_ >
+ Full_cell_handle full_cell(const Face_ & f) const /* Concept */
+ {
+ return f.full_cell();
+ }
+
+ // works for Face_ = Facet and Face_ = Rotor.
+ // NOT DOCUMENTED for the Rotor case...
+ template< class Face_ >
+ int index_of_covertex(const Face_ & f) const /* Concept */
+ {
+ return f.index_of_covertex();
+ }
+
+ // NOT DOCUMENTED
+ // A Rotor has two covertices
+ int index_of_second_covertex(const Rotor & f) const
+ {
+ return f.index_of_second_covertex();
+ }
+
+ // works for Face_ = Facet and Face_ = Rotor.
+ // NOT DOCUMENTED...
+ template< class Face_ >
+ bool is_boundary_facet(const Face_ & f) const
+ {
+ if( get_visited(neighbor(full_cell(f), index_of_covertex(f))) )
+ return false;
+ if( ! get_visited(full_cell(f)) )
+ return false;
+ return true;
+ }
+
+ // NOT DOCUMENTED...
+ Rotor rotate_rotor(Rotor & f)
+ {
+ int opposite = mirror_index(full_cell(f), index_of_covertex(f));
+ Full_cell_handle s = neighbor(full_cell(f), index_of_covertex(f));
+ int new_second = s->index(vertex(full_cell(f), index_of_second_covertex(f)));
+ return Rotor(s, new_second, opposite);
+ }
+
+ // NICE UPDATE OPERATIONS
+
+protected:
+ void do_insert_increase_dimension(Vertex_handle, Vertex_handle);
+public:
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - REMOVALS
+
+ Vertex_handle collapse_face(const Face &); /* Concept */
+ void remove_decrease_dimension(Vertex_handle, Vertex_handle); /* Concept */
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - INSERTIONS
+
+ Vertex_handle insert_in_full_cell(Full_cell_handle); /* Concept */
+ Vertex_handle insert_in_face(const Face &); /* Concept */
+ Vertex_handle insert_in_facet(const Facet &); /* Concept */
+ template< typename Forward_iterator >
+ Vertex_handle insert_in_hole(Forward_iterator, Forward_iterator, Facet); /* Concept */
+ template< typename Forward_iterator, typename OutputIterator >
+ Vertex_handle insert_in_hole(Forward_iterator, Forward_iterator, Facet, OutputIterator); /* Concept */
+
+ template< typename OutputIterator >
+ Full_cell_handle insert_in_tagged_hole(Vertex_handle, Facet, OutputIterator);
+
+ Vertex_handle insert_increase_dimension(Vertex_handle=Vertex_handle()); /* Concept */
+
+private:
+
+ // Used by insert_in_tagged_hole
+ struct IITH_task
+ {
+ IITH_task(
+ Facet boundary_facet_,
+ int index_of_inside_cell_in_outside_cell_,
+ Full_cell_handle future_neighbor_ = Full_cell_handle(),
+ int new_cell_index_in_future_neighbor_ = -1,
+ int index_of_future_neighbor_in_new_cell_ = -1)
+ : boundary_facet(boundary_facet_),
+ index_of_inside_cell_in_outside_cell(index_of_inside_cell_in_outside_cell_),
+ future_neighbor(future_neighbor_),
+ new_cell_index_in_future_neighbor(new_cell_index_in_future_neighbor_),
+ index_of_future_neighbor_in_new_cell(index_of_future_neighbor_in_new_cell_)
+ {}
+
+ // "new_cell" is the cell about to be created
+ Facet boundary_facet;
+ int index_of_inside_cell_in_outside_cell;
+ Full_cell_handle future_neighbor;
+ int new_cell_index_in_future_neighbor;
+ int index_of_future_neighbor_in_new_cell;
+ };
+
+ // NOT DOCUMENTED
+ void clear_visited_marks(Full_cell_handle) const;
+
+ // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - DANGEROUS UPDATE OPERATIONS
+
+private:
+
+ // NOT DOCUMENTED
+ template< typename FCH > // FCH = Full_cell_[const_]handle
+ bool get_visited(FCH c) const
+ {
+ return c->tds_data().is_visited();
+ }
+
+ // NOT DOCUMENTED
+ template< typename FCH > // FCH = Full_cell_[const_]handle
+ void set_visited(FCH c, bool m) const
+ {
+ if( m )
+ c->tds_data().mark_visited();
+ else
+ c->tds_data().clear_visited();
+ }
+
+public:
+
+ void clear() /* Concept */
+ {
+ clean_dynamic_memory();
+ dcur_ = -2;
+ }
+
+ void set_current_dimension(int d) /* Concept */
+ {
+ CGAL_precondition(-2<=d && d<=maximal_dimension());
+ dcur_ = d;
+ }
+
+ Full_cell_handle new_full_cell(Full_cell_handle s)
+ {
+ return full_cells_.emplace(*s);
+ }
+
+ Full_cell_handle new_full_cell() /* Concept */
+ {
+ return full_cells_.emplace(dmax_);
+ }
+
+ void delete_full_cell(Full_cell_handle s) /* Concept */
+ {
+ CGAL_precondition(Full_cell_handle() != s);
+ // CGAL_expensive_precondition(is_full_cell(s));
+ full_cells_.erase(s);
+ }
+
+ template< typename Forward_iterator >
+ void delete_full_cells(Forward_iterator start, Forward_iterator end) /* Concept */
+ {
+ Forward_iterator s = start;
+ while( s != end )
+ full_cells_.erase(*s++);
+ }
+
+ template< class T >
+ Vertex_handle new_vertex( const T & t )
+ {
+ return vertices_.emplace(t);
+ }
+
+ Vertex_handle new_vertex() /* Concept */
+ {
+ return vertices_.emplace();
+ }
+
+ void delete_vertex(Vertex_handle v) /* Concept */
+ {
+ CGAL_precondition( Vertex_handle() != v );
+ vertices_.erase(v);
+ }
+
+ void associate_vertex_with_full_cell(Full_cell_handle s, int i, Vertex_handle v) /* Concept */
+ {
+ CGAL_precondition(check_range(i));
+ CGAL_precondition(s != Full_cell_handle());
+ CGAL_precondition(v != Vertex_handle());
+ s->set_vertex(i, v);
+ v->set_full_cell(s);
+ }
+
+ void set_neighbors(Full_cell_handle s, int i, Full_cell_handle s1, int j) /* Concept */
+ {
+ CGAL_precondition(check_range(i));
+ CGAL_precondition(check_range(j));
+ CGAL_precondition(s != Full_cell_handle());
+ CGAL_precondition(s1 != Full_cell_handle());
+ s->set_neighbor(i, s1);
+ s1->set_neighbor(j, s);
+ s->set_mirror_index(i, j);
+ s1->set_mirror_index(j, i);
+ }
+
+ // SANITY CHECKS
+
+ bool is_valid(bool = true, int = 0) const; /* Concept */
+
+ // NOT DOCUMENTED
+ template< class OutStream> void write_graph(OutStream &);
+
+ Vertex_iterator vertices_begin() { return vertices_.begin(); } /* Concept */
+ Vertex_iterator vertices_end() { return vertices_.end(); } /* Concept */
+ Full_cell_iterator full_cells_begin() { return full_cells_.begin(); } /* Concept */
+ Full_cell_iterator full_cells_end() { return full_cells_.end(); } /* Concept */
+
+ Vertex_const_iterator vertices_begin() const { return vertices_.begin(); } /* Concept */
+ Vertex_const_iterator vertices_end() const { return vertices_.end(); } /* Concept */
+ Full_cell_const_iterator full_cells_begin() const { return full_cells_.begin(); } /* Concept */
+ Full_cell_const_iterator full_cells_end() const { return full_cells_.end(); } /* Concept */
+
+ Facet_iterator facets_begin() /* Concept */
+ {
+ if( current_dimension() <= 0 )
+ return facets_end();
+ return Facet_iterator(*this);
+ }
+ Facet_iterator facets_end() /* Concept */
+ {
+ return Facet_iterator(*this, 0);
+ }
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - FULL CELL GATHERING
+
+ // a traversal predicate for gathering full_cells incident to a given face
+ // ``incident'' means that the given face is a subface of the full_cell
+ class Incident_full_cell_traversal_predicate
+ {
+ const Face & f_;
+ int dim_;
+ const Triangulation_data_structure & tds_;
+ public:
+ Incident_full_cell_traversal_predicate(const Triangulation_data_structure & tds,
+ const Face & f)
+ : f_(f), tds_(tds)
+ {
+ dim_ = f.face_dimension();
+ }
+ bool operator()(const Facet & facet) const
+ {
+ Vertex_handle v = tds_.full_cell(facet)->vertex(tds_.index_of_covertex(facet));
+ for( int i = 0; i <= dim_; ++i )
+ {
+ if( v == f_.vertex(i) )
+ return false;
+ }
+ return true;
+ }
+ };
+
+ // a traversal predicate for gathering full_cells having a given face as subface
+ class Star_traversal_predicate
+ {
+ const Face & f_;
+ int dim_;
+ const Triangulation_data_structure & tds_;
+ public:
+ Star_traversal_predicate(const Triangulation_data_structure & tds,
+ const Face & f)
+ : f_(f), tds_(tds)
+ {
+ dim_ = f.face_dimension();
+ }
+ bool operator()(const Facet & facet) const
+ {
+ Full_cell_handle s = tds_.full_cell(facet)->neighbor(tds_.index_of_covertex(facet));
+ for( int j = 0; j <= tds_.current_dimension(); ++j )
+ {
+ for( int i = 0; i <= dim_; ++i )
+ if( s->vertex(j) == f_.vertex(i) )
+ return true;
+ }
+ return false;
+ }
+ };
+
+ template< typename TraversalPredicate, typename OutputIterator >
+ Facet gather_full_cells(Full_cell_handle, TraversalPredicate &, OutputIterator &) const; /* Concept */
+ template< typename OutputIterator >
+ OutputIterator incident_full_cells(const Face &, OutputIterator) const; /* Concept */
+ template< typename OutputIterator >
+ OutputIterator incident_full_cells(Vertex_const_handle, OutputIterator) const; /* Concept */
+ template< typename OutputIterator >
+ OutputIterator star(const Face &, OutputIterator) const; /* Concept */
+#ifndef CGAL_CFG_NO_CPP0X_DEFAULT_TEMPLATE_ARGUMENTS_FOR_FUNCTION_TEMPLATES
+ template< typename OutputIterator, typename Comparator = std::less<Vertex_const_handle> >
+ OutputIterator incident_upper_faces(Vertex_const_handle v, int dim, OutputIterator out, Comparator cmp = Comparator())
+ {
+ return incident_faces(v, dim, out, cmp, true);
+ }
+ template< typename OutputIterator, typename Comparator = std::less<Vertex_const_handle> >
+ OutputIterator incident_faces(Vertex_const_handle, int, OutputIterator, Comparator = Comparator(), bool = false) const;
+#else
+ template< typename OutputIterator, typename Comparator >
+ OutputIterator incident_upper_faces(Vertex_const_handle v, int dim, OutputIterator out, Comparator cmp = Comparator())
+ {
+ return incident_faces(v, dim, out, cmp, true);
+ }
+ template< typename OutputIterator >
+ OutputIterator incident_upper_faces(Vertex_const_handle v, int dim, OutputIterator out)
+ {
+ return incident_faces(v, dim, out, std::less<Vertex_const_handle>(), true);
+ }
+ template< typename OutputIterator, typename Comparator >
+ OutputIterator incident_faces(Vertex_const_handle, int, OutputIterator, Comparator = Comparator(), bool = false) const;
+ template< typename OutputIterator >
+ OutputIterator incident_faces(Vertex_const_handle, int, OutputIterator,
+ std::less<Vertex_const_handle> = std::less<Vertex_const_handle>(), bool = false) const;
+#endif
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - INPUT / OUTPUT
+
+ std::istream & read_full_cells(std::istream &, const std::vector<Vertex_handle> &);
+ std::ostream & write_full_cells(std::ostream &, std::map<Vertex_const_handle, int> &) const;
+
+}; // end of ``declaration/definition'' of Triangulation_data_structure<...>
+
+// = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
+
+// FUNCTIONS THAT ARE MEMBER FUNCTIONS:
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+// - - - - - - - - - - - - - - - - - - - - - - - - THE GATHERING METHODS
+
+template< class Dim, class Vb, class Fcb >
+template< typename OutputIterator >
+OutputIterator
+Triangulation_data_structure<Dim, Vb, Fcb>
+::incident_full_cells(const Face & f, OutputIterator out) const /* Concept */
+{
+ // CGAL_expensive_precondition_msg(is_full_cell(f.full_cell()), "the facet does not belong to the Triangulation");
+ Incident_full_cell_traversal_predicate tp(*this, f);
+ gather_full_cells(f.full_cell(), tp, out);
+ return out;
+}
+
+template< class Dim, class Vb, class Fcb >
+template< typename OutputIterator >
+OutputIterator
+Triangulation_data_structure<Dim, Vb, Fcb>
+::incident_full_cells(Vertex_const_handle v, OutputIterator out) const /* Concept */
+{
+// CGAL_expensive_precondition(is_vertex(v));
+ CGAL_precondition(Vertex_handle() != v);
+ Face f(v->full_cell());
+ f.set_index(0, v->full_cell()->index(v));
+ return incident_full_cells(f, out);
+}
+
+template< class Dim, class Vb, class Fcb >
+template< typename OutputIterator >
+OutputIterator
+Triangulation_data_structure<Dim, Vb, Fcb>
+::star(const Face & f, OutputIterator out) const /* Concept */
+{
+ // CGAL_precondition_msg(is_full_cell(f.full_cell()), "the facet does not belong to the Triangulation");
+ Star_traversal_predicate tp(*this, f);
+ gather_full_cells(f.full_cell(), tp, out);
+ return out;
+}
+
+template< class Dim, class Vb, class Fcb >
+template< typename TraversalPredicate, typename OutputIterator >
+typename Triangulation_data_structure<Dim, Vb, Fcb>::Facet
+Triangulation_data_structure<Dim, Vb, Fcb>
+::gather_full_cells(Full_cell_handle start,
+ TraversalPredicate & tp,
+ OutputIterator & out) const /* Concept */
+{
+ std::queue<Full_cell_handle> queue;
+ set_visited(start, true);
+ queue.push(start);
+ const int cur_dim = current_dimension();
+ Facet ft;
+ while( ! queue.empty() )
+ {
+ Full_cell_handle s = queue.front();
+ queue.pop();
+ *out = s;
+ ++out;
+ for( int i = 0; i <= cur_dim; ++i )
+ {
+ Full_cell_handle n = s->neighbor(i);
+ if( ! get_visited(n) )
+ {
+ set_visited(n, true);
+ if( tp(Facet(s, i)) )
+ queue.push(n);
+ else
+ ft = Facet(s, i);
+ }
+ }
+ }
+ clear_visited_marks(start);
+ return ft;
+}
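+
+/* Editor's note: illustrative sketch only. gather_full_cells() above is the breadth-first
+   engine behind incident_full_cells() and star(); it marks cells as visited during the
+   traversal and clears the marks again before returning. Collecting the full cells
+   incident to a vertex `v' of a data structure `tds' (hypothetical names; TDS is a
+   concrete Triangulation_data_structure typedef) then looks like:
+
+     std::vector<TDS::Full_cell_handle> cells;
+     cells.reserve(64);
+     std::back_insert_iterator<std::vector<TDS::Full_cell_handle> > out(cells);
+     tds.incident_full_cells(v, out);
+     // cells now holds every full cell having v among its vertices.
+*/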
+
+#ifdef CGAL_CFG_NO_CPP0X_DEFAULT_TEMPLATE_ARGUMENTS_FOR_FUNCTION_TEMPLATES
+template< class Dim, class Vb, class Fcb >
+template< typename OutputIterator >
+OutputIterator
+Triangulation_data_structure<Dim, Vb, Fcb>
+::incident_faces(Vertex_const_handle v, int dim, OutputIterator out,
+ std::less<Vertex_const_handle> cmp, bool upper_faces) const
+{
+ return incident_faces<OutputIterator, std::less<Vertex_const_handle> >(v, dim, out, cmp, upper_faces);
+}
+#endif
+
+template< class Dim, class Vb, class Fcb >
+template< typename OutputIterator, typename Comparator >
+OutputIterator
+Triangulation_data_structure<Dim, Vb, Fcb>
+::incident_faces(Vertex_const_handle v, int dim, OutputIterator out, Comparator cmp, bool upper_faces) const
+{
+ CGAL_precondition( 0 < dim );
+ if( dim >= current_dimension() )
+ return out;
+ typedef std::vector<Full_cell_handle> Simplices;
+ Simplices simps;
+ simps.reserve(64);
+ // gather incident full_cells
+ std::back_insert_iterator<Simplices> sout(simps);
+ incident_full_cells(v, sout);
+ // for storing the handles to the vertices of a full_cell
+ typedef std::vector<Vertex_const_handle> Vertices;
+ typedef std::vector<int> Indices;
+ Vertices vertices(1 + current_dimension());
+ Indices sorted_idx(1 + current_dimension());
+ // setup Face comparator and Face_set
+ typedef internal::Triangulation::Compare_faces_with_common_first_vertex<Self>
+ Upper_face_comparator;
+ Upper_face_comparator ufc(dim);
+ typedef std::set<Face, Upper_face_comparator> Face_set;
+ Face_set face_set(ufc);
+ for( typename Simplices::const_iterator s = simps.begin(); s != simps.end(); ++s )
+ {
+ int v_idx(0); // the index of |v| in the sorted full_cell
+ // get the vertices of the full_cell and sort them
+ for( int i = 0; i <= current_dimension(); ++i )
+ vertices[i] = (*s)->vertex(i);
+ if( upper_faces )
+ {
+ std::sort(vertices.begin(), vertices.end(), cmp);
+ while( vertices[v_idx] != v )
+ ++v_idx;
+ }
+ else
+ {
+ while( vertices[v_idx] != v )
+ ++v_idx;
+ if( 0 != v_idx )
+ std::swap(vertices[0], vertices[v_idx]);
+ v_idx = 0;
+ typename Vertices::iterator vbegin(vertices.begin());
+ ++vbegin;
+ std::sort(vbegin, vertices.end(), cmp);
+ }
+ if( v_idx + dim > current_dimension() )
+ continue; // |v| is too far to the right
+ // stores the index of the vertices of s in the same order
+ // as in |vertices|:
+ for( int i = 0; i <= current_dimension(); ++i )
+ sorted_idx[i] = (*s)->index(vertices[i]);
+ // init state for enumerating all candidate faces:
+ internal::Combination_enumerator f_idx(dim, v_idx + 1, current_dimension());
+ Face f(*s);
+ f.set_index(0, sorted_idx[v_idx]);
+ while( ! f_idx.end() )
+ {
+ for( int i = 0; i < dim; ++i )
+ f.set_index(1 + i, sorted_idx[f_idx[i]]);
+ face_set.insert(f); // checks if face has already been found
+
+ // compute next sorted face (lexicographic enumeration)
+ ++f_idx;
+ }
+ }
+ typename Face_set::iterator fit = face_set.begin();
+ while( fit != face_set.end() )
+ *out++ = *fit++;
+ return out;
+}
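+
+/* Editor's note: illustrative sketch only. incident_faces() above enumerates the faces of
+   a given dimension containing a vertex; each returned Face stores a full cell handle and
+   the indices of the face's vertices in that cell, with the query vertex always in first
+   position. For example, listing the edges (1-dimensional faces) incident to `v' in a
+   data structure `tds' (hypothetical names):
+
+     std::vector<TDS::Face> edges;
+     tds.incident_faces(v, 1, std::back_inserter(edges));
+     // the result is empty when 1 >= tds.current_dimension(), per the early exit above.
+*/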
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+// - - - - - - - - - - - - - - - - - - - - - - - - THE REMOVAL METHODS
+
+template <class Dim, class Vb, class Fcb>
+typename Triangulation_data_structure<Dim, Vb, Fcb>::Vertex_handle
+Triangulation_data_structure<Dim, Vb, Fcb>
+::collapse_face(const Face & f) /* Concept */
+{
+ const int fd = f.face_dimension();
+ CGAL_precondition( (1 <= fd ) && (fd < current_dimension()));
+ std::vector<Full_cell_handle> simps;
+ // save the Face's vertices:
+ Full_cell s;
+ for( int i = 0; i <= fd; ++i )
+ s.set_vertex(i, f.vertex(i));
+ // compute the star of f
+ simps.reserve(64);
+ std::back_insert_iterator<std::vector<Full_cell_handle> > out(simps);
+ star(f, out);
+ Vertex_handle v = insert_in_hole(simps.begin(), simps.end(), Facet(f.full_cell(), f.index(0)));
+ for( int i = 0; i <= fd; ++i )
+ delete_vertex(s.vertex(i));
+ return v;
+}
+
+template <class Dim, class Vb, class Fcb>
+void
+Triangulation_data_structure<Dim, Vb, Fcb>
+::remove_decrease_dimension(Vertex_handle v, Vertex_handle star) /* Concept */
+{
+ CGAL_assertion( current_dimension() >= -1 );
+ if( -1 == current_dimension() )
+ {
+ clear();
+ return;
+ }
+ else if( 0 == current_dimension() )
+ {
+ delete_full_cell(v->full_cell());
+ delete_vertex(v);
+ star->full_cell()->set_neighbor(0, Full_cell_handle());
+ set_current_dimension(-1);
+ return;
+ }
+ else if( 1 == current_dimension() )
+ {
+ Full_cell_handle s = v->full_cell();
+ int star_index;
+ if( s->has_vertex(star, star_index) )
+ s = s->neighbor(star_index);
+ // Here, |star| is not a vertex of |s|, so it's the only finite
+ // full_cell
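+ // After |v| and |s| are deleted, the two remaining cells |inf1| and |inf2|
+ // become the two 0-dimensional full cells of the result, holding |star|
+ // and |v2| respectively, and are glued to each other.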
+ Full_cell_handle inf1 = s->neighbor(0);
+ Full_cell_handle inf2 = s->neighbor(1);
+ Vertex_handle v2 = s->vertex(1 - s->index(v));
+ delete_vertex(v);
+ delete_full_cell(s);
+ inf1->set_vertex(1, Vertex_handle());
+ inf2->set_vertex(1, Vertex_handle());
+ inf1->set_neighbor(1, Full_cell_handle());
+ inf2->set_neighbor(1, Full_cell_handle());
+ associate_vertex_with_full_cell(inf1, 0, star);
+ associate_vertex_with_full_cell(inf2, 0, v2);
+ set_neighbors(inf1, 0, inf2, 0);
+ set_current_dimension(0);
+ return;
+ }
+ typedef std::vector<Full_cell_handle> Simplices;
+ Simplices simps;
+ incident_full_cells(v, std::back_inserter(simps));
+ for( typename Simplices::iterator it = simps.begin(); it != simps.end(); ++it )
+ {
+ int v_idx = (*it)->index(v);
+ if( ! (*it)->has_vertex(star) )
+ {
+ delete_full_cell((*it)->neighbor(v_idx));
+ for( int i = 0; i <= current_dimension(); ++i )
+ (*it)->vertex(i)->set_full_cell(*it);
+ }
+ else
+ star->set_full_cell(*it);
+ if( v_idx != current_dimension() )
+ {
+ (*it)->swap_vertices(v_idx, current_dimension());
+ (*it)->swap_vertices(current_dimension() - 2, current_dimension() - 1);
+ }
+ (*it)->set_vertex(current_dimension(), Vertex_handle());
+ (*it)->set_neighbor(current_dimension(), Full_cell_handle());
+ }
+ set_current_dimension(current_dimension()-1);
+ delete_vertex(v);
+}
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+// - - - - - - - - - - - - - - - - - - - - - - - - THE INSERTION METHODS
+
+template <class Dim, class Vb, class Fcb>
+typename Triangulation_data_structure<Dim, Vb, Fcb>::Vertex_handle
+Triangulation_data_structure<Dim, Vb, Fcb>
+::insert_in_full_cell(Full_cell_handle s) /* Concept */
+{
+ CGAL_precondition(0 < current_dimension());
+ CGAL_precondition(Full_cell_handle() != s);
+ // CGAL_expensive_precondition(is_full_cell(s));
+
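+ // |s| is starred from the new vertex |v|: |s| itself and cur_dim copies of
+ // it each get one of their original vertices replaced by |v|, and the
+ // cur_dim + 1 resulting cells are then glued pairwise along the facets
+ // that contain |v|.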
+ const int cur_dim = current_dimension();
+ Vertex_handle v = new_vertex();
+ // the full_cell 'fc' is just used to store the handle to all the new full_cells.
+ Full_cell fc(maximal_dimension());
+ for( int i = 1; i <= cur_dim; ++i )
+ {
+ Full_cell_handle new_s = new_full_cell(s);
+ fc.set_neighbor(i, new_s);
+ associate_vertex_with_full_cell(new_s, i, v);
+ s->vertex(i-1)->set_full_cell(new_s);
+ set_neighbors(new_s, i, neighbor(s, i), mirror_index(s, i));
+ }
+ fc.set_neighbor(0, s);
+ associate_vertex_with_full_cell(s, 0, v);
+ for( int i = 0; i <= cur_dim; ++i )
+ for( int j = 0; j <= cur_dim; ++j )
+ {
+ if( j == i ) continue;
+ set_neighbors(fc.neighbor(i), j, fc.neighbor(j), i);
+ }
+ return v;
+}
+
+template <class Dim, class Vb, class Fcb >
+typename Triangulation_data_structure<Dim, Vb, Fcb>::Vertex_handle
+Triangulation_data_structure<Dim, Vb, Fcb>
+::insert_in_face(const Face & f) /* Concept */
+{
+ std::vector<Full_cell_handle> simps;
+ simps.reserve(64);
+ std::back_insert_iterator<std::vector<Full_cell_handle> > out(simps);
+ incident_full_cells(f, out);
+ return insert_in_hole(simps.begin(), simps.end(), Facet(f.full_cell(), f.index(0)));
+}
+template <class Dim, class Vb, class Fcb >
+typename Triangulation_data_structure<Dim, Vb, Fcb>::Vertex_handle
+Triangulation_data_structure<Dim, Vb, Fcb>
+::insert_in_facet(const Facet & ft) /* Concept */
+{
+ Full_cell_handle s[2];
+ s[0] = full_cell(ft);
+ int i = index_of_covertex(ft);
+ s[1] = s[0]->neighbor(i);
+ i = ( i + 1 ) % current_dimension();
+ return insert_in_hole(s, s+2, Facet(s[0], i));
+}
+
+template <class Dim, class Vb, class Fcb >
+template < typename OutputIterator >
+typename Triangulation_data_structure<Dim, Vb, Fcb>::Full_cell_handle
+Triangulation_data_structure<Dim, Vb, Fcb>
+::insert_in_tagged_hole(Vertex_handle v, Facet f,
+ OutputIterator new_full_cells)
+{
+ CGAL_assertion_msg(is_boundary_facet(f), "starting facet should be on the hole boundary");
+
+ const int cur_dim = current_dimension();
+ Full_cell_handle new_s;
+
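+ // The hole boundary is traversed iteratively with a task queue (instead of
+ // recursion): each boundary facet is "extruded" to the new vertex |v|,
+ // creating a new full cell, and the adjacencies between the new cells are
+ // set up as the corresponding tasks are processed.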
+ std::queue<IITH_task> task_queue;
+ task_queue.push(
+ IITH_task(f, mirror_index(full_cell(f), index_of_covertex(f))) );
+
+ while (!task_queue.empty())
+ {
+ IITH_task task = task_queue.front();
+ task_queue.pop();
+
+ Full_cell_handle old_s = full_cell(task.boundary_facet);
+ const int facet_index = index_of_covertex(task.boundary_facet);
+
+ Full_cell_handle outside_neighbor = neighbor(old_s, facet_index);
+ // Here, "new_s" might actually be a new cell, but it might also be "old_s"
+ // if it has not been treated already in the meantime
+ new_s = neighbor(outside_neighbor, task.index_of_inside_cell_in_outside_cell);
+ // If the cell has not been treated yet
+ if (old_s == new_s)
+ {
+ new_s = new_full_cell();
+
+ int i(0);
+ for ( ; i < facet_index ; ++i)
+ associate_vertex_with_full_cell(new_s, i, old_s->vertex(i));
+ ++i; // skip facet_index
+ for ( ; i <= cur_dim ; ++i)
+ associate_vertex_with_full_cell(new_s, i, old_s->vertex(i));
+ associate_vertex_with_full_cell(new_s, facet_index, v);
+ set_neighbors(new_s,
+ facet_index,
+ outside_neighbor,
+ mirror_index(old_s, facet_index));
+
+ // add the new full_cell to the list of new full_cells
+ *new_full_cells++ = new_s;
+
+ // check all of |Facet f|'s neighbors
+ for (i = 0 ; i <= cur_dim ; ++i)
+ {
+ if (facet_index == i)
+ continue;
+ // we define a |Rotor| because it makes it easy to rotate around
+ // in a self contained fashion. The corresponding potential
+ // boundary facet is Facet(full_cell(rot), index_of_covertex(rot))
+ Rotor rot(old_s, i, facet_index);
+ // |rot|, on the line above, stands for the candidate facet
+ while (!is_boundary_facet(rot))
+ rot = rotate_rotor(rot);
+
+ // we did find the |i|-th neighbor of Facet(old_s, facet_index)...
+ // has it already been extruded to center point |v| ?
+ Full_cell_handle inside = full_cell(rot);
+ Full_cell_handle outside = neighbor(inside, index_of_covertex(rot));
+ // "m" is the vertex of outside which is not on the boundary
+ Vertex_handle m = inside->mirror_vertex(index_of_covertex(rot), current_dimension()); // CJTODO: use mirror_index?
+ // "index" is the index of m in "outside"
+ int index = outside->index(m);
+ // new_neighbor is the inside cell which is registered as the neighbor
+ // of the outside cell => it's either a newly created inside cell or an
+ // old inside cell which we are about to delete
+ Full_cell_handle new_neighbor = outside->neighbor(index);
+
+ // Is new_neighbor still the old neighbor?
+ if (new_neighbor == inside)
+ {
+ task_queue.push(IITH_task(
+ Facet(inside, index_of_covertex(rot)), // boundary facet
+ index, // index_of_inside_cell_in_outside_cell
+ new_s, // future_neighbor
+ i, // new_cell_index_in_future_neighbor
+ index_of_second_covertex(rot) // index_of_future_neighbor_in_new_cell
+ ));
+ }
+ }
+ }
+
+ // If there are some neighbor relations to fix
+ if (task.future_neighbor != Full_cell_handle())
+ {
+ // now the new neighboring full_cell exists, we link both
+ set_neighbors(new_s,
+ task.index_of_future_neighbor_in_new_cell,
+ task.future_neighbor,
+ task.new_cell_index_in_future_neighbor);
+ }
+ }
+
+ return new_s;
+}
+
+template< class Dim, class Vb, class Fcb >
+template< typename Forward_iterator, typename OutputIterator >
+typename Triangulation_data_structure<Dim, Vb, Fcb>::Vertex_handle
+Triangulation_data_structure<Dim, Vb, Fcb>
+::insert_in_hole(Forward_iterator start, Forward_iterator end, Facet f,
+ OutputIterator out) /* Concept */
+{
+ CGAL_expensive_precondition(
+ ( std::distance(start, end) == 1 )
+ || ( current_dimension() > 1 ) );
+ Forward_iterator sit = start;
+ while( end != sit )
+ set_visited(*sit++, true);
+ Vertex_handle v = new_vertex();
+ insert_in_tagged_hole(v, f, out);
+ delete_full_cells(start, end);
+ return v;
+}
+
+template< class Dim, class Vb, class Fcb >
+template< typename Forward_iterator >
+typename Triangulation_data_structure<Dim, Vb, Fcb>::Vertex_handle
+Triangulation_data_structure<Dim, Vb, Fcb>
+::insert_in_hole(Forward_iterator start, Forward_iterator end, Facet f) /* Concept */
+{
+ Emptyset_iterator out;
+ return insert_in_hole(start, end, f, out);
+}
+
+template <class Dim, class Vb, class Fcb>
+void
+Triangulation_data_structure<Dim, Vb, Fcb>
+::clear_visited_marks(Full_cell_handle start) const // NOT DOCUMENTED
+{
+ CGAL_precondition(start != Full_cell_handle());
+
+ std::queue<Full_cell_handle> queue;
+ set_visited(start, false);
+ queue.push(start);
+ const int cur_dim = current_dimension();
+ while( ! queue.empty() )
+ {
+ Full_cell_handle s = queue.front();
+ queue.pop();
+ for( int i = 0; i <= cur_dim; ++i )
+ {
+ if( get_visited(s->neighbor(i)) )
+ {
+ set_visited(s->neighbor(i), false);
+ queue.push(s->neighbor(i));
+ }
+ }
+ }
+}
+
+template <class Dim, class Vb, class Fcb>
+void Triangulation_data_structure<Dim, Vb, Fcb>
+::do_insert_increase_dimension(Vertex_handle x, Vertex_handle star)
+{
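+ // Each pre-existing full cell receives |x| as its current_dimension()-th
+ // vertex; each cell that does not contain |star| additionally gets an
+ // unbounded "twin" cell containing |star|. Neighbor relations are then
+ // fixed by a breadth-first traversal over the cells, and the vertex order
+ // of the new cells is adjusted at the end when the dimension is even.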
+ Full_cell_handle start = full_cells_begin();
+ Full_cell_handle swap_me;
+ const int cur_dim = current_dimension();
+ for( Full_cell_iterator S = full_cells_begin(); S != full_cells_end(); ++S )
+ {
+ if( Vertex_handle() != S->vertex(cur_dim) )
+ continue;
+ set_visited(S, true);
+ // extends full_cell |S| to include the new vertex as the
+ // current_dimension()-th vertex
+ associate_vertex_with_full_cell(S, cur_dim, x);
+ if( ! S->has_vertex(star) )
+ { // S is bounded; we create its unbounded "twin" full_cell
+ Full_cell_handle S_new = new_full_cell();
+ set_neighbors(S, cur_dim, S_new, 0);
+ associate_vertex_with_full_cell(S_new, 0, star);
+ // here, we could be clever so as to get consistent orientation
+ for( int k = 1; k <= cur_dim; ++k )
+ associate_vertex_with_full_cell(S_new, k, vertex(S, k - 1));
+ }
+ }
+ // now we setup the neighbors
+ set_visited(start, false);
+ std::queue<Full_cell_handle> queue;
+ queue.push(start);
+ while( ! queue.empty() )
+ {
+ Full_cell_handle S = queue.front();
+ queue.pop();
+ // here, the first visit above ensured that all neighbors exist now.
+ // Now we need to connect them through the adjacency relations
+ int star_index;
+ if( S->has_vertex(star, star_index) )
+ {
+ set_neighbors( S, cur_dim, neighbor(neighbor(S, star_index), cur_dim),
+ // this is tricky :-) :
+ mirror_index(S, star_index) + 1);
+ }
+ else
+ {
+ Full_cell_handle S_new = neighbor(S, cur_dim);
+ for( int k = 0 ; k < cur_dim ; ++k )
+ {
+ Full_cell_handle S_opp = neighbor(S, k);
+ if( ! S_opp->has_vertex(star) )
+ set_neighbors(S_new, k + 1, neighbor(S_opp, cur_dim), mirror_index(S, k) + 1);
+ // neighbor of S_new opposite to v is S_new'
+ // the vertex opposite to v remains the same but ...
+ // remember the shifting of the vertices one step to the right
+ }
+ }
+ for( int k = 0 ; k < cur_dim ; ++k )
+ if( get_visited(neighbor(S, k)) )
+ {
+ set_visited(neighbor(S, k), false);
+ queue.push(neighbor(S, k));
+ }
+ }
+ if( ( ( cur_dim % 2 ) == 0 ) && ( cur_dim > 1 ) )
+ {
+ for( Full_cell_iterator S = full_cells_begin(); S != full_cells_end(); ++S )
+ {
+ if( x != S->vertex(cur_dim) )
+ S->swap_vertices(cur_dim - 1, cur_dim);
+ }
+ }
+ if( Full_cell_handle() != swap_me )
+ swap_me->swap_vertices(1, 2);
+}
+
+template <class Dim, class Vb, class Fcb>
+typename Triangulation_data_structure<Dim, Vb, Fcb>::Vertex_handle
+Triangulation_data_structure<Dim, Vb, Fcb>
+::insert_increase_dimension(Vertex_handle star) /* Concept */
+{
+ const int prev_cur_dim = current_dimension();
+ CGAL_precondition(prev_cur_dim < maximal_dimension());
+ if( -2 != current_dimension() )
+ {
+ CGAL_precondition( Vertex_handle() != star );
+ CGAL_expensive_precondition(is_vertex(star));
+ }
+
+ set_current_dimension(prev_cur_dim + 1);
+ Vertex_handle v = new_vertex();
+ switch( prev_cur_dim )
+ {
+ case -2:
+ { // insertion of the first vertex
+ // ( geometrically : infinite vertex )
+ Full_cell_handle s = new_full_cell();
+ associate_vertex_with_full_cell(s, 0, v);
+ break;
+ }
+ case -1:
+ { // insertion of the second vertex
+ // ( geometrically : first finite vertex )
+ // we create a triangulation of the 0-sphere, with
+ // vertices |star| and |v|
+ Full_cell_handle infinite_full_cell = star->full_cell();
+ Full_cell_handle finite_full_cell = new_full_cell();
+ associate_vertex_with_full_cell(finite_full_cell, 0, v);
+ set_neighbors(infinite_full_cell, 0, finite_full_cell, 0);
+ break;
+ }
+ default:
+ do_insert_increase_dimension(v, star);
+ break;
+ }
+ return v;
+}
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+// - - - - - - - - - - - - - - - - - - - - - - - - VALIDITY CHECKS
+
+template <class Dimen, class Vb, class Fcb>
+bool Triangulation_data_structure<Dimen, Vb, Fcb>
+::is_valid(bool verbose, int /* level */) const /* Concept */
+{
+ Full_cell_const_handle s, t;
+ Vertex_const_handle v;
+ int i, j, k;
+
+ if( current_dimension() == -2 )
+ {
+ if( ! vertices_.empty() || ! full_cells_.empty() )
+ {
+ if( verbose ) CGAL_warning_msg(false, "current dimension is -2 but there are vertices or full_cells");
+ return false;
+ }
+ }
+
+ if( current_dimension() == -1 )
+ {
+ if ( (number_of_vertices() != 1) || (number_of_full_cells() != 1) )
+ {
+ if( verbose ) CGAL_warning_msg(false, "current dimension is -1 but there isn't one vertex and one full_cell");
+ return false;
+ }
+ }
+
+ for( v = vertices_begin(); v != vertices_end(); ++v )
+ {
+ if( ! v->is_valid(verbose) )
+ return false;
+ }
+
+ // FUTURE: for each vertex v, gather incident full_cells. then, check that
+ // any full_cell containing v is among those gathered full_cells...
+
+ if( current_dimension() < 0 )
+ return true;
+
+ for( s = full_cells_begin(); s != full_cells_end(); ++s )
+ {
+ if( ! s->is_valid(verbose) )
+ return false;
+ // check that the full cell has no duplicate vertices
+ for( i = 0; i <= current_dimension(); ++i )
+ for( j = i + 1; j <= current_dimension(); ++j )
+ if( vertex(s,i) == vertex(s,j) )
+ {
+ CGAL_warning_msg(false, "a full_cell has two equal vertices");
+ return false;
+ }
+ }
+
+ for( s = full_cells_begin(); s != full_cells_end(); ++s )
+ {
+ for( i = 0; i <= current_dimension(); ++i )
+ if( (t = neighbor(s,i)) != Full_cell_const_handle() )
+ {
+ int l = mirror_index(s,i);
+ if( s != neighbor(t,l) || i != mirror_index(t,l) )
+ {
+ if( verbose ) CGAL_warning_msg(false, "neighbor relation is not symmetric");
+ return false;
+ }
+ for( j = 0; j <= current_dimension(); ++j )
+ if( j != i )
+ {
+ // vertex(s,j) must also occur as a vertex of t
+ for( k = 0; k <= current_dimension() && ( vertex(s,j) != vertex(t,k) || k == l); ++k )
+ ;
+ if( k > current_dimension() )
+ {
+ if( verbose ) CGAL_warning_msg(false, "too few shared vertices between neighbors full_cells.");
+ return false;
+ }
+ }
+ }
+ else
+ {
+ if( verbose ) CGAL_warning_msg(false, "full_cell has a NULL neighbor");
+ return false;
+ }
+ }
+ return true;
+}
+
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+// - - - - - - - - - - - - - - - - - - - - - - - - INPUT / OUTPUT
+
+// NOT DOCUMENTED
+template <class Dim, class Vb, class Fcb>
+template <class OutStream>
+void Triangulation_data_structure<Dim, Vb, Fcb>
+::write_graph(OutStream & os)
+{
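+ // Output format: the number of vertices (index 0 stands for the vertex at
+ // infinity), then one line per vertex with its degree followed by the
+ // indices of its neighbors in the 1-skeleton.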
+ std::vector<std::set<int> > edges;
+ os << number_of_vertices() + 1; // add the vertex at infinity
+ int count(1);
+ for( Vertex_iterator vit = vertices_begin(); vit != vertices_end(); ++vit )
+ vit->idx_ = count++;
+ edges.resize(number_of_vertices()+1);
+ for( Full_cell_iterator sit = full_cells_begin(); sit != full_cells_end(); ++sit )
+ {
+ int v1 = 0;
+ while( v1 < current_dimension() )
+ {
+ int v2 = v1 + 1;
+ while( v2 <= current_dimension() )
+ {
+ int i1, i2;
+ if( Vertex_handle() != sit->vertex(v1) )
+ i1 = sit->vertex(v1)->idx_;
+ else
+ i1 = 0;
+ if( Vertex_handle() != sit->vertex(v2) )
+ i2 = sit->vertex(v2)->idx_;
+ else
+ i2 = 0;
+ edges[i1].insert(i2);
+ edges[i2].insert(i1);
+ ++v2;
+ }
+ ++v1;
+ }
+ }
+ for( std::size_t i = 0; i < edges.size(); ++i )
+ {
+ os << std::endl << edges[i].size();
+ for( std::set<int>::const_iterator nit = edges[i].begin();
+ nit != edges[i].end(); ++nit )
+ {
+ os << ' ' << (*nit);
+ }
+ }
+}
+
+// NOT DOCUMENTED...
+template<class Dimen, class Vb, class Fcb>
+std::istream &
+Triangulation_data_structure<Dimen, Vb, Fcb>
+::read_full_cells(std::istream & is, const std::vector<Vertex_handle> & vertices)
+{
+ std::size_t m; // number of full_cells
+ int index;
+ const int cd = current_dimension();
+ if( is_ascii(is) )
+ is >> m;
+ else
+ read(is, m, io_Read_write());
+
+ std::vector<Full_cell_handle> full_cells;
+ full_cells.reserve(m);
+ // read the vertices of each full_cell
+ std::size_t i = 0;
+ while( i < m )
+ {
+ Full_cell_handle s = new_full_cell();
+ full_cells.push_back(s);
+ for( int j = 0; j <= cd; ++j )
+ {
+ if( is_ascii(is) )
+ is >> index;
+ else
+ read(is, index);
+ s->set_vertex(j, vertices[index]);
+ }
+ // read other non-combinatorial information for the full_cells
+ is >> (*s);
+ ++i;
+ }
+
+ // read the neighbors of each full_cell
+ i = 0;
+ if( is_ascii(is) )
+ while( i < m )
+ {
+ for( int j = 0; j <= cd; ++j )
+ {
+ is >> index;
+ full_cells[i]->set_neighbor(j, full_cells[index]);
+ }
+ ++i;
+ }
+ else
+ while( i < m )
+ {
+ for( int j = 0; j <= cd; ++j )
+ {
+ read(is, index);
+ full_cells[i]->set_neighbor(j, full_cells[index]);
+ }
+ ++i;
+ }
+
+ // compute the mirror indices
+ for( i = 0; i < m; ++i )
+ {
+ Full_cell_handle s = full_cells[i];
+ for( int j = 0; j <= cd; ++j )
+ {
+ if( -1 != s->mirror_index(j) )
+ continue;
+ Full_cell_handle n = s->neighbor(j);
+ int k = 0;
+ Full_cell_handle nn = n->neighbor(k);
+ while( s != nn )
+ nn = n->neighbor(++k);
+ s->set_mirror_index(j,k);
+ n->set_mirror_index(k,j);
+ }
+ }
+ return is;
+}
+
+// NOT DOCUMENTED...
+template<class Dimen, class Vb, class Fcb>
+std::ostream &
+Triangulation_data_structure<Dimen, Vb, Fcb>
+::write_full_cells(std::ostream & os, std::map<Vertex_const_handle, int> & index_of_vertex) const
+{
+ std::map<Full_cell_const_handle, int> index_of_full_cell;
+
+ std::size_t m = number_of_full_cells();
+
+ if( is_ascii(os) )
+ os << std::endl << m;
+ else
+ write(os, m, io_Read_write());
+
+ const int cur_dim = current_dimension();
+ // write the vertex indices of each full_cell
+ int i = 0;
+ for( Full_cell_const_iterator it = full_cells_begin(); it != full_cells_end(); ++it )
+ {
+ index_of_full_cell[it] = i++;
+ if( is_ascii(os) )
+ os << std::endl;
+ for( int j = 0; j <= cur_dim; ++j )
+ {
+ if( is_ascii(os) )
+ os << ' ' << index_of_vertex[it->vertex(j)];
+ else
+ write(os, index_of_vertex[it->vertex(j)]);
+ }
+ // write other non-combinatorial information for the full_cells
+ os << (*it);
+ }
+
+ CGAL_assertion( (std::size_t) i == m );
+
+ // write the neighbors of each full_cell
+ if( is_ascii(os) )
+ for( Full_cell_const_iterator it = full_cells_begin(); it != full_cells_end(); ++it )
+ {
+ os << std::endl;
+ for( int j = 0; j <= cur_dim; ++j )
+ os << ' ' << index_of_full_cell[it->neighbor(j)];
+ }
+ else
+ for( Full_cell_const_iterator it = full_cells_begin(); it != full_cells_end(); ++it )
+ {
+ for( int j = 0; j <= cur_dim; ++j )
+ write(os, index_of_full_cell[it->neighbor(j)]);
+ }
+
+ return os;
+}
+
+// = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
+
+// FUNCTIONS THAT ARE NOT MEMBER FUNCTIONS:
+
+template<class Dimen, class Vb, class Fcb>
+std::istream &
+operator>>(std::istream & is, Triangulation_data_structure<Dimen, Vb, Fcb> & tr)
+ // reads:
+ // - the current dimension
+ // - the number of finite vertices
+ // - the non combinatorial information on vertices (point, etc)
+ // - the number of full_cells
+ // - the full_cells by the indices of their vertices in the preceding list
+ // of vertices, plus the non combinatorial information on each full_cell
+ // - the neighbors of each full_cell by their index in the preceding list
+{
+ typedef Triangulation_data_structure<Dimen, Vb, Fcb> TDS;
+ typedef typename TDS::Vertex_handle Vertex_handle;
+
+ // read current dimension and number of vertices
+ std::size_t n;
+ int cd;
+ if( is_ascii(is) )
+ is >> cd >> n;
+ else
+ {
+ read(is, cd);
+ read(is, n, io_Read_write());
+ }
+
+ CGAL_assertion_msg( cd <= tr.maximal_dimension(), "input Triangulation_data_structure has too high dimension");
+
+ tr.clear();
+ tr.set_current_dimension(cd);
+
+ if( n == 0 )
+ return is;
+
+ std::vector<Vertex_handle> vertices;
+ vertices.resize(n);
+
+ // read the vertices:
+ std::size_t i(0);
+ while( i < n )
+ {
+ vertices[i] = tr.new_vertex();
+ is >> (*vertices[i]); // read a vertex
+ ++i;
+ }
+
+ // now, read the combinatorial information
+ return tr.read_full_cells(is, vertices);
+}
+
+template<class Dimen, class Vb, class Fcb>
+std::ostream &
+operator<<(std::ostream & os, const Triangulation_data_structure<Dimen, Vb, Fcb> & tr)
+ // writes:
+ // - the current dimension
+ // - the number of finite vertices
+ // - the non combinatorial information on vertices (point, etc)
+ // - the number of full cells
+ // - the full cells by the indices of their vertices in the preceding list
+ // of vertices, plus the non combinatorial information on each full_cell
+ // - the neighbors of each full_cell by their index in the preceding list
+{
+ typedef Triangulation_data_structure<Dimen, Vb, Fcb> TDS;
+ typedef typename TDS::Vertex_const_handle Vertex_handle;
+ typedef typename TDS::Vertex_const_iterator Vertex_iterator;
+
+ // outputs dimension and number of vertices
+ std::size_t n = tr.number_of_vertices();
+ if( is_ascii(os) )
+ os << tr.current_dimension() << std::endl << n;
+ else
+ {
+ write(os, tr.current_dimension());
+ write(os, n, io_Read_write());
+ }
+
+ if( n == 0 )
+ return os;
+
+ // write the vertices
+ std::map<Vertex_handle, int> index_of_vertex;
+ int i = 0;
+ for( Vertex_iterator it = tr.vertices_begin(); it != tr.vertices_end(); ++it, ++i )
+ {
+ os << *it; // write the vertex
+ if (is_ascii(os))
+ os << std::endl;
+ index_of_vertex[it] = i;
+ }
+ CGAL_assertion( (std::size_t) i == n );
+
+ // output the combinatorial information
+ return tr.write_full_cells(os, index_of_vertex);
+}
+
+} //namespace CGAL
+
+#endif // CGAL_TRIANGULATION_DATA_STRUCTURE_H
diff --git a/src/common/include/gudhi_patches/CGAL/Triangulation_ds_full_cell.h b/src/common/include/gudhi_patches/CGAL/Triangulation_ds_full_cell.h
new file mode 100644
index 00000000..541a6a85
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/Triangulation_ds_full_cell.h
@@ -0,0 +1,311 @@
+// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France).
+// All rights reserved.
+//
+// This file is part of CGAL (www.cgal.org).
+// You can redistribute it and/or modify it under the terms of the GNU
+// General Public License as published by the Free Software Foundation,
+// either version 3 of the License, or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Samuel Hornus
+
+#ifndef CGAL_TRIANGULATION_DS_FULL_CELL_H
+#define CGAL_TRIANGULATION_DS_FULL_CELL_H
+
+#include <CGAL/TDS_full_cell_default_storage_policy.h>
+#include <CGAL/TDS_full_cell_mirror_storage_policy.h>
+#include <CGAL/internal/Triangulation/Dummy_TDS.h>
+#include <CGAL/Dimension.h>
+#include <CGAL/Default.h>
+#include <CGAL/array.h>
+
+namespace CGAL {
+
+template< class TDS = void, typename FullCellStoragePolicy = Default >
+class Triangulation_ds_full_cell
+{
+ typedef typename Default::Get<FullCellStoragePolicy, TDS_full_cell_default_storage_policy>::type
+ Storage_policy;
+ typedef Triangulation_ds_full_cell<TDS> Self;
+ typedef typename TDS::Maximal_dimension Maximal_dimension;
+
+public:
+ typedef TDS Triangulation_data_structure;
+ typedef typename TDS::Face Face;
+ typedef typename TDS::Vertex_handle Vertex_handle; /* Concept */
+ typedef typename TDS::Vertex_const_handle Vertex_const_handle;
+ typedef typename TDS::Full_cell_handle Full_cell_handle; /* Concept */
+ typedef typename TDS::Full_cell_const_handle Full_cell_const_handle;
+ typedef typename TDS::Full_cell_data TDS_data; /* data that the TDS wants to be stored here */
+ template< typename TDS2 >
+ struct Rebind_TDS /* Concept */
+ {
+ typedef Triangulation_ds_full_cell<TDS2, FullCellStoragePolicy> Other;
+ };
+
+private: // STORAGE
+ typedef TFC_data< Vertex_handle, Full_cell_handle,
+ Maximal_dimension, Storage_policy > Combinatorics;
+ friend struct TFC_data< Vertex_handle, Full_cell_handle,
+ Maximal_dimension, Storage_policy >;
+ // array of vertices
+ typedef typename Combinatorics::Vertex_handle_array Vertex_handle_array;
+ // neighbor simplices
+ typedef typename Combinatorics::Full_cell_handle_array Full_cell_handle_array;
+
+ // NOT DOCUMENTED...
+ typename Combinatorics::Xor_type xor_of_vertices(const int cur_dim) const
+ {
+ return combinatorics_.xor_of_vertices(cur_dim);
+ }
+
+public:
+ typedef typename Vertex_handle_array::const_iterator Vertex_handle_const_iterator;
+ typedef Vertex_handle_const_iterator Vertex_handle_iterator; /* Concept */
+
+ Triangulation_ds_full_cell(const int dmax) /* Concept */
+ : combinatorics_(dmax), tds_data_()
+ {
+ CGAL_assertion( dmax > 0 );
+ for( int i = 0; i <= dmax; ++i )
+ {
+ set_neighbor(i, Full_cell_handle());
+ set_vertex(i, Vertex_handle());
+ set_mirror_index(i, -1);
+ }
+ }
+
+ Triangulation_ds_full_cell(const Triangulation_ds_full_cell & s) /* Concept */
+ : combinatorics_(s.combinatorics_), tds_data_(s.tds_data_)
+ {}
+
+ ~Triangulation_ds_full_cell() {}
+
+ int maximal_dimension() const /* Concept */
+ {
+ return static_cast<int>(vertices().size() - 1);
+ }
+
+ Vertex_handle_const_iterator vertices_begin() const /* Concept */
+ {
+ return vertices().begin();
+ }
+
+ Vertex_handle_const_iterator vertices_end() const /* Concept */
+ {
+ return vertices().end();
+ }
+
+ Vertex_handle vertex(const int i) const /* Concept */
+ {
+ CGAL_precondition(0<=i && i<=maximal_dimension());
+ return vertices()[i];
+ }
+
+ Full_cell_handle neighbor(const int i) const /* Concept */
+ {
+ CGAL_precondition(0<=i && i<=maximal_dimension());
+ return neighbors()[i];
+ }
+
+ int mirror_index(const int i) const /* Concept */
+ {
+ CGAL_precondition(0<=i && i<=maximal_dimension());
+ return combinatorics_.mirror_index(i);
+ }
+
+ // Advanced...
+ Vertex_handle mirror_vertex(const int i, const int cur_dim) const /* Concept */
+ {
+ CGAL_precondition(0<=i && i<=maximal_dimension());
+ return combinatorics_.mirror_vertex(i, cur_dim);
+ }
+
+ int index(Full_cell_const_handle s) const /* Concept */
+ {
+ // WE ASSUME THE FULL CELL WE ARE LOOKING FOR INDEED EXISTS!
+ CGAL_precondition(has_neighbor(s));
+ int index(0);
+ while( neighbor(index) != s )
+ ++index;
+ return index;
+ }
+
+ int index(Vertex_const_handle v) const /* Concept */
+ {
+ // WE ASSUME THE VERTEX WE ARE LOOKING FOR INDEED EXISTS!
+ CGAL_precondition(has_vertex(v));
+ int index(0);
+ while( vertex(index) != v )
+ ++index;
+ return index;
+ }
+
+ void set_vertex(const int i, Vertex_handle v) /* Concept */
+ {
+ CGAL_precondition(0<=i && i<=maximal_dimension());
+ vertices()[i] = v;
+ }
+
+ void set_neighbor(const int i, Full_cell_handle s) /* Concept */
+ {
+ CGAL_precondition(0<=i && i<=maximal_dimension());
+ neighbors()[i] = s;
+ }
+
+ void set_mirror_index(const int i, const int index) /* Concept */
+ {
+ CGAL_precondition(0<=i && i<=maximal_dimension());
+ combinatorics_.set_mirror_index(i, index);
+ }
+
+ bool has_vertex(Vertex_const_handle v) const /* Concept */
+ {
+ int index;
+ return has_vertex(v, index);
+ }
+
+ bool has_vertex(Vertex_const_handle v, int & index) const /* Concept */
+ {
+ const int d = maximal_dimension();
+ index = 0;
+ while( (index <= d) && (vertex(index) != v) )
+ ++index;
+ return (index <= d);
+ }
+
+ bool has_neighbor(Full_cell_const_handle s) const /* Concept */
+ {
+ int index;
+ return has_neighbor(s, index);
+ }
+
+ bool has_neighbor(Full_cell_const_handle s, int & index) const /* Concept */
+ {
+ const int d = maximal_dimension();
+ index = 0;
+ while( (index <= d) && (neighbor(index) != s) )
+ ++index;
+ return (index <= d);
+ }
+
+ void swap_vertices(const int d1, const int d2) /* Concept */
+ {
+ CGAL_precondition(0 <= d1 && d1<=maximal_dimension());
+ CGAL_precondition(0 <= d2 && d2<=maximal_dimension());
+ combinatorics_.swap_vertices(d1, d2);
+ }
+
+ const TDS_data & tds_data() const { return tds_data_; } /* Concept */
+ TDS_data & tds_data() { return tds_data_; } /* Concept */
+
+ void* for_compact_container() const { return combinatorics_.for_compact_container(); }
+ void* & for_compact_container() { return combinatorics_.for_compact_container(); }
+
+ bool is_valid(bool verbose = false, int = 0) const /* Concept */
+ {
+ const int d = maximal_dimension();
+ int i(0);
+ // test that the non-null Vertex_handles come first, before all null ones
+ while( i <= d && vertex(i) != Vertex_handle() ) ++i;
+ while( i <= d && vertex(i) == Vertex_handle() ) ++i;
+ if( i <= d )
+ {
+ if( verbose ) CGAL_warning_msg(false, "full cell has garbage handles to vertices.");
+ return false;
+ }
+ for( i = 0; i <= d; ++i )
+ {
+ if( Vertex_handle() == vertex(i) )
+ break; // there are no more vertices
+ Full_cell_handle n(neighbor(i));
+ if( Full_cell_handle() != n )
+ {
+ int mirror_idx(mirror_index(i));
+ if( n->neighbor(mirror_idx) == Full_cell_handle() )
+ {
+ if( verbose ) CGAL_warning_msg(false, "neighbor has no back-neighbor.");
+ return false;
+ }
+ if( &(*(n->neighbor(mirror_idx))) != this )
+ {
+ if( verbose ) CGAL_warning_msg(false, "neighbor does not point back to correct full cell.");
+ return false;
+ }
+ }
+ }
+ return true;
+ }
+
+private:
+ // access to data members:
+ Full_cell_handle_array & neighbors() {return combinatorics_.neighbors_; }
+ const Full_cell_handle_array & neighbors() const {return combinatorics_.neighbors_; }
+ Vertex_handle_array & vertices() {return combinatorics_.vertices_; }
+ const Vertex_handle_array & vertices() const {return combinatorics_.vertices_; }
+
+ // DATA MEMBERS
+ Combinatorics combinatorics_;
+ mutable TDS_data tds_data_;
+};
+
+// FUNCTIONS THAT ARE NOT MEMBER FUNCTIONS:
+
+template < typename TDS, typename SSP >
+std::ostream &
+operator<<(std::ostream & O, const Triangulation_ds_full_cell<TDS,SSP> &) /* Concept */
+{
+ /*if( is_ascii(O) )
+ {
+ // os << '\n';
+ }
+ else {}*/
+ return O;
+}
+
+template < typename TDS, typename SSP >
+std::istream &
+operator>>(std::istream & I, Triangulation_ds_full_cell<TDS,SSP> &) /* Concept */
+{
+ /*if( is_ascii(I) )
+ {}
+ else {}*/
+ return I;
+}
+
+// Special case: specialization when template parameter is void.
+
+// we must declare it for each possible full_cell storage policy because:
+// (GCC error:) default template arguments may not be used in partial specializations
+template< typename StoragePolicy >
+class Triangulation_ds_full_cell<void, StoragePolicy>
+{
+public:
+ typedef internal::Triangulation::Dummy_TDS TDS;
+ typedef TDS Triangulation_data_structure;
+ typedef TDS::Vertex_handle Vertex_handle;
+ typedef TDS::Vertex_const_handle Vertex_const_handle;
+ typedef TDS::Full_cell_handle Full_cell_handle;
+ typedef TDS::Full_cell_const_handle Full_cell_const_handle;
+ typedef TDS::Vertex_handle_const_iterator Vertex_handle_const_iterator;
+ typedef TDS::Full_cell_data TDS_data;
+ template <typename TDS2>
+ struct Rebind_TDS
+ {
+ typedef Triangulation_ds_full_cell<TDS2, StoragePolicy> Other;
+ };
+ Vertex_handle_const_iterator vertices_begin();
+ Vertex_handle_const_iterator vertices_end();
+};
+
+} //namespace CGAL
+
+#endif // CGAL_TRIANGULATION_DS_FULL_CELL_H
diff --git a/src/common/include/gudhi_patches/CGAL/Triangulation_ds_vertex.h b/src/common/include/gudhi_patches/CGAL/Triangulation_ds_vertex.h
new file mode 100644
index 00000000..381b97e1
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/Triangulation_ds_vertex.h
@@ -0,0 +1,154 @@
+// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France).
+// All rights reserved.
+//
+// This file is part of CGAL (www.cgal.org).
+// You can redistribute it and/or modify it under the terms of the GNU
+// General Public License as published by the Free Software Foundation,
+// either version 3 of the License, or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Samuel Hornus
+
+#ifndef CGAL_TRIANGULATION_DS_VERTEX_H
+#define CGAL_TRIANGULATION_DS_VERTEX_H
+
+#include <CGAL/Compact_container.h>
+#include <CGAL/internal/Triangulation/Dummy_TDS.h>
+
+namespace CGAL {
+
+/* The template parameter TDS must be a model of the concept
+ * 'TriangulationDataStructure' that stores vertices of type
+ * 'Triangulation_ds_vertex<TDS>'
+ */
+template< class TDS = void >
+class Triangulation_ds_vertex
+{
+ typedef Triangulation_ds_vertex<TDS> Self;
+
+public:
+ typedef TDS Triangulation_data_structure;
+ typedef typename TDS::Full_cell_handle Full_cell_handle; /* Concept */
+
+ template <typename TDS2>
+ struct Rebind_TDS /* Concept */
+ {
+ typedef Triangulation_ds_vertex<TDS2> Other;
+ };
+
+protected: // DATA MEMBERS
+ Full_cell_handle full_cell_; // A handle to an incident full_cell
+
+public:
+ // Constructs a vertex with incident full_cell 's'
+ Triangulation_ds_vertex(Full_cell_handle s) : full_cell_(s) /* Concept */
+ {
+ CGAL_assertion( Full_cell_handle() != s );
+ }
+ // Constructs a vertex with no incident full_cell
+ Triangulation_ds_vertex() : full_cell_() {} /* Concept */
+
+ ~Triangulation_ds_vertex() {}
+
+ /// Set 's' as an incident full_cell
+ void set_full_cell(Full_cell_handle s) /* Concept */
+ {
+ full_cell_ = s;
+ }
+
+ /// Returns a full_cell incident to the vertex
+ Full_cell_handle full_cell() const /* Concept */
+ {
+ return full_cell_;
+ }
+
+ bool is_valid(bool verbose = false, int /* level */ = 0) const /* Concept */
+ {
+ if( Full_cell_handle() == full_cell() )
+ {
+ if( verbose )
+ CGAL_warning_msg(false, "vertex has no incident full cell.");
+ return false;
+ }
+ bool found(false);
+ // These two typenames below are OK because TDS fulfills the
+ // TriangulationDataStructure concept.
+ typename TDS::Full_cell::Vertex_handle_iterator vit(full_cell()->vertices_begin());
+ typedef typename TDS::Vertex_handle Vertex_handle;
+ while( vit != full_cell()->vertices_end() )
+ {
+ if( Vertex_handle() == *vit )
+ break; // The full cell has no more vertices
+ if( this == &(**vit) )
+ {
+ found = true;
+ break;
+ }
+ ++vit;
+ }
+ if( ! found )
+ {
+ if( verbose )
+ CGAL_warning_msg(false, "vertex's adjacent full cell does not contain that vertex.");
+ return false;
+ }
+ return true;
+ }
+
+public: // FOR MEMORY MANAGEMENT
+
+ void* for_compact_container() const { return full_cell_.for_compact_container(); }
+ void* & for_compact_container() { return full_cell_.for_compact_container(); }
+
+}; // end of Triangulation_ds_vertex
+
+// FUNCTIONS THAT ARE NOT MEMBER FUNCTIONS:
+
+template < class TDS >
+std::istream &
+operator>>(std::istream & is, Triangulation_ds_vertex<TDS> &) /* Concept */
+{
+ /*if( is_ascii(is) )
+ {}
+ else {}*/
+ return is;
+}
+
+template< class TDS >
+std::ostream &
+operator<<(std::ostream & os, const Triangulation_ds_vertex<TDS> &) /* Concept */
+{
+ /*if( is_ascii(os) )
+ {
+ os << '\n';
+ }
+ else {}*/
+ return os;
+}
+
+// Special case: specialization when template parameter is void.
+
+template<>
+class Triangulation_ds_vertex<void>
+{
+public:
+ typedef internal::Triangulation::Dummy_TDS Triangulation_data_structure;
+ typedef Triangulation_data_structure::Full_cell_handle Full_cell_handle; /* Concept */
+ template <typename TDS2>
+ struct Rebind_TDS /* Concept */
+ {
+ typedef Triangulation_ds_vertex<TDS2> Other;
+ };
+};
+
+} //namespace CGAL
+
+#endif // CGAL_TRIANGULATION_DS_VERTEX_H
diff --git a/src/common/include/gudhi_patches/CGAL/Triangulation_face.h b/src/common/include/gudhi_patches/CGAL/Triangulation_face.h
new file mode 100644
index 00000000..bc9c1781
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/Triangulation_face.h
@@ -0,0 +1,111 @@
+// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France).
+// All rights reserved.
+//
+// This file is part of CGAL (www.cgal.org).
+// You can redistribute it and/or modify it under the terms of the GNU
+// General Public License as published by the Free Software Foundation,
+// either version 3 of the License, or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Samuel Hornus
+
+#ifndef CGAL_TRIANGULATION_FACE_H
+#define CGAL_TRIANGULATION_FACE_H
+
+#include <CGAL/basic.h>
+#include <CGAL/internal/Static_or_dynamic_array.h>
+
+namespace CGAL {
+
+template< typename TDS >
+class Triangulation_face
+{
+ typedef typename internal::Dimen_plus_one<typename TDS::Maximal_dimension>::type Dimen_plus;
+public:
+ typedef TDS Triangulation_data_structure;
+ typedef typename TDS::Full_cell_handle Full_cell_handle; /* Concept */
+ typedef typename TDS::Vertex_handle Vertex_handle; /* Concept */
+ typedef internal::S_or_D_array<int, Dimen_plus> Indices;
+
+protected:
+ Full_cell_handle full_cell_;
+ Indices indices_;
+
+public:
+ explicit Triangulation_face(Full_cell_handle s) /* Concept */
+ : full_cell_(s), indices_(s->maximal_dimension()+2)
+ {
+ CGAL_assertion( Full_cell_handle() != s );
+ clear();
+ }
+
+ explicit Triangulation_face(const int maximal_dim) /* Concept */
+ : full_cell_(), indices_(maximal_dim+2)
+ {
+ clear();
+ }
+
+ Triangulation_face(const Triangulation_face & f) /* Concept */
+ : full_cell_(f.full_cell_), indices_(f.indices_)
+ {}
+
+ int face_dimension() const /* Concept */
+ {
+ int i(0);
+ while( -1 != indices_[i] ) ++i;
+ return (i-1);
+ }
+
+ Full_cell_handle full_cell() const /* Concept */
+ {
+ return full_cell_;
+ }
+
+ int index(const int i) const /* Concept */
+ {
+ CGAL_precondition( (0 <= i) && (i <= face_dimension()) );
+ return indices_[i];
+ }
+
+ Vertex_handle vertex(const int i) const /* Concept */
+ {
+ int j = index(i);
+ if( j == -1 )
+ return Vertex_handle();
+ return full_cell()->vertex(j);
+ }
+
+// - - - - - - - - - - - - - - - - - - UPDATE FUNCTIONS
+
+ void clear() /* Concept */
+ {
+ const std::size_t d = indices_.size();
+ for(std::size_t i = 0; i < d; ++i )
+ indices_[i] = -1;
+ }
+
+ void set_full_cell(Full_cell_handle s) /* Concept */
+ {
+ CGAL_precondition( Full_cell_handle() != s );
+ full_cell_ = s;
+ }
+
+ void set_index(const int i, const int idx) /* Concept */
+ {
+ CGAL_precondition( (0 <= i) && ((size_t)i+1 < indices_.size()) );
+ CGAL_precondition( (0 <= idx) && ((size_t)idx < indices_.size()) );
+ indices_[i] = idx;
+ }
+};
+
+} //namespace CGAL
+
+#endif // CGAL_TRIANGULATION_FACE_H
diff --git a/src/common/include/gudhi_patches/CGAL/Triangulation_full_cell.h b/src/common/include/gudhi_patches/CGAL/Triangulation_full_cell.h
new file mode 100644
index 00000000..a0c5246f
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/Triangulation_full_cell.h
@@ -0,0 +1,148 @@
+// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France).
+// All rights reserved.
+//
+// This file is part of CGAL (www.cgal.org).
+// You can redistribute it and/or modify it under the terms of the GNU
+// General Public License as published by the Free Software Foundation,
+// either version 3 of the License, or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Samuel Hornus
+
+#ifndef CGAL_TRIANGULATION_SIMPLEX_H
+#define CGAL_TRIANGULATION_SIMPLEX_H
+
+#include <CGAL/Triangulation_ds_full_cell.h>
+#include <CGAL/internal/Triangulation/utilities.h>
+#include <CGAL/Iterator_project.h>
+#include <CGAL/Default.h>
+
+namespace CGAL {
+
+struct No_full_cell_data {};
+
+template< class TriangulationTraits, typename Data_ = No_full_cell_data, class TDSFullCell = Default >
+class Triangulation_full_cell : public Default::Get<TDSFullCell, Triangulation_ds_full_cell<> >::type
+{
+ // The default type for TDSFullCell is Triangulation_ds_full_cell<> :
+ typedef typename Default::Get<TDSFullCell, Triangulation_ds_full_cell<> >::type
+ Base;
+ typedef Triangulation_full_cell<TriangulationTraits, Data_, TDSFullCell> Self;
+public:
+ typedef Data_ Data;
+ typedef typename Base::Vertex_handle Vertex_handle;
+ typedef typename Base::Vertex_const_handle Vertex_const_handle;
+ typedef typename Base::Vertex_handle_const_iterator Vertex_handle_const_iterator;
+ typedef typename Base::Full_cell_const_handle Full_cell_const_handle;
+ typedef typename TriangulationTraits::Point_d Point;
+ typedef typename TriangulationTraits::Point_d Point_d;
+
+private: // DATA MEMBERS
+ Data data_;
+
+public:
+
+ using Base::vertices_begin;
+ using Base::vertices_end;
+
+ template< class TDS2 >
+ struct Rebind_TDS
+ {
+ typedef typename Base::template Rebind_TDS<TDS2>::Other TDSFullCell2;
+ typedef Triangulation_full_cell<TriangulationTraits, Data_, TDSFullCell2> Other;
+ };
+
+ Triangulation_full_cell(const int d)
+ : Base(d), data_() {}
+
+ Triangulation_full_cell(const Self & s)
+ : Base(s), data_(s.data_) {}
+
+ const Data & data() const
+ {
+ return data_;
+ }
+
+ Data & data()
+ {
+ return data_;
+ }
+
+ struct Point_from_vertex_handle
+ {
+ typedef Vertex_handle argument_type;
+ typedef Point result_type;
+ result_type & operator()(argument_type & x) const
+ {
+ return x->point();
+ }
+ const result_type & operator()(const argument_type & x) const
+ {
+ return x->point();
+ }
+ };
+
+protected:
+
+ typedef CGAL::Iterator_project<
+ Vertex_handle_const_iterator,
+ internal::Triangulation::Point_from_vertex_handle<Vertex_handle, Point>
+ > Point_const_iterator;
+
+ Point_const_iterator points_begin() const
+ { return Point_const_iterator(Base::vertices_begin()); }
+ Point_const_iterator points_end() const
+ { return Point_const_iterator(Base::vertices_end()); }
+};
+
+// FUNCTIONS THAT ARE NOT MEMBER FUNCTIONS:
+
+inline
+std::istream &
+operator>>(std::istream & is, No_full_cell_data &)
+{
+ return is;
+}
+
+inline
+std::ostream &
+operator<<(std::ostream & os, const No_full_cell_data &)
+{
+ return os;
+}
+
+template < typename TDS, typename Data, typename SSP >
+std::ostream &
+operator<<(std::ostream & O, const Triangulation_full_cell<TDS, Data, SSP> & s)
+{
+ /*if( is_ascii(O) )
+ {
+ // os << '\n';
+ }
+ else {}*/
+ O << s.data();
+ return O;
+}
+
+template < typename TDS, typename Data, typename SSP >
+std::istream &
+operator>>(std::istream & I, Triangulation_full_cell<TDS, Data, SSP> & s)
+{
+ /*if( is_ascii(I) )
+ {}
+ else {}*/
+ I >> s.data();
+ return I;
+}
+
+} //namespace CGAL
+
+#endif // CGAL_TRIANGULATION_SIMPLEX_H
diff --git a/src/common/include/gudhi_patches/CGAL/Triangulation_vertex.h b/src/common/include/gudhi_patches/CGAL/Triangulation_vertex.h
new file mode 100644
index 00000000..f364717f
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/Triangulation_vertex.h
@@ -0,0 +1,128 @@
+// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France).
+// All rights reserved.
+//
+// This file is part of CGAL (www.cgal.org).
+// You can redistribute it and/or modify it under the terms of the GNU
+// General Public License as published by the Free Software Foundation,
+// either version 3 of the License, or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Samuel Hornus
+
+#ifndef CGAL_TRIANGULATION_VERTEX_H
+#define CGAL_TRIANGULATION_VERTEX_H
+
+#include <CGAL/Triangulation_ds_vertex.h>
+#include <CGAL/Default.h>
+
+namespace CGAL {
+
+struct No_vertex_data {};
+
+template< class TriangulationTraits, typename Data_ = No_vertex_data, class TDSVertex = Default >
+class Triangulation_vertex : public Default::Get<TDSVertex, Triangulation_ds_vertex<> >::type
+{
+ // The default type for TDSVertex is Triangulation_ds_vertex<> :
+ typedef typename Default::Get<TDSVertex, Triangulation_ds_vertex<> >::type
+ Base;
+ typedef Triangulation_vertex<TriangulationTraits, Data_, TDSVertex> Self;
+public:
+ typedef Data_ Data;
+ typedef typename TriangulationTraits::Point_d Point;
+ typedef typename TriangulationTraits::Point_d Point_d;
+ typedef typename Base::Full_cell_handle Full_cell_handle;
+
+ template <typename TDS2>
+ struct Rebind_TDS
+ {
+ typedef typename Base::template Rebind_TDS<TDS2>::Other TDSVertex2;
+ typedef Triangulation_vertex<TriangulationTraits, Data_, TDSVertex2> Other;
+ };
+
+private: // DATA MEMBERS
+ Point point_;
+ Data data_;
+
+public:
+ template< typename T >
+ Triangulation_vertex(Full_cell_handle s, const Point & p, const T & t)
+ : Base(s), point_(p), data_(t) {}
+ Triangulation_vertex(Full_cell_handle s, const Point & p)
+ : Base(s), point_(p), data_() {}
+ template< typename T >
+ Triangulation_vertex(const Point & p, const T & t)
+ : Base(), point_(p), data_(t) {}
+ Triangulation_vertex(const Point & p)
+ : Base(), point_(p), data_() {}
+ Triangulation_vertex() : Base(), point_(), data_() {}
+
+ ~Triangulation_vertex() {}
+
+ /// Set the position in space of the vertex to 'p'
+ void set_point(const Point & p)
+ {
+ point_ = p;
+ }
+
+ /// Returns the position in space of the vertex
+ const Point & point() const
+ {
+ return point_;
+ }
+
+ const Data & data() const
+ {
+ return data_;
+ }
+
+ Data & data()
+ {
+ return data_;
+ }
+
+}; // end of Triangulation_vertex
+
+// NON CLASS-MEMBER FUNCTIONS
+
+inline
+std::istream &
+operator>>(std::istream & is, No_vertex_data &)
+{
+ return is;
+}
+
+inline
+std::ostream &
+operator<<(std::ostream & os, const No_vertex_data &)
+{
+ return os;
+}
+
+template < class A, typename Data, class B >
+std::istream &
+operator>>(std::istream & is, Triangulation_vertex<A, Data, B> & v)
+{
+ is >> v.point();
+ return (is >> v.data());
+}
+
+template< class A, typename Data, class B >
+std::ostream &
+operator<<(std::ostream & os, const Triangulation_vertex<A, Data, B> & v)
+{
+ os << v.point();
+ os << v.data();
+ return os;
+}
+
+} //namespace CGAL
+
+#endif // CGAL_TRIANGULATION_VERTEX_H
diff --git a/src/common/include/gudhi_patches/CGAL/argument_swaps.h b/src/common/include/gudhi_patches/CGAL/argument_swaps.h
new file mode 100644
index 00000000..aa16f29b
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/argument_swaps.h
@@ -0,0 +1,88 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_ARGUMENT_SWAPS_H
+#define CGAL_ARGUMENT_SWAPS_H
+
+#include <CGAL/config.h>
+#include <utility>
+
+#ifndef CGAL_CXX11
+#include <boost/preprocessor/repetition.hpp>
+#include <boost/utility/result_of.hpp>
+#endif
+
+namespace CGAL {
+
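+// Both implementations below provide the same behaviour: the functor
+// forwards its arguments to the callable it receives, with the last
+// argument rotated to the front, e.g. Apply_to_last_then_rest()(f, a, b, c)
+// calls f(c, a, b).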
+#ifdef CGAL_CXX11
+
+namespace internal {
+
+template<int,class...> struct Apply_to_last_then_rest_;
+
+template<int d,class F,class T,class... U>
+struct Apply_to_last_then_rest_<d,F,T,U...> {
+ typedef typename Apply_to_last_then_rest_<d-1,F,U...,T>::result_type result_type;
+ inline result_type operator()(F&&f,T&&t,U&&...u)const{
+ return Apply_to_last_then_rest_<d-1,F,U...,T>()(
+ std::forward<F>(f),
+ std::forward<U>(u)...,
+ std::forward<T>(t));
+ }
+};
+
+template<class F,class T,class... U>
+struct Apply_to_last_then_rest_<0,F,T,U...> {
+ typedef decltype(std::declval<F>()(std::declval<T>(), std::declval<U>()...)) result_type;
+ inline result_type operator()(F&&f,T&&t,U&&...u)const{
+ return std::forward<F>(f)(std::forward<T>(t), std::forward<U>(u)...);
+ }
+};
+
+} // namespace internal
+
+
+struct Apply_to_last_then_rest {
+ template<class F,class T,class...U> inline
+ typename internal::Apply_to_last_then_rest_<sizeof...(U),F,T,U...>::result_type
+ operator()(F&&f,T&&t,U&&...u)const{
+ return internal::Apply_to_last_then_rest_<sizeof...(U),F,T,U...>()(
+ std::forward<F>(f),
+ std::forward<T>(t),
+ std::forward<U>(u)...);
+ }
+};
+
+#else // CGAL_CXX11
+
+struct Apply_to_last_then_rest {
+#define CGAL_CODE(Z,N,_) template<class F,class T,BOOST_PP_ENUM_PARAMS(N,class T)> \
+ typename boost::result_of<F(T,BOOST_PP_ENUM_PARAMS(N,T))>::type \
+ operator()(F const&f, BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t), T const&t) const { \
+ return f(t,BOOST_PP_ENUM_PARAMS(N,t)); \
+ }
+ BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_)
+#undef CGAL_CODE
+};
+
+#endif // CGAL_CXX11
+
+} // namespace CGAL
+
+#endif // CGAL_ARGUMENT_SWAPS_H
diff --git a/src/common/include/gudhi_patches/CGAL/determinant_of_vectors.h b/src/common/include/gudhi_patches/CGAL/determinant_of_vectors.h
new file mode 100644
index 00000000..e1bad64e
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/determinant_of_vectors.h
@@ -0,0 +1,117 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_DETVEC_H
+#define CGAL_DETVEC_H
+#include <CGAL/determinant.h>
+#include <CGAL/predicates/sign_of_determinant.h>
+
+namespace CGAL {
+ // TODO: determine whether it is better to pass them by rows or by columns.
+
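+ // Each overload below takes d vectors of a d-dimensional space (d = 2..6),
+ // accessed through operator[], and expands them into the corresponding
+ // d x d determinant, delegating to CGAL::determinant and
+ // CGAL::sign_of_determinant.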
+ template <class NT, class Vector> inline
+ NT determinant_of_vectors(Vector const&a, Vector const&b){
+ return determinant<NT>(a[0],a[1],b[0],b[1]);
+ }
+ template <class NT, class Vector> inline
+ typename Sgn<NT>::result_type
+ sign_of_determinant_of_vectors(Vector const&a, Vector const&b){
+ return sign_of_determinant<NT>(a[0],a[1],b[0],b[1]);
+ }
+
+ template <class NT, class Vector>
+ NT determinant_of_vectors(Vector const&a, Vector const&b,
+ Vector const&c){
+ return determinant<NT>(a[0],a[1],a[2],b[0],b[1],b[2],c[0],c[1],c[2]);
+ }
+ template <class NT, class Vector>
+ typename Sgn<NT>::result_type
+ sign_of_determinant_of_vectors(Vector const&a, Vector const&b,
+ Vector const&c){
+ return sign_of_determinant<NT>(a[0],a[1],a[2],b[0],b[1],b[2],c[0],c[1],c[2]);
+ }
+
+ template <class NT, class Vector>
+ NT determinant_of_vectors(Vector const&a, Vector const&b,
+ Vector const&c, Vector const&d){
+ return determinant<NT>(
+ a[0],a[1],a[2],a[3],
+ b[0],b[1],b[2],b[3],
+ c[0],c[1],c[2],c[3],
+ d[0],d[1],d[2],d[3]);
+ }
+ template <class NT, class Vector>
+ typename Sgn<NT>::result_type
+ sign_of_determinant_of_vectors(Vector const&a, Vector const&b,
+ Vector const&c, Vector const&d){
+ return sign_of_determinant<NT>(
+ a[0],a[1],a[2],a[3],
+ b[0],b[1],b[2],b[3],
+ c[0],c[1],c[2],c[3],
+ d[0],d[1],d[2],d[3]);
+ }
+
+ template <class NT, class Vector>
+ NT determinant_of_vectors(Vector const&a, Vector const&b,
+ Vector const&c, Vector const&d, Vector const&e){
+ return determinant<NT>(
+ a[0],a[1],a[2],a[3],a[4],
+ b[0],b[1],b[2],b[3],b[4],
+ c[0],c[1],c[2],c[3],c[4],
+ d[0],d[1],d[2],d[3],d[4],
+ e[0],e[1],e[2],e[3],e[4]);
+ }
+ template <class NT, class Vector>
+ typename Sgn<NT>::result_type
+ sign_of_determinant_of_vectors(Vector const&a, Vector const&b,
+ Vector const&c, Vector const&d, Vector const&e){
+ return sign_of_determinant<NT>(
+ a[0],a[1],a[2],a[3],a[4],
+ b[0],b[1],b[2],b[3],b[4],
+ c[0],c[1],c[2],c[3],c[4],
+ d[0],d[1],d[2],d[3],d[4],
+ e[0],e[1],e[2],e[3],e[4]);
+ }
+
+ template <class NT, class Vector>
+ NT determinant_of_vectors(Vector const&a, Vector const&b,
+ Vector const&c, Vector const&d, Vector const&e, Vector const&f){
+ return determinant<NT>(
+ a[0],a[1],a[2],a[3],a[4],a[5],
+ b[0],b[1],b[2],b[3],b[4],b[5],
+ c[0],c[1],c[2],c[3],c[4],c[5],
+ d[0],d[1],d[2],d[3],d[4],d[5],
+ e[0],e[1],e[2],e[3],e[4],e[5],
+ f[0],f[1],f[2],f[3],f[4],f[5]);
+ }
+ template <class NT, class Vector>
+ typename Sgn<NT>::result_type
+ sign_of_determinant_of_vectors(Vector const&a, Vector const&b,
+ Vector const&c, Vector const&d, Vector const&e, Vector const&f){
+ return sign_of_determinant<NT>(
+ a[0],a[1],a[2],a[3],a[4],a[5],
+ b[0],b[1],b[2],b[3],b[4],b[5],
+ c[0],c[1],c[2],c[3],c[4],c[5],
+ d[0],d[1],d[2],d[3],d[4],d[5],
+ e[0],e[1],e[2],e[3],e[4],e[5],
+ f[0],f[1],f[2],f[3],f[4],f[5]);
+ }
+
+}
+#endif
diff --git a/src/common/include/gudhi_patches/CGAL/internal/Combination_enumerator.h b/src/common/include/gudhi_patches/CGAL/internal/Combination_enumerator.h
new file mode 100644
index 00000000..f411e827
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/internal/Combination_enumerator.h
@@ -0,0 +1,148 @@
+// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France).
+// All rights reserved.
+//
+// This file is part of CGAL (www.cgal.org).
+// You can redistribute it and/or modify it under the terms of the GNU
+// General Public License as published by the Free Software Foundation,
+// either version 3 of the License, or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Samuel Hornus
+
+#ifndef CGAL_INTERNAL_COMBINATION_ENUMERATOR_H
+#define CGAL_INTERNAL_COMBINATION_ENUMERATOR_H
+
+#include <CGAL/basic.h>
+#include <vector>
+
+namespace CGAL {
+
+namespace internal {
+
+class Combination_enumerator
+{
+ // types and member data
+ typedef std::vector<int> Combination;
+ Combination combi_;
+ const int k_;
+ const int min_;
+ const int max_;
+ const int max_at_pos_0_;
+
+public:
+
+ // For generating all the combinations of |k| distinct elements in the
+ // interval [min, max] (both included)
+ Combination_enumerator(const int k, const int min, const int max)
+ : combi_(k), k_(k), min_(min), max_(max), max_at_pos_0_(max + 1 - k)
+ {
+ CGAL_assertion_msg( min <= max, "min is larger than max");
+ CGAL_assertion_msg( 1 <= k && k <= ( max - min + 1 ), "wrong value of k");
+ init();
+ }
+
+ Combination_enumerator(const Combination_enumerator & c)
+ : combi_(c.combi_), k_(c.k_), min_(c.min_), max_(c.max_), max_at_pos_0_(c.max_at_pos_0_)
+ {}
+
+ int number_of_elements()
+ {
+ return k_;
+ }
+
+ void init()
+ {
+ combi_.resize(k_);
+ for( int i = 0; i < k_; ++i )
+ element(i) = min_ + i;
+ }
+
+ bool end() const
+ {
+ return ( element(0) > max_at_pos_0_ );
+ }
+
+ int element(const int i) const
+ {
+ CGAL_assertion( 0 <= i && i < k_ );
+ return combi_[i];
+ }
+
+ int & element(const int i)
+ {
+ CGAL_assertion( 0 <= i && i < k_ );
+ return combi_[i];
+ }
+
+ int operator[](const int i) const
+ {
+ return element(i);
+ }
+
+ int & operator[](const int i)
+ {
+ return element(i);
+ }
+
+ void operator++()
+ {
+ int i = k_ - 1;
+ int max_at_pos_i(max_);
+ while( ( i >= 0 ) && ( element(i) >= max_at_pos_i ) )
+ {
+ --i;
+ --max_at_pos_i;
+ }
+ if( -1 == i )
+ {
+ if( element(0) == max_at_pos_0_ )
+ ++element(0); // mark the end of the enumeration with an impossible value
+ // Note that when we have arrived at the end of the enumeration, applying
+ // operator++() again does not change anything, so it is safe to
+ // apply it too many times.
+ }
+ else
+ {
+ ++element(i);
+ for( int j = i + 1; j < k_; ++j )
+ element(j) = element(i) + j - i;
+ }
+ }
+
+ Combination_enumerator operator++(int)
+ {
+ Combination_enumerator tmp(*this);
+ ++(*this);
+ return tmp;
+ }
+
+ // - - - - - - - - - - - - - - - - - - - - - - - - - - - TESTING
+#if 0
+ void test()
+ {
+ std::cerr << '\n';
+ while( ! end() )
+ {
+ std::cerr << '\n';
+ for( int i = 0; i < k_; ++i )
+ std::cerr << element(i) << ' ';
+ ++(*this);
+ }
+ init();
+ }
+#endif
+};
+
+} // end of namespace internal
+
+} // end of namespace CGAL
+
+#endif // CGAL_INTERNAL_COMBINATION_ENUMERATOR_H
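A minimal usage sketch of the enumerator above (not part of the patch; the values are illustrative and only the interface shown in the header is used):

#include <CGAL/internal/Combination_enumerator.h>
#include <iostream>

int main() {
  // All 3-element combinations of {0, 1, 2, 3, 4}, in lexicographic order.
  CGAL::internal::Combination_enumerator combi(3, 0, 4);
  while (!combi.end()) {
    for (int i = 0; i < combi.number_of_elements(); ++i)
      std::cout << combi[i] << ' ';
    std::cout << '\n';
    ++combi;  // advance to the next combination
  }
  return 0;
}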
diff --git a/src/common/include/gudhi_patches/CGAL/internal/Static_or_dynamic_array.h b/src/common/include/gudhi_patches/CGAL/internal/Static_or_dynamic_array.h
new file mode 100644
index 00000000..ee6195d9
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/internal/Static_or_dynamic_array.h
@@ -0,0 +1,116 @@
+// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France).
+// All rights reserved.
+//
+// This file is part of CGAL (www.cgal.org).
+// You can redistribute it and/or modify it under the terms of the GNU
+// General Public License as published by the Free Software Foundation,
+// either version 3 of the License, or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Samuel Hornus
+
+#ifndef CGAL_INTERNAL_STATIC_OR_DYNAMIC_ARRAY_H
+#define CGAL_INTERNAL_STATIC_OR_DYNAMIC_ARRAY_H
+
+#include <CGAL/Compact_container.h>
+#include <CGAL/Dimension.h>
+#include <CGAL/array.h>
+#include <vector>
+
+namespace CGAL {
+
+namespace internal {
+
+// Utility for adding one to a Dimension_tag:
+
+template<typename D>
+struct Dimen_plus_one;
+
+template<>
+struct Dimen_plus_one<Dynamic_dimension_tag>
+{
+ typedef Dynamic_dimension_tag type;
+};
+
+template<int D>
+struct Dimen_plus_one<Dimension_tag<D> >
+{
+ typedef Dimension_tag<D+1> type;
+};
+
+// A SMALL CONTAINER UTILITY FOR DYNAMIC/STATIC MEMORY MANAGEMENT
+
+// stores an array of static or dynamic size, depending on template parameter <B>.
+
+template< typename Containee, typename D, bool WithCompactContainerHelper = false>
+ struct S_or_D_array; // S = static, D = dynamic
+
+// The case of static size:
+template< typename Containee, int D, bool WithCompactContainerHelper >
+struct S_or_D_array< Containee, Dimension_tag< D >, WithCompactContainerHelper >
+: public array<Containee, D>
+{
+ typedef array<Containee, D> Base;
+ S_or_D_array(const int)
+ : Base()
+ {}
+ S_or_D_array(const int, const Containee & c)
+ : Base()
+ {
+ assign(c);
+ }
+ void* for_compact_container() const
+ {
+ return (*this)[0].for_compact_container();
+ }
+ void* & for_compact_container()
+ {
+ return (*this)[0].for_compact_container();
+ }
+};
+
+// The case of dynamic size
+template< typename Containee >
+struct S_or_D_array< Containee, Dynamic_dimension_tag, false >
+: public std::vector<Containee>
+{
+ typedef std::vector<Containee> Base;
+ // TODO: maybe we should use some "small-vector-optimized" class.
+ S_or_D_array(const int d)
+ : Base(d)
+ {}
+ S_or_D_array(const int d, const Containee & c)
+ : Base(d, c)
+ {}
+};
+
+// The case of dynamic size with for_compact_container
+template< typename Containee >
+struct S_or_D_array< Containee, Dynamic_dimension_tag, true >
+: public std::vector<Containee>
+{
+ typedef std::vector<Containee> Base;
+ S_or_D_array(const int d)
+ : Base(d), fcc_(NULL)
+ {}
+ S_or_D_array(const int d, const Containee & c)
+ : Base(d, c), fcc_(NULL)
+ {}
+ void* fcc_;
+ void* for_compact_container() const { return fcc_; }
+ void* & for_compact_container() { return fcc_; }
+};
+
+} // end of namespace internal
+
+} // end of namespace CGAL
+
+#endif // CGAL_INTERNAL_STATIC_OR_DYNAMIC_ARRAY_H
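A minimal sketch of the two flavours selected by the dimension tag (not part of the patch; it only relies on the constructors and the operator[]/size() inherited from the base containers shown above):

#include <CGAL/internal/Static_or_dynamic_array.h>
#include <CGAL/Dimension.h>

int main() {
  // Static size: the storage is an array<int, 3>; the size is fixed at compile time.
  CGAL::internal::S_or_D_array<int, CGAL::Dimension_tag<3> > s(3);
  // Dynamic size: the storage is a std::vector<int> of the requested size.
  CGAL::internal::S_or_D_array<int, CGAL::Dynamic_dimension_tag> d(5);
  s[0] = 1;
  d[0] = 1;
  return (s.size() == 3 && d.size() == 5) ? 0 : 1;
}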
diff --git a/src/common/include/gudhi_patches/CGAL/internal/Triangulation/Dummy_TDS.h b/src/common/include/gudhi_patches/CGAL/internal/Triangulation/Dummy_TDS.h
new file mode 100644
index 00000000..b3a0ec98
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/internal/Triangulation/Dummy_TDS.h
@@ -0,0 +1,49 @@
+// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France).
+// All rights reserved.
+//
+// This file is part of CGAL (www.cgal.org).
+// You can redistribute it and/or modify it under the terms of the GNU
+// General Public License as published by the Free Software Foundation,
+// either version 3 of the License, or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Samuel Hornus
+
+#ifndef CGAL_INTERNAL_TRIANGULATION_DUMMY_TDS_H
+#define CGAL_INTERNAL_TRIANGULATION_DUMMY_TDS_H
+
+namespace CGAL {
+
+namespace internal {
+namespace Triangulation {
+
+struct Dummy_TDS
+{
+ struct Vertex {};
+ struct Vertex_handle {};
+ struct Vertex_iterator {};
+ struct Vertex_const_handle {};
+ struct Vertex_const_iterator {};
+ struct Full_cell {};
+ struct Full_cell_handle {};
+ struct Full_cell_iterator {};
+ struct Full_cell_const_handle {};
+ struct Full_cell_const_iterator {};
+ struct Vertex_handle_const_iterator {};
+ struct Full_cell_data {};
+};
+
+} // namespace Triangulation
+} // namespace internal
+
+} //namespace CGAL
+
+#endif // CGAL_INTERNAL_TRIANGULATION_DUMMY_TDS_H
diff --git a/src/common/include/gudhi_patches/CGAL/internal/Triangulation/Triangulation_ds_iterators.h b/src/common/include/gudhi_patches/CGAL/internal/Triangulation/Triangulation_ds_iterators.h
new file mode 100644
index 00000000..7e360026
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/internal/Triangulation/Triangulation_ds_iterators.h
@@ -0,0 +1,154 @@
+// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France).
+// All rights reserved.
+//
+// This file is part of CGAL (www.cgal.org).
+// You can redistribute it and/or modify it under the terms of the GNU
+// General Public License as published by the Free Software Foundation,
+// either version 3 of the License, or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Samuel Hornus (Well... `copy, paste and hack' of Monique Teillaud's work)
+
+#ifndef CGAL_INTERNAL_TRIANGULATION_TRIANGULATION_DS_ITERATORS_H
+#define CGAL_INTERNAL_TRIANGULATION_TRIANGULATION_DS_ITERATORS_H
+
+namespace CGAL {
+
+namespace internal {
+namespace Triangulation {
+
+template< typename TDS >
+class Triangulation_ds_facet_iterator
+{
+ typedef typename TDS::Full_cell_handle Full_cell_handle;
+ typedef typename TDS::Facet Facet;
+
+ typedef Facet value_type;
+ typedef const Facet * pointer;
+ typedef const Facet & reference;
+ typedef std::size_t size_type;
+ typedef std::ptrdiff_t difference_type;
+ typedef std::bidirectional_iterator_tag iterator_category;
+
+ typedef Triangulation_ds_facet_iterator<TDS> Facet_iterator;
+
+ TDS & tds_;
+ Facet ft_;
+ const int cur_dim_;
+
+public:
+ Triangulation_ds_facet_iterator(TDS & tds)
+ : tds_(tds), ft_(tds.full_cells_begin(), 0), cur_dim_(tds.current_dimension())
+ {
+ CGAL_assertion( cur_dim_ > 0 );
+ while( ! canonical() )
+ raw_increment();
+ }
+
+ Triangulation_ds_facet_iterator(TDS & tds, int)
+ : tds_(tds), ft_(tds.full_cells_end(), 0), cur_dim_(tds.current_dimension())
+ {
+ CGAL_assertion( cur_dim_ > 0 );
+ CGAL_assertion( canonical() );
+ }
+
+ Facet_iterator & operator++()
+ {
+ increment();
+ return (*this);
+ }
+
+ Facet_iterator operator++(int)
+ {
+ Facet_iterator tmp(*this);
+ increment();
+ return tmp;
+ }
+
+ Facet_iterator & operator--()
+ {
+ decrement();
+ return (*this);
+ }
+
+ Facet_iterator operator--(int)
+ {
+ Facet_iterator tmp(*this);
+ decrement();
+ return tmp;
+ }
+
+ bool operator==(const Facet_iterator & fi) const
+ {
+ return (&tds_ == &fi.tds_) &&
+ (tds_.index_of_covertex(ft_) == fi.tds_.index_of_covertex(fi.ft_)) &&
+ (tds_.full_cell(ft_) == fi.tds_.full_cell(fi.ft_));
+ }
+
+ bool operator!=(const Facet_iterator & fi) const
+ {
+ return !(*this == fi);
+ }
+
+ reference operator*() const
+ {
+ return ft_;
+ }
+
+ pointer operator->() const
+ {
+ return &ft_;
+ }
+
+private:
+ bool canonical()
+ {
+ if( tds_.full_cells_end() == tds_.full_cell(ft_) )
+ return ( 0 == tds_.index_of_covertex(ft_) );
+ return ( tds_.full_cell(ft_) <
+ tds_.full_cell(ft_)->neighbor(tds_.index_of_covertex(ft_)) );
+ }
+
+ void raw_decrement()
+ {
+ int i = tds_.index_of_covertex(ft_);
+ if( i == 0 )
+ ft_ = Facet(--tds_.full_cell(ft_), cur_dim_);
+ else
+ ft_ = Facet(tds_.full_cell(ft_), i - 1);
+ }
+
+ void raw_increment()
+ {
+ int i = tds_.index_of_covertex(ft_);
+ if( i == cur_dim_ )
+ ft_ = Facet(++tds_.full_cell(ft_), 0);
+ else
+ ft_ = Facet(tds_.full_cell(ft_), i + 1);
+ }
+
+ void decrement()
+ {
+ do { raw_decrement(); } while( ! canonical() );
+ }
+
+ void increment()
+ {
+ do { raw_increment(); } while( ! canonical() );
+ }
+};
+
+} // namespace Triangulation
+} // namespace internal
+
+} //namespace CGAL
+
+#endif // CGAL_INTERNAL_TRIANGULATION_TRIANGULATION_DS_ITERATORS_H
diff --git a/src/common/include/gudhi_patches/CGAL/internal/Triangulation/utilities.h b/src/common/include/gudhi_patches/CGAL/internal/Triangulation/utilities.h
new file mode 100644
index 00000000..a1ffc775
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/internal/Triangulation/utilities.h
@@ -0,0 +1,154 @@
+// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France).
+// All rights reserved.
+//
+// This file is part of CGAL (www.cgal.org).
+// You can redistribute it and/or modify it under the terms of the GNU
+// General Public License as published by the Free Software Foundation,
+// either version 3 of the License, or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Samuel Hornus
+
+#ifndef CGAL_INTERNAL_TRIANGULATION_UTILITIES_H
+#define CGAL_INTERNAL_TRIANGULATION_UTILITIES_H
+
+#include <CGAL/basic.h>
+
+namespace CGAL {
+
+namespace internal {
+namespace Triangulation {
+
+template< class TDS >
+struct Dark_full_cell_data
+{
+ typedef typename TDS::Full_cell_handle Full_cell_handle;
+ Full_cell_handle light_copy_;
+ int count_;
+ Dark_full_cell_data() : light_copy_(), count_(0) {}
+};
+
+template< class TDS >
+struct Compare_faces_with_common_first_vertex
+{
+ typedef typename TDS::Face Face;
+
+ const int d_;
+
+public:
+
+ Compare_faces_with_common_first_vertex(const int d)
+ : d_(d)
+ {
+ CGAL_assertion( 0 < d );
+ }
+
+ explicit Compare_faces_with_common_first_vertex();
+
+ bool operator()(const Face & left, const Face & right) const
+ {
+ CGAL_assertion( d_ == left.face_dimension() );
+ CGAL_assertion( d_ == right.face_dimension() );
+ for( int i = 1; i <= d_; ++i )
+ {
+ if( left.vertex(i) < right.vertex(i) )
+ return true;
+ if( right.vertex(i) < left.vertex(i) )
+ return false;
+ }
+ return false;
+ }
+};
+
+template< class T >
+struct Compare_vertices_for_upper_face
+{
+ typedef typename T::Vertex_const_handle VCH;
+
+ const T & t_;
+
+public:
+
+ Compare_vertices_for_upper_face(const T & t)
+ : t_(t)
+ {}
+
+ explicit Compare_vertices_for_upper_face();
+
+ bool operator()(const VCH & left, const VCH & right) const
+ {
+ if( left == right )
+ return false;
+ if( t_.is_infinite(left) )
+ return true;
+ if( t_.is_infinite(right) )
+ return false;
+ return left < right;
+ }
+};
+
+template< class T >
+struct Compare_points_for_perturbation
+{
+ typedef typename T::Geom_traits::Point_d Point;
+
+ const T & t_;
+
+public:
+
+ Compare_points_for_perturbation(const T & t)
+ : t_(t)
+ {}
+
+ explicit Compare_points_for_perturbation();
+
+ bool operator()(const Point * left, const Point * right) const
+ {
+ return (SMALLER == t_.geom_traits().compare_lexicographically_d_object()(*left, *right));
+ }
+};
+
+template< class T >
+struct Point_from_pointer
+{
+ typedef const typename T::Geom_traits::Point_d * argument_type;
+ typedef const typename T::Geom_traits::Point_d result_type;
+ result_type & operator()(argument_type & x) const
+ {
+ return (*x);
+ }
+ const result_type & operator()(const argument_type & x) const
+ {
+ return (*x);
+ }
+};
+
+template< typename Vertex_handle, typename Point >
+struct Point_from_vertex_handle
+{
+ typedef Vertex_handle argument_type;
+ typedef Point result_type;
+ result_type & operator()(argument_type & x) const
+ {
+ return x->point();
+ }
+ const result_type & operator()(const argument_type & x) const
+ {
+ return x->point();
+ }
+};
+
+} // namespace Triangulation
+} // namespace internal
+
+} //namespace CGAL
+
+#endif // CGAL_INTERNAL_TRIANGULATION_UTILITIES_H
diff --git a/src/common/include/gudhi_patches/CGAL/iterator_from_indices.h b/src/common/include/gudhi_patches/CGAL/iterator_from_indices.h
new file mode 100644
index 00000000..110bb4be
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/iterator_from_indices.h
@@ -0,0 +1,75 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_ITERATOR_FROM_INDICES_H
+#define CGAL_ITERATOR_FROM_INDICES_H
+#include <CGAL/config.h>
+#include <boost/iterator/iterator_facade.hpp>
+namespace CGAL {
+template <class Ref_>
+struct Default_coordinate_access {
+ typedef Ref_ result_type;
+ template<class T> Ref_ operator()(T const& t, std::ptrdiff_t i)const{
+ return t[i];
+ }
+};
+
+//TODO: default type for Value_: typename same_cv<Container_,typename remove_cv<Container_>::type::value_type>::type
+template <class Container_, class Value_, class Ref_=
+#ifdef CGAL_CXX11
+ decltype(std::declval<Container_>()[0])
+#else
+ Value_&
+#endif
+ , class Coord_access = Default_coordinate_access<Ref_>
+ >
+class Iterator_from_indices
+: public boost::iterator_facade<Iterator_from_indices<Container_,Value_,Ref_,Coord_access>,
+ Value_, std::bidirectional_iterator_tag, Ref_>
+{
+ friend class boost::iterator_core_access;
+ //FIXME: use int to save space
+ //TODO: use a tuple to save space when Coord_access is empty
+ typedef std::ptrdiff_t index_t;
+ Container_* cont;
+ index_t index;
+ Coord_access ca;
+ void increment(){ ++index; }
+ void decrement(){ --index; }
+ void advance(std::ptrdiff_t n){ index+=n; }
+ ptrdiff_t distance_to(Iterator_from_indices const& other)const{
+ return other.index-index;
+ }
+ bool equal(Iterator_from_indices const& other)const{
+ return index==other.index;
+ }
+ Ref_ dereference()const{
+ //FIXME: use the functor properly
+ //Uh, and what did I mean by that?
+ return ca(*cont,index);
+ }
+ public:
+ Iterator_from_indices(Container_& cont_,std::size_t n)
+ : cont(&cont_), index(n) {}
+ template<class T>
+ Iterator_from_indices(Container_& cont_,std::size_t n,T const&t)
+ : cont(&cont_), index(n), ca(t) {}
+};
+}
+#endif // CGAL_ITERATOR_FROM_INDICES_H
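A minimal usage sketch (not part of the patch), indexing into a std::vector; the values are illustrative only:

#include <CGAL/iterator_from_indices.h>
#include <vector>
#include <iostream>

int main() {
  std::vector<double> v;
  v.push_back(1.5); v.push_back(2.5); v.push_back(3.5);
  // The iterator stores a pointer to the container plus an index.
  typedef CGAL::Iterator_from_indices<std::vector<double>, double> It;
  for (It it(v, 0), end(v, v.size()); it != end; ++it)
    std::cout << *it << ' ';  // prints 1.5 2.5 3.5
  std::cout << '\n';
  return 0;
}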
diff --git a/src/common/include/gudhi_patches/CGAL/transforming_iterator.h b/src/common/include/gudhi_patches/CGAL/transforming_iterator.h
new file mode 100644
index 00000000..15ea19a5
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/transforming_iterator.h
@@ -0,0 +1,123 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_TRANSFORMING_ITERATOR_H
+#define CGAL_TRANSFORMING_ITERATOR_H
+#include <boost/iterator/iterator_adaptor.hpp>
+#include <boost/utility/result_of.hpp>
+#include <boost/type_traits/is_empty.hpp>
+#include <boost/type_traits/is_reference.hpp>
+#include <boost/type_traits/is_integral.hpp>
+#include <boost/mpl/if.hpp>
+#include <boost/mpl/or.hpp>
+#include <CGAL/Default.h>
+#include <utility>
+
+// Inspired by the boost version, but more compact and
+// without any iterator_category games.
+
+namespace CGAL {
+namespace internal {
+
+// non-empty case
+template<class T,bool=boost::is_empty<T>::value> struct Functor_as_base {
+ Functor_as_base(){}
+ Functor_as_base(T const& t):f(t){}
+ //template<class T2> Functor_as_base(Functor_as_base<T2> const&g):f(g.functor()){}
+ T const& functor()const{return f;}
+ T & functor() {return f;}
+ private:
+ T f;
+};
+
+// empty case
+template<class T> struct Functor_as_base<T,true> : public T {
+ Functor_as_base(){}
+ Functor_as_base(T const& t):T(t){}
+ //template<class T2> Functor_as_base(Functor_as_base<T2> const&g):T(g.functor()){}
+ T const& functor()const{return *this;}
+ T & functor() {return *this;}
+};
+
+template <typename Derived, typename F, typename Iter, typename Ref, typename Val>
+class transforming_iterator_helper
+{
+ typedef std::iterator_traits<Iter> Iter_traits;
+ typedef typename Iter_traits::reference Iter_ref;
+ typedef typename Default::Get<Ref,
+#ifdef CGAL_CXX11
+ decltype(std::declval<F>()(std::declval<Iter_ref>()))
+#else
+ typename boost::result_of<F(typename Iter_traits::value_type)>::type
+ // should be reference instead of value_type
+#endif
+ >::type reference_;
+
+ typedef typename Default::Get<Val,typename boost::remove_cv<typename boost::remove_reference<reference_>::type>::type>::type value_type;
+
+ // Crappy heuristic. If we have *it that returns a Weighted_point and F that returns a reference to the Point contained in the Weighted_point it takes as argument, we do NOT want the transformed iterator to return a reference to the temporary *it. On the other hand, if *it returns an int n, and F returns a reference to array[n] it is not so good to lose the reference. This probably should be done elsewhere and should at least be made optional...
+ typedef typename boost::mpl::if_<
+ boost::mpl::or_<boost::is_reference<Iter_ref>,
+ boost::is_integral<Iter_ref> >,
+ reference_, value_type>::type reference;
+
+ public:
+ typedef boost::iterator_adaptor<
+ Derived,
+ Iter,
+ value_type,
+ typename Iter_traits::iterator_category,
+ reference
+ > type;
+};
+}
+
+template <typename F, typename Iter, typename Ref=Default, typename Val=Default>
+class transforming_iterator
+: public internal::transforming_iterator_helper<transforming_iterator<F,Iter,Ref,Val>,F,Iter,Ref,Val>::type,
+private internal::Functor_as_base<F>
+{
+ friend class boost::iterator_core_access;
+ typedef typename internal::transforming_iterator_helper<transforming_iterator,F,Iter,Ref,Val>::type Base;
+ typedef internal::Functor_as_base<F> Functor_base;
+ typename Base::reference dereference()const{
+ return functor()(*this->base_reference());
+ }
+ public:
+ using Functor_base::functor;
+ transforming_iterator(){}
+ explicit transforming_iterator(Iter i,F const& f=F())
+ :Base(i),Functor_base(f){}
+ template<class F2,class I2,class R2,class V2>
+ transforming_iterator(
+ transforming_iterator<F2,I2,R2,V2> const&i,
+ typename boost::enable_if_convertible<I2, Iter>::type* = 0,
+ typename boost::enable_if_convertible<F2, F>::type* = 0)
+ : Base(i.base()),Functor_base(i.functor()) {}
+
+};
+
+template <typename F, typename Iter> inline
+transforming_iterator<F,Iter> make_transforming_iterator(Iter i, F const&f=F()) {
+ return transforming_iterator<F,Iter>(i,f);
+}
+
+}
+
+#endif // CGAL_TRANSFORMING_ITERATOR_H
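A minimal usage sketch (not part of the patch): wrapping a const_iterator with a squaring functor. The Square functor is illustrative; its result_type typedef only matters on the non-C++11 boost::result_of path mentioned in the header above.

#include <CGAL/transforming_iterator.h>
#include <vector>
#include <iostream>

struct Square {
  typedef double result_type;  // used by boost::result_of when decltype is unavailable
  double operator()(double x) const { return x * x; }
};

int main() {
  const std::vector<double> v(3, 2.0);  // {2, 2, 2}
  typedef std::vector<double>::const_iterator Vit;
  typedef CGAL::transforming_iterator<Square, Vit> Tit;
  Tit begin = CGAL::make_transforming_iterator(v.begin(), Square());
  Tit end   = CGAL::make_transforming_iterator(v.end(), Square());
  for (; begin != end; ++begin)
    std::cout << *begin << ' ';  // prints 4 4 4
  std::cout << '\n';
  return 0;
}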
diff --git a/src/common/include/gudhi_patches/CGAL/transforming_pair_iterator.h b/src/common/include/gudhi_patches/CGAL/transforming_pair_iterator.h
new file mode 100644
index 00000000..48dac132
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/transforming_pair_iterator.h
@@ -0,0 +1,127 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_TRANSFORMING_PAIR_ITERATOR_H
+#define CGAL_TRANSFORMING_PAIR_ITERATOR_H
+// Should be a combination of transform_iterator and zip_iterator,
+// but boost's iterator_category games are a pain.
+
+#include <CGAL/transforming_iterator.h>
+#include <CGAL/assertions.h>
+#include <boost/type_traits/is_convertible.hpp>
+
+
+
+
+namespace CGAL {
+namespace internal {
+template <class Cat1, class Cat2, bool=boost::is_convertible<Cat1,Cat2>::value>
+struct Min_category {
+ CGAL_static_assertion((boost::is_convertible<Cat2,Cat1>::value));
+ typedef Cat1 type;
+};
+
+template <class Cat1, class Cat2>
+struct Min_category<Cat1,Cat2,true> {
+ typedef Cat2 type;
+};
+
+
+template <typename Derived, typename F, typename It1, typename It2, typename Ref, typename Val>
+class transforming_pair_iterator_helper
+{
+ typedef typename Min_category<
+ typename std::iterator_traits<It1>::iterator_category,
+ typename std::iterator_traits<It2>::iterator_category>
+ ::type iterator_category;
+
+ typedef typename Default::Get<Ref,
+#ifdef CGAL_CXX11
+ decltype(std::declval<F>()(std::declval<typename std::iterator_traits<It1>::reference>(),std::declval<typename std::iterator_traits<It2>::reference>()))
+#else
+ typename boost::result_of<F(typename std::iterator_traits<It1>::value_type,typename std::iterator_traits<It2>::value_type)>::type
+ // should be reference instead of value_type
+#endif
+ >::type reference;
+
+ typedef typename Default::Get<Val,typename boost::remove_cv<typename boost::remove_reference<reference>::type>::type>::type value_type;
+
+ public:
+ typedef boost::iterator_facade<
+ Derived,
+ value_type,
+ iterator_category,
+ reference
+ // the default difference type (std::ptrdiff_t) is expected to be good enough
+ > type;
+};
+}
+
+template <typename F, typename It1, typename It2, typename Ref=Default, typename Val=Default>
+class transforming_pair_iterator
+: public internal::transforming_pair_iterator_helper<transforming_pair_iterator<F,It1,It2,Ref,Val>,F,It1,It2,Ref,Val>::type,
+private internal::Functor_as_base<F>
+{
+ It1 iter1; It2 iter2;
+ friend class boost::iterator_core_access;
+ typedef typename internal::transforming_pair_iterator_helper<transforming_pair_iterator,F,It1,It2,Ref,Val>::type Base;
+ typedef internal::Functor_as_base<F> Functor_base;
+ typename Base::reference dereference()const{
+ return functor()(*iter1,*iter2);
+ }
+ bool equal(transforming_pair_iterator const&i)const{
+ bool b=(iter1==i.iter1);
+ CGAL_assertion(b==(iter2==i.iter2));
+ //FIXME: or do we want only one driving iterator
+ return b;
+ }
+ void increment(){ ++iter1; ++iter2; }
+ void decrement(){ --iter1; --iter2; }
+ void advance(std::ptrdiff_t n){
+ std::advance(iter1,n);
+ std::advance(iter2,n);
+ }
+ std::ptrdiff_t distance_to(transforming_pair_iterator const&i)const{
+ std::ptrdiff_t dist=std::distance(iter1,i.iter1);
+ CGAL_assertion(dist==std::distance(iter2,i.iter2));
+ return dist;
+ }
+ public:
+ using Functor_base::functor;
+ transforming_pair_iterator(){}
+ explicit transforming_pair_iterator(It1 i1,It2 i2,F const& f=F())
+ :Functor_base(f),iter1(i1),iter2(i2){}
+ template<class F2,class J1,class J2,class R2,class V2>
+ transforming_pair_iterator(
+ transforming_pair_iterator<F2,J1,J2,R2,V2> const&i,
+ typename boost::enable_if_convertible<J1, It1>::type* = 0,
+ typename boost::enable_if_convertible<J2, It2>::type* = 0,
+ typename boost::enable_if_convertible<F2, F>::type* = 0)
+ : Functor_base(i.functor()),iter1(i.iter1),iter2(i.iter2) {}
+
+};
+
+template <typename F, typename It1, typename It2> inline
+transforming_pair_iterator<F,It1,It2> make_transforming_pair_iterator(It1 i1, It2 i2, F const&f=F()) {
+ return transforming_pair_iterator<F,It1,It2>(i1,i2,f);
+}
+
+}
+
+#endif // CGAL_TRANSFORMING_PAIR_ITERATOR_H
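Similarly, a minimal sketch (not part of the patch) of iterating two ranges in lockstep with a binary functor; the Sum functor is illustrative and, as above, result_type is only needed on the non-C++11 path.

#include <CGAL/transforming_pair_iterator.h>
#include <vector>
#include <iostream>

struct Sum {
  typedef double result_type;  // used by boost::result_of when decltype is unavailable
  double operator()(double x, double y) const { return x + y; }
};

int main() {
  const std::vector<double> a(3, 1.0);   // {1, 1, 1}
  const std::vector<double> b(3, 10.0);  // {10, 10, 10}
  typedef std::vector<double>::const_iterator Vit;
  typedef CGAL::transforming_pair_iterator<Sum, Vit, Vit> Pit;
  Pit begin = CGAL::make_transforming_pair_iterator(a.begin(), b.begin(), Sum());
  Pit end   = CGAL::make_transforming_pair_iterator(a.end(),   b.end(),   Sum());
  for (; begin != end; ++begin)
    std::cout << *begin << ' ';  // prints 11 11 11
  std::cout << '\n';
  return 0;
}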
diff --git a/src/common/include/gudhi_patches/CGAL/typeset.h b/src/common/include/gudhi_patches/CGAL/typeset.h
new file mode 100644
index 00000000..d4e24281
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/typeset.h
@@ -0,0 +1,117 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_TYPESET_H
+#define CGAL_TYPESET_H
+#include <CGAL/config.h>
+#ifdef CGAL_CXX11
+#include <type_traits>
+#else
+#include <boost/type_traits.hpp>
+#endif
+
+// Sometimes using tuple just to list types is overkill (takes forever to
+// instantiate).
+
+namespace CGAL {
+#ifdef CGAL_CXX11
+ template<class...> struct typeset;
+ template<class H,class...U> struct typeset<H,U...> {
+ typedef H head;
+ typedef typeset<U...> tail;
+ typedef typeset type;
+ template<class X> using contains = typename
+ std::conditional<
+ std::is_same<H,X>::value,
+ std::true_type,
+ typename tail::template contains<X>
+ >::type;
+ template<class X> using add = typename
+ std::conditional<
+ contains<X>::value,
+ typeset<H,U...>,
+ typeset<H,U...,X>
+ >::type;
+ };
+ template<> struct typeset<> {
+ typedef typeset type;
+ template<class X> using contains = std::false_type;
+ template<class X> using add = typeset<X>;
+ };
+#else
+ template<class,class> struct typeset;
+ template<class H=void, class T=typename
+ boost::mpl::if_<boost::is_same<H,void>, void, typeset<void, void> >::type >
+ struct typeset {
+ typedef typeset type;
+ typedef H head;
+ typedef T tail;
+ template<class X> struct contains :
+ boost::mpl::if_<boost::is_same<H,X>,boost::true_type,typename tail::template contains<X> >::type
+ {};
+ template<class X,class=void> struct add;
+ //boost::mpl::if_<boost::is_same<H,X>,typeset,typeset<X,typeset> >::type
+ };
+ template<> struct typeset<> {
+ typedef typeset type;
+ template<class X> struct contains : boost::false_type {};
+ template<class X> struct add : CGAL::typeset<X> {};
+ };
+
+ template<class H,class T>
+ template<class X,class>
+ struct typeset<H,T>::add : typeset<H,typename T::template add<X>::type> {};
+ template<class H,class T>
+ template<class V>
+ struct typeset<H,T>::add<H,V> : typeset<H,T> {};
+#endif
+
+ template<class T1, class T2> struct typeset_union_ :
+ typeset_union_<typename T1::template add<typename T2::head>::type, typename T2::tail>
+ {};
+ template<class T> struct typeset_union_ <T, typeset<> > : T {};
+
+ template<class T1, class T2>
+ struct typeset_intersection_ {
+ typedef typename T1::head H;
+ typedef typename typeset_intersection_<typename T1::tail,T2>::type U;
+ typedef typename
+#ifdef CGAL_CXX11
+ std::conditional<T2::template contains<H>::value,
+#else
+ boost::mpl::if_<typename T2::template contains<H>,
+#endif
+ typename U::template add<H>::type, U>::type type;
+ };
+ template<class T>
+ struct typeset_intersection_<typeset<>,T> : typeset<> {};
+
+#ifdef CGAL_CXX11
+ template<class T1, class T2>
+ using typeset_union = typename typeset_union_<T1,T2>::type;
+ template<class T1, class T2>
+ using typeset_intersection = typename typeset_intersection_<T1,T2>::type;
+#else
+ template<class T1, class T2>
+ struct typeset_union : typeset_union_<T1,T2>::type {};
+ template<class T1, class T2>
+ struct typeset_intersection : typeset_intersection_<T1,T2>::type {};
+#endif
+}
+#endif
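A minimal C++11 sketch of the compile-time set operations this header provides (not part of the patch; it assumes CGAL_CXX11 is defined so the alias templates are available):

#include <CGAL/typeset.h>

typedef CGAL::typeset<int, double> A;
typedef CGAL::typeset<double, char> B;
typedef CGAL::typeset_union<A, B> U;         // contains int, double and char
typedef CGAL::typeset_intersection<A, B> I;  // contains only double

static_assert(U::contains<char>::value, "char belongs to the union");
static_assert(I::contains<double>::value, "double belongs to the intersection");
static_assert(!I::contains<int>::value, "int does not belong to the intersection");

int main() { return 0; }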
diff --git a/src/common/include/gudhi_patches/Tangential_complex_CGAL_patches.txt b/src/common/include/gudhi_patches/Tangential_complex_CGAL_patches.txt
new file mode 100644
index 00000000..5b9581a0
--- /dev/null
+++ b/src/common/include/gudhi_patches/Tangential_complex_CGAL_patches.txt
@@ -0,0 +1,82 @@
+CGAL/Regular_triangulation_traits_adapter.h
+CGAL/Triangulation_ds_vertex.h
+CGAL/Triangulation_data_structure.h
+CGAL/transforming_pair_iterator.h
+CGAL/NewKernel_d/static_int.h
+CGAL/NewKernel_d/Cartesian_LA_functors.h
+CGAL/NewKernel_d/Cartesian_change_FT.h
+CGAL/NewKernel_d/Wrapper/Vector_d.h
+CGAL/NewKernel_d/Wrapper/Hyperplane_d.h
+CGAL/NewKernel_d/Wrapper/Ref_count_obj.h
+CGAL/NewKernel_d/Wrapper/Cartesian_wrap.h
+CGAL/NewKernel_d/Wrapper/Point_d.h
+CGAL/NewKernel_d/Wrapper/Segment_d.h
+CGAL/NewKernel_d/Wrapper/Weighted_point_d.h
+CGAL/NewKernel_d/Wrapper/Sphere_d.h
+CGAL/NewKernel_d/Cartesian_per_dimension.h
+CGAL/NewKernel_d/Kernel_object_converter.h
+CGAL/NewKernel_d/KernelD_converter.h
+CGAL/NewKernel_d/Vector/sse2.h
+CGAL/NewKernel_d/Vector/avx4.h
+CGAL/NewKernel_d/Vector/determinant_of_vectors_small_dim_internal.h
+CGAL/NewKernel_d/Vector/determinant_of_iterator_to_points_from_points.h
+CGAL/NewKernel_d/Vector/determinant_of_points_from_vectors.h
+CGAL/NewKernel_d/Vector/array.h
+CGAL/NewKernel_d/Vector/determinant_of_iterator_to_points_from_iterator_to_vectors.h
+CGAL/NewKernel_d/Vector/determinant_of_iterator_to_vectors_from_vectors.h
+CGAL/NewKernel_d/Vector/determinant_of_vectors_small_dim.h
+CGAL/NewKernel_d/Vector/vector.h
+CGAL/NewKernel_d/Vector/v2int.h
+CGAL/NewKernel_d/Vector/mix.h
+CGAL/NewKernel_d/Cartesian_static_filters.h
+CGAL/NewKernel_d/Cartesian_LA_base.h
+CGAL/NewKernel_d/Lazy_cartesian.h
+CGAL/NewKernel_d/Coaffine.h
+CGAL/NewKernel_d/store_kernel.h
+CGAL/NewKernel_d/Dimension_base.h
+CGAL/NewKernel_d/Kernel_3_interface.h
+CGAL/NewKernel_d/Cartesian_complete.h
+CGAL/NewKernel_d/Cartesian_base.h
+CGAL/NewKernel_d/Cartesian_filter_K.h
+CGAL/NewKernel_d/functor_tags.h
+CGAL/NewKernel_d/Filtered_predicate2.h
+CGAL/NewKernel_d/functor_properties.h
+CGAL/NewKernel_d/Define_kernel_types.h
+CGAL/NewKernel_d/LA_eigen/LA.h
+CGAL/NewKernel_d/LA_eigen/constructors.h
+CGAL/NewKernel_d/Types/Aff_transformation.h
+CGAL/NewKernel_d/Types/Sphere.h
+CGAL/NewKernel_d/Types/Hyperplane.h
+CGAL/NewKernel_d/Types/Line.h
+CGAL/NewKernel_d/Types/Ray.h
+CGAL/NewKernel_d/Types/Iso_box.h
+CGAL/NewKernel_d/Types/Weighted_point.h
+CGAL/NewKernel_d/Types/Segment.h
+CGAL/NewKernel_d/Kernel_d_interface.h
+CGAL/NewKernel_d/utils.h
+CGAL/NewKernel_d/Kernel_2_interface.h
+CGAL/NewKernel_d/Cartesian_filter_NT.h
+CGAL/NewKernel_d/function_objects_cartesian.h
+CGAL/Convex_hull.h
+CGAL/Triangulation_ds_full_cell.h
+CGAL/Regular_triangulation.h
+CGAL/Epick_d.h
+CGAL/transforming_iterator.h
+CGAL/iterator_from_indices.h
+CGAL/Delaunay_triangulation.h
+CGAL/IO/Triangulation_off_ostream.h
+CGAL/typeset.h
+CGAL/Triangulation_full_cell.h
+CGAL/Triangulation.h
+CGAL/internal/Static_or_dynamic_array.h
+CGAL/internal/Combination_enumerator.h
+CGAL/internal/Triangulation/utilities.h
+CGAL/internal/Triangulation/Triangulation_ds_iterators.h
+CGAL/internal/Triangulation/Dummy_TDS.h
+CGAL/argument_swaps.h
+CGAL/Epeck_d.h
+CGAL/determinant_of_vectors.h
+CGAL/TDS_full_cell_default_storage_policy.h
+CGAL/TDS_full_cell_mirror_storage_policy.h
+CGAL/Triangulation_face.h
+CGAL/Triangulation_vertex.h
diff --git a/src/common/test/CMakeLists.txt b/src/common/test/CMakeLists.txt
index 7ccdb752..baa24539 100644
--- a/src/common/test/CMakeLists.txt
+++ b/src/common/test/CMakeLists.txt
@@ -13,12 +13,21 @@ endif()
add_executable ( poffreader_UT test_points_off_reader.cpp )
target_link_libraries(poffreader_UT ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
+add_executable ( distancematrixreader_UT test_distance_matrix_reader.cpp )
+target_link_libraries(distancematrixreader_UT ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
+
# Do not forget to copy test files in current binary dir
file(COPY "${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+file(COPY "${CMAKE_SOURCE_DIR}/data/distance_matrix/lower_triangular_distance_matrix.csv" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+file(COPY "${CMAKE_SOURCE_DIR}/data/distance_matrix/full_square_distance_matrix.csv" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
# Unitary tests
add_test(poffreader_UT ${CMAKE_CURRENT_BINARY_DIR}/poffreader_UT
# XML format for Jenkins xUnit plugin
--log_format=XML --log_sink=${CMAKE_SOURCE_DIR}/poffreader_UT.xml --log_level=test_suite --report_level=no)
+add_test(distancematrixreader_UT ${CMAKE_CURRENT_BINARY_DIR}/distancematrixreader_UT
+ # XML format for Jenkins xUnit plugin
+ --log_format=XML --log_sink=${CMAKE_SOURCE_DIR}/distancematrixreader_UT.xml --log_level=test_suite --report_level=no)
+
diff --git a/src/common/test/test_distance_matrix_reader.cpp b/src/common/test/test_distance_matrix_reader.cpp
new file mode 100644
index 00000000..95a73bd9
--- /dev/null
+++ b/src/common/test/test_distance_matrix_reader.cpp
@@ -0,0 +1,85 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <gudhi/reader_utils.h>
+
+#include <iostream>
+#include <string>
+#include <vector>
+
+#define BOOST_TEST_DYN_LINK
+#define BOOST_TEST_MODULE "distance_matrix_reader"
+#include <boost/test/unit_test.hpp>
+
+using Distance_matrix = std::vector<std::vector<double>>;
+
+BOOST_AUTO_TEST_CASE( lower_triangular_distance_matrix )
+{
+ Distance_matrix from_lower_triangular;
+ // Read lower_triangular_distance_matrix.csv file where the separator is a ','
+ from_lower_triangular = read_lower_triangular_matrix_from_csv_file<double>("lower_triangular_distance_matrix.csv",
+ ',');
+ for (auto& i : from_lower_triangular) {
+ for (auto j : i) {
+ std::cout << j << " ";
+ }
+ std::cout << std::endl;
+ }
+ std::cout << "from_lower_triangular size = " << from_lower_triangular.size() << std::endl;
+ BOOST_CHECK(from_lower_triangular.size() == 5);
+
+ for (std::size_t i = 0; i < from_lower_triangular.size(); i++) {
+ std::cout << "from_lower_triangular[" << i << "] size = " << from_lower_triangular[i].size() << std::endl;
+ BOOST_CHECK(from_lower_triangular[i].size() == i);
+ }
+ std::vector<double> expected = {1};
+ BOOST_CHECK(from_lower_triangular[1] == expected);
+
+ expected = {2,3};
+ BOOST_CHECK(from_lower_triangular[2] == expected);
+
+ expected = {4,5,6};
+ BOOST_CHECK(from_lower_triangular[3] == expected);
+
+ expected = {7,8,9,10};
+ BOOST_CHECK(from_lower_triangular[4] == expected);
+
+}
+
+BOOST_AUTO_TEST_CASE( full_square_distance_matrix )
+{
+ Distance_matrix from_full_square;
+ // Read full_square_distance_matrix.csv file where the separator is the default one ';'
+ from_full_square = read_lower_triangular_matrix_from_csv_file<double>("full_square_distance_matrix.csv");
+ for (auto& i : from_full_square) {
+ for (auto j : i) {
+ std::cout << j << " ";
+ }
+ std::cout << std::endl;
+ }
+ std::cout << "from_full_square size = " << from_full_square.size() << std::endl;
+ BOOST_CHECK(from_full_square.size() == 7);
+ for (std::size_t i = 0; i < from_full_square.size(); i++) {
+ std::cout << "from_full_square[" << i << "] size = " << from_full_square[i].size() << std::endl;
+ BOOST_CHECK(from_full_square[i].size() == i);
+ }
+}
diff --git a/src/common/test/test_points_off_reader.cpp b/src/common/test/test_points_off_reader.cpp
index b4f71182..0a78d190 100644
--- a/src/common/test/test_points_off_reader.cpp
+++ b/src/common/test/test_points_off_reader.cpp
@@ -4,7 +4,7 @@
*
* Author(s): Vincent Rouvreau
*
- * Copyright (C) 2015 INRIA Saclay (France)
+ * Copyright (C) 2015
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
diff --git a/src/cython/CMakeLists.txt b/src/cython/CMakeLists.txt
new file mode 100644
index 00000000..9dcb2cf4
--- /dev/null
+++ b/src/cython/CMakeLists.txt
@@ -0,0 +1,243 @@
+cmake_minimum_required(VERSION 2.8)
+project(Cython)
+
+macro( find_the_lib placeholder THE_LIBS )
+ set (THE_LIB_WE_FOUND "NO")
+ foreach(THE_LIB ${THE_LIBS})
+ if(EXISTS ${THE_LIB})
+ get_filename_component(THE_LIB_WE ${THE_LIB} NAME_WE)
+ if (NOT THE_LIB_WE_FOUND)
+ set (THE_LIB_WE_FOUND "YES")
+ set(returnValue "${THE_LIB_WE}")
+ endif(NOT THE_LIB_WE_FOUND)
+ endif(EXISTS ${THE_LIB})
+ endforeach(THE_LIB ${THE_LIBS})
+endmacro( find_the_lib )
+
+# Find the correct Python interpreter. Can be set with -DPYTHON_EXECUTABLE=/usr/bin/python3 for instance.
+if(PYTHON_EXECUTABLE)
+ if(NOT EXISTS "${PYTHON_EXECUTABLE}")
+ message(FATAL_ERROR "ERROR: ${PYTHON_EXECUTABLE} does not exist.")
+ endif(NOT EXISTS "${PYTHON_EXECUTABLE}")
+endif(PYTHON_EXECUTABLE)
+include(FindPythonInterp)
+
+if(PYTHONINTERP_FOUND)
+ if(PYTHON_VERSION_MAJOR EQUAL 2)
+ FIND_PROGRAM(CYTHON_PATH cython)
+ # Unitary tests are available through py.test
+ find_program( PYTEST_PATH py.test )
+ # Documentation generation is available through sphinx
+ find_program( SPHINX_PATH sphinx-build )
+ elseif(PYTHON_VERSION_MAJOR EQUAL 3)
+ FIND_PROGRAM(CYTHON_PATH cython3)
+ # Unitary tests are available through py.test
+ find_program( PYTEST_PATH py.test )
+ # Documentation generation is available through sphinx
+ set(SPHINX_PATH "${CMAKE_CURRENT_BINARY_DIR}/doc/python3-sphinx-build")
+ else()
+ message(FATAL_ERROR "ERROR: Cannot compile the Cython interface. Python version ${PYTHON_VERSION_STRING} is not supported.")
+ endif(PYTHON_VERSION_MAJOR EQUAL 2)
+endif(PYTHONINTERP_FOUND)
+
+if(CYTHON_PATH)
+ message("${PYTHON_EXECUTABLE} v.${PYTHON_VERSION_STRING} - Cython is ${CYTHON_PATH} - py.test is ${PYTEST_PATH} - Sphinx is ${SPHINX_PATH}")
+
+ set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DBOOST_RESULT_OF_USE_DECLTYPE', ")
+ set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DBOOST_ALL_NO_LIB', ")
+ set(GUDHI_CYTHON_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}'${Boost_LIBRARY_DIRS}', ")
+ if(WIN32)
+ set( returnValue "" )
+ find_the_lib (${returnValue} ${Boost_SYSTEM_LIBRARY})
+ set(BOOST_SYSTEM_LIB_NAME ${returnValue})
+ else()
+ set(BOOST_SYSTEM_LIB_NAME "boost_system")
+ endif()
+ set(GUDHI_CYTHON_LIBRARIES "${GUDHI_CYTHON_LIBRARIES}'${BOOST_SYSTEM_LIB_NAME}', ")
+
+ # Gudhi and CGAL compilation option
+ if(MSVC)
+ set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'/fp:strict', ")
+ else(MSVC)
+ set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-std=c++11', ")
+ endif(MSVC)
+ if(CMAKE_COMPILER_IS_GNUCXX)
+ set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-frounding-math', ")
+ endif(CMAKE_COMPILER_IS_GNUCXX)
+ if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Intel")
+ set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-fp-model strict', ")
+ endif("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Intel")
+ if (DEBUG_TRACES)
+ # For programs to be more verbose
+ set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DDEBUG_TRACES', ")
+ endif()
+
+ if (EIGEN3_FOUND)
+ # No problem, even if no CGAL found
+ set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_EIGEN3_ENABLED', ")
+ endif (EIGEN3_FOUND)
+
+ # Recursively copy the include, cython, example, doc and test directories before package detection
+ # Some test and doc files are removed below when the corresponding packages are not found
+ file(COPY include DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
+ file(COPY cython DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
+ file(COPY example DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
+ file(COPY test DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
+ file(COPY doc DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
+ # Developer version for doc images
+ file(GLOB GUDHI_DEV_DOC_IMAGES "${CMAKE_SOURCE_DIR}/src/*/doc/*.png")
+ file(COPY ${GUDHI_DEV_DOC_IMAGES} DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/doc/img")
+ file(GLOB GUDHI_DEV_DOC_IMAGES "${CMAKE_SOURCE_DIR}/src/*/doc/*.svg")
+ file(COPY ${GUDHI_DEV_DOC_IMAGES} DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/doc/img")
+ # User version for doc images
+ file(GLOB GUDHI_USER_DOC_IMAGES "${CMAKE_SOURCE_DIR}/doc/*/*.png")
+ file(COPY ${GUDHI_USER_DOC_IMAGES} DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/doc/img")
+ file(GLOB GUDHI_USER_DOC_IMAGES "${CMAKE_SOURCE_DIR}/doc/*/*.svg")
+ file(COPY ${GUDHI_USER_DOC_IMAGES} DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/doc/img")
+ # Biblio
+ file(GLOB GUDHI_BIB_FILES "${CMAKE_SOURCE_DIR}/biblio/*.bib")
+ file(COPY ${GUDHI_BIB_FILES} DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/doc/")
+ # Cubical complex perseus doc example
+ file(GLOB GUDHI_CUBICAL_PERSEUS_FILES "${CMAKE_SOURCE_DIR}/data/bitmap/*cubicalcomplexdoc.txt")
+ file(COPY ${GUDHI_CUBICAL_PERSEUS_FILES} DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/doc/")
+ file(COPY "${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/doc/")
+ file(COPY "${CMAKE_SOURCE_DIR}/data/distance_matrix/full_square_distance_matrix.csv" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/doc/")
+ # Persistence graphical tools examples
+ file(COPY "${CMAKE_SOURCE_DIR}/data/bitmap/3d_torus.txt" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/doc/")
+ file(COPY "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/doc/")
+
+ if (NOT CGAL_VERSION VERSION_LESS 4.8.1)
+ # If CGAL_VERSION >= 4.8.1, include subsampling
+ set(GUDHI_CYTHON_SUBSAMPLING "include 'cython/subsampling.pyx'")
+ set(GUDHI_CYTHON_TANGENTIAL_COMPLEX "include 'cython/tangential_complex.pyx'")
+ set(GUDHI_CYTHON_BOTTLENECK_DISTANCE "include 'cython/bottleneck_distance.pyx'")
+ else (NOT CGAL_VERSION VERSION_LESS 4.8.1)
+ # Remove subsampling unitary tests
+ file(REMOVE ${CMAKE_CURRENT_BINARY_DIR}/test/test_subsampling.py)
+ file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/subsampling_ref.rst")
+ file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/subsampling_sum.rst")
+ file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/subsampling_user.rst")
+ # Remove tangential complex and bottleneck unitary tests
+ file(REMOVE ${CMAKE_CURRENT_BINARY_DIR}/test/test_tangential_complex.py)
+ file(REMOVE ${CMAKE_CURRENT_BINARY_DIR}/test/test_bottleneck_distance.py)
+ file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/bottleneck_distance_ref.rst")
+ file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/bottleneck_distance_sum.rst")
+ file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/bottleneck_distance_user.rst")
+ file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/tangential_complex_ref.rst")
+ file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/tangential_complex_sum.rst")
+ file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/tangential_complex_user.rst")
+ endif (NOT CGAL_VERSION VERSION_LESS 4.8.1)
+ if (NOT CGAL_VERSION VERSION_LESS 4.7.0)
+ # If CGAL_VERSION >= 4.7.0, include alpha
+ set(GUDHI_CYTHON_ALPHA_COMPLEX "include 'cython/alpha_complex.pyx'")
+ else (NOT CGAL_VERSION VERSION_LESS 4.7.0)
+ # Remove alpha complex unitary tests
+ file(REMOVE ${CMAKE_CURRENT_BINARY_DIR}/test/test_alpha_complex.py)
+ file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/alpha_complex_ref.rst")
+ file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/alpha_complex_sum.rst")
+ file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/alpha_complex_user.rst")
+ endif (NOT CGAL_VERSION VERSION_LESS 4.7.0)
+ if (NOT CGAL_VERSION VERSION_LESS 4.6.0)
+ # If CGAL_VERSION >= 4.6.0, include euclidean versions of witness complex
+ set(GUDHI_CYTHON_EUCLIDEAN_WITNESS_COMPLEX
+ "include 'cython/euclidean_witness_complex.pyx'\ninclude 'cython/euclidean_strong_witness_complex.pyx'\n")
+ else (NOT CGAL_VERSION VERSION_LESS 4.6.0)
+ # Remove euclidean witness complex unitary tests
+ file(REMOVE ${CMAKE_CURRENT_BINARY_DIR}/test/test_euclidean_witness_complex.py)
+ file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/euclidean_witness_complex_ref.rst")
+ file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/euclidean_strong_witness_complex_ref.rst")
+ endif (NOT CGAL_VERSION VERSION_LESS 4.6.0)
+
+ if(CGAL_FOUND)
+ # Add CGAL compilation args
+ if(CGAL_HEADER_ONLY)
+ set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_HEADER_ONLY', ")
+ else(CGAL_HEADER_ONLY)
+ if(WIN32)
+ set(GUDHI_CYTHON_LIBRARIES "${GUDHI_CYTHON_LIBRARIES}'CGAL-vc140-mt-4.7', ")
+ else(WIN32)
+ set(GUDHI_CYTHON_LIBRARIES "${GUDHI_CYTHON_LIBRARIES}'CGAL', ")
+ endif(WIN32)
+ set(GUDHI_CYTHON_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}'${CGAL_LIBRARIES_DIR}', ")
+ endif(CGAL_HEADER_ONLY)
+ # GMP and GMPXX are not required, but if present, CGAL will link with them.
+ if(GMP_FOUND)
+ set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_USE_GMP', ")
+ if(WIN32)
+ set(GUDHI_CYTHON_LIBRARIES "${GUDHI_CYTHON_LIBRARIES}'libgmp-10', ")
+ else(WIN32)
+ set(GUDHI_CYTHON_LIBRARIES "${GUDHI_CYTHON_LIBRARIES}'gmp', ")
+ endif(WIN32)
+ set(GUDHI_CYTHON_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}'${GMP_LIBRARIES_DIR}', ")
+ if(GMPXX_FOUND)
+ set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_USE_GMPXX', ")
+ set(GUDHI_CYTHON_LIBRARIES "${GUDHI_CYTHON_LIBRARIES}'gmpxx', ")
+ set(GUDHI_CYTHON_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}'${GMPXX_LIBRARIES_DIR}', ")
+ endif(GMPXX_FOUND)
+ endif(GMP_FOUND)
+ endif(CGAL_FOUND)
+
+ # Specific for Mac
+ if (${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
+ set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-mmacosx-version-min=10.9', ")
+ set(GUDHI_CYTHON_EXTRA_LINK_ARGS "${GUDHI_CYTHON_EXTRA_LINK_ARGS}'-mmacosx-version-min=10.9', ")
+ endif(${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
+
+ # Loop on INCLUDE_DIRECTORIES PROPERTY
+ get_property(GUDHI_INCLUDE_DIRECTORIES DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} PROPERTY INCLUDE_DIRECTORIES)
+ foreach(GUDHI_INCLUDE_DIRECTORY ${GUDHI_INCLUDE_DIRECTORIES})
+ set(GUDHI_CYTHON_INCLUDE_DIRS "${GUDHI_CYTHON_INCLUDE_DIRS}'${GUDHI_INCLUDE_DIRECTORY}', ")
+ endforeach()
+ set(GUDHI_CYTHON_INCLUDE_DIRS "${GUDHI_CYTHON_INCLUDE_DIRS}'${CMAKE_SOURCE_DIR}/${GUDHI_CYTHON_PATH}/include', ")
+
+ if (TBB_FOUND)
+ set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DGUDHI_USE_TBB', ")
+ set(GUDHI_CYTHON_LIBRARIES "${GUDHI_CYTHON_LIBRARIES}'tbb', 'tbbmalloc', ")
+ set(GUDHI_CYTHON_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}'${TBB_LIBRARY_DIRS}', ")
+ set(GUDHI_CYTHON_INCLUDE_DIRS "${GUDHI_CYTHON_INCLUDE_DIRS}'${TBB_INCLUDE_DIRS}', ")
+ endif()
+
+ # set sphinx-build in make files
+ configure_file(doc/Makefile.in "${CMAKE_CURRENT_BINARY_DIR}/doc/Makefile" @ONLY)
+ configure_file(doc/make.bat.in "${CMAKE_CURRENT_BINARY_DIR}/doc/make.bat" @ONLY)
+
+ # Generate cythonize_gudhi.py file to cythonize Gudhi
+ configure_file(cythonize_gudhi.py.in "${CMAKE_CURRENT_BINARY_DIR}/cythonize_gudhi.py" @ONLY)
+ # Generate gudhi.pyx - Gudhi cython file
+ configure_file(gudhi.pyx.in "${CMAKE_CURRENT_BINARY_DIR}/gudhi.pyx" @ONLY)
+
+ add_custom_command(
+ OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/gudhi.so"
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_BINARY_DIR}/cythonize_gudhi.py" "build_ext" "--inplace")
+
+ add_custom_target(cython ALL DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/gudhi.so"
+ COMMENT "Do not forget to add ${CMAKE_CURRENT_BINARY_DIR}/ to your PYTHONPATH before using examples or tests")
+
+ if(UNIX)
+ set( ENV{PYTHONPATH} $ENV{PYTHONPATH}:${CMAKE_CURRENT_BINARY_DIR}/ )
+ endif(UNIX)
+
+ # Unitary tests are available through py.test
+ if(PYTEST_PATH)
+ add_test(
+ NAME gudhi_cython_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${PYTHON_EXECUTABLE} "${PYTEST_PATH}")
+ set_tests_properties(gudhi_cython_py_test PROPERTIES ENVIRONMENT "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}")
+ endif(PYTEST_PATH)
+
+ # Documentation generation is available through sphinx
+ if(SPHINX_PATH)
+ if (UNIX)
+ add_custom_target(sphinx
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/doc
+ DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/gudhi.so"
+ COMMAND make html doctest)
+ else (UNIX)
+ add_custom_target(sphinx
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/doc
+ COMMAND make.bat html doctest)
+ endif (UNIX)
+ endif(SPHINX_PATH)
+endif(CYTHON_PATH)
diff --git a/src/cython/CONVENTIONS b/src/cython/CONVENTIONS
new file mode 100644
index 00000000..804e97f3
--- /dev/null
+++ b/src/cython/CONVENTIONS
@@ -0,0 +1,9 @@
+Gudhi follows the PEP8 conventions.
+
+Please refer to:
+https://www.python.org/dev/peps/pep-0008/
+
+A summary:
+ - modules (filenames) should have short, all-lowercase names, and they can contain underscores.
+ - packages (directories) should have short, all-lowercase names, preferably without underscores.
+ - classes should use the CapWords convention. \ No newline at end of file
diff --git a/src/cython/README b/src/cython/README
new file mode 100644
index 00000000..7d2c4491
--- /dev/null
+++ b/src/cython/README
@@ -0,0 +1,3 @@
+
+If you do not want to install the package, just run the following command to help Python find the compiled package:
+$> export PYTHONPATH=`pwd`:$PYTHONPATH
diff --git a/src/cython/cython/alpha_complex.pyx b/src/cython/cython/alpha_complex.pyx
new file mode 100644
index 00000000..a0e8f9b7
--- /dev/null
+++ b/src/cython/cython/alpha_complex.pyx
@@ -0,0 +1,121 @@
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.utility cimport pair
+from libcpp.string cimport string
+from libcpp cimport bool
+import os
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+cdef extern from "Alpha_complex_interface.h" namespace "Gudhi":
+ cdef cppclass Alpha_complex_interface "Gudhi::alpha_complex::Alpha_complex_interface":
+ Alpha_complex_interface(vector[vector[double]] points)
+ # bool from_file is a workaround for cython to find the correct signature
+ Alpha_complex_interface(string off_file, bool from_file)
+ vector[double] get_point(int vertex)
+ void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square)
+
+# AlphaComplex python interface
+cdef class AlphaComplex:
+ """AlphaComplex is a simplicial complex constructed from the finite cells
+ of a Delaunay Triangulation.
+
+ The filtration value of each simplex is computed as the square of the
+ circumradius of the simplex if the circumsphere is empty (the simplex is
+ then said to be Gabriel), and as the minimum of the filtration values of
+ the codimension 1 cofaces that make it not Gabriel otherwise.
+
+ All simplices that have a filtration value strictly greater than a given
+ alpha squared value are not inserted into the complex.
+
+ .. note::
+
+ When Alpha_complex is constructed with an infinite value of alpha, the
+ complex is a Delaunay complex.
+
+ """
+
+ cdef Alpha_complex_interface * thisptr
+
+ # Fake constructor that does nothing but document the constructor
+ def __init__(self, points=None, off_file=''):
+ """AlphaComplex constructor.
+
+ :param points: A list of points in dimension d.
+ :type points: list of list of double
+
+ Or
+
+ :param off_file: The path to an OFF file.
+ :type off_file: string
+ """
+
+ # The real cython constructor
+ def __cinit__(self, points=None, off_file=''):
+ if off_file != '':
+ if os.path.isfile(off_file):
+ self.thisptr = new Alpha_complex_interface(str.encode(off_file), True)
+ else:
+ print("file " + off_file + " not found.")
+ else:
+ if points is None:
+ # Empty Alpha construction
+ points=[]
+ self.thisptr = new Alpha_complex_interface(points)
+
+
+ def __dealloc__(self):
+ if self.thisptr != NULL:
+ del self.thisptr
+
+ def __is_defined(self):
+ """Returns true if AlphaComplex pointer is not NULL.
+ """
+ return self.thisptr != NULL
+
+ def get_point(self, vertex):
+ """This function returns the point corresponding to a given vertex.
+
+ :param vertex: The vertex.
+ :type vertex: int
+ :rtype: list of float
+ :returns: the point.
+ """
+ cdef vector[double] point = self.thisptr.get_point(vertex)
+ return point
+
+ def create_simplex_tree(self, max_alpha_square=float('inf')):
+ """
+ :param max_alpha_square: The maximum alpha square threshold the
+ simplices shall not exceed. Default is set to infinity.
+ :type max_alpha_square: float
+ :returns: A simplex tree created from the Delaunay Triangulation.
+ :rtype: SimplexTree
+ """
+ simplex_tree = SimplexTree()
+ self.thisptr.create_simplex_tree(simplex_tree.thisptr, max_alpha_square)
+ return simplex_tree
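As a quick illustration of the binding above, here is a minimal usage sketch. It assumes the compiled extension is importable as `gudhi`; the point coordinates and the 2.0 threshold are arbitrary example values.

    import gudhi

    # Four corners of the unit square; any list of lists of doubles works.
    alpha = gudhi.AlphaComplex(points=[[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]])

    # Keep only simplices whose filtration value (squared circumradius) is at most 2.0.
    simplex_tree = alpha.create_simplex_tree(max_alpha_square=2.0)
    print(simplex_tree.num_vertices(), simplex_tree.num_simplices())
    print(alpha.get_point(0))  # coordinates of vertex 0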
diff --git a/src/cython/cython/bottleneck_distance.pyx b/src/cython/cython/bottleneck_distance.pyx
new file mode 100644
index 00000000..ee3e6ef9
--- /dev/null
+++ b/src/cython/cython/bottleneck_distance.pyx
@@ -0,0 +1,59 @@
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.utility cimport pair
+import os
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+cdef extern from "Bottleneck_distance_interface.h" namespace "Gudhi::persistence_diagram":
+ double bottleneck(vector[pair[double, double]], vector[pair[double, double]], double)
+ double bottleneck(vector[pair[double, double]], vector[pair[double, double]])
+
+def bottleneck_distance(diagram_1, diagram_2, e=0.0):
+    """This function returns the bottleneck distance between two persistence diagrams.
+
+ :param diagram_1: The first diagram.
+ :type diagram_1: vector[pair[double, double]]
+ :param diagram_2: The second diagram.
+ :type diagram_2: vector[pair[double, double]]
+ :param e: If `e` is 0, this uses an expensive algorithm to compute the
+ exact distance.
+ If `e` is not 0, it asks for an additive `e`-approximation, and
+ currently also allows a small multiplicative error (the last 2 or 3
+ bits of the mantissa may be wrong). This version of the algorithm takes
+ advantage of the limited precision of `double` and is usually a lot
+ faster to compute, whatever the value of `e`.
+
+    By default, `e` is 0.0 and the exact distance is computed.
+ :type e: float
+ :rtype: float
+ :returns: the bottleneck distance.
+ """
+    if e == 0.0:
+ return bottleneck(diagram_1, diagram_2)
+ else:
+ return bottleneck(diagram_1, diagram_2, e)
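A minimal sketch of how the function above could be called, assuming the module is importable as `gudhi`; the two diagrams are made-up example values.

    import gudhi

    diag_1 = [(2.7, 3.7), (9.6, 14.0), (34.2, 34.974)]
    diag_2 = [(2.8, 4.45), (9.5, 14.1)]

    print(gudhi.bottleneck_distance(diag_1, diag_2))       # exact algorithm (e == 0.0)
    print(gudhi.bottleneck_distance(diag_1, diag_2, 0.1))  # additive 0.1-approximation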
diff --git a/src/cython/cython/cubical_complex.pyx b/src/cython/cython/cubical_complex.pyx
new file mode 100644
index 00000000..1ed6bc5e
--- /dev/null
+++ b/src/cython/cython/cubical_complex.pyx
@@ -0,0 +1,197 @@
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.utility cimport pair
+from libcpp.string cimport string
+from libcpp cimport bool
+import os
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+cdef extern from "Cubical_complex_interface.h" namespace "Gudhi":
+ cdef cppclass Bitmap_cubical_complex_base_interface "Gudhi::Cubical_complex::Cubical_complex_interface<>":
+ Bitmap_cubical_complex_base_interface(vector[unsigned] dimensions, vector[double] top_dimensional_cells)
+ Bitmap_cubical_complex_base_interface(string perseus_file)
+ int num_simplices()
+ int dimension()
+
+cdef extern from "Persistent_cohomology_interface.h" namespace "Gudhi":
+ cdef cppclass Cubical_complex_persistence_interface "Gudhi::Persistent_cohomology_interface<Gudhi::Cubical_complex::Cubical_complex_interface<>>":
+ Cubical_complex_persistence_interface(Bitmap_cubical_complex_base_interface * st, bool persistence_dim_max)
+ vector[pair[int, pair[double, double]]] get_persistence(int homology_coeff_field, double min_persistence)
+ vector[int] betti_numbers()
+ vector[int] persistent_betti_numbers(double from_value, double to_value)
+ vector[pair[double,double]] intervals_in_dimension(int dimension)
+
+# CubicalComplex python interface
+cdef class CubicalComplex:
+ """The CubicalComplex is an example of a structured complex useful in
+ computational mathematics (specially rigorous numerics) and image
+ analysis.
+ """
+ cdef Bitmap_cubical_complex_base_interface * thisptr
+
+ cdef Cubical_complex_persistence_interface * pcohptr
+
+ # Fake constructor that does nothing but documenting the constructor
+ def __init__(self, dimensions=None, top_dimensional_cells=None,
+ perseus_file=''):
+ """CubicalComplex constructor from dimensions and
+ top_dimensional_cells or from a perseus file style name.
+
+ :param dimensions: A list of number of top dimensional cells.
+ :type dimensions: list of int
+ :param top_dimensional_cells: A list of top dimensional cells.
+ :type top_dimensional_cells: list of double
+
+ Or
+
+ :param perseus_file: A perseus file style name.
+ :type perseus_file: string
+ """
+
+ # The real cython constructor
+ def __cinit__(self, dimensions=None, top_dimensional_cells=None,
+ perseus_file=''):
+        if (dimensions is not None) and (top_dimensional_cells is not None) and (perseus_file == ''):
+            self.thisptr = new Bitmap_cubical_complex_base_interface(dimensions, top_dimensional_cells)
+        elif (dimensions is None) and (top_dimensional_cells is None) and (perseus_file != ''):
+ if os.path.isfile(perseus_file):
+ self.thisptr = new Bitmap_cubical_complex_base_interface(str.encode(perseus_file))
+ else:
+ print("file " + perseus_file + " not found.")
+ else:
+ print("CubicalComplex can be constructed from dimensions and "
+ "top_dimensional_cells or from a perseus file style name.")
+
+ def __dealloc__(self):
+ if self.thisptr != NULL:
+ del self.thisptr
+ if self.pcohptr != NULL:
+ del self.pcohptr
+
+ def __is_defined(self):
+ """Returns true if CubicalComplex pointer is not NULL.
+ """
+ return self.thisptr != NULL
+
+ def __is_persistence_defined(self):
+ """Returns true if Persistence pointer is not NULL.
+ """
+ return self.pcohptr != NULL
+
+ def num_simplices(self):
+ """This function returns the number of simplices of the simplicial
+ complex.
+
+ :returns: int -- the simplicial complex number of simplices.
+ """
+ return self.thisptr.num_simplices()
+
+ def dimension(self):
+ """This function returns the dimension of the simplicial complex.
+
+ :returns: int -- the simplicial complex dimension.
+ """
+ return self.thisptr.dimension()
+
+ def persistence(self, homology_coeff_field=11, min_persistence=0):
+ """This function returns the persistence of the simplicial complex.
+
+ :param homology_coeff_field: The homology coefficient field. Must be a
+ prime number
+ :type homology_coeff_field: int.
+ :param min_persistence: The minimum persistence value to take into
+ account (strictly greater than min_persistence). Default value is
+ 0.0.
+            Set min_persistence to -1.0 to see all values.
+ :type min_persistence: float.
+ :returns: list of pairs(dimension, pair(birth, death)) -- the
+ persistence of the simplicial complex.
+ """
+ if self.pcohptr != NULL:
+ del self.pcohptr
+ if self.thisptr != NULL:
+ self.pcohptr = new Cubical_complex_persistence_interface(self.thisptr, True)
+ cdef vector[pair[int, pair[double, double]]] persistence_result
+ if self.pcohptr != NULL:
+ persistence_result = self.pcohptr.get_persistence(homology_coeff_field, min_persistence)
+ return persistence_result
+
+ def betti_numbers(self):
+ """This function returns the Betti numbers of the simplicial complex.
+
+ :returns: list of int -- The Betti numbers ([B0, B1, ..., Bn]).
+
+ :note: betti_numbers function requires persistence function to be
+ launched first.
+ """
+ cdef vector[int] bn_result
+ if self.pcohptr != NULL:
+ bn_result = self.pcohptr.betti_numbers()
+ return bn_result
+
+ def persistent_betti_numbers(self, from_value, to_value):
+ """This function returns the persistent Betti numbers of the
+ simplicial complex.
+
+ :param from_value: The persistence birth limit to be added in the
+ numbers (persistent birth <= from_value).
+ :type from_value: float.
+ :param to_value: The persistence death limit to be added in the
+ numbers (persistent death > to_value).
+ :type to_value: float.
+
+ :returns: list of int -- The persistent Betti numbers ([B0, B1, ...,
+ Bn]).
+
+ :note: persistent_betti_numbers function requires persistence
+ function to be launched first.
+ """
+ cdef vector[int] pbn_result
+ if self.pcohptr != NULL:
+ pbn_result = self.pcohptr.persistent_betti_numbers(<double>from_value, <double>to_value)
+ return pbn_result
+
+ def persistence_intervals_in_dimension(self, dimension):
+ """This function returns the persistence intervals of the simplicial
+ complex in a specific dimension.
+
+ :param dimension: The specific dimension.
+        :type dimension: int.
+ :returns: The persistence intervals.
+ :rtype: list of pair of float
+
+ :note: intervals_in_dim function requires persistence function to be
+ launched first.
+ """
+ cdef vector[pair[double,double]] intervals_result
+ if self.pcohptr != NULL:
+ intervals_result = self.pcohptr.intervals_in_dimension(dimension)
+ else:
+ print("intervals_in_dim function requires persistence function"
+ " to be launched first.")
+ return intervals_result
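A usage sketch for the class above, assuming the extension is importable as `gudhi`; the 3x3 grid of cell values is an arbitrary example.

    import gudhi

    # A 3x3 bitmap given by its top-dimensional cell values.
    cc = gudhi.CubicalComplex(dimensions=[3, 3],
                              top_dimensional_cells=[1, 2, 3, 4, 5, 6, 7, 8, 9])
    diag = cc.persistence(homology_coeff_field=2, min_persistence=0)
    print(cc.betti_numbers())                        # requires persistence() to be called first
    print(cc.persistence_intervals_in_dimension(0))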
diff --git a/src/cython/cython/euclidean_strong_witness_complex.pyx b/src/cython/cython/euclidean_strong_witness_complex.pyx
new file mode 100644
index 00000000..c1523892
--- /dev/null
+++ b/src/cython/cython/euclidean_strong_witness_complex.pyx
@@ -0,0 +1,97 @@
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.utility cimport pair
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+cdef extern from "Euclidean_strong_witness_complex_interface.h" namespace "Gudhi":
+ cdef cppclass Euclidean_strong_witness_complex_interface "Gudhi::witness_complex::Euclidean_strong_witness_complex_interface":
+ Euclidean_strong_witness_complex_interface(vector[vector[double]] landmarks, vector[vector[double]] witnesses)
+ void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square)
+ void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square,
+ unsigned limit_dimension)
+ vector[double] get_point(unsigned vertex)
+
+# EuclideanStrongWitnessComplex python interface
+cdef class EuclideanStrongWitnessComplex:
+ """Constructs strong witness complex for given sets of witnesses and
+ landmarks in Euclidean space.
+ """
+
+ cdef Euclidean_strong_witness_complex_interface * thisptr
+
+ # Fake constructor that does nothing but documenting the constructor
+ def __init__(self, landmarks=None, witnesses=None):
+        """EuclideanStrongWitnessComplex constructor.
+
+ :param landmarks: A list of landmarks (in the point cloud).
+ :type landmarks: list of list of double
+
+ :param witnesses: The point cloud.
+ :type witnesses: list of list of double
+ """
+
+ # The real cython constructor
+ def __cinit__(self, landmarks=None, witnesses=None):
+ if landmarks is not None and witnesses is not None:
+ self.thisptr = new Euclidean_strong_witness_complex_interface(landmarks, witnesses)
+
+ def __dealloc__(self):
+ if self.thisptr != NULL:
+ del self.thisptr
+
+ def __is_defined(self):
+        """Returns true if EuclideanStrongWitnessComplex pointer is not NULL.
+ """
+ return self.thisptr != NULL
+
+ def create_simplex_tree(self, max_alpha_square, limit_dimension = -1):
+ """
+        :param max_alpha_square: The maximum alpha square threshold the
+            simplices shall not exceed.
+        :type max_alpha_square: float
+        :param limit_dimension: The maximal dimension of the simplicial
+            complex (default -1 means no limit).
+        :type limit_dimension: int
+        :returns: A simplex tree created from the strong witness complex.
+ :rtype: SimplexTree
+ """
+ simplex_tree = SimplexTree()
+        if limit_dimension != -1:
+ self.thisptr.create_simplex_tree(simplex_tree.thisptr, max_alpha_square, limit_dimension)
+ else:
+ self.thisptr.create_simplex_tree(simplex_tree.thisptr, max_alpha_square)
+ return simplex_tree
+
+ def get_point(self, vertex):
+ """This function returns the point corresponding to a given vertex.
+
+ :param vertex: The vertex.
+ :type vertex: int.
+ :returns: The point.
+ :rtype: list of float
+ """
+ cdef vector[double] point = self.thisptr.get_point(vertex)
+ return point
+
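A usage sketch for the class above, assuming the extension is importable as `gudhi`; the landmark and witness coordinates are arbitrary.

    import gudhi

    landmarks = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0]]
    witnesses = [[0.1, 0.2], [0.9, 0.1], [0.1, 0.9], [0.4, 0.6]]

    eswc = gudhi.EuclideanStrongWitnessComplex(landmarks=landmarks, witnesses=witnesses)
    st = eswc.create_simplex_tree(max_alpha_square=1.0, limit_dimension=2)
    print(st.num_simplices())
    print(eswc.get_point(0))  # coordinates of the landmark attached to vertex 0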
diff --git a/src/cython/cython/euclidean_witness_complex.pyx b/src/cython/cython/euclidean_witness_complex.pyx
new file mode 100644
index 00000000..7c443b6b
--- /dev/null
+++ b/src/cython/cython/euclidean_witness_complex.pyx
@@ -0,0 +1,97 @@
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.utility cimport pair
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+cdef extern from "Euclidean_witness_complex_interface.h" namespace "Gudhi":
+ cdef cppclass Euclidean_witness_complex_interface "Gudhi::witness_complex::Euclidean_witness_complex_interface":
+ Euclidean_witness_complex_interface(vector[vector[double]] landmarks, vector[vector[double]] witnesses)
+ void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square)
+ void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square,
+ unsigned limit_dimension)
+ vector[double] get_point(unsigned vertex)
+
+# EuclideanWitnessComplex python interface
+cdef class EuclideanWitnessComplex:
+ """Constructs (weak) witness complex for given sets of witnesses and
+ landmarks in Euclidean space.
+ """
+
+ cdef Euclidean_witness_complex_interface * thisptr
+
+ # Fake constructor that does nothing but documenting the constructor
+ def __init__(self, landmarks=None, witnesses=None):
+        """EuclideanWitnessComplex constructor.
+
+ :param landmarks: A list of landmarks (in the point cloud).
+ :type landmarks: list of list of double
+
+ :param witnesses: The point cloud.
+ :type witnesses: list of list of double
+ """
+
+ # The real cython constructor
+ def __cinit__(self, landmarks=None, witnesses=None):
+ if landmarks is not None and witnesses is not None:
+ self.thisptr = new Euclidean_witness_complex_interface(landmarks, witnesses)
+
+ def __dealloc__(self):
+ if self.thisptr != NULL:
+ del self.thisptr
+
+ def __is_defined(self):
+        """Returns true if EuclideanWitnessComplex pointer is not NULL.
+ """
+ return self.thisptr != NULL
+
+ def create_simplex_tree(self, max_alpha_square, limit_dimension = -1):
+ """
+        :param max_alpha_square: The maximum alpha square threshold the
+            simplices shall not exceed.
+        :type max_alpha_square: float
+        :param limit_dimension: The maximal dimension of the simplicial
+            complex (default -1 means no limit).
+        :type limit_dimension: int
+        :returns: A simplex tree created from the witness complex.
+ :rtype: SimplexTree
+ """
+ simplex_tree = SimplexTree()
+        if limit_dimension != -1:
+ self.thisptr.create_simplex_tree(simplex_tree.thisptr, max_alpha_square, limit_dimension)
+ else:
+ self.thisptr.create_simplex_tree(simplex_tree.thisptr, max_alpha_square)
+ return simplex_tree
+
+ def get_point(self, vertex):
+ """This function returns the point corresponding to a given vertex.
+
+ :param vertex: The vertex.
+ :type vertex: int.
+ :returns: The point.
+ :rtype: list of float
+ """
+ cdef vector[double] point = self.thisptr.get_point(vertex)
+ return point
+
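The weak witness variant is driven the same way; a minimal sketch under the same assumptions (extension importable as `gudhi`, arbitrary coordinates):

    import gudhi

    landmarks = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0]]
    witnesses = [[0.1, 0.2], [0.9, 0.1], [0.1, 0.9], [0.4, 0.6]]

    ewc = gudhi.EuclideanWitnessComplex(landmarks=landmarks, witnesses=witnesses)
    st = ewc.create_simplex_tree(max_alpha_square=1.0)
    print(st.dimension())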
diff --git a/src/cython/cython/off_reader.pyx b/src/cython/cython/off_reader.pyx
new file mode 100644
index 00000000..b6e107ef
--- /dev/null
+++ b/src/cython/cython/off_reader.pyx
@@ -0,0 +1,49 @@
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.string cimport string
+import os
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+cdef extern from "Off_reader_interface.h" namespace "Gudhi":
+ vector[vector[double]] read_points_from_OFF_file(string off_file)
+
+def read_off(off_file=''):
+ """Read points from OFF file.
+
+ :param off_file: An OFF file style name.
+ :type off_file: string
+
+ :returns: The point set.
+ :rtype: vector[vector[double]]
+ """
+    if off_file != '':
+ if os.path.isfile(off_file):
+ return read_points_from_OFF_file(str.encode(off_file))
+ else:
+ print("file " + off_file + " not found.")
+
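A sketch of the reader above, assuming the module is importable as `gudhi`; 'points.off' is only a placeholder path, not a file shipped with this patch.

    import gudhi

    points = gudhi.read_off(off_file='points.off')  # placeholder path
    if points:  # read_off returns None if the file is not found
        print(len(points), "points of dimension", len(points[0]))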
diff --git a/src/cython/cython/periodic_cubical_complex.pyx b/src/cython/cython/periodic_cubical_complex.pyx
new file mode 100644
index 00000000..88cb4395
--- /dev/null
+++ b/src/cython/cython/periodic_cubical_complex.pyx
@@ -0,0 +1,197 @@
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.utility cimport pair
+from libcpp.string cimport string
+from libcpp cimport bool
+import os
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+cdef extern from "Cubical_complex_interface.h" namespace "Gudhi":
+ cdef cppclass Periodic_cubical_complex_base_interface "Gudhi::Cubical_complex::Cubical_complex_interface<Gudhi::cubical_complex::Bitmap_cubical_complex_periodic_boundary_conditions_base<double>>":
+ Periodic_cubical_complex_base_interface(vector[unsigned] dimensions, vector[double] top_dimensional_cells)
+ Periodic_cubical_complex_base_interface(string perseus_file)
+ int num_simplices()
+ int dimension()
+
+cdef extern from "Persistent_cohomology_interface.h" namespace "Gudhi":
+ cdef cppclass Periodic_cubical_complex_persistence_interface "Gudhi::Persistent_cohomology_interface<Gudhi::Cubical_complex::Cubical_complex_interface<Gudhi::cubical_complex::Bitmap_cubical_complex_periodic_boundary_conditions_base<double>>>":
+ Periodic_cubical_complex_persistence_interface(Periodic_cubical_complex_base_interface * st, bool persistence_dim_max)
+ vector[pair[int, pair[double, double]]] get_persistence(int homology_coeff_field, double min_persistence)
+ vector[int] betti_numbers()
+ vector[int] persistent_betti_numbers(double from_value, double to_value)
+ vector[pair[double,double]] intervals_in_dimension(int dimension)
+
+# PeriodicCubicalComplex python interface
+cdef class PeriodicCubicalComplex:
+ """The PeriodicCubicalComplex is an example of a structured complex useful
+ in computational mathematics (specially rigorous numerics) and image
+ analysis.
+ """
+ cdef Periodic_cubical_complex_base_interface * thisptr
+
+ cdef Periodic_cubical_complex_persistence_interface * pcohptr
+
+ # Fake constructor that does nothing but documenting the constructor
+ def __init__(self, dimensions=None, top_dimensional_cells=None,
+ perseus_file=''):
+ """PeriodicCubicalComplex constructor from dimensions and
+ top_dimensional_cells or from a perseus file style name.
+
+ :param dimensions: A list of number of top dimensional cells.
+ :type dimensions: list of int
+ :param top_dimensional_cells: A list of top dimensional cells.
+ :type top_dimensional_cells: list of double
+
+ Or
+
+ :param perseus_file: A perseus file style name.
+ :type perseus_file: string
+ """
+
+ # The real cython constructor
+ def __cinit__(self, dimensions=None, top_dimensional_cells=None,
+ perseus_file=''):
+        if (dimensions is not None) and (top_dimensional_cells is not None) and (perseus_file == ''):
+            self.thisptr = new Periodic_cubical_complex_base_interface(dimensions, top_dimensional_cells)
+        elif (dimensions is None) and (top_dimensional_cells is None) and (perseus_file != ''):
+ if os.path.isfile(perseus_file):
+ self.thisptr = new Periodic_cubical_complex_base_interface(str.encode(perseus_file))
+ else:
+ print("file " + perseus_file + " not found.")
+ else:
+            print("PeriodicCubicalComplex can be constructed from dimensions and "
+                  "top_dimensional_cells or from a perseus file style name.")
+
+ def __dealloc__(self):
+ if self.thisptr != NULL:
+ del self.thisptr
+ if self.pcohptr != NULL:
+ del self.pcohptr
+
+ def __is_defined(self):
+ """Returns true if PeriodicCubicalComplex pointer is not NULL.
+ """
+ return self.thisptr != NULL
+
+ def __is_persistence_defined(self):
+ """Returns true if Persistence pointer is not NULL.
+ """
+ return self.pcohptr != NULL
+
+ def num_simplices(self):
+ """This function returns the number of simplices of the simplicial
+ complex.
+
+ :returns: int -- the simplicial complex number of simplices.
+ """
+ return self.thisptr.num_simplices()
+
+ def dimension(self):
+ """This function returns the dimension of the simplicial complex.
+
+ :returns: int -- the simplicial complex dimension.
+ """
+ return self.thisptr.dimension()
+
+ def persistence(self, homology_coeff_field=11, min_persistence=0):
+ """This function returns the persistence of the simplicial complex.
+
+ :param homology_coeff_field: The homology coefficient field. Must be a
+ prime number
+ :type homology_coeff_field: int.
+ :param min_persistence: The minimum persistence value to take into
+ account (strictly greater than min_persistence). Default value is
+ 0.0.
+            Set min_persistence to -1.0 to see all values.
+ :type min_persistence: float.
+ :returns: list of pairs(dimension, pair(birth, death)) -- the
+ persistence of the simplicial complex.
+ """
+ if self.pcohptr != NULL:
+ del self.pcohptr
+ if self.thisptr != NULL:
+ self.pcohptr = new Periodic_cubical_complex_persistence_interface(self.thisptr, True)
+ cdef vector[pair[int, pair[double, double]]] persistence_result
+ if self.pcohptr != NULL:
+ persistence_result = self.pcohptr.get_persistence(homology_coeff_field, min_persistence)
+ return persistence_result
+
+ def betti_numbers(self):
+ """This function returns the Betti numbers of the simplicial complex.
+
+ :returns: list of int -- The Betti numbers ([B0, B1, ..., Bn]).
+
+ :note: betti_numbers function requires persistence function to be
+ launched first.
+ """
+ cdef vector[int] bn_result
+ if self.pcohptr != NULL:
+ bn_result = self.pcohptr.betti_numbers()
+ return bn_result
+
+ def persistent_betti_numbers(self, from_value, to_value):
+ """This function returns the persistent Betti numbers of the
+ simplicial complex.
+
+ :param from_value: The persistence birth limit to be added in the
+ numbers (persistent birth <= from_value).
+ :type from_value: float.
+ :param to_value: The persistence death limit to be added in the
+ numbers (persistent death > to_value).
+ :type to_value: float.
+
+ :returns: list of int -- The persistent Betti numbers ([B0, B1, ...,
+ Bn]).
+
+ :note: persistent_betti_numbers function requires persistence
+ function to be launched first.
+ """
+ cdef vector[int] pbn_result
+ if self.pcohptr != NULL:
+ pbn_result = self.pcohptr.persistent_betti_numbers(<double>from_value, <double>to_value)
+ return pbn_result
+
+ def persistence_intervals_in_dimension(self, dimension):
+ """This function returns the persistence intervals of the simplicial
+ complex in a specific dimension.
+
+ :param dimension: The specific dimension.
+        :type dimension: int.
+ :returns: The persistence intervals.
+ :rtype: list of pair of float
+
+ :note: intervals_in_dim function requires persistence function to be
+ launched first.
+ """
+ cdef vector[pair[double,double]] intervals_result
+ if self.pcohptr != NULL:
+ intervals_result = self.pcohptr.intervals_in_dimension(dimension)
+ else:
+ print("intervals_in_dim function requires persistence function"
+ " to be launched first.")
+ return intervals_result
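A hedged sketch for the class above. It assumes the extension is importable as `gudhi` and that a Perseus-style file describing the periodic grid exists; 'periodic_grid.txt' is only a placeholder name.

    import gudhi

    # The Perseus file encodes the grid sizes, the cell values and which
    # directions wrap around (periodic boundary conditions).
    pcc = gudhi.PeriodicCubicalComplex(perseus_file='periodic_grid.txt')
    diag = pcc.persistence()
    print(pcc.betti_numbers())  # requires persistence() to be called first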
diff --git a/src/cython/cython/persistence_graphical_tools.py b/src/cython/cython/persistence_graphical_tools.py
new file mode 100755
index 00000000..a984633e
--- /dev/null
+++ b/src/cython/cython/persistence_graphical_tools.py
@@ -0,0 +1,152 @@
+import matplotlib.pyplot as plt
+import numpy as np
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+def __min_birth_max_death(persistence):
+ """This function returns (min_birth, max_death) from the persistence.
+
+ :param persistence: The persistence to plot.
+ :type persistence: list of tuples(dimension, tuple(birth, death)).
+ :returns: (float, float) -- (min_birth, max_death).
+ """
+ # Look for minimum birth date and maximum death date for plot optimisation
+ max_death = 0
+ min_birth = persistence[0][1][0]
+ for interval in reversed(persistence):
+ if float(interval[1][1]) != float('inf'):
+ if float(interval[1][1]) > max_death:
+ max_death = float(interval[1][1])
+ if float(interval[1][0]) > max_death:
+ max_death = float(interval[1][0])
+ if float(interval[1][0]) < min_birth:
+ min_birth = float(interval[1][0])
+ return (min_birth, max_death)
+
+"""
+Only 13 colors for the palette
+"""
+palette = ['#ff0000', '#00ff00', '#0000ff', '#00ffff', '#ff00ff', '#ffff00',
+ '#000000', '#880000', '#008800', '#000088', '#888800', '#880088',
+ '#008888']
+
+def show_palette_values(alpha=0.6):
+    """This function shows the palette color values as a function of the dimension.
+
+    :param alpha: alpha value in [0.0, 1.0] for horizontal bars (default is 0.6).
+    :type alpha: float.
+    :returns: plot -- A horizontal bar plot of the dimension colors.
+ """
+    colors = list(palette)
+
+ y_pos = np.arange(len(palette))
+
+ plt.barh(y_pos, y_pos + 1, align='center', alpha=alpha, color=colors)
+ plt.ylabel('Dimension')
+ plt.title('Dimension palette values')
+
+ plt.show()
+
+def plot_persistence_barcode(persistence, alpha=0.6):
+ """This function plots the persistence bar code.
+
+ :param persistence: The persistence to plot.
+ :type persistence: list of tuples(dimension, tuple(birth, death)).
+ :param alpha: alpha value in [0.0, 1.0] for horizontal bars (default is 0.6).
+ :type alpha: float.
+    :returns: plot -- A horizontal bar plot of the persistence.
+ """
+ (min_birth, max_death) = __min_birth_max_death(persistence)
+ ind = 0
+ delta = ((max_death - min_birth) / 10.0)
+ # Replace infinity values with max_death + delta for bar code to be more
+ # readable
+ infinity = max_death + delta
+ axis_start = min_birth - delta
+ # Draw horizontal bars in loop
+ for interval in reversed(persistence):
+ if float(interval[1][1]) != float('inf'):
+ # Finite death case
+ plt.barh(ind, (interval[1][1] - interval[1][0]), height=0.8,
+ left = interval[1][0], alpha=alpha,
+ color = palette[interval[0]])
+ else:
+ # Infinite death case for diagram to be nicer
+ plt.barh(ind, (infinity - interval[1][0]), height=0.8,
+ left = interval[1][0], alpha=alpha,
+ color = palette[interval[0]])
+ ind = ind + 1
+
+ plt.title('Persistence barcode')
+ # Ends plot on infinity value and starts a little bit before min_birth
+ plt.axis([axis_start, infinity, 0, ind])
+ plt.show()
+
+def plot_persistence_diagram(persistence, alpha=0.6):
+ """This function plots the persistence diagram.
+
+ :param persistence: The persistence to plot.
+ :type persistence: list of tuples(dimension, tuple(birth, death)).
+ :param alpha: alpha value in [0.0, 1.0] for points and horizontal infinity line (default is 0.6).
+ :type alpha: float.
+    :returns: plot -- A persistence diagram plot.
+ """
+ (min_birth, max_death) = __min_birth_max_death(persistence)
+ ind = 0
+ delta = ((max_death - min_birth) / 10.0)
+ # Replace infinity values with max_death + delta for diagram to be more
+ # readable
+ infinity = max_death + delta
+ axis_start = min_birth - delta
+
+ # line display of equation : birth = death
+ x = np.linspace(axis_start, infinity, 1000)
+ # infinity line and text
+ plt.plot(x, x, color='k', linewidth=1.0)
+ plt.plot(x, [infinity] * len(x), linewidth=1.0, color='k', alpha=alpha)
+ plt.text(axis_start, infinity, r'$\infty$', color='k', alpha=alpha)
+
+ # Draw points in loop
+ for interval in reversed(persistence):
+ if float(interval[1][1]) != float('inf'):
+ # Finite death case
+ plt.scatter(interval[1][0], interval[1][1], alpha=alpha,
+ color = palette[interval[0]])
+ else:
+ # Infinite death case for diagram to be nicer
+ plt.scatter(interval[1][0], infinity, alpha=alpha,
+ color = palette[interval[0]])
+ ind = ind + 1
+
+ plt.title('Persistence diagram')
+ plt.xlabel('Birth')
+ plt.ylabel('Death')
+ # Ends plot on infinity value and starts a little bit before min_birth
+ plt.axis([axis_start, infinity, axis_start, infinity + delta])
+ plt.show()
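To show the two plotting helpers above in context, here is a sketch that computes a small persistence diagram and plots it. It assumes matplotlib can open a window and that the helpers are re-exported by the `gudhi` package (otherwise import them from this module directly); the toy filtration is arbitrary.

    import gudhi

    st = gudhi.SimplexTree()
    st.insert_simplex_and_subfaces([0, 1], filtration=1.0)
    st.insert_simplex_and_subfaces([1, 2], filtration=1.0)
    st.insert_simplex_and_subfaces([2, 0], filtration=2.0)
    st.insert_simplex_and_subfaces([0, 1, 2], filtration=3.0)
    st.initialize_filtration()

    diag = st.persistence(homology_coeff_field=2)
    gudhi.plot_persistence_barcode(diag)
    gudhi.plot_persistence_diagram(diag)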
diff --git a/src/cython/cython/rips_complex.pyx b/src/cython/cython/rips_complex.pyx
new file mode 100644
index 00000000..ad9b0a4d
--- /dev/null
+++ b/src/cython/cython/rips_complex.pyx
@@ -0,0 +1,125 @@
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.utility cimport pair
+from libcpp.string cimport string
+from libcpp cimport bool
+import os
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+cdef extern from "Rips_complex_interface.h" namespace "Gudhi":
+ cdef cppclass Rips_complex_interface "Gudhi::rips_complex::Rips_complex_interface":
+ Rips_complex_interface(vector[vector[double]] values, double threshold, bool euclidean)
+ # bool from_file is a workaround for cython to find the correct signature
+ Rips_complex_interface(string file_name, double threshold, bool euclidean, bool from_file)
+ void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, int dim_max)
+
+# RipsComplex python interface
+cdef class RipsComplex:
+ """The data structure is a one skeleton graph, or Rips graph, containing
+    edges when the edge length is less than or equal to a given threshold. Edge
+    length is computed from a user-given point cloud with a given distance
+ function, or a distance matrix.
+ """
+
+ cdef Rips_complex_interface * thisptr
+
+ # Fake constructor that does nothing but documenting the constructor
+ def __init__(self, points=None, off_file='', distance_matrix=None, csv_file='', max_edge_length=float('inf')):
+ """RipsComplex constructor.
+
+    :param max_edge_length: The maximal edge length (Rips threshold).
+    :type max_edge_length: float
+
+ :param points: A list of points in d-Dimension.
+ :type points: list of list of double
+
+ Or
+
+ :param off_file: An OFF file style name.
+ :type off_file: string
+
+ Or
+
+ :param distance_matrix: A distance matrix (full square or lower
+ triangular).
+    :type distance_matrix: list of list of double
+
+ Or
+
+ :param csv_file: A csv file style name containing a full square or a
+ lower triangular distance matrix.
+ :type csv_file: string
+ """
+
+ # The real cython constructor
+ def __cinit__(self, points=None, off_file='', distance_matrix=None, csv_file='', max_edge_length=float('inf')):
+        if off_file != '':
+ if os.path.isfile(off_file):
+ self.thisptr = new Rips_complex_interface(str.encode(off_file),
+ max_edge_length,
+ True,
+ True)
+ else:
+ print("file " + off_file + " not found.")
+        elif csv_file != '':
+ if os.path.isfile(csv_file):
+ self.thisptr = new Rips_complex_interface(str.encode(csv_file),
+ max_edge_length,
+ False,
+ True)
+ else:
+ print("file " + csv_file + " not found.")
+ elif distance_matrix is not None:
+ self.thisptr = new Rips_complex_interface(distance_matrix, max_edge_length, False)
+ else:
+ if points is None:
+ # Empty Rips construction
+ points=[]
+ self.thisptr = new Rips_complex_interface(points, max_edge_length, True)
+
+
+ def __dealloc__(self):
+ if self.thisptr != NULL:
+ del self.thisptr
+
+ def __is_defined(self):
+ """Returns true if RipsComplex pointer is not NULL.
+ """
+ return self.thisptr != NULL
+
+ def create_simplex_tree(self, max_dimension=1):
+ """
+        :param max_dimension: Expand the Rips graph into simplices up to this
+            maximal dimension.
+        :type max_dimension: int
+        :returns: A simplex tree created from the Rips complex.
+ :rtype: SimplexTree
+ """
+ simplex_tree = SimplexTree()
+ self.thisptr.create_simplex_tree(simplex_tree.thisptr, max_dimension)
+ return simplex_tree
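A usage sketch for the binding above, assuming the extension is importable as `gudhi`; the point set and the thresholds are arbitrary example values.

    import gudhi

    points = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]]
    rips = gudhi.RipsComplex(points=points, max_edge_length=2.0)

    # Build the Rips graph, then expand it into simplices of dimension at most 2.
    st = rips.create_simplex_tree(max_dimension=2)
    st.initialize_filtration()
    diag = st.persistence(homology_coeff_field=2, min_persistence=0)
    print(st.num_simplices())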
diff --git a/src/cython/cython/simplex_tree.pyx b/src/cython/cython/simplex_tree.pyx
new file mode 100644
index 00000000..148227e1
--- /dev/null
+++ b/src/cython/cython/simplex_tree.pyx
@@ -0,0 +1,427 @@
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.utility cimport pair
+from libcpp cimport bool
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+cdef extern from "Simplex_tree_interface.h" namespace "Gudhi":
+ cdef cppclass Simplex_tree_options_full_featured:
+ pass
+
+ cdef cppclass Simplex_tree_interface_full_featured "Gudhi::Simplex_tree_interface<Gudhi::Simplex_tree_options_full_featured>":
+ Simplex_tree()
+ double filtration()
+ double simplex_filtration(vector[int] simplex)
+ void set_filtration(double filtration)
+ void initialize_filtration()
+ int num_vertices()
+ int num_simplices()
+ void set_dimension(int dimension)
+ int dimension()
+ bint find_simplex(vector[int] simplex)
+ bint insert_simplex_and_subfaces(vector[int] simplex,
+ double filtration)
+ bint insert_simplex(vector[int] simplex, double filtration)
+ vector[pair[vector[int], double]] get_filtered_tree()
+ vector[pair[vector[int], double]] get_skeleton_tree(int dimension)
+ vector[pair[vector[int], double]] get_star(vector[int] simplex)
+ vector[pair[vector[int], double]] get_cofaces(vector[int] simplex,
+ int dimension)
+ void remove_maximal_simplex(vector[int] simplex)
+ void expansion(int max_dim)
+
+cdef extern from "Persistent_cohomology_interface.h" namespace "Gudhi":
+ cdef cppclass Simplex_tree_persistence_interface "Gudhi::Persistent_cohomology_interface<Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_full_featured>>":
+ Simplex_tree_persistence_interface(Simplex_tree_interface_full_featured * st, bool persistence_dim_max)
+ vector[pair[int, pair[double, double]]] get_persistence(int homology_coeff_field, double min_persistence)
+ vector[int] betti_numbers()
+ vector[int] persistent_betti_numbers(double from_value, double to_value)
+ vector[pair[double,double]] intervals_in_dimension(int dimension)
+
+# SimplexTree python interface
+cdef class SimplexTree:
+ """The simplex tree is an efficient and flexible data structure for
+ representing general (filtered) simplicial complexes. The data structure
+ is described in Jean-Daniel Boissonnat and Clément Maria. The Simplex
+ Tree: An Efficient Data Structure for General Simplicial Complexes.
+ Algorithmica, pages 1–22, 2014.
+
+    This class is a filtered version of the simplex tree, with keys and
+    non-contiguous vertices.
+ """
+ cdef Simplex_tree_interface_full_featured * thisptr
+
+ cdef Simplex_tree_persistence_interface * pcohptr
+
+ # Fake constructor that does nothing but documenting the constructor
+ def __init__(self):
+ """SimplexTree constructor.
+ """
+
+ # The real cython constructor
+ def __cinit__(self):
+ self.thisptr = new Simplex_tree_interface_full_featured()
+
+ def __dealloc__(self):
+ if self.thisptr != NULL:
+ del self.thisptr
+ if self.pcohptr != NULL:
+ del self.pcohptr
+
+ def __is_defined(self):
+ """Returns true if SimplexTree pointer is not NULL.
+ """
+ return self.thisptr != NULL
+
+ def __is_persistence_defined(self):
+ """Returns true if Persistence pointer is not NULL.
+ """
+ return self.pcohptr != NULL
+
+ def get_filtration(self):
+ """This function returns the main simplicial complex filtration value.
+
+ :returns: The simplicial complex filtration value.
+ :rtype: float
+ """
+ return self.thisptr.filtration()
+
+ def filtration(self, simplex):
+ """This function returns the simplicial complex filtration value for a
+ given N-simplex.
+
+        :param simplex: The N-simplex, represented by a list of vertices.
+ :type simplex: list of int.
+ :returns: The simplicial complex filtration value.
+ :rtype: float
+ """
+ return self.thisptr.simplex_filtration(simplex)
+
+ def set_filtration(self, filtration):
+ """This function sets the main simplicial complex filtration value.
+
+ :param filtration: The filtration value.
+ :type filtration: float.
+ """
+ self.thisptr.set_filtration(<double> filtration)
+
+ def initialize_filtration(self):
+ """This function initializes and sorts the simplicial complex
+ filtration vector.
+
+ .. note::
+
+ This function must be launched before persistence, betti_numbers,
+ persistent_betti_numbers or get_filtered_tree after inserting or
+ removing simplices.
+ """
+ self.thisptr.initialize_filtration()
+
+ def num_vertices(self):
+ """This function returns the number of vertices of the simplicial
+ complex.
+
+ :returns: The simplicial complex number of vertices.
+ :rtype: int
+ """
+ return self.thisptr.num_vertices()
+
+ def num_simplices(self):
+ """This function returns the number of simplices of the simplicial
+ complex.
+
+ :returns: the simplicial complex number of simplices.
+ :rtype: int
+ """
+ return self.thisptr.num_simplices()
+
+ def dimension(self):
+ """This function returns the dimension of the simplicial complex.
+
+ :returns: the simplicial complex dimension.
+ :rtype: int
+ """
+ return self.thisptr.dimension()
+
+ def set_dimension(self, dimension):
+ """This function sets the dimension of the simplicial complex.
+
+ insert and remove_maximal_simplex functions do not update dimension
+ value of the `SimplexTree`.
+
+ `AlphaComplex`, `RipsComplex`, `TangentialComplex` and `WitnessComplex`
+        automatically set the correct dimension in their `create_simplex_tree`
+ functions.
+
+ :param dimension: The new dimension value.
+ :type dimension: int.
+ """
+ self.thisptr.set_dimension(<int>dimension)
+
+ def find(self, simplex):
+        """This function returns whether the N-simplex was found in the
+        simplicial complex or not.
+
+        :param simplex: The N-simplex to find, represented by a list of vertices.
+ :type simplex: list of int.
+ :returns: true if the simplex was found, false otherwise.
+ :rtype: bool
+ """
+ cdef vector[int] complex
+ for i in simplex:
+ complex.push_back(i)
+ return self.thisptr.find_simplex(complex)
+
+ def insert_simplex(self, simplex, filtration=0.0):
+ """This function inserts the given N-simplex with the given filtration
+ value (default value is '0.0').
+
+        :param simplex: The N-simplex to insert, represented by a list of
+            vertices.
+        :type simplex: list of int.
+        :param filtration: The filtration value of the simplex.
+        :type filtration: float.
+        :returns: true if the simplex was not yet in the complex, false otherwise.
+ :rtype: bool
+ """
+ cdef vector[int] complex
+ for i in simplex:
+ complex.push_back(i)
+ return self.thisptr.insert_simplex(complex, <double>filtration)
+
+ def insert_simplex_and_subfaces(self, simplex, filtration=0.0):
+ """This function inserts the given N-simplex and its subfaces with the
+ given filtration value (default value is '0.0').
+
+        :param simplex: The N-simplex to insert, represented by a list of
+            vertices.
+        :type simplex: list of int.
+        :param filtration: The filtration value of the simplex.
+        :type filtration: float.
+        :returns: true if the simplex was not yet in the complex, false otherwise.
+ :rtype: bool
+ """
+ cdef vector[int] complex
+ for i in simplex:
+ complex.push_back(i)
+ return self.thisptr.insert_simplex_and_subfaces(complex,
+ <double>filtration)
+
+ def get_filtered_tree(self):
+ """This function returns the tree sorted by increasing filtration
+ values.
+
+ :returns: The tree sorted by increasing filtration values.
+ :rtype: list of tuples(simplex, filtration)
+ """
+ cdef vector[pair[vector[int], double]] filtered_tree \
+ = self.thisptr.get_filtered_tree()
+ ct = []
+ for filtered_complex in filtered_tree:
+ v = []
+ for vertex in filtered_complex.first:
+ v.append(vertex)
+ ct.append((v, filtered_complex.second))
+ return ct
+
+ def get_skeleton_tree(self, dimension):
+ """This function returns the tree skeleton of a maximum given
+ dimension.
+
+ :param dimension: The skeleton dimension value.
+ :type dimension: int.
+ :returns: The skeleton tree of a maximum dimension.
+ :rtype: list of tuples(simplex, filtration)
+ """
+ cdef vector[pair[vector[int], double]] sk_tree \
+ = self.thisptr.get_skeleton_tree(<int>dimension)
+ ct = []
+ for filtered_complex in sk_tree:
+ v = []
+ for vertex in filtered_complex.first:
+ v.append(vertex)
+ ct.append((v, filtered_complex.second))
+ return ct
+
+ def get_star(self, simplex):
+        """This function returns the star of a given N-simplex.
+
+        :param simplex: The N-simplex, represented by a list of vertices.
+        :type simplex: list of int.
+        :returns: The star of the simplex.
+ :rtype: list of tuples(simplex, filtration)
+ """
+ cdef vector[int] complex
+ for i in simplex:
+ complex.push_back(i)
+ cdef vector[pair[vector[int], double]] stars \
+ = self.thisptr.get_star(complex)
+ ct = []
+ for filtered_complex in stars:
+ v = []
+ for vertex in filtered_complex.first:
+ v.append(vertex)
+ ct.append((v, filtered_complex.second))
+ return ct
+
+ def get_cofaces(self, simplex, codimension):
+ """This function returns the cofaces of a given N-simplex with a
+ given codimension.
+
+        :param simplex: The N-simplex, represented by a list of vertices.
+        :type simplex: list of int.
+        :param codimension: The codimension. If codimension = 0, all cofaces
+            are returned (equivalent of get_star function).
+        :type codimension: int.
+        :returns: The cofaces of the simplex.
+ :rtype: list of tuples(simplex, filtration)
+ """
+ cdef vector[int] complex
+ for i in simplex:
+ complex.push_back(i)
+ cdef vector[pair[vector[int], double]] cofaces \
+ = self.thisptr.get_cofaces(complex, <int>codimension)
+ ct = []
+ for filtered_complex in cofaces:
+ v = []
+ for vertex in filtered_complex.first:
+ v.append(vertex)
+ ct.append((v, filtered_complex.second))
+ return ct
+
+ def remove_maximal_simplex(self, simplex):
+ """This function removes a given maximal N-simplex from the simplicial
+ complex.
+
+        :param simplex: The N-simplex, represented by a list of vertices.
+ :type simplex: list of int.
+ """
+ self.thisptr.remove_maximal_simplex(simplex)
+
+ def expansion(self, max_dim):
+ """Expands the Simplex_tree containing only its one skeleton
+ until dimension max_dim.
+
+ The expanded simplicial complex until dimension :math:`d`
+ attached to a graph :math:`G` is the maximal simplicial complex of
+ dimension at most :math:`d` admitting the graph :math:`G` as
+ :math:`1`-skeleton.
+ The filtration value assigned to a simplex is the maximal filtration
+ value of one of its edges.
+
+ The Simplex_tree must contain no simplex of dimension bigger than
+ 1 when calling the method.
+
+ :param max_dim: The maximal dimension.
+ :type max_dim: int.
+ """
+ self.thisptr.expansion(max_dim)
+
+ def persistence(self, homology_coeff_field=11, min_persistence=0, persistence_dim_max = False):
+ """This function returns the persistence of the simplicial complex.
+
+ :param homology_coeff_field: The homology coefficient field. Must be a
+ prime number
+ :type homology_coeff_field: int.
+ :param min_persistence: The minimum persistence value to take into
+ account (strictly greater than min_persistence). Default value is
+ 0.0.
+            Set min_persistence to -1.0 to see all values.
+ :type min_persistence: float.
+ :returns: The persistence of the simplicial complex.
+ :rtype: list of pairs(dimension, pair(birth, death))
+ """
+ if self.pcohptr != NULL:
+ del self.pcohptr
+ self.pcohptr = new Simplex_tree_persistence_interface(self.thisptr, persistence_dim_max)
+ cdef vector[pair[int, pair[double, double]]] persistence_result
+ if self.pcohptr != NULL:
+ persistence_result = self.pcohptr.get_persistence(homology_coeff_field, min_persistence)
+ return persistence_result
+
+ def betti_numbers(self):
+ """This function returns the Betti numbers of the simplicial complex.
+
+ :returns: The Betti numbers ([B0, B1, ..., Bn]).
+ :rtype: list of int
+
+ :note: betti_numbers function requires persistence function to be
+ launched first.
+ """
+ cdef vector[int] bn_result
+ if self.pcohptr != NULL:
+ bn_result = self.pcohptr.betti_numbers()
+ else:
+ print("betti_numbers function requires persistence function"
+ " to be launched first.")
+ return bn_result
+
+ def persistent_betti_numbers(self, from_value, to_value):
+ """This function returns the persistent Betti numbers of the
+ simplicial complex.
+
+ :param from_value: The persistence birth limit to be added in the
+ numbers (persistent birth <= from_value).
+ :type from_value: float.
+ :param to_value: The persistence death limit to be added in the
+ numbers (persistent death > to_value).
+ :type to_value: float.
+
+ :returns: The persistent Betti numbers ([B0, B1, ..., Bn]).
+ :rtype: list of int
+
+ :note: persistent_betti_numbers function requires persistence
+ function to be launched first.
+ """
+ cdef vector[int] pbn_result
+ if self.pcohptr != NULL:
+ pbn_result = self.pcohptr.persistent_betti_numbers(<double>from_value, <double>to_value)
+ else:
+ print("persistent_betti_numbers function requires persistence function"
+ " to be launched first.")
+ return pbn_result
+
+ def persistence_intervals_in_dimension(self, dimension):
+ """This function returns the persistence intervals of the simplicial
+ complex in a specific dimension.
+
+ :param dimension: The specific dimension.
+        :type dimension: int.
+ :returns: The persistence intervals.
+ :rtype: list of pair of float
+
+ :note: intervals_in_dim function requires persistence function to be
+ launched first.
+ """
+ cdef vector[pair[double,double]] intervals_result
+ if self.pcohptr != NULL:
+ intervals_result = self.pcohptr.intervals_in_dimension(dimension)
+ else:
+ print("intervals_in_dim function requires persistence function"
+ " to be launched first.")
+ return intervals_result
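A short sketch exercising the main methods above, assuming the extension is importable as `gudhi`; the simplices and filtration values are arbitrary.

    import gudhi

    st = gudhi.SimplexTree()
    st.insert_simplex_and_subfaces([0, 1, 2], filtration=1.0)
    st.insert_simplex_and_subfaces([2, 3], filtration=2.0)

    print(st.find([0, 1]))          # the edge [0, 1] is a face of the triangle
    print(st.num_vertices(), st.num_simplices())
    print(st.get_skeleton_tree(1))  # simplices of dimension at most 1

    st.initialize_filtration()
    diag = st.persistence(homology_coeff_field=2)
    print(st.betti_numbers())       # requires persistence() to be called first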
diff --git a/src/cython/cython/strong_witness_complex.pyx b/src/cython/cython/strong_witness_complex.pyx
new file mode 100644
index 00000000..770b46f5
--- /dev/null
+++ b/src/cython/cython/strong_witness_complex.pyx
@@ -0,0 +1,81 @@
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.utility cimport pair
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+cdef extern from "Strong_witness_complex_interface.h" namespace "Gudhi":
+ cdef cppclass Strong_witness_complex_interface "Gudhi::witness_complex::Strong_witness_complex_interface":
+ Strong_witness_complex_interface(vector[vector[pair[size_t, double]]] nearest_landmark_table)
+ void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square)
+ void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square,
+ unsigned limit_dimension)
+
+# StrongWitnessComplex python interface
+cdef class StrongWitnessComplex:
+ """Constructs (strong) witness complex for a given table of nearest
+ landmarks with respect to witnesses.
+ """
+
+ cdef Strong_witness_complex_interface * thisptr
+
+ # Fake constructor that does nothing but documenting the constructor
+ def __init__(self, nearest_landmark_table=None):
+ """StrongWitnessComplex constructor.
+
+        :param nearest_landmark_table: A list of lists of nearest landmarks and their distances.
+ :type nearest_landmark_table: list of list of pair of unsigned and double
+ """
+
+ # The real cython constructor
+ def __cinit__(self, nearest_landmark_table=None):
+ if nearest_landmark_table is not None:
+ self.thisptr = new Strong_witness_complex_interface(nearest_landmark_table)
+
+ def __dealloc__(self):
+ if self.thisptr != NULL:
+ del self.thisptr
+
+ def __is_defined(self):
+ """Returns true if StrongWitnessComplex pointer is not NULL.
+ """
+ return self.thisptr != NULL
+
+ def create_simplex_tree(self, max_alpha_square, limit_dimension = -1):
+ """
+        :param max_alpha_square: The maximum alpha square threshold the
+            simplices shall not exceed.
+        :type max_alpha_square: float
+        :param limit_dimension: The maximal dimension of the simplicial
+            complex (default -1 means no limit).
+        :type limit_dimension: int
+        :returns: A simplex tree created from the strong witness complex.
+ :rtype: SimplexTree
+ """
+ simplex_tree = SimplexTree()
+        if limit_dimension != -1:
+ self.thisptr.create_simplex_tree(simplex_tree.thisptr, max_alpha_square, limit_dimension)
+ else:
+ self.thisptr.create_simplex_tree(simplex_tree.thisptr, max_alpha_square)
+ return simplex_tree
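A sketch for the class above, assuming the extension is importable as `gudhi`; the nearest landmark table below is a made-up toy input, one row per witness with (landmark index, distance) pairs sorted by distance.

    import gudhi

    nearest_landmark_table = [[(0, 0.0), (1, 0.1), (2, 0.3)],
                              [(1, 0.0), (2, 0.2), (0, 0.4)],
                              [(2, 0.0), (0, 0.1), (1, 0.2)]]

    swc = gudhi.StrongWitnessComplex(nearest_landmark_table=nearest_landmark_table)
    st = swc.create_simplex_tree(max_alpha_square=0.5)
    print(st.num_simplices())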
diff --git a/src/cython/cython/subsampling.pyx b/src/cython/cython/subsampling.pyx
new file mode 100644
index 00000000..894a4fbe
--- /dev/null
+++ b/src/cython/cython/subsampling.pyx
@@ -0,0 +1,140 @@
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.string cimport string
+from libcpp cimport bool
+import os
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+cdef extern from "Subsampling_interface.h" namespace "Gudhi::subsampling":
+ vector[vector[double]] subsampling_n_farthest_points(vector[vector[double]] points, unsigned nb_points)
+ vector[vector[double]] subsampling_n_farthest_points(vector[vector[double]] points, unsigned nb_points, unsigned starting_point)
+ vector[vector[double]] subsampling_n_farthest_points_from_file(string off_file, unsigned nb_points)
+ vector[vector[double]] subsampling_n_farthest_points_from_file(string off_file, unsigned nb_points, unsigned starting_point)
+ vector[vector[double]] subsampling_n_random_points(vector[vector[double]] points, unsigned nb_points)
+ vector[vector[double]] subsampling_n_random_points_from_file(string off_file, unsigned nb_points)
+ vector[vector[double]] subsampling_sparsify_points(vector[vector[double]] points, double min_squared_dist)
+ vector[vector[double]] subsampling_sparsify_points_from_file(string off_file, double min_squared_dist)
+
+def choose_n_farthest_points(points=None, off_file='', nb_points=0, starting_point = ''):
+ """Subsample by a greedy strategy of iteratively adding the farthest point
+ from the current chosen point set to the subsampling.
+    The iteration starts with the landmark `starting_point`.
+
+ :param points: The input point set.
+ :type points: vector[vector[double]].
+
+ Or
+
+ :param off_file: An OFF file style name.
+ :type off_file: string
+
+ :param nb_points: Number of points of the subsample.
+ :type nb_points: unsigned.
+    :param starting_point: The iteration starts with the landmark `starting_point`, \
+        which is the index of the point to start with. If not set, this \
+        index is chosen randomly.
+ :type starting_point: unsigned.
+ :returns: The subsample point set.
+ :rtype: vector[vector[double]]
+ """
+    if off_file != '':
+ if os.path.isfile(off_file):
+            if starting_point == '':
+ return subsampling_n_farthest_points_from_file(str.encode(off_file),
+ nb_points)
+ else:
+ return subsampling_n_farthest_points_from_file(str.encode(off_file),
+ nb_points,
+ starting_point)
+ else:
+ print("file " + off_file + " not found.")
+ else:
+ if points is None:
+ # Empty points
+ points=[]
+        if starting_point == '':
+ return subsampling_n_farthest_points(points, nb_points)
+ else:
+ return subsampling_n_farthest_points(points, nb_points,
+ starting_point)
+
+def pick_n_random_points(points=None, off_file='', nb_points=0):
+ """Subsample a point set by picking random vertices.
+
+ :param points: The input point set.
+ :type points: vector[vector[double]].
+
+ Or
+
+ :param off_file: An OFF file style name.
+ :type off_file: string
+
+ :param nb_points: Number of points of the subsample.
+ :type nb_points: unsigned.
+ :returns: The subsample point set.
+ :rtype: vector[vector[double]]
+ """
+    if off_file != '':
+ if os.path.isfile(off_file):
+ return subsampling_n_random_points_from_file(str.encode(off_file),
+ nb_points)
+ else:
+ print("file " + off_file + " not found.")
+ else:
+ if points is None:
+ # Empty points
+ points=[]
+ return subsampling_n_random_points(points, nb_points)
+
+def sparsify_point_set(points=None, off_file='', min_squared_dist=0.0):
+    """Outputs a subset of the input point set so that the squared distance
+    between any two output points is at least min_squared_dist.
+
+ :param points: The input point set.
+ :type points: vector[vector[double]].
+
+ Or
+
+ :param off_file: An OFF file style name.
+ :type off_file: string
+
+    :param min_squared_dist: Minimum squared distance separating the output points.
+    :type min_squared_dist: float.
+ :returns: The subsample point set.
+ :rtype: vector[vector[double]]
+ """
+    if off_file != '':
+ if os.path.isfile(off_file):
+ return subsampling_sparsify_points_from_file(str.encode(off_file),
+ min_squared_dist)
+ else:
+ print("file " + off_file + " not found.")
+ else:
+ if points is None:
+ # Empty points
+ points=[]
+ return subsampling_sparsify_points(points, min_squared_dist)
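+
+# Minimal usage sketches, kept as comments so that nothing is executed at
+# import time; the point coordinates are hypothetical toy data:
+#
+#   points = [[0., 0.], [1., 0.], [0., 1.], [10., 10.]]
+#   farthest = choose_n_farthest_points(points=points, nb_points=2, starting_point=0)
+#   random_subset = pick_n_random_points(points=points, nb_points=2)
+#   sparse = sparsify_point_set(points=points, min_squared_dist=2.0)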
diff --git a/src/cython/cython/tangential_complex.pyx b/src/cython/cython/tangential_complex.pyx
new file mode 100644
index 00000000..d55bb050
--- /dev/null
+++ b/src/cython/cython/tangential_complex.pyx
@@ -0,0 +1,151 @@
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.utility cimport pair
+from libcpp.string cimport string
+from libcpp cimport bool
+import os
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+cdef extern from "Tangential_complex_interface.h" namespace "Gudhi":
+ cdef cppclass Tangential_complex_interface "Gudhi::tangential_complex::Tangential_complex_interface":
+ Tangential_complex_interface(vector[vector[double]] points)
+ # bool from_file is a workaround for cython to find the correct signature
+ Tangential_complex_interface(string off_file, bool from_file)
+ vector[double] get_point(unsigned vertex)
+ unsigned number_of_vertices()
+ unsigned number_of_simplices()
+ unsigned number_of_inconsistent_simplices()
+ unsigned number_of_inconsistent_stars()
+ void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree)
+ void fix_inconsistencies_using_perturbation(double max_perturb, double time_limit)
+
+# TangentialComplex python interface
+cdef class TangentialComplex:
+ """The class Tangential_complex represents a tangential complex. After the
+ computation of the complex, an optional post-processing called perturbation
+ can be run to attempt to remove inconsistencies.
+ """
+
+ cdef Tangential_complex_interface * thisptr
+
+ # Fake constructor that does nothing but documenting the constructor
+ def __init__(self, points=None, off_file=''):
+ """TangentialComplex constructor.
+
+ :param points: A list of points in d-Dimension.
+ :type points: list of list of double
+
+ Or
+
+ :param off_file: An OFF file style name.
+ :type off_file: string
+ """
+
+ # The real cython constructor
+ def __cinit__(self, points=None, off_file=''):
+        if off_file != '':
+ if os.path.isfile(off_file):
+ self.thisptr = new Tangential_complex_interface(str.encode(off_file), True)
+ else:
+ print("file " + off_file + " not found.")
+ else:
+ if points is None:
+ # Empty tangential construction
+ points=[]
+ self.thisptr = new Tangential_complex_interface(points)
+
+
+ def __dealloc__(self):
+ if self.thisptr != NULL:
+ del self.thisptr
+
+ def __is_defined(self):
+ """Returns true if TangentialComplex pointer is not NULL.
+ """
+ return self.thisptr != NULL
+
+ def get_point(self, vertex):
+ """This function returns the point corresponding to a given vertex.
+
+ :param vertex: The vertex.
+ :type vertex: int.
+ :returns: The point.
+ :rtype: list of float
+ """
+ cdef vector[double] point = self.thisptr.get_point(vertex)
+ return point
+
+ def num_vertices(self):
+ """
+ :returns: The number of vertices.
+ :rtype: unsigned
+ """
+ return self.thisptr.number_of_vertices()
+
+ def num_simplices(self):
+ """
+ :returns: Total number of simplices in stars (including duplicates that appear in several stars).
+ :rtype: unsigned
+ """
+ return self.thisptr.number_of_simplices()
+
+ def num_inconsistent_simplices(self):
+ """
+ :returns: The number of inconsistent simplices.
+ :rtype: unsigned
+ """
+ return self.thisptr.number_of_inconsistent_simplices()
+
+ def num_inconsistent_stars(self):
+ """
+ :returns: The number of stars containing at least one inconsistent simplex.
+ :rtype: unsigned
+ """
+ return self.thisptr.number_of_inconsistent_stars()
+
+ def create_simplex_tree(self):
+ """Exports the complex into a simplex tree.
+
+ :returns: A simplex tree created from the complex.
+ :rtype: SimplexTree
+ """
+ simplex_tree = SimplexTree()
+ self.thisptr.create_simplex_tree(simplex_tree.thisptr)
+ return simplex_tree
+
+ def fix_inconsistencies_using_perturbation(self, max_perturb, time_limit=-1.0):
+ """Attempts to fix inconsistencies by perturbing the point positions.
+
+ :param max_perturb: Maximum length of the translations used by the
+ perturbation.
+ :type max_perturb: double
+ :param time_limit: Time limit in seconds. If -1, no time limit is set.
+ :type time_limit: double
+ """
+ self.thisptr.fix_inconsistencies_using_perturbation(max_perturb,
+ time_limit)
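+
+# A minimal usage sketch, kept as a comment so that nothing is executed at
+# import time; the point coordinates are hypothetical toy data:
+#
+#   tc = TangentialComplex(points=[[0., 0.], [1., 0.], [0., 1.], [1., 1.]])
+#   if tc.num_inconsistent_simplices() > 0:
+#       tc.fix_inconsistencies_using_perturbation(max_perturb=0.01, time_limit=10.0)
+#   simplex_tree = tc.create_simplex_tree()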
diff --git a/src/cython/cython/witness_complex.pyx b/src/cython/cython/witness_complex.pyx
new file mode 100644
index 00000000..96d122bb
--- /dev/null
+++ b/src/cython/cython/witness_complex.pyx
@@ -0,0 +1,81 @@
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.utility cimport pair
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+cdef extern from "Witness_complex_interface.h" namespace "Gudhi":
+ cdef cppclass Witness_complex_interface "Gudhi::witness_complex::Witness_complex_interface":
+ Witness_complex_interface(vector[vector[pair[size_t, double]]] nearest_landmark_table)
+ void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square)
+ void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square,
+ unsigned limit_dimension)
+
+# WitnessComplex python interface
+cdef class WitnessComplex:
+    """Constructs a (weak) witness complex for a given table of nearest landmarks
+ with respect to witnesses.
+ """
+
+ cdef Witness_complex_interface * thisptr
+
+ # Fake constructor that does nothing but documenting the constructor
+ def __init__(self, nearest_landmark_table=None):
+ """WitnessComplex constructor.
+
+        :param nearest_landmark_table: A list of nearest landmarks for each witness, given as (landmark index, distance) pairs.
+ :type nearest_landmark_table: list of list of pair of unsigned and double
+ """
+
+ # The real cython constructor
+ def __cinit__(self, nearest_landmark_table=None):
+ if nearest_landmark_table is not None:
+ self.thisptr = new Witness_complex_interface(nearest_landmark_table)
+
+ def __dealloc__(self):
+ if self.thisptr != NULL:
+ del self.thisptr
+
+ def __is_defined(self):
+ """Returns true if WitnessComplex pointer is not NULL.
+ """
+ return self.thisptr != NULL
+
+ def create_simplex_tree(self, max_alpha_square, limit_dimension = -1):
+        """
+        :param max_alpha_square: The maximum alpha square threshold the
+            simplices shall not exceed.
+        :type max_alpha_square: float
+        :param limit_dimension: Maximal dimension of the simplices to be
+            inserted (optional; -1, the default, means no limit).
+        :type limit_dimension: int
+        :returns: A simplex tree built from the given nearest landmark table.
+        :rtype: SimplexTree
+        """
+ simplex_tree = SimplexTree()
+        if limit_dimension != -1:
+ self.thisptr.create_simplex_tree(simplex_tree.thisptr, max_alpha_square, limit_dimension)
+ else:
+ self.thisptr.create_simplex_tree(simplex_tree.thisptr, max_alpha_square)
+ return simplex_tree
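+
+# A minimal usage sketch, kept as a comment so that nothing is executed at
+# import time. The nearest landmark table is a hypothetical toy input; the
+# optional limit_dimension argument caps the dimension of the inserted
+# simplices.
+#
+#   witnesses = WitnessComplex(nearest_landmark_table=[[(0, 0.0), (1, 0.1), (2, 0.3)],
+#                                                      [(1, 0.0), (2, 0.2), (0, 0.4)]])
+#   simplex_tree = witnesses.create_simplex_tree(max_alpha_square=0.5, limit_dimension=2)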
diff --git a/src/cython/cythonize_gudhi.py.in b/src/cython/cythonize_gudhi.py.in
new file mode 100644
index 00000000..5a97e9f3
--- /dev/null
+++ b/src/cython/cythonize_gudhi.py.in
@@ -0,0 +1,48 @@
+from distutils.core import setup, Extension
+from Cython.Build import cythonize
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+gudhi = Extension(
+ "gudhi",
+ sources = ['gudhi.pyx',],
+ language = 'c++',
+ extra_compile_args=[@GUDHI_CYTHON_EXTRA_COMPILE_ARGS@],
+ extra_link_args=[@GUDHI_CYTHON_EXTRA_LINK_ARGS@],
+ libraries=[@GUDHI_CYTHON_LIBRARIES@],
+ library_dirs=[@GUDHI_CYTHON_LIBRARY_DIRS@],
+ include_dirs = [@GUDHI_CYTHON_INCLUDE_DIRS@],
+)
+
+setup(
+ name = 'gudhi',
+ author='Vincent Rouvreau',
+ author_email='gudhi-contact@lists.gforge.inria.fr',
+ version='0.1.0',
+ url='http://gudhi.gforge.inria.fr/',
+ ext_modules = cythonize(gudhi),
+)
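+
+# Note: the @...@ placeholders above are substituted by CMake at configuration
+# time. The resulting script is then run through distutils, typically as
+# `python cythonize_gudhi.py build_ext --inplace`; the exact invocation is
+# driven by the CMake build, so this command line is indicative only.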
diff --git a/src/cython/doc/Makefile.in b/src/cython/doc/Makefile.in
new file mode 100644
index 00000000..526350b3
--- /dev/null
+++ b/src/cython/doc/Makefile.in
@@ -0,0 +1,44 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = @SPHINX_PATH@
+PAPER =
+BUILDDIR = _build
+
+# User-friendly check for sphinx-build
+ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
+$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
+endif
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
+
+help:
+ @echo "Please use \`make <target>' where <target> is one of"
+ @echo " html to make standalone HTML files"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+ rm -f examples.inc
+ rm -rf $(BUILDDIR)/*
+
+# GUDHI specific : Examples.inc is generated with generate_examples.py (and deleted on clean)
+
+html:
+ ./generate_examples.py
+ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+doctest:
+ $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in $(BUILDDIR)/doctest/output.txt."
diff --git a/src/cython/doc/_templates/layout.html b/src/cython/doc/_templates/layout.html
new file mode 100644
index 00000000..af0eef64
--- /dev/null
+++ b/src/cython/doc/_templates/layout.html
@@ -0,0 +1,270 @@
+{#
+ basic/layout.html
+ ~~~~~~~~~~~~~~~~~
+
+ Master layout template for Sphinx themes.
+
+ :copyright: Copyright 2007-2016 by the Sphinx team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+#}
+{%- block doctype -%}
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+{%- endblock %}
+{%- set reldelim1 = reldelim1 is not defined and ' &raquo;' or reldelim1 %}
+{%- set reldelim2 = reldelim2 is not defined and ' |' or reldelim2 %}
+{%- set render_sidebar = (not embedded) and (not theme_nosidebar|tobool) and
+ (sidebars != []) %}
+{%- set url_root = pathto('', 1) %}
+{# XXX necessary? #}
+{%- if url_root == '#' %}{% set url_root = '' %}{% endif %}
+{%- if not embedded and docstitle %}
+ {%- set titlesuffix = " &mdash; "|safe + docstitle|e %}
+{%- else %}
+ {%- set titlesuffix = "" %}
+{%- endif %}
+
+{%- macro relbar() %}
+ <div class="related" role="navigation" aria-label="related navigation">
+ <h3>{{ _('Navigation') }}</h3>
+ <ul>
+ {%- for rellink in rellinks %}
+ <li class="right" {% if loop.first %}style="margin-right: 10px"{% endif %}>
+ <a href="{{ pathto(rellink[0]) }}" title="{{ rellink[1]|striptags|e }}"
+ {{ accesskey(rellink[2]) }}>{{ rellink[3] }}</a>
+ {%- if not loop.first %}{{ reldelim2 }}{% endif %}</li>
+ {%- endfor %}
+ {%- block rootrellink %}
+ <li class="nav-item nav-item-0"><a href="{{ pathto(master_doc) }}">{{ shorttitle|e }}</a>{{ reldelim1 }}</li>
+ {%- endblock %}
+ {%- for parent in parents %}
+ <li class="nav-item nav-item-{{ loop.index }}"><a href="{{ parent.link|e }}" {% if loop.last %}{{ accesskey("U") }}{% endif %}>{{ parent.title }}</a>{{ reldelim1 }}</li>
+ {%- endfor %}
+ {%- block relbaritems %} {% endblock %}
+ </ul>
+ </div>
+{%- endmacro %}
+
+{%- macro sidebar() %}
+ {%- if render_sidebar %}
+ <div class="sphinxsidebar" role="navigation" aria-label="main navigation">
+ <div class="sphinxsidebarwrapper">
+ {%- block sidebarlogo %}
+ {%- if logo %}
+ <p class="logo"><a href="{{ pathto(master_doc) }}">
+ <img class="logo" src="{{ pathto('_static/' + logo, 1) }}" alt="Logo"/>
+ </a></p>
+ {%- endif %}
+ {%- endblock %}
+ {%- if sidebars != None %}
+ {#- new style sidebar: explicitly include/exclude templates #}
+ {%- for sidebartemplate in sidebars %}
+ {%- include sidebartemplate %}
+ {%- endfor %}
+ {%- else %}
+ {#- old style sidebars: using blocks -- should be deprecated #}
+ {%- block sidebartoc %}
+<h2><a href="index.html">GUDHI</a></h2>
+<h2><a href="installation.html">GUDHI installation</a></h2>
+<h2><a href="citation.html">Acknowledging the GUDHI library</a></h2>
+<h2><a href="genindex.html">Index</a></h2>
+<h2><a href="examples.html">Examples</a></h2>
+
+ {%- include "localtoc.html" %}
+ {%- endblock %}
+ {%- block sidebarrel %}
+ {%- include "relations.html" %}
+ {%- endblock %}
+ {%- block sidebarsourcelink %}
+ {%- include "sourcelink.html" %}
+ {%- endblock %}
+ {%- if customsidebar %}
+ {%- include customsidebar %}
+ {%- endif %}
+ {%- block sidebarsearch %}
+ {%- include "searchbox.html" %}
+ {%- endblock %}
+ {%- endif %}
+ </div>
+ </div>
+ {%- endif %}
+{%- endmacro %}
+
+{%- macro script() %}
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: '{{ url_root }}',
+ VERSION: '{{ release|e }}',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '{{ '' if no_search_suffix else file_suffix }}',
+ HAS_SOURCE: {{ has_source|lower }}
+ };
+ </script>
+ {%- for scriptfile in script_files %}
+ <script type="text/javascript" src="{{ pathto(scriptfile, 1) }}"></script>
+ {%- endfor %}
+{%- endmacro %}
+
+{%- macro css() %}
+<!-- GUDHI website css for header BEGIN -->
+<link rel="stylesheet" type="text/css" href="http://gudhi.gforge.inria.fr/assets/css/styles_feeling_responsive.css" />
+<!-- GUDHI website css for header END -->
+ <link rel="stylesheet" href="{{ pathto('_static/' + style, 1) }}" type="text/css" />
+ <link rel="stylesheet" href="{{ pathto('_static/pygments.css', 1) }}" type="text/css" />
+ {%- for cssfile in css_files %}
+ <link rel="stylesheet" href="{{ pathto(cssfile, 1) }}" type="text/css" />
+ {%- endfor %}
+{%- endmacro %}
+<!-- GUDHI website html class for header BEGIN -->
+<html xmlns="http://www.w3.org/1999/xhtml" class="no-js" lang="en">
+<!-- GUDHI website html class for header END -->
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset={{ encoding }}" />
+ {{ metatags }}
+ {%- block htmltitle %}
+ <title>{{ title|striptags|e }}{{ titlesuffix }}</title>
+ {%- endblock %}
+ {{ css() }}
+ {%- if not embedded %}
+ {{ script() }}
+ {%- if use_opensearch %}
+ <link rel="search" type="application/opensearchdescription+xml"
+ title="{% trans docstitle=docstitle|e %}Search within {{ docstitle }}{% endtrans %}"
+ href="{{ pathto('_static/opensearch.xml', 1) }}"/>
+ {%- endif %}
+ {%- if favicon %}
+ <link rel="shortcut icon" href="{{ pathto('_static/' + favicon, 1) }}"/>
+ {%- endif %}
+ {%- endif %}
+{%- block linktags %}
+ {%- if hasdoc('about') %}
+ <link rel="author" title="{{ _('About these documents') }}" href="{{ pathto('about') }}" />
+ {%- endif %}
+ {%- if hasdoc('genindex') %}
+ <link rel="index" title="{{ _('Index') }}" href="{{ pathto('genindex') }}" />
+ {%- endif %}
+ {%- if hasdoc('search') %}
+ <link rel="search" title="{{ _('Search') }}" href="{{ pathto('search') }}" />
+ {%- endif %}
+ {%- if hasdoc('copyright') %}
+ <link rel="copyright" title="{{ _('Copyright') }}" href="{{ pathto('copyright') }}" />
+ {%- endif %}
+ <link rel="top" title="{{ docstitle|e }}" href="{{ pathto(master_doc) }}" />
+ {%- if parents %}
+ <link rel="up" title="{{ parents[-1].title|striptags|e }}" href="{{ parents[-1].link|e }}" />
+ {%- endif %}
+ {%- if next %}
+ <link rel="next" title="{{ next.title|striptags|e }}" href="{{ next.link|e }}" />
+ {%- endif %}
+ {%- if prev %}
+ <link rel="prev" title="{{ prev.title|striptags|e }}" href="{{ prev.link|e }}" />
+ {%- endif %}
+{%- endblock %}
+{%- block extrahead %} {% endblock %}
+ </head>
+ <body role="document">
+ <!-- GUDHI website header BEGIN -->
+ <div id="navigation" class="sticky">
+ <nav class="top-bar" role="navigation" data-topbar>
+ <ul class="title-area">
+ <li class="name">
+ <h1 class="show-for-small-only"><a href="http://gudhi.gforge.inria.fr" class="icon-tree"> GUDHI C++ library</a></h1>
+ </li>
+ <!-- Remove the class "menu-icon" to get rid of menu icon. Take out "Menu" to just have icon alone -->
+ <li class="toggle-topbar menu-icon"><a href="#"><span>Navigation</span></a></li>
+ </ul>
+ <section class="top-bar-section">
+ <ul class="right">
+ <li class="divider"></li>
+ <li><a href="http://gudhi.gforge.inria.fr/contact/">Contact</a></li>
+ </ul>
+ <ul class="left">
+ <li><a href="http://gudhi.gforge.inria.fr/"> <img src="http://gudhi.gforge.inria.fr/assets/img/home.png" alt="&nbsp;&nbsp;GUDHI">&nbsp;&nbsp;GUDHI </a></li>
+ <li class="divider"></li>
+ <li class="has-dropdown">
+ <a href="#">Project</a>
+ <ul class="dropdown">
+ <li><a href="http://gudhi.gforge.inria.fr/people/">People</a></li>
+ <li><a href="http://gudhi.gforge.inria.fr/keepintouch/">Keep in touch</a></li>
+ <li><a href="http://gudhi.gforge.inria.fr/partners/">Partners and Funding</a></li>
+ <li><a href="http://gudhi.gforge.inria.fr/relatedprojects/">Related projects</a></li>
+ <li><a href="http://gudhi.gforge.inria.fr/theyaretalkingaboutus/">They are talking about us</a></li>
+ </ul>
+ </li>
+ <li class="divider"></li>
+ <li class="has-dropdown">
+ <a href="#">Download</a>
+ <ul class="dropdown">
+ <li><a href="http://gudhi.gforge.inria.fr/licensing/">Licensing</a></li>
+ <li><a href="https://gforge.inria.fr/frs/?group_id=3865" target="_blank">Get the sources</a></li>
+ </ul>
+ </li>
+ <li class="divider"></li>
+ <li class="has-dropdown">
+ <a href="#">Documentation</a>
+ <ul class="dropdown">
+ <li><a href="http://gudhi.gforge.inria.fr/doc/latest/">C++ documentation</a></li>
+ <li><a href="http://gudhi.gforge.inria.fr/doc/latest/installation.html">C++ installation manual</a></li>
+ <li><a href="http://gudhi.gforge.inria.fr/cython/latest/">Cython documentation</a></li>
+ <li><a href="http://gudhi.gforge.inria.fr/cython/latest/installation.html">Cython installation manual</a></li>
+ </ul>
+ </li>
+ <li class="divider"></li>
+ <li><a href="http://gudhi.gforge.inria.fr/interfaces/">Interfaces</a></li>
+ <li class="divider"></li>
+ </ul>
+ </section>
+ </nav>
+ </div><!-- /#navigation -->
+    <!-- GUDHI website header END -->
+
+
+{%- block header %}{% endblock %}
+
+{%- block relbar1 %}{% endblock %}
+
+{%- block content %}
+ {%- block sidebar1 %} {# possible location for sidebar #} {% endblock %}
+
+ <div class="document">
+ {%- block document %}
+ <div class="documentwrapper">
+ {%- if render_sidebar %}
+ <div class="bodywrapper">
+ {%- endif %}
+ <div class="body" role="main">
+ {% block body %} {% endblock %}
+ </div>
+ {%- if render_sidebar %}
+ </div>
+ {%- endif %}
+ </div>
+ {%- endblock %}
+
+ {%- block sidebar2 %}{{ sidebar() }}{% endblock %}
+ <div class="clearer"></div>
+ </div>
+{%- endblock %}
+
+{%- block relbar2 %}{% endblock %}
+
+{%- block footer %}
+ <div class="footer" role="contentinfo">
+ {%- if show_copyright %}
+ {%- if hasdoc('copyright') %}
+ {% trans path=pathto('copyright'), copyright=copyright|e %}&copy; <a href="{{ path }}">Copyright</a> {{ copyright }}.{% endtrans %}
+ {%- else %}
+ {% trans copyright=copyright|e %}&copy; Copyright {{ copyright }}.{% endtrans %}
+ {%- endif %}
+ {%- endif %}
+ {%- if last_updated %}
+ {% trans last_updated=last_updated|e %}Last updated on {{ last_updated }}.{% endtrans %}
+ {%- endif %}
+ {%- if show_sphinx %}
+ {% trans sphinx_version=sphinx_version|e %}Created using <a href="http://sphinx-doc.org/">Sphinx</a> {{ sphinx_version }}.{% endtrans %}
+ {%- endif %}
+ </div>
+{%- endblock %}
+ </body>
+</html>
+
diff --git a/src/cython/doc/alpha_complex_ref.rst b/src/cython/doc/alpha_complex_ref.rst
new file mode 100644
index 00000000..6a122b09
--- /dev/null
+++ b/src/cython/doc/alpha_complex_ref.rst
@@ -0,0 +1,10 @@
+==============================
+Alpha complex reference manual
+==============================
+
+.. autoclass:: gudhi.AlphaComplex
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+ .. automethod:: gudhi.AlphaComplex.__init__
diff --git a/src/cython/doc/alpha_complex_sum.rst b/src/cython/doc/alpha_complex_sum.rst
new file mode 100644
index 00000000..8437e901
--- /dev/null
+++ b/src/cython/doc/alpha_complex_sum.rst
@@ -0,0 +1,22 @@
+================================================================= =================================== ===================================
+:Author: Vincent Rouvreau :Introduced in: GUDHI 2.0.0 :Copyright: GPL v3
+:Requires: CGAL :math:`\geq` 4.7.0 Eigen3
+================================================================= =================================== ===================================
+
++----------------------------------------------------------------+------------------------------------------------------------------------+
+| .. figure:: | Alpha_complex is a simplicial complex constructed from the finite |
+| img/alpha_complex_representation.png | cells of a Delaunay Triangulation. |
+| :alt: Alpha complex representation | |
+| :figclass: align-center | The filtration value of each simplex is computed as the square of the |
+| | circumradius of the simplex if the circumsphere is empty (the simplex |
+| Alpha complex representation | is then said to be Gabriel), and as the minimum of the filtration |
+| | values of the codimension 1 cofaces that make it not Gabriel |
+| | otherwise. All simplices that have a filtration value strictly |
+| | greater than a given alpha squared value are not inserted into the |
+| | complex. |
+| | |
+| | This package requires having CGAL version 4.7 or higher (4.8.1 is |
+| | advised for better perfomances). |
++----------------------------------------------------------------+------------------------------------------------------------------------+
+| :doc:`alpha_complex_user` | :doc:`alpha_complex_ref` |
++----------------------------------------------------------------+------------------------------------------------------------------------+
diff --git a/src/cython/doc/alpha_complex_user.rst b/src/cython/doc/alpha_complex_user.rst
new file mode 100644
index 00000000..68e53a77
--- /dev/null
+++ b/src/cython/doc/alpha_complex_user.rst
@@ -0,0 +1,205 @@
+=========================
+Alpha complex user manual
+=========================
+Definition
+----------
+
+.. include:: alpha_complex_sum.rst
+
+Alpha_complex constructs a :doc:`Simplex_tree <simplex_tree_sum>` using
+`Delaunay Triangulation <http://doc.cgal.org/latest/Triangulation/index.html#Chapter_Triangulations>`_
+:cite:`cgal:hdj-t-15b` from `CGAL <http://www.cgal.org/>`_ (the Computational Geometry Algorithms Library
+:cite:`cgal:eb-15b`).
+
+Remarks
+^^^^^^^
+When Alpha_complex is constructed with an infinite value of :math:`\alpha`, the complex is a Delaunay complex.
+
+Example from points
+-------------------
+
+This example builds the Delaunay triangulation from the given points, and initializes the alpha complex with it:
+
+.. testcode::
+
+ import gudhi
+ alpha_complex = gudhi.AlphaComplex(points=[[1, 1], [7, 0], [4, 6], [9, 6], [0, 14], [2, 19], [9, 17]])
+
+ simplex_tree = alpha_complex.create_simplex_tree(max_alpha_square=60.0)
+ result_str = 'Alpha complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \
+ repr(simplex_tree.num_simplices()) + ' simplices - ' + \
+ repr(simplex_tree.num_vertices()) + ' vertices.'
+ print(result_str)
+ for filtered_value in simplex_tree.get_filtered_tree():
+ print(filtered_value)
+
+The output is:
+
+.. testoutput::
+
+ Alpha complex is of dimension 2 - 25 simplices - 7 vertices.
+ ([0], 0.0)
+ ([1], 0.0)
+ ([2], 0.0)
+ ([3], 0.0)
+ ([4], 0.0)
+ ([5], 0.0)
+ ([6], 0.0)
+ ([2, 3], 6.25)
+ ([4, 5], 7.25)
+ ([0, 2], 8.5)
+ ([0, 1], 9.25)
+ ([1, 3], 10.0)
+ ([1, 2], 11.25)
+ ([1, 2, 3], 12.5)
+ ([0, 1, 2], 12.995867768595042)
+ ([5, 6], 13.25)
+ ([2, 4], 20.0)
+ ([4, 6], 22.736686390532547)
+ ([4, 5, 6], 22.736686390532547)
+ ([3, 6], 30.25)
+ ([2, 6], 36.5)
+ ([2, 3, 6], 36.5)
+ ([2, 4, 6], 37.24489795918368)
+ ([0, 4], 59.710743801652896)
+ ([0, 2, 4], 59.710743801652896)
+
+
+Algorithm
+---------
+
+Data structure
+^^^^^^^^^^^^^^
+
+In order to build the alpha complex, first, a Simplex tree is built from the cells of a Delaunay Triangulation.
+(The filtration value is set to NaN, which stands for an unknown value):
+
+.. figure::
+ img/alpha_complex_doc.png
+ :figclass: align-center
+ :alt: Simplex tree structure construction example
+
+ Simplex tree structure construction example
+
+Filtration value computation algorithm
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+ **for** i : dimension :math:`\rightarrow` 0 **do**
+ **for all** :math:`\sigma` of dimension i
+ **if** filtration(:math:`\sigma`) is NaN **then**
+ filtration(:math:`\sigma`) = :math:`\alpha^2(\sigma)`
+ **end if**
+
+ *//propagate alpha filtration value*
+
+ **for all** :math:`\tau` face of :math:`\sigma`
+                **if** filtration(:math:`\tau`) is not NaN **then**
+                    filtration(:math:`\tau`) = min( filtration(:math:`\tau`), filtration(:math:`\sigma`) )
+                **else**
+                    **if** :math:`\tau` is not Gabriel for :math:`\sigma` **then**
+                        filtration(:math:`\tau`) = filtration(:math:`\sigma`)
+                    **end if**
+                **end if**
+ **end for**
+ **end for**
+ **end for**
+
+ make_filtration_non_decreasing()
+
+ prune_above_filtration()
+
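+The following Python sketch mirrors this pseudo-code. It is illustrative only
+(GUDHI performs the computation in C++ when ``create_simplex_tree`` is called);
+simplices are represented as sorted tuples of vertices, and the helpers
+``alpha_square`` (squared circumradius of a simplex) and ``is_gabriel``
+(circumsphere emptiness test with respect to the remaining vertex) are assumed
+to be provided:
+
+.. code-block:: python
+
+    import math
+    from itertools import combinations
+
+    def codim_1_faces(sigma):
+        # codimension 1 faces of a simplex given as a tuple of vertices
+        if len(sigma) < 2:
+            return []
+        return list(combinations(sigma, len(sigma) - 1))
+
+    def compute_alpha_filtration(simplices_by_dim, filtration, alpha_square, is_gabriel):
+        # simplices_by_dim: {dimension: list of simplices (sorted vertex tuples)}
+        # filtration: {simplex: value}, math.nan standing for "unknown"
+        for dim in sorted(simplices_by_dim, reverse=True):
+            for sigma in simplices_by_dim[dim]:
+                if math.isnan(filtration[sigma]):
+                    filtration[sigma] = alpha_square(sigma)
+                # propagate the alpha filtration value to the codimension 1 faces
+                for tau in codim_1_faces(sigma):
+                    if not math.isnan(filtration[tau]):
+                        filtration[tau] = min(filtration[tau], filtration[sigma])
+                    elif not is_gabriel(tau, sigma):
+                        filtration[tau] = filtration[sigma]
+        return filtration
+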
+Dimension 2
+^^^^^^^^^^^
+
+In the example above, the algorithm looks at each triangle ([0,1,2], [0,2,4], [1,2,3], ...),
+computes the filtration value of the triangle, and then propagates the filtration value as described
+here:
+
+.. figure::
+ img/alpha_complex_doc_420.png
+ :figclass: align-center
+ :alt: Filtration value propagation example
+
+ Filtration value propagation example
+
+Dimension 1
+^^^^^^^^^^^
+
+Then, the algorithm looks into each edge ([0,1], [0,2], [1,2], ...),
+computes the filtration value of the edge (in this case, propagation will have no effect).
+
+Dimension 0
+^^^^^^^^^^^
+
+Finally, the algorithm looks into each vertex ([0], [1], [2], [3], [4], [5] and [6]) and
+sets the filtration value (0 in case of a vertex - propagation will have no effect).
+
+Non decreasing filtration values
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+As the squared radii computed by CGAL are an approximation, it might happen that these alpha squared values do not
+quite define a proper filtration (i.e. non-decreasing with respect to inclusion).
+We fix that up by calling `Simplex_tree::make_filtration_non_decreasing()` (cf.
+`C++ version <http://gudhi.gforge.inria.fr/doc/latest/index.html>`_).
+
+Prune above given filtration value
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The simplex tree is pruned above the given maximum alpha squared value (cf. `Simplex_tree::prune_above_filtration()`
+in the `C++ version <http://gudhi.gforge.inria.fr/doc/latest/index.html>`_).
+In the following example, the value is given by the user as argument of the program.
+
+
+Example from OFF file
+^^^^^^^^^^^^^^^^^^^^^
+
+This example builds the Delaunay triangulation from the points given by an OFF file, and initializes the alpha complex
+with it.
+
+
+Then, the program displays information about the alpha complex:
+
+.. testcode::
+
+ import gudhi
+ alpha_complex = gudhi.AlphaComplex(off_file='alphacomplexdoc.off')
+ simplex_tree = alpha_complex.create_simplex_tree(max_alpha_square=59.0)
+ result_str = 'Alpha complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \
+ repr(simplex_tree.num_simplices()) + ' simplices - ' + \
+ repr(simplex_tree.num_vertices()) + ' vertices.'
+ print(result_str)
+ for filtered_value in simplex_tree.get_filtered_tree():
+ print(filtered_value)
+
+The program output is:
+
+.. testoutput::
+
+ Alpha complex is of dimension 2 - 23 simplices - 7 vertices.
+ ([0], 0.0)
+ ([1], 0.0)
+ ([2], 0.0)
+ ([3], 0.0)
+ ([4], 0.0)
+ ([5], 0.0)
+ ([6], 0.0)
+ ([2, 3], 6.25)
+ ([4, 5], 7.25)
+ ([0, 2], 8.5)
+ ([0, 1], 9.25)
+ ([1, 3], 10.0)
+ ([1, 2], 11.25)
+ ([1, 2, 3], 12.5)
+ ([0, 1, 2], 12.995867768595042)
+ ([5, 6], 13.25)
+ ([2, 4], 20.0)
+ ([4, 6], 22.736686390532547)
+ ([4, 5, 6], 22.736686390532547)
+ ([3, 6], 30.25)
+ ([2, 6], 36.5)
+ ([2, 3, 6], 36.5)
+ ([2, 4, 6], 37.24489795918368)
+
+==============
+CGAL citations
+==============
+
+.. bibliography:: how_to_cite_cgal.bib
+ :filter: docnames
+ :style: unsrt
diff --git a/src/cython/doc/bottleneck_distance_sum.rst b/src/cython/doc/bottleneck_distance_sum.rst
new file mode 100644
index 00000000..5c475d0d
--- /dev/null
+++ b/src/cython/doc/bottleneck_distance_sum.rst
@@ -0,0 +1,15 @@
+================================================================= =================================== ===================================
+:Author: François Godi :Introduced in: GUDHI 2.0.0 :Copyright: GPL v3
+:Requires: CGAL :math:`\geq` 4.8.0
+================================================================= =================================== ===================================
+
++-----------------------------------------------------------------+----------------------------------------------------------------------+
+| .. figure:: | Bottleneck distance measures the similarity between two persistence |
+| img/perturb_pd.png | diagrams. It's the shortest distance b for which there exists a |
+| :figclass: align-center | perfect matching between the points of the two diagrams (+ all the |
+| | diagonal points) such that any couple of matched points are at |
+| Bottleneck distance is the length of | distance at most b. |
+| the longest edge | |
++-----------------------------------------------------------------+----------------------------------------------------------------------+
+| :doc:`bottleneck_distance_user` | |
++-----------------------------------------------------------------+----------------------------------------------------------------------+
diff --git a/src/cython/doc/bottleneck_distance_user.rst b/src/cython/doc/bottleneck_distance_user.rst
new file mode 100644
index 00000000..3bc170f4
--- /dev/null
+++ b/src/cython/doc/bottleneck_distance_user.rst
@@ -0,0 +1,37 @@
+===============================
+Bottleneck distance user manual
+===============================
+Definition
+----------
+
+.. include:: bottleneck_distance_sum.rst
+
+Function
+--------
+.. autofunction:: gudhi.bottleneck_distance
+
+
+Basic example
+-------------
+
+This example computes the bottleneck distance between 2 persistence diagrams:
+
+.. testcode::
+
+ import gudhi
+
+ diag1 = [[2.7, 3.7],[9.6, 14.],[34.2, 34.974], [3.,float('Inf')]]
+ diag2 = [[2.8, 4.45],[9.5, 14.1],[3.2,float('Inf')]]
+
+ message = "Bottleneck distance approximation=" + repr(gudhi.bottleneck_distance(diag1, diag2, 0.1))
+ print(message)
+
+ message = "Bottleneck distance exact value=" + repr(gudhi.bottleneck_distance(diag1, diag2))
+ print(message)
+
+The output is:
+
+.. testoutput::
+
+ Bottleneck distance approximation=0.8081763781405569
+ Bottleneck distance exact value=0.75
diff --git a/src/cython/doc/citation.rst b/src/cython/doc/citation.rst
new file mode 100644
index 00000000..6cdfb7cc
--- /dev/null
+++ b/src/cython/doc/citation.rst
@@ -0,0 +1,15 @@
+Acknowledging the GUDHI library
+###############################
+
+We kindly ask users to cite the GUDHI library as appropriately as possible in
+their papers, and to mention the use of the GUDHI library on the web pages of
+their projects using GUDHI and provide us with links to these web pages. Feel
+free to contact us in case you have any question or remark on this topic.
+
+We provide GUDHI bibtex entries for the modules of the User and Reference
+Manual, as well as for publications directly related to the GUDHI library.
+
+GUDHI bibtex
+************
+
+.. literalinclude:: how_to_cite_gudhi.bib
diff --git a/src/cython/doc/conf.py b/src/cython/doc/conf.py
new file mode 100755
index 00000000..42bfd59c
--- /dev/null
+++ b/src/cython/doc/conf.py
@@ -0,0 +1,278 @@
+# -*- coding: utf-8 -*-
+#
+# GUDHI documentation build configuration file, created by
+# sphinx-quickstart on Thu Jun 30 09:55:51 2016.
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys
+import os
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#sys.path.insert(0, os.path.abspath('.'))
+
+# Path to Gudhi.so from source path
+sys.path.insert(0, os.path.abspath('..'))
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+ 'matplotlib.sphinxext.plot_directive',
+ 'sphinx.ext.autodoc',
+ 'sphinx.ext.doctest',
+ 'sphinx.ext.todo',
+ 'sphinx.ext.mathjax',
+ 'sphinx.ext.ifconfig',
+ 'sphinx.ext.viewcode',
+ 'sphinxcontrib.bibtex',
+]
+
+todo_include_todos = True
+# plot option : do not show hyperlinks (Source code, png, hires.png, pdf)
+plot_html_show_source_link = False
+plot_html_show_formats = False
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The encoding of source files.
+#source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'GUDHI'
+copyright = u'2016, GUDHI Editorial Board'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = '2.0'
+# The full version, including alpha/beta/rc tags.
+release = '2.0.0'
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ['_build']
+
+# The reST default role (used for this markup: `text`) to use for all
+# documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+# If true, keep warnings as "system message" paragraphs in the built documents.
+#keep_warnings = False
+
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+html_theme = 'classic'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+html_theme_options = {
+ "sidebarbgcolor": "#A1ADCD",
+ "sidebartextcolor": "black",
+ "sidebarlinkcolor": "#334D5C",
+}
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo =
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon =
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# Add any extra paths that contain custom files (such as robots.txt or
+# .htaccess) here, relative to this directory. These files are copied
+# directly to the root of the documentation.
+#html_extra_path = []
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {'installation': 'installation.html'}
+
+# If false, no module index is generated.
+#html_domain_indices = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+#html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+#html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = None
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'GUDHIdoc'
+
+
+# -- Options for LaTeX output ---------------------------------------------
+
+latex_elements = {
+# The paper size ('letterpaper' or 'a4paper').
+#'papersize': 'letterpaper',
+
+# The font size ('10pt', '11pt' or '12pt').
+#'pointsize': '10pt',
+
+# Additional stuff for the LaTeX preamble.
+#'preamble': '',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+# author, documentclass [howto, manual, or own class]).
+latex_documents = [
+ ('index', 'GUDHI.tex', u'GUDHI Documentation',
+ u'Vincent Rouvreau', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# If true, show page references after internal links.
+#latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+#latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_domain_indices = True
+
+
+# -- Options for manual page output ---------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+ ('index', 'gudhi', u'GUDHI Documentation',
+ [u'Vincent Rouvreau'], 1)
+]
+
+# If true, show URL addresses after external links.
+#man_show_urls = False
+
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+ ('index', 'GUDHI', u'GUDHI Documentation',
+ u'Vincent Rouvreau', 'GUDHI', 'One line description of project.',
+ 'Miscellaneous'),
+]
+
+# Documents to append as an appendix to all manuals.
+#texinfo_appendices = []
+
+# If false, no module index is generated.
+#texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+#texinfo_show_urls = 'footnote'
+
+# If true, do not generate a @detailmenu in the "Top" node's menu.
+#texinfo_no_detailmenu = False
diff --git a/src/cython/doc/cubical_complex_ref.rst b/src/cython/doc/cubical_complex_ref.rst
new file mode 100644
index 00000000..84aa4223
--- /dev/null
+++ b/src/cython/doc/cubical_complex_ref.rst
@@ -0,0 +1,9 @@
+Cubical complex reference manual
+################################
+
+.. autoclass:: gudhi.CubicalComplex
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+ .. automethod:: gudhi.CubicalComplex.__init__
diff --git a/src/cython/doc/cubical_complex_sum.rst b/src/cython/doc/cubical_complex_sum.rst
new file mode 100644
index 00000000..3ddf6375
--- /dev/null
+++ b/src/cython/doc/cubical_complex_sum.rst
@@ -0,0 +1,15 @@
+================================================================= =================================== ===================================
+:Author: Pawel Dlotko :Introduced in: GUDHI 2.0.0 :Copyright: GPL v3
+================================================================= =================================== ===================================
+
++-----------------------------------------------------------------+----------------------------------------------------------------------+
+| .. figure:: | The cubical complex is an example of a structured complex useful in |
+| img/Cubical_complex_representation.png | computational mathematics (specially rigorous numerics) and image |
+| :alt: Cubical complex representation | analysis. |
+| :figclass: align-center | |
+| | |
+| Cubical complex representation | |
++-----------------------------------------------------------------+----------------------------------------------------------------------+
+| :doc:`cubical_complex_user` | * :doc:`cubical_complex_ref` |
+| | * :doc:`periodic_cubical_complex_ref` |
++-----------------------------------------------------------------+----------------------------------------------------------------------+
diff --git a/src/cython/doc/cubical_complex_user.rst b/src/cython/doc/cubical_complex_user.rst
new file mode 100644
index 00000000..692acdd9
--- /dev/null
+++ b/src/cython/doc/cubical_complex_user.rst
@@ -0,0 +1,161 @@
+===========================
+Cubical complex user manual
+===========================
+Definition
+----------
+
+===================================== ===================================== =====================================
+:Author: Pawel Dlotko :Introduced in: GUDHI PYTHON 2.0.0 :Copyright: GPL v3
+===================================== ===================================== =====================================
+
++---------------------------------------------+----------------------------------------------------------------------+
+| :doc:`cubical_complex_user` | * :doc:`cubical_complex_ref` |
+| | * :doc:`periodic_cubical_complex_ref` |
++---------------------------------------------+----------------------------------------------------------------------+
+
+The cubical complex is an example of a structured complex useful in computational mathematics (especially rigorous
+numerics) and image analysis.
+
+An *elementary interval* is an interval of the form :math:`[n,n+1]` or :math:`[n,n]`, for :math:`n \in \mathbb{Z}`.
+The first one is called *non-degenerate*, while the second one is a *degenerate* interval. The
+*boundary of an elementary interval* is the chain :math:`\partial [n,n+1] = [n+1,n+1]-[n,n]` in the case of
+a non-degenerate elementary interval and :math:`\partial [n,n] = 0` in the case of a degenerate elementary interval. An
+*elementary cube* :math:`C` is a product of elementary intervals, :math:`C=I_1 \times \ldots \times I_n`.
+The *embedding dimension* of a cube is :math:`n`, the number of elementary intervals (degenerate or not) in the product.
+The *dimension of a cube* :math:`C=I_1 \times \ldots \times I_n` is the number of non-degenerate elementary
+intervals in the product. The *boundary of a cube* :math:`C=I_1 \times \ldots \times I_n` is the chain obtained
+in the following way:
+
+.. math::
+
+ \partial C = (\partial I_1 \times \ldots \times I_n) + (I_1 \times \partial I_2 \times \ldots \times I_n) +
+ \ldots + (I_1 \times I_2 \times \ldots \times \partial I_n).
+
+A *cubical complex* :math:`\mathcal{K}` is a collection of cubes closed under operation of taking boundary
+(i.e. boundary of every cube from the collection is in the collection). A cube :math:`C` in cubical complex
+:math:`\mathcal{K}` is *maximal* if it is not in a boundary of any other cube in :math:`\mathcal{K}`. A
+*support* of a cube :math:`C` is the set in :math:`\mathbb{R}^n` occupied by :math:`C` (:math:`n` is the embedding
+dimension of :math:`C`).
+
+Cubes may be equipped with filtration values, in which case we have a filtered cubical complex. All the cubical
+complexes considered in this implementation are filtered cubical complexes (although the range of the filtration may
+be a set of just two elements).
+
+For further details and theory of cubical complexes, please consult :cite:`kaczynski2004computational` as well as the
+following paper :cite:`peikert2012topological`.
+
+Data structure.
+---------------
+
+The implementation of Cubical complex provides a representation of complexes that occupy a rectangular region in
+:math:`\mathbb{R}^n`. This extra assumption allows for a memory efficient way of storing cubical complexes in a form
+of so called bitmaps. Let
+:math:`R = [b_1,e_1] \times \ldots \times [b_n,e_n]`, for :math:`b_1,\ldots,b_n,e_1,\ldots,e_n \in \mathbb{Z}`,
+:math:`b_i \leq e_i` be the considered rectangular region and let :math:`\mathcal{K}` be a filtered
+cubical complex having the rectangle :math:`R` as its support. Note that the structure of the coordinate system gives
+rise to a lexicographical ordering of the cells of :math:`\mathcal{K}`. This ordering is the basis of the presented
+bitmap-based implementation. In this implementation, the whole cubical complex is stored as a vector of the values
+of the filtration. This, together with the dimension of :math:`\mathcal{K}` and the sizes of :math:`\mathcal{K}` in all
+directions, allows one to determine the dimension, neighborhood, boundary and coboundary of every cube
+:math:`C \in \mathcal{K}`.
+
+.. figure::
+ img/Cubical_complex_representation.png
+ :alt: Cubical complex.
+ :figclass: align-center
+
+ Cubical complex.
+
+Note that the cubical complex in the figure above is, in a natural way, a product of one dimensional cubical
+complexes in :math:`\mathbb{R}`. The number of all cubes in each direction is equal to :math:`2n+1`, where :math:`n` is
+the number of maximal cubes in the considered direction. Let us consider a cube at position :math:`k` in the
+bitmap.
+Knowing the sizes of the bitmap, by a series of modulo operations, we can determine which elementary intervals are
+present in the product that gives the cube :math:`C`. In a similar way, we can compute the boundary and the coboundary of
+each cube. Further details can be found in the literature.
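+
+As an illustration of this decoding, the short Python sketch below (not part of
+the GUDHI interface, and not necessarily the exact internal layout) recovers the
+per-direction coordinates and the dimension of a cell from its position in the
+bitmap, using only integer divisions and modulo operations:
+
+.. code-block:: python
+
+    def decode_cell(position, sizes):
+        # sizes: number of maximal (top dimensional) cubes in each direction,
+        # so each direction contributes 2*n+1 cells to the bitmap.
+        coords = []
+        for n in sizes:
+            width = 2 * n + 1
+            coords.append(position % width)
+            position //= width
+        # an odd coordinate corresponds to a non-degenerate elementary interval
+        dimension = sum(c % 2 for c in coords)
+        return coords, dimension
+
+    # A 3 x 3 bitmap (three maximal cubes in each direction) has 7 x 7 = 49
+    # cells; cell 24 is the central maximal cube: coordinates [3, 3], dimension 2.
+    print(decode_cell(24, [3, 3]))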
+
+Input Format.
+-------------
+
+In the current implementation, the filtration is given on the maximal cubes, and it is then extended by the lower star
+filtration to all cubes. There are a number of constructors that can be used to construct a cubical complex by users
+who want to use the code directly. They can be found in the :doc:`cubical_complex_ref`.
+Currently, input from a text file is supported. It uses the format already used by the
+`Perseus software <http://www.sas.upenn.edu/~vnanda/perseus/>`_ by Vidit Nanda.
+A description of the format is given below. The first line contains a number d, the dimension of the
+bitmap (2 in the example below). The next d lines give the numbers of top dimensional cubes in each dimension (3 and 3
+in the example below). Then, in lexicographical order, the filtration values of the top dimensional cubes are given (1 4 6 8
+20 4 7 6 5 in the example below).
+
+.. figure::
+ img/exampleBitmap.png
+ :alt: Example of a input data.
+ :figclass: align-center
+
+ Example of a input data.
+
+The input file for the following complex is:
+
+.. literalinclude:: cubicalcomplexdoc.txt
+
+.. centered:: cubicalcomplexdoc.txt
+
+.. testcode::
+
+ import gudhi
+ cubical_complex = gudhi.CubicalComplex(perseus_file='cubicalcomplexdoc.txt')
+ result_str = 'Cubical complex is of dimension ' + repr(cubical_complex.dimension()) + ' - ' + \
+ repr(cubical_complex.num_simplices()) + ' simplices.'
+ print(result_str)
+
+The program output is:
+
+.. testoutput::
+
+ Cubical complex is of dimension 2 - 49 simplices.
+
+Periodic boundary conditions.
+-----------------------------
+
+Often one would like to impose periodic boundary conditions on the cubical complex (cf.
+:doc:`periodic_cubical_complex_ref`).
+Let :math:`I_1\times ... \times I_n` be a box that is decomposed with a cubical complex :math:`\mathcal{K}`.
+Imposing periodic boundary conditions in the direction i means that the left and the right sides of the complex
+:math:`\mathcal{K}` are considered the same. In particular, if for a bitmap :math:`\mathcal{K}` periodic boundary
+conditions are imposed in all directions, then the complex :math:`\mathcal{K}` becomes an n-dimensional torus. One can use
+various constructors from the file Bitmap_cubical_complex_periodic_boundary_conditions_base.h to construct a cubical
+complex with periodic boundary conditions. One can also use Perseus style input files. To indicate periodic boundary
+conditions in a given direction, the number of top dimensional cells in this direction has to be multiplied by -1.
+For instance:
+
+.. literalinclude:: periodiccubicalcomplexdoc.txt
+
+.. centered:: periodiccubicalcomplexdoc.txt
+
+The file above indicates that periodic boundary conditions are imposed in the direction x, but not in the direction y.
+
+.. testcode::
+
+ import gudhi
+ periodic_cc = gudhi.PeriodicCubicalComplex(perseus_file='periodiccubicalcomplexdoc.txt')
+ result_str = 'Periodic cubical complex is of dimension ' + repr(periodic_cc.dimension()) + ' - ' + \
+ repr(periodic_cc.num_simplices()) + ' simplices.'
+ print(result_str)
+
+The program output is:
+
+.. testoutput::
+
+ Periodic cubical complex is of dimension 2 - 42 simplices.
+
+Examples.
+---------
+
+End user programs are available in the cython/example/ folder.
+
+Bibliography
+************
+
+.. bibliography:: bibliography.bib
+ :filter: docnames
+ :style: unsrt
diff --git a/src/cython/doc/euclidean_strong_witness_complex_ref.rst b/src/cython/doc/euclidean_strong_witness_complex_ref.rst
new file mode 100644
index 00000000..bebf0f9a
--- /dev/null
+++ b/src/cython/doc/euclidean_strong_witness_complex_ref.rst
@@ -0,0 +1,10 @@
+=================================================
+Euclidean strong witness complex reference manual
+=================================================
+
+.. autoclass:: gudhi.EuclideanStrongWitnessComplex
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+ .. automethod:: gudhi.EuclideanStrongWitnessComplex.__init__
diff --git a/src/cython/doc/euclidean_witness_complex_ref.rst b/src/cython/doc/euclidean_witness_complex_ref.rst
new file mode 100644
index 00000000..29b8806f
--- /dev/null
+++ b/src/cython/doc/euclidean_witness_complex_ref.rst
@@ -0,0 +1,10 @@
+==========================================
+Euclidean witness complex reference manual
+==========================================
+
+.. autoclass:: gudhi.EuclideanWitnessComplex
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+ .. automethod:: gudhi.EuclideanWitnessComplex.__init__
diff --git a/src/cython/doc/examples.rst b/src/cython/doc/examples.rst
new file mode 100644
index 00000000..a89e0596
--- /dev/null
+++ b/src/cython/doc/examples.rst
@@ -0,0 +1,4 @@
+Examples
+########
+
+.. include:: examples.inc
diff --git a/src/cython/doc/generate_examples.py b/src/cython/doc/generate_examples.py
new file mode 100755
index 00000000..d64d506c
--- /dev/null
+++ b/src/cython/doc/generate_examples.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+
+from os import listdir
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2017 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2017 INRIA"
+__license__ = "GPL v3"
+
+"""
+generate_examples.py generates examples.inc to be included in examples.rst.
+Refer to Makefile and make.bat to see if it is correctly launched.
+"""
+
+output_file = open('examples.inc','w')
+
+output_file.write('.. only:: builder_html\n\n')
+
+for file in listdir('../example/'):
+ output_file.write(" * :download:`" + file + " <../example/" + file + ">`\n")
+
+output_file.close()
diff --git a/src/cython/doc/img/graphical_tools_representation.png b/src/cython/doc/img/graphical_tools_representation.png
new file mode 100644
index 00000000..9759f7ba
--- /dev/null
+++ b/src/cython/doc/img/graphical_tools_representation.png
Binary files differ
diff --git a/src/cython/doc/index.rst b/src/cython/doc/index.rst
new file mode 100644
index 00000000..fca63d65
--- /dev/null
+++ b/src/cython/doc/index.rst
@@ -0,0 +1,87 @@
+GUDHI documentation
+###################
+
+.. image:: img/Gudhi_banner.png
+ :align: center
+
+Introduction
+************
+
+The Gudhi library (Geometry Understanding in Higher Dimensions) is a generic
+open source `C++ library <http://gudhi.gforge.inria.fr/doc/latest/>`_, with a
+`Cython interface <http://gudhi.gforge.inria.fr/cython/latest/>`_, for
+Computational Topology and Topological Data Analysis
+(`TDA <https://en.wikipedia.org/wiki/Topological_data_analysis>`_).
+The GUDHI library intends to help the development of new algorithmic solutions
+in TDA and their transfer to applications. It provides robust, efficient,
+flexible and easy to use implementations of state-of-the-art algorithms and
+data structures.
+
+The current release of the GUDHI library includes:
+
+* Data structures to represent, construct and manipulate simplicial complexes.
+* Algorithms to compute persistent homology and multi-field persistent homology.
+* Simplification of simplicial complexes by edge contraction.
+
+All data-structures are generic and several of their aspects can be
+parameterized via template classes. We refer to :cite:`gudhilibrary_ICMS14`
+for a detailed description of the design of the library.
+
+Data structures
+***************
+
+Alpha complex
+=============
+
+.. include:: alpha_complex_sum.rst
+
+Cubical complex
+===============
+
+.. include:: cubical_complex_sum.rst
+
+Rips complex
+============
+
+.. include:: rips_complex_sum.rst
+
+Simplex tree
+============
+
+.. include:: simplex_tree_sum.rst
+
+Tangential complex
+==================
+
+.. include:: tangential_complex_sum.rst
+
+Witness complex
+===============
+
+.. include:: witness_complex_sum.rst
+
+
+Toolbox
+*******
+
+Bottleneck distance
+===================
+
+.. include:: bottleneck_distance_sum.rst
+
+Persistent cohomology
+======================
+
+.. include:: persistent_cohomology_sum.rst
+
+Persistence graphical tools
+===========================
+
+.. include:: persistence_graphical_tools_sum.rst
+
+Bibliography
+************
+
+.. bibliography:: bibliography.bib
+ :filter: docnames
+ :style: unsrt
diff --git a/src/cython/doc/installation.rst b/src/cython/doc/installation.rst
new file mode 100644
index 00000000..e7d8c210
--- /dev/null
+++ b/src/cython/doc/installation.rst
@@ -0,0 +1,171 @@
+Installation
+############
+
+Compiling
+*********
+
+The library uses C++11 and requires `Boost <http://www.boost.org/>`_ version
+1.48.0 or more recent. It is a multi-platform library and compiles on Linux,
+Mac OS X and Visual Studio 2015.
+It also requires CMake to generate makefiles, and Cython to compile the
+library.
+
+GUDHI Cythonization
+===================
+
+To build the GUDHI cython module, run the following commands in a terminal:
+
+.. code-block:: bash
+
+ cd /path-to-gudhi/
+ mkdir build
+ cd build/
+ cmake ..
+ make cython
+
+A list of examples is available on the :doc:`examples` page.
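+
+Once the module is built, a quick way to check that it can be imported is to run Python with the build output on the
+path (the path below matches the test suite section and is only an assumption about your build layout):
+
+.. code-block:: bash
+
+    # From the build directory; adjust the path if your build tree differs.
+    PYTHONPATH="$PWD/src/cython" python -c "import gudhi; print(gudhi.SimplexTree().num_simplices())"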
+
+Test suites
+===========
+
+To test your build, `py.test <http://doc.pytest.org>`_ is required. Run the
+following command in a terminal:
+
+.. code-block:: bash
+
+ cd /path-to-gudhi/build/src/cython
+ # For Windows, you have to set the PYTHONPATH environment variable
+ export PYTHONPATH="$PYTHONPATH:/path-to-gudhi/build/src/cython"
+ py.test
+
+Documentation
+=============
+
+To build the documentation, `sphinx-doc <http://www.sphinx-doc.org>`_ is
+required. Please refer to the *conf.py* file to see which
+`sphinx-doc <http://www.sphinx-doc.org>`_ modules are required to
+generate the documentation. Run the following commands in a terminal:
+
+.. code-block:: bash
+
+ make sphinx
+
+Optional third-party library
+****************************
+
+CGAL
+====
+
+The :doc:`Alpha complex </alpha_complex_user>`,
+:doc:`Tangential complex </tangential_complex_user>` and
+:doc:`Witness complex </witness_complex_user>` data structures, and
+:doc:`Bottleneck distance </bottleneck_distance_user>` require CGAL, a
+C++ library that provides easy access to efficient and reliable geometric
+algorithms.
+
+Having CGAL version 4.6.0 or higher installed is recommended. The procedure to
+install this library according to your operating system is detailed
+`here <http://doc.cgal.org/latest/Manual/installation.html>`_.
+
+The following examples require the Computational Geometry Algorithms Library:
+
+.. only:: builder_html
+
+ * :download:`euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py>`
+ * :download:`euclidean_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py>`
+
+The following examples require CGAL version ≥ 4.7.0:
+
+.. only:: builder_html
+
+ * :download:`alpha_complex_diagram_persistence_from_off_file_example.py <../example/alpha_complex_diagram_persistence_from_off_file_example.py>`
+ * :download:`alpha_complex_from_points_example.py <../example/alpha_complex_from_points_example.py>`
+
+The following examples require CGAL version ≥ 4.8.0:
+
+.. only:: builder_html
+
+ * :download:`bottleneck_basic_example.py <../example/bottleneck_basic_example.py>`
+ * :download:`tangential_complex_plain_homology_from_off_file_example.py <../example/tangential_complex_plain_homology_from_off_file_example.py>`
+
+Eigen3
+======
+
+The :doc:`Alpha complex </alpha_complex_user>`,
+:doc:`Tangential complex </tangential_complex_user>` and
+:doc:`Witness complex </witness_complex_user>` data structures, as well as a few
+examples, require `Eigen3 <http://eigen.tuxfamily.org/>`_, a C++ template
+library for linear algebra: matrices, vectors, numerical solvers, and related
+algorithms.
+
+The following examples require `Eigen3 <http://eigen.tuxfamily.org/>`_:
+
+.. only:: builder_html
+
+ * :download:`alpha_complex_diagram_persistence_from_off_file_example.py <../example/alpha_complex_diagram_persistence_from_off_file_example.py>`
+ * :download:`alpha_complex_from_points_example.py <../example/alpha_complex_from_points_example.py>`
+ * :download:`tangential_complex_plain_homology_from_off_file_example.py <../example/tangential_complex_plain_homology_from_off_file_example.py>`
+ * :download:`euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py>`
+ * :download:`euclidean_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py>`
+
+Matplotlib
+==========
+
+The :doc:`persistence graphical tools </persistence_graphical_tools_user>`
+module requires `Matplotlib <http://matplotlib.org>`_, a Python 2D plotting
+library which produces publication quality figures in a variety of hardcopy
+formats and interactive environments across platforms.
+
+The following examples require `Matplotlib <http://matplotlib.org>`_:
+
+.. only:: builder_html
+
+ * :download:`alpha_complex_diagram_persistence_from_off_file_example.py <../example/alpha_complex_diagram_persistence_from_off_file_example.py>`
+ * :download:`gudhi_graphical_tools_example.py <../example/gudhi_graphical_tools_example.py>`
+ * :download:`periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py <../example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py>`
+ * :download:`rips_complex_diagram_persistence_from_off_file_example.py <../example/rips_complex_diagram_persistence_from_off_file_example.py>`
+ * :download:`rips_persistence_diagram.py <../example/rips_persistence_diagram.py>`
+ * :download:`rips_complex_diagram_persistence_from_distance_matrix_file_example.py <../example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py>`
+ * :download:`tangential_complex_plain_homology_from_off_file_example.py <../example/tangential_complex_plain_homology_from_off_file_example.py>`
+ * :download:`euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py>`
+ * :download:`euclidean_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py>`
+
+Numpy
+=====
+
+The :doc:`persistence graphical tools </persistence_graphical_tools_user>`
+module requires `NumPy <http://numpy.org>`_, a fundamental package for
+scientific computing with Python.
+
+The following examples require `NumPy <http://numpy.org>`_:
+
+.. only:: builder_html
+
+ * :download:`alpha_complex_diagram_persistence_from_off_file_example.py <../example/alpha_complex_diagram_persistence_from_off_file_example.py>`
+ * :download:`gudhi_graphical_tools_example.py <../example/gudhi_graphical_tools_example.py>`
+ * :download:`periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py <../example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py>`
+ * :download:`rips_complex_diagram_persistence_from_off_file_example.py <../example/rips_complex_diagram_persistence_from_off_file_example.py>`
+ * :download:`rips_persistence_diagram.py <../example/rips_persistence_diagram.py>`
+ * :download:`rips_complex_diagram_persistence_from_distance_matrix_file_example.py <../example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py>`
+ * :download:`tangential_complex_plain_homology_from_off_file_example.py <../example/tangential_complex_plain_homology_from_off_file_example.py>`
+ * :download:`euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py>`
+ * :download:`euclidean_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py>`
+
+Threading Building Blocks
+=========================
+
+`Intel® TBB <https://www.threadingbuildingblocks.org/>`_ lets you easily write
+parallel C++ programs that take full advantage of multicore performance, that
+are portable and composable, and that have future-proof scalability.
+
+Having Intel® TBB installed is recommended to parallelize and accelerate some
+GUDHI computations.
+
+Bug reports and contributions
+*****************************
+
+Please help us improve the quality of the GUDHI library. You may report bugs or suggestions to:
+
+ Contact: gudhi-users@lists.gforge.inria.fr
+
+GUDHI is open to external contributions. If you want to join our development team, please contact us.
diff --git a/src/cython/doc/make.bat.in b/src/cython/doc/make.bat.in
new file mode 100644
index 00000000..ff1a6d56
--- /dev/null
+++ b/src/cython/doc/make.bat.in
@@ -0,0 +1,67 @@
+@ECHO OFF
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+ set SPHINXBUILD=@SPHINX_PATH@
+)
+set BUILDDIR=_build
+set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
+set I18NSPHINXOPTS=%SPHINXOPTS% .
+if NOT "%PAPER%" == "" (
+ set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
+ set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
+)
+
+if "%1" == "" goto help
+
+if "%1" == "help" (
+ :help
+ echo.Please use `make ^<target^>` where ^<target^> is one of
+ echo. html to make standalone HTML files
+ echo. doctest to run all doctests embedded in the documentation if enabled
+ goto end
+)
+
+if "%1" == "clean" (
+ del examples.inc
+ for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
+ del /q /s %BUILDDIR%\*
+ goto end
+)
+
+
+%SPHINXBUILD% 2> nul
+if errorlevel 9009 (
+ echo.
+ echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
+ echo.installed, then set the SPHINXBUILD environment variable to point
+ echo.to the full path of the 'sphinx-build' executable. Alternatively you
+ echo.may add the Sphinx directory to PATH.
+ echo.
+ echo.If you don't have Sphinx installed, grab it from
+ echo.http://sphinx-doc.org/
+ exit /b 1
+)
+
+:: GUDHI specific : Examples.inc is generated with generate_examples.py (and deleted on clean)
+
+if "%1" == "html" (
+ generate_examples.py
+ %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/html.
+ goto end
+)
+
+if "%1" == "doctest" (
+ %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Testing of doctests in the sources finished, look at the ^
+results in %BUILDDIR%/doctest/output.txt.
+ goto end
+)
+
+:end
diff --git a/src/cython/doc/periodic_cubical_complex_ref.rst b/src/cython/doc/periodic_cubical_complex_ref.rst
new file mode 100644
index 00000000..c6190a1b
--- /dev/null
+++ b/src/cython/doc/periodic_cubical_complex_ref.rst
@@ -0,0 +1,9 @@
+Periodic cubical complex reference manual
+#########################################
+
+.. autoclass:: gudhi.PeriodicCubicalComplex
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+ .. automethod:: gudhi.PeriodicCubicalComplex.__init__
diff --git a/src/cython/doc/persistence_graphical_tools_ref.rst b/src/cython/doc/persistence_graphical_tools_ref.rst
new file mode 100644
index 00000000..27c2f68a
--- /dev/null
+++ b/src/cython/doc/persistence_graphical_tools_ref.rst
@@ -0,0 +1,8 @@
+============================================
+Persistence graphical tools reference manual
+============================================
+
+.. autofunction:: gudhi.__min_birth_max_death
+.. autofunction:: gudhi.show_palette_values
+.. autofunction:: gudhi.plot_persistence_barcode
+.. autofunction:: gudhi.plot_persistence_diagram
diff --git a/src/cython/doc/persistence_graphical_tools_sum.rst b/src/cython/doc/persistence_graphical_tools_sum.rst
new file mode 100644
index 00000000..d602daa7
--- /dev/null
+++ b/src/cython/doc/persistence_graphical_tools_sum.rst
@@ -0,0 +1,12 @@
+================================================================= =================================== ===================================
+:Author: Vincent Rouvreau :Introduced in: GUDHI 2.0.0 :Copyright: GPL v3
+:Requires: Matplotlib Numpy
+================================================================= =================================== ===================================
+
++-----------------------------------------------------------------+-----------------------------------------------------------------------+
+| .. figure:: | These graphical tools come on top of persistence results and allow |
+| img/graphical_tools_representation.png | the user to easily build barcodes and persistence diagrams. |
+| | |
++-----------------------------------------------------------------+-----------------------------------------------------------------------+
+| :doc:`persistence_graphical_tools_user` | :doc:`persistence_graphical_tools_ref` |
++-----------------------------------------------------------------+-----------------------------------------------------------------------+
diff --git a/src/cython/doc/persistence_graphical_tools_user.rst b/src/cython/doc/persistence_graphical_tools_user.rst
new file mode 100644
index 00000000..f713e971
--- /dev/null
+++ b/src/cython/doc/persistence_graphical_tools_user.rst
@@ -0,0 +1,67 @@
+=======================================
+Persistence graphical tools user manual
+=======================================
+Definition
+----------
+.. include:: persistence_graphical_tools_sum.rst
+
+
+Show palette values
+-------------------
+
+This function is useful to display the color palette values used for each dimension:
+
+
+.. testcode::
+
+ import gudhi
+ gudhi.show_palette_values(alpha=1.0)
+
+.. plot::
+
+ import gudhi
+ gudhi.show_palette_values(alpha=1.0)
+
+Show persistence as a barcode
+-----------------------------
+
+This function can display the persistence result as a barcode:
+
+.. testcode::
+
+ import gudhi
+
+ periodic_cc = gudhi.PeriodicCubicalComplex(perseus_file='3d_torus.txt')
+ diag = periodic_cc.persistence()
+ gudhi.plot_persistence_barcode(diag)
+
+.. plot::
+
+ import gudhi
+
+ periodic_cc = gudhi.PeriodicCubicalComplex(perseus_file='3d_torus.txt')
+ diag = periodic_cc.persistence()
+ gudhi.plot_persistence_barcode(diag)
+
+Show persistence as a diagram
+-----------------------------
+
+This function can display the persistence result as a diagram:
+
+.. testcode::
+
+ import gudhi
+
+ rips_complex = gudhi.RipsComplex(off_file='tore3D_300.off', max_edge_length=2.0)
+ simplex_tree = rips_complex.create_simplex_tree(max_dimension=3)
+ diag = simplex_tree.persistence()
+ gudhi.plot_persistence_diagram(diag)
+
+.. plot::
+
+ import gudhi
+
+ rips_complex = gudhi.RipsComplex(off_file='tore3D_300.off', max_edge_length=2.0)
+ simplex_tree = rips_complex.create_simplex_tree(max_dimension=3)
+ diag = simplex_tree.persistence()
+ gudhi.plot_persistence_diagram(diag)
diff --git a/src/cython/doc/persistent_cohomology_sum.rst b/src/cython/doc/persistent_cohomology_sum.rst
new file mode 100644
index 00000000..d1f79cb4
--- /dev/null
+++ b/src/cython/doc/persistent_cohomology_sum.rst
@@ -0,0 +1,27 @@
+================================================================= =================================== ===================================
+:Author: Clément Maria :Introduced in: GUDHI 2.0.0 :Copyright: GPL v3
+================================================================= =================================== ===================================
+
++-----------------------------------------------------------------+-----------------------------------------------------------------------+
+| .. figure:: | The theory of homology consists in attaching to a topological space |
+| img/3DTorus_poch.png | a sequence of (homology) groups, capturing global topological |
+| :figclass: align-center | features like connected components, holes, cavities, etc. Persistent |
+| | homology studies the evolution -- birth, life and death -- of these |
+| Rips Persistent Cohomology on a 3D | features when the topological space is changing. Consequently, the |
+| Torus | theory is essentially composed of three elements: topological spaces, |
+| | their homology groups and an evolution scheme. |
+| | |
+| | Computation of persistent cohomology using the algorithm of |
+| | :cite:`DBLP:journals/dcg/SilvaMV11` and |
+| | :cite:`DBLP:journals/corr/abs-1208-5018` and the Compressed |
+| | Annotation Matrix implementation of |
+| | :cite:`DBLP:conf/esa/BoissonnatDM13`. |
+| | |
++-----------------------------------------------------------------+-----------------------------------------------------------------------+
+| :doc:`persistent_cohomology_user` | Please refer to each data structure that contains persistence |
+| | feature for reference: |
+| | |
+| | * :doc:`simplex_tree_ref` |
+| | * :doc:`cubical_complex_ref` |
+| | * :doc:`periodic_cubical_complex_ref` |
++-----------------------------------------------------------------+-----------------------------------------------------------------------+
diff --git a/src/cython/doc/persistent_cohomology_user.rst b/src/cython/doc/persistent_cohomology_user.rst
new file mode 100644
index 00000000..69be3b86
--- /dev/null
+++ b/src/cython/doc/persistent_cohomology_user.rst
@@ -0,0 +1,115 @@
+=================================
+Persistent cohomology user manual
+=================================
+Definition
+----------
+===================================== ===================================== =====================================
+:Author: Clément Maria :Introduced in: GUDHI PYTHON 2.0.0 :Copyright: GPL v3
+===================================== ===================================== =====================================
+
++---------------------------------------------+----------------------------------------------------------------------+
+| :doc:`persistent_cohomology_user` | Please refer to each data structure that contains persistence |
+| | feature for reference: |
+| | |
+| | * :doc:`simplex_tree_ref` |
++---------------------------------------------+----------------------------------------------------------------------+
+
+
+Computation of persistent cohomology using the algorithm of :cite:`DBLP:journals/dcg/SilvaMV11` and
+:cite:`DBLP:journals/corr/abs-1208-5018` and the Compressed Annotation Matrix implementation of
+:cite:`DBLP:conf/esa/BoissonnatDM13`.
+
+The theory of homology consists in attaching to a topological space a sequence of (homology) groups, capturing global
+topological features like connected components, holes, cavities, etc. Persistent homology studies the evolution --
+birth, life and death -- of these features when the topological space is changing. Consequently, the theory is
+essentially composed of three elements:
+
+* topological spaces
+* their homology groups
+* an evolution scheme.
+
+Topological Spaces
+------------------
+
+Topological spaces are represented by simplicial complexes.
+Let :math:`V = \{1, \cdots ,|V|\}` be a set of *vertices*.
+A *simplex* :math:`\sigma` is a subset of vertices :math:`\sigma \subseteq V`.
+A *simplicial complex* :math:`\mathbf{K}` on :math:`V` is a collection of simplices :math:`\{\sigma\}`,
+:math:`\sigma \subseteq V`, such that :math:`\tau \subseteq \sigma \in \mathbf{K} \Rightarrow \tau \in \mathbf{K}`.
+The dimension :math:`n=|\sigma|-1` of :math:`\sigma` is its number of elements minus 1.
+A *filtration* of a simplicial complex is a function :math:`f:\mathbf{K} \rightarrow \mathbb{R}` satisfying
+:math:`f(\tau)\leq f(\sigma)` whenever :math:`\tau \subseteq \sigma`.
+
+Homology
+--------
+
+For a ring :math:`\mathcal{R}`, the group of *n-chains*, denoted :math:`\mathbf{C}_n(\mathbf{K},\mathcal{R})`, of
+:math:`\mathbf{K}` is the group of formal sums of n-simplices with :math:`\mathcal{R}` coefficients. The
+*boundary operator* is a linear operator
+:math:`\partial_n: \mathbf{C}_n(\mathbf{K},\mathcal{R}) \rightarrow \mathbf{C}_{n-1}(\mathbf{K},\mathcal{R})`
+such that :math:`\partial_n \sigma = \partial_n [v_0, \cdots , v_n] = \sum_{i=0}^n (-1)^{i}[v_0,\cdots ,\widehat{v_i}, \cdots,v_n]`,
+where :math:`\widehat{v_i}` means :math:`v_i` is omitted from the list. The chain groups form a sequence:
+
+.. math::
+
+ \cdots \ \ \mathbf{C}_n(\mathbf{K},\mathcal{R}) \xrightarrow{\ \partial_n\ }
+ \mathbf{C}_{n-1}(\mathbf{K},\mathcal{R}) \xrightarrow{\partial_{n-1}} \cdots \xrightarrow{\ \partial_2 \ }
+ \mathbf{C}_1(\mathbf{K},\mathcal{R}) \xrightarrow{\ \partial_1 \ } \mathbf{C}_0(\mathbf{K},\mathcal{R})
+
+of finitely many groups :math:`\mathbf{C}_n(\mathbf{K},\mathcal{R})` and homomorphisms :math:`\partial_n`, indexed by
+the dimension :math:`n \geq 0`. The boundary operators satisfy the property :math:`\partial_n \circ \partial_{n+1}=0`
+for every :math:`n > 0` and we define the homology groups:
+
+.. math::
+
+ \mathbf{H}_n(\mathbf{K},\mathcal{R}) = \ker \partial_n / \mathrm{im} \ \partial_{n+1}
+
+We refer to :cite:`Munkres-elementsalgtop1984` for an introduction to homology
+theory and to :cite:`DBLP:books/daglib/0025666` for an introduction to persistent homology.
+
+Indexing Scheme
+---------------
+
+"Changing" a simplicial complex consists in applying a simplicial map. An *indexing scheme* is a directed graph
+together with a traversal order, such that two consecutive nodes in the graph are connected by an arrow (either forward
+or backward).
+The nodes represent simplicial complexes and the directed edges simplicial maps.
+
+From the computational point of view, there are two types of indexing schemes of interest in persistent homology:
+
+* linear ones
+ :math:`\bullet \longrightarrow \bullet \longrightarrow \cdots \longrightarrow \bullet \longrightarrow \bullet`
+ in persistent homology :cite:`DBLP:journals/dcg/ZomorodianC05`,
+* zigzag ones
+ :math:`\bullet \longrightarrow \bullet \longleftarrow \cdots \longrightarrow \bullet \longleftarrow \bullet`
+ in zigzag persistent homology :cite:`DBLP:journals/focm/CarlssonS10`.
+
+These indexing schemes have a natural left-to-right traversal order, and we describe them with ranges and iterators.
+In the current release of the Gudhi library, only the linear case is implemented.
+
+In the following, we consider the case where the indexing scheme is induced by a filtration.
+
+Ordering the simplices by increasing filtration values (breaking ties so that a simplex appears after its subsimplices
+of the same filtration value) provides an indexing scheme.
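+
+As a minimal illustration (a sketch, not one of the bundled examples), one can build a small filtered complex with a
+:doc:`Simplex_tree <simplex_tree_ref>` and ask for its persistence:
+
+.. code-block:: python
+
+    import gudhi
+
+    st = gudhi.SimplexTree()
+    # A filtered triangle: vertex [0] appears at 0.0, the full triangle and its
+    # remaining faces appear at 1.0.
+    st.insert_simplex([0], filtration=0.0)
+    st.insert_simplex_and_subfaces([0, 1, 2], filtration=1.0)
+    # persistence() returns a list of (dimension, (birth, death)) pairs.
+    print(st.persistence())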
+
+Examples
+--------
+
+We provide several example files: run these examples with -h for details on their use.
+
+.. only:: builder_html
+
+ * :download:`alpha_complex_diagram_persistence_from_off_file_example.py <../example/alpha_complex_diagram_persistence_from_off_file_example.py>`
+ * :download:`periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py <../example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py>`
+ * :download:`rips_complex_diagram_persistence_from_off_file_example.py <../example/rips_complex_diagram_persistence_from_off_file_example.py>`
+ * :download:`rips_persistence_diagram.py <../example/rips_persistence_diagram.py>`
+ * :download:`rips_complex_diagram_persistence_from_distance_matrix_file_example.py <../example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py>`
+ * :download:`random_cubical_complex_persistence_example.py <../example/random_cubical_complex_persistence_example.py>`
+ * :download:`tangential_complex_plain_homology_from_off_file_example.py <../example/tangential_complex_plain_homology_from_off_file_example.py>`
+
+Bibliography
+************
+
+.. bibliography:: bibliography.bib
+ :filter: docnames
+ :style: unsrt
diff --git a/src/cython/doc/pyplots/barcode_persistence.py b/src/cython/doc/pyplots/barcode_persistence.py
new file mode 100755
index 00000000..c06ac5a7
--- /dev/null
+++ b/src/cython/doc/pyplots/barcode_persistence.py
@@ -0,0 +1,5 @@
+import gudhi
+
+periodic_cc = gudhi.PeriodicCubicalComplex(perseus_file='../3d_torus.txt')
+diag = periodic_cc.persistence()
+gudhi.plot_persistence_barcode(diag)
diff --git a/src/cython/doc/pyplots/diagram_persistence.py b/src/cython/doc/pyplots/diagram_persistence.py
new file mode 100755
index 00000000..b4714fe3
--- /dev/null
+++ b/src/cython/doc/pyplots/diagram_persistence.py
@@ -0,0 +1,5 @@
+import gudhi
+
+alpha_complex = gudhi.AlphaComplex(off_file='../tore3D_300.off')
+diag = alpha_complex.persistence()
+gudhi.plot_persistence_diagram(diag)
diff --git a/src/cython/doc/pyplots/show_palette_values.py b/src/cython/doc/pyplots/show_palette_values.py
new file mode 100755
index 00000000..e72a55fd
--- /dev/null
+++ b/src/cython/doc/pyplots/show_palette_values.py
@@ -0,0 +1,2 @@
+import gudhi
+gudhi.show_palette_values(alpha=1.0)
diff --git a/src/cython/doc/python3-sphinx-build b/src/cython/doc/python3-sphinx-build
new file mode 100755
index 00000000..44b94169
--- /dev/null
+++ b/src/cython/doc/python3-sphinx-build
@@ -0,0 +1,11 @@
+#!/usr/bin/python3
+
+"""
+Emulate sphinx-build for python3
+"""
+
+from sys import exit, argv
+from sphinx import main
+
+if __name__ == '__main__':
+ exit(main(argv))
diff --git a/src/cython/doc/rips_complex_ref.rst b/src/cython/doc/rips_complex_ref.rst
new file mode 100644
index 00000000..b17dc4e0
--- /dev/null
+++ b/src/cython/doc/rips_complex_ref.rst
@@ -0,0 +1,10 @@
+=============================
+Rips complex reference manual
+=============================
+
+.. autoclass:: gudhi.RipsComplex
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+ .. automethod:: gudhi.RipsComplex.__init__
diff --git a/src/cython/doc/rips_complex_sum.rst b/src/cython/doc/rips_complex_sum.rst
new file mode 100644
index 00000000..2b65fc19
--- /dev/null
+++ b/src/cython/doc/rips_complex_sum.rst
@@ -0,0 +1,17 @@
+================================================================= =================================== ===================================
+:Author: Clément Maria, Pawel Dlotko, Vincent Rouvreau :Introduced in: GUDHI 2.0.0 :Copyright: GPL v3
+================================================================= =================================== ===================================
+
++----------------------------------------------------------------+------------------------------------------------------------------------+
+| .. figure:: | Rips complex is a simplicial complex constructed from a one skeleton |
+| img/rips_complex_representation.png | graph. |
+| :figclass: align-center | |
+| | The filtration value of each edge is computed from a user-given |
+| Rips complex representation | distance function, and the edge is inserted if this value does |
+| | not exceed a user-given threshold. |
+| | |
+| | This complex can be built from a point cloud and a distance function, |
+| | or from a distance matrix. |
++----------------------------------------------------------------+------------------------------------------------------------------------+
+| :doc:`rips_complex_user` | :doc:`rips_complex_ref` |
++----------------------------------------------------------------+------------------------------------------------------------------------+
diff --git a/src/cython/doc/rips_complex_user.rst b/src/cython/doc/rips_complex_user.rst
new file mode 100644
index 00000000..027c3bf7
--- /dev/null
+++ b/src/cython/doc/rips_complex_user.rst
@@ -0,0 +1,237 @@
+=========================
+Rips complex user manual
+=========================
+Definition
+----------
+
+======================================================= ===================================== =====================================
+:Authors: Clément Maria, Pawel Dlotko, Vincent Rouvreau :Introduced in: GUDHI 2.0.0 :Copyright: GPL v3
+======================================================= ===================================== =====================================
+
++-------------------------------------------+----------------------------------------------------------------------+
+| :doc:`rips_complex_user` | :doc:`rips_complex_ref` |
++-------------------------------------------+----------------------------------------------------------------------+
+
+The `Rips complex <https://en.wikipedia.org/wiki/Vietoris%E2%80%93Rips_complex>`_ is a simplicial complex built as the
+expansion of a one-skeleton graph. The input can be a point cloud with a given distance function, or a distance
+matrix.
+
+The filtration value of each edge is computed from a user-given distance function, or directly from the distance
+matrix.
+
+Edges whose filtration value is strictly greater than a given threshold value are not inserted into the complex.
+
+When creating a simplicial complex from this one-skeleton graph, Rips inserts the one-skeleton graph into the data
+structure, and then expands the simplicial complex when required.
+
+Vertex names correspond to the indices of the points in the given range (i.e. the point cloud).
+
+.. figure::
+ img/rips_complex_representation.png
+ :align: center
+
+ Rips-complex one skeleton graph representation
+
+In this example, as the edges (4,5), (4,6) and (5,6) are in the complex, the simplex (4,5,6) is added with its
+filtration value set to :math:`\max(filtration(4,5), filtration(4,6), filtration(5,6))`, and so on for the simplex
+(0,1,2,3).
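+
+The following sketch (not one of the bundled examples) illustrates this rule on a single triangle: after expansion to
+dimension 2, the filtration value of the 2-simplex is the maximum of its edge filtration values:
+
+.. code-block:: python
+
+    import gudhi
+
+    # A 3-4-5 right triangle: the edge lengths are 3.0, 4.0 and 5.0.
+    rips_complex = gudhi.RipsComplex(points=[[0, 0], [0, 3], [4, 0]], max_edge_length=6.0)
+    simplex_tree = rips_complex.create_simplex_tree(max_dimension=2)
+    for filtered_value in simplex_tree.get_filtered_tree():
+        print(filtered_value)
+    # The 2-simplex ([0, 1, 2], 5.0) gets the length of its longest edge as filtration value.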
+
+If the Rips_complex interfaces are not detailed enough for your needs, please refer to the
+rips_persistence_step_by_step.cpp example, where the graph construction over the Simplex_tree is more detailed.
+
+Point cloud
+-----------
+
+Example from a point cloud
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+This example builds the one-skeleton graph from the given points and a max_edge_length value.
+Then it creates a :doc:`Simplex_tree <simplex_tree_ref>` with it.
+
+Finally, it displays some information about the simplicial complex.
+
+.. testcode::
+
+ import gudhi
+ rips_complex = gudhi.RipsComplex(points=[[1, 1], [7, 0], [4, 6], [9, 6], [0, 14], [2, 19], [9, 17]],
+ max_edge_length=12.0)
+
+ simplex_tree = rips_complex.create_simplex_tree(max_dimension=1)
+ result_str = 'Rips complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \
+ repr(simplex_tree.num_simplices()) + ' simplices - ' + \
+ repr(simplex_tree.num_vertices()) + ' vertices.'
+ print(result_str)
+ for filtered_value in simplex_tree.get_filtered_tree():
+ print(filtered_value)
+
+When launching it (the Rips maximal distance between 2 points is 12.0, and the complex is expanded
+until dimension 1, i.e. the one-skeleton graph), the output is:
+
+.. testoutput::
+
+ Rips complex is of dimension 1 - 18 simplices - 7 vertices.
+ ([0], 0.0)
+ ([1], 0.0)
+ ([2], 0.0)
+ ([3], 0.0)
+ ([4], 0.0)
+ ([5], 0.0)
+ ([6], 0.0)
+ ([2, 3], 5.0)
+ ([4, 5], 5.385164807134504)
+ ([0, 2], 5.830951894845301)
+ ([0, 1], 6.082762530298219)
+ ([1, 3], 6.324555320336759)
+ ([1, 2], 6.708203932499369)
+ ([5, 6], 7.280109889280518)
+ ([2, 4], 8.94427190999916)
+ ([0, 3], 9.433981132056603)
+ ([4, 6], 9.486832980505138)
+ ([3, 6], 11.0)
+
+Example from OFF file
+^^^^^^^^^^^^^^^^^^^^^
+
+This example builds the :doc:`Rips_complex <rips_complex_ref>` from the
+points of an OFF file and a max_edge_length value.
+Then it creates a :doc:`Simplex_tree <simplex_tree_ref>` with it.
+
+Finally, it displays some information about the Rips complex.
+
+
+.. testcode::
+
+ import gudhi
+ rips_complex = gudhi.RipsComplex(off_file='alphacomplexdoc.off', max_edge_length=12.0)
+ simplex_tree = rips_complex.create_simplex_tree(max_dimension=1)
+ result_str = 'Rips complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \
+ repr(simplex_tree.num_simplices()) + ' simplices - ' + \
+ repr(simplex_tree.num_vertices()) + ' vertices.'
+ print(result_str)
+ for filtered_value in simplex_tree.get_filtered_tree():
+ print(filtered_value)
+
+the program output is:
+
+.. testoutput::
+
+ Rips complex is of dimension 1 - 18 simplices - 7 vertices.
+ ([0], 0.0)
+ ([1], 0.0)
+ ([2], 0.0)
+ ([3], 0.0)
+ ([4], 0.0)
+ ([5], 0.0)
+ ([6], 0.0)
+ ([2, 3], 5.0)
+ ([4, 5], 5.385164807134504)
+ ([0, 2], 5.830951894845301)
+ ([0, 1], 6.082762530298219)
+ ([1, 3], 6.324555320336759)
+ ([1, 2], 6.708203932499369)
+ ([5, 6], 7.280109889280518)
+ ([2, 4], 8.94427190999916)
+ ([0, 3], 9.433981132056603)
+ ([4, 6], 9.486832980505138)
+ ([3, 6], 11.0)
+
+Distance matrix
+---------------
+
+Example from a distance matrix
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+This example builds the one-skeleton graph from the given distance matrix and a max_edge_length value.
+Then it creates a :doc:`Simplex_tree <simplex_tree_ref>` with it.
+
+Finally, it displays some information about the simplicial complex.
+
+.. testcode::
+
+ import gudhi
+ rips_complex = gudhi.RipsComplex(distance_matrix=[[],
+ [6.0827625303],
+ [5.8309518948, 6.7082039325],
+ [9.4339811321, 6.3245553203, 5],
+ [13.0384048104, 15.6524758425, 8.94427191, 12.0415945788],
+ [18.0277563773, 19.6468827044, 13.152946438, 14.7648230602, 5.3851648071],
+ [17.88854382, 17.1172427686, 12.0830459736, 11, 9.4868329805, 7.2801098893]],
+ max_edge_length=12.0)
+
+ simplex_tree = rips_complex.create_simplex_tree(max_dimension=1)
+ result_str = 'Rips complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \
+ repr(simplex_tree.num_simplices()) + ' simplices - ' + \
+ repr(simplex_tree.num_vertices()) + ' vertices.'
+ print(result_str)
+ for filtered_value in simplex_tree.get_filtered_tree():
+ print(filtered_value)
+
+When launching it (the Rips maximal distance between 2 points is 12.0, and the complex is expanded
+until dimension 1, i.e. the one-skeleton graph), the output is:
+
+.. testoutput::
+
+ Rips complex is of dimension 1 - 18 simplices - 7 vertices.
+ ([0], 0.0)
+ ([1], 0.0)
+ ([2], 0.0)
+ ([3], 0.0)
+ ([4], 0.0)
+ ([5], 0.0)
+ ([6], 0.0)
+ ([2, 3], 5.0)
+ ([4, 5], 5.3851648071)
+ ([0, 2], 5.8309518948)
+ ([0, 1], 6.0827625303)
+ ([1, 3], 6.3245553203)
+ ([1, 2], 6.7082039325)
+ ([5, 6], 7.2801098893)
+ ([2, 4], 8.94427191)
+ ([0, 3], 9.4339811321)
+ ([4, 6], 9.4868329805)
+ ([3, 6], 11.0)
+
+Example from csv file
+^^^^^^^^^^^^^^^^^^^^^
+
+This example builds the :doc:`Rips_complex <rips_complex_ref>` from the
+distance matrix read in a csv file and a max_edge_length value.
+Then it creates a :doc:`Simplex_tree <simplex_tree_ref>` with it.
+
+Finally, it displays some information about the Rips complex.
+
+
+.. testcode::
+
+ import gudhi
+ rips_complex = gudhi.RipsComplex(csv_file='full_square_distance_matrix.csv', max_edge_length=12.0)
+ simplex_tree = rips_complex.create_simplex_tree(max_dimension=1)
+ result_str = 'Rips complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \
+ repr(simplex_tree.num_simplices()) + ' simplices - ' + \
+ repr(simplex_tree.num_vertices()) + ' vertices.'
+ print(result_str)
+ for filtered_value in simplex_tree.get_filtered_tree():
+ print(filtered_value)
+
+the program output is:
+
+.. testoutput::
+
+ Rips complex is of dimension 1 - 18 simplices - 7 vertices.
+ ([0], 0.0)
+ ([1], 0.0)
+ ([2], 0.0)
+ ([3], 0.0)
+ ([4], 0.0)
+ ([5], 0.0)
+ ([6], 0.0)
+ ([2, 3], 5.0)
+ ([4, 5], 5.3851648071)
+ ([0, 2], 5.8309518948)
+ ([0, 1], 6.0827625303)
+ ([1, 3], 6.3245553203)
+ ([1, 2], 6.7082039325)
+ ([5, 6], 7.2801098893)
+ ([2, 4], 8.94427191)
+ ([0, 3], 9.4339811321)
+ ([4, 6], 9.4868329805)
+ ([3, 6], 11.0)
diff --git a/src/cython/doc/simplex_tree_ref.rst b/src/cython/doc/simplex_tree_ref.rst
new file mode 100644
index 00000000..6d196843
--- /dev/null
+++ b/src/cython/doc/simplex_tree_ref.rst
@@ -0,0 +1,10 @@
+=============================
+Simplex tree reference manual
+=============================
+
+.. autoclass:: gudhi.SimplexTree
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+ .. automethod:: gudhi.SimplexTree.__init__
diff --git a/src/cython/doc/simplex_tree_sum.rst b/src/cython/doc/simplex_tree_sum.rst
new file mode 100644
index 00000000..3174fb62
--- /dev/null
+++ b/src/cython/doc/simplex_tree_sum.rst
@@ -0,0 +1,14 @@
+================================================================= =================================== ===================================
+:Author: Clément Maria :Introduced in: GUDHI 2.0.0 :Copyright: GPL v3
+================================================================= =================================== ===================================
+
++----------------------------------------------------------------+------------------------------------------------------------------------+
+| .. figure:: | The simplex tree is an efficient and flexible data structure for |
+| img/Simplex_tree_representation.png | representing general (filtered) simplicial complexes. |
+| :alt: Simplex tree representation | |
+| :figclass: align-center | The data structure is described in |
+| | :cite:`boissonnatmariasimplextreealgorithmica` |
+| Simplex tree representation | |
++----------------------------------------------------------------+------------------------------------------------------------------------+
+| :doc:`simplex_tree_user` | :doc:`simplex_tree_ref` |
++----------------------------------------------------------------+------------------------------------------------------------------------+
diff --git a/src/cython/doc/simplex_tree_user.rst b/src/cython/doc/simplex_tree_user.rst
new file mode 100644
index 00000000..cbfd34a7
--- /dev/null
+++ b/src/cython/doc/simplex_tree_user.rst
@@ -0,0 +1,69 @@
+========================
+Simplex tree user manual
+========================
+Definition
+----------
+
+.. include:: simplex_tree_sum.rst
+
+A simplicial complex :math:`\mathbf{K}` on a set of vertices :math:`V = \{1, \cdots ,|V|\}` is a collection of
+simplices :math:`\{\sigma\}`, :math:`\sigma \subseteq V` such that
+:math:`\tau \subseteq \sigma \in \mathbf{K} \rightarrow \tau \in \mathbf{K}`. The dimension :math:`n=|\sigma|-1` of
+:math:`\sigma` is its number of elements minus `1`.
+
+A filtration of a simplicial complex is a function :math:`f:\mathbf{K} \rightarrow \mathbb{R}` satisfying
+:math:`f(\tau)\leq f(\sigma)` whenever :math:`\tau \subseteq \sigma`. Ordering the simplices by increasing filtration
+values (breaking ties so that a simplex appears after its subsimplices of the same filtration value) provides an
+indexing scheme.
+
+
+Implementation
+--------------
+
+There are two implementations of complexes. The first one is the Simplex_tree data structure.
+The simplex tree is an efficient and flexible data structure for representing general (filtered) simplicial complexes.
+The data structure is described in :cite:`boissonnatmariasimplextreealgorithmica`.
+
+The second one is the Hasse_complex. The Hasse complex is a data structure representing explicitly all co-dimension 1
+incidence relations in a complex. It is consequently faster when accessing the boundary of a simplex, but is less
+compact and harder to construct from scratch.
+
+Example
+-------
+
+.. testcode::
+
+ import gudhi
+ st = gudhi.SimplexTree()
+ if st.insert_simplex([0, 1]):
+ print("[0, 1] inserted")
+ if st.insert_simplex_and_subfaces([0, 1, 2], filtration=4.0):
+ print("[0, 1, 2] inserted")
+ if st.find([0, 1]):
+ print("[0, 1] found")
+ result_str = 'num_vertices=' + repr(st.num_vertices())
+ print(result_str)
+ result_str = 'num_simplices=' + repr(st.num_simplices())
+ print(result_str)
+ print("skeleton_tree(2) =")
+ for sk_value in st.get_skeleton_tree(2):
+ print(sk_value)
+
+
+The output is:
+
+.. testoutput::
+
+ [0, 1] inserted
+ [0, 1, 2] inserted
+ [0, 1] found
+ num_vertices=3
+ num_simplices=7
+ skeleton_tree(2) =
+ ([0, 1, 2], 4.0)
+ ([0, 1], 0.0)
+ ([0, 2], 4.0)
+ ([0], 0.0)
+ ([1, 2], 4.0)
+ ([1], 4.0)
+ ([2], 4.0)
diff --git a/src/cython/doc/strong_witness_complex_ref.rst b/src/cython/doc/strong_witness_complex_ref.rst
new file mode 100644
index 00000000..4ed4fe46
--- /dev/null
+++ b/src/cython/doc/strong_witness_complex_ref.rst
@@ -0,0 +1,10 @@
+=======================================
+Strong witness complex reference manual
+=======================================
+
+.. autoclass:: gudhi.StrongWitnessComplex
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+ .. automethod:: gudhi.StrongWitnessComplex.__init__
diff --git a/src/cython/doc/tangential_complex_ref.rst b/src/cython/doc/tangential_complex_ref.rst
new file mode 100644
index 00000000..35589475
--- /dev/null
+++ b/src/cython/doc/tangential_complex_ref.rst
@@ -0,0 +1,10 @@
+===================================
+Tangential complex reference manual
+===================================
+
+.. autoclass:: gudhi.TangentialComplex
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+ .. automethod:: gudhi.TangentialComplex.__init__
diff --git a/src/cython/doc/tangential_complex_sum.rst b/src/cython/doc/tangential_complex_sum.rst
new file mode 100644
index 00000000..2b05bc10
--- /dev/null
+++ b/src/cython/doc/tangential_complex_sum.rst
@@ -0,0 +1,15 @@
+================================================================= =================================== ===================================
+:Author: Clément Jamin :Introduced in: GUDHI 2.0.0 :Copyright: GPL v3
+:Requires: CGAL :math:`\geq` 4.8.0 Eigen3
+================================================================= =================================== ===================================
+
++----------------------------------------------------------------+------------------------------------------------------------------------+
+| .. figure:: | A Tangential Delaunay complex is a simplicial complex designed to |
+| img/tc_examples.png | reconstruct a :math:`k`-dimensional manifold embedded in :math:`d`- |
+| :figclass: align-center | dimensional Euclidean space. The input is a point sample coming from |
+| | an unknown manifold. The running time depends only linearly on the |
+| **Tangential complex representation** | extrinsic dimension :math:`d` and exponentially on the intrinsic |
+| | dimension :math:`k`. |
++----------------------------------------------------------------+------------------------------------------------------------------------+
+| :doc:`tangential_complex_user` | :doc:`tangential_complex_ref` |
++----------------------------------------------------------------+------------------------------------------------------------------------+
diff --git a/src/cython/doc/tangential_complex_user.rst b/src/cython/doc/tangential_complex_user.rst
new file mode 100644
index 00000000..6a7e6e41
--- /dev/null
+++ b/src/cython/doc/tangential_complex_user.rst
@@ -0,0 +1,195 @@
+==============================
+Tangential complex user manual
+==============================
+.. include:: tangential_complex_sum.rst
+
+Definition
+----------
+
+A Tangential Delaunay complex is a simplicial complex designed to reconstruct a
+:math:`k`-dimensional smooth manifold embedded in :math:`d`-dimensional
+Euclidean space. The input is a point sample coming from an unknown manifold,
+which means that the points lie close to a structure of "small" intrinsic
+dimension. The running time depends only linearly on the extrinsic dimension
+:math:`d` and exponentially on the intrinsic dimension :math:`k`.
+
+An extensive description of the Tangential complex can be found in
+:cite:`tangentialcomplex2014`.
+
+What is a Tangential Complex?
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Let us start with the description of the Tangential complex of a simple
+example, with :math:`k = 1` and :math:`d = 2`. The input data is 4 points
+:math:`P` located on a curve embedded in 2D.
+
+.. figure:: img/tc_example_01.png
+ :alt: The input
+ :figclass: align-center
+
+ The input
+
+For each point :math:`p`, estimate its tangent subspace :math:`T_p` (e.g.
+using PCA).
+
+.. figure:: img/tc_example_02.png
+ :alt: The estimated normals
+ :figclass: align-center
+
+ The estimated normals
+
+
+Let us add the Voronoi diagram of the points in orange. For each point
+:math:`p`, construct its star in the Delaunay triangulation of :math:`P`
+restricted to :math:`T_p`.
+
+.. figure:: img/tc_example_03.png
+ :alt: The Voronoi diagram
+ :figclass: align-center
+
+ The Voronoi diagram
+
+The Tangential Delaunay complex is the union of those stars.
+
+In practice, neither the ambient Voronoi diagram nor the ambient Delaunay
+triangulation is computed. Instead, local :math:`k`-dimensional regular
+triangulations are computed with a limited number of points as we only need the
+star of each point. More details can be found in :cite:`tangentialcomplex2014`.
+
+Inconsistencies
+^^^^^^^^^^^^^^^
+Inconsistencies between the stars can occur. An inconsistency occurs when a
+simplex is not in the star of all its vertices.
+
+Let us take the same example.
+
+.. figure:: img/tc_example_07_before.png
+ :alt: Before
+ :figclass: align-center
+
+ Before
+
+Let us slightly move the tangent subspace :math:`T_q`.
+
+.. figure:: img/tc_example_07_after.png
+ :alt: After
+ :figclass: align-center
+
+ After
+
+Now, the star of :math:`Q` contains :math:`QP`, but the star of :math:`P` does
+not contain :math:`QP`. We have an inconsistency.
+
+.. figure:: img/tc_example_08.png
+ :alt: After
+ :figclass: align-center
+
+ After
+
+One way to solve inconsistencies is to randomly perturb the positions of the
+points involved in an inconsistency. In the current implementation, this
+perturbation is done in the tangent subspace of each point. The maximum
+perturbation radius is given as a parameter to the constructor.
+
+In most cases, we recommend providing a point set where the minimum distance
+between any two points is not too small. This can be achieved using the
+functions provided by the Subsampling module. Then, a good value to start with
+for the maximum perturbation radius would be around half the minimum distance
+between any two points. The Example with perturbation below shows an example of
+such a process.
+
+In most cases, this process is able to dramatically reduce the number of
+inconsistencies, but is not guaranteed to succeed.
+
+Output
+^^^^^^
+The result of the computation is exported as a Simplex_tree. It is the union of
+the stars of all the input points. A vertex in the Simplex Tree is the index of
+the point in the range provided by the user. The point corresponding to a
+vertex can also be obtained through the Tangential_complex::get_point function.
+Note that even if the positions of the points are perturbed, their original
+positions are kept (e.g. Tangential_complex::get_point returns the original
+position of the point).
+
+The result can be obtained after the computation of the Tangential complex
+itself and/or after the perturbation process.
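+
+For instance, a sketch of retrieving the original position of a vertex (assuming the Python class exposes a
+``get_point()`` method mirroring the C++ ``Tangential_complex::get_point`` mentioned above):
+
+.. code-block:: python
+
+    import gudhi
+
+    tc = gudhi.TangentialComplex(points=[[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]])
+    # get_point() is expected to return the original, unperturbed coordinates of vertex 0.
+    print(tc.get_point(0))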
+
+
+Simple example
+--------------
+
+This example builds the Tangential complex of a point set read from an OFF file.
+
+.. testcode::
+
+ import gudhi
+ tc = gudhi.TangentialComplex(off_file='alphacomplexdoc.off')
+ result_str = 'Tangential contains ' + repr(tc.num_simplices()) + \
+ ' simplices - ' + repr(tc.num_vertices()) + ' vertices.'
+ print(result_str)
+
+ st = tc.create_simplex_tree()
+ result_str = 'Simplex tree is of dimension ' + repr(st.dimension()) + \
+ ' - ' + repr(st.num_simplices()) + ' simplices - ' + \
+ repr(st.num_vertices()) + ' vertices.'
+ print(result_str)
+ for filtered_value in st.get_filtered_tree():
+ print(filtered_value)
+
+The output is:
+
+.. testoutput::
+
+ Tangential contains 12 simplices - 7 vertices.
+ Simplex tree is of dimension 1 - 15 simplices - 7 vertices.
+ ([0], 0.0)
+ ([1], 0.0)
+ ([0, 1], 0.0)
+ ([2], 0.0)
+ ([0, 2], 0.0)
+ ([1, 2], 0.0)
+ ([3], 0.0)
+ ([1, 3], 0.0)
+ ([4], 0.0)
+ ([2, 4], 0.0)
+ ([5], 0.0)
+ ([4, 5], 0.0)
+ ([6], 0.0)
+ ([3, 6], 0.0)
+ ([5, 6], 0.0)
+
+
+Example with perturbation
+-------------------------
+
+This example builds the Tangential complex of a point set, then tries to solve
+inconsistencies by perturbing the positions of points involved in inconsistent
+simplices.
+
+.. testcode::
+
+ import gudhi
+ tc = gudhi.TangentialComplex(points=[[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]])
+ result_str = 'Tangential contains ' + repr(tc.num_vertices()) + ' vertices.'
+ print(result_str)
+
+ if tc.num_inconsistent_simplices() > 0:
+ print('Tangential contains inconsistencies.')
+
+ tc.fix_inconsistencies_using_perturbation(10, 60)
+ if tc.num_inconsistent_simplices() == 0:
+ print('Inconsistencies have been fixed.')
+
+The output is:
+
+.. testoutput::
+
+ Tangential contains 4 vertices.
+ Inconsistencies have been fixed.
+
+Bibliography
+************
+
+.. bibliography:: bibliography.bib
+ :filter: docnames
+ :style: unsrt
diff --git a/src/cython/doc/todos.rst b/src/cython/doc/todos.rst
new file mode 100644
index 00000000..78972a4c
--- /dev/null
+++ b/src/cython/doc/todos.rst
@@ -0,0 +1,5 @@
+==========
+To be done
+==========
+
+.. todolist::
diff --git a/src/cython/doc/witness_complex_ref.rst b/src/cython/doc/witness_complex_ref.rst
new file mode 100644
index 00000000..c78760cb
--- /dev/null
+++ b/src/cython/doc/witness_complex_ref.rst
@@ -0,0 +1,10 @@
+================================
+Witness complex reference manual
+================================
+
+.. autoclass:: gudhi.WitnessComplex
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+ .. automethod:: gudhi.WitnessComplex.__init__
diff --git a/src/cython/doc/witness_complex_sum.rst b/src/cython/doc/witness_complex_sum.rst
new file mode 100644
index 00000000..b65522ba
--- /dev/null
+++ b/src/cython/doc/witness_complex_sum.rst
@@ -0,0 +1,17 @@
+================================================================= =================================== ===================================
+:Author: Siargey Kachanovich :Introduced in: GUDHI 2.0.0 :Copyright: GPL v3
+:Euclidean version requires: CGAL :math:`\geq` 4.6.0 Eigen3
+================================================================= =================================== ===================================
+
++-----------------------------------------------------------------+----------------------------------------------------------------------+
+| .. image:: | Witness complex :math:`Wit(W,L)` is a simplicial complex defined on |
+| img/Witness_complex_representation.png | two sets of points in :math:`\mathbb{R}^D`. |
+| | |
+| | The data structure is described in |
+| | :cite:`boissonnatmariasimplextreealgorithmica`. |
++-----------------------------------------------------------------+----------------------------------------------------------------------+
+| :doc:`witness_complex_user` | * :doc:`witness_complex_ref` |
+| | * :doc:`strong_witness_complex_ref` |
+| | * :doc:`euclidean_witness_complex_ref` |
+| | * :doc:`euclidean_strong_witness_complex_ref` |
++-----------------------------------------------------------------+----------------------------------------------------------------------+
diff --git a/src/cython/doc/witness_complex_user.rst b/src/cython/doc/witness_complex_user.rst
new file mode 100644
index 00000000..07945361
--- /dev/null
+++ b/src/cython/doc/witness_complex_user.rst
@@ -0,0 +1,135 @@
+===========================
+Witness complex user manual
+===========================
+Definition
+----------
+
+.. include:: witness_complex_sum.rst
+
+
+Definitions
+-----------
+
+The witness complex is a simplicial complex defined on two sets of points in :math:`\mathbb{R}^D`:
+
+- :math:`W` set of **witnesses** and
+- :math:`L` set of **landmarks**.
+
+Even though the set of landmarks :math:`L` is often a subset of the set of witnesses :math:`W`, this is not a
+requirement for the current implementation.
+
+Landmarks are the vertices of the simplicial complex, and witnesses help to decide which simplices are inserted via an
+"is witnessed" predicate.
+
+De Silva and Carlsson in their paper :cite:`de2004topological` differentiate **weak witnessing** and
+**strong witnessing**:
+
+- *weak*: :math:`\sigma \subset L` is witnessed by :math:`w \in W` if :math:`\forall l \in \sigma,\ \forall l' \in \mathbf{L \setminus \sigma},\ d(w,l) \leq d(w,l')`
+- *strong*: :math:`\sigma \subset L` is witnessed by :math:`w \in W` if :math:`\forall l \in \sigma,\ \forall l' \in \mathbf{L},\ d(w,l) \leq d(w,l')`
+
+where :math:`d(.,.)` is a distance function.
+
+Both definitions can be relaxed by a real value :math:`\alpha`:
+
+- *weak*: :math:`\sigma \subset L` is :math:`\alpha`-witnessed by :math:`w \in W` if :math:`\forall l \in \sigma,\ \forall l' \in \mathbf{L \setminus \sigma},\ d(w,l)^2 \leq d(w,l')^2 + \alpha^2`
+- *strong*: :math:`\sigma \subset L` is :math:`\alpha`-witnessed by :math:`w \in W` if :math:`\forall l \in \sigma,\ \forall l' \in \mathbf{L},\ d(w,l)^2 \leq d(w,l')^2 + \alpha^2`
+
+which leads to definitions of **weak relaxed witness complex** (or just relaxed witness complex for short) and
+**strong relaxed witness complex** respectively.
+
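+To make the predicates concrete, here is a small standalone sketch (plain Python, not part of the GUDHI API; the
+function names are made up for illustration) that checks whether a simplex is weakly or strongly
+:math:`\alpha`-witnessed by a point, assuming Euclidean distances and landmarks given as coordinate lists:
+
+.. code-block:: python
+
+    def squared_distance(p, q):
+        return sum((a - b) ** 2 for a, b in zip(p, q))
+
+    def is_alpha_witnessed(sigma, witness, landmarks, alpha=0.0, strong=False):
+        # sigma is a list of landmark indices, witness a point, landmarks a list of points
+        max_in_sigma = max(squared_distance(witness, landmarks[l]) for l in sigma)
+        if strong:
+            # strong witnessing compares the landmarks of sigma against all landmarks
+            others = range(len(landmarks))
+        else:
+            # weak witnessing only compares against landmarks outside sigma
+            others = [l for l in range(len(landmarks)) if l not in sigma]
+        return all(max_in_sigma <= squared_distance(witness, landmarks[l]) + alpha ** 2 for l in others)
+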
+.. figure:: img/swit.svg
+ :alt: Strongly witnessed simplex
+ :figclass: align-center
+
+ Strongly witnessed simplex
+
+
+In the particular case of 0-relaxation, the weak complex corresponds to the **witness complex** introduced in
+:cite:`de2004topological`, whereas the 0-relaxed strong witness complex consists of just vertices and is not very
+interesting. Hence, for small relaxations the weak version is preferable.
+However, to capture the homotopy type (for example using Gudhi::persistent_cohomology::Persistent_cohomology) it is
+often necessary to work with higher filtration values. In this case the strong relaxed witness complex is faster to
+compute and offers similar results.
+
+Implementation
+--------------
+
+The two complexes described above are implemented in the following classes
+
+- :doc:`witness_complex_ref`
+- :doc:`strong_witness_complex_ref`
+- :doc:`euclidean_witness_complex_ref`
+- :doc:`euclidean_strong_witness_complex_ref`
+
+The construction of the Euclidean versions of the complexes follows the same scheme:
+
+1. Construct a search tree on landmarks.
+2. Construct lists of nearest landmarks for each witness.
+3. Construct the witness complex for nearest landmark lists.
+
+In the non-Euclidean classes, the lists of nearest landmarks must be given as input.
+
+The constructors perform steps 1 and 2, while the function 'create_simplex_tree' executes step 3.
+
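+As a minimal sketch of this scheme (toy points made up for illustration; the keyword arguments are the same ones used
+in the full example below), the construction boils down to two calls:
+
+.. code-block:: python
+
+    import gudhi
+
+    witnesses = [[0., 0.], [1., 0.], [0., 1.], [1., 1.], [0.5, 0.5]]
+    landmarks = gudhi.pick_n_random_points(points=witnesses, nb_points=3)
+
+    # steps 1 and 2: the constructor builds the search tree and the nearest landmark lists
+    witness_complex = gudhi.EuclideanWitnessComplex(witnesses=witnesses, landmarks=landmarks)
+    # step 3: build the simplicial complex up to the given relaxation and dimension
+    simplex_tree = witness_complex.create_simplex_tree(max_alpha_square=0.5, limit_dimension=2)
+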
+Constructing a weak relaxed witness complex from an OFF file
+--------------------------------------------------------------
+
+Let's start with a simple example that reads an OFF point file and computes a weak witness complex.
+
+.. code-block:: python
+
+ import gudhi
+ import argparse
+
+ parser = argparse.ArgumentParser(description='EuclideanWitnessComplex creation from '
+                                     'points read in an OFF file.',
+ epilog='Example: '
+ 'example/witness_complex_diagram_persistence_from_off_file_example.py '
+                                     '-f ../data/points/tore3D_300.off -a 1.0 -n 20 -d 2 '
+                                     '- Constructs a weak witness complex with the '
+ 'points from the given OFF file.')
+ parser.add_argument("-f", "--file", type=str, required=True)
+ parser.add_argument("-a", "--max_alpha_square", type=float, required=True)
+ parser.add_argument("-n", "--number_of_landmarks", type=int, required=True)
+ parser.add_argument("-d", "--limit_dimension", type=int, required=True)
+
+ args = parser.parse_args()
+
+ with open(args.file, 'r') as f:
+ first_line = f.readline()
+ if (first_line == 'OFF\n') or (first_line == 'nOFF\n'):
+ print("#####################################################################")
+            print("EuclideanWitnessComplex creation from points read in an OFF file")
+
+ witnesses = gudhi.read_off(off_file=args.file)
+ landmarks = gudhi.pick_n_random_points(points=witnesses, nb_points=args.number_of_landmarks)
+
+            message = "EuclideanWitnessComplex with max_alpha_square=" + repr(args.max_alpha_square) + \
+ " - Number of landmarks=" + repr(args.number_of_landmarks)
+ print(message)
+
+ witness_complex = gudhi.EuclideanWitnessComplex(witnesses=witnesses, landmarks=landmarks)
+ simplex_tree = witness_complex.create_simplex_tree(max_alpha_square=args.max_alpha_square,
+ limit_dimension=args.limit_dimension)
+
+ message = "Number of simplices=" + repr(simplex_tree.num_simplices())
+ print(message)
+ else:
+ print(args.file, "is not a valid OFF file")
+
+ f.close()
+
+
+Example 2: Computing persistence using a strong relaxed witness complex
+------------------------------------------------------------------------
+
+Here is an example of constructing a strong witness complex filtration and computing persistence on it:
+
+* :download:`euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py>`
+
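+For reference, the core of that script reduces to the following calls (a condensed sketch; the full example also
+parses command-line arguments and checks the OFF file header, and the parameter values are taken from the example
+command line above):
+
+.. code-block:: python
+
+    import gudhi
+
+    witnesses = gudhi.read_off(off_file='../data/points/tore3D_300.off')
+    landmarks = gudhi.pick_n_random_points(points=witnesses, nb_points=20)
+
+    witness_complex = gudhi.EuclideanStrongWitnessComplex(witnesses=witnesses, landmarks=landmarks)
+    simplex_tree = witness_complex.create_simplex_tree(max_alpha_square=1.0, limit_dimension=2)
+
+    diag = simplex_tree.persistence()
+    gudhi.plot_persistence_diagram(diag)
+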
+Bibliography
+************
+
+.. bibliography:: bibliography.bib
+ :filter: docnames
+ :style: unsrt
diff --git a/src/cython/example/alpha_complex_diagram_persistence_from_off_file_example.py b/src/cython/example/alpha_complex_diagram_persistence_from_off_file_example.py
new file mode 100755
index 00000000..adedc7d2
--- /dev/null
+++ b/src/cython/example/alpha_complex_diagram_persistence_from_off_file_example.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+
+import gudhi
+import argparse
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+parser = argparse.ArgumentParser(description='AlphaComplex creation from '
+                                 'points read in an OFF file.',
+ epilog='Example: '
+ 'example/alpha_complex_diagram_persistence_from_off_file_example.py '
+                                 '-f ../data/points/tore3D_300.off -a 0.6 '
+                                 '- Constructs an alpha complex with the '
+ 'points from the given OFF file.')
+parser.add_argument("-f", "--file", type=str, required=True)
+parser.add_argument("-a", "--max_alpha_square", type=float, default=0.5)
+parser.add_argument('--no-diagram', default=False, action='store_true', help='Do not display the persistence diagram')
+
+args = parser.parse_args()
+
+with open(args.file, 'r') as f:
+ first_line = f.readline()
+ if (first_line == 'OFF\n') or (first_line == 'nOFF\n'):
+ print("#####################################################################")
+        print("AlphaComplex creation from points read in an OFF file")
+
+        message = "AlphaComplex with max_alpha_square=" + repr(args.max_alpha_square)
+ print(message)
+
+ alpha_complex = gudhi.AlphaComplex(off_file=args.file)
+ simplex_tree = alpha_complex.create_simplex_tree(max_alpha_square=args.max_alpha_square)
+
+ message = "Number of simplices=" + repr(simplex_tree.num_simplices())
+ print(message)
+
+ diag = simplex_tree.persistence()
+
+ print("betti_numbers()=")
+ print(simplex_tree.betti_numbers())
+
+ if args.no_diagram == False:
+ gudhi.plot_persistence_diagram(diag)
+ else:
+ print(args.file, "is not a valid OFF file")
+
+ f.close()
diff --git a/src/cython/example/alpha_complex_from_points_example.py b/src/cython/example/alpha_complex_from_points_example.py
new file mode 100755
index 00000000..688edb65
--- /dev/null
+++ b/src/cython/example/alpha_complex_from_points_example.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python
+
+from gudhi import AlphaComplex, SimplexTree
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+print("#####################################################################")
+print("AlphaComplex creation from points")
+alpha_complex = AlphaComplex(points=[[0, 0], [1, 0], [0, 1], [1, 1]])
+simplex_tree = alpha_complex.create_simplex_tree(max_alpha_square=60.0)
+
+if simplex_tree.find([0, 1]):
+ print("[0, 1] Found !!")
+else:
+ print("[0, 1] Not found...")
+
+if simplex_tree.find([4]):
+ print("[4] Found !!")
+else:
+ print("[4] Not found...")
+
+if simplex_tree.insert([0, 1, 2], filtration=4.0):
+ print("[0, 1, 2] Inserted !!")
+else:
+ print("[0, 1, 2] Not inserted...")
+
+if simplex_tree.insert([0, 1, 4], filtration=4.0):
+ print("[0, 1, 4] Inserted !!")
+else:
+ print("[0, 1, 4] Not inserted...")
+
+if simplex_tree.find([4]):
+ print("[4] Found !!")
+else:
+ print("[4] Not found...")
+
+print("dimension=", simplex_tree.dimension())
+print("filtered_tree=", simplex_tree.get_filtered_tree())
+print("star([0])=", simplex_tree.get_star([0]))
+print("coface([0], 1)=", simplex_tree.get_cofaces([0], 1))
+
+print("point[0]=", alpha_complex.get_point(0))
+print("point[5]=", alpha_complex.get_point(5))
diff --git a/src/cython/example/alpha_rips_persistence_bottleneck_distance.py b/src/cython/example/alpha_rips_persistence_bottleneck_distance.py
new file mode 100755
index 00000000..ab5fc1e9
--- /dev/null
+++ b/src/cython/example/alpha_rips_persistence_bottleneck_distance.py
@@ -0,0 +1,100 @@
+#!/usr/bin/env python
+
+import gudhi
+import argparse
+import math
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+parser = argparse.ArgumentParser(description='AlphaComplex and RipsComplex '
+ 'persistence creation from points read in '
+                                 'an OFF file. Bottleneck distance computation'
+ ' on each dimension',
+ epilog='Example: '
+ 'example/alpha_rips_persistence_bottleneck_distance.py '
+ '-f ../data/points/tore3D_1307.off -t 0.15 -d 3')
+parser.add_argument("-f", "--file", type=str, required=True)
+parser.add_argument("-t", "--threshold", type=float, default=0.5)
+parser.add_argument("-d", "--max_dimension", type=int, default=1)
+
+args = parser.parse_args()
+with open(args.file, 'r') as f:
+ first_line = f.readline()
+ if (first_line == 'OFF\n') or (first_line == 'nOFF\n'):
+ print("#####################################################################")
+        print("RipsComplex creation from points read in an OFF file")
+
+ message = "RipsComplex with max_edge_length=" + repr(args.threshold)
+ print(message)
+
+ rips_complex = gudhi.RipsComplex(off_file=args.file,
+ max_edge_length=args.threshold)
+
+ rips_stree = rips_complex.create_simplex_tree(max_dimension=args.max_dimension)
+
+ message = "Number of simplices=" + repr(rips_stree.num_simplices())
+ print(message)
+
+ rips_diag = rips_stree.persistence()
+
+ print("#####################################################################")
+        print("AlphaComplex creation from points read in an OFF file")
+
+        message = "AlphaComplex with max_alpha_square=" + repr(args.threshold * args.threshold)
+ print(message)
+
+ alpha_complex = gudhi.AlphaComplex(off_file=args.file)
+ alpha_stree = alpha_complex.create_simplex_tree(max_alpha_square=(args.threshold * args.threshold))
+
+ message = "Number of simplices=" + repr(alpha_stree.num_simplices())
+ print(message)
+
+ alpha_diag = alpha_stree.persistence()
+
+ max_b_distance = 0.0
+ for dim in range(args.max_dimension):
+            # Alpha filtration values are alpha square values, so take their
+            # square roots before comparing with the Rips intervals
+            alpha_intervals = []
+            for interval in alpha_stree.persistence_intervals_in_dimension(dim):
+                alpha_intervals.append([math.sqrt(interval[0]), math.sqrt(interval[1])])
+
+ rips_intervals = rips_stree.persistence_intervals_in_dimension(dim)
+ bottleneck_distance = gudhi.bottleneck_distance(rips_intervals, alpha_intervals)
+ message = "In dimension " + repr(dim) + ", bottleneck distance = " + repr(bottleneck_distance)
+ print(message)
+ max_b_distance = max(bottleneck_distance, max_b_distance)
+
+ print("================================================================================")
+ message = "Bottleneck distance is " + repr(max_b_distance)
+ print(message)
+
+ else:
+ print(args.file, "is not a valid OFF file")
+
+ f.close()
diff --git a/src/cython/example/bottleneck_basic_example.py b/src/cython/example/bottleneck_basic_example.py
new file mode 100755
index 00000000..31cecb29
--- /dev/null
+++ b/src/cython/example/bottleneck_basic_example.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python
+
+import gudhi
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Francois Godi, Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Francois Godi, Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+diag1 = [[2.7, 3.7],[9.6, 14.],[34.2, 34.974], [3.,float('Inf')]]
+
+diag2 = [[2.8, 4.45],[9.5, 14.1],[3.2,float('Inf')]]
+
+message = "diag1=" + repr(diag1)
+print(message)
+
+message = "diag2=" + repr(diag2)
+print(message)
+
+message = "Bottleneck distance approximation=" + repr(gudhi.bottleneck_distance(diag1, diag2, 0.1))
+print(message)
+
+message = "Bottleneck distance exact value=" + repr(gudhi.bottleneck_distance(diag1, diag2))
+print(message)
+
diff --git a/src/cython/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py b/src/cython/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py
new file mode 100755
index 00000000..2474fc87
--- /dev/null
+++ b/src/cython/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+
+import gudhi
+import argparse
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+parser = argparse.ArgumentParser(description='EuclideanStrongWitnessComplex creation from '
+                                 'points read in an OFF file.',
+ epilog='Example: '
+                                 'example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py '
+                                 '-f ../data/points/tore3D_300.off -a 1.0 -n 20 -d 2 '
+ '- Constructs a strong witness complex with the '
+ 'points from the given OFF file.')
+parser.add_argument("-f", "--file", type=str, required=True)
+parser.add_argument("-a", "--max_alpha_square", type=float, required=True)
+parser.add_argument("-n", "--number_of_landmarks", type=int, required=True)
+parser.add_argument("-d", "--limit_dimension", type=int, required=True)
+parser.add_argument('--no-diagram', default=False, action='store_true', help='Do not display the persistence diagram')
+
+args = parser.parse_args()
+
+with open(args.file, 'r') as f:
+ first_line = f.readline()
+ if (first_line == 'OFF\n') or (first_line == 'nOFF\n'):
+ print("#####################################################################")
+        print("EuclideanStrongWitnessComplex creation from points read in an OFF file")
+
+ witnesses = gudhi.read_off(off_file=args.file)
+ landmarks = gudhi.pick_n_random_points(points=witnesses, nb_points=args.number_of_landmarks)
+
+        message = "EuclideanStrongWitnessComplex with max_alpha_square=" + repr(args.max_alpha_square) + \
+ " - Number of landmarks=" + repr(args.number_of_landmarks)
+ print(message)
+
+ witness_complex = gudhi.EuclideanStrongWitnessComplex(witnesses=witnesses, landmarks=landmarks)
+ simplex_tree = witness_complex.create_simplex_tree(max_alpha_square=args.max_alpha_square,
+ limit_dimension=args.limit_dimension)
+
+ message = "Number of simplices=" + repr(simplex_tree.num_simplices())
+ print(message)
+
+ diag = simplex_tree.persistence()
+
+ print("betti_numbers()=")
+ print(simplex_tree.betti_numbers())
+
+ if args.no_diagram == False:
+ gudhi.plot_persistence_diagram(diag)
+
+ else:
+ print(args.file, "is not a valid OFF file")
+
+ f.close()
diff --git a/src/cython/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py b/src/cython/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py
new file mode 100755
index 00000000..5a75417b
--- /dev/null
+++ b/src/cython/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+
+import gudhi
+import argparse
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+parser = argparse.ArgumentParser(description='EuclideanWitnessComplex creation from '
+                                 'points read in an OFF file.',
+ epilog='Example: '
+                                 'example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py '
+                                 '-f ../data/points/tore3D_300.off -a 1.0 -n 20 -d 2 '
+ '- Constructs a weak witness complex with the '
+ 'points from the given OFF file.')
+parser.add_argument("-f", "--file", type=str, required=True)
+parser.add_argument("-a", "--max_alpha_square", type=float, required=True)
+parser.add_argument("-n", "--number_of_landmarks", type=int, required=True)
+parser.add_argument("-d", "--limit_dimension", type=int, required=True)
+parser.add_argument('--no-diagram', default=False, action='store_true', help='Do not display the persistence diagram')
+
+args = parser.parse_args()
+
+with open(args.file, 'r') as f:
+ first_line = f.readline()
+ if (first_line == 'OFF\n') or (first_line == 'nOFF\n'):
+ print("#####################################################################")
+        print("EuclideanWitnessComplex creation from points read in an OFF file")
+
+ witnesses = gudhi.read_off(off_file=args.file)
+ landmarks = gudhi.pick_n_random_points(points=witnesses, nb_points=args.number_of_landmarks)
+
+        message = "EuclideanWitnessComplex with max_alpha_square=" + repr(args.max_alpha_square) + \
+ " - Number of landmarks=" + repr(args.number_of_landmarks)
+ print(message)
+
+ witness_complex = gudhi.EuclideanWitnessComplex(witnesses=witnesses, landmarks=landmarks)
+ simplex_tree = witness_complex.create_simplex_tree(max_alpha_square=args.max_alpha_square,
+ limit_dimension=args.limit_dimension)
+
+ message = "Number of simplices=" + repr(simplex_tree.num_simplices())
+ print(message)
+
+ diag = simplex_tree.persistence()
+
+ print("betti_numbers()=")
+ print(simplex_tree.betti_numbers())
+
+ if args.no_diagram == False:
+ gudhi.plot_persistence_diagram(diag)
+
+ else:
+ print(args.file, "is not a valid OFF file")
+
+ f.close()
diff --git a/src/cython/example/gudhi_graphical_tools_example.py b/src/cython/example/gudhi_graphical_tools_example.py
new file mode 100755
index 00000000..bc3b16ec
--- /dev/null
+++ b/src/cython/example/gudhi_graphical_tools_example.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python
+
+import gudhi
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+print("#####################################################################")
+print("Show palette colors values for dimension")
+
+gudhi.show_palette_values()
+
+print("#####################################################################")
+print("Show barcode persistence example")
+
+persistence = [(2, (1.0, float('inf'))), (1, (1.4142135623730951, float('inf'))),
+ (1, (1.4142135623730951, float('inf'))), (0, (0.0, float('inf'))),
+ (0, (0.0, 1.0)), (0, (0.0, 1.0)), (0, (0.0, 1.0))]
+gudhi.plot_persistence_barcode(persistence)
+
+print("#####################################################################")
+print("Show diagram persistence example")
+
+gudhi.plot_persistence_diagram(persistence)
diff --git a/src/cython/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py b/src/cython/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py
new file mode 100755
index 00000000..db530161
--- /dev/null
+++ b/src/cython/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py
@@ -0,0 +1,76 @@
+#!/usr/bin/env python
+
+import gudhi
+import argparse
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+def is_file_perseus(file_name):
+    # A Perseus file starts with the number of dimensions, followed by one size
+    # per dimension and then one value per top dimensional cell.
+    with open(file_name) as f:
+        num_lines = f.read().count('\n')
+    try:
+        with open(file_name) as f:
+            num_dim = int(f.readline())
+            coeff = 1
+            for dim in range(0, num_dim):
+                try:
+                    line = int(f.readline())
+                    coeff *= abs(line)
+                except ValueError:
+                    return False
+            return num_lines == (1 + num_dim + coeff)
+    except ValueError:
+        return False
+
+parser = argparse.ArgumentParser(description='Periodic cubical complex from a '
+ 'perseus file style name.',
+ epilog='Example: '
+ './periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py'
+ ' -f ../data/bitmap/CubicalTwoSphere.txt')
+
+parser.add_argument("-f", "--file", type=str, required=True)
+parser.add_argument('--no-barcode', default=False, action='store_true', help='Do not display the persistence barcode')
+
+args = parser.parse_args()
+
+if is_file_perseus(args.file):
+ print("#####################################################################")
+ print("PeriodicCubicalComplex creation")
+ periodic_cubical_complex = gudhi.PeriodicCubicalComplex(perseus_file=args.file)
+
+ print("persistence(homology_coeff_field=3, min_persistence=0)=")
+ diag = periodic_cubical_complex.persistence(homology_coeff_field=3, min_persistence=0)
+ print(diag)
+
+ print("betti_numbers()=")
+ print(periodic_cubical_complex.betti_numbers())
+ if args.no_barcode == False:
+ gudhi.plot_persistence_barcode(diag)
+else:
+ print(args.file, "is not a valid perseus style file")
diff --git a/src/cython/example/random_cubical_complex_persistence_example.py b/src/cython/example/random_cubical_complex_persistence_example.py
new file mode 100755
index 00000000..1c55f777
--- /dev/null
+++ b/src/cython/example/random_cubical_complex_persistence_example.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python
+
+import gudhi
+import numpy
+import argparse
+import operator
+from functools import reduce  # reduce is a builtin only in Python 2; import it for Python 3
+
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+parser = argparse.ArgumentParser(description='Random cubical complex.',
+ epilog='Example: '
+ './random_cubical_complex_persistence_example.py'
+ ' 10 10 10 - Constructs a random cubical '
+                                 'complex of dimensions [10, 10, 10] (i.e. '
+ '1000 random top dimensional cells).')
+parser.add_argument('dimension', type=int, nargs="*",
+ help='Cubical complex dimensions')
+
+args = parser.parse_args()
+dimension_multiplication = reduce(operator.mul, args.dimension, 1)
+
+if dimension_multiplication > 1:
+ print("#####################################################################")
+ print("CubicalComplex creation")
+ cubical_complex = gudhi.CubicalComplex(dimensions=args.dimension,
+ top_dimensional_cells = numpy.random.rand(dimension_multiplication))
+
+ print("persistence(homology_coeff_field=2, min_persistence=0)=")
+ print(cubical_complex.persistence(homology_coeff_field=2, min_persistence=0))
+
+ print("betti_numbers()=")
+ print(cubical_complex.betti_numbers()) \ No newline at end of file
diff --git a/src/cython/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py b/src/cython/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py
new file mode 100755
index 00000000..664eb5c4
--- /dev/null
+++ b/src/cython/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py
@@ -0,0 +1,71 @@
+#!/usr/bin/env python
+
+import gudhi
+import argparse
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+parser = argparse.ArgumentParser(description='RipsComplex creation from '
+                                 'a distance matrix read in a csv file.',
+ epilog='Example: '
+ 'example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py '
+ '-f ../data/distance_matrix/lower_triangular_distance_matrix.csv -e 12.0 -d 3'
+ '- Constructs a Rips complex with the '
+ 'points from the given OFF file.')
+parser.add_argument("-f", "--file", type=str, required=True)
+parser.add_argument("-e", "--max_edge_length", type=float, default=0.5)
+parser.add_argument("-d", "--max_dimension", type=int, default=1)
+parser.add_argument('--no-diagram', default=False, action='store_true', help='Do not display the persistence diagram')
+
+args = parser.parse_args()
+
+with open(args.file, 'r') as f:
+ first_line = f.readline()
+ if (first_line == 'OFF\n') or (first_line == 'nOFF\n'):
+ print("#####################################################################")
+ print("RipsComplex creation from distance matrix read in a csv file")
+
+ message = "RipsComplex with max_edge_length=" + repr(args.max_edge_length)
+ print(message)
+
+ rips_complex = gudhi.RipsComplex(off_file=args.file, max_edge_length=args.max_edge_length)
+ simplex_tree = rips_complex.create_simplex_tree(max_dimension=args.max_dimension)
+
+ message = "Number of simplices=" + repr(simplex_tree.num_simplices())
+ print(message)
+
+ diag = simplex_tree.persistence()
+
+ print("betti_numbers()=")
+ print(simplex_tree.betti_numbers())
+
+ if args.no_diagram == False:
+ gudhi.plot_persistence_diagram(diag)
+ else:
+ print(args.file, "is not a valid OFF file")
+
+ f.close()
diff --git a/src/cython/example/rips_complex_diagram_persistence_from_off_file_example.py b/src/cython/example/rips_complex_diagram_persistence_from_off_file_example.py
new file mode 100755
index 00000000..4c21b98e
--- /dev/null
+++ b/src/cython/example/rips_complex_diagram_persistence_from_off_file_example.py
@@ -0,0 +1,71 @@
+#!/usr/bin/env python
+
+import gudhi
+import argparse
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+parser = argparse.ArgumentParser(description='RipsComplex creation from '
+                                 'points read in an OFF file.',
+ epilog='Example: '
+ 'example/rips_complex_diagram_persistence_from_off_file_example.py '
+                                 '-f ../data/points/tore3D_300.off -e 0.6 '
+ '- Constructs a Rips complex with the '
+ 'points from the given OFF file.')
+parser.add_argument("-f", "--file", type=str, required=True)
+parser.add_argument("-e", "--max_edge_length", type=float, default=0.5)
+parser.add_argument("-d", "--max_dimension", type=int, default=1)
+parser.add_argument('--no-diagram', default=False, action='store_true', help='Do not display the persistence diagram')
+
+args = parser.parse_args()
+
+with open(args.file, 'r') as f:
+ first_line = f.readline()
+ if (first_line == 'OFF\n') or (first_line == 'nOFF\n'):
+ print("#####################################################################")
+        print("RipsComplex creation from points read in an OFF file")
+
+ message = "RipsComplex with max_edge_length=" + repr(args.max_edge_length)
+ print(message)
+
+ rips_complex = gudhi.RipsComplex(off_file=args.file, max_edge_length=args.max_edge_length)
+ simplex_tree = rips_complex.create_simplex_tree(max_dimension=args.max_dimension)
+
+ message = "Number of simplices=" + repr(simplex_tree.num_simplices())
+ print(message)
+
+ diag = simplex_tree.persistence()
+
+ print("betti_numbers()=")
+ print(simplex_tree.betti_numbers())
+
+ if args.no_diagram == False:
+ gudhi.plot_persistence_diagram(diag)
+ else:
+ print(args.file, "is not a valid OFF file")
+
+ f.close()
diff --git a/src/cython/example/rips_complex_from_points_example.py b/src/cython/example/rips_complex_from_points_example.py
new file mode 100755
index 00000000..9b7fc79d
--- /dev/null
+++ b/src/cython/example/rips_complex_from_points_example.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python
+
+import gudhi
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+print("#####################################################################")
+print("RipsComplex creation from points")
+rips = gudhi.RipsComplex(points=[[0, 0], [1, 0], [0, 1], [1, 1]],
+ max_edge_length=42)
+
+simplex_tree = rips.create_simplex_tree(max_dimension=1)
+
+print("filtered_tree=", simplex_tree.get_filtered_tree())
+print("star([0])=", simplex_tree.get_star([0]))
+print("coface([0], 1)=", simplex_tree.get_cofaces([0], 1))
diff --git a/src/cython/example/rips_persistence_diagram.py b/src/cython/example/rips_persistence_diagram.py
new file mode 100755
index 00000000..4e5cd2c8
--- /dev/null
+++ b/src/cython/example/rips_persistence_diagram.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python
+
+import gudhi
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Marc Glisse
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Marc Glisse"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+print("#####################################################################")
+print("RipsComplex creation from points")
+rips = gudhi.RipsComplex(points=[[0, 0], [1, 0], [0, 1], [1, 1]],
+ max_edge_length=42)
+
+simplex_tree = rips.create_simplex_tree(max_dimension=1)
+
+
+diag = simplex_tree.persistence(homology_coeff_field=2, min_persistence=0)
+print("diag=", diag)
+
+gudhi.plot_persistence_diagram(diag)
diff --git a/src/cython/example/simplex_tree_example.py b/src/cython/example/simplex_tree_example.py
new file mode 100755
index 00000000..bf5f17a2
--- /dev/null
+++ b/src/cython/example/simplex_tree_example.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python
+
+import gudhi
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+print("#####################################################################")
+print("SimplexTree creation from insertion")
+
+st = gudhi.SimplexTree()
+
+if st.insert([0, 1]):
+ print("Inserted !!")
+else:
+ print("Not inserted...")
+
+if st.find([0, 1]):
+ print("Found !!")
+else:
+ print("Not found...")
+
+if st.insert([0, 1, 2], filtration=4.0):
+ print("Inserted !!")
+else:
+ print("Not inserted...")
+
+# FIXME: Remove this line
+st.set_dimension(3)
+print("dimension=", st.dimension())
+
+st.set_filtration(4.0)
+st.initialize_filtration()
+print("filtration=", st.get_filtration())
+print("filtration[1, 2]=", st.filtration([1, 2]))
+print("filtration[4, 2]=", st.filtration([4, 2]))
+
+print("num_simplices=", st.num_simplices())
+print("num_vertices=", st.num_vertices())
+
+print("skeleton_tree[2]=", st.get_skeleton_tree(2))
+print("skeleton_tree[1]=", st.get_skeleton_tree(1))
+print("skeleton_tree[0]=", st.get_skeleton_tree(0))
diff --git a/src/cython/example/tangential_complex_plain_homology_from_off_file_example.py b/src/cython/example/tangential_complex_plain_homology_from_off_file_example.py
new file mode 100755
index 00000000..4845eb47
--- /dev/null
+++ b/src/cython/example/tangential_complex_plain_homology_from_off_file_example.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python
+
+import gudhi
+import argparse
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+parser = argparse.ArgumentParser(description='TangentialComplex creation from '
+                                 'points read in an OFF file.',
+ epilog='Example: '
+ 'example/tangential_complex_plain_homology_from_off_file_example.py '
+                                 '-f ../data/points/tore3D_300.off '
+ '- Constructs a tangential complex with the '
+ 'points from the given OFF file')
+parser.add_argument("-f", "--file", type=str, required=True)
+parser.add_argument('--no-diagram', default=False, action='store_true', help='Do not display the persistence diagram')
+
+args = parser.parse_args()
+
+with open(args.file, 'r') as f:
+ first_line = f.readline()
+ if (first_line == 'OFF\n') or (first_line == 'nOFF\n'):
+ print("#####################################################################")
+        print("TangentialComplex creation from points read in an OFF file")
+
+ tc = gudhi.TangentialComplex(off_file=args.file)
+ st = tc.create_simplex_tree()
+
+ message = "Number of simplices=" + repr(st.num_simplices())
+ print(message)
+
+ diag = st.persistence(persistence_dim_max = True)
+
+ print("betti_numbers()=")
+ print(st.betti_numbers())
+
+ if args.no_diagram == False:
+ gudhi.plot_persistence_diagram(diag)
+ else:
+ print(args.file, "is not a valid OFF file")
+
+ f.close()
diff --git a/src/cython/example/witness_complex_from_nearest_landmark_table.py b/src/cython/example/witness_complex_from_nearest_landmark_table.py
new file mode 100755
index 00000000..92ed970b
--- /dev/null
+++ b/src/cython/example/witness_complex_from_nearest_landmark_table.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+
+from gudhi import StrongWitnessComplex, SimplexTree
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+print("#####################################################################")
+print("WitnessComplex creation from nearest landmark table")
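+# Each row corresponds to one witness and lists [landmark index, filtration value] pairs,
+# sorted by increasing value.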
+nearest_landmark_table = [[[0, 0], [1, 1], [2, 2], [3, 3], [4, 4]],
+ [[1, 0], [2, 1], [3, 2], [4, 3], [0, 4]],
+ [[2, 0], [3, 1], [4, 2], [0, 3], [1, 4]],
+ [[3, 0], [4, 1], [0, 2], [1, 3], [2, 4]],
+ [[4, 0], [0, 1], [1, 2], [2, 3], [3, 4]]]
+
+witness_complex = StrongWitnessComplex(nearest_landmark_table=nearest_landmark_table)
+simplex_tree = witness_complex.create_simplex_tree(max_alpha_square=4.1)
+
+message = "Number of simplices: " + repr(simplex_tree.num_simplices())
+print(message)
+
+diag = simplex_tree.persistence(min_persistence=-0.1, homology_coeff_field=11)
+print(diag)
diff --git a/src/cython/gudhi.pyx.in b/src/cython/gudhi.pyx.in
new file mode 100644
index 00000000..34d7c3b5
--- /dev/null
+++ b/src/cython/gudhi.pyx.in
@@ -0,0 +1,39 @@
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+include "cython/off_reader.pyx"
+include "cython/simplex_tree.pyx"
+include "cython/rips_complex.pyx"
+include "cython/cubical_complex.pyx"
+include "cython/periodic_cubical_complex.pyx"
+include "cython/persistence_graphical_tools.py"
+include "cython/witness_complex.pyx"
+include "cython/strong_witness_complex.pyx"
+@GUDHI_CYTHON_ALPHA_COMPLEX@
+@GUDHI_CYTHON_EUCLIDEAN_WITNESS_COMPLEX@
+@GUDHI_CYTHON_SUBSAMPLING@
+@GUDHI_CYTHON_TANGENTIAL_COMPLEX@
+@GUDHI_CYTHON_BOTTLENECK_DISTANCE@
diff --git a/src/cython/include/Alpha_complex_interface.h b/src/cython/include/Alpha_complex_interface.h
new file mode 100644
index 00000000..d47db71f
--- /dev/null
+++ b/src/cython/include/Alpha_complex_interface.h
@@ -0,0 +1,82 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef INCLUDE_ALPHA_COMPLEX_INTERFACE_H_
+#define INCLUDE_ALPHA_COMPLEX_INTERFACE_H_
+
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Alpha_complex.h>
+#include <CGAL/Epick_d.h>
+
+#include "Simplex_tree_interface.h"
+
+#include <iostream>
+#include <vector>
+#include <string>
+
+namespace Gudhi {
+
+namespace alpha_complex {
+
+class Alpha_complex_interface {
+ using Dynamic_kernel = CGAL::Epick_d< CGAL::Dynamic_dimension_tag >;
+ using Point_d = Dynamic_kernel::Point_d;
+
+ public:
+ Alpha_complex_interface(const std::vector<std::vector<double>>& points) {
+ alpha_complex_ = new Alpha_complex<Dynamic_kernel>(points);
+ }
+
+ Alpha_complex_interface(const std::string& off_file_name, bool from_file = true) {
+ alpha_complex_ = new Alpha_complex<Dynamic_kernel>(off_file_name);
+ }
+
+ ~Alpha_complex_interface() {
+ delete alpha_complex_;
+ }
+
+ std::vector<double> get_point(int vh) {
+ std::vector<double> vd;
+ try {
+ Point_d ph = alpha_complex_->get_point(vh);
+ for (auto coord = ph.cartesian_begin(); coord < ph.cartesian_end(); coord++)
+ vd.push_back(*coord);
+    } catch (const std::out_of_range&) {
+ // std::out_of_range is thrown in case not found. Other exceptions must be re-thrown
+ }
+ return vd;
+ }
+
+ void create_simplex_tree(Simplex_tree_interface<>* simplex_tree, double max_alpha_square) {
+ alpha_complex_->create_complex(*simplex_tree, max_alpha_square);
+ simplex_tree->initialize_filtration();
+ }
+
+ private:
+ Alpha_complex<Dynamic_kernel>* alpha_complex_;
+};
+
+} // namespace alpha_complex
+
+} // namespace Gudhi
+
+#endif // INCLUDE_ALPHA_COMPLEX_INTERFACE_H_
diff --git a/src/cython/include/Bottleneck_distance_interface.h b/src/cython/include/Bottleneck_distance_interface.h
new file mode 100644
index 00000000..d5fbf6ea
--- /dev/null
+++ b/src/cython/include/Bottleneck_distance_interface.h
@@ -0,0 +1,53 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef INCLUDE_BOTTLENECK_DISTANCE_INTERFACE_H_
+#define INCLUDE_BOTTLENECK_DISTANCE_INTERFACE_H_
+
+#include <gudhi/Bottleneck.h>
+
+#include <iostream>
+#include <vector>
+#include <utility> // for std::pair
+
+namespace Gudhi {
+
+namespace persistence_diagram {
+
+  // bottleneck_distance is wrapped as 'bottleneck' so that the Python function can be named bottleneck_distance
+ double bottleneck(const std::vector<std::pair<double, double>>& diag1,
+ const std::vector<std::pair<double, double>>& diag2,
+ double e) {
+ return bottleneck_distance(diag1, diag2, e);
+ }
+
+ double bottleneck(const std::vector<std::pair<double, double>>& diag1,
+ const std::vector<std::pair<double, double>>& diag2) {
+ return bottleneck_distance(diag1, diag2);
+ }
+
+} // namespace persistence_diagram
+
+} // namespace Gudhi
+
+
+#endif // INCLUDE_BOTTLENECK_DISTANCE_INTERFACE_H_
diff --git a/src/cython/include/Cubical_complex_interface.h b/src/cython/include/Cubical_complex_interface.h
new file mode 100644
index 00000000..7c0148f1
--- /dev/null
+++ b/src/cython/include/Cubical_complex_interface.h
@@ -0,0 +1,56 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef INCLUDE_CUBICAL_COMPLEX_INTERFACE_H_
+#define INCLUDE_CUBICAL_COMPLEX_INTERFACE_H_
+
+#include <gudhi/Bitmap_cubical_complex.h>
+#include <gudhi/Bitmap_cubical_complex_base.h>
+#include <gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h>
+
+#include <iostream>
+#include <vector>
+#include <string>
+
+namespace Gudhi {
+
+namespace cubical_complex {
+
+template<typename CubicalComplexOptions = Bitmap_cubical_complex_base<double>>
+class Cubical_complex_interface : public Bitmap_cubical_complex<CubicalComplexOptions> {
+ public:
+ Cubical_complex_interface(const std::vector<unsigned>& dimensions,
+ const std::vector<double>& top_dimensional_cells)
+ : Bitmap_cubical_complex<CubicalComplexOptions>(dimensions, top_dimensional_cells) {
+ }
+
+ Cubical_complex_interface(const std::string& perseus_file)
+ : Bitmap_cubical_complex<CubicalComplexOptions>(perseus_file.c_str()) {
+ }
+};
+
+} // namespace cubical_complex
+
+} // namespace Gudhi
+
+#endif // INCLUDE_CUBICAL_COMPLEX_INTERFACE_H_
+
diff --git a/src/cython/include/Euclidean_strong_witness_complex_interface.h b/src/cython/include/Euclidean_strong_witness_complex_interface.h
new file mode 100644
index 00000000..b9dd8177
--- /dev/null
+++ b/src/cython/include/Euclidean_strong_witness_complex_interface.h
@@ -0,0 +1,93 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef INCLUDE_EUCLIDEAN_STRONG_WITNESS_COMPLEX_INTERFACE_H_
+#define INCLUDE_EUCLIDEAN_STRONG_WITNESS_COMPLEX_INTERFACE_H_
+
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Euclidean_strong_witness_complex.h>
+
+#include "Simplex_tree_interface.h"
+
+#include <CGAL/Epick_d.h>
+
+#include <vector>
+#include <utility> // std::pair
+#include <iostream>
+#include <cstddef>
+
+namespace Gudhi {
+
+namespace witness_complex {
+
+
+class Euclidean_strong_witness_complex_interface {
+ using Dynamic_kernel = CGAL::Epick_d< CGAL::Dynamic_dimension_tag >;
+ using Point_d = Dynamic_kernel::Point_d;
+
+ typedef typename Simplex_tree<>::Simplex_key Simplex_key;
+
+ public:
+ Euclidean_strong_witness_complex_interface(const std::vector<std::vector<double>>& landmarks,
+ const std::vector<std::vector<double>>& witnesses) {
+ landmarks_.reserve(landmarks.size());
+ for (auto& landmark : landmarks)
+ landmarks_.emplace_back(landmark.begin(), landmark.end());
+ witness_complex_ = new Euclidean_strong_witness_complex<Dynamic_kernel>(landmarks_, witnesses);
+ }
+
+ ~Euclidean_strong_witness_complex_interface() {
+ delete witness_complex_;
+ }
+
+ void create_simplex_tree(Gudhi::Simplex_tree<>* simplex_tree, double max_alpha_square,
+ std::size_t limit_dimension) {
+ witness_complex_->create_complex(*simplex_tree, max_alpha_square, limit_dimension);
+ simplex_tree->initialize_filtration();
+ }
+
+ void create_simplex_tree(Gudhi::Simplex_tree<>* simplex_tree, double max_alpha_square) {
+ witness_complex_->create_complex(*simplex_tree, max_alpha_square);
+ simplex_tree->initialize_filtration();
+ }
+
+ std::vector<double> get_point(unsigned vh) {
+ std::vector<double> vd;
+ if (vh < landmarks_.size()) {
+ Point_d ph = witness_complex_->get_point(vh);
+ for (auto coord = ph.cartesian_begin(); coord < ph.cartesian_end(); coord++)
+ vd.push_back(*coord);
+ }
+ return vd;
+ }
+
+ private:
+ std::vector<Point_d> landmarks_;
+ Euclidean_strong_witness_complex<Dynamic_kernel>* witness_complex_;
+};
+
+} // namespace witness_complex
+
+} // namespace Gudhi
+
+#endif // INCLUDE_EUCLIDEAN_STRONG_WITNESS_COMPLEX_INTERFACE_H_
+
diff --git a/src/cython/include/Euclidean_witness_complex_interface.h b/src/cython/include/Euclidean_witness_complex_interface.h
new file mode 100644
index 00000000..2a09b3b5
--- /dev/null
+++ b/src/cython/include/Euclidean_witness_complex_interface.h
@@ -0,0 +1,92 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef INCLUDE_EUCLIDEAN_WITNESS_COMPLEX_INTERFACE_H_
+#define INCLUDE_EUCLIDEAN_WITNESS_COMPLEX_INTERFACE_H_
+
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Euclidean_witness_complex.h>
+
+#include "Simplex_tree_interface.h"
+
+#include <CGAL/Epick_d.h>
+
+#include <vector>
+#include <utility> // std::pair
+#include <iostream>
+#include <cstddef>
+
+namespace Gudhi {
+
+namespace witness_complex {
+
+
+class Euclidean_witness_complex_interface {
+ using Dynamic_kernel = CGAL::Epick_d< CGAL::Dynamic_dimension_tag >;
+ using Point_d = Dynamic_kernel::Point_d;
+
+ typedef typename Simplex_tree<>::Simplex_key Simplex_key;
+
+ public:
+ Euclidean_witness_complex_interface(const std::vector<std::vector<double>>& landmarks,
+ const std::vector<std::vector<double>>& witnesses) {
+ landmarks_.reserve(landmarks.size());
+ for (auto& landmark : landmarks)
+ landmarks_.emplace_back(landmark.begin(), landmark.end());
+ witness_complex_ = new Euclidean_witness_complex<Dynamic_kernel>(landmarks_, witnesses);
+ }
+
+ ~Euclidean_witness_complex_interface() {
+ delete witness_complex_;
+ }
+
+ void create_simplex_tree(Gudhi::Simplex_tree<>* simplex_tree, double max_alpha_square, std::size_t limit_dimension) {
+ witness_complex_->create_complex(*simplex_tree, max_alpha_square, limit_dimension);
+ simplex_tree->initialize_filtration();
+ }
+
+ void create_simplex_tree(Gudhi::Simplex_tree<>* simplex_tree, double max_alpha_square) {
+ witness_complex_->create_complex(*simplex_tree, max_alpha_square);
+ simplex_tree->initialize_filtration();
+ }
+
+ std::vector<double> get_point(unsigned vh) {
+ std::vector<double> vd;
+ if (vh < landmarks_.size()) {
+ Point_d ph = witness_complex_->get_point(vh);
+ for (auto coord = ph.cartesian_begin(); coord < ph.cartesian_end(); coord++)
+ vd.push_back(*coord);
+ }
+ return vd;
+ }
+
+ private:
+ std::vector<Point_d> landmarks_;
+ Euclidean_witness_complex<Dynamic_kernel>* witness_complex_;
+};
+
+} // namespace witness_complex
+
+} // namespace Gudhi
+
+#endif // INCLUDE_EUCLIDEAN_WITNESS_COMPLEX_INTERFACE_H_
+
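A hypothetical standalone driver for this wrapper (in the patch it is only reached through the generated cython code); the landmark and witness coordinates and the max_alpha_square value 4.1 are the ones used in test_euclidean_witness_complex.py further down:

    #include "Euclidean_witness_complex_interface.h"

    #include <gudhi/Simplex_tree.h>

    #include <iostream>
    #include <vector>

    int main() {
      std::vector<std::vector<double>> witnesses = {{1., 1.}, {7., 0.}, {4., 6.}, {9., 6.},
                                                    {0., 14.}, {2., 19.}, {9., 17.}};
      std::vector<std::vector<double>> landmarks = {{1., 1.}, {7., 0.}, {4., 6.}};
      Gudhi::witness_complex::Euclidean_witness_complex_interface witness(landmarks, witnesses);

      Gudhi::Simplex_tree<> simplex_tree;
      witness.create_simplex_tree(&simplex_tree, 4.1);  // max_alpha_square, as in the Python test
      std::cout << simplex_tree.num_simplices() << " simplices" << std::endl;
      return 0;
    }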
diff --git a/src/Bottleneck/example/random_diagrams.cpp b/src/cython/include/Off_reader_interface.h
index 71f152a6..0ca55500 100644
--- a/src/Bottleneck/example/random_diagrams.cpp
+++ b/src/cython/include/Off_reader_interface.h
@@ -2,9 +2,9 @@
* (Geometric Understanding in Higher Dimensions) is a generic C++
* library for computational topology.
*
- * Author(s): Francois Godi
+ * Author(s): Vincent Rouvreau
*
- * Copyright (C) 2015 INRIA Saclay (France)
+ * Copyright (C) 2016 INRIA
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -20,21 +20,23 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-#include "gudhi/Graph_matching.h"
+#ifndef INCLUDE_OFF_READER_INTERFACE_H_
+#define INCLUDE_OFF_READER_INTERFACE_H_
+
+#include <gudhi/Points_off_io.h>
+
#include <iostream>
+#include <vector>
+#include <string>
+
+namespace Gudhi {
-using namespace Gudhi::bottleneck;
-
-int main() {
- int n = 100;
- std::vector< std::pair<double, double> > v1, v2;
- for (int i = 0; i < n; i++) {
- int a = rand() % n;
- v1.emplace_back(a, a + rand() % (n - a));
- int b = rand() % n;
- v2.emplace_back(b, b + rand() % (n - b));
- }
- // v1 and v2 are persistence diagrams containing each 100 randoms points.
- double b = bottleneck_distance(v1, v2, 0);
- std::cout << b << std::endl;
+std::vector<std::vector<double>> read_points_from_OFF_file(const std::string& off_file) {
+ Gudhi::Points_off_reader<std::vector<double>> off_reader(off_file);
+ return off_reader.get_point_cloud();
}
+
+} // namespace Gudhi
+
+#endif // INCLUDE_OFF_READER_INTERFACE_H_
+
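The free function above is all the cython layer needs for OFF input. A sketch of direct use; the default file name is only an example (it matches the subsample.off written by test_subsampling.py below):

    #include "Off_reader_interface.h"

    #include <iostream>
    #include <string>
    #include <vector>

    int main(int argc, char* argv[]) {
      // Read an OFF point cloud and report how many points it contains.
      std::string off_file = (argc > 1) ? argv[1] : "subsample.off";
      std::vector<std::vector<double>> points = Gudhi::read_points_from_OFF_file(off_file);
      std::cout << points.size() << " points read from " << off_file << std::endl;
      return 0;
    }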
diff --git a/src/cython/include/Persistent_cohomology_interface.h b/src/cython/include/Persistent_cohomology_interface.h
new file mode 100644
index 00000000..55028fd0
--- /dev/null
+++ b/src/cython/include/Persistent_cohomology_interface.h
@@ -0,0 +1,95 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef INCLUDE_PERSISTENT_COHOMOLOGY_INTERFACE_H_
+#define INCLUDE_PERSISTENT_COHOMOLOGY_INTERFACE_H_
+
+#include <gudhi/Persistent_cohomology.h>
+
+#include <vector>
+#include <utility> // for std::pair
+#include <algorithm> // for sort
+
+namespace Gudhi {
+
+template<class FilteredComplex>
+class Persistent_cohomology_interface : public
+persistent_cohomology::Persistent_cohomology<FilteredComplex, persistent_cohomology::Field_Zp> {
+ private:
+ /*
+ * Compare two intervals by dimension, then by length.
+ */
+ struct cmp_intervals_by_dim_then_length {
+ explicit cmp_intervals_by_dim_then_length(FilteredComplex * sc)
+ : sc_(sc) { }
+
+ template<typename Persistent_interval>
+ bool operator()(const Persistent_interval & p1, const Persistent_interval & p2) {
+      if (sc_->dimension(get<0>(p1)) == sc_->dimension(get<0>(p2)))
+        return (sc_->filtration(get<1>(p1)) - sc_->filtration(get<0>(p1))
+                > sc_->filtration(get<1>(p2)) - sc_->filtration(get<0>(p2)));
+      else
+        return (sc_->dimension(get<0>(p1)) > sc_->dimension(get<0>(p2)));
+ }
+ FilteredComplex* sc_;
+ };
+
+ public:
+ Persistent_cohomology_interface(FilteredComplex* stptr)
+ : persistent_cohomology::Persistent_cohomology<FilteredComplex, persistent_cohomology::Field_Zp>(*stptr),
+ stptr_(stptr) { }
+
+ Persistent_cohomology_interface(FilteredComplex* stptr, bool persistence_dim_max)
+ : persistent_cohomology::Persistent_cohomology<FilteredComplex,
+ persistent_cohomology::Field_Zp>(*stptr, persistence_dim_max),
+ stptr_(stptr) { }
+
+ std::vector<std::pair<int, std::pair<double, double>>> get_persistence(int homology_coeff_field,
+ double min_persistence) {
+ persistent_cohomology::Persistent_cohomology<FilteredComplex,
+ persistent_cohomology::Field_Zp>::init_coefficients(homology_coeff_field);
+ persistent_cohomology::Persistent_cohomology<FilteredComplex,
+ persistent_cohomology::Field_Zp>::compute_persistent_cohomology(min_persistence);
+
+ // Custom sort and output persistence
+ cmp_intervals_by_dim_then_length cmp(stptr_);
+ auto persistent_pairs = persistent_cohomology::Persistent_cohomology<FilteredComplex,
+ persistent_cohomology::Field_Zp>::get_persistent_pairs();
+ std::sort(std::begin(persistent_pairs), std::end(persistent_pairs), cmp);
+
+ std::vector<std::pair<int, std::pair<double, double>>> persistence;
+ for (auto pair : persistent_pairs) {
+ persistence.push_back(std::make_pair(stptr_->dimension(get<0>(pair)),
+ std::make_pair(stptr_->filtration(get<0>(pair)),
+ stptr_->filtration(get<1>(pair)))));
+ }
+ return persistence;
+ }
+
+ private:
+  // Non-owning pointer to the filtered complex, kept to query dimensions and filtration values
+ FilteredComplex* stptr_;
+};
+
+} // namespace Gudhi
+
+#endif // INCLUDE_PERSISTENT_COHOMOLOGY_INTERFACE_H_
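A sketch of the intended call sequence, assuming the Simplex_tree_interface added later in this patch as the FilteredComplex; the coefficient field Z/11Z and min_persistence 0 are illustrative values, and the set_dimension(2) call mirrors the FIXME'd workaround in test_simplex_tree.py:

    #include "Persistent_cohomology_interface.h"
    #include "Simplex_tree_interface.h"

    #include <iostream>
    #include <vector>

    int main() {
      Gudhi::Simplex_tree_interface<> st;
      std::vector<int> triangle = {0, 1, 2};
      st.insert_simplex_and_subfaces(triangle, 4.0);
      st.set_dimension(2);  // mirrors the FIXME'd call in test_simplex_tree.py

      Gudhi::Persistent_cohomology_interface<Gudhi::Simplex_tree_interface<>> pcoh(&st);
      // Each entry is (dimension, (birth, death)), sorted by dimension, then interval length.
      for (auto& interval : pcoh.get_persistence(11, 0.0))
        std::cout << interval.first << " " << interval.second.first
                  << " " << interval.second.second << std::endl;
      return 0;
    }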
diff --git a/src/cython/include/Rips_complex_interface.h b/src/cython/include/Rips_complex_interface.h
new file mode 100644
index 00000000..1879bd74
--- /dev/null
+++ b/src/cython/include/Rips_complex_interface.h
@@ -0,0 +1,86 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef INCLUDE_RIPS_COMPLEX_INTERFACE_H_
+#define INCLUDE_RIPS_COMPLEX_INTERFACE_H_
+
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Rips_complex.h>
+#include <gudhi/Points_off_io.h>
+#include <gudhi/distance_functions.h>
+#include <gudhi/reader_utils.h>
+
+#include "Simplex_tree_interface.h"
+
+#include <iostream>
+#include <vector>
+#include <utility> // std::pair
+#include <string>
+
+namespace Gudhi {
+
+namespace rips_complex {
+
+class Rips_complex_interface {
+ using Point_d = std::vector<double>;
+ using Distance_matrix = std::vector<std::vector<Simplex_tree_interface<>::Filtration_value>>;
+
+ public:
+ Rips_complex_interface(const std::vector<std::vector<double>>& values, double threshold, bool euclidean) {
+ if (euclidean) {
+ // Rips construction where values is a vector of points
+ rips_complex_ = new Rips_complex<Simplex_tree_interface<>::Filtration_value>(values, threshold,
+ Euclidean_distance());
+ } else {
+ // Rips construction where values is a distance matrix
+ rips_complex_ = new Rips_complex<Simplex_tree_interface<>::Filtration_value>(values, threshold);
+ }
+ }
+
+ Rips_complex_interface(const std::string& file_name, double threshold, bool euclidean, bool from_file = true) {
+ if (euclidean) {
+ // Rips construction where file_name is an OFF file
+ Gudhi::Points_off_reader<Point_d> off_reader(file_name);
+ rips_complex_ = new Rips_complex<Simplex_tree_interface<>::Filtration_value>(off_reader.get_point_cloud(),
+ threshold, Euclidean_distance());
+ } else {
+      // Rips construction where file_name is a csv file containing a lower-triangular distance matrix
+ Distance_matrix distances =
+ read_lower_triangular_matrix_from_csv_file<Simplex_tree_interface<>::Filtration_value>(file_name);
+ rips_complex_ = new Rips_complex<Simplex_tree_interface<>::Filtration_value>(distances, threshold);
+ }
+  }
+
+  ~Rips_complex_interface() {
+    delete rips_complex_;
+  }
+
+ void create_simplex_tree(Simplex_tree_interface<>* simplex_tree, int dim_max) {
+ rips_complex_->create_complex(*simplex_tree, dim_max);
+ simplex_tree->initialize_filtration();
+ }
+
+ private:
+ Rips_complex<Simplex_tree_interface<>::Filtration_value>* rips_complex_;
+};
+
+} // namespace rips_complex
+
+} // namespace Gudhi
+
+#endif // INCLUDE_RIPS_COMPLEX_INTERFACE_H_
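A short sketch of the Euclidean branch of this wrapper, reusing the unit-square points and the threshold 42 from test_rips_complex.py below; dim_max 1 keeps only the 1-skeleton:

    #include "Rips_complex_interface.h"

    #include <iostream>
    #include <vector>

    int main() {
      std::vector<std::vector<double>> points = {{0., 0.}, {1., 0.}, {0., 1.}, {1., 1.}};
      Gudhi::rips_complex::Rips_complex_interface rips(points, 42.0, true /* euclidean */);

      Gudhi::Simplex_tree_interface<> simplex_tree;
      rips.create_simplex_tree(&simplex_tree, 1 /* dim_max */);
      // The Python test expects 10 simplices for this configuration.
      std::cout << simplex_tree.num_simplices() << " simplices" << std::endl;
      return 0;
    }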
diff --git a/src/cython/include/Simplex_tree_interface.h b/src/cython/include/Simplex_tree_interface.h
new file mode 100644
index 00000000..4266b3ef
--- /dev/null
+++ b/src/cython/include/Simplex_tree_interface.h
@@ -0,0 +1,149 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef INCLUDE_SIMPLEX_TREE_INTERFACE_H_
+#define INCLUDE_SIMPLEX_TREE_INTERFACE_H_
+
+#include <gudhi/graph_simplicial_complex.h>
+#include <gudhi/distance_functions.h>
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Points_off_io.h>
+
+#include "Persistent_cohomology_interface.h"
+
+#include <iostream>
+#include <vector>
+#include <utility> // std::pair
+
+namespace Gudhi {
+
+template<typename SimplexTreeOptions = Simplex_tree_options_full_featured>
+class Simplex_tree_interface : public Simplex_tree<SimplexTreeOptions> {
+ public:
+ using Base = Simplex_tree<SimplexTreeOptions>;
+ using Filtration_value = typename Base::Filtration_value;
+ using Vertex_handle = typename Base::Vertex_handle;
+ using Simplex_handle = typename Base::Simplex_handle;
+ using Insertion_result = typename std::pair<Simplex_handle, bool>;
+ using Simplex = std::vector<Vertex_handle>;
+ using Complex = std::vector<std::pair<Simplex, Filtration_value>>;
+
+ public:
+ bool find_simplex(const Simplex& vh) {
+ return (Base::find(vh) != Base::null_simplex());
+ }
+
+ bool insert_simplex(const Simplex& simplex, Filtration_value filtration = 0) {
+ Insertion_result result = Base::insert_simplex(simplex, filtration);
+ Base::initialize_filtration();
+ return (result.second);
+ }
+
+ bool insert_simplex_and_subfaces(const Simplex& simplex, Filtration_value filtration = 0) {
+ Insertion_result result = Base::insert_simplex_and_subfaces(simplex, filtration);
+ Base::initialize_filtration();
+ return (result.second);
+ }
+
+  // Do not expose this overload to cython; it is only used by the strong witness interface for complex creation
+ bool insert_simplex(const std::vector<std::size_t>& complex, Filtration_value filtration = 0) {
+ Insertion_result result = Base::insert_simplex(complex, filtration);
+ Base::initialize_filtration();
+ return (result.second);
+ }
+
+  // Do not expose this overload to cython; it is only used by the strong witness interface for complex creation
+ bool insert_simplex_and_subfaces(const std::vector<std::size_t>& complex, Filtration_value filtration = 0) {
+ Insertion_result result = Base::insert_simplex_and_subfaces(complex, filtration);
+ Base::initialize_filtration();
+ return (result.second);
+ }
+
+ Filtration_value simplex_filtration(const Simplex& simplex) {
+ return Base::filtration(Base::find(simplex));
+ }
+
+ void remove_maximal_simplex(const Simplex& simplex) {
+ Base::remove_maximal_simplex(Base::find(simplex));
+ Base::initialize_filtration();
+ }
+
+ Complex get_filtered_tree() {
+ Complex filtered_tree;
+ for (auto f_simplex : Base::filtration_simplex_range()) {
+ Simplex simplex;
+ for (auto vertex : Base::simplex_vertex_range(f_simplex)) {
+ simplex.insert(simplex.begin(), vertex);
+ }
+ filtered_tree.push_back(std::make_pair(simplex, Base::filtration(f_simplex)));
+ }
+ return filtered_tree;
+ }
+
+ Complex get_skeleton_tree(int dimension) {
+ Complex skeleton_tree;
+ for (auto f_simplex : Base::skeleton_simplex_range(dimension)) {
+ Simplex simplex;
+ for (auto vertex : Base::simplex_vertex_range(f_simplex)) {
+ simplex.insert(simplex.begin(), vertex);
+ }
+ skeleton_tree.push_back(std::make_pair(simplex, Base::filtration(f_simplex)));
+ }
+ return skeleton_tree;
+ }
+
+ Complex get_star(const Simplex& simplex) {
+ Complex star;
+ for (auto f_simplex : Base::star_simplex_range(Base::find(simplex))) {
+ Simplex simplex_star;
+      for (auto vertex : Base::simplex_vertex_range(f_simplex)) {
+        simplex_star.insert(simplex_star.begin(), vertex);
+      }
+ star.push_back(std::make_pair(simplex_star, Base::filtration(f_simplex)));
+ }
+ return star;
+ }
+
+ Complex get_cofaces(const Simplex& simplex, int dimension) {
+ Complex cofaces;
+ for (auto f_simplex : Base::cofaces_simplex_range(Base::find(simplex), dimension)) {
+ Simplex simplex_coface;
+      for (auto vertex : Base::simplex_vertex_range(f_simplex)) {
+        simplex_coface.insert(simplex_coface.begin(), vertex);
+      }
+ cofaces.push_back(std::make_pair(simplex_coface, Base::filtration(f_simplex)));
+ }
+ return cofaces;
+ }
+
+  void create_persistence(Gudhi::Persistent_cohomology_interface<Base>*& pcoh) {
+    // Take the pointer by reference so the caller actually receives the newly allocated interface.
+    pcoh = new Gudhi::Persistent_cohomology_interface<Base>(*this);
+  }
+};
+
+} // namespace Gudhi
+
+#endif // INCLUDE_SIMPLEX_TREE_INTERFACE_H_
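The extra accessors above (get_filtered_tree, get_skeleton_tree, get_star, get_cofaces) return plain (vertex list, filtration) pairs, presumably so that cython can convert them without handling Simplex_handle. A small sketch, with the same insertions as test_simplex_tree.py:

    #include "Simplex_tree_interface.h"

    #include <iostream>
    #include <vector>

    int main() {
      Gudhi::Simplex_tree_interface<> st;
      std::vector<int> edge = {0, 1};
      std::vector<int> triangle = {0, 1, 2};
      st.insert_simplex(edge);                        // filtration defaults to 0
      st.insert_simplex_and_subfaces(triangle, 4.0);

      // Each entry pairs the vertex list of a simplex with its filtration value.
      for (auto& simplex_and_filtration : st.get_filtered_tree()) {
        for (auto vertex : simplex_and_filtration.first)
          std::cout << vertex << " ";
        std::cout << "-> " << simplex_and_filtration.second << std::endl;
      }
      return 0;
    }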
diff --git a/src/cython/include/Strong_witness_complex_interface.h b/src/cython/include/Strong_witness_complex_interface.h
new file mode 100644
index 00000000..d05eaac5
--- /dev/null
+++ b/src/cython/include/Strong_witness_complex_interface.h
@@ -0,0 +1,73 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef INCLUDE_STRONG_WITNESS_COMPLEX_INTERFACE_H_
+#define INCLUDE_STRONG_WITNESS_COMPLEX_INTERFACE_H_
+
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Strong_witness_complex.h>
+
+#include "Simplex_tree_interface.h"
+
+#include <vector>
+#include <utility> // std::pair
+#include <iostream>
+#include <cstddef>
+
+namespace Gudhi {
+
+namespace witness_complex {
+
+class Strong_witness_complex_interface {
+ using Nearest_landmark_range = std::vector<std::pair<std::size_t, double>>;
+ using Nearest_landmark_table = std::vector<Nearest_landmark_range>;
+
+ public:
+ Strong_witness_complex_interface(const Nearest_landmark_table& nlt) {
+ witness_complex_ = new Strong_witness_complex<Nearest_landmark_table>(nlt);
+ }
+
+ ~Strong_witness_complex_interface() {
+ delete witness_complex_;
+ }
+
+ void create_simplex_tree(Simplex_tree_interface<>* simplex_tree, double max_alpha_square,
+ std::size_t limit_dimension) {
+ witness_complex_->create_complex(*simplex_tree, max_alpha_square, limit_dimension);
+ simplex_tree->initialize_filtration();
+ }
+
+ void create_simplex_tree(Simplex_tree_interface<>* simplex_tree,
+ double max_alpha_square) {
+ witness_complex_->create_complex(*simplex_tree, max_alpha_square);
+ simplex_tree->initialize_filtration();
+ }
+
+ private:
+ Strong_witness_complex<Nearest_landmark_table>* witness_complex_;
+};
+
+} // namespace witness_complex
+
+} // namespace Gudhi
+
+#endif // INCLUDE_STRONG_WITNESS_COMPLEX_INTERFACE_H_
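A sketch of feeding this wrapper a hand-written nearest-landmark table (the type aliased above: one row per witness, made of (landmark id, squared distance) pairs). All numeric values here are made up for illustration only:

    #include "Strong_witness_complex_interface.h"

    #include <iostream>
    #include <utility>
    #include <vector>

    int main() {
      // One row per witness, closest landmark first; distances are illustrative only.
      std::vector<std::vector<std::pair<std::size_t, double>>> nearest_landmark_table = {
          {{0, 0.0}, {1, 0.2}, {2, 0.5}},
          {{1, 0.1}, {0, 0.3}, {2, 0.6}},
          {{2, 0.0}, {1, 0.4}, {0, 0.9}}};

      Gudhi::witness_complex::Strong_witness_complex_interface witness(nearest_landmark_table);
      Gudhi::Simplex_tree_interface<> simplex_tree;
      witness.create_simplex_tree(&simplex_tree, 1.0 /* max_alpha_square */, 2 /* limit_dimension */);
      std::cout << simplex_tree.num_simplices() << " simplices" << std::endl;
      return 0;
    }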
diff --git a/src/cython/include/Subsampling_interface.h b/src/cython/include/Subsampling_interface.h
new file mode 100644
index 00000000..1c6032c0
--- /dev/null
+++ b/src/cython/include/Subsampling_interface.h
@@ -0,0 +1,119 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef INCLUDE_SUBSAMPLING_INTERFACE_H_
+#define INCLUDE_SUBSAMPLING_INTERFACE_H_
+
+#include <gudhi/choose_n_farthest_points.h>
+#include <gudhi/pick_n_random_points.h>
+#include <gudhi/sparsify_point_set.h>
+#include <gudhi/Points_off_io.h>
+#include <CGAL/Epick_d.h>
+
+#include <iostream>
+#include <vector>
+#include <string>
+
+namespace Gudhi {
+
+namespace subsampling {
+
+using Subsampling_dynamic_kernel = CGAL::Epick_d< CGAL::Dynamic_dimension_tag >;
+using Subsampling_point_d = Subsampling_dynamic_kernel::Point_d;
+using Subsampling_ft = Subsampling_dynamic_kernel::FT;
+
+// ------ choose_n_farthest_points ------
+std::vector<std::vector<double>> subsampling_n_farthest_points(const std::vector<std::vector<double>>& points,
+ unsigned nb_points) {
+ std::vector<std::vector<double>> landmarks;
+ Subsampling_dynamic_kernel k;
+ choose_n_farthest_points(k, points, nb_points, std::back_inserter(landmarks));
+
+ return landmarks;
+}
+
+std::vector<std::vector<double>> subsampling_n_farthest_points(const std::vector<std::vector<double>>& points,
+ unsigned nb_points, unsigned starting_point) {
+ std::vector<std::vector<double>> landmarks;
+ Subsampling_dynamic_kernel k;
+ choose_n_farthest_points(k, points, nb_points, starting_point, std::back_inserter(landmarks));
+
+ return landmarks;
+}
+
+std::vector<std::vector<double>> subsampling_n_farthest_points_from_file(const std::string& off_file,
+ unsigned nb_points) {
+ Gudhi::Points_off_reader<std::vector<double>> off_reader(off_file);
+ std::vector<std::vector<double>> points = off_reader.get_point_cloud();
+ return subsampling_n_farthest_points(points, nb_points);
+}
+
+std::vector<std::vector<double>> subsampling_n_farthest_points_from_file(const std::string& off_file,
+ unsigned nb_points, unsigned starting_point) {
+ Gudhi::Points_off_reader<std::vector<double>> off_reader(off_file);
+ std::vector<std::vector<double>> points = off_reader.get_point_cloud();
+ return subsampling_n_farthest_points(points, nb_points, starting_point);
+}
+
+// ------ pick_n_random_points ------
+std::vector<std::vector<double>> subsampling_n_random_points(const std::vector<std::vector<double>>& points,
+ unsigned nb_points) {
+ std::vector<std::vector<double>> landmarks;
+ pick_n_random_points(points, nb_points, std::back_inserter(landmarks));
+
+ return landmarks;
+}
+
+std::vector<std::vector<double>> subsampling_n_random_points_from_file(const std::string& off_file,
+ unsigned nb_points) {
+ Gudhi::Points_off_reader<std::vector<double>> off_reader(off_file);
+ std::vector<std::vector<double>> points = off_reader.get_point_cloud();
+ return subsampling_n_random_points(points, nb_points);
+}
+
+// ------ sparsify_point_set ------
+std::vector<std::vector<double>> subsampling_sparsify_points(const std::vector<std::vector<double>>& points,
+ double min_squared_dist) {
+ std::vector<Subsampling_point_d> input, output;
+ for (auto point : points)
+ input.push_back(Subsampling_point_d(point.size(), point.begin(), point.end()));
+ Subsampling_dynamic_kernel k;
+ sparsify_point_set(k, input, min_squared_dist, std::back_inserter(output));
+
+ std::vector<std::vector<double>> landmarks;
+ for (auto point : output)
+ landmarks.push_back(std::vector<double>(point.cartesian_begin(), point.cartesian_end()));
+ return landmarks;
+}
+
+std::vector<std::vector<double>> subsampling_sparsify_points_from_file(const std::string& off_file,
+ double min_squared_dist) {
+ Gudhi::Points_off_reader<std::vector<double>> off_reader(off_file);
+ std::vector<std::vector<double>> points = off_reader.get_point_cloud();
+ return subsampling_sparsify_points(points, min_squared_dist);
+}
+
+} // namespace subsampling
+
+} // namespace Gudhi
+
+#endif // INCLUDE_SUBSAMPLING_INTERFACE_H_
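The helpers above wrap choose_n_farthest_points, pick_n_random_points and sparsify_point_set behind plain std::vector types. A sketch with the same four points as test_subsampling.py; the expected counts in the comments are taken from that test:

    #include "Subsampling_interface.h"

    #include <iostream>
    #include <vector>

    int main() {
      std::vector<std::vector<double>> points = {{0., 1.}, {0., 0.}, {1., 0.}, {1., 1.}};

      // Keep the two points that are farthest apart, starting the iteration at index 0.
      auto farthest = Gudhi::subsampling::subsampling_n_farthest_points(points, 2, 0);

      // Drop points whose squared distance to an already kept point is below 2.0
      // (the Python test expects [[0, 1], [1, 0]] to survive).
      auto sparse = Gudhi::subsampling::subsampling_sparsify_points(points, 2.0);

      std::cout << farthest.size() << " farthest points, "
                << sparse.size() << " sparsified points" << std::endl;
      return 0;
    }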
diff --git a/src/cython/include/Tangential_complex_interface.h b/src/cython/include/Tangential_complex_interface.h
new file mode 100644
index 00000000..5e9dc0e4
--- /dev/null
+++ b/src/cython/include/Tangential_complex_interface.h
@@ -0,0 +1,123 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef INCLUDE_TANGENTIAL_COMPLEX_INTERFACE_H_
+#define INCLUDE_TANGENTIAL_COMPLEX_INTERFACE_H_
+
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Tangential_complex.h>
+#include <gudhi/Points_off_io.h>
+#include <CGAL/Epick_d.h>
+
+#include "Simplex_tree_interface.h"
+
+#include <vector>
+#include <utility> // std::pair
+#include <iostream>
+#include <string>
+
+namespace Gudhi {
+
+namespace tangential_complex {
+
+class Tangential_complex_interface {
+ using Dynamic_kernel = CGAL::Epick_d< CGAL::Dynamic_dimension_tag >;
+ using Point_d = Dynamic_kernel::Point_d;
+ using TC = Tangential_complex<Dynamic_kernel, CGAL::Dynamic_dimension_tag, CGAL::Parallel_tag>;
+
+ public:
+ Tangential_complex_interface(const std::vector<std::vector<double>>& points) {
+ Dynamic_kernel k;
+    unsigned intrinsic_dim = 0;
+    if (points.size() > 0)
+      intrinsic_dim = points[0].size() - 1;
+
+    tangential_complex_ = new TC(points, intrinsic_dim, k);
+ tangential_complex_->compute_tangential_complex();
+ num_inconsistencies_ = tangential_complex_->number_of_inconsistent_simplices();
+ }
+
+ Tangential_complex_interface(const std::string& off_file_name, bool from_file = true) {
+ Gudhi::Points_off_reader<Point_d> off_reader(off_file_name);
+ Dynamic_kernel k;
+    unsigned intrinsic_dim = 0;
+    std::vector<Point_d> points = off_reader.get_point_cloud();
+    if (points.size() > 0)
+      intrinsic_dim = points[0].size() - 1;
+
+    tangential_complex_ = new TC(points, intrinsic_dim, k);
+ tangential_complex_->compute_tangential_complex();
+ num_inconsistencies_ = tangential_complex_->number_of_inconsistent_simplices();
+ }
+
+ ~Tangential_complex_interface() {
+ delete tangential_complex_;
+ }
+
+ std::vector<double> get_point(unsigned vh) {
+ std::vector<double> vd;
+ if (vh < tangential_complex_->number_of_vertices()) {
+ Point_d ph = tangential_complex_->get_point(vh);
+ for (auto coord = ph.cartesian_begin(); coord < ph.cartesian_end(); coord++)
+ vd.push_back(*coord);
+ }
+ return vd;
+ }
+
+ unsigned number_of_vertices() {
+ return tangential_complex_->number_of_vertices();
+ }
+
+ unsigned number_of_simplices() {
+ return num_inconsistencies_.num_simplices;
+ }
+
+ unsigned number_of_inconsistent_simplices() {
+ return num_inconsistencies_.num_inconsistent_simplices;
+ }
+
+ unsigned number_of_inconsistent_stars() {
+ return num_inconsistencies_.num_inconsistent_stars;
+ }
+
+ void fix_inconsistencies_using_perturbation(double max_perturb, double time_limit) {
+ tangential_complex_->fix_inconsistencies_using_perturbation(max_perturb, time_limit);
+ num_inconsistencies_ = tangential_complex_->number_of_inconsistent_simplices();
+ }
+
+ void create_simplex_tree(Simplex_tree<>* simplex_tree) {
+ int max_dim = tangential_complex_->create_complex<Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_full_featured>>(*simplex_tree);
+ // FIXME
+ simplex_tree->set_dimension(max_dim);
+ simplex_tree->initialize_filtration();
+ }
+
+ private:
+ TC* tangential_complex_;
+ TC::Num_inconsistencies num_inconsistencies_;
+};
+
+} // namespace tangential_complex
+
+} // namespace Gudhi
+
+#endif // INCLUDE_TANGENTIAL_COMPLEX_INTERFACE_H_
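A sketch of the full pipeline this wrapper drives: compute the tangential complex, perturb away inconsistencies, then export to a Simplex_tree. The four points on a circle (so the interface guesses intrinsic dimension 1, i.e. ambient dimension minus one) and the perturbation parameters are illustrative values only:

    #include "Tangential_complex_interface.h"

    #include <gudhi/Simplex_tree.h>

    #include <iostream>
    #include <vector>

    int main() {
      std::vector<std::vector<double>> points = {{1., 0.}, {0., 1.}, {-1., 0.}, {0., -1.}};
      Gudhi::tangential_complex::Tangential_complex_interface tc(points);

      std::cout << tc.number_of_vertices() << " vertices, "
                << tc.number_of_inconsistent_simplices() << " inconsistent simplices" << std::endl;

      tc.fix_inconsistencies_using_perturbation(0.01 /* max_perturb */, 30. /* time_limit in s */);

      Gudhi::Simplex_tree<> simplex_tree;
      tc.create_simplex_tree(&simplex_tree);
      std::cout << simplex_tree.num_simplices() << " simplices" << std::endl;
      return 0;
    }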
diff --git a/src/cython/include/Witness_complex_interface.h b/src/cython/include/Witness_complex_interface.h
new file mode 100644
index 00000000..6501cc35
--- /dev/null
+++ b/src/cython/include/Witness_complex_interface.h
@@ -0,0 +1,74 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef INCLUDE_WITNESS_COMPLEX_INTERFACE_H_
+#define INCLUDE_WITNESS_COMPLEX_INTERFACE_H_
+
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Witness_complex.h>
+
+#include "Simplex_tree_interface.h"
+
+#include <vector>
+#include <utility> // std::pair
+#include <iostream>
+#include <cstddef>
+
+namespace Gudhi {
+
+namespace witness_complex {
+
+class Witness_complex_interface {
+ using Nearest_landmark_range = std::vector<std::pair<std::size_t, double>>;
+ using Nearest_landmark_table = std::vector<Nearest_landmark_range>;
+
+ public:
+ Witness_complex_interface(const Nearest_landmark_table& nlt) {
+ witness_complex_ = new Witness_complex<Nearest_landmark_table>(nlt);
+ }
+
+ ~Witness_complex_interface() {
+ delete witness_complex_;
+ }
+
+ void create_simplex_tree(Simplex_tree_interface<>* simplex_tree, double max_alpha_square,
+ std::size_t limit_dimension) {
+ witness_complex_->create_complex(*simplex_tree, max_alpha_square, limit_dimension);
+ simplex_tree->initialize_filtration();
+ }
+
+ void create_simplex_tree(Simplex_tree_interface<>* simplex_tree,
+ double max_alpha_square) {
+ witness_complex_->create_complex(*simplex_tree, max_alpha_square);
+ simplex_tree->initialize_filtration();
+ }
+
+ private:
+ Witness_complex<Nearest_landmark_table>* witness_complex_;
+};
+
+} // namespace witness_complex
+
+} // namespace Gudhi
+
+#endif // INCLUDE_WITNESS_COMPLEX_INTERFACE_H_
+
diff --git a/src/cython/test/test_alpha_complex.py b/src/cython/test/test_alpha_complex.py
new file mode 100755
index 00000000..2625d529
--- /dev/null
+++ b/src/cython/test/test_alpha_complex.py
@@ -0,0 +1,86 @@
+from gudhi import AlphaComplex, SimplexTree
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+
+def test_empty_alpha():
+ alpha_complex = AlphaComplex(points=[[0,0]])
+ assert alpha_complex.__is_defined() == True
+
+def test_infinite_alpha():
+ point_list = [[0, 0], [1, 0], [0, 1], [1, 1]]
+ alpha_complex = AlphaComplex(points=point_list)
+ assert alpha_complex.__is_defined() == True
+
+ simplex_tree = alpha_complex.create_simplex_tree()
+ assert simplex_tree.__is_persistence_defined() == False
+
+ assert simplex_tree.num_simplices() == 11
+ assert simplex_tree.num_vertices() == 4
+
+ assert simplex_tree.get_filtered_tree() == \
+ [([0], 0.0), ([1], 0.0), ([2], 0.0), ([3], 0.0),
+ ([0, 1], 0.25), ([0, 2], 0.25), ([1, 3], 0.25),
+ ([2, 3], 0.25), ([1, 2], 0.5), ([0, 1, 2], 0.5),
+ ([1, 2, 3], 0.5)]
+ assert simplex_tree.get_star([0]) == \
+ [([0], 0.0), ([0, 1], 0.25), ([0, 1, 2], 0.5),
+ ([0, 2], 0.25)]
+ assert simplex_tree.get_cofaces([0], 1) == \
+ [([0, 1], 0.25), ([0, 2], 0.25)]
+
+ assert point_list[0] == alpha_complex.get_point(0)
+ assert point_list[1] == alpha_complex.get_point(1)
+ assert point_list[2] == alpha_complex.get_point(2)
+ assert point_list[3] == alpha_complex.get_point(3)
+ assert alpha_complex.get_point(4) == []
+ assert alpha_complex.get_point(125) == []
+
+def test_filtered_alpha():
+ point_list = [[0, 0], [1, 0], [0, 1], [1, 1]]
+ filtered_alpha = AlphaComplex(points=point_list)
+
+ simplex_tree = filtered_alpha.create_simplex_tree(max_alpha_square=0.25)
+
+ assert simplex_tree.num_simplices() == 8
+ assert simplex_tree.num_vertices() == 4
+
+ assert point_list[0] == filtered_alpha.get_point(0)
+ assert point_list[1] == filtered_alpha.get_point(1)
+ assert point_list[2] == filtered_alpha.get_point(2)
+ assert point_list[3] == filtered_alpha.get_point(3)
+ assert filtered_alpha.get_point(4) == []
+ assert filtered_alpha.get_point(125) == []
+
+ assert simplex_tree.get_filtered_tree() == \
+ [([0], 0.0), ([1], 0.0), ([2], 0.0), ([3], 0.0),
+ ([0, 1], 0.25), ([0, 2], 0.25), ([1, 3], 0.25),
+ ([2, 3], 0.25)]
+ assert simplex_tree.get_star([0]) == \
+ [([0], 0.0), ([0, 1], 0.25), ([0, 2], 0.25)]
+ assert simplex_tree.get_cofaces([0], 1) == \
+ [([0, 1], 0.25), ([0, 2], 0.25)]
diff --git a/src/cython/test/test_bottleneck_distance.py b/src/cython/test/test_bottleneck_distance.py
new file mode 100755
index 00000000..3d982d34
--- /dev/null
+++ b/src/cython/test/test_bottleneck_distance.py
@@ -0,0 +1,35 @@
+import gudhi
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+
+def test_basic_bottleneck():
+ diag1 = [[2.7, 3.7],[9.6, 14.],[34.2, 34.974], [3.,float('Inf')]]
+ diag2 = [[2.8, 4.45],[9.5, 14.1],[3.2,float('Inf')]]
+
+ assert(gudhi.bottleneck_distance(diag1, diag2, 0.1) == 0.8081763781405569)
+ assert(gudhi.bottleneck_distance(diag1, diag2) == 0.75)
diff --git a/src/cython/test/test_cubical_complex.py b/src/cython/test/test_cubical_complex.py
new file mode 100755
index 00000000..c8df8089
--- /dev/null
+++ b/src/cython/test/test_cubical_complex.py
@@ -0,0 +1,86 @@
+from gudhi import CubicalComplex
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+
+def test_empty_constructor():
+ # Try to create an empty CubicalComplex
+ cub = CubicalComplex()
+ assert cub.__is_defined() == False
+ assert cub.__is_persistence_defined() == False
+
+def test_non_existing_perseus_file_constructor():
+    # Try to open a non-existent file
+ cub = CubicalComplex(perseus_file='pouetpouettralala.toubiloubabdou')
+ assert cub.__is_defined() == False
+ assert cub.__is_persistence_defined() == False
+
+def test_dimension_or_perseus_file_constructor():
+ # Create test file
+ test_file = open('CubicalOneSphere.txt', 'w')
+ test_file.write('2\n3\n3\n0\n0\n0\n0\n100\n0\n0\n0\n0\n')
+ test_file.close()
+    # CubicalComplex can be constructed from dimensions and
+    # top_dimensional_cells OR from a Perseus-style file name, but not both.
+ cub = CubicalComplex(dimensions=[3, 3],
+ top_dimensional_cells = [1,2,3,4,5,6,7,8,9],
+ perseus_file='CubicalOneSphere.txt')
+ assert cub.__is_defined() == False
+ assert cub.__is_persistence_defined() == False
+
+ cub = CubicalComplex(top_dimensional_cells = [1,2,3,4,5,6,7,8,9],
+ perseus_file='CubicalOneSphere.txt')
+ assert cub.__is_defined() == False
+ assert cub.__is_persistence_defined() == False
+
+ cub = CubicalComplex(dimensions=[3, 3],
+ perseus_file='CubicalOneSphere.txt')
+ assert cub.__is_defined() == False
+ assert cub.__is_persistence_defined() == False
+
+def test_dimension_constructor():
+ cub = CubicalComplex(dimensions=[3, 3],
+ top_dimensional_cells = [1,2,3,4,5,6,7,8,9])
+ assert cub.__is_defined() == True
+ assert cub.__is_persistence_defined() == False
+ assert cub.persistence() == [(1, (0.0, 100.0)), (0, (0.0, 1.8446744073709552e+19))]
+ assert cub.__is_persistence_defined() == True
+ assert cub.betti_numbers() == [1, 0]
+ assert cub.persistent_betti_numbers(0, 1000) == [0, 0]
+
+def test_perseus_file_constructor():
+ # Create test file
+ test_file = open('CubicalOneSphere.txt', 'w')
+ test_file.write('2\n3\n3\n0\n0\n0\n0\n100\n0\n0\n0\n0\n')
+ test_file.close()
+ cub = CubicalComplex(perseus_file='CubicalOneSphere.txt')
+ assert cub.__is_defined() == True
+ assert cub.__is_persistence_defined() == False
+ assert cub.persistence() == [(1, (0.0, 100.0)), (0, (0.0, 1.8446744073709552e+19))]
+ assert cub.__is_persistence_defined() == True
+ assert cub.betti_numbers() == [1, 0, 0]
+ assert cub.persistent_betti_numbers(0, 1000) == [1, 0, 0]
diff --git a/src/cython/test/test_euclidean_witness_complex.py b/src/cython/test/test_euclidean_witness_complex.py
new file mode 100755
index 00000000..0947cc09
--- /dev/null
+++ b/src/cython/test/test_euclidean_witness_complex.py
@@ -0,0 +1,71 @@
+import gudhi
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+
+def test_empty_euclidean_witness_complex():
+ euclidean_witness = gudhi.EuclideanWitnessComplex()
+ assert euclidean_witness.__is_defined() == False
+
+def test_witness_complex():
+ point_cloud = [[1.0, 1.0], [7.0, 0.0], [4.0, 6.0], [9.0, 6.0],
+ [0.0, 14.0], [2.0, 19.0], [9.0, 17.0]]
+ landmarks = [[1.0, 1.0], [7.0, 0.0], [4.0, 6.0]]
+ euclidean_witness_complex = gudhi.EuclideanWitnessComplex(landmarks=landmarks, witnesses = point_cloud)
+ simplex_tree = euclidean_witness_complex.create_simplex_tree(max_alpha_square=4.1)
+
+ assert landmarks[0] == euclidean_witness_complex.get_point(0)
+ assert landmarks[1] == euclidean_witness_complex.get_point(1)
+ assert landmarks[2] == euclidean_witness_complex.get_point(2)
+
+ assert simplex_tree.get_filtered_tree() == [([0], 0.0), ([1], 0.0),
+ ([0, 1], 0.0), ([2], 0.0), ([0, 2], 0.0), ([1, 2], 0.0),
+ ([0, 1, 2], 0.0)]
+
+def test_empty_euclidean_strong_witness_complex():
+ euclidean_strong_witness = gudhi.EuclideanStrongWitnessComplex()
+ assert euclidean_strong_witness.__is_defined() == False
+
+def test_strong_witness_complex():
+ point_cloud = [[1.0, 1.0], [7.0, 0.0], [4.0, 6.0], [9.0, 6.0],
+ [0.0, 14.0], [2.0, 19.0], [9.0, 17.0]]
+ landmarks = [[1.0, 1.0], [7.0, 0.0], [4.0, 6.0]]
+ euclidean_strong_witness_complex = gudhi.EuclideanStrongWitnessComplex(landmarks=landmarks, witnesses = point_cloud)
+ simplex_tree = euclidean_strong_witness_complex.create_simplex_tree(max_alpha_square=14.9)
+
+ assert landmarks[0] == euclidean_strong_witness_complex.get_point(0)
+ assert landmarks[1] == euclidean_strong_witness_complex.get_point(1)
+ assert landmarks[2] == euclidean_strong_witness_complex.get_point(2)
+
+ assert simplex_tree.get_filtered_tree() == [([0], 0.0), ([1], 0.0), ([2], 0.0)]
+
+ simplex_tree = euclidean_strong_witness_complex.create_simplex_tree(max_alpha_square=100.0)
+
+ assert simplex_tree.get_filtered_tree() == [([0], 0.0), ([1], 0.0),
+ ([2], 0.0), ([1, 2], 15.0), ([0, 2], 34.0), ([0, 1], 37.0),
+ ([0, 1, 2], 37.0)]
+
diff --git a/src/cython/test/test_rips_complex.py b/src/cython/test/test_rips_complex.py
new file mode 100755
index 00000000..286a645b
--- /dev/null
+++ b/src/cython/test/test_rips_complex.py
@@ -0,0 +1,111 @@
+from gudhi import RipsComplex
+from math import sqrt
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+
+def test_empty_rips():
+ rips_complex = RipsComplex()
+ assert rips_complex.__is_defined() == True
+
+def test_rips_from_points():
+ point_list = [[0, 0], [1, 0], [0, 1], [1, 1]]
+ rips_complex = RipsComplex(points=point_list, max_edge_length=42)
+
+ simplex_tree = rips_complex.create_simplex_tree(max_dimension=1)
+
+ assert simplex_tree.__is_defined() == True
+ assert simplex_tree.__is_persistence_defined() == False
+
+ assert simplex_tree.num_simplices() == 10
+ assert simplex_tree.num_vertices() == 4
+
+ assert simplex_tree.get_filtered_tree() == \
+ [([0], 0.0), ([1], 0.0), ([2], 0.0), ([3], 0.0),
+ ([0, 1], 1.0), ([0, 2], 1.0), ([1, 3], 1.0),
+ ([2, 3], 1.0), ([1, 2], 1.4142135623730951),
+ ([0, 3], 1.4142135623730951)]
+ assert simplex_tree.get_star([0]) == \
+ [([0], 0.0), ([0, 1], 1.0), ([0, 2], 1.0),
+ ([0, 3], 1.4142135623730951)]
+ assert simplex_tree.get_cofaces([0], 1) == \
+ [([0, 1], 1.0), ([0, 2], 1.0),
+ ([0, 3], 1.4142135623730951)]
+
+def test_filtered_rips_from_points():
+ point_list = [[0, 0], [1, 0], [0, 1], [1, 1]]
+ filtered_rips = RipsComplex(points=point_list, max_edge_length=1.0)
+
+ simplex_tree = filtered_rips.create_simplex_tree(max_dimension=1)
+
+ assert simplex_tree.__is_defined() == True
+ assert simplex_tree.__is_persistence_defined() == False
+
+ assert simplex_tree.num_simplices() == 8
+ assert simplex_tree.num_vertices() == 4
+
+def test_rips_from_distance_matrix():
+ distance_matrix = [[0],
+ [1, 0],
+ [1, sqrt(2), 0],
+ [sqrt(2), 1, 1, 0]]
+ rips_complex = RipsComplex(distance_matrix=distance_matrix, max_edge_length=42)
+
+ simplex_tree = rips_complex.create_simplex_tree(max_dimension=1)
+
+ assert simplex_tree.__is_defined() == True
+ assert simplex_tree.__is_persistence_defined() == False
+
+ assert simplex_tree.num_simplices() == 10
+ assert simplex_tree.num_vertices() == 4
+
+ assert simplex_tree.get_filtered_tree() == \
+ [([0], 0.0), ([1], 0.0), ([2], 0.0), ([3], 0.0),
+ ([0, 1], 1.0), ([0, 2], 1.0), ([1, 3], 1.0),
+ ([2, 3], 1.0), ([1, 2], 1.4142135623730951),
+ ([0, 3], 1.4142135623730951)]
+ assert simplex_tree.get_star([0]) == \
+ [([0], 0.0), ([0, 1], 1.0), ([0, 2], 1.0),
+ ([0, 3], 1.4142135623730951)]
+ assert simplex_tree.get_cofaces([0], 1) == \
+ [([0, 1], 1.0), ([0, 2], 1.0),
+ ([0, 3], 1.4142135623730951)]
+
+def test_filtered_rips_from_distance_matrix():
+ distance_matrix = [[0],
+ [1, 0],
+ [1, sqrt(2), 0],
+ [sqrt(2), 1, 1, 0]]
+ filtered_rips = RipsComplex(distance_matrix=distance_matrix, max_edge_length=1.0)
+
+ simplex_tree = filtered_rips.create_simplex_tree(max_dimension=1)
+
+ assert simplex_tree.__is_defined() == True
+ assert simplex_tree.__is_persistence_defined() == False
+
+ assert simplex_tree.num_simplices() == 8
+ assert simplex_tree.num_vertices() == 4
diff --git a/src/cython/test/test_simplex_tree.py b/src/cython/test/test_simplex_tree.py
new file mode 100755
index 00000000..af5b639a
--- /dev/null
+++ b/src/cython/test/test_simplex_tree.py
@@ -0,0 +1,133 @@
+from gudhi import SimplexTree
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+
+def test_insertion():
+ st = SimplexTree()
+ assert st.__is_defined() == True
+ assert st.__is_persistence_defined() == False
+
+ # insert test
+ assert st.insert_simplex([0, 1]) == True
+ assert st.insert_simplex_and_subfaces([0, 1, 2], filtration=4.0) == True
+ # FIXME: Remove this line
+ st.set_dimension(2)
+ assert st.num_simplices() == 7
+ assert st.num_vertices() == 3
+
+ # find test
+ assert st.find([0, 1, 2]) == True
+ assert st.find([0, 1]) == True
+ assert st.find([0, 2]) == True
+ assert st.find([0]) == True
+ assert st.find([1]) == True
+ assert st.find([2]) == True
+ assert st.find([3]) == False
+ assert st.find([0, 3]) == False
+ assert st.find([1, 3]) == False
+ assert st.find([2, 3]) == False
+
+ # filtration test
+ st.set_filtration(5.0)
+ st.initialize_filtration()
+ assert st.get_filtration() == 5.0
+ assert st.filtration([0, 1, 2]) == 4.0
+ assert st.filtration([0, 2]) == 4.0
+ assert st.filtration([1, 2]) == 4.0
+ assert st.filtration([2]) == 4.0
+ assert st.filtration([0, 1]) == 0.0
+ assert st.filtration([0]) == 0.0
+ assert st.filtration([1]) == 4.0
+
+ # skeleton_tree test
+ assert st.get_skeleton_tree(2) == \
+ [([0, 1, 2], 4.0), ([0, 1], 0.0), ([0, 2], 4.0),
+ ([0], 0.0), ([1, 2], 4.0), ([1], 4.0), ([2], 4.0)]
+ assert st.get_skeleton_tree(1) == \
+ [([0, 1], 0.0), ([0, 2], 4.0), ([0], 0.0),
+ ([1, 2], 4.0), ([1], 4.0), ([2], 4.0)]
+ assert st.get_skeleton_tree(0) == \
+ [([0], 0.0), ([1], 4.0), ([2], 4.0)]
+
+ # remove_maximal_simplex test
+ assert st.get_cofaces([0, 1, 2], 1) == []
+ st.remove_maximal_simplex([0, 1, 2])
+ assert st.get_skeleton_tree(2) == \
+ [([0, 1], 0.0), ([0, 2], 4.0), ([0], 0.0),
+ ([1, 2], 4.0), ([1], 4.0), ([2], 4.0)]
+ assert st.find([0, 1, 2]) == False
+ assert st.find([0, 1]) == True
+ assert st.find([0, 2]) == True
+ assert st.find([0]) == True
+ assert st.find([1]) == True
+ assert st.find([2]) == True
+
+ st.initialize_filtration()
+ assert st.persistence() == [(1, (4.0, float('inf'))), (0, (0.0, float('inf')))]
+ assert st.__is_persistence_defined() == True
+ assert st.betti_numbers() == [1, 1]
+ assert st.persistent_betti_numbers(-0.1, 10000.0) == [0, 0]
+ assert st.persistent_betti_numbers(0.0, 10000.0) == [1, 0]
+ assert st.persistent_betti_numbers(3.9, 10000.0) == [1, 0]
+ assert st.persistent_betti_numbers(4.0, 10000.0) == [1, 1]
+ assert st.persistent_betti_numbers(9999.0, 10000.0) == [1, 1]
+
+def test_expansion():
+ st = SimplexTree()
+ assert st.__is_defined() == True
+ assert st.__is_persistence_defined() == False
+
+ # insert test
+ assert st.insert_simplex_and_subfaces([3, 2], 0.1) == True
+ assert st.insert_simplex_and_subfaces([2, 0], 0.2) == True
+ assert st.insert_simplex_and_subfaces([1, 0], 0.3) == True
+ assert st.insert_simplex_and_subfaces([3, 1], 0.4) == True
+ assert st.insert_simplex_and_subfaces([2, 1], 0.5) == True
+ assert st.insert_simplex_and_subfaces([6, 5], 0.6) == True
+ assert st.insert_simplex_and_subfaces([4, 2], 0.7) == True
+ assert st.insert_simplex_and_subfaces([3, 0], 0.8) == True
+ assert st.insert_simplex_and_subfaces([6, 4], 0.9) == True
+ assert st.insert_simplex_and_subfaces([6, 3], 1.0) == True
+
+ assert st.num_vertices() == 7
+ assert st.num_simplices() == 17
+ assert st.get_filtered_tree() == [([2], 0.1), ([3], 0.1), ([2, 3], 0.1),
+ ([0], 0.2), ([0, 2], 0.2), ([1], 0.3), ([0, 1], 0.3), ([1, 3], 0.4),
+ ([1, 2], 0.5), ([5], 0.6), ([6], 0.6), ([5, 6], 0.6), ([4], 0.7),
+ ([2, 4], 0.7), ([0, 3], 0.8), ([4, 6], 0.9), ([3, 6], 1.0)]
+
+ st.expansion(3)
+ assert st.num_vertices() == 7
+ assert st.num_simplices() == 22
+ st.initialize_filtration()
+
+ assert st.get_filtered_tree() == [([2], 0.1), ([3], 0.1), ([2, 3], 0.1),
+ ([0], 0.2), ([0, 2], 0.2), ([1], 0.3), ([0, 1], 0.3), ([1, 3], 0.4),
+ ([1, 2], 0.5), ([0, 1, 2], 0.5), ([1, 2, 3], 0.5), ([5], 0.6), ([6], 0.6),
+ ([5, 6], 0.6), ([4], 0.7), ([2, 4], 0.7), ([0, 3], 0.8), ([0, 1, 3], 0.8),
+ ([0, 2, 3], 0.8), ([0, 1, 2, 3], 0.8), ([4, 6], 0.9), ([3, 6], 1.0)]
diff --git a/src/cython/test/test_subsampling.py b/src/cython/test/test_subsampling.py
new file mode 100755
index 00000000..2caf4ddb
--- /dev/null
+++ b/src/cython/test/test_subsampling.py
@@ -0,0 +1,133 @@
+import gudhi
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+
+def test_write_off_file_for_tests():
+ file = open("subsample.off", "w")
+ file.write("nOFF\n")
+ file.write("2 7 0 0\n")
+ file.write("1.0 1.0\n")
+ file.write("7.0 0.0\n")
+ file.write("4.0 6.0\n")
+ file.write("9.0 6.0\n")
+ file.write("0.0 14.0\n")
+ file.write("2.0 19.0\n")
+ file.write("9.0 17.0\n")
+ file.close()
+
+def test_simple_choose_n_farthest_points_with_a_starting_point():
+ point_set = [[0,1], [0,0], [1,0], [1,1]]
+ i = 0
+ for point in point_set:
+ # The iteration starts with the given starting point
+ sub_set = gudhi.choose_n_farthest_points(points = point_set, nb_points = 1, starting_point = i)
+ assert sub_set[0] == point_set[i]
+ i = i + 1
+
+    # The second selected point is the farthest one from the starting point
+ sub_set = gudhi.choose_n_farthest_points(points = point_set, nb_points = 2, starting_point = 1)
+ assert sub_set[1] == point_set[3]
+ sub_set = gudhi.choose_n_farthest_points(points = point_set, nb_points = 2, starting_point = 3)
+ assert sub_set[1] == point_set[1]
+ sub_set = gudhi.choose_n_farthest_points(points = point_set, nb_points = 2, starting_point = 0)
+ assert sub_set[1] == point_set[2]
+ sub_set = gudhi.choose_n_farthest_points(points = point_set, nb_points = 2, starting_point = 2)
+ assert sub_set[1] == point_set[0]
+
+ # Test the limits
+ assert gudhi.choose_n_farthest_points(points = [], nb_points = 0, starting_point = 0) == []
+ assert gudhi.choose_n_farthest_points(points = [], nb_points = 1, starting_point = 0) == []
+ assert gudhi.choose_n_farthest_points(points = [], nb_points = 0, starting_point = 1) == []
+ assert gudhi.choose_n_farthest_points(points = [], nb_points = 1, starting_point = 1) == []
+
+ # Test from the OFF file written above
+ for i in range (0, 7):
+ assert len(gudhi.choose_n_farthest_points(off_file = 'subsample.off', nb_points = i, starting_point = i)) == i
+
+def test_simple_choose_n_farthest_points_randomed():
+ point_set = [[0,1], [0,0], [1,0], [1,1]]
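+ # With no starting_point argument, the first point is expected to be chosen at random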
+ # Test the limits
+ assert gudhi.choose_n_farthest_points(points = [], nb_points = 0) == []
+ assert gudhi.choose_n_farthest_points(points = [], nb_points = 1) == []
+ assert gudhi.choose_n_farthest_points(points = point_set, nb_points = 0) == []
+
+ # Deliberately request more points than the point set contains
+ for iter in range(1,10):
+ sub_set = gudhi.choose_n_farthest_points(points = point_set, nb_points = iter)
+ for sub in sub_set:
+ found = False
+ for point in point_set:
+ if point == sub:
+ found = True
+ # Check that each point of the subset exists in the point set
+ assert found == True
+
+ # Test from the OFF file written above
+ for i in range (0, 7):
+ assert len(gudhi.choose_n_farthest_points(off_file = 'subsample.off', nb_points = i)) == i
+
+def test_simple_pick_n_random_points():
+ point_set = [[0,1], [0,0], [1,0], [1,1]]
+ # Test the limits
+ assert gudhi.pick_n_random_points(points = [], nb_points = 0) == []
+ assert gudhi.pick_n_random_points(points = [], nb_points = 1) == []
+ assert gudhi.pick_n_random_points(points = point_set, nb_points = 0) == []
+
+ # Deliberately request more points than the point set contains
+ for iter in range(1,10):
+ sub_set = gudhi.pick_n_random_points(points = point_set, nb_points = iter)
+ for sub in sub_set:
+ found = False
+ for point in point_set:
+ if point == sub:
+ found = True
+ # Check that each point of the subset exists in the point set
+ assert found == True
+
+ # Test from the OFF file written above
+ for i in range (0, 7):
+ assert len(gudhi.pick_n_random_points(off_file = 'subsample.off', nb_points = i)) == i
+
+def test_simple_sparsify_points():
+ point_set = [[0,1], [0,0], [1,0], [1,1]]
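+ # Unit square corners: adjacent corners lie at squared distance 1, opposite corners at squared distance 2,
+ # so the greedy sparsification keeps a point only if it is at least min_squared_dist (squared) away from every point kept so far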
+ # Test the limits
+ # assert gudhi.sparsify_point_set(points = [], min_squared_dist = 0.0) == []
+ # assert gudhi.sparsify_point_set(points = [], min_squared_dist = 10.0) == []
+ assert gudhi.sparsify_point_set(points = point_set, min_squared_dist = 0.0) == point_set
+ assert gudhi.sparsify_point_set(points = point_set, min_squared_dist = 1.0) == point_set
+ assert gudhi.sparsify_point_set(points = point_set, min_squared_dist = 2.0) == [[0,1], [1,0]]
+ assert gudhi.sparsify_point_set(points = point_set, min_squared_dist = 2.01) == [[0,1]]
+
+ assert len(gudhi.sparsify_point_set(off_file = 'subsample.off', min_squared_dist = 0.0)) == 7
+ assert len(gudhi.sparsify_point_set(off_file = 'subsample.off', min_squared_dist = 30.0)) == 5
+ assert len(gudhi.sparsify_point_set(off_file = 'subsample.off', min_squared_dist = 40.0)) == 4
+ assert len(gudhi.sparsify_point_set(off_file = 'subsample.off', min_squared_dist = 90.0)) == 3
+ assert len(gudhi.sparsify_point_set(off_file = 'subsample.off', min_squared_dist = 100.0)) == 2
+ assert len(gudhi.sparsify_point_set(off_file = 'subsample.off', min_squared_dist = 325.0)) == 2
+ assert len(gudhi.sparsify_point_set(off_file = 'subsample.off', min_squared_dist = 325.01)) == 1
diff --git a/src/cython/test/test_tangential_complex.py b/src/cython/test/test_tangential_complex.py
new file mode 100755
index 00000000..c191baa4
--- /dev/null
+++ b/src/cython/test/test_tangential_complex.py
@@ -0,0 +1,52 @@
+from gudhi import TangentialComplex, SimplexTree
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+
+def test_tangential():
+ point_list = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]]
+ tc = TangentialComplex(points=point_list)
+ assert tc.__is_defined() == True
+ assert tc.num_vertices() == 4
+
+ st = tc.create_simplex_tree()
+ assert st.__is_defined() == True
+ assert st.__is_persistence_defined() == False
+
+ assert st.num_simplices() == 6
+ assert st.num_vertices() == 4
+
+ assert st.get_filtered_tree() == \
+ [([0], 0.0), ([1], 0.0), ([2], 0.0), ([0, 2], 0.0), ([3], 0.0), ([1, 3], 0.0)]
+ assert st.get_cofaces([0], 1) == [([0, 2], 0.0)]
+
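+ # get_point returns an empty list for vertex indices that are out of range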
+ assert point_list[0] == tc.get_point(0)
+ assert point_list[1] == tc.get_point(1)
+ assert point_list[2] == tc.get_point(2)
+ assert point_list[3] == tc.get_point(3)
+ assert tc.get_point(4) == []
+ assert tc.get_point(125) == []
diff --git a/src/cython/test/test_witness_complex.py b/src/cython/test/test_witness_complex.py
new file mode 100755
index 00000000..7d1fb6be
--- /dev/null
+++ b/src/cython/test/test_witness_complex.py
@@ -0,0 +1,62 @@
+from gudhi import WitnessComplex, StrongWitnessComplex, SimplexTree
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+
+def test_empty_witness_complex():
+ witness = WitnessComplex()
+ assert witness.__is_defined() == False
+
+def test_witness_complex():
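+ # Each row of nearest_landmark_table gives, for one witness, its [landmark_id, distance] pairs,
+ # here ordered by increasing distance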
+ nearest_landmark_table = [[[0, 0], [1, 1], [2, 2], [3, 3], [4, 4]],
+ [[1, 0], [2, 1], [3, 2], [4, 3], [0, 4]],
+ [[2, 0], [3, 1], [4, 2], [0, 3], [1, 4]],
+ [[3, 0], [4, 1], [0, 2], [1, 3], [2, 4]],
+ [[4, 0], [0, 1], [1, 2], [2, 3], [3, 4]]]
+
+ witness_complex = WitnessComplex(nearest_landmark_table=nearest_landmark_table)
+ simplex_tree = witness_complex.create_simplex_tree(max_alpha_square=4.1)
+ assert simplex_tree.num_vertices() == 5
+ assert simplex_tree.num_simplices() == 31
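+ # 31 = 2^5 - 1 simplices: the full simplex on the 5 landmarks; limiting the dimension to 2
+ # keeps 5 vertices + 10 edges + 10 triangles = 25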
+ simplex_tree = witness_complex.create_simplex_tree(max_alpha_square=4.1, limit_dimension=2)
+ assert simplex_tree.num_vertices() == 5
+ assert simplex_tree.num_simplices() == 25
+
+def test_strong_witness_complex():
+ nearest_landmark_table = [[[0, 0], [1, 1], [2, 2], [3, 3], [4, 4]],
+ [[1, 0], [2, 1], [3, 2], [4, 3], [0, 4]],
+ [[2, 0], [3, 1], [4, 2], [0, 3], [1, 4]],
+ [[3, 0], [4, 1], [0, 2], [1, 3], [2, 4]],
+ [[4, 0], [0, 1], [1, 2], [2, 3], [3, 4]]]
+
+ strong_witness_complex = StrongWitnessComplex(nearest_landmark_table=nearest_landmark_table)
+ simplex_tree = strong_witness_complex.create_simplex_tree(max_alpha_square=4.1)
+ assert simplex_tree.num_vertices() == 5
+ assert simplex_tree.num_simplices() == 31
+ simplex_tree = strong_witness_complex.create_simplex_tree(max_alpha_square=4.1, limit_dimension=2)
+ assert simplex_tree.num_vertices() == 5
+ assert simplex_tree.num_simplices() == 25
diff --git a/src/debian/changelog b/src/debian/changelog
deleted file mode 100644
index 32b3f6f9..00000000
--- a/src/debian/changelog
+++ /dev/null
@@ -1,5 +0,0 @@
-gudhi (1.3.0-1) unstable; urgency=low
-
- * Initial release.
-
- -- Marc Glisse <marc.glisse@inria.fr> Sat, 26 Mar 2016 10:51:01 +0100
diff --git a/src/debian/compat b/src/debian/compat
deleted file mode 100644
index ec635144..00000000
--- a/src/debian/compat
+++ /dev/null
@@ -1 +0,0 @@
-9
diff --git a/src/debian/control b/src/debian/control
deleted file mode 100644
index 838234a9..00000000
--- a/src/debian/control
+++ /dev/null
@@ -1,26 +0,0 @@
-Source: gudhi
-Priority: optional
-Maintainer: Marc Glisse <marc.glisse@normalesup.org>
-Build-Depends: debhelper (>= 9), cmake, libboost-dev
-Standards-Version: 3.9.6
-Section: libs
-Homepage: http://gudhi.gforge.inria.fr/
-#Vcs-Git: git://anonscm.debian.org/collab-maint/gudhi.git
-#Vcs-Browser: https://anonscm.debian.org/gitweb/?p=collab-maint/gudhi.git;a=summary
-
-Package: libgudhi-dev
-Section: libdevel
-Architecture: all
-Depends: libboost-dev, ${misc:Depends}
-Recommends: libcgal-dev
-Description: Gudhi library for topological data analysis
- The Gudhi library (Geometric Understanding in Higher Dimensions) is a generic
- open source C++ library for Computational Topology and Topological Data
- Analysis (TDA).
- .
- The current release of the GUDHI library includes:
- * Data structures to represent, construct and manipulate simplicial and
- cubical complexes, including alpha-complex, witness complex, Rips complex.
- * Algorithms to compute persistent homology and multi-field persistent
- homology.
- * Simplication of simplicial complexes by edge contraction.
diff --git a/src/debian/copyright b/src/debian/copyright
deleted file mode 100644
index 2e1f88cd..00000000
--- a/src/debian/copyright
+++ /dev/null
@@ -1,28 +0,0 @@
-Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
-Upstream-Name: gudhi
-Upstream-Contact: gudhi-users@lists.gforge.inria.fr
-Source: <url://http://gudhi.gforge.inria.fr/>
-
-Files: *
-Copyright: 2014-2016 Inria Sophia Antipolis-Méditerranée
- 2014-2016 Inria Saclay - Ile de France
- 2014-2016 Université Nice Sophia Antipolis
-License: GPL-3.0+
-
-License: GPL-3.0+
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
- .
- This package is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
- .
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>.
- .
- On Debian systems, the complete text of the GNU General
- Public License version 3 can be found in "/usr/share/common-licenses/GPL-3".
-
diff --git a/src/debian/docs b/src/debian/docs
deleted file mode 100644
index 878a2ba1..00000000
--- a/src/debian/docs
+++ /dev/null
@@ -1,2 +0,0 @@
-Conventions.txt
-README
diff --git a/src/debian/rules b/src/debian/rules
deleted file mode 100755
index c9b049af..00000000
--- a/src/debian/rules
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/make -f
-# See debhelper(7) (uncomment to enable)
-# output every command that modifies files on the build system.
-#export DH_VERBOSE = 1
-
-# see EXAMPLES in dpkg-buildflags(1) and read /usr/share/dpkg/*
-DPKG_EXPORT_BUILDFLAGS = 1
-include /usr/share/dpkg/default.mk
-
-# see FEATURE AREAS in dpkg-buildflags(1)
-#export DEB_BUILD_MAINT_OPTIONS = hardening=+all
-
-# see ENVIRONMENT in dpkg-buildflags(1)
-# package maintainers to append CFLAGS
-#export DEB_CFLAGS_MAINT_APPEND = -Wall -pedantic
-# package maintainers to append LDFLAGS
-#export DEB_LDFLAGS_MAINT_APPEND = -Wl,--as-needed
-
-
-# main packaging script based on dh7 syntax
-%:
- dh $@
-
-# dh_make generated override targets
-# This is example for Cmake (See https://bugs.debian.org/641051 )
-#override_dh_auto_configure:
-# dh_auto_configure -- \
-# -DCMAKE_LIBRARY_PATH=$(DEB_HOST_MULTIARCH)
diff --git a/src/debian/source/format b/src/debian/source/format
deleted file mode 100644
index 163aaf8d..00000000
--- a/src/debian/source/format
+++ /dev/null
@@ -1 +0,0 @@
-3.0 (quilt)