Diffstat (limited to 'src')
-rw-r--r--  src/Alpha_complex/include/gudhi/Alpha_complex.h | 2
-rw-r--r--  src/Alpha_complex/test/Alpha_complex_unit_test.cpp | 4
-rw-r--r--  src/Alpha_complex/utilities/CMakeLists.txt | 65
-rw-r--r--  src/Alpha_complex/utilities/README | 177
-rw-r--r--  src/Alpha_complex/utilities/alpha_complex_3d_helper.h (renamed from src/Persistent_cohomology/example/alpha_complex_3d_helper.h) | 8
-rw-r--r--  src/Alpha_complex/utilities/alpha_complex_3d_persistence.cpp (renamed from src/Persistent_cohomology/example/alpha_complex_3d_persistence.cpp) | 119
-rw-r--r--  src/Alpha_complex/utilities/alpha_complex_persistence.cpp (renamed from src/Persistent_cohomology/example/alpha_complex_persistence.cpp) | 63
-rw-r--r--  src/Alpha_complex/utilities/exact_alpha_complex_3d_persistence.cpp (renamed from src/Persistent_cohomology/example/exact_alpha_complex_3d_persistence.cpp) | 121
-rw-r--r--  src/Alpha_complex/utilities/periodic_alpha_complex_3d_persistence.cpp | 308
-rw-r--r--  src/Alpha_complex/utilities/weighted_alpha_complex_3d_persistence.cpp (renamed from src/Persistent_cohomology/example/weighted_alpha_complex_3d_persistence.cpp) | 164
-rw-r--r--  src/Alpha_complex/utilities/weighted_periodic_alpha_complex_3d_persistence.cpp (renamed from src/Persistent_cohomology/example/periodic_alpha_complex_3d_persistence.cpp) | 113
-rw-r--r--  src/Bitmap_cubical_complex/doc/Gudhi_Cubical_Complex_doc.h | 55
-rw-r--r--  src/Bitmap_cubical_complex/example/CMakeLists.txt | 26
-rw-r--r--  src/Bitmap_cubical_complex/example/Random_bitmap_cubical_complex.cpp | 19
-rw-r--r--  src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h | 130
-rw-r--r--  src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex/counter.h | 13
-rw-r--r--  src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h | 419
-rw-r--r--  src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h | 200
-rw-r--r--  src/Bitmap_cubical_complex/test/Bitmap_test.cpp | 1476
-rw-r--r--  src/Bitmap_cubical_complex/utilities/CMakeLists.txt | 29
-rw-r--r--  src/Bitmap_cubical_complex/utilities/README | 18
-rw-r--r--  src/Bitmap_cubical_complex/utilities/cubical_complex_persistence.cpp (renamed from src/Bitmap_cubical_complex/example/Bitmap_cubical_complex.cpp) | 24
-rw-r--r--  src/Bitmap_cubical_complex/utilities/periodic_cubical_complex_persistence.cpp (renamed from src/Bitmap_cubical_complex/example/Bitmap_cubical_complex_periodic_boundary_conditions.cpp) | 29
-rw-r--r--  src/Bottleneck_distance/benchmark/CMakeLists.txt | 4
-rw-r--r--  src/Bottleneck_distance/example/CMakeLists.txt | 20
-rw-r--r--  src/Bottleneck_distance/example/README | 19
-rw-r--r--  src/Bottleneck_distance/include/gudhi/Neighbors_finder.h | 8
-rw-r--r--  src/Bottleneck_distance/test/CMakeLists.txt | 4
-rw-r--r--  src/Bottleneck_distance/utilities/CMakeLists.txt | 16
-rw-r--r--  src/Bottleneck_distance/utilities/README | 10
-rw-r--r--  src/Bottleneck_distance/utilities/bottleneck_distance.cpp (renamed from src/Bottleneck_distance/example/bottleneck_read_file_example.cpp) | 16
-rw-r--r--  src/CMakeLists.txt | 1
-rw-r--r--  src/Contraction/example/CMakeLists.txt | 3
-rw-r--r--  src/Contraction/example/Garland_heckbert.cpp | 5
-rw-r--r--  src/Doxyfile | 8
-rw-r--r--  src/GudhUI/CMakeLists.txt | 34
-rw-r--r--  src/Hasse_complex/include/gudhi/Hasse_complex.h | 11
-rw-r--r--  src/Nerve_GIC/doc/COPYRIGHT | 19
-rw-r--r--  src/Nerve_GIC/doc/GIC.jpg | bin 0 -> 457905 bytes
-rw-r--r--  src/Nerve_GIC/doc/GIC.pdf | bin 0 -> 26073 bytes
-rw-r--r--  src/Nerve_GIC/doc/Intro_graph_induced_complex.h | 216
-rw-r--r--  src/Nerve_GIC/doc/coordGICvisu.pdf | bin 0 -> 20745 bytes
-rw-r--r--  src/Nerve_GIC/doc/coordGICvisu2.jpg | bin 0 -> 1259868 bytes
-rw-r--r--  src/Nerve_GIC/doc/funcGICvisu.jpg | bin 0 -> 71647 bytes
-rw-r--r--  src/Nerve_GIC/doc/gicvisu.jpg | bin 0 -> 167192 bytes
-rw-r--r--  src/Nerve_GIC/doc/gicvoronoivisu.jpg | bin 0 -> 37785 bytes
-rw-r--r--  src/Nerve_GIC/doc/nerve.png | bin 0 -> 45129 bytes
-rw-r--r--  src/Nerve_GIC/doc/nervevisu.jpg | bin 0 -> 127619 bytes
-rw-r--r--  src/Nerve_GIC/example/CMakeLists.txt | 29
-rw-r--r--  src/Nerve_GIC/example/CoordGIC.cpp | 93
-rw-r--r--  src/Nerve_GIC/example/FuncGIC.cpp | 94
-rw-r--r--  src/Nerve_GIC/example/GIC.cpp | 95
-rwxr-xr-x  src/Nerve_GIC/example/KeplerMapperVisuFromTxtFile.py | 72
-rw-r--r--  src/Nerve_GIC/example/Nerve.cpp | 95
-rw-r--r--  src/Nerve_GIC/example/Nerve.txt | 43
-rw-r--r--  src/Nerve_GIC/example/VoronoiGIC.cpp | 90
-rwxr-xr-x  src/Nerve_GIC/example/km.py | 390
-rw-r--r--  src/Nerve_GIC/example/km.py.COPYRIGHT | 26
-rw-r--r--  src/Nerve_GIC/include/gudhi/GIC.h | 1166
-rw-r--r--  src/Nerve_GIC/test/CMakeLists.txt | 14
-rw-r--r--  src/Nerve_GIC/test/data/cloud | 6
-rw-r--r--  src/Nerve_GIC/test/data/cover | 3
-rw-r--r--  src/Nerve_GIC/test/data/graph | 3
-rw-r--r--  src/Nerve_GIC/test/test_GIC.cpp | 90
-rw-r--r--  src/Persistent_cohomology/doc/Intro_persistent_cohomology.h | 39
-rw-r--r--  src/Persistent_cohomology/example/CMakeLists.txt | 52
-rw-r--r--  src/Persistent_cohomology/example/README | 121
-rw-r--r--  src/Persistent_cohomology/example/persistence_from_file.cpp | 3
-rw-r--r--  src/Persistent_cohomology/example/persistence_from_simple_simplex_tree.cpp | 4
-rw-r--r--  src/Persistent_cohomology/example/plain_homology.cpp | 2
-rw-r--r--  src/Persistent_cohomology/example/rips_persistence_step_by_step.cpp | 59
-rw-r--r--  src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h | 13
-rw-r--r--  src/Persistent_cohomology/test/betti_numbers_unit_test.cpp | 4
-rw-r--r--  src/Persistent_cohomology/test/persistent_cohomology_unit_test.cpp | 5
-rw-r--r--  src/Persistent_cohomology/test/persistent_cohomology_unit_test_multi_field.cpp | 3
-rw-r--r--  src/Rips_complex/example/example_rips_complex_from_csv_distance_matrix_file.cpp | 2
-rw-r--r--  src/Rips_complex/test/test_rips_complex.cpp | 2
-rw-r--r--  src/Rips_complex/utilities/CMakeLists.txt | 21
-rw-r--r--  src/Rips_complex/utilities/README | 74
-rw-r--r--  src/Rips_complex/utilities/rips_distance_matrix_persistence.cpp (renamed from src/Persistent_cohomology/example/rips_distance_matrix_persistence.cpp) | 2
-rw-r--r--  src/Rips_complex/utilities/rips_persistence.cpp (renamed from src/Persistent_cohomology/example/rips_persistence.cpp) | 0
-rw-r--r--  src/Simplex_tree/doc/Intro_simplex_tree.h | 9
-rw-r--r--  src/Simplex_tree/example/CMakeLists.txt | 17
-rw-r--r--  src/Simplex_tree/example/cech_complex_cgal_mini_sphere_3d.cpp | 234
-rw-r--r--  src/Simplex_tree/example/example_alpha_shapes_3_simplex_tree_from_off_file.cpp | 2
-rw-r--r--  src/Simplex_tree/example/graph_expansion_with_blocker.cpp | 79
-rw-r--r--  src/Simplex_tree/example/mini_simplex_tree.cpp | 2
-rw-r--r--  src/Simplex_tree/example/simple_simplex_tree.cpp | 38
-rw-r--r--  src/Simplex_tree/include/gudhi/Simplex_tree.h | 213
-rw-r--r--  src/Simplex_tree/test/CMakeLists.txt | 22
-rw-r--r--  src/Simplex_tree/test/README | 2
-rw-r--r--  src/Simplex_tree/test/simplex_tree_graph_expansion_unit_test.cpp | 235
-rw-r--r--  src/Simplex_tree/test/simplex_tree_iostream_operator_unit_test.cpp | 136
-rw-r--r--  src/Simplex_tree/test/simplex_tree_remove_unit_test.cpp | 427
-rw-r--r--  src/Simplex_tree/test/simplex_tree_unit_test.cpp | 295
-rw-r--r--  src/Spatial_searching/doc/Intro_spatial_searching.h | 2
-rw-r--r--  src/Spatial_searching/example/example_spatial_searching.cpp | 22
-rw-r--r--  src/Spatial_searching/include/gudhi/Kd_tree_search.h | 46
-rw-r--r--  src/Spatial_searching/test/test_Kd_tree_search.cpp | 24
-rw-r--r--  src/Subsampling/include/gudhi/sparsify_point_set.h | 2
-rw-r--r--  src/Tangential_complex/include/gudhi/Tangential_complex.h | 18
-rw-r--r--  src/Witness_complex/doc/Witness_complex_doc.h | 14
-rw-r--r--  src/Witness_complex/example/CMakeLists.txt | 32
-rw-r--r--  src/Witness_complex/example/example_strong_witness_complex_off.cpp | 8
-rw-r--r--  src/Witness_complex/example/example_witness_complex_off.cpp | 6
-rw-r--r--  src/Witness_complex/example/example_witness_complex_sphere.cpp | 4
-rw-r--r--  src/Witness_complex/include/gudhi/Euclidean_strong_witness_complex.h | 2
-rw-r--r--  src/Witness_complex/include/gudhi/Euclidean_witness_complex.h | 2
-rw-r--r--  src/Witness_complex/include/gudhi/Strong_witness_complex.h | 7
-rw-r--r--  src/Witness_complex/include/gudhi/Witness_complex.h | 1
-rw-r--r--  src/Witness_complex/test/test_euclidean_simple_witness_complex.cpp | 2
-rw-r--r--  src/Witness_complex/utilities/CMakeLists.txt | 28
-rw-r--r--  src/Witness_complex/utilities/README | 74
-rw-r--r--  src/Witness_complex/utilities/strong_witness_persistence.cpp (renamed from src/Witness_complex/example/example_strong_witness_persistence.cpp) | 6
-rw-r--r--  src/Witness_complex/utilities/weak_witness_persistence.cpp (renamed from src/Witness_complex/example/example_witness_complex_persistence.cpp) | 6
-rw-r--r--  src/cmake/modules/GUDHI_doxygen_target.cmake | 5
-rw-r--r--  src/cmake/modules/GUDHI_third_party_libraries.cmake | 14
-rw-r--r--  src/cmake/modules/GUDHI_user_version_target.cmake | 6
-rw-r--r--  src/common/doc/file_formats.h | 83
-rw-r--r--  src/common/doc/main_page.h | 165
-rw-r--r--  src/common/include/gudhi/graph_simplicial_complex.h | 63
-rw-r--r--  src/common/include/gudhi/reader_utils.h | 4
-rw-r--r--  src/common/test/test_distance_matrix_reader.cpp | 4
-rw-r--r--  src/common/test/test_persistence_intervals_reader.cpp | 37
-rw-r--r--  src/common/utilities/README | 26
-rw-r--r--  src/common/utilities/off_file_from_shape_generator.cpp | 2
-rw-r--r--  src/cython/CMakeLists.txt | 247
-rw-r--r--  src/cython/cython/periodic_cubical_complex.pyx | 14
-rwxr-xr-x  src/cython/cython/persistence_graphical_tools.py | 76
-rw-r--r--  src/cython/cython/persistence_representations_intervals.pyx | 318
-rw-r--r--  src/cython/cython/persistence_representations_landscapes.pyx | 378
-rw-r--r--  src/cython/cython/persistence_representations_landscapes_on_grid.pyx | 388
-rw-r--r--  src/cython/cython/reader_utils.pyx | 95
-rw-r--r--  src/cython/cython/simplex_tree.pyx | 153
-rw-r--r--  src/cython/doc/Makefile.in | 44
-rw-r--r--  src/cython/doc/_templates/layout.html | 1
-rw-r--r--  src/cython/doc/alpha_complex_sum.rst | 2
-rw-r--r--  src/cython/doc/alpha_complex_user.rst | 11
-rw-r--r--  src/cython/doc/bottleneck_distance_sum.rst | 2
-rw-r--r--  src/cython/doc/bottleneck_distance_user.rst | 4
-rw-r--r--  src/cython/doc/citation.rst | 2
-rwxr-xr-x  src/cython/doc/conf.py | 90
-rw-r--r--  src/cython/doc/cubical_complex_sum.rst | 22
-rw-r--r--  src/cython/doc/cubical_complex_user.rst | 20
-rw-r--r--  src/cython/doc/examples.rst | 19
-rw-r--r--  src/cython/doc/fileformats.rst | 33
-rwxr-xr-x  src/cython/doc/generate_examples.py | 43
-rw-r--r--  src/cython/doc/index.rst | 8
-rw-r--r--  src/cython/doc/installation.rst | 23
-rw-r--r--  src/cython/doc/make.bat.in | 67
-rw-r--r--  src/cython/doc/persistence_graphical_tools_user.rst | 31
-rw-r--r--  src/cython/doc/persistent_cohomology_sum.rst | 2
-rw-r--r--  src/cython/doc/persistent_cohomology_user.rst | 2
-rwxr-xr-x  src/cython/doc/pyplots/barcode_persistence.py | 6
-rwxr-xr-x  src/cython/doc/pyplots/diagram_persistence.py | 9
-rwxr-xr-x  src/cython/doc/pyplots/show_palette_values.py | 3
-rwxr-xr-x  src/cython/doc/python3-sphinx-build.py (renamed from src/cython/doc/python3-sphinx-build) | 2
-rw-r--r--  src/cython/doc/reader_utils_ref.rst | 11
-rw-r--r--  src/cython/doc/rips_complex_sum.rst | 2
-rw-r--r--  src/cython/doc/rips_complex_user.rst | 8
-rw-r--r--  src/cython/doc/simplex_tree_sum.rst | 2
-rw-r--r--  src/cython/doc/tangential_complex_sum.rst | 4
-rw-r--r--  src/cython/doc/tangential_complex_user.rst | 17
-rw-r--r--  src/cython/doc/witness_complex_sum.rst | 26
-rw-r--r--  src/cython/doc/witness_complex_user.rst | 4
-rwxr-xr-x  src/cython/example/alpha_complex_diagram_persistence_from_off_file_example.py | 4
-rwxr-xr-x  src/cython/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py | 5
-rwxr-xr-x  src/cython/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py | 5
-rwxr-xr-x  src/cython/example/gudhi_graphical_tools_example.py | 9
-rwxr-xr-x  src/cython/example/persistence_representations_diagrams_example.py | 70
-rwxr-xr-x  src/cython/example/persistence_representations_landscapes_example.py | 63
-rwxr-xr-x  src/cython/example/persistence_representations_landscapes_on_grid_example.py | 110
-rwxr-xr-x  src/cython/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py | 4
-rwxr-xr-x  src/cython/example/rips_complex_diagram_persistence_from_off_file_example.py | 4
-rwxr-xr-x  src/cython/example/rips_persistence_diagram.py | 3
-rwxr-xr-x  src/cython/example/simplex_tree_example.py | 3
-rwxr-xr-x  src/cython/example/tangential_complex_plain_homology_from_off_file_example.py | 4
-rw-r--r--  src/cython/gudhi.pyx.in | 23
-rw-r--r--  src/cython/include/Cubical_complex_interface.h | 6
-rw-r--r--  src/cython/include/PSSK_interface.h | 63
-rw-r--r--  src/cython/include/Persistence_heat_maps_interface.h | 156
-rw-r--r--  src/cython/include/Persistence_intervals_interface.h | 59
-rw-r--r--  src/cython/include/Persistence_intervals_with_distances_interface.h | 43
-rw-r--r--  src/cython/include/Persistence_landscape_interface.h | 200
-rw-r--r--  src/cython/include/Persistence_landscape_on_grid_interface.h | 207
-rw-r--r--  src/cython/include/Persistence_vectors_interface.h | 121
-rw-r--r--  src/cython/include/Reader_utils_interface.h | 56
-rw-r--r--  src/cython/include/Rips_complex_interface.h | 6
-rw-r--r--  src/cython/include/Simplex_tree_interface.h | 4
-rw-r--r--  src/cython/include/Tangential_complex_interface.h | 2
-rw-r--r--  src/cython/setup.py.in | 2
-rwxr-xr-x  src/cython/test/test_cubical_complex.py | 10
-rwxr-xr-x  src/cython/test/test_persistence_representations_intervals.py | 87
-rwxr-xr-x  src/cython/test/test_persistence_representations_landscapes.py | 139
-rwxr-xr-x  src/cython/test/test_persistence_representations_landscapes_on_grid.py | 120
-rwxr-xr-x  src/cython/test/test_reader_utils.py | 88
-rwxr-xr-x  src/cython/test/test_simplex_tree.py | 39
197 files changed, 7787 insertions, 5234 deletions
diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex.h b/src/Alpha_complex/include/gudhi/Alpha_complex.h
index 1ff95c3d..5f7d7622 100644
--- a/src/Alpha_complex/include/gudhi/Alpha_complex.h
+++ b/src/Alpha_complex/include/gudhi/Alpha_complex.h
@@ -268,8 +268,6 @@ class Alpha_complex {
return false; // ----- >>
}
- complex.set_dimension(triangulation_->maximal_dimension());
-
// --------------------------------------------------------------------------------------------
// Simplex_tree construction from loop on triangulation finite full cells list
if (triangulation_->number_of_vertices() > 0) {
diff --git a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp
index 7380547f..166373fe 100644
--- a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp
+++ b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp
@@ -159,7 +159,7 @@ BOOST_AUTO_TEST_CASE(Alpha_complex_from_points) {
BOOST_CHECK(simplex_tree.num_simplices() == 15);
std::cout << "simplex_tree.dimension()=" << simplex_tree.dimension() << std::endl;
- BOOST_CHECK(simplex_tree.dimension() == 4);
+ BOOST_CHECK(simplex_tree.dimension() == 3);
std::cout << "simplex_tree.num_vertices()=" << simplex_tree.num_vertices() << std::endl;
BOOST_CHECK(simplex_tree.num_vertices() == 4);
@@ -232,7 +232,7 @@ BOOST_AUTO_TEST_CASE(Alpha_complex_from_points) {
BOOST_CHECK(simplex_tree.num_simplices() == 10);
std::cout << "simplex_tree.dimension()=" << simplex_tree.dimension() << std::endl;
- BOOST_CHECK(simplex_tree.dimension() == 4);
+ BOOST_CHECK(simplex_tree.dimension() == 1);
std::cout << "simplex_tree.num_vertices()=" << simplex_tree.num_vertices() << std::endl;
BOOST_CHECK(simplex_tree.num_vertices() == 4);
diff --git a/src/Alpha_complex/utilities/CMakeLists.txt b/src/Alpha_complex/utilities/CMakeLists.txt
new file mode 100644
index 00000000..79d9e7dd
--- /dev/null
+++ b/src/Alpha_complex/utilities/CMakeLists.txt
@@ -0,0 +1,65 @@
+cmake_minimum_required(VERSION 2.6)
+project(Alpha_complex_utilities)
+
+if(CGAL_FOUND)
+ add_executable(alpha_complex_3d_persistence alpha_complex_3d_persistence.cpp)
+ target_link_libraries(alpha_complex_3d_persistence ${CGAL_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY})
+ add_executable(exact_alpha_complex_3d_persistence exact_alpha_complex_3d_persistence.cpp)
+ target_link_libraries(exact_alpha_complex_3d_persistence ${CGAL_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY})
+ add_executable(weighted_alpha_complex_3d_persistence weighted_alpha_complex_3d_persistence.cpp)
+ target_link_libraries(weighted_alpha_complex_3d_persistence ${CGAL_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY})
+
+ if (TBB_FOUND)
+ target_link_libraries(alpha_complex_3d_persistence ${TBB_LIBRARIES})
+ target_link_libraries(exact_alpha_complex_3d_persistence ${TBB_LIBRARIES})
+ target_link_libraries(weighted_alpha_complex_3d_persistence ${TBB_LIBRARIES})
+ endif(TBB_FOUND)
+
+ add_test(NAME Alpha_complex_utilities_alpha_complex_3d_persistence COMMAND $<TARGET_FILE:alpha_complex_3d_persistence>
+ "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off" "-p" "2" "-m" "0.45")
+ add_test(NAME Alpha_complex_utilities_exact_alpha_complex_3d COMMAND $<TARGET_FILE:exact_alpha_complex_3d_persistence>
+ "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off" "-p" "2" "-m" "0.45")
+ add_test(NAME Alpha_complex_utilities_weighted_alpha_complex_3d COMMAND $<TARGET_FILE:weighted_alpha_complex_3d_persistence>
+ "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off" "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.weights" "-p" "2" "-m" "0.45")
+
+ install(TARGETS alpha_complex_3d_persistence DESTINATION bin)
+ install(TARGETS exact_alpha_complex_3d_persistence DESTINATION bin)
+ install(TARGETS weighted_alpha_complex_3d_persistence DESTINATION bin)
+
+ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
+ add_executable (alpha_complex_persistence alpha_complex_persistence.cpp)
+ target_link_libraries(alpha_complex_persistence ${CGAL_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY})
+
+ add_executable(periodic_alpha_complex_3d_persistence periodic_alpha_complex_3d_persistence.cpp)
+ target_link_libraries(periodic_alpha_complex_3d_persistence ${CGAL_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY})
+
+ if (TBB_FOUND)
+ target_link_libraries(alpha_complex_persistence ${TBB_LIBRARIES})
+ target_link_libraries(periodic_alpha_complex_3d_persistence ${TBB_LIBRARIES})
+ endif(TBB_FOUND)
+ add_test(NAME Alpha_complex_utilities_alpha_complex_persistence COMMAND $<TARGET_FILE:alpha_complex_persistence>
+ "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off" "-p" "2" "-m" "0.45")
+ add_test(NAME Alpha_complex_utilities_periodic_alpha_complex_3d_persistence COMMAND $<TARGET_FILE:periodic_alpha_complex_3d_persistence>
+ "${CMAKE_SOURCE_DIR}/data/points/grid_10_10_10_in_0_1.off" "${CMAKE_SOURCE_DIR}/data/points/iso_cuboid_3_in_0_1.txt" "-p" "2" "-m" "0")
+
+ install(TARGETS alpha_complex_persistence DESTINATION bin)
+ install(TARGETS periodic_alpha_complex_3d_persistence DESTINATION bin)
+
+ endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
+
+ if (NOT CGAL_VERSION VERSION_LESS 4.11.0)
+ add_executable(weighted_periodic_alpha_complex_3d_persistence weighted_periodic_alpha_complex_3d_persistence.cpp)
+ target_link_libraries(weighted_periodic_alpha_complex_3d_persistence ${CGAL_LIBRARY})
+ if (TBB_FOUND)
+ target_link_libraries(weighted_periodic_alpha_complex_3d_persistence ${TBB_LIBRARIES})
+ endif(TBB_FOUND)
+
+ add_test(NAME Alpha_complex_utilities_weighted_periodic_alpha_complex_3d COMMAND $<TARGET_FILE:weighted_periodic_alpha_complex_3d_persistence>
+ "${CMAKE_SOURCE_DIR}/data/points/grid_10_10_10_in_0_1.off" "${CMAKE_SOURCE_DIR}/data/points/grid_10_10_10_in_0_1.weights"
+ "${CMAKE_SOURCE_DIR}/data/points/iso_cuboid_3_in_0_1.txt" "3" "1.0")
+
+ install(TARGETS weighted_periodic_alpha_complex_3d_persistence DESTINATION bin)
+
+ endif (NOT CGAL_VERSION VERSION_LESS 4.11.0)
+
+endif(CGAL_FOUND)
diff --git a/src/Alpha_complex/utilities/README b/src/Alpha_complex/utilities/README
new file mode 100644
index 00000000..1cd2ca95
--- /dev/null
+++ b/src/Alpha_complex/utilities/README
@@ -0,0 +1,177 @@
+# Alpha_complex #
+
+## `alpha_complex_3d_persistence` ##
+This program computes the persistent homology with coefficient field Z/pZ of the 3D alpha complex built from a 3D point cloud. The output diagram contains one bar per line, written with the convention:
+
+`p dim birth death`
+
+where `dim` is the dimension of the homological feature, `birth` and `death` are respectively the birth and death of the feature, and `p` is the characteristic of the field *Z/pZ* used for homology coefficients (`p` must be a prime number).
+
+**Usage**
+`alpha_complex_3d_persistence [options] <input OFF file>`
+where
+`<input OFF file>` is the path to the input point cloud in [nOFF ASCII format](http://www.geomview.org/docs/html/OFF.html).
+
+**Allowed options**
+
+* `-h [ --help ]` Produce help message
+* `-o [ --output-file ]` Name of the file in which the persistence diagram is written. By default, the diagram is printed to standard output.
+* `-p [ --field-charac ]` (default=11) Characteristic p of the coefficient field Z/pZ for computing homology.
+* `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals.
+
+**Example**
+`alpha_complex_3d_persistence ../../data/points/tore3D_300.off -p 2 -m 0.45`
+
+outputs:
+```
+Simplex_tree dim: 3
+2 0 0 inf
+2 1 0.0682162 1.0001
+2 1 0.0934117 1.00003
+2 2 0.56444 1.03938
+```
+
+Here we retrieve the expected Betti numbers of a 3D torus:
+```
+Betti numbers[0] = 1
+Betti numbers[1] = 2
+Betti numbers[2] = 1
+```
+
+N.B.:
+* `alpha_complex_3d_persistence` only accepts OFF files in dimension 3.
+* Filtration values are alpha square values.
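+
+The diagram written with `-o` is plain text and easy to post-process. As an illustration only (not part of the utility), here is a minimal C++ sketch that reads such a file back, assuming a hypothetical file name `diagram.txt`; essential features are printed with `inf` as death value, so that field is read as text:
+```
+#include <fstream>
+#include <iostream>
+#include <limits>
+#include <string>
+
+int main() {
+  std::ifstream in("diagram.txt");  // hypothetical file written with: alpha_complex_3d_persistence -o diagram.txt ...
+  int p, dim;
+  double birth;
+  std::string death_str;
+  while (in >> p >> dim >> birth >> death_str) {
+    // Essential (infinite) intervals are written as "inf"
+    double death = (death_str == "inf") ? std::numeric_limits<double>::infinity() : std::stod(death_str);
+    std::cout << "Z/" << p << "Z H" << dim << " [" << birth << ", " << death << ")" << std::endl;
+  }
+  return 0;
+}
+```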
+
+
+
+## `exact_alpha_complex_3d_persistence` ##
+Same as `alpha_complex_3d_persistence`, but using exact computation. It is slower, but necessary when the points lie on a grid, for instance.
+
+
+
+## `weighted_alpha_complex_3d_persistence` ##
+Same as `alpha_complex_3d_persistence`, but using weighted points.
+
+**Usage**
+`weighted_alpha_complex_3d_persistence [options] <input OFF file> <input weights file>`
+where
+`<input OFF file>` is the path to the input point cloud in [nOFF ASCII format](http://www.geomview.org/docs/html/OFF.html).
+`<input weights file>` is the path to the file containing the weights of the points (one value per line).
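+
+For illustration, a weights file for three points is plain text with one weight per line (the values below are arbitrary placeholders):
+```
+0.5
+0.81
+1.0
+```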
+
+**Allowed options**
+
+* `-h [ --help ]` Produce help message
+* `-o [ --output-file ]` Name of the file in which the persistence diagram is written. By default, the diagram is printed to standard output.
+* `-p [ --field-charac ]` (default=11) Characteristic p of the coefficient field Z/pZ for computing homology.
+* `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals.
+
+**Example**
+`weighted_alpha_complex_3d_persistence ../../data/points/tore3D_300.off ../../data/points/tore3D_300.weights -p 2 -m 0.45`
+
+outputs:
+```
+Simplex_tree dim: 3
+2 0 -1 inf
+2 1 -0.931784 0.000103311
+2 1 -0.906588 2.60165e-05
+2 2 -0.43556 0.0393798
+```
+
+N.B.:
+* Weight values are explained in the CGAL [Alpha shapes](https://doc.cgal.org/latest/Alpha_shapes_3/index.html#title0)
+and [Regular triangulation](https://doc.cgal.org/latest/Triangulation_3/index.html#Triangulation3secclassRegulartriangulation) documentation.
+* Filtration values are alpha square values.
+
+
+## `periodic_alpha_complex_3d_persistence` ##
+Same as `alpha_complex_3d_persistence`, but using a periodic 3D alpha shape.
+Refer to the [CGAL 3D Periodic Triangulations User Manual](https://doc.cgal.org/latest/Periodic_3_triangulation_3/index.html) for more details.
+
+**Usage**
+`periodic_alpha_complex_3d_persistence [options] <input OFF file> <cuboid file>`
+where
+`<input OFF file>` is the path to the input point cloud in [nOFF ASCII format](http://www.geomview.org/docs/html/OFF.html).
+`<cuboid file>` is the path to the file describing the periodic domain. It must be in the format described [here](http://gudhi.gforge.inria.fr/doc/latest/fileformats.html#FileFormatsIsoCuboid).
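+
+For illustration, assuming the periodic domain is the unit cube (which is what the test file `iso_cuboid_3_in_0_1.txt` describes), the cuboid file contains the six bounds `x_min y_min z_min x_max y_max z_max`:
+```
+0 0 0 1 1 1
+```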
+
+**Allowed options**
+
+* `-h [ --help ]` Produce help message
+* `-o [ --output-file ]` Name of the file in which the persistence diagram is written. By default, the diagram is printed to standard output.
+* `-p [ --field-charac ]` (default=11) Characteristic p of the coefficient field Z/pZ for computing homology.
+* `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals.
+
+
+**Example**
+`periodic_alpha_complex_3d_persistence ../../data/points/grid_10_10_10_in_0_1.off ../../data/points/iso_cuboid_3_in_0_1.txt -p 3 -m 1.0`
+
+outputs:
+```
+Periodic Delaunay computed.
+Simplex_tree dim: 3
+3 0 0 inf
+3 1 0.0025 inf
+3 1 0.0025 inf
+3 1 0.0025 inf
+3 2 0.005 inf
+3 2 0.005 inf
+3 2 0.005 inf
+3 3 0.0075 inf
+```
+
+Here we retrieve the expected Betti numbers of a 3D iso-oriented cuboid with periodic boundary conditions (a 3D torus):
+```
+Betti numbers[0] = 1
+Betti numbers[1] = 3
+Betti numbers[2] = 3
+Betti numbers[3] = 1
+```
+
+N.B.:
+* Cuboid file must be in the format described [here](http://gudhi.gforge.inria.fr/doc/latest/fileformats.html#FileFormatsIsoCuboid).
+* Filtration values are alpha square values.
+
+
+
+
+## `alpha_complex_persistence` ##
+This program computes the persistent homology with coefficient field Z/pZ of the dD alpha complex built from a dD point cloud. The output diagram contains one bar per line, written with the convention:
+
+`p dim birth death`
+
+where `dim` is the dimension of the homological feature, `birth` and `death` are respectively the birth and death of the feature, and `p` is the characteristic of the field *Z/pZ* used for homology coefficients (`p` must be a prime number).
+
+**Usage**
+`alpha_complex_persistence [options] <input OFF file>`
+where
+`<input OFF file>` is the path to the input point cloud in [nOFF ASCII format](http://www.geomview.org/docs/html/OFF.html).
+
+**Allowed options**
+
+* `-h [ --help ]` Produce help message
+* `-o [ --output-file ]` Name of the file in which the persistence diagram is written. By default, the diagram is printed to standard output.
+* `-r [ --max-alpha-square-value ]` (default = inf) Maximal alpha square value for the Alpha complex construction.
+* `-p [ --field-charac ]` (default = 11) Characteristic p of the coefficient field Z/pZ for computing homology.
+* `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals.
+
+**Example**
+`alpha_complex_persistence -r 32 -p 2 -m 0.45 ../../data/points/tore3D_300.off`
+
+outputs:
+```
+Alpha complex is of dimension 3 - 9273 simplices - 300 vertices.
+Simplex_tree dim: 3
+2 0 0 inf
+2 1 0.0682162 1.0001
+2 1 0.0934117 1.00003
+2 2 0.56444 1.03938
+```
+
+Here we retrieve the expected Betti numbers of a 3D torus:
+```
+Betti numbers[0] = 1
+Betti numbers[1] = 2
+Betti numbers[2] = 1
+```
+
+N.B.:
+* Filtration values are alpha square values.
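+
+This utility is a thin wrapper around the GUDHI C++ API. The following minimal sketch reproduces the same pipeline (assuming GUDHI, CGAL and Eigen3 are available; the input file and the parameters are placeholders matching the example above):
+```
+#include <gudhi/Alpha_complex.h>
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Persistent_cohomology.h>
+#include <CGAL/Epick_d.h>
+
+using Kernel = CGAL::Epick_d<CGAL::Dynamic_dimension_tag>;
+using Simplex_tree = Gudhi::Simplex_tree<>;
+using Persistent_cohomology =
+    Gudhi::persistent_cohomology::Persistent_cohomology<Simplex_tree, Gudhi::persistent_cohomology::Field_Zp>;
+
+int main() {
+  // Build the dD alpha complex from an OFF point cloud (placeholder path).
+  Gudhi::alpha_complex::Alpha_complex<Kernel> alpha_complex("../../data/points/tore3D_300.off");
+  Simplex_tree simplex;
+  if (alpha_complex.create_complex(simplex)) {
+    simplex.initialize_filtration();
+    Persistent_cohomology pcoh(simplex);
+    pcoh.init_coefficients(2);                 // Z/2Z coefficients, as with -p 2
+    pcoh.compute_persistent_cohomology(0.45);  // minimal persistence, as with -m 0.45
+    pcoh.output_diagram();                     // one bar per line: p dim birth death
+  }
+  return 0;
+}
+```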
diff --git a/src/Persistent_cohomology/example/alpha_complex_3d_helper.h b/src/Alpha_complex/utilities/alpha_complex_3d_helper.h
index 7865e4ec..6b3b7d5d 100644
--- a/src/Persistent_cohomology/example/alpha_complex_3d_helper.h
+++ b/src/Alpha_complex/utilities/alpha_complex_3d_helper.h
@@ -23,7 +23,7 @@
#ifndef ALPHA_COMPLEX_3D_HELPER_H_
#define ALPHA_COMPLEX_3D_HELPER_H_
-template<class Vertex_list, class Cell_handle>
+template <class Vertex_list, class Cell_handle>
Vertex_list from_cell(const Cell_handle& ch) {
Vertex_list the_list;
for (auto i = 0; i < 4; i++) {
@@ -35,7 +35,7 @@ Vertex_list from_cell(const Cell_handle& ch) {
return the_list;
}
-template<class Vertex_list, class Facet>
+template <class Vertex_list, class Facet>
Vertex_list from_facet(const Facet& fct) {
Vertex_list the_list;
for (auto i = 0; i < 4; i++) {
@@ -49,7 +49,7 @@ Vertex_list from_facet(const Facet& fct) {
return the_list;
}
-template<class Vertex_list, class Edge_3>
+template <class Vertex_list, class Edge_3>
Vertex_list from_edge(const Edge_3& edg) {
Vertex_list the_list;
for (auto i = 0; i < 4; i++) {
@@ -63,7 +63,7 @@ Vertex_list from_edge(const Edge_3& edg) {
return the_list;
}
-template<class Vertex_list, class Vertex_handle>
+template <class Vertex_list, class Vertex_handle>
Vertex_list from_vertex(const Vertex_handle& vh) {
Vertex_list the_list;
#ifdef DEBUG_TRACES
diff --git a/src/Persistent_cohomology/example/alpha_complex_3d_persistence.cpp b/src/Alpha_complex/utilities/alpha_complex_3d_persistence.cpp
index fd227b82..0a021a0f 100644
--- a/src/Persistent_cohomology/example/alpha_complex_3d_persistence.cpp
+++ b/src/Alpha_complex/utilities/alpha_complex_3d_persistence.cpp
@@ -20,6 +20,7 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
+#include <boost/program_options.hpp>
#include <boost/variant.hpp>
#include <gudhi/Simplex_tree.h>
@@ -39,6 +40,7 @@
#include <utility>
#include <list>
#include <vector>
+#include <cstdlib>
#include "alpha_complex_3d_helper.h"
@@ -56,10 +58,10 @@ using Point_3 = Kernel::Point_3;
// filtration with alpha values needed type definition
using Alpha_value_type = Alpha_shape_3::FT;
using Object = CGAL::Object;
-using Dispatch = CGAL::Dispatch_output_iterator<
- CGAL::cpp11::tuple<Object, Alpha_value_type>,
- CGAL::cpp11::tuple<std::back_insert_iterator< std::vector<Object> >,
- std::back_insert_iterator< std::vector<Alpha_value_type> > > >;
+using Dispatch =
+ CGAL::Dispatch_output_iterator<CGAL::cpp11::tuple<Object, Alpha_value_type>,
+ CGAL::cpp11::tuple<std::back_insert_iterator<std::vector<Object> >,
+ std::back_insert_iterator<std::vector<Alpha_value_type> > > >;
using Cell_handle = Alpha_shape_3::Cell_handle;
using Facet = Alpha_shape_3::Facet;
using Edge_3 = Alpha_shape_3::Edge;
@@ -70,41 +72,29 @@ using Vertex_list = std::list<Alpha_shape_3::Vertex_handle>;
using ST = Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_fast_persistence>;
using Filtration_value = ST::Filtration_value;
using Simplex_tree_vertex = ST::Vertex_handle;
-using Alpha_shape_simplex_tree_map = std::map<Alpha_shape_3::Vertex_handle, Simplex_tree_vertex >;
+using Alpha_shape_simplex_tree_map = std::map<Alpha_shape_3::Vertex_handle, Simplex_tree_vertex>;
using Alpha_shape_simplex_tree_pair = std::pair<Alpha_shape_3::Vertex_handle, Simplex_tree_vertex>;
-using Simplex_tree_vector_vertex = std::vector< Simplex_tree_vertex >;
-using PCOH = Gudhi::persistent_cohomology::Persistent_cohomology< ST, Gudhi::persistent_cohomology::Field_Zp >;
+using Simplex_tree_vector_vertex = std::vector<Simplex_tree_vertex>;
+using Persistent_cohomology =
+ Gudhi::persistent_cohomology::Persistent_cohomology<ST, Gudhi::persistent_cohomology::Field_Zp>;
-void usage(const std::string& progName) {
- std::cerr << "Usage: " << progName <<
- " path_to_file_graph coeff_field_characteristic[integer > 0] min_persistence[float >= -1.0]\n";
- exit(-1);
-}
-
-int main(int argc, char * const argv[]) {
- // program args management
- if (argc != 4) {
- std::cerr << "Error: Number of arguments (" << argc << ") is not correct\n";
- usage(argv[0]);
- }
+void program_options(int argc, char *argv[], std::string &off_file_points, std::string &output_file_diag,
+ int &coeff_field_characteristic, Filtration_value &min_persistence);
- int coeff_field_characteristic = atoi(argv[2]);
+int main(int argc, char **argv) {
+ std::string off_file_points;
+ std::string output_file_diag;
+ int coeff_field_characteristic;
+ Filtration_value min_persistence;
- Filtration_value min_persistence = 0.0;
- int returnedScanValue = sscanf(argv[3], "%f", &min_persistence);
- if ((returnedScanValue == EOF) || (min_persistence < -1.0)) {
- std::cerr << "Error: " << argv[3] << " is not correct\n";
- usage(argv[0]);
- }
+ program_options(argc, argv, off_file_points, output_file_diag, coeff_field_characteristic, min_persistence);
- // Read points from file
- std::string offInputFile(argv[1]);
// Read the OFF file (input file name given as parameter) and triangulate points
- Gudhi::Points_3D_off_reader<Point_3> off_reader(offInputFile);
+ Gudhi::Points_3D_off_reader<Point_3> off_reader(off_file_points);
// Check the read operation was correct
if (!off_reader.is_valid()) {
- std::cerr << "Unable to read file " << offInputFile << std::endl;
- usage(argv[0]);
+ std::cerr << "Unable to read file " << off_file_points << std::endl;
+ exit(-1);
}
// Retrieve the triangulation
@@ -142,28 +132,28 @@ int main(int argc, char * const argv[]) {
Filtration_value filtration_max = 0.0;
for (auto object_iterator : the_objects) {
// Retrieve Alpha shape vertex list from object
- if (const Cell_handle * cell = CGAL::object_cast<Cell_handle>(&object_iterator)) {
+ if (const Cell_handle* cell = CGAL::object_cast<Cell_handle>(&object_iterator)) {
vertex_list = from_cell<Vertex_list, Cell_handle>(*cell);
count_cells++;
if (dim_max < 3) {
// Cell is of dim 3
dim_max = 3;
}
- } else if (const Facet * facet = CGAL::object_cast<Facet>(&object_iterator)) {
+ } else if (const Facet* facet = CGAL::object_cast<Facet>(&object_iterator)) {
vertex_list = from_facet<Vertex_list, Facet>(*facet);
count_facets++;
if (dim_max < 2) {
// Facet is of dim 2
dim_max = 2;
}
- } else if (const Edge_3 * edge = CGAL::object_cast<Edge_3>(&object_iterator)) {
+ } else if (const Edge_3* edge = CGAL::object_cast<Edge_3>(&object_iterator)) {
vertex_list = from_edge<Vertex_list, Edge_3>(*edge);
count_edges++;
if (dim_max < 1) {
// Edge_3 is of dim 1
dim_max = 1;
}
- } else if (const Vertex_handle * vertex = CGAL::object_cast<Vertex_handle>(&object_iterator)) {
+ } else if (const Vertex_handle* vertex = CGAL::object_cast<Vertex_handle>(&object_iterator)) {
count_vertices++;
vertex_list = from_vertex<Vertex_list, Vertex_handle>(*vertex);
}
@@ -202,8 +192,6 @@ int main(int argc, char * const argv[]) {
else
std::cout << "This shall not happen" << std::endl;
}
- simplex_tree.set_filtration(filtration_max);
- simplex_tree.set_dimension(dim_max);
#ifdef DEBUG_TRACES
std::cout << "vertices \t\t" << count_vertices << std::endl;
@@ -211,12 +199,10 @@ int main(int argc, char * const argv[]) {
std::cout << "facets \t\t" << count_facets << std::endl;
std::cout << "cells \t\t" << count_cells << std::endl;
-
std::cout << "Information of the Simplex Tree: " << std::endl;
std::cout << " Number of vertices = " << simplex_tree.num_vertices() << " ";
std::cout << " Number of simplices = " << simplex_tree.num_simplices() << std::endl << std::endl;
std::cout << " Dimension = " << simplex_tree.dimension() << " ";
- std::cout << " filtration = " << simplex_tree.filtration() << std::endl << std::endl;
#endif // DEBUG_TRACES
#ifdef DEBUG_TRACES
@@ -231,13 +217,64 @@ int main(int argc, char * const argv[]) {
std::cout << "Simplex_tree dim: " << simplex_tree.dimension() << std::endl;
// Compute the persistence diagram of the complex
- PCOH pcoh(simplex_tree);
+ Persistent_cohomology pcoh(simplex_tree, true);
// initializes the coefficient field for homology
pcoh.init_coefficients(coeff_field_characteristic);
pcoh.compute_persistent_cohomology(min_persistence);
- pcoh.output_diagram();
+ // Output the diagram in filediag
+ if (output_file_diag.empty()) {
+ pcoh.output_diagram();
+ } else {
+ std::cout << "Result in file: " << output_file_diag << std::endl;
+ std::ofstream out(output_file_diag);
+ pcoh.output_diagram(out);
+ out.close();
+ }
return 0;
}
+
+void program_options(int argc, char *argv[], std::string &off_file_points, std::string &output_file_diag,
+ int &coeff_field_characteristic, Filtration_value &min_persistence) {
+ namespace po = boost::program_options;
+ po::options_description hidden("Hidden options");
+ hidden.add_options()("input-file", po::value<std::string>(&off_file_points),
+ "Name of file containing a point set. Format is one point per line: X1 ... Xd ");
+
+ po::options_description visible("Allowed options", 100);
+ visible.add_options()("help,h", "produce help message")(
+ "output-file,o", po::value<std::string>(&output_file_diag)->default_value(std::string()),
+ "Name of file in which the persistence diagram is written. Default print in std::cout")(
+ "field-charac,p", po::value<int>(&coeff_field_characteristic)->default_value(11),
+ "Characteristic p of the coefficient field Z/pZ for computing homology.")(
+ "min-persistence,m", po::value<Filtration_value>(&min_persistence),
+ "Minimal lifetime of homology feature to be recorded. Default is 0. Enter a negative value to see zero length "
+ "intervals");
+
+ po::positional_options_description pos;
+ pos.add("input-file", 1);
+
+ po::options_description all;
+ all.add(visible).add(hidden);
+
+ po::variables_map vm;
+ po::store(po::command_line_parser(argc, argv).options(all).positional(pos).run(), vm);
+ po::notify(vm);
+
+ if (vm.count("help") || !vm.count("input-file")) {
+ std::cout << std::endl;
+ std::cout << "Compute the persistent homology with coefficient field Z/pZ \n";
+ std::cout << "of a 3D Alpha complex defined on a set of input points.\n \n";
+ std::cout << "The output diagram contains one bar per line, written with the convention: \n";
+ std::cout << " p dim b d \n";
+ std::cout << "where dim is the dimension of the homological feature,\n";
+ std::cout << "b and d are respectively the birth and death of the feature and \n";
+ std::cout << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
+
+ std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
+ std::cout << visible << std::endl;
+ std::abort();
+ }
+}
diff --git a/src/Persistent_cohomology/example/alpha_complex_persistence.cpp b/src/Alpha_complex/utilities/alpha_complex_persistence.cpp
index 9e84e91f..2105220a 100644
--- a/src/Persistent_cohomology/example/alpha_complex_persistence.cpp
+++ b/src/Alpha_complex/utilities/alpha_complex_persistence.cpp
@@ -14,12 +14,9 @@
using Simplex_tree = Gudhi::Simplex_tree<>;
using Filtration_value = Simplex_tree::Filtration_value;
-void program_options(int argc, char * argv[]
- , std::string & off_file_points
- , std::string & output_file_diag
- , Filtration_value & alpha_square_max_value
- , int & coeff_field_characteristic
- , Filtration_value & min_persistence);
+void program_options(int argc, char *argv[], std::string &off_file_points, std::string &output_file_diag,
+ Filtration_value &alpha_square_max_value, int &coeff_field_characteristic,
+ Filtration_value &min_persistence);
int main(int argc, char **argv) {
std::string off_file_points;
@@ -28,13 +25,13 @@ int main(int argc, char **argv) {
int coeff_field_characteristic;
Filtration_value min_persistence;
- program_options(argc, argv, off_file_points, output_file_diag, alpha_square_max_value,
- coeff_field_characteristic, min_persistence);
+ program_options(argc, argv, off_file_points, output_file_diag, alpha_square_max_value, coeff_field_characteristic,
+ min_persistence);
// ----------------------------------------------------------------------------
// Init of an alpha complex from an OFF file
// ----------------------------------------------------------------------------
- using Kernel = CGAL::Epick_d< CGAL::Dynamic_dimension_tag >;
+ using Kernel = CGAL::Epick_d<CGAL::Dynamic_dimension_tag>;
Gudhi::alpha_complex::Alpha_complex<Kernel> alpha_complex_from_file(off_file_points);
Simplex_tree simplex;
@@ -42,17 +39,16 @@ int main(int argc, char **argv) {
// ----------------------------------------------------------------------------
// Display information about the alpha complex
// ----------------------------------------------------------------------------
- std::cout << "Simplicial complex is of dimension " << simplex.dimension() <<
- " - " << simplex.num_simplices() << " simplices - " <<
- simplex.num_vertices() << " vertices." << std::endl;
+ std::cout << "Simplicial complex is of dimension " << simplex.dimension() << " - " << simplex.num_simplices()
+ << " simplices - " << simplex.num_vertices() << " vertices." << std::endl;
// Sort the simplices in the order of the filtration
simplex.initialize_filtration();
std::cout << "Simplex_tree dim: " << simplex.dimension() << std::endl;
// Compute the persistence diagram of the complex
- Gudhi::persistent_cohomology::Persistent_cohomology< Simplex_tree,
- Gudhi::persistent_cohomology::Field_Zp > pcoh(simplex);
+ Gudhi::persistent_cohomology::Persistent_cohomology<Simplex_tree, Gudhi::persistent_cohomology::Field_Zp> pcoh(
+ simplex);
// initializes the coefficient field for homology
pcoh.init_coefficients(coeff_field_characteristic);
@@ -72,30 +68,26 @@ int main(int argc, char **argv) {
return 0;
}
-void program_options(int argc, char * argv[]
- , std::string & off_file_points
- , std::string & output_file_diag
- , Filtration_value & alpha_square_max_value
- , int & coeff_field_characteristic
- , Filtration_value & min_persistence) {
+void program_options(int argc, char *argv[], std::string &off_file_points, std::string &output_file_diag,
+ Filtration_value &alpha_square_max_value, int &coeff_field_characteristic,
+ Filtration_value &min_persistence) {
namespace po = boost::program_options;
po::options_description hidden("Hidden options");
- hidden.add_options()
- ("input-file", po::value<std::string>(&off_file_points),
- "Name of file containing a point set. Format is one point per line: X1 ... Xd ");
+ hidden.add_options()("input-file", po::value<std::string>(&off_file_points),
+ "Name of file containing a point set. Format is one point per line: X1 ... Xd ");
po::options_description visible("Allowed options", 100);
- visible.add_options()
- ("help,h", "produce help message")
- ("output-file,o", po::value<std::string>(&output_file_diag)->default_value(std::string()),
- "Name of file in which the persistence diagram is written. Default print in std::cout")
- ("max-alpha-square-value,r",
- po::value<Filtration_value>(&alpha_square_max_value)->default_value(std::numeric_limits<Filtration_value>::infinity()),
- "Maximal alpha square value for the Alpha complex construction.")
- ("field-charac,p", po::value<int>(&coeff_field_characteristic)->default_value(11),
- "Characteristic p of the coefficient field Z/pZ for computing homology.")
- ("min-persistence,m", po::value<Filtration_value>(&min_persistence),
- "Minimal lifetime of homology feature to be recorded. Default is 0. Enter a negative value to see zero length intervals");
+ visible.add_options()("help,h", "produce help message")(
+ "output-file,o", po::value<std::string>(&output_file_diag)->default_value(std::string()),
+ "Name of file in which the persistence diagram is written. Default print in std::cout")(
+ "max-alpha-square-value,r", po::value<Filtration_value>(&alpha_square_max_value)
+ ->default_value(std::numeric_limits<Filtration_value>::infinity()),
+ "Maximal alpha square value for the Alpha complex construction.")(
+ "field-charac,p", po::value<int>(&coeff_field_characteristic)->default_value(11),
+ "Characteristic p of the coefficient field Z/pZ for computing homology.")(
+ "min-persistence,m", po::value<Filtration_value>(&min_persistence),
+ "Minimal lifetime of homology feature to be recorded. Default is 0. Enter a negative value to see zero length "
+ "intervals");
po::positional_options_description pos;
pos.add("input-file", 1);
@@ -104,8 +96,7 @@ void program_options(int argc, char * argv[]
all.add(visible).add(hidden);
po::variables_map vm;
- po::store(po::command_line_parser(argc, argv).
- options(all).positional(pos).run(), vm);
+ po::store(po::command_line_parser(argc, argv).options(all).positional(pos).run(), vm);
po::notify(vm);
if (vm.count("help") || !vm.count("input-file")) {
diff --git a/src/Persistent_cohomology/example/exact_alpha_complex_3d_persistence.cpp b/src/Alpha_complex/utilities/exact_alpha_complex_3d_persistence.cpp
index 8a335075..9a266418 100644
--- a/src/Persistent_cohomology/example/exact_alpha_complex_3d_persistence.cpp
+++ b/src/Alpha_complex/utilities/exact_alpha_complex_3d_persistence.cpp
@@ -4,7 +4,7 @@
*
* Author(s): Vincent Rouvreau
*
- * Copyright (C) 2014 INRIA Saclay (France)
+ * Copyright (C) 2014 INRIA
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -20,6 +20,7 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
+#include <boost/program_options.hpp>
#include <boost/variant.hpp>
#include <gudhi/Simplex_tree.h>
@@ -39,6 +40,7 @@
#include <utility>
#include <list>
#include <vector>
+#include <cstdlib>
#include "alpha_complex_3d_helper.h"
@@ -57,10 +59,10 @@ using Point_3 = Kernel::Point_3;
// filtration with alpha values needed type definition
using Alpha_value_type = Alpha_shape_3::FT;
using Object = CGAL::Object;
-using Dispatch = CGAL::Dispatch_output_iterator<
- CGAL::cpp11::tuple<Object, Alpha_value_type>,
- CGAL::cpp11::tuple<std::back_insert_iterator< std::vector<Object> >,
- std::back_insert_iterator< std::vector<Alpha_value_type> > > >;
+using Dispatch =
+ CGAL::Dispatch_output_iterator<CGAL::cpp11::tuple<Object, Alpha_value_type>,
+ CGAL::cpp11::tuple<std::back_insert_iterator<std::vector<Object> >,
+ std::back_insert_iterator<std::vector<Alpha_value_type> > > >;
using Cell_handle = Alpha_shape_3::Cell_handle;
using Facet = Alpha_shape_3::Facet;
using Edge_3 = Alpha_shape_3::Edge;
@@ -71,41 +73,29 @@ using Vertex_list = std::list<Vertex_handle>;
using ST = Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_fast_persistence>;
using Filtration_value = ST::Filtration_value;
using Simplex_tree_vertex = ST::Vertex_handle;
-using Alpha_shape_simplex_tree_map = std::map<Alpha_shape_3::Vertex_handle, Simplex_tree_vertex >;
+using Alpha_shape_simplex_tree_map = std::map<Alpha_shape_3::Vertex_handle, Simplex_tree_vertex>;
using Alpha_shape_simplex_tree_pair = std::pair<Alpha_shape_3::Vertex_handle, Simplex_tree_vertex>;
-using Simplex_tree_vector_vertex = std::vector< Simplex_tree_vertex >;
-using PCOH = Gudhi::persistent_cohomology::Persistent_cohomology< ST, Gudhi::persistent_cohomology::Field_Zp >;
+using Simplex_tree_vector_vertex = std::vector<Simplex_tree_vertex>;
+using Persistent_cohomology =
+ Gudhi::persistent_cohomology::Persistent_cohomology<ST, Gudhi::persistent_cohomology::Field_Zp>;
-void usage(char * const progName) {
- std::cerr << "Usage: " << progName <<
- " path_to_file_graph coeff_field_characteristic[integer > 0] min_persistence[float >= -1.0]\n";
- exit(-1);
-}
-
-int main(int argc, char * const argv[]) {
- // program args management
- if (argc != 4) {
- std::cerr << "Error: Number of arguments (" << argc << ") is not correct\n";
- usage(argv[0]);
- }
+void program_options(int argc, char *argv[], std::string &off_file_points, std::string &output_file_diag,
+ int &coeff_field_characteristic, Filtration_value &min_persistence);
- int coeff_field_characteristic = atoi(argv[2]);
+int main(int argc, char **argv) {
+ std::string off_file_points;
+ std::string output_file_diag;
+ int coeff_field_characteristic;
+ Filtration_value min_persistence;
- Filtration_value min_persistence = 0.0;
- int returnedScanValue = sscanf(argv[3], "%f", &min_persistence);
- if ((returnedScanValue == EOF) || (min_persistence < -1.0)) {
- std::cerr << "Error: " << argv[3] << " is not correct\n";
- usage(argv[0]);
- }
+ program_options(argc, argv, off_file_points, output_file_diag, coeff_field_characteristic, min_persistence);
- // Read points from file
- std::string offInputFile(argv[1]);
// Read the OFF file (input file name given as parameter) and triangulate points
- Gudhi::Points_3D_off_reader<Point_3> off_reader(offInputFile);
+ Gudhi::Points_3D_off_reader<Point_3> off_reader(off_file_points);
// Check the read operation was correct
if (!off_reader.is_valid()) {
- std::cerr << "Unable to read file " << offInputFile << std::endl;
- usage(argv[0]);
+ std::cerr << "Unable to read file " << off_file_points << std::endl;
+ exit(-1);
}
// Retrieve the triangulation
@@ -143,28 +133,28 @@ int main(int argc, char * const argv[]) {
Filtration_value filtration_max = 0.0;
for (auto object_iterator : the_objects) {
// Retrieve Alpha shape vertex list from object
- if (const Cell_handle * cell = CGAL::object_cast<Cell_handle>(&object_iterator)) {
+ if (const Cell_handle* cell = CGAL::object_cast<Cell_handle>(&object_iterator)) {
vertex_list = from_cell<Vertex_list, Cell_handle>(*cell);
count_cells++;
if (dim_max < 3) {
// Cell is of dim 3
dim_max = 3;
}
- } else if (const Facet * facet = CGAL::object_cast<Facet>(&object_iterator)) {
+ } else if (const Facet* facet = CGAL::object_cast<Facet>(&object_iterator)) {
vertex_list = from_facet<Vertex_list, Facet>(*facet);
count_facets++;
if (dim_max < 2) {
// Facet is of dim 2
dim_max = 2;
}
- } else if (const Edge_3 * edge = CGAL::object_cast<Edge_3>(&object_iterator)) {
+ } else if (const Edge_3* edge = CGAL::object_cast<Edge_3>(&object_iterator)) {
vertex_list = from_edge<Vertex_list, Edge_3>(*edge);
count_edges++;
if (dim_max < 1) {
// Edge_3 is of dim 1
dim_max = 1;
}
- } else if (const Vertex_handle * vertex = CGAL::object_cast<Vertex_handle>(&object_iterator)) {
+ } else if (const Vertex_handle* vertex = CGAL::object_cast<Vertex_handle>(&object_iterator)) {
count_vertices++;
vertex_list = from_vertex<Vertex_list, Vertex_handle>(*vertex);
}
@@ -204,8 +194,6 @@ int main(int argc, char * const argv[]) {
else
std::cout << "This shall not happen" << std::endl;
}
- simplex_tree.set_filtration(filtration_max);
- simplex_tree.set_dimension(dim_max);
#ifdef DEBUG_TRACES
std::cout << "vertices \t\t" << count_vertices << std::endl;
@@ -213,12 +201,10 @@ int main(int argc, char * const argv[]) {
std::cout << "facets \t\t" << count_facets << std::endl;
std::cout << "cells \t\t" << count_cells << std::endl;
-
std::cout << "Information of the Simplex Tree: " << std::endl;
std::cout << " Number of vertices = " << simplex_tree.num_vertices() << " ";
std::cout << " Number of simplices = " << simplex_tree.num_simplices() << std::endl << std::endl;
std::cout << " Dimension = " << simplex_tree.dimension() << " ";
- std::cout << " filtration = " << simplex_tree.filtration() << std::endl << std::endl;
#endif // DEBUG_TRACES
#ifdef DEBUG_TRACES
@@ -233,13 +219,64 @@ int main(int argc, char * const argv[]) {
std::cout << "Simplex_tree dim: " << simplex_tree.dimension() << std::endl;
// Compute the persistence diagram of the complex
- PCOH pcoh(simplex_tree);
+ Persistent_cohomology pcoh(simplex_tree, true);
// initializes the coefficient field for homology
pcoh.init_coefficients(coeff_field_characteristic);
pcoh.compute_persistent_cohomology(min_persistence);
- pcoh.output_diagram();
+ // Output the diagram in filediag
+ if (output_file_diag.empty()) {
+ pcoh.output_diagram();
+ } else {
+ std::cout << "Result in file: " << output_file_diag << std::endl;
+ std::ofstream out(output_file_diag);
+ pcoh.output_diagram(out);
+ out.close();
+ }
return 0;
}
+
+void program_options(int argc, char *argv[], std::string &off_file_points, std::string &output_file_diag,
+ int &coeff_field_characteristic, Filtration_value &min_persistence) {
+ namespace po = boost::program_options;
+ po::options_description hidden("Hidden options");
+ hidden.add_options()("input-file", po::value<std::string>(&off_file_points),
+ "Name of file containing a point set. Format is one point per line: X1 ... Xd ");
+
+ po::options_description visible("Allowed options", 100);
+ visible.add_options()("help,h", "produce help message")(
+ "output-file,o", po::value<std::string>(&output_file_diag)->default_value(std::string()),
+ "Name of file in which the persistence diagram is written. Default print in std::cout")(
+ "field-charac,p", po::value<int>(&coeff_field_characteristic)->default_value(11),
+ "Characteristic p of the coefficient field Z/pZ for computing homology.")(
+ "min-persistence,m", po::value<Filtration_value>(&min_persistence),
+ "Minimal lifetime of homology feature to be recorded. Default is 0. Enter a negative value to see zero length "
+ "intervals");
+
+ po::positional_options_description pos;
+ pos.add("input-file", 1);
+
+ po::options_description all;
+ all.add(visible).add(hidden);
+
+ po::variables_map vm;
+ po::store(po::command_line_parser(argc, argv).options(all).positional(pos).run(), vm);
+ po::notify(vm);
+
+ if (vm.count("help") || !vm.count("input-file")) {
+ std::cout << std::endl;
+ std::cout << "Compute the persistent homology with coefficient field Z/pZ \n";
+ std::cout << "of a 3D Alpha complex defined on a set of input points.\n \n";
+ std::cout << "The output diagram contains one bar per line, written with the convention: \n";
+ std::cout << " p dim b d \n";
+ std::cout << "where dim is the dimension of the homological feature,\n";
+ std::cout << "b and d are respectively the birth and death of the feature and \n";
+ std::cout << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
+
+ std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
+ std::cout << visible << std::endl;
+ std::abort();
+ }
+}
diff --git a/src/Alpha_complex/utilities/periodic_alpha_complex_3d_persistence.cpp b/src/Alpha_complex/utilities/periodic_alpha_complex_3d_persistence.cpp
new file mode 100644
index 00000000..186a58f8
--- /dev/null
+++ b/src/Alpha_complex/utilities/periodic_alpha_complex_3d_persistence.cpp
@@ -0,0 +1,308 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2014 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <boost/program_options.hpp>
+#include <boost/variant.hpp>
+
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Persistent_cohomology.h>
+#include <gudhi/Points_3D_off_io.h>
+
+#include <CGAL/Exact_predicates_inexact_constructions_kernel.h>
+#include <CGAL/Periodic_3_Delaunay_triangulation_traits_3.h>
+#include <CGAL/Periodic_3_Delaunay_triangulation_3.h>
+#include <CGAL/Alpha_shape_3.h>
+#include <CGAL/iterator.h>
+
+#include <fstream>
+#include <cmath>
+#include <string>
+#include <tuple>
+#include <map>
+#include <utility>
+#include <list>
+#include <vector>
+#include <cstdlib>
+
+#include "alpha_complex_3d_helper.h"
+
+// Traits
+using K = CGAL::Exact_predicates_inexact_constructions_kernel;
+using PK = CGAL::Periodic_3_Delaunay_triangulation_traits_3<K>;
+// Vertex type
+using DsVb = CGAL::Periodic_3_triangulation_ds_vertex_base_3<>;
+using Vb = CGAL::Triangulation_vertex_base_3<PK, DsVb>;
+using AsVb = CGAL::Alpha_shape_vertex_base_3<PK, Vb>;
+// Cell type
+using DsCb = CGAL::Periodic_3_triangulation_ds_cell_base_3<>;
+using Cb = CGAL::Triangulation_cell_base_3<PK, DsCb>;
+using AsCb = CGAL::Alpha_shape_cell_base_3<PK, Cb>;
+using Tds = CGAL::Triangulation_data_structure_3<AsVb, AsCb>;
+using P3DT3 = CGAL::Periodic_3_Delaunay_triangulation_3<PK, Tds>;
+using Alpha_shape_3 = CGAL::Alpha_shape_3<P3DT3>;
+using Point_3 = PK::Point_3;
+
+// filtration with alpha values needed type definition
+using Alpha_value_type = Alpha_shape_3::FT;
+using Object = CGAL::Object;
+using Dispatch =
+ CGAL::Dispatch_output_iterator<CGAL::cpp11::tuple<Object, Alpha_value_type>,
+ CGAL::cpp11::tuple<std::back_insert_iterator<std::vector<Object> >,
+ std::back_insert_iterator<std::vector<Alpha_value_type> > > >;
+using Cell_handle = Alpha_shape_3::Cell_handle;
+using Facet = Alpha_shape_3::Facet;
+using Edge_3 = Alpha_shape_3::Edge;
+using Vertex_handle = Alpha_shape_3::Vertex_handle;
+using Vertex_list = std::list<Alpha_shape_3::Vertex_handle>;
+
+// gudhi type definition
+using ST = Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_fast_persistence>;
+using Filtration_value = ST::Filtration_value;
+using Simplex_tree_vertex = ST::Vertex_handle;
+using Alpha_shape_simplex_tree_map = std::map<Alpha_shape_3::Vertex_handle, Simplex_tree_vertex>;
+using Alpha_shape_simplex_tree_pair = std::pair<Alpha_shape_3::Vertex_handle, Simplex_tree_vertex>;
+using Simplex_tree_vector_vertex = std::vector<Simplex_tree_vertex>;
+using Persistent_cohomology =
+ Gudhi::persistent_cohomology::Persistent_cohomology<ST, Gudhi::persistent_cohomology::Field_Zp>;
+
+void program_options(int argc, char *argv[], std::string &off_file_points, std::string &cuboid_file,
+ std::string &output_file_diag, int &coeff_field_characteristic, Filtration_value &min_persistence);
+
+int main(int argc, char **argv) {
+ std::string off_file_points;
+ std::string cuboid_file;
+ std::string output_file_diag;
+ int coeff_field_characteristic;
+ Filtration_value min_persistence;
+
+ program_options(argc, argv, off_file_points, cuboid_file, output_file_diag, coeff_field_characteristic,
+ min_persistence);
+
+ // Read the OFF file (input file name given as parameter) and triangulate points
+ Gudhi::Points_3D_off_reader<Point_3> off_reader(off_file_points);
+ // Check the read operation was correct
+ if (!off_reader.is_valid()) {
+ std::cerr << "Unable to read OFF file " << off_file_points << std::endl;
+ exit(-1);
+ }
+
+ // Read iso_cuboid_3 information from file
+ std::ifstream iso_cuboid_str(cuboid_file);
+ double x_min, y_min, z_min, x_max, y_max, z_max;
+ if (iso_cuboid_str.good()) {
+ iso_cuboid_str >> x_min >> y_min >> z_min >> x_max >> y_max >> z_max;
+ } else {
+ std::cerr << "Unable to read file " << cuboid_file << std::endl;
+ exit(-1);
+ }
+
+ // Retrieve the triangulation
+ std::vector<Point_3> lp = off_reader.get_point_cloud();
+
+ // Define the periodic cube
+ P3DT3 pdt(PK::Iso_cuboid_3(x_min, y_min, z_min, x_max, y_max, z_max));
+ // Heuristic for inserting large point sets (if pts is reasonably large)
+ pdt.insert(lp.begin(), lp.end(), true);
+ // As pdt won't be modified anymore switch to 1-sheeted cover if possible
+ if (pdt.is_triangulation_in_1_sheet()) pdt.convert_to_1_sheeted_covering();
+ std::cout << "Periodic Delaunay computed." << std::endl;
+
+ // Alpha shape construction from the periodic triangulation. CGAL has a strange behavior in REGULARIZED mode
+ // (the default), so GENERAL mode is used instead
+ Alpha_shape_3 as(pdt, 0, Alpha_shape_3::GENERAL);
+
+ // filtration with alpha values from alpha shape
+ std::vector<Object> the_objects;
+ std::vector<Alpha_value_type> the_alpha_values;
+
+ Dispatch disp = CGAL::dispatch_output<Object, Alpha_value_type>(std::back_inserter(the_objects),
+ std::back_inserter(the_alpha_values));
+
+ as.filtration_with_alpha_values(disp);
+#ifdef DEBUG_TRACES
+ std::cout << "filtration_with_alpha_values returns : " << the_objects.size() << " objects" << std::endl;
+#endif // DEBUG_TRACES
+
+ Alpha_shape_3::size_type count_vertices = 0;
+ Alpha_shape_3::size_type count_edges = 0;
+ Alpha_shape_3::size_type count_facets = 0;
+ Alpha_shape_3::size_type count_cells = 0;
+
+ // Loop on objects vector
+ Vertex_list vertex_list;
+ ST simplex_tree;
+ Alpha_shape_simplex_tree_map map_cgal_simplex_tree;
+ std::vector<Alpha_value_type>::iterator the_alpha_value_iterator = the_alpha_values.begin();
+ int dim_max = 0;
+ Filtration_value filtration_max = 0.0;
+ for (auto object_iterator : the_objects) {
+ // Retrieve Alpha shape vertex list from object
+ if (const Cell_handle* cell = CGAL::object_cast<Cell_handle>(&object_iterator)) {
+ vertex_list = from_cell<Vertex_list, Cell_handle>(*cell);
+ count_cells++;
+ if (dim_max < 3) {
+ // Cell is of dim 3
+ dim_max = 3;
+ }
+ } else if (const Facet* facet = CGAL::object_cast<Facet>(&object_iterator)) {
+ vertex_list = from_facet<Vertex_list, Facet>(*facet);
+ count_facets++;
+ if (dim_max < 2) {
+ // Facet is of dim 2
+ dim_max = 2;
+ }
+ } else if (const Edge_3* edge = CGAL::object_cast<Edge_3>(&object_iterator)) {
+ vertex_list = from_edge<Vertex_list, Edge_3>(*edge);
+ count_edges++;
+ if (dim_max < 1) {
+ // Edge_3 is of dim 1
+ dim_max = 1;
+ }
+ } else if (const Vertex_handle* vertex = CGAL::object_cast<Vertex_handle>(&object_iterator)) {
+ count_vertices++;
+ vertex_list = from_vertex<Vertex_list, Vertex_handle>(*vertex);
+ }
+ // Construction of the vector of simplex_tree vertex from list of alpha_shapes vertex
+ Simplex_tree_vector_vertex the_simplex_tree;
+ for (auto the_alpha_shape_vertex : vertex_list) {
+ Alpha_shape_simplex_tree_map::iterator the_map_iterator = map_cgal_simplex_tree.find(the_alpha_shape_vertex);
+ if (the_map_iterator == map_cgal_simplex_tree.end()) {
+ // alpha shape not found
+ Simplex_tree_vertex vertex = map_cgal_simplex_tree.size();
+#ifdef DEBUG_TRACES
+ std::cout << "vertex [" << the_alpha_shape_vertex->point() << "] not found - insert " << vertex << std::endl;
+#endif // DEBUG_TRACES
+ the_simplex_tree.push_back(vertex);
+ map_cgal_simplex_tree.insert(Alpha_shape_simplex_tree_pair(the_alpha_shape_vertex, vertex));
+ } else {
+ // alpha shape found
+ Simplex_tree_vertex vertex = the_map_iterator->second;
+#ifdef DEBUG_TRACES
+ std::cout << "vertex [" << the_alpha_shape_vertex->point() << "] found in " << vertex << std::endl;
+#endif // DEBUG_TRACES
+ the_simplex_tree.push_back(vertex);
+ }
+ }
+ // Construction of the simplex_tree
+ Filtration_value filtr = /*std::sqrt*/ (*the_alpha_value_iterator);
+#ifdef DEBUG_TRACES
+ std::cout << "filtration = " << filtr << std::endl;
+#endif // DEBUG_TRACES
+ if (filtr > filtration_max) {
+ filtration_max = filtr;
+ }
+ simplex_tree.insert_simplex(the_simplex_tree, filtr);
+ if (the_alpha_value_iterator != the_alpha_values.end())
+ ++the_alpha_value_iterator;
+ else
+ std::cout << "This shall not happen" << std::endl;
+ }
+
+#ifdef DEBUG_TRACES
+ std::cout << "vertices \t\t" << count_vertices << std::endl;
+ std::cout << "edges \t\t" << count_edges << std::endl;
+ std::cout << "facets \t\t" << count_facets << std::endl;
+ std::cout << "cells \t\t" << count_cells << std::endl;
+
+ std::cout << "Information of the Simplex Tree: " << std::endl;
+ std::cout << " Number of vertices = " << simplex_tree.num_vertices() << " ";
+ std::cout << " Number of simplices = " << simplex_tree.num_simplices() << std::endl << std::endl;
+ std::cout << " Dimension = " << simplex_tree.dimension() << " ";
+#endif // DEBUG_TRACES
+
+#ifdef DEBUG_TRACES
+ std::cout << "Iterator on vertices: " << std::endl;
+ for (auto vertex : simplex_tree.complex_vertex_range()) {
+ std::cout << vertex << " ";
+ }
+#endif // DEBUG_TRACES
+
+ // Sort the simplices in the order of the filtration
+ simplex_tree.initialize_filtration();
+
+ std::cout << "Simplex_tree dim: " << simplex_tree.dimension() << std::endl;
+ // Compute the persistence diagram of the complex
+ Persistent_cohomology pcoh(simplex_tree, true);
+ // initializes the coefficient field for homology
+ pcoh.init_coefficients(coeff_field_characteristic);
+
+ pcoh.compute_persistent_cohomology(min_persistence);
+
+ // Output the diagram in filediag
+ if (output_file_diag.empty()) {
+ pcoh.output_diagram();
+ } else {
+ std::cout << "Result in file: " << output_file_diag << std::endl;
+ std::ofstream out(output_file_diag);
+ pcoh.output_diagram(out);
+ out.close();
+ }
+
+ return 0;
+}
+
+void program_options(int argc, char *argv[], std::string &off_file_points, std::string &cuboid_file,
+ std::string &output_file_diag, int &coeff_field_characteristic,
+ Filtration_value &min_persistence) {
+ namespace po = boost::program_options;
+ po::options_description hidden("Hidden options");
+ hidden.add_options()("input-file", po::value<std::string>(&off_file_points),
+ "Name of an OFF file containing a 3d point set.")(
+ "cuboid-file", po::value<std::string>(&cuboid_file),
+ "Name of file describing the periodic domain. Format is: min_hx min_hy min_hz\nmax_hx max_hy max_hz");
+
+ po::options_description visible("Allowed options", 100);
+ visible.add_options()("help,h", "produce help message")(
+ "output-file,o", po::value<std::string>(&output_file_diag)->default_value(std::string()),
+ "Name of file in which the persistence diagram is written. Default print in std::cout")(
+ "field-charac,p", po::value<int>(&coeff_field_characteristic)->default_value(11),
+ "Characteristic p of the coefficient field Z/pZ for computing homology.")(
+ "min-persistence,m", po::value<Filtration_value>(&min_persistence),
+ "Minimal lifetime of homology feature to be recorded. Default is 0. Enter a negative value to see zero length "
+ "intervals");
+
+ po::positional_options_description pos;
+ pos.add("input-file", 1);
+ pos.add("cuboid-file", 2);
+
+ po::options_description all;
+ all.add(visible).add(hidden);
+
+ po::variables_map vm;
+ po::store(po::command_line_parser(argc, argv).options(all).positional(pos).run(), vm);
+ po::notify(vm);
+
+ if (vm.count("help") || !vm.count("input-file") || !vm.count("cuboid-file")) {
+ std::cout << std::endl;
+ std::cout << "Compute the persistent homology with coefficient field Z/pZ \n";
+ std::cout << "of a periodic 3D Alpha complex defined on a set of input points.\n \n";
+ std::cout << "The output diagram contains one bar per line, written with the convention: \n";
+ std::cout << " p dim b d \n";
+ std::cout << "where dim is the dimension of the homological feature,\n";
+ std::cout << "b and d are respectively the birth and death of the feature and \n";
+ std::cout << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
+
+ std::cout << "Usage: " << argv[0] << " [options] input-file cuboid-file" << std::endl << std::endl;
+ std::cout << visible << std::endl;
+ std::abort();
+ }
+}
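
For reference, the cuboid-file expected as the second positional argument only lists the two opposite corners of the periodic domain, in the min_hx min_hy min_hz / max_hx max_hy max_hz layout given by the option text above. A unit cube, for instance (values purely illustrative), would be described by:

0 0 0
1 1 1
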
diff --git a/src/Persistent_cohomology/example/weighted_alpha_complex_3d_persistence.cpp b/src/Alpha_complex/utilities/weighted_alpha_complex_3d_persistence.cpp
index 34b90933..0e73a99b 100644
--- a/src/Persistent_cohomology/example/weighted_alpha_complex_3d_persistence.cpp
+++ b/src/Alpha_complex/utilities/weighted_alpha_complex_3d_persistence.cpp
@@ -20,18 +20,24 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
+#include <boost/program_options.hpp>
#include <boost/variant.hpp>
#include <gudhi/Simplex_tree.h>
#include <gudhi/Persistent_cohomology.h>
#include <gudhi/Points_3D_off_io.h>
+#include <CGAL/config.h>
#include <CGAL/Exact_predicates_inexact_constructions_kernel.h>
-#include <CGAL/Regular_triangulation_euclidean_traits_3.h>
#include <CGAL/Regular_triangulation_3.h>
#include <CGAL/Alpha_shape_3.h>
#include <CGAL/iterator.h>
+// For CGAL < 4.11
+#if CGAL_VERSION_NR < 1041100000
+#include <CGAL/Regular_triangulation_euclidean_traits_3.h>
+#endif // CGAL_VERSION_NR < 1041100000
+
#include <fstream>
#include <cmath>
#include <string>
@@ -44,26 +50,44 @@
#include "alpha_complex_3d_helper.h"
-// Traits
+// Alpha_shape_3 templates type definitions
using Kernel = CGAL::Exact_predicates_inexact_constructions_kernel;
+
+// For CGAL < 4.11
+#if CGAL_VERSION_NR < 1041100000
using Gt = CGAL::Regular_triangulation_euclidean_traits_3<Kernel>;
using Vb = CGAL::Alpha_shape_vertex_base_3<Gt>;
using Fb = CGAL::Alpha_shape_cell_base_3<Gt>;
using Tds = CGAL::Triangulation_data_structure_3<Vb, Fb>;
using Triangulation_3 = CGAL::Regular_triangulation_3<Gt, Tds>;
-using Alpha_shape_3 = CGAL::Alpha_shape_3<Triangulation_3>;
// From file type definition
using Point_3 = Gt::Bare_point;
using Weighted_point_3 = Gt::Weighted_point;
+// For CGAL >= 4.11
+#else // CGAL_VERSION_NR < 1041100000
+using Rvb = CGAL::Regular_triangulation_vertex_base_3<Kernel>;
+using Vb = CGAL::Alpha_shape_vertex_base_3<Kernel,Rvb>;
+using Rcb = CGAL::Regular_triangulation_cell_base_3<Kernel>;
+using Cb = CGAL::Alpha_shape_cell_base_3<Kernel,Rcb>;
+using Tds = CGAL::Triangulation_data_structure_3<Vb,Cb>;
+using Triangulation_3 = CGAL::Regular_triangulation_3<Kernel,Tds>;
+
+// From file type definition
+using Point_3 = Triangulation_3::Bare_point;
+using Weighted_point_3 = Triangulation_3::Weighted_point;
+#endif // CGAL_VERSION_NR < 1041100000
+
+using Alpha_shape_3 = CGAL::Alpha_shape_3<Triangulation_3>;
+
// filtration with alpha values needed type definition
using Alpha_value_type = Alpha_shape_3::FT;
using Object = CGAL::Object;
-using Dispatch = CGAL::Dispatch_output_iterator<
- CGAL::cpp11::tuple<Object, Alpha_value_type>,
- CGAL::cpp11::tuple<std::back_insert_iterator< std::vector<Object> >,
- std::back_insert_iterator< std::vector<Alpha_value_type> > > >;
+using Dispatch =
+ CGAL::Dispatch_output_iterator<CGAL::cpp11::tuple<Object, Alpha_value_type>,
+ CGAL::cpp11::tuple<std::back_insert_iterator<std::vector<Object> >,
+ std::back_insert_iterator<std::vector<Alpha_value_type> > > >;
using Cell_handle = Alpha_shape_3::Cell_handle;
using Facet = Alpha_shape_3::Facet;
using Edge_3 = Alpha_shape_3::Edge;
@@ -74,63 +98,59 @@ using Vertex_list = std::list<Alpha_shape_3::Vertex_handle>;
using ST = Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_fast_persistence>;
using Filtration_value = ST::Filtration_value;
using Simplex_tree_vertex = ST::Vertex_handle;
-using Alpha_shape_simplex_tree_map = std::map<Alpha_shape_3::Vertex_handle, Simplex_tree_vertex >;
+using Alpha_shape_simplex_tree_map = std::map<Alpha_shape_3::Vertex_handle, Simplex_tree_vertex>;
using Alpha_shape_simplex_tree_pair = std::pair<Alpha_shape_3::Vertex_handle, Simplex_tree_vertex>;
-using Simplex_tree_vector_vertex = std::vector< Simplex_tree_vertex >;
-using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology<
- ST, Gudhi::persistent_cohomology::Field_Zp >;
-
-void usage(char * const progName) {
- std::cerr << "Usage: " << progName <<
- " path_to_file_graph path_to_weight_file coeff_field_characteristic[integer > 0] min_persistence[float >= -1.0]\n";
- exit(-1);
-}
+using Simplex_tree_vector_vertex = std::vector<Simplex_tree_vertex>;
+using Persistent_cohomology =
+ Gudhi::persistent_cohomology::Persistent_cohomology<ST, Gudhi::persistent_cohomology::Field_Zp>;
-int main(int argc, char * const argv[]) {
- // program args management
- if (argc != 5) {
- std::cerr << "Error: Number of arguments (" << argc << ") is not correct\n";
- usage(argv[0]);
- }
+void program_options(int argc, char *argv[], std::string &off_file_points, std::string &weight_file,
+ std::string &output_file_diag, int &coeff_field_characteristic, Filtration_value &min_persistence);
+
+int main(int argc, char **argv) {
+ std::string off_file_points;
+ std::string weight_file;
+ std::string output_file_diag;
+ int coeff_field_characteristic;
+ Filtration_value min_persistence;
- int coeff_field_characteristic = atoi(argv[3]);
- Filtration_value min_persistence = strtof(argv[4], nullptr);
+ program_options(argc, argv, off_file_points, weight_file, output_file_diag, coeff_field_characteristic,
+ min_persistence);
- // Read points from file
- std::string offInputFile(argv[1]);
// Read the OFF file (input file name given as parameter) and triangulate points
- Gudhi::Points_3D_off_reader<Point_3> off_reader(offInputFile);
+ Gudhi::Points_3D_off_reader<Point_3> off_reader(off_file_points);
// Check the read operation was correct
if (!off_reader.is_valid()) {
- std::cerr << "Unable to read file " << offInputFile << std::endl;
- usage(argv[0]);
+ std::cerr << "Unable to read OFF file " << off_file_points << std::endl;
+ exit(-1);
}
// Retrieve the triangulation
std::vector<Point_3> lp = off_reader.get_point_cloud();
// Read weights information from file
- std::ifstream weights_ifstr(argv[2]);
+ std::ifstream weights_ifstr(weight_file);
std::vector<Weighted_point_3> wp;
if (weights_ifstr.good()) {
double weight = 0.0;
std::size_t index = 0;
+ wp.reserve(lp.size());
// Attempt read the weight in a double format, return false if it fails
while ((weights_ifstr >> weight) && (index < lp.size())) {
wp.push_back(Weighted_point_3(lp[index], weight));
index++;
}
if (index != lp.size()) {
- std::cerr << "Bad number of weights in file " << argv[2] << std::endl;
- usage(argv[0]);
+ std::cerr << "Bad number of weights in file " << weight_file << std::endl;
+ exit(-1);
}
} else {
- std::cerr << "Unable to read file " << argv[2] << std::endl;
- usage(argv[0]);
+ std::cerr << "Unable to read weights file " << weight_file << std::endl;
+ exit(-1);
}
// alpha shape construction from points. CGAL has a strange behavior in REGULARIZED mode.
- Alpha_shape_3 as(lp.begin(), lp.end(), 0, Alpha_shape_3::GENERAL);
+ Alpha_shape_3 as(wp.begin(), wp.end(), 0, Alpha_shape_3::GENERAL);
#ifdef DEBUG_TRACES
std::cout << "Alpha shape computed in GENERAL mode" << std::endl;
#endif // DEBUG_TRACES
@@ -161,29 +181,28 @@ int main(int argc, char * const argv[]) {
Filtration_value filtration_max = 0.0;
for (auto object_iterator : the_objects) {
// Retrieve Alpha shape vertex list from object
- if (const Cell_handle * cell = CGAL::object_cast<Cell_handle>(&object_iterator)) {
+ if (const Cell_handle* cell = CGAL::object_cast<Cell_handle>(&object_iterator)) {
vertex_list = from_cell<Vertex_list, Cell_handle>(*cell);
count_cells++;
if (dim_max < 3) {
// Cell is of dim 3
dim_max = 3;
}
- } else if (const Facet * facet = CGAL::object_cast<Facet>(&object_iterator)) {
+ } else if (const Facet* facet = CGAL::object_cast<Facet>(&object_iterator)) {
vertex_list = from_facet<Vertex_list, Facet>(*facet);
count_facets++;
if (dim_max < 2) {
// Facet is of dim 2
dim_max = 2;
}
- } else if (const Edge_3 * edge = CGAL::object_cast<Edge_3>(&object_iterator)) {
+ } else if (const Edge_3* edge = CGAL::object_cast<Edge_3>(&object_iterator)) {
vertex_list = from_edge<Vertex_list, Edge_3>(*edge);
count_edges++;
if (dim_max < 1) {
// Edge_3 is of dim 1
dim_max = 1;
}
- } else if (const Alpha_shape_3::Vertex_handle * vertex =
- CGAL::object_cast<Alpha_shape_3::Vertex_handle>(&object_iterator)) {
+ } else if (const Vertex_handle* vertex = CGAL::object_cast<Vertex_handle>(&object_iterator)) {
count_vertices++;
vertex_list = from_vertex<Vertex_list, Vertex_handle>(*vertex);
}
@@ -209,7 +228,7 @@ int main(int argc, char * const argv[]) {
}
}
// Construction of the simplex_tree
- Filtration_value filtr = /*std::sqrt*/(*the_alpha_value_iterator);
+ Filtration_value filtr = /*std::sqrt*/ (*the_alpha_value_iterator);
#ifdef DEBUG_TRACES
std::cout << "filtration = " << filtr << std::endl;
#endif // DEBUG_TRACES
@@ -222,8 +241,6 @@ int main(int argc, char * const argv[]) {
else
std::cout << "This shall not happen" << std::endl;
}
- simplex_tree.set_filtration(filtration_max);
- simplex_tree.set_dimension(dim_max);
#ifdef DEBUG_TRACES
std::cout << "vertices \t\t" << count_vertices << std::endl;
@@ -231,12 +248,10 @@ int main(int argc, char * const argv[]) {
std::cout << "facets \t\t" << count_facets << std::endl;
std::cout << "cells \t\t" << count_cells << std::endl;
-
std::cout << "Information of the Simplex Tree: " << std::endl;
std::cout << " Number of vertices = " << simplex_tree.num_vertices() << " ";
std::cout << " Number of simplices = " << simplex_tree.num_simplices() << std::endl << std::endl;
std::cout << " Dimension = " << simplex_tree.dimension() << " ";
- std::cout << " filtration = " << simplex_tree.filtration() << std::endl << std::endl;
#endif // DEBUG_TRACES
#ifdef DEBUG_TRACES
@@ -257,7 +272,62 @@ int main(int argc, char * const argv[]) {
pcoh.compute_persistent_cohomology(min_persistence);
- pcoh.output_diagram();
+ // Output the diagram in filediag
+ if (output_file_diag.empty()) {
+ pcoh.output_diagram();
+ } else {
+ std::cout << "Result in file: " << output_file_diag << std::endl;
+ std::ofstream out(output_file_diag);
+ pcoh.output_diagram(out);
+ out.close();
+ }
return 0;
}
+
+void program_options(int argc, char *argv[], std::string &off_file_points, std::string &weight_file,
+ std::string &output_file_diag, int &coeff_field_characteristic,
+ Filtration_value &min_persistence) {
+ namespace po = boost::program_options;
+ po::options_description hidden("Hidden options");
+ hidden.add_options()("input-file", po::value<std::string>(&off_file_points),
+ "Name of an OFF file containing a 3d point set.")(
+ "weight-file", po::value<std::string>(&weight_file),
+ "Name of file containing the point weights. Format is one weight per line: W1\n...\nWn ");
+
+ po::options_description visible("Allowed options", 100);
+ visible.add_options()("help,h", "produce help message")(
+ "output-file,o", po::value<std::string>(&output_file_diag)->default_value(std::string()),
+ "Name of file in which the persistence diagram is written. Default print in std::cout")(
+ "field-charac,p", po::value<int>(&coeff_field_characteristic)->default_value(11),
+ "Characteristic p of the coefficient field Z/pZ for computing homology.")(
+ "min-persistence,m", po::value<Filtration_value>(&min_persistence),
+ "Minimal lifetime of homology feature to be recorded. Default is 0. Enter a negative value to see zero length "
+ "intervals");
+
+ po::positional_options_description pos;
+ pos.add("input-file", 1);
+ pos.add("weight-file", 2);
+
+ po::options_description all;
+ all.add(visible).add(hidden);
+
+ po::variables_map vm;
+ po::store(po::command_line_parser(argc, argv).options(all).positional(pos).run(), vm);
+ po::notify(vm);
+
+ if (vm.count("help") || !vm.count("input-file") || !vm.count("weight-file")) {
+ std::cout << std::endl;
+ std::cout << "Compute the persistent homology with coefficient field Z/pZ \n";
+ std::cout << "of a weighted 3D Alpha complex defined on a set of input points.\n \n";
+ std::cout << "The output diagram contains one bar per line, written with the convention: \n";
+ std::cout << " p dim b d \n";
+ std::cout << "where dim is the dimension of the homological feature,\n";
+ std::cout << "b and d are respectively the birth and death of the feature and \n";
+ std::cout << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
+
+ std::cout << "Usage: " << argv[0] << " [options] input-file weight-file" << std::endl << std::endl;
+ std::cout << visible << std::endl;
+ std::abort();
+ }
+}
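
Similarly, the weight-file read by this utility is plain text with one weight per line, one line per point, in the same order as the points of the OFF file; the program exits if the number of weights does not match the number of points. For a three-point input it could look like this (values purely illustrative):

0.5
1.25
0.75
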
diff --git a/src/Persistent_cohomology/example/periodic_alpha_complex_3d_persistence.cpp b/src/Alpha_complex/utilities/weighted_periodic_alpha_complex_3d_persistence.cpp
index 8928cfc2..13634ff7 100644
--- a/src/Persistent_cohomology/example/periodic_alpha_complex_3d_persistence.cpp
+++ b/src/Alpha_complex/utilities/weighted_periodic_alpha_complex_3d_persistence.cpp
@@ -27,8 +27,8 @@
#include <gudhi/Points_3D_off_io.h>
#include <CGAL/Exact_predicates_inexact_constructions_kernel.h>
-#include <CGAL/Periodic_3_Delaunay_triangulation_traits_3.h>
-#include <CGAL/Periodic_3_Delaunay_triangulation_3.h>
+#include <CGAL/Periodic_3_regular_triangulation_traits_3.h>
+#include <CGAL/Periodic_3_regular_triangulation_3.h>
#include <CGAL/Alpha_shape_3.h>
#include <CGAL/iterator.h>
@@ -45,28 +45,31 @@
#include "alpha_complex_3d_helper.h"
// Traits
-using K = CGAL::Exact_predicates_inexact_constructions_kernel;
-using PK = CGAL::Periodic_3_Delaunay_triangulation_traits_3<K>;
+using Kernel = CGAL::Exact_predicates_inexact_constructions_kernel;
+using PK = CGAL::Periodic_3_regular_triangulation_traits_3<Kernel>;
+
// Vertex type
using DsVb = CGAL::Periodic_3_triangulation_ds_vertex_base_3<>;
-using Vb = CGAL::Triangulation_vertex_base_3<PK, DsVb>;
-using AsVb = CGAL::Alpha_shape_vertex_base_3<PK, Vb>;
+using Vb = CGAL::Regular_triangulation_vertex_base_3<PK,DsVb>;
+using AsVb = CGAL::Alpha_shape_vertex_base_3<PK,Vb>;
// Cell type
using DsCb = CGAL::Periodic_3_triangulation_ds_cell_base_3<>;
-using Cb = CGAL::Triangulation_cell_base_3<PK, DsCb>;
-using AsCb = CGAL::Alpha_shape_cell_base_3<PK, Cb>;
-using Tds = CGAL::Triangulation_data_structure_3<AsVb, AsCb>;
-using P3DT3 = CGAL::Periodic_3_Delaunay_triangulation_3<PK, Tds>;
-using Alpha_shape_3 = CGAL::Alpha_shape_3<P3DT3>;
-using Point_3 = PK::Point_3;
+using Cb = CGAL::Regular_triangulation_cell_base_3<PK,DsCb>;
+using AsCb = CGAL::Alpha_shape_cell_base_3<PK,Cb>;
+using Tds = CGAL::Triangulation_data_structure_3<AsVb,AsCb>;
+using P3RT3 = CGAL::Periodic_3_regular_triangulation_3<PK,Tds>;
+using Alpha_shape_3 = CGAL::Alpha_shape_3<P3RT3>;
+
+using Point_3 = P3RT3::Bare_point;
+using Weighted_point_3 = P3RT3::Weighted_point;
// filtration with alpha values needed type definition
using Alpha_value_type = Alpha_shape_3::FT;
using Object = CGAL::Object;
-using Dispatch = CGAL::Dispatch_output_iterator<
- CGAL::cpp11::tuple<Object, Alpha_value_type>,
- CGAL::cpp11::tuple<std::back_insert_iterator< std::vector<Object> >,
- std::back_insert_iterator< std::vector<Alpha_value_type> > > >;
+using Dispatch =
+ CGAL::Dispatch_output_iterator<CGAL::cpp11::tuple<Object, Alpha_value_type>,
+ CGAL::cpp11::tuple<std::back_insert_iterator<std::vector<Object> >,
+ std::back_insert_iterator<std::vector<Alpha_value_type> > > >;
using Cell_handle = Alpha_shape_3::Cell_handle;
using Facet = Alpha_shape_3::Facet;
using Edge_3 = Alpha_shape_3::Edge;
@@ -77,27 +80,27 @@ using Vertex_list = std::list<Alpha_shape_3::Vertex_handle>;
using ST = Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_fast_persistence>;
using Filtration_value = ST::Filtration_value;
using Simplex_tree_vertex = ST::Vertex_handle;
-using Alpha_shape_simplex_tree_map = std::map<Alpha_shape_3::Vertex_handle, Simplex_tree_vertex >;
+using Alpha_shape_simplex_tree_map = std::map<Alpha_shape_3::Vertex_handle, Simplex_tree_vertex>;
using Alpha_shape_simplex_tree_pair = std::pair<Alpha_shape_3::Vertex_handle, Simplex_tree_vertex>;
-using Simplex_tree_vector_vertex = std::vector< Simplex_tree_vertex >;
-using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology<
- ST, Gudhi::persistent_cohomology::Field_Zp >;
+using Simplex_tree_vector_vertex = std::vector<Simplex_tree_vertex>;
+using Persistent_cohomology =
+ Gudhi::persistent_cohomology::Persistent_cohomology<ST, Gudhi::persistent_cohomology::Field_Zp>;
-void usage(char * const progName) {
- std::cerr << "Usage: " << progName <<
- " path_to_file_graph path_to_iso_cuboid_3_file coeff_field_characteristic[integer > 0] min_persistence[float >= -1.0]\n";
+void usage(const std::string& progName) {
+ std::cerr << "Usage: " << progName << " path_to_the_OFF_file path_to_weight_file path_to_the_cuboid_file "
+ "coeff_field_characteristic[integer > 0] min_persistence[float >= -1.0]\n";
exit(-1);
}
-int main(int argc, char * const argv[]) {
+int main(int argc, char* const argv[]) {
// program args management
- if (argc != 5) {
+ if (argc != 6) {
std::cerr << "Error: Number of arguments (" << argc << ") is not correct\n";
usage(argv[0]);
}
- int coeff_field_characteristic = atoi(argv[3]);
- Filtration_value min_persistence = strtof(argv[4], nullptr);
+ int coeff_field_characteristic = atoi(argv[4]);
+ Filtration_value min_persistence = strtof(argv[5], nullptr);
// Read points from file
std::string offInputFile(argv[1]);
@@ -109,30 +112,51 @@ int main(int argc, char * const argv[]) {
usage(argv[0]);
}
+ // Retrieve the triangulation
+ std::vector<Point_3> lp = off_reader.get_point_cloud();
+
+ // Read weights information from file
+ std::ifstream weights_ifstr(argv[2]);
+ std::vector<Weighted_point_3> wp;
+ if (weights_ifstr.good()) {
+ double weight = 0.0;
+ std::size_t index = 0;
+ wp.reserve(lp.size());
+ // Attempt to read each weight as a double; the loop stops if reading fails
+ while ((weights_ifstr >> weight) && (index < lp.size())) {
+ wp.push_back(Weighted_point_3(lp[index], weight));
+ index++;
+ }
+ if (index != lp.size()) {
+ std::cerr << "Bad number of weights in file " << argv[2] << std::endl;
+ usage(argv[0]);
+ }
+ } else {
+ std::cerr << "Unable to read file " << argv[2] << std::endl;
+ usage(argv[0]);
+ }
+
// Read iso_cuboid_3 information from file
- std::ifstream iso_cuboid_str(argv[2]);
+ std::ifstream iso_cuboid_str(argv[3]);
double x_min, y_min, z_min, x_max, y_max, z_max;
if (iso_cuboid_str.good()) {
iso_cuboid_str >> x_min >> y_min >> z_min >> x_max >> y_max >> z_max;
} else {
- std::cerr << "Unable to read file " << argv[2] << std::endl;
+ std::cerr << "Unable to read file " << argv[3] << std::endl;
usage(argv[0]);
}
- // Retrieve the triangulation
- std::vector<Point_3> lp = off_reader.get_point_cloud();
-
// Define the periodic cube
- P3DT3 pdt(PK::Iso_cuboid_3(x_min, y_min, z_min, x_max, y_max, z_max));
+ P3RT3 prt(PK::Iso_cuboid_3(x_min, y_min, z_min, x_max, y_max, z_max));
// Heuristic for inserting large point sets (if pts is reasonably large)
- pdt.insert(lp.begin(), lp.end(), true);
- // As pdt won't be modified anymore switch to 1-sheeted cover if possible
- if (pdt.is_triangulation_in_1_sheet()) pdt.convert_to_1_sheeted_covering();
+ prt.insert(wp.begin(), wp.end(), true);
+ // As prt won't be modified anymore switch to 1-sheeted cover if possible
+ if (prt.is_triangulation_in_1_sheet()) prt.convert_to_1_sheeted_covering();
std::cout << "Periodic Delaunay computed." << std::endl;
// alpha shape construction from points. CGAL has a strange behavior in REGULARIZED mode. This is the default mode
// Maybe need to set it to GENERAL mode
- Alpha_shape_3 as(pdt, 0, Alpha_shape_3::GENERAL);
+ Alpha_shape_3 as(prt, 0, Alpha_shape_3::GENERAL);
// filtration with alpha values from alpha shape
std::vector<Object> the_objects;
@@ -160,29 +184,28 @@ int main(int argc, char * const argv[]) {
Filtration_value filtration_max = 0.0;
for (auto object_iterator : the_objects) {
// Retrieve Alpha shape vertex list from object
- if (const Cell_handle * cell = CGAL::object_cast<Cell_handle>(&object_iterator)) {
+ if (const Cell_handle* cell = CGAL::object_cast<Cell_handle>(&object_iterator)) {
vertex_list = from_cell<Vertex_list, Cell_handle>(*cell);
count_cells++;
if (dim_max < 3) {
// Cell is of dim 3
dim_max = 3;
}
- } else if (const Facet * facet = CGAL::object_cast<Facet>(&object_iterator)) {
+ } else if (const Facet* facet = CGAL::object_cast<Facet>(&object_iterator)) {
vertex_list = from_facet<Vertex_list, Facet>(*facet);
count_facets++;
if (dim_max < 2) {
// Facet is of dim 2
dim_max = 2;
}
- } else if (const Edge_3 * edge = CGAL::object_cast<Edge_3>(&object_iterator)) {
+ } else if (const Edge_3* edge = CGAL::object_cast<Edge_3>(&object_iterator)) {
vertex_list = from_edge<Vertex_list, Edge_3>(*edge);
count_edges++;
if (dim_max < 1) {
// Edge_3 is of dim 1
dim_max = 1;
}
- } else if (const Alpha_shape_3::Vertex_handle * vertex =
- CGAL::object_cast<Alpha_shape_3::Vertex_handle>(&object_iterator)) {
+ } else if (const Vertex_handle* vertex = CGAL::object_cast<Vertex_handle>(&object_iterator)) {
count_vertices++;
vertex_list = from_vertex<Vertex_list, Vertex_handle>(*vertex);
}
@@ -208,7 +231,7 @@ int main(int argc, char * const argv[]) {
}
}
// Construction of the simplex_tree
- Filtration_value filtr = /*std::sqrt*/(*the_alpha_value_iterator);
+ Filtration_value filtr = /*std::sqrt*/ (*the_alpha_value_iterator);
#ifdef DEBUG_TRACES
std::cout << "filtration = " << filtr << std::endl;
#endif // DEBUG_TRACES
@@ -221,8 +244,6 @@ int main(int argc, char * const argv[]) {
else
std::cout << "This shall not happen" << std::endl;
}
- simplex_tree.set_filtration(filtration_max);
- simplex_tree.set_dimension(dim_max);
#ifdef DEBUG_TRACES
std::cout << "vertices \t\t" << count_vertices << std::endl;
@@ -230,12 +251,10 @@ int main(int argc, char * const argv[]) {
std::cout << "facets \t\t" << count_facets << std::endl;
std::cout << "cells \t\t" << count_cells << std::endl;
-
std::cout << "Information of the Simplex Tree: " << std::endl;
std::cout << " Number of vertices = " << simplex_tree.num_vertices() << " ";
std::cout << " Number of simplices = " << simplex_tree.num_simplices() << std::endl << std::endl;
std::cout << " Dimension = " << simplex_tree.dimension() << " ";
- std::cout << " filtration = " << simplex_tree.filtration() << std::endl << std::endl;
#endif // DEBUG_TRACES
#ifdef DEBUG_TRACES
diff --git a/src/Bitmap_cubical_complex/doc/Gudhi_Cubical_Complex_doc.h b/src/Bitmap_cubical_complex/doc/Gudhi_Cubical_Complex_doc.h
index 5963caa3..ee84e201 100644
--- a/src/Bitmap_cubical_complex/doc/Gudhi_Cubical_Complex_doc.h
+++ b/src/Bitmap_cubical_complex/doc/Gudhi_Cubical_Complex_doc.h
@@ -63,7 +63,7 @@ namespace cubical_complex {
* For further details and theory of cubical complexes, please consult \cite kaczynski2004computational as well as the
* following paper \cite peikert2012topological .
*
- * \section cubicalcomplexdatastructure Data structure.
+ * \section cubicalcomplexdatastructure Data structure
*
* The implementation of Cubical complex provides a representation of complexes that occupy a rectangular region in
* \f$\mathbb{R}^n\f$. This extra assumption allows for a memory efficient way of storing cubical complexes in a form
@@ -85,37 +85,14 @@ namespace cubical_complex {
* present in the product that gives the cube \f$C\f$. In a similar way, we can compute boundary and the coboundary of
* each cube. Further details can be found in the literature.
*
- * \section inputformat Input Format.
+ * \section inputformat Input Format
*
* In the current implantation, filtration is given at the maximal cubes, and it is then extended by the lower star
* filtration to all cubes. There are a number of constructors that can be used to construct cubical complex by users
* who want to use the code directly. They can be found in the \a Bitmap_cubical_complex class.
* Currently one input from a text file is used. It uses a format used already in Perseus software
- * (http://www.sas.upenn.edu/~vnanda/perseus/) by Vidit Nanda.
- * Below we are providing a description of the format. The first line contains a number d begin the dimension of the
- * bitmap (2 in the example below). Next d lines are the numbers of top dimensional cubes in each dimensions (3 and 3
- * in the example below). Next, in lexicographical order, the filtration of top dimensional cubes is given (1 4 6 8
- * 20 4 7 6 5 in the example below).
- *
- *
- * \image html "exampleBitmap.png" "Example of a input data."
- *
- * The input file for the following complex is:
- * \verbatim
-2
-3
-3
-1
-4
-6
-8
-20
-4
-7
-6
-5
-\endverbatim
-
+ * (http://www.sas.upenn.edu/~vnanda/perseus/) by Vidit Nanda. The file format is described here: \ref FileFormatsPerseus.
+ *
* \section PeriodicBoundaryConditions Periodic boundary conditions
* Often one would like to impose periodic boundary conditions to the cubical complex. Let \f$ I_1\times ... \times
* I_n \f$ be a box that is decomposed with a cubical complex \f$ \mathcal{K} \f$. Imposing periodic boundary
@@ -123,28 +100,10 @@ namespace cubical_complex {
* considered the same. In particular, if for a bitmap \f$ \mathcal{K} \f$ periodic boundary conditions are imposed
* in all directions, then complex \f$ \mathcal{K} \f$ became n-dimensional torus. One can use various constructors
* from the file Bitmap_cubical_complex_periodic_boundary_conditions_base.h to construct cubical complex with periodic
- * boundary conditions. One can also use Perseus style input files. To indicate periodic boundary conditions in a
- * given direction, then number of top dimensional cells in this direction have to be multiplied by -1. For instance:
-
- *\verbatim
-2
--3
-3
-1
-4
-6
-8
-20
-4
-7
-6
-5
-\endverbatim
-
- * Indicate that we have imposed periodic boundary conditions in the direction x, but not in the direction y.
-
+ * boundary conditions. One can also use Perseus style input files (see \ref FileFormatsPerseus).
+ *
* \section BitmapExamples Examples
- * End user programs are available in example/Bitmap_cubical_complex folder.
+ * End-user programs are available in the example/Bitmap_cubical_complex and utilities/Bitmap_cubical_complex folders.
*
* \copyright GNU General Public License v3.
*/
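
The bitmap documented above can also be built directly in code, without a Perseus file, through the constructors shown further down in this patch (Bitmap_cubical_complex.h). The following is a rough sketch rather than code from the patch: the 3x3 sizes and filtration values reuse the Perseus example that this hunk removes, and the persistence calls follow the same pattern as the alpha complex utilities above.

// Rough sketch, not part of the patch; sizes and filtration values are illustrative.
#include <gudhi/Bitmap_cubical_complex.h>
#include <gudhi/Persistent_cohomology.h>

#include <vector>

int main() {
  using Bitmap_base = Gudhi::cubical_complex::Bitmap_cubical_complex_base<double>;
  using Bitmap = Gudhi::cubical_complex::Bitmap_cubical_complex<Bitmap_base>;
  using Field_Zp = Gudhi::persistent_cohomology::Field_Zp;
  using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology<Bitmap, Field_Zp>;

  // Two directions with three top dimensional cubes each; the filtration is given
  // on the maximal cubes and extended to all faces by the lower star filtration.
  std::vector<unsigned> sizes = {3, 3};
  std::vector<double> top_cells = {1., 4., 6., 8., 20., 4., 7., 6., 5.};
  Bitmap complex(sizes, top_cells);

  Persistent_cohomology pcoh(complex, true);
  pcoh.init_coefficients(2);               // homology with Z/2Z coefficients
  pcoh.compute_persistent_cohomology(0.);  // discard zero length intervals
  pcoh.output_diagram();                   // one bar per line: p dim b d
  return 0;
}

The output follows the same "p dim b d" convention as the persistence utilities earlier in this patch.
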
diff --git a/src/Bitmap_cubical_complex/example/CMakeLists.txt b/src/Bitmap_cubical_complex/example/CMakeLists.txt
index a0401619..99304aa4 100644
--- a/src/Bitmap_cubical_complex/example/CMakeLists.txt
+++ b/src/Bitmap_cubical_complex/example/CMakeLists.txt
@@ -1,17 +1,6 @@
cmake_minimum_required(VERSION 2.6)
project(Bitmap_cubical_complex_examples)
-add_executable ( Bitmap_cubical_complex Bitmap_cubical_complex.cpp )
-if (TBB_FOUND)
- target_link_libraries(Bitmap_cubical_complex ${TBB_LIBRARIES})
-endif()
-
-add_test(NAME Bitmap_cubical_complex_example_persistence_one_sphere COMMAND $<TARGET_FILE:Bitmap_cubical_complex>
- "${CMAKE_SOURCE_DIR}/data/bitmap/CubicalOneSphere.txt")
-
-add_test(NAME Bitmap_cubical_complex_example_persistence_two_sphere COMMAND $<TARGET_FILE:Bitmap_cubical_complex>
- "${CMAKE_SOURCE_DIR}/data/bitmap/CubicalTwoSphere.txt")
-
add_executable ( Random_bitmap_cubical_complex Random_bitmap_cubical_complex.cpp )
if (TBB_FOUND)
target_link_libraries(Random_bitmap_cubical_complex ${TBB_LIBRARIES})
@@ -19,19 +8,4 @@ endif()
add_test(NAME Bitmap_cubical_complex_example_random COMMAND $<TARGET_FILE:Random_bitmap_cubical_complex>
"2" "100" "100")
-add_executable ( Bitmap_cubical_complex_periodic_boundary_conditions Bitmap_cubical_complex_periodic_boundary_conditions.cpp )
-if (TBB_FOUND)
- target_link_libraries(Bitmap_cubical_complex_periodic_boundary_conditions ${TBB_LIBRARIES})
-endif()
-
-add_test(NAME Bitmap_cubical_complex_example_periodic_boundary_conditions_2d_torus
- COMMAND $<TARGET_FILE:Bitmap_cubical_complex_periodic_boundary_conditions>
- "${CMAKE_SOURCE_DIR}/data/bitmap/2d_torus.txt")
-
-add_test(NAME Bitmap_cubical_complex_example_periodic_boundary_conditions_3d_torus
- COMMAND $<TARGET_FILE:Bitmap_cubical_complex_periodic_boundary_conditions>
- "${CMAKE_SOURCE_DIR}/data/bitmap/3d_torus.txt")
-
-install(TARGETS Bitmap_cubical_complex DESTINATION bin)
install(TARGETS Random_bitmap_cubical_complex DESTINATION bin)
-install(TARGETS Bitmap_cubical_complex_periodic_boundary_conditions DESTINATION bin)
diff --git a/src/Bitmap_cubical_complex/example/Random_bitmap_cubical_complex.cpp b/src/Bitmap_cubical_complex/example/Random_bitmap_cubical_complex.cpp
index 16ad65a0..f70558f2 100644
--- a/src/Bitmap_cubical_complex/example/Random_bitmap_cubical_complex.cpp
+++ b/src/Bitmap_cubical_complex/example/Random_bitmap_cubical_complex.cpp
@@ -20,7 +20,6 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-
// for persistence algorithm
#include <gudhi/reader_utils.h>
#include <gudhi/Bitmap_cubical_complex.h>
@@ -34,10 +33,11 @@
int main(int argc, char** argv) {
srand(time(0));
- std::cout << "This program computes persistent homology, by using bitmap_cubical_complex class, of cubical " <<
- "complexes. The first parameter of the program is the dimension D of the bitmap. The next D parameters are " <<
- "number of top dimensional cubes in each dimension of the bitmap. The program will create random cubical " <<
- "complex of that sizes and compute persistent homology of it." << std::endl;
+ std::cout
+ << "This program computes persistent homology, by using the Bitmap_cubical_complex class, of cubical "
+ << "complexes. The first parameter of the program is the dimension D of the bitmap. The next D parameters "
+ << "are the numbers of top dimensional cubes in each dimension of the bitmap. The program will create a "
+ << "random cubical complex of those sizes and compute its persistent homology." << std::endl;
int p = 2;
double min_persistence = 0;
@@ -47,16 +47,16 @@ int main(int argc, char** argv) {
return 1;
}
- size_t dimensionOfBitmap = (size_t) atoi(argv[1]);
- std::vector< unsigned > sizes;
+ size_t dimensionOfBitmap = (size_t)atoi(argv[1]);
+ std::vector<unsigned> sizes;
size_t multipliers = 1;
for (size_t dim = 0; dim != dimensionOfBitmap; ++dim) {
- unsigned sizeInThisDimension = (unsigned) atoi(argv[2 + dim]);
+ unsigned sizeInThisDimension = (unsigned)atoi(argv[2 + dim]);
sizes.push_back(sizeInThisDimension);
multipliers *= sizeInThisDimension;
}
- std::vector< double > data;
+ std::vector<double> data;
for (size_t i = 0; i != multipliers; ++i) {
data.push_back(rand() / static_cast<double>(RAND_MAX));
}
@@ -80,4 +80,3 @@ int main(int argc, char** argv) {
return 0;
}
-
diff --git a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h
index f395de65..969daba6 100644
--- a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h
+++ b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h
@@ -31,10 +31,11 @@
#endif
#include <limits>
-#include <utility> // for pair<>
+#include <utility> // for pair<>
#include <algorithm> // for sort
#include <vector>
#include <numeric> // for iota
+#include <cstddef>
namespace Gudhi {
@@ -43,7 +44,8 @@ namespace cubical_complex {
// global variable, was used just for debugging.
const bool globalDbg = false;
-template <typename T> class is_before_in_filtration;
+template <typename T>
+class is_before_in_filtration;
/**
* @brief Cubical complex represented as a bitmap.
@@ -60,11 +62,10 @@ class Bitmap_cubical_complex : public T {
//*********************************************//
// Typedefs and typenames
//*********************************************//
- typedef size_t Simplex_key;
+ typedef std::size_t Simplex_key;
typedef typename T::filtration_type Filtration_value;
typedef Simplex_key Simplex_handle;
-
//*********************************************//
// Constructors
//*********************************************//
@@ -77,12 +78,12 @@ class Bitmap_cubical_complex : public T {
/**
* Constructor form a Perseus-style file.
**/
- Bitmap_cubical_complex(const char* perseus_style_file) :
- T(perseus_style_file), key_associated_to_simplex(this->total_number_of_cells + 1) {
+ Bitmap_cubical_complex(const char* perseus_style_file)
+ : T(perseus_style_file), key_associated_to_simplex(this->total_number_of_cells + 1) {
if (globalDbg) {
std::cerr << "Bitmap_cubical_complex( const char* perseus_style_file )\n";
}
- for (size_t i = 0; i != this->total_number_of_cells; ++i) {
+ for (std::size_t i = 0; i != this->total_number_of_cells; ++i) {
this->key_associated_to_simplex[i] = i;
}
// we initialize this only once, in each constructor, when the bitmap is constructed.
@@ -97,10 +98,9 @@ class Bitmap_cubical_complex : public T {
* with filtration on top dimensional cells.
**/
Bitmap_cubical_complex(const std::vector<unsigned>& dimensions,
- const std::vector<Filtration_value>& top_dimensional_cells) :
- T(dimensions, top_dimensional_cells),
- key_associated_to_simplex(this->total_number_of_cells + 1) {
- for (size_t i = 0; i != this->total_number_of_cells; ++i) {
+ const std::vector<Filtration_value>& top_dimensional_cells)
+ : T(dimensions, top_dimensional_cells), key_associated_to_simplex(this->total_number_of_cells + 1) {
+ for (std::size_t i = 0; i != this->total_number_of_cells; ++i) {
this->key_associated_to_simplex[i] = i;
}
// we initialize this only once, in each constructor, when the bitmap is constructed.
@@ -118,10 +118,10 @@ class Bitmap_cubical_complex : public T {
**/
Bitmap_cubical_complex(const std::vector<unsigned>& dimensions,
const std::vector<Filtration_value>& top_dimensional_cells,
- std::vector< bool > directions_in_which_periodic_b_cond_are_to_be_imposed) :
- T(dimensions, top_dimensional_cells, directions_in_which_periodic_b_cond_are_to_be_imposed),
- key_associated_to_simplex(this->total_number_of_cells + 1) {
- for (size_t i = 0; i != this->total_number_of_cells; ++i) {
+ std::vector<bool> directions_in_which_periodic_b_cond_are_to_be_imposed)
+ : T(dimensions, top_dimensional_cells, directions_in_which_periodic_b_cond_are_to_be_imposed),
+ key_associated_to_simplex(this->total_number_of_cells + 1) {
+ for (std::size_t i = 0; i != this->total_number_of_cells; ++i) {
this->key_associated_to_simplex[i] = i;
}
// we initialize this only once, in each constructor, when the bitmap is constructed.
@@ -142,9 +142,7 @@ class Bitmap_cubical_complex : public T {
/**
* Returns number of all cubes in the complex.
**/
- size_t num_simplices()const {
- return this->total_number_of_cells;
- }
+ std::size_t num_simplices() const { return this->total_number_of_cells; }
/**
* Returns a Simplex_handle to a cube that do not exist in this complex.
@@ -159,14 +157,12 @@ class Bitmap_cubical_complex : public T {
/**
* Returns dimension of the complex.
**/
- inline size_t dimension()const {
- return this->sizes.size();
- }
+ inline std::size_t dimension() const { return this->sizes.size(); }
/**
* Return dimension of a cell pointed by the Simplex_handle.
**/
- inline unsigned dimension(Simplex_handle sh)const {
+ inline unsigned dimension(Simplex_handle sh) const {
if (globalDbg) {
std::cerr << "unsigned dimension(const Simplex_handle& sh)\n";
}
@@ -199,7 +195,7 @@ class Bitmap_cubical_complex : public T {
/**
* Return the key of a cube pointed by the Simplex_handle.
**/
- Simplex_key key(Simplex_handle sh)const {
+ Simplex_key key(Simplex_handle sh) const {
if (globalDbg) {
std::cerr << "Simplex_key key(const Simplex_handle& sh)\n";
}
@@ -217,7 +213,7 @@ class Bitmap_cubical_complex : public T {
std::cerr << "Simplex_handle simplex(Simplex_key key)\n";
}
if (key != null_key()) {
- return this->simplex_associated_to_key[ key ];
+ return this->simplex_associated_to_key[key];
}
return null_simplex();
}
@@ -246,8 +242,8 @@ class Bitmap_cubical_complex : public T {
/**
* Boundary_simplex_range class provides ranges for boundary iterators.
**/
- typedef typename std::vector< Simplex_handle >::iterator Boundary_simplex_iterator;
- typedef typename std::vector< Simplex_handle > Boundary_simplex_range;
+ typedef typename std::vector<Simplex_handle>::iterator Boundary_simplex_iterator;
+ typedef typename std::vector<Simplex_handle> Boundary_simplex_range;
/**
* Filtration_simplex_iterator class provides an iterator though the whole structure in the order of filtration.
@@ -257,13 +253,13 @@ class Bitmap_cubical_complex : public T {
**/
class Filtration_simplex_range;
- class Filtration_simplex_iterator : std::iterator< std::input_iterator_tag, Simplex_handle > {
+ class Filtration_simplex_iterator : std::iterator<std::input_iterator_tag, Simplex_handle> {
// Iterator over all simplices of the complex in the order of the indexing scheme.
// 'value_type' must be 'Simplex_handle'.
public:
- Filtration_simplex_iterator(Bitmap_cubical_complex* b) : b(b), position(0) { }
+ Filtration_simplex_iterator(Bitmap_cubical_complex* b) : b(b), position(0) {}
- Filtration_simplex_iterator() : b(NULL), position(0) { }
+ Filtration_simplex_iterator() : b(NULL), position(0) {}
Filtration_simplex_iterator operator++() {
if (globalDbg) {
@@ -288,14 +284,14 @@ class Bitmap_cubical_complex : public T {
return (*this);
}
- bool operator==(const Filtration_simplex_iterator& rhs)const {
+ bool operator==(const Filtration_simplex_iterator& rhs) const {
if (globalDbg) {
std::cerr << "bool operator == ( const Filtration_simplex_iterator& rhs )\n";
}
- return ( this->position == rhs.position);
+ return (this->position == rhs.position);
}
- bool operator!=(const Filtration_simplex_iterator& rhs)const {
+ bool operator!=(const Filtration_simplex_iterator& rhs) const {
if (globalDbg) {
std::cerr << "bool operator != ( const Filtration_simplex_iterator& rhs )\n";
}
@@ -306,14 +302,14 @@ class Bitmap_cubical_complex : public T {
if (globalDbg) {
std::cerr << "Simplex_handle operator*()\n";
}
- return this->b->simplex_associated_to_key[ this->position ];
+ return this->b->simplex_associated_to_key[this->position];
}
friend class Filtration_simplex_range;
private:
Bitmap_cubical_complex<T>* b;
- size_t position;
+ std::size_t position;
};
/**
@@ -326,7 +322,7 @@ class Bitmap_cubical_complex : public T {
typedef Filtration_simplex_iterator const_iterator;
typedef Filtration_simplex_iterator iterator;
- Filtration_simplex_range(Bitmap_cubical_complex<T>* b) : b(b) { }
+ Filtration_simplex_range(Bitmap_cubical_complex<T>* b) : b(b) {}
Filtration_simplex_iterator begin() {
if (globalDbg) {
@@ -348,8 +344,6 @@ class Bitmap_cubical_complex : public T {
Bitmap_cubical_complex<T>* b;
};
-
-
//*********************************************//
// Methods to access iterators from the container:
@@ -357,9 +351,7 @@ class Bitmap_cubical_complex : public T {
* boundary_simplex_range creates an object of a Boundary_simplex_range class
* that provides ranges for the Boundary_simplex_iterator.
**/
- Boundary_simplex_range boundary_simplex_range(Simplex_handle sh) {
- return this->get_boundary_of_a_cell(sh);
- }
+ Boundary_simplex_range boundary_simplex_range(Simplex_handle sh) { return this->get_boundary_of_a_cell(sh); }
/**
* filtration_simplex_range creates an object of a Filtration_simplex_range class
@@ -374,8 +366,6 @@ class Bitmap_cubical_complex : public T {
}
//*********************************************//
-
-
//*********************************************//
// Elements which are in Gudhi now, but I (and in all the cases I asked also Marc) do not understand why they are
// there.
@@ -390,43 +380,41 @@ class Bitmap_cubical_complex : public T {
* Function needed for compatibility with Gudhi. Not useful for other purposes.
**/
std::pair<Simplex_handle, Simplex_handle> endpoints(Simplex_handle sh) {
- std::vector< size_t > bdry = this->get_boundary_of_a_cell(sh);
+ std::vector<std::size_t> bdry = this->get_boundary_of_a_cell(sh);
if (globalDbg) {
std::cerr << "std::pair<Simplex_handle, Simplex_handle> endpoints( Simplex_handle sh )\n";
std::cerr << "bdry.size() : " << bdry.size() << std::endl;
}
// this method returns two first elements from the boundary of sh.
if (bdry.size() < 2)
- throw("Error in endpoints in Bitmap_cubical_complex class. The cell have less than two elements in the "
- "boundary.");
+ throw(
+ "Error in endpoints in Bitmap_cubical_complex class. The cell has fewer than two elements in the "
+ "boundary.");
return std::make_pair(bdry[0], bdry[1]);
}
-
/**
* Class needed for compatibility with Gudhi. Not useful for other purposes.
**/
class Skeleton_simplex_range;
- class Skeleton_simplex_iterator : std::iterator< std::input_iterator_tag, Simplex_handle > {
+ class Skeleton_simplex_iterator : std::iterator<std::input_iterator_tag, Simplex_handle> {
// Iterator over all simplices of the complex in the order of the indexing scheme.
// 'value_type' must be 'Simplex_handle'.
public:
- Skeleton_simplex_iterator(Bitmap_cubical_complex* b, size_t d) : b(b), dimension(d) {
+ Skeleton_simplex_iterator(Bitmap_cubical_complex* b, std::size_t d) : b(b), dimension(d) {
if (globalDbg) {
- std::cerr << "Skeleton_simplex_iterator ( Bitmap_cubical_complex* b , size_t d )\n";
+ std::cerr << "Skeleton_simplex_iterator ( Bitmap_cubical_complex* b , std::size_t d )\n";
}
// find the position of the first simplex of a dimension d
this->position = 0;
- while (
- (this->position != b->data.size()) &&
- (this->b->get_dimension_of_a_cell(this->position) != this->dimension)
- ) {
+ while ((this->position != b->data.size()) &&
+ (this->b->get_dimension_of_a_cell(this->position) != this->dimension)) {
++this->position;
}
}
- Skeleton_simplex_iterator() : b(NULL), position(0), dimension(0) { }
+ Skeleton_simplex_iterator() : b(NULL), position(0), dimension(0) {}
Skeleton_simplex_iterator operator++() {
if (globalDbg) {
@@ -434,10 +422,8 @@ class Bitmap_cubical_complex : public T {
}
// increment the position as long as you did not get to the next element of the dimension dimension.
++this->position;
- while (
- (this->position != this->b->data.size()) &&
- (this->b->get_dimension_of_a_cell(this->position) != this->dimension)
- ) {
+ while ((this->position != this->b->data.size()) &&
+ (this->b->get_dimension_of_a_cell(this->position) != this->dimension)) {
++this->position;
}
return (*this);
@@ -459,14 +445,14 @@ class Bitmap_cubical_complex : public T {
return (*this);
}
- bool operator==(const Skeleton_simplex_iterator& rhs)const {
+ bool operator==(const Skeleton_simplex_iterator& rhs) const {
if (globalDbg) {
std::cerr << "bool operator ==\n";
}
- return ( this->position == rhs.position);
+ return (this->position == rhs.position);
}
- bool operator!=(const Skeleton_simplex_iterator& rhs)const {
+ bool operator!=(const Skeleton_simplex_iterator& rhs) const {
if (globalDbg) {
std::cerr << "bool operator != ( const Skeleton_simplex_iterator& rhs )\n";
}
@@ -481,9 +467,10 @@ class Bitmap_cubical_complex : public T {
}
friend class Skeleton_simplex_range;
+
private:
Bitmap_cubical_complex<T>* b;
- size_t position;
+ std::size_t position;
unsigned dimension;
};
@@ -497,7 +484,7 @@ class Bitmap_cubical_complex : public T {
typedef Skeleton_simplex_iterator const_iterator;
typedef Skeleton_simplex_iterator iterator;
- Skeleton_simplex_range(Bitmap_cubical_complex<T>* b, unsigned dimension) : b(b), dimension(dimension) { }
+ Skeleton_simplex_range(Bitmap_cubical_complex<T>* b, unsigned dimension) : b(b), dimension(dimension) {}
Skeleton_simplex_iterator begin() {
if (globalDbg) {
@@ -533,8 +520,8 @@ class Bitmap_cubical_complex : public T {
friend class is_before_in_filtration<T>;
protected:
- std::vector< size_t > key_associated_to_simplex;
- std::vector< size_t > simplex_associated_to_key;
+ std::vector<std::size_t> key_associated_to_simplex;
+ std::vector<std::size_t> simplex_associated_to_key;
}; // Bitmap_cubical_complex
template <typename T>
@@ -542,7 +529,7 @@ void Bitmap_cubical_complex<T>::initialize_simplex_associated_to_key() {
if (globalDbg) {
std::cerr << "void Bitmap_cubical_complex<T>::initialize_elements_ordered_according_to_filtration() \n";
}
- this->simplex_associated_to_key = std::vector<size_t>(this->data.size());
+ this->simplex_associated_to_key = std::vector<std::size_t>(this->data.size());
std::iota(std::begin(simplex_associated_to_key), std::end(simplex_associated_to_key), 0);
#ifdef GUDHI_USE_TBB
tbb::parallel_sort(simplex_associated_to_key.begin(), simplex_associated_to_key.end(),
@@ -552,16 +539,15 @@ void Bitmap_cubical_complex<T>::initialize_simplex_associated_to_key() {
#endif
// we still need to deal here with a key_associated_to_simplex:
- for ( size_t i = 0 ; i != simplex_associated_to_key.size() ; ++i ) {
- this->key_associated_to_simplex[ simplex_associated_to_key[i] ] = i;
+ for (std::size_t i = 0; i != simplex_associated_to_key.size(); ++i) {
+ this->key_associated_to_simplex[simplex_associated_to_key[i]] = i;
}
}
template <typename T>
class is_before_in_filtration {
public:
- explicit is_before_in_filtration(Bitmap_cubical_complex<T> * CC)
- : CC_(CC) { }
+ explicit is_before_in_filtration(Bitmap_cubical_complex<T>* CC) : CC_(CC) {}
bool operator()(const typename Bitmap_cubical_complex<T>::Simplex_handle& sh1,
const typename Bitmap_cubical_complex<T>::Simplex_handle& sh2) const {
@@ -573,8 +559,8 @@ class is_before_in_filtration {
return fil1 < fil2;
}
// in this case they are on the same filtration level, so the dimension decide.
- size_t dim1 = CC_->get_dimension_of_a_cell(sh1);
- size_t dim2 = CC_->get_dimension_of_a_cell(sh2);
+ std::size_t dim1 = CC_->get_dimension_of_a_cell(sh1);
+ std::size_t dim2 = CC_->get_dimension_of_a_cell(sh2);
if (dim1 != dim2) {
return dim1 < dim2;
}
diff --git a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex/counter.h b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex/counter.h
index 4b072f10..705b68a0 100644
--- a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex/counter.h
+++ b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex/counter.h
@@ -25,6 +25,7 @@
#include <iostream>
#include <vector>
+#include <cstddef>
namespace Gudhi {
@@ -63,14 +64,14 @@ class counter {
* If the value of the function is false, that means, that the counter have reached its end-value.
**/
bool increment() {
- size_t i = 0;
+ std::size_t i = 0;
while ((i != this->end.size()) && (this->current[i] == this->end[i])) {
++i;
}
if (i == this->end.size())return false;
++this->current[i];
- for (size_t j = 0; j != i; ++j) {
+ for (std::size_t j = 0; j != i; ++j) {
this->current[j] = this->begin[j];
}
return true;
@@ -80,7 +81,7 @@ class counter {
* Function to check if we are at the end of counter.
**/
bool isFinal() {
- for (size_t i = 0; i != this->current.size(); ++i) {
+ for (std::size_t i = 0; i != this->current.size(); ++i) {
if (this->current[i] == this->end[i])return true;
}
return false;
@@ -93,7 +94,7 @@ class counter {
**/
std::vector< unsigned > find_opposite(const std::vector< bool >& directionsForPeriodicBCond) {
std::vector< unsigned > result;
- for (size_t i = 0; i != this->current.size(); ++i) {
+ for (std::size_t i = 0; i != this->current.size(); ++i) {
if ((this->current[i] == this->end[i]) && (directionsForPeriodicBCond[i] == true)) {
result.push_back(this->begin[i]);
} else {
@@ -108,7 +109,7 @@ class counter {
**/
std::vector< bool > directions_of_finals() {
std::vector< bool > result;
- for (size_t i = 0; i != this->current.size(); ++i) {
+ for (std::size_t i = 0; i != this->current.size(); ++i) {
if (this->current[i] == this->end[i]) {
result.push_back(true);
} else {
@@ -123,7 +124,7 @@ class counter {
**/
friend std::ostream& operator<<(std::ostream& out, const counter& c) {
// std::cerr << "c.current.size() : " << c.current.size() << endl;
- for (size_t i = 0; i != c.current.size(); ++i) {
+ for (std::size_t i = 0; i != c.current.size(); ++i) {
out << c.current[i] << " ";
}
return out;
diff --git a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h
index 0442ac34..bf257be1 100644
--- a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h
+++ b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h
@@ -32,7 +32,9 @@
#include <algorithm>
#include <iterator>
#include <limits>
-#include <utility> // for pair<>
+#include <utility>
+#include <stdexcept>
+#include <cstddef>
namespace Gudhi {
@@ -65,8 +67,7 @@ class Bitmap_cubical_complex_base {
/**
*Default constructor
**/
- Bitmap_cubical_complex_base() :
- total_number_of_cells(0) { }
+ Bitmap_cubical_complex_base() : total_number_of_cells(0) {}
/**
* There are a few constructors of a Bitmap_cubical_complex_base class.
* First one, that takes vector<unsigned>, creates an empty bitmap of a dimension equal
@@ -90,7 +91,7 @@ class Bitmap_cubical_complex_base {
/**
* Destructor of the Bitmap_cubical_complex_base class.
**/
- virtual ~Bitmap_cubical_complex_base() { }
+ virtual ~Bitmap_cubical_complex_base() {}
/**
* The functions get_boundary_of_a_cell, get_coboundary_of_a_cell, get_dimension_of_a_cell
@@ -100,8 +101,10 @@ class Bitmap_cubical_complex_base {
* non-negative integer, indicating a position of a cube in the data structure.
   * In the case of functions that compute (co)boundary, the output is a vector of non-negative integers pointing to
   * the positions of (co)boundary elements of the input cell.
+ * The boundary elements are guaranteed to be returned in an order in which
+ * their incidence coefficients alternate.
*/
- virtual inline std::vector< size_t > get_boundary_of_a_cell(size_t cell)const;
+ virtual inline std::vector<std::size_t> get_boundary_of_a_cell(std::size_t cell) const;
/**
   * The functions get_boundary_of_a_cell, get_coboundary_of_a_cell,
* get_dimension_of_a_cell and get_cell_data are the basic
@@ -112,21 +115,81 @@ class Bitmap_cubical_complex_base {
   * In the case of functions that compute (co)boundary, the output is a vector of
   * non-negative integers pointing to the
   * positions of (co)boundary elements of the input cell.
+ * Note that, unlike in the case of the boundary, the elements here are
+ * not guaranteed to be returned with alternating incidence numbers.
+ *
**/
- virtual inline std::vector< size_t > get_coboundary_of_a_cell(size_t cell)const;
+ virtual inline std::vector<std::size_t> get_coboundary_of_a_cell(std::size_t cell) const;
+
/**
- * In the case of get_dimension_of_a_cell function, the output is a non-negative integer
- * indicating the dimension of a cell.
- **/
- inline unsigned get_dimension_of_a_cell(size_t cell)const;
+ * This procedure computes incidence numbers between cubes. For a cube \f$A\f$ of
+ * dimension n and a cube \f$B \subset A\f$ of dimension n-1, the incidence
+ * between \f$A\f$ and \f$B\f$ is the integer with which \f$B\f$ appears in the boundary of \f$A\f$.
+ * Note that the first parameter is a cube of dimension n,
+ * and the second parameter is one of its faces, of dimension n-1.
+ * Given \f$A = [b_1,e_1] \times \ldots \times [b_{j-1},e_{j-1}] \times [b_{j},e_{j}] \times [b_{j+1},e_{j+1}] \times
+ * \ldots \times [b_{n},e_{n}] \f$
+ * such that \f$ b_{j} \neq e_{j} \f$,
+ * and \f$B = [b_1,e_1] \times \ldots \times [b_{j-1},e_{j-1}] \times [a,a] \times [b_{j+1},e_{j+1}] \times \ldots
+ * \times [b_{n},e_{n}] \f$,
+ * where \f$ a = b_{j}\f$ or \f$ a = e_{j}\f$, the incidence between \f$A\f$ and \f$B\f$
+ * computed by this procedure is given by the formula
+ * \f$ c\ (-1)^{\sum_{i=1}^{j-1} dim [b_{i},e_{i}]} \f$,
+ * where \f$ dim [b_{i},e_{i}] = 0 \f$ if \f$ b_{i}=e_{i} \f$ and 1 otherwise,
+ * and \f$ c = -1 \f$ if \f$ a = b_{j}\f$ and \f$ c = 1 \f$ if \f$ a = e_{j}\f$.
+ * @exception std::logic_error In case the cube \f$B\f$ is not an n-1
+ * dimensional face of the cube \f$A\f$.
+ virtual int compute_incidence_between_cells(std::size_t coface, std::size_t face) const {
+ // first get the counters for coface and face:
+ std::vector<unsigned> coface_counter = this->compute_counter_for_given_cell(coface);
+ std::vector<unsigned> face_counter = this->compute_counter_for_given_cell(face);
+
+ // coface_counter and face_counter should agree at all positions except from one:
+ int number_of_position_in_which_counters_do_not_agree = -1;
+ std::size_t number_of_full_faces_that_comes_before = 0;
+ for (std::size_t i = 0; i != coface_counter.size(); ++i) {
+ if ((coface_counter[i] % 2 == 1) && (number_of_position_in_which_counters_do_not_agree == -1)) {
+ ++number_of_full_faces_that_comes_before;
+ }
+ if (coface_counter[i] != face_counter[i]) {
+ if (number_of_position_in_which_counters_do_not_agree != -1) {
+ std::cout << "Cells given to compute_incidence_between_cells procedure do not form a pair of coface-face.\n";
+ throw std::logic_error(
+ "Cells given to compute_incidence_between_cells procedure do not form a pair of coface-face.");
+ }
+ number_of_position_in_which_counters_do_not_agree = i;
+ }
+ }
+
+ int incidence = 1;
+ if (number_of_full_faces_that_comes_before % 2) incidence = -1;
+ // if the face cell is on the right from coface cell:
+ if (coface_counter[number_of_position_in_which_counters_do_not_agree] + 1 ==
+ face_counter[number_of_position_in_which_counters_do_not_agree]) {
+ incidence *= -1;
+ }
+
+ return incidence;
+ }
+
+ /**
+ * In the case of the get_dimension_of_a_cell function, the output is a non-negative integer
+ * indicating the dimension of a cell.
+ * To compute incidence numbers between cells, use the compute_incidence_between_cells
+ * procedure.
+ **/
+ inline unsigned get_dimension_of_a_cell(std::size_t cell) const;
+
/**
* In the case of get_cell_data, the output parameter is a reference to the value of a cube in a given position.
* This allows reading and changing the value of filtration. Note that if the value of a filtration is changed, the
   * code does not check whether we still have a filtration, i.e. it does not check that the value of the filtration of a cell is
* not smaller than the value of a filtration of its boundary and not greater than the value of its coboundary.
**/
- inline T& get_cell_data(size_t cell);
-
+ inline T& get_cell_data(std::size_t cell);
/**
* Typical input used to construct a baseBitmap class is a filtration given at the top dimensional cells.
@@ -141,33 +204,29 @@ class Bitmap_cubical_complex_base {
/**
* Returns dimension of a complex.
**/
- inline unsigned dimension()const {
- return sizes.size();
- }
+ inline unsigned dimension() const { return sizes.size(); }
/**
* Returns number of all cubes in the data structure.
**/
- inline unsigned size()const {
- return this->data.size();
- }
+ inline unsigned size() const { return this->data.size(); }
/**
   * Writing to stream operator. By using it we get the values T of cells in the order in which they are stored in the
* structure. This procedure is used for debugging purposes.
**/
template <typename K>
- friend std::ostream& operator<<(std::ostream & os, const Bitmap_cubical_complex_base<K>& b);
+ friend std::ostream& operator<<(std::ostream& os, const Bitmap_cubical_complex_base<K>& b);
/**
   * Function that puts the input data into bins. By putting data into bins we mean rounding them to a sequence of values
   * equally distributed in the range of the data.
   * Sometimes, if most of the cells have different birth-death times, the performance of the algorithms to compute
   * persistence gets worse. When dealing with this type of data, one may want to round the values on cells to
- * some number of bins. The function put_data_to_bins( size_t number_of_bins ) is designed for that purpose.
+ * some number of bins. The function put_data_to_bins( std::size_t number_of_bins ) is designed for that purpose.
* The parameter of the function is the number of bins (distinct values) we want to have in the cubical complex.
**/
- void put_data_to_bins(size_t number_of_bins);
+ void put_data_to_bins(std::size_t number_of_bins);
/**
   * Function that puts the input data into bins. By putting data into bins we mean rounding them to a sequence of values
@@ -184,7 +243,7 @@ class Bitmap_cubical_complex_base {
/**
* Functions to find min and max values of filtration.
**/
- std::pair< T, T > min_max_filtration();
+ std::pair<T, T> min_max_filtration();
// ITERATORS
@@ -192,11 +251,9 @@ class Bitmap_cubical_complex_base {
* @brief Iterator through all cells in the complex (in order they appear in the structure -- i.e.
* in lexicographical order).
**/
- class All_cells_iterator : std::iterator< std::input_iterator_tag, T > {
+ class All_cells_iterator : std::iterator<std::input_iterator_tag, T> {
public:
- All_cells_iterator() {
- this->counter = 0;
- }
+ All_cells_iterator() { this->counter = 0; }
All_cells_iterator operator++() {
// first find first element of the counter that can be increased:
@@ -215,14 +272,12 @@ class Bitmap_cubical_complex_base {
return *this;
}
- bool operator==(const All_cells_iterator& rhs)const {
- if (this->counter != rhs.counter)return false;
+ bool operator==(const All_cells_iterator& rhs) const {
+ if (this->counter != rhs.counter) return false;
return true;
}
- bool operator!=(const All_cells_iterator& rhs)const {
- return !(*this == rhs);
- }
+ bool operator!=(const All_cells_iterator& rhs) const { return !(*this == rhs); }
/*
* The operator * returns position of a cube in the structure of cubical complex. This position can be then used as
@@ -231,12 +286,11 @@ class Bitmap_cubical_complex_base {
* boundary and coboundary and dimension
* and in function get_cell_data to get a filtration of a cell.
*/
- size_t operator*() {
- return this->counter;
- }
+ std::size_t operator*() { return this->counter; }
friend class Bitmap_cubical_complex_base;
+
protected:
- size_t counter;
+ std::size_t counter;
};
/**
@@ -261,71 +315,61 @@ class Bitmap_cubical_complex_base {
**/
class All_cells_range {
public:
- All_cells_range(Bitmap_cubical_complex_base* b) : b(b) { }
+ All_cells_range(Bitmap_cubical_complex_base* b) : b(b) {}
- All_cells_iterator begin() {
- return b->all_cells_iterator_begin();
- }
+ All_cells_iterator begin() { return b->all_cells_iterator_begin(); }
+
+ All_cells_iterator end() { return b->all_cells_iterator_end(); }
- All_cells_iterator end() {
- return b->all_cells_iterator_end();
- }
private:
Bitmap_cubical_complex_base<T>* b;
};
- All_cells_range all_cells_range() {
- return All_cells_range(this);
- }
-
+ All_cells_range all_cells_range() { return All_cells_range(this); }
/**
* Boundary_range class provides ranges for boundary iterators.
**/
- typedef typename std::vector< size_t >::const_iterator Boundary_iterator;
- typedef typename std::vector< size_t > Boundary_range;
+ typedef typename std::vector<std::size_t>::const_iterator Boundary_iterator;
+ typedef typename std::vector<std::size_t> Boundary_range;
/**
   * boundary_range creates an object of the Boundary_range class
   * that provides ranges for the Boundary_iterator.
**/
- Boundary_range boundary_range(size_t sh) {
- return this->get_boundary_of_a_cell(sh);
- }
+ Boundary_range boundary_range(std::size_t sh) { return this->get_boundary_of_a_cell(sh); }
/**
   * Coboundary_range class provides ranges for coboundary iterators.
**/
- typedef typename std::vector< size_t >::const_iterator Coboundary_iterator;
- typedef typename std::vector< size_t > Coboundary_range;
+ typedef typename std::vector<std::size_t>::const_iterator Coboundary_iterator;
+ typedef typename std::vector<std::size_t> Coboundary_range;
/**
   * coboundary_range creates an object of the Coboundary_range class
   * that provides ranges for the Coboundary_iterator.
**/
- Coboundary_range coboundary_range(size_t sh) {
- return this->get_coboundary_of_a_cell(sh);
- }
+ Coboundary_range coboundary_range(std::size_t sh) { return this->get_coboundary_of_a_cell(sh); }
/**
* @brief Iterator through top dimensional cells of the complex. The cells appear in order they are stored
* in the structure (i.e. in lexicographical order)
**/
- class Top_dimensional_cells_iterator : std::iterator< std::input_iterator_tag, T > {
+ class Top_dimensional_cells_iterator : std::iterator<std::input_iterator_tag, T> {
public:
Top_dimensional_cells_iterator(Bitmap_cubical_complex_base& b) : b(b) {
- this->counter = std::vector<size_t>(b.dimension());
+ this->counter = std::vector<std::size_t>(b.dimension());
// std::fill( this->counter.begin() , this->counter.end() , 0 );
}
Top_dimensional_cells_iterator operator++() {
// first find first element of the counter that can be increased:
- size_t dim = 0;
- while ((dim != this->b.dimension()) && (this->counter[dim] == this->b.sizes[dim] - 1))++dim;
+ std::size_t dim = 0;
+ while ((dim != this->b.dimension()) && (this->counter[dim] == this->b.sizes[dim] - 1)) ++dim;
if (dim != this->b.dimension()) {
++this->counter[dim];
- for (size_t i = 0; i != dim; ++i) {
+ for (std::size_t i = 0; i != dim; ++i) {
this->counter[i] = 0;
}
} else {
@@ -346,18 +390,16 @@ class Bitmap_cubical_complex_base {
return *this;
}
- bool operator==(const Top_dimensional_cells_iterator& rhs)const {
- if (&this->b != &rhs.b)return false;
- if (this->counter.size() != rhs.counter.size())return false;
- for (size_t i = 0; i != this->counter.size(); ++i) {
- if (this->counter[i] != rhs.counter[i])return false;
+ bool operator==(const Top_dimensional_cells_iterator& rhs) const {
+ if (&this->b != &rhs.b) return false;
+ if (this->counter.size() != rhs.counter.size()) return false;
+ for (std::size_t i = 0; i != this->counter.size(); ++i) {
+ if (this->counter[i] != rhs.counter[i]) return false;
}
return true;
}
- bool operator!=(const Top_dimensional_cells_iterator& rhs)const {
- return !(*this == rhs);
- }
+ bool operator!=(const Top_dimensional_cells_iterator& rhs) const { return !(*this == rhs); }
/*
* The operator * returns position of a cube in the structure of cubical complex. This position can be then used as
@@ -366,26 +408,25 @@ class Bitmap_cubical_complex_base {
* boundary and coboundary and dimension
* and in function get_cell_data to get a filtration of a cell.
*/
- size_t operator*() {
- return this->compute_index_in_bitmap();
- }
+ std::size_t operator*() { return this->compute_index_in_bitmap(); }
- size_t compute_index_in_bitmap()const {
- size_t index = 0;
- for (size_t i = 0; i != this->counter.size(); ++i) {
+ std::size_t compute_index_in_bitmap() const {
+ std::size_t index = 0;
+ for (std::size_t i = 0; i != this->counter.size(); ++i) {
index += (2 * this->counter[i] + 1) * this->b.multipliers[i];
}
return index;
}
- void print_counter()const {
- for (size_t i = 0; i != this->counter.size(); ++i) {
+ void print_counter() const {
+ for (std::size_t i = 0; i != this->counter.size(); ++i) {
std::cout << this->counter[i] << " ";
}
}
friend class Bitmap_cubical_complex_base;
+
protected:
- std::vector< size_t > counter;
+ std::vector<std::size_t> counter;
Bitmap_cubical_complex_base& b;
};
@@ -402,7 +443,7 @@ class Bitmap_cubical_complex_base {
**/
Top_dimensional_cells_iterator top_dimensional_cells_iterator_end() {
Top_dimensional_cells_iterator a(*this);
- for (size_t i = 0; i != this->dimension(); ++i) {
+ for (std::size_t i = 0; i != this->dimension(); ++i) {
a.counter[i] = this->sizes[i] - 1;
}
a.counter[0]++;
@@ -414,32 +455,24 @@ class Bitmap_cubical_complex_base {
**/
class Top_dimensional_cells_range {
public:
- Top_dimensional_cells_range(Bitmap_cubical_complex_base* b) : b(b) { }
+ Top_dimensional_cells_range(Bitmap_cubical_complex_base* b) : b(b) {}
- Top_dimensional_cells_iterator begin() {
- return b->top_dimensional_cells_iterator_begin();
- }
+ Top_dimensional_cells_iterator begin() { return b->top_dimensional_cells_iterator_begin(); }
+
+ Top_dimensional_cells_iterator end() { return b->top_dimensional_cells_iterator_end(); }
- Top_dimensional_cells_iterator end() {
- return b->top_dimensional_cells_iterator_end();
- }
private:
Bitmap_cubical_complex_base<T>* b;
};
- Top_dimensional_cells_range top_dimensional_cells_range() {
- return Top_dimensional_cells_range(this);
- }
-
+ Top_dimensional_cells_range top_dimensional_cells_range() { return Top_dimensional_cells_range(this); }
//****************************************************************************************************************//
//****************************************************************************************************************//
//****************************************************************************************************************//
//****************************************************************************************************************//
- inline size_t number_cells()const {
- return this->total_number_of_cells;
- }
+ inline std::size_t number_cells() const { return this->total_number_of_cells; }
//****************************************************************************************************************//
//****************************************************************************************************************//
@@ -450,11 +483,11 @@ class Bitmap_cubical_complex_base {
std::vector<unsigned> sizes;
std::vector<unsigned> multipliers;
std::vector<T> data;
- size_t total_number_of_cells;
+ std::size_t total_number_of_cells;
void set_up_containers(const std::vector<unsigned>& sizes) {
unsigned multiplier = 1;
- for (size_t i = 0; i != sizes.size(); ++i) {
+ for (std::size_t i = 0; i != sizes.size(); ++i) {
this->sizes.push_back(sizes[i]);
this->multipliers.push_back(multiplier);
multiplier *= 2 * sizes[i] + 1;
@@ -463,18 +496,18 @@ class Bitmap_cubical_complex_base {
this->total_number_of_cells = multiplier;
}
- size_t compute_position_in_bitmap(const std::vector< unsigned >& counter) {
- size_t position = 0;
- for (size_t i = 0; i != this->multipliers.size(); ++i) {
+ std::size_t compute_position_in_bitmap(const std::vector<unsigned>& counter) {
+ std::size_t position = 0;
+ for (std::size_t i = 0; i != this->multipliers.size(); ++i) {
position += this->multipliers[i] * counter[i];
}
return position;
}
- std::vector<unsigned> compute_counter_for_given_cell(size_t cell)const {
+ std::vector<unsigned> compute_counter_for_given_cell(std::size_t cell) const {
std::vector<unsigned> counter;
counter.reserve(this->sizes.size());
- for (size_t dim = this->sizes.size(); dim != 0; --dim) {
+ for (std::size_t dim = this->sizes.size(); dim != 0; --dim) {
counter.push_back(cell / this->multipliers[dim - 1]);
cell = cell % this->multipliers[dim - 1];
}
@@ -486,96 +519,94 @@ class Bitmap_cubical_complex_base {
const std::vector<T>& top_dimensional_cells);
Bitmap_cubical_complex_base(const char* perseus_style_file, std::vector<bool> directions);
Bitmap_cubical_complex_base(const std::vector<unsigned>& sizes, std::vector<bool> directions);
- Bitmap_cubical_complex_base(const std::vector<unsigned>& dimensions,
- const std::vector<T>& top_dimensional_cells,
+ Bitmap_cubical_complex_base(const std::vector<unsigned>& dimensions, const std::vector<T>& top_dimensional_cells,
std::vector<bool> directions);
};
template <typename T>
-void Bitmap_cubical_complex_base<T>::put_data_to_bins(size_t number_of_bins) {
- bool bdg = false;
+void Bitmap_cubical_complex_base<T>::put_data_to_bins(std::size_t number_of_bins) {
+ bool dbg = false;
- std::pair< T, T > min_max = this->min_max_filtration();
- T dx = (min_max.second - min_max.first) / (T) number_of_bins;
+ std::pair<T, T> min_max = this->min_max_filtration();
+ T dx = (min_max.second - min_max.first) / (T)number_of_bins;
// now put the data into the appropriate bins:
- for (size_t i = 0; i != this->data.size(); ++i) {
- if (bdg) {
+ for (std::size_t i = 0; i != this->data.size(); ++i) {
+ if (dbg) {
std::cerr << "Before binning : " << this->data[i] << std::endl;
}
this->data[i] = min_max.first + dx * (this->data[i] - min_max.first) / number_of_bins;
- if (bdg) {
+ if (dbg) {
std::cerr << "After binning : " << this->data[i] << std::endl;
- getchar();
}
}
}
template <typename T>
void Bitmap_cubical_complex_base<T>::put_data_to_bins(T diameter_of_bin) {
- bool bdg = false;
- std::pair< T, T > min_max = this->min_max_filtration();
+ bool dbg = false;
+ std::pair<T, T> min_max = this->min_max_filtration();
- size_t number_of_bins = (min_max.second - min_max.first) / diameter_of_bin;
+ std::size_t number_of_bins = (min_max.second - min_max.first) / diameter_of_bin;
// now put the data into the appropriate bins:
- for (size_t i = 0; i != this->data.size(); ++i) {
- if (bdg) {
+ for (std::size_t i = 0; i != this->data.size(); ++i) {
+ if (dbg) {
std::cerr << "Before binning : " << this->data[i] << std::endl;
}
this->data[i] = min_max.first + diameter_of_bin * (this->data[i] - min_max.first) / number_of_bins;
- if (bdg) {
+ if (dbg) {
std::cerr << "After binning : " << this->data[i] << std::endl;
- getchar();
}
}
}
template <typename T>
-std::pair< T, T > Bitmap_cubical_complex_base<T>::min_max_filtration() {
- std::pair< T, T > min_max(std::numeric_limits<T>::max(), std::numeric_limits<T>::min());
- for (size_t i = 0; i != this->data.size(); ++i) {
- if (this->data[i] < min_max.first)min_max.first = this->data[i];
- if (this->data[i] > min_max.second)min_max.second = this->data[i];
+std::pair<T, T> Bitmap_cubical_complex_base<T>::min_max_filtration() {
+ std::pair<T, T> min_max(std::numeric_limits<T>::max(), std::numeric_limits<T>::min());
+ for (std::size_t i = 0; i != this->data.size(); ++i) {
+ if (this->data[i] < min_max.first) min_max.first = this->data[i];
+ if (this->data[i] > min_max.second) min_max.second = this->data[i];
}
return min_max;
}
template <typename K>
-std::ostream& operator<<(std::ostream & out, const Bitmap_cubical_complex_base<K>& b) {
- for (typename Bitmap_cubical_complex_base<K>::all_cells_const_iterator
- it = b.all_cells_const_begin(); it != b.all_cells_const_end(); ++it) {
+std::ostream& operator<<(std::ostream& out, const Bitmap_cubical_complex_base<K>& b) {
+ for (typename Bitmap_cubical_complex_base<K>::all_cells_const_iterator it = b.all_cells_const_begin();
+ it != b.all_cells_const_end(); ++it) {
out << *it << " ";
}
return out;
}
template <typename T>
-Bitmap_cubical_complex_base<T>::Bitmap_cubical_complex_base
-(const std::vector<unsigned>& sizes) {
+Bitmap_cubical_complex_base<T>::Bitmap_cubical_complex_base(const std::vector<unsigned>& sizes) {
this->set_up_containers(sizes);
}
template <typename T>
-void Bitmap_cubical_complex_base<T>::setup_bitmap_based_on_top_dimensional_cells_list(const std::vector<unsigned>& sizes_in_following_directions,
- const std::vector<T>& top_dimensional_cells) {
+void Bitmap_cubical_complex_base<T>::setup_bitmap_based_on_top_dimensional_cells_list(
+ const std::vector<unsigned>& sizes_in_following_directions, const std::vector<T>& top_dimensional_cells) {
this->set_up_containers(sizes_in_following_directions);
- size_t number_of_top_dimensional_elements = 1;
- for (size_t i = 0; i != sizes_in_following_directions.size(); ++i) {
+ std::size_t number_of_top_dimensional_elements = 1;
+ for (std::size_t i = 0; i != sizes_in_following_directions.size(); ++i) {
number_of_top_dimensional_elements *= sizes_in_following_directions[i];
}
if (number_of_top_dimensional_elements != top_dimensional_cells.size()) {
- std::cerr << "Error in constructor Bitmap_cubical_complex_base ( std::vector<size_t> sizes_in_following_directions"
- << ", std::vector<T> top_dimensional_cells ). Number of top dimensional elements that follow from "
- << "sizes_in_following_directions vector is different than the size of top_dimensional_cells vector."
- << std::endl;
- throw("Error in constructor Bitmap_cubical_complex_base( std::vector<size_t> sizes_in_following_directions,"
- "std::vector<T> top_dimensional_cells ). Number of top dimensional elements that follow from "
- "sizes_in_following_directions vector is different than the size of top_dimensional_cells vector.");
+ std::cerr << "Error in constructor Bitmap_cubical_complex_base ( std::vector<std::size_t> "
+ << "sizes_in_following_directions, std::vector<T> top_dimensional_cells ). Number of top dimensional "
+ << "elements that follow from sizes_in_following_directions vector is different than the size of "
+ << "top_dimensional_cells vector."
+ << std::endl;
+ throw(
+ "Error in constructor Bitmap_cubical_complex_base( std::vector<std::size_t> sizes_in_following_directions,"
+ "std::vector<T> top_dimensional_cells ). Number of top dimensional elements that follow from "
+ "sizes_in_following_directions vector is different than the size of top_dimensional_cells vector.");
}
Bitmap_cubical_complex_base<T>::Top_dimensional_cells_iterator it(*this);
- size_t index = 0;
+ std::size_t index = 0;
for (it = this->top_dimensional_cells_iterator_begin(); it != this->top_dimensional_cells_iterator_end(); ++it) {
this->get_cell_data(*it) = top_dimensional_cells[index];
++index;
@@ -584,8 +615,8 @@ void Bitmap_cubical_complex_base<T>::setup_bitmap_based_on_top_dimensional_cells
}
template <typename T>
-Bitmap_cubical_complex_base<T>::Bitmap_cubical_complex_base
-(const std::vector<unsigned>& sizes_in_following_directions, const std::vector<T>& top_dimensional_cells) {
+Bitmap_cubical_complex_base<T>::Bitmap_cubical_complex_base(const std::vector<unsigned>& sizes_in_following_directions,
+ const std::vector<T>& top_dimensional_cells) {
this->setup_bitmap_based_on_top_dimensional_cells_list(sizes_in_following_directions, top_dimensional_cells);
}
@@ -599,15 +630,17 @@ void Bitmap_cubical_complex_base<T>::read_perseus_style_file(const char* perseus
if (dbg) {
std::cerr << "dimensionOfData : " << dimensionOfData << std::endl;
- getchar();
}
std::vector<unsigned> sizes;
sizes.reserve(dimensionOfData);
- for (size_t i = 0; i != dimensionOfData; ++i) {
+ // number of top-dimensional cells, i.e. the product of the sizes in all directions
+ std::size_t dimensions = 1;
+ for (std::size_t i = 0; i != dimensionOfData; ++i) {
unsigned size_in_this_dimension;
inFiltration >> size_in_this_dimension;
sizes.push_back(size_in_this_dimension);
+ dimensions *= size_in_this_dimension;
if (dbg) {
std::cerr << "size_in_this_dimension : " << size_in_this_dimension << std::endl;
}
@@ -617,19 +650,20 @@ void Bitmap_cubical_complex_base<T>::read_perseus_style_file(const char* perseus
Bitmap_cubical_complex_base<T>::Top_dimensional_cells_iterator it(*this);
it = this->top_dimensional_cells_iterator_begin();
- while (!inFiltration.eof()) {
- T filtrationLevel;
- inFiltration >> filtrationLevel;
+ T filtrationLevel;
+ for (std::size_t i = 0; i < dimensions; ++i) {
+ if (!(inFiltration >> filtrationLevel) || (inFiltration.eof())) {
+ throw std::ios_base::failure("Bad Perseus file format.");
+ }
if (dbg) {
- std::cerr << "Cell of an index : "
- << it.compute_index_in_bitmap()
- << " and dimension: "
- << this->get_dimension_of_a_cell(it.compute_index_in_bitmap())
- << " get the value : " << filtrationLevel << std::endl;
+ std::cerr << "Cell of an index : " << it.compute_index_in_bitmap()
+ << " and dimension: " << this->get_dimension_of_a_cell(it.compute_index_in_bitmap())
+ << " get the value : " << filtrationLevel << std::endl;
}
this->get_cell_data(*it) = filtrationLevel;
++it;
}
+
inFiltration.close();
this->impose_lower_star_filtration();
}
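
The stricter loop above reads exactly one filtration value per top-dimensional cell and throws std::ios_base::failure on a truncated or malformed file, instead of silently accepting a file whose number of values does not match the grid. A minimal sketch of a matching Perseus-style input, written and read back in C++; it assumes the GUDHI headers are on the include path, and the file name and the 3x3 filtration values are purely illustrative:

#include <gudhi/Bitmap_cubical_complex_base.h>

#include <fstream>
#include <iostream>

int main() {
  {
    // Perseus-style layout: embedding dimension, then the number of
    // top-dimensional cells in each direction, then one filtration value
    // per top-dimensional cell.
    std::ofstream out("tiny_perseus.txt");
    out << "2\n3\n3\n";
    for (int v = 1; v <= 9; ++v) out << v << "\n";
  }
  // The const char* constructor goes through read_perseus_style_file and
  // then imposes the lower star filtration on the lower dimensional cells.
  Gudhi::cubical_complex::Bitmap_cubical_complex_base<double> cc("tiny_perseus.txt");
  std::cout << "number of cells: " << cc.size() << "\n";  // (2*3+1)^2 = 49
  return 0;
}
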
@@ -668,37 +702,44 @@ Bitmap_cubical_complex_base<T>::Bitmap_cubical_complex_base(const char* perseus_
}
template <typename T>
-std::vector< size_t > Bitmap_cubical_complex_base<T>::get_boundary_of_a_cell(size_t cell)const {
- std::vector< size_t > boundary_elements;
+std::vector<std::size_t> Bitmap_cubical_complex_base<T>::get_boundary_of_a_cell(std::size_t cell) const {
+ std::vector<std::size_t> boundary_elements;
  // Speed traded off for memory. Check if it is better in practice.
- boundary_elements.reserve(this->dimension()*2);
+ boundary_elements.reserve(this->dimension() * 2);
- size_t cell1 = cell;
- for (size_t i = this->multipliers.size(); i != 0; --i) {
+ std::size_t sum_of_dimensions = 0;
+ std::size_t cell1 = cell;
+ for (std::size_t i = this->multipliers.size(); i != 0; --i) {
unsigned position = cell1 / this->multipliers[i - 1];
if (position % 2 == 1) {
- boundary_elements.push_back(cell - this->multipliers[ i - 1 ]);
- boundary_elements.push_back(cell + this->multipliers[ i - 1 ]);
+ if (sum_of_dimensions % 2) {
+ boundary_elements.push_back(cell + this->multipliers[i - 1]);
+ boundary_elements.push_back(cell - this->multipliers[i - 1]);
+ } else {
+ boundary_elements.push_back(cell - this->multipliers[i - 1]);
+ boundary_elements.push_back(cell + this->multipliers[i - 1]);
+ }
+ ++sum_of_dimensions;
}
cell1 = cell1 % this->multipliers[i - 1];
}
+
return boundary_elements;
}
template <typename T>
-std::vector< size_t > Bitmap_cubical_complex_base<T>::get_coboundary_of_a_cell(size_t cell)const {
+std::vector<std::size_t> Bitmap_cubical_complex_base<T>::get_coboundary_of_a_cell(std::size_t cell) const {
std::vector<unsigned> counter = this->compute_counter_for_given_cell(cell);
- std::vector< size_t > coboundary_elements;
- size_t cell1 = cell;
- for (size_t i = this->multipliers.size(); i != 0; --i) {
+ std::vector<std::size_t> coboundary_elements;
+ std::size_t cell1 = cell;
+ for (std::size_t i = this->multipliers.size(); i != 0; --i) {
unsigned position = cell1 / this->multipliers[i - 1];
if (position % 2 == 0) {
if ((cell > this->multipliers[i - 1]) && (counter[i - 1] != 0)) {
coboundary_elements.push_back(cell - this->multipliers[i - 1]);
}
- if (
- (cell + this->multipliers[i - 1] < this->data.size()) && (counter[i - 1] != 2 * this->sizes[i - 1])) {
+ if ((cell + this->multipliers[i - 1] < this->data.size()) && (counter[i - 1] != 2 * this->sizes[i - 1])) {
coboundary_elements.push_back(cell + this->multipliers[i - 1]);
}
}
@@ -708,11 +749,11 @@ std::vector< size_t > Bitmap_cubical_complex_base<T>::get_coboundary_of_a_cell(s
}
template <typename T>
-unsigned Bitmap_cubical_complex_base<T>::get_dimension_of_a_cell(size_t cell)const {
+unsigned Bitmap_cubical_complex_base<T>::get_dimension_of_a_cell(std::size_t cell) const {
bool dbg = false;
  if (dbg) std::cerr << "\n\n\n Computing position of a cell of an index : " << cell << std::endl;
unsigned dimension = 0;
- for (size_t i = this->multipliers.size(); i != 0; --i) {
+ for (std::size_t i = this->multipliers.size(); i != 0; --i) {
unsigned position = cell / this->multipliers[i - 1];
if (dbg) {
@@ -720,7 +761,6 @@ unsigned Bitmap_cubical_complex_base<T>::get_dimension_of_a_cell(size_t cell)con
std::cerr << "cell : " << cell << std::endl;
std::cerr << "position : " << position << std::endl;
std::cerr << "multipliers[" << i - 1 << "] = " << this->multipliers[i - 1] << std::endl;
- getchar();
}
if (position % 2 == 1) {
@@ -733,7 +773,7 @@ unsigned Bitmap_cubical_complex_base<T>::get_dimension_of_a_cell(size_t cell)con
}
template <typename T>
-inline T& Bitmap_cubical_complex_base<T>::get_cell_data(size_t cell) {
+inline T& Bitmap_cubical_complex_base<T>::get_cell_data(std::size_t cell) {
return this->data[cell];
}
@@ -744,12 +784,12 @@ void Bitmap_cubical_complex_base<T>::impose_lower_star_filtration() {
// this vector will be used to check which elements have already been taken care of in imposing lower star filtration
std::vector<bool> is_this_cell_considered(this->data.size(), false);
- size_t size_to_reserve = 1;
- for (size_t i = 0; i != this->multipliers.size(); ++i) {
- size_to_reserve *= (size_t) ((this->multipliers[i] - 1) / 2);
+ std::size_t size_to_reserve = 1;
+ for (std::size_t i = 0; i != this->multipliers.size(); ++i) {
+ size_to_reserve *= (std::size_t)((this->multipliers[i] - 1) / 2);
}
- std::vector<size_t> indices_to_consider;
+ std::vector<std::size_t> indices_to_consider;
indices_to_consider.reserve(size_to_reserve);
// we assume here that we already have a filtration on the top dimensional cells and
// we have to extend it to lower ones.
@@ -761,32 +801,29 @@ void Bitmap_cubical_complex_base<T>::impose_lower_star_filtration() {
while (indices_to_consider.size()) {
if (dbg) {
std::cerr << "indices_to_consider in this iteration \n";
- for (size_t i = 0; i != indices_to_consider.size(); ++i) {
+ for (std::size_t i = 0; i != indices_to_consider.size(); ++i) {
std::cout << indices_to_consider[i] << " ";
}
- getchar();
}
- std::vector<size_t> new_indices_to_consider;
- for (size_t i = 0; i != indices_to_consider.size(); ++i) {
- std::vector<size_t> bd = this->get_boundary_of_a_cell(indices_to_consider[i]);
- for (size_t boundaryIt = 0; boundaryIt != bd.size(); ++boundaryIt) {
+ std::vector<std::size_t> new_indices_to_consider;
+ for (std::size_t i = 0; i != indices_to_consider.size(); ++i) {
+ std::vector<std::size_t> bd = this->get_boundary_of_a_cell(indices_to_consider[i]);
+ for (std::size_t boundaryIt = 0; boundaryIt != bd.size(); ++boundaryIt) {
if (dbg) {
- std::cerr << "filtration of a cell : " << bd[boundaryIt] << " is : " << this->data[ bd[boundaryIt] ]
- << " while of a cell: " << indices_to_consider[i] << " is: " << this->data[ indices_to_consider[i] ]
- << std::endl;
- getchar();
+ std::cerr << "filtration of a cell : " << bd[boundaryIt] << " is : " << this->data[bd[boundaryIt]]
+ << " while of a cell: " << indices_to_consider[i] << " is: " << this->data[indices_to_consider[i]]
+ << std::endl;
}
- if (this->data[ bd[boundaryIt] ] > this->data[ indices_to_consider[i] ]) {
- this->data[ bd[boundaryIt] ] = this->data[ indices_to_consider[i] ];
+ if (this->data[bd[boundaryIt]] > this->data[indices_to_consider[i]]) {
+ this->data[bd[boundaryIt]] = this->data[indices_to_consider[i]];
if (dbg) {
- std::cerr << "Setting the value of a cell : " << bd[boundaryIt] << " to : "
- << this->data[ indices_to_consider[i] ] << std::endl;
- getchar();
+ std::cerr << "Setting the value of a cell : " << bd[boundaryIt]
+ << " to : " << this->data[indices_to_consider[i]] << std::endl;
}
}
- if (is_this_cell_considered[ bd[boundaryIt] ] == false) {
+ if (is_this_cell_considered[bd[boundaryIt]] == false) {
new_indices_to_consider.push_back(bd[boundaryIt]);
- is_this_cell_considered[ bd[boundaryIt] ] = true;
+ is_this_cell_considered[bd[boundaryIt]] = true;
}
}
}
@@ -795,8 +832,8 @@ void Bitmap_cubical_complex_base<T>::impose_lower_star_filtration() {
}
template <typename T>
-bool compareFirstElementsOfTuples(const std::pair< std::pair< T, size_t >, char >& first,
- const std::pair< std::pair< T, size_t >, char >& second) {
+bool compareFirstElementsOfTuples(const std::pair<std::pair<T, std::size_t>, char>& first,
+ const std::pair<std::pair<T, std::size_t>, char>& second) {
if (first.first.first < second.first.first) {
return true;
} else {
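
Taken together, the reordered get_boundary_of_a_cell and the new compute_incidence_between_cells expose a signed boundary operator. As a worked instance of the documented formula, the boundary of the square [0,1]x[0,1] receives coefficient -1 on [0,0]x[0,1], +1 on [1,1]x[0,1], +1 on [0,1]x[0,0] and -1 on [0,1]x[1,1]. A minimal usage sketch combining the two calls; it assumes the GUDHI headers are on the include path, and the 2x2 grid with filtration values 1..4 is purely illustrative:

#include <gudhi/Bitmap_cubical_complex_base.h>

#include <cstddef>
#include <iostream>
#include <vector>

int main() {
  using Base = Gudhi::cubical_complex::Bitmap_cubical_complex_base<double>;

  // A 2x2 grid of top-dimensional cells.
  std::vector<unsigned> sizes{2, 2};
  std::vector<double> top_cells{1., 2., 3., 4.};
  Base cc(sizes, top_cells);

  // For every 2-dimensional cube, list its boundary edges together with their
  // incidence coefficients; with the new ordering the coefficients alternate.
  for (std::size_t cube = 0; cube != cc.size(); ++cube) {
    if (cc.get_dimension_of_a_cell(cube) != 2) continue;
    std::cout << "boundary of cube " << cube << " =";
    for (std::size_t face : cc.get_boundary_of_a_cell(cube)) {
      std::cout << " " << std::showpos << cc.compute_incidence_between_cells(cube, face)
                << std::noshowpos << "*" << face;
    }
    std::cout << "\n";
  }
  return 0;
}
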
diff --git a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h
index c3cc93dd..4a0d1c74 100644
--- a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h
+++ b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h
@@ -28,6 +28,8 @@
#include <cmath>
#include <limits> // for numeric_limits<>
#include <vector>
+#include <stdexcept>
+#include <cstddef>
namespace Gudhi {
@@ -41,7 +43,8 @@ namespace cubical_complex {
/**
* @brief Cubical complex with periodic boundary conditions represented as a bitmap.
* @ingroup cubical_complex
- * @details This is a class implementing a bitmap data structure with periodic boundary conditions. Most of the functions are
+ * @details This is a class implementing a bitmap data structure with periodic boundary conditions. Most of the
+ * functions are
* identical to the functions from Bitmap_cubical_complex_base.
* The ones that needed to be updated are the constructors and get_boundary_of_a_cell and get_coboundary_of_a_cell.
*/
@@ -53,7 +56,7 @@ class Bitmap_cubical_complex_periodic_boundary_conditions_base : public Bitmap_c
/**
* Default constructor of Bitmap_cubical_complex_periodic_boundary_conditions_base class.
*/
- Bitmap_cubical_complex_periodic_boundary_conditions_base() { }
+ Bitmap_cubical_complex_periodic_boundary_conditions_base() {}
/**
* A constructor of Bitmap_cubical_complex_periodic_boundary_conditions_base class that takes the following
* parameters: (1) vector with numbers of top dimensional cells in all dimensions and (2) vector of booleans. If
@@ -61,8 +64,9 @@ class Bitmap_cubical_complex_periodic_boundary_conditions_base : public Bitmap_c
* imposed in this direction. In case of false, the periodic boundary conditions will not be imposed in the direction
* i.
*/
- Bitmap_cubical_complex_periodic_boundary_conditions_base(const std::vector<unsigned>& sizes,
- const std::vector<bool>& directions_in_which_periodic_b_cond_are_to_be_imposed);
+ Bitmap_cubical_complex_periodic_boundary_conditions_base(
+ const std::vector<unsigned>& sizes,
+ const std::vector<bool>& directions_in_which_periodic_b_cond_are_to_be_imposed);
/**
* A constructor of Bitmap_cubical_complex_periodic_boundary_conditions_base class that takes the name of Perseus
* style file as an input. Please consult the documentation about the specification of the file.
@@ -75,9 +79,9 @@ class Bitmap_cubical_complex_periodic_boundary_conditions_base : public Bitmap_c
* value, that means that periodic boundary conditions are to be imposed in this direction. In case of false, the
* periodic boundary conditions will not be imposed in the direction i.
*/
- Bitmap_cubical_complex_periodic_boundary_conditions_base(const std::vector<unsigned>& dimensions,
- const std::vector<T>& topDimensionalCells,
- const std::vector< bool >& directions_in_which_periodic_b_cond_are_to_be_imposed);
+ Bitmap_cubical_complex_periodic_boundary_conditions_base(
+ const std::vector<unsigned>& dimensions, const std::vector<T>& topDimensionalCells,
+ const std::vector<bool>& directions_in_which_periodic_b_cond_are_to_be_imposed);
/**
* Destructor of the Bitmap_cubical_complex_periodic_boundary_conditions_base class.
@@ -88,21 +92,81 @@ class Bitmap_cubical_complex_periodic_boundary_conditions_base : public Bitmap_c
/**
   * A version of a function that returns the boundary of a given cell for an object of
* Bitmap_cubical_complex_periodic_boundary_conditions_base class.
+ * The boundary elements are guaranteed to be returned in an order in which
+ * the incidence coefficients alternate.
*/
- virtual std::vector< size_t > get_boundary_of_a_cell(size_t cell) const;
+ virtual std::vector<std::size_t> get_boundary_of_a_cell(std::size_t cell) const;
/**
   * A version of a function that returns the coboundary of a given cell for an object of
* Bitmap_cubical_complex_periodic_boundary_conditions_base class.
+ * Note that, unlike in the case of the boundary, the elements here are
+ * not guaranteed to be returned with alternating incidence numbers.
+ * To compute incidence numbers between cells, use the compute_incidence_between_cells
+ * procedure.
*/
- virtual std::vector< size_t > get_coboundary_of_a_cell(size_t cell) const;
+ virtual std::vector<std::size_t> get_coboundary_of_a_cell(std::size_t cell) const;
+
+ /**
+ * This procedure computes incidence numbers between cubes. For a cube \f$A\f$ of
+ * dimension n and a cube \f$B \subset A\f$ of dimension n-1, the incidence
+ * between \f$A\f$ and \f$B\f$ is the integer with which \f$B\f$ appears in the boundary of \f$A\f$.
+ * Note that the first parameter is a cube of dimension n,
+ * and the second parameter is one of its faces, of dimension n-1.
+ * Given \f$A = [b_1,e_1] \times \ldots \times [b_{j-1},e_{j-1}] \times [b_{j},e_{j}] \times [b_{j+1},e_{j+1}] \times
+ * \ldots \times [b_{n},e_{n}] \f$
+ * such that \f$ b_{j} \neq e_{j} \f$,
+ * and \f$B = [b_1,e_1] \times \ldots \times [b_{j-1},e_{j-1}] \times [a,a] \times [b_{j+1},e_{j+1}] \times \ldots
+ * \times [b_{n},e_{n}] \f$,
+ * where \f$ a = b_{j}\f$ or \f$ a = e_{j}\f$, the incidence between \f$A\f$ and \f$B\f$
+ * computed by this procedure is given by the formula
+ * \f$ c\ (-1)^{\sum_{i=1}^{j-1} dim [b_{i},e_{i}]} \f$,
+ * where \f$ dim [b_{i},e_{i}] = 0 \f$ if \f$ b_{i}=e_{i} \f$ and 1 otherwise,
+ * and \f$ c = -1 \f$ if \f$ a = b_{j}\f$ and \f$ c = 1 \f$ if \f$ a = e_{j}\f$.
+ * @exception std::logic_error In case the cube \f$B\f$ is not an n-1
+ * dimensional face of the cube \f$A\f$.
+ **/
+ virtual int compute_incidence_between_cells(std::size_t coface, std::size_t face) {
+ // first get the counters for coface and face:
+ std::vector<unsigned> coface_counter = this->compute_counter_for_given_cell(coface);
+ std::vector<unsigned> face_counter = this->compute_counter_for_given_cell(face);
+
+ // coface_counter and face_counter should agree at all positions except from one:
+ int number_of_position_in_which_counters_do_not_agree = -1;
+ std::size_t number_of_full_faces_that_comes_before = 0;
+ for (std::size_t i = 0; i != coface_counter.size(); ++i) {
+ if ((coface_counter[i] % 2 == 1) && (number_of_position_in_which_counters_do_not_agree == -1)) {
+ ++number_of_full_faces_that_comes_before;
+ }
+ if (coface_counter[i] != face_counter[i]) {
+ if (number_of_position_in_which_counters_do_not_agree != -1) {
+ std::cout << "Cells given to compute_incidence_between_cells procedure do not form a pair of coface-face.\n";
+ throw std::logic_error(
+ "Cells given to compute_incidence_between_cells procedure do not form a pair of coface-face.");
+ }
+ number_of_position_in_which_counters_do_not_agree = i;
+ }
+ }
+
+ int incidence = 1;
+ if (number_of_full_faces_that_comes_before % 2) incidence = -1;
+ // if the face cell is on the right from coface cell:
+ if ((coface_counter[number_of_position_in_which_counters_do_not_agree] + 1 ==
+ face_counter[number_of_position_in_which_counters_do_not_agree]) ||
+ ((coface_counter[number_of_position_in_which_counters_do_not_agree] != 1) &&
+ (face_counter[number_of_position_in_which_counters_do_not_agree] == 0))) {
+ incidence *= -1;
+ }
+
+ return incidence;
+ }
protected:
- std::vector< bool > directions_in_which_periodic_b_cond_are_to_be_imposed;
+ std::vector<bool> directions_in_which_periodic_b_cond_are_to_be_imposed;
void set_up_containers(const std::vector<unsigned>& sizes) {
unsigned multiplier = 1;
- for (size_t i = 0; i != sizes.size(); ++i) {
+ for (std::size_t i = 0; i != sizes.size(); ++i) {
this->sizes.push_back(sizes[i]);
this->multipliers.push_back(multiplier);
@@ -119,19 +183,23 @@ class Bitmap_cubical_complex_periodic_boundary_conditions_base : public Bitmap_c
Bitmap_cubical_complex_periodic_boundary_conditions_base(const std::vector<unsigned>& sizes);
Bitmap_cubical_complex_periodic_boundary_conditions_base(const std::vector<unsigned>& dimensions,
const std::vector<T>& topDimensionalCells);
- void construct_complex_based_on_top_dimensional_cells(const std::vector<unsigned>& dimensions,
- const std::vector<T>& topDimensionalCells,
- const std::vector<bool>& directions_in_which_periodic_b_cond_are_to_be_imposed);
+
+ /**
+ * A procedure used to construct the data structures in the class.
+ **/
+ void construct_complex_based_on_top_dimensional_cells(
+ const std::vector<unsigned>& dimensions, const std::vector<T>& topDimensionalCells,
+ const std::vector<bool>& directions_in_which_periodic_b_cond_are_to_be_imposed);
};
template <typename T>
-void Bitmap_cubical_complex_periodic_boundary_conditions_base<T>::construct_complex_based_on_top_dimensional_cells(const std::vector<unsigned>& dimensions,
- const std::vector<T>& topDimensionalCells,
- const std::vector<bool>& directions_in_which_periodic_b_cond_are_to_be_imposed) {
+void Bitmap_cubical_complex_periodic_boundary_conditions_base<T>::construct_complex_based_on_top_dimensional_cells(
+ const std::vector<unsigned>& dimensions, const std::vector<T>& topDimensionalCells,
+ const std::vector<bool>& directions_in_which_periodic_b_cond_are_to_be_imposed) {
this->directions_in_which_periodic_b_cond_are_to_be_imposed = directions_in_which_periodic_b_cond_are_to_be_imposed;
this->set_up_containers(dimensions);
- size_t i = 0;
+ std::size_t i = 0;
for (auto it = this->top_dimensional_cells_iterator_begin(); it != this->top_dimensional_cells_iterator_end(); ++it) {
this->get_cell_data(*it) = topDimensionalCells[i];
++i;
@@ -140,14 +208,16 @@ void Bitmap_cubical_complex_periodic_boundary_conditions_base<T>::construct_comp
}
template <typename T>
-Bitmap_cubical_complex_periodic_boundary_conditions_base<T>::Bitmap_cubical_complex_periodic_boundary_conditions_base(const std::vector<unsigned>& sizes,
- const std::vector<bool>& directions_in_which_periodic_b_cond_are_to_be_imposed) {
+Bitmap_cubical_complex_periodic_boundary_conditions_base<T>::Bitmap_cubical_complex_periodic_boundary_conditions_base(
+ const std::vector<unsigned>& sizes,
+ const std::vector<bool>& directions_in_which_periodic_b_cond_are_to_be_imposed) {
  this->directions_in_which_periodic_b_cond_are_to_be_imposed = directions_in_which_periodic_b_cond_are_to_be_imposed;
this->set_up_containers(sizes);
}
template <typename T>
-Bitmap_cubical_complex_periodic_boundary_conditions_base<T>::Bitmap_cubical_complex_periodic_boundary_conditions_base(const char* perseus_style_file) {
+Bitmap_cubical_complex_periodic_boundary_conditions_base<T>::Bitmap_cubical_complex_periodic_boundary_conditions_base(
+ const char* perseus_style_file) {
// for Perseus style files:
bool dbg = false;
@@ -160,7 +230,7 @@ Bitmap_cubical_complex_periodic_boundary_conditions_base<T>::Bitmap_cubical_comp
std::vector<unsigned> sizes;
sizes.reserve(dimensionOfData);
- for (size_t i = 0; i != dimensionOfData; ++i) {
+ for (std::size_t i = 0; i != dimensionOfData; ++i) {
int size_in_this_dimension;
inFiltration >> size_in_this_dimension;
if (size_in_this_dimension < 0) {
@@ -176,14 +246,12 @@ Bitmap_cubical_complex_periodic_boundary_conditions_base<T>::Bitmap_cubical_comp
while (!inFiltration.eof()) {
double filtrationLevel;
inFiltration >> filtrationLevel;
- if (inFiltration.eof())break;
+ if (inFiltration.eof()) break;
if (dbg) {
- std::cerr << "Cell of an index : "
- << it.compute_index_in_bitmap()
- << " and dimension: "
- << this->get_dimension_of_a_cell(it.compute_index_in_bitmap())
- << " get the value : " << filtrationLevel << std::endl;
+ std::cerr << "Cell of an index : " << it.compute_index_in_bitmap()
+ << " and dimension: " << this->get_dimension_of_a_cell(it.compute_index_in_bitmap())
+ << " get the value : " << filtrationLevel << std::endl;
}
this->get_cell_data(*it) = filtrationLevel;
++it;
@@ -193,24 +261,24 @@ Bitmap_cubical_complex_periodic_boundary_conditions_base<T>::Bitmap_cubical_comp
}
template <typename T>
-Bitmap_cubical_complex_periodic_boundary_conditions_base<T>::Bitmap_cubical_complex_periodic_boundary_conditions_base(const std::vector<unsigned>& sizes) {
+Bitmap_cubical_complex_periodic_boundary_conditions_base<T>::Bitmap_cubical_complex_periodic_boundary_conditions_base(
+ const std::vector<unsigned>& sizes) {
this->directions_in_which_periodic_b_cond_are_to_be_imposed = std::vector<bool>(sizes.size(), false);
this->set_up_containers(sizes);
}
template <typename T>
-Bitmap_cubical_complex_periodic_boundary_conditions_base<T>::Bitmap_cubical_complex_periodic_boundary_conditions_base(const std::vector<unsigned>& dimensions,
- const std::vector<T>& topDimensionalCells) {
+Bitmap_cubical_complex_periodic_boundary_conditions_base<T>::Bitmap_cubical_complex_periodic_boundary_conditions_base(
+ const std::vector<unsigned>& dimensions, const std::vector<T>& topDimensionalCells) {
std::vector<bool> directions_in_which_periodic_b_cond_are_to_be_imposed = std::vector<bool>(dimensions.size(), false);
this->construct_complex_based_on_top_dimensional_cells(dimensions, topDimensionalCells,
directions_in_which_periodic_b_cond_are_to_be_imposed);
}
template <typename T>
-Bitmap_cubical_complex_periodic_boundary_conditions_base<T>::
-Bitmap_cubical_complex_periodic_boundary_conditions_base(const std::vector<unsigned>& dimensions,
- const std::vector<T>& topDimensionalCells,
- const std::vector<bool>& directions_in_which_periodic_b_cond_are_to_be_imposed) {
+Bitmap_cubical_complex_periodic_boundary_conditions_base<T>::Bitmap_cubical_complex_periodic_boundary_conditions_base(
+ const std::vector<unsigned>& dimensions, const std::vector<T>& topDimensionalCells,
+ const std::vector<bool>& directions_in_which_periodic_b_cond_are_to_be_imposed) {
this->construct_complex_based_on_top_dimensional_cells(dimensions, topDimensionalCells,
directions_in_which_periodic_b_cond_are_to_be_imposed);
}
@@ -218,46 +286,65 @@ Bitmap_cubical_complex_periodic_boundary_conditions_base(const std::vector<unsig
// ***********************Methods************************ //
template <typename T>
-std::vector< size_t > Bitmap_cubical_complex_periodic_boundary_conditions_base<T>::get_boundary_of_a_cell(size_t cell) const {
+std::vector<std::size_t> Bitmap_cubical_complex_periodic_boundary_conditions_base<T>::get_boundary_of_a_cell(
+ std::size_t cell) const {
bool dbg = false;
if (dbg) {
std::cerr << "Computations of boundary of a cell : " << cell << std::endl;
}
- std::vector< size_t > boundary_elements;
- size_t cell1 = cell;
- for (size_t i = this->multipliers.size(); i != 0; --i) {
+ std::vector<std::size_t> boundary_elements;
+ boundary_elements.reserve(this->dimension() * 2);
+ std::size_t cell1 = cell;
+ std::size_t sum_of_dimensions = 0;
+
+ for (std::size_t i = this->multipliers.size(); i != 0; --i) {
unsigned position = cell1 / this->multipliers[i - 1];
    // this cell has a nonzero length in this direction, therefore we can compute its boundary in this direction.
-
if (position % 2 == 1) {
// if there are no periodic boundary conditions in this direction, we do not have to do anything.
if (!directions_in_which_periodic_b_cond_are_to_be_imposed[i - 1]) {
// std::cerr << "A\n";
- boundary_elements.push_back(cell - this->multipliers[ i - 1 ]);
- boundary_elements.push_back(cell + this->multipliers[ i - 1 ]);
+ if (sum_of_dimensions % 2) {
+ boundary_elements.push_back(cell - this->multipliers[i - 1]);
+ boundary_elements.push_back(cell + this->multipliers[i - 1]);
+ } else {
+ boundary_elements.push_back(cell + this->multipliers[i - 1]);
+ boundary_elements.push_back(cell - this->multipliers[i - 1]);
+ }
if (dbg) {
- std::cerr << cell - this->multipliers[ i - 1 ] << " " << cell + this->multipliers[ i - 1 ] << " ";
+ std::cerr << cell - this->multipliers[i - 1] << " " << cell + this->multipliers[i - 1] << " ";
}
} else {
      // in this direction we have to impose periodic boundary conditions, so we need to check whether we are at the end.
- if (position != 2 * this->sizes[ i - 1 ] - 1) {
+ if (position != 2 * this->sizes[i - 1] - 1) {
// std::cerr << "B\n";
- boundary_elements.push_back(cell - this->multipliers[ i - 1 ]);
- boundary_elements.push_back(cell + this->multipliers[ i - 1 ]);
+ if (sum_of_dimensions % 2) {
+ boundary_elements.push_back(cell - this->multipliers[i - 1]);
+ boundary_elements.push_back(cell + this->multipliers[i - 1]);
+ } else {
+ boundary_elements.push_back(cell + this->multipliers[i - 1]);
+ boundary_elements.push_back(cell - this->multipliers[i - 1]);
+ }
if (dbg) {
- std::cerr << cell - this->multipliers[ i - 1 ] << " " << cell + this->multipliers[ i - 1 ] << " ";
+ std::cerr << cell - this->multipliers[i - 1] << " " << cell + this->multipliers[i - 1] << " ";
}
} else {
// std::cerr << "C\n";
- boundary_elements.push_back(cell - this->multipliers[ i - 1 ]);
- boundary_elements.push_back(cell - (2 * this->sizes[ i - 1 ] - 1) * this->multipliers[ i - 1 ]);
+ if (sum_of_dimensions % 2) {
+ boundary_elements.push_back(cell - this->multipliers[i - 1]);
+ boundary_elements.push_back(cell - (2 * this->sizes[i - 1] - 1) * this->multipliers[i - 1]);
+ } else {
+ boundary_elements.push_back(cell - (2 * this->sizes[i - 1] - 1) * this->multipliers[i - 1]);
+ boundary_elements.push_back(cell - this->multipliers[i - 1]);
+ }
if (dbg) {
- std::cerr << cell - this->multipliers[ i - 1 ] << " " <<
- cell - (2 * this->sizes[ i - 1 ] - 1) * this->multipliers[ i - 1 ] << " ";
+ std::cerr << cell - this->multipliers[i - 1] << " "
+ << cell - (2 * this->sizes[i - 1] - 1) * this->multipliers[i - 1] << " ";
}
}
}
+ ++sum_of_dimensions;
}
cell1 = cell1 % this->multipliers[i - 1];
}
@@ -265,11 +352,12 @@ std::vector< size_t > Bitmap_cubical_complex_periodic_boundary_conditions_base<T
}
template <typename T>
-std::vector< size_t > Bitmap_cubical_complex_periodic_boundary_conditions_base<T>::get_coboundary_of_a_cell(size_t cell) const {
+std::vector<std::size_t> Bitmap_cubical_complex_periodic_boundary_conditions_base<T>::get_coboundary_of_a_cell(
+ std::size_t cell) const {
std::vector<unsigned> counter = this->compute_counter_for_given_cell(cell);
- std::vector< size_t > coboundary_elements;
- size_t cell1 = cell;
- for (size_t i = this->multipliers.size(); i != 0; --i) {
+ std::vector<std::size_t> coboundary_elements;
+ std::size_t cell1 = cell;
+ for (std::size_t i = this->multipliers.size(); i != 0; --i) {
unsigned position = cell1 / this->multipliers[i - 1];
    // if the cell has zero length in this direction, then it will have a coboundary in this direction.
if (position % 2 == 0) {
@@ -289,7 +377,7 @@ std::vector< size_t > Bitmap_cubical_complex_periodic_boundary_conditions_base<T
} else {
// in this case counter[i-1] == 0.
coboundary_elements.push_back(cell + this->multipliers[i - 1]);
- coboundary_elements.push_back(cell + (2 * this->sizes[ i - 1 ] - 1) * this->multipliers[i - 1]);
+ coboundary_elements.push_back(cell + (2 * this->sizes[i - 1] - 1) * this->multipliers[i - 1]);
}
}
}
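
The periodic variant follows the same pattern; the only difference visible to a caller is that the boundary of a cell at the end of a periodic direction wraps around, and the periodic compute_incidence_between_cells accounts for the sign of the wrapped face. A minimal sketch on a one-dimensional periodic complex (a circle made of three edges); it assumes the GUDHI headers are on the include path, and the zero filtration values are purely illustrative:

#include <gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h>

#include <cstddef>
#include <iostream>
#include <vector>

int main() {
  using Periodic_base =
      Gudhi::cubical_complex::Bitmap_cubical_complex_periodic_boundary_conditions_base<double>;

  // Three top-dimensional cells in one periodic direction: topologically a circle.
  std::vector<unsigned> sizes{3};
  std::vector<double> top_cells{0., 0., 0.};
  std::vector<bool> periodic_directions{true};
  Periodic_base circle(sizes, top_cells, periodic_directions);

  // Print the signed boundary of every edge; the boundary of the last edge
  // wraps around to vertex 0 instead of leaving the bitmap.
  for (std::size_t edge = 0; edge != circle.size(); ++edge) {
    if (circle.get_dimension_of_a_cell(edge) != 1) continue;
    std::cout << "boundary of edge " << edge << " =";
    for (std::size_t vertex : circle.get_boundary_of_a_cell(edge)) {
      std::cout << " " << std::showpos << circle.compute_incidence_between_cells(edge, vertex)
                << std::noshowpos << "*" << vertex;
    }
    std::cout << "\n";
  }
  return 0;
}
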
diff --git a/src/Bitmap_cubical_complex/test/Bitmap_test.cpp b/src/Bitmap_cubical_complex/test/Bitmap_test.cpp
index db90eb94..4af699e9 100644
--- a/src/Bitmap_cubical_complex/test/Bitmap_test.cpp
+++ b/src/Bitmap_cubical_complex/test/Bitmap_test.cpp
@@ -33,17 +33,16 @@
#include <sstream>
#include <vector>
-
typedef Gudhi::cubical_complex::Bitmap_cubical_complex_base<double> Bitmap_cubical_complex_base;
typedef Gudhi::cubical_complex::Bitmap_cubical_complex<Bitmap_cubical_complex_base> Bitmap_cubical_complex;
typedef Gudhi::cubical_complex::Bitmap_cubical_complex_periodic_boundary_conditions_base<double>
-Bitmap_cubical_complex_periodic_boundary_conditions_base;
+ Bitmap_cubical_complex_periodic_boundary_conditions_base;
typedef Gudhi::cubical_complex::Bitmap_cubical_complex<Bitmap_cubical_complex_periodic_boundary_conditions_base>
-Bitmap_cubical_complex_periodic_boundary_conditions;
+ Bitmap_cubical_complex_periodic_boundary_conditions;
BOOST_AUTO_TEST_CASE(check_dimension) {
- std::vector< double > increasingFiltrationOfTopDimensionalCells({1, 2, 3, 4, 5, 6, 7, 8, 9});
+ std::vector<double> increasingFiltrationOfTopDimensionalCells({1, 2, 3, 4, 5, 6, 7, 8, 9});
std::vector<unsigned> dimensions({3, 3});
@@ -52,29 +51,28 @@ BOOST_AUTO_TEST_CASE(check_dimension) {
}
BOOST_AUTO_TEST_CASE(topDimensionalCellsIterator_test) {
- std::vector< double > expectedFiltrationValues1({0, 0, 0, 0, 100, 0, 0, 0, 0});
+ std::vector<double> expectedFiltrationValues1({0, 0, 0, 0, 100, 0, 0, 0, 0});
- std::vector< double > expectedFiltrationValues2({1, 2, 3, 4, 5, 6, 7, 8, 9});
+ std::vector<double> expectedFiltrationValues2({1, 2, 3, 4, 5, 6, 7, 8, 9});
- std::vector< double > increasingFiltrationOfTopDimensionalCells({1, 2, 3, 4, 5, 6, 7, 8, 9});
+ std::vector<double> increasingFiltrationOfTopDimensionalCells({1, 2, 3, 4, 5, 6, 7, 8, 9});
- std::vector< double > oneDimensionalCycle({0, 0, 0, 0, 100, 0, 0, 0, 0});
+ std::vector<double> oneDimensionalCycle({0, 0, 0, 0, 100, 0, 0, 0, 0});
std::vector<unsigned> dimensions({3, 3});
Bitmap_cubical_complex increasing(dimensions, increasingFiltrationOfTopDimensionalCells);
Bitmap_cubical_complex hole(dimensions, oneDimensionalCycle);
-
int i = 0;
- for (Bitmap_cubical_complex::Top_dimensional_cells_iterator
- it = increasing.top_dimensional_cells_iterator_begin(); it != increasing.top_dimensional_cells_iterator_end(); ++it) {
+ for (Bitmap_cubical_complex::Top_dimensional_cells_iterator it = increasing.top_dimensional_cells_iterator_begin();
+ it != increasing.top_dimensional_cells_iterator_end(); ++it) {
BOOST_CHECK(increasing.get_cell_data(*it) == expectedFiltrationValues2[i]);
++i;
}
i = 0;
- for (Bitmap_cubical_complex::Top_dimensional_cells_iterator
- it = hole.top_dimensional_cells_iterator_begin(); it != hole.top_dimensional_cells_iterator_end(); ++it) {
+ for (Bitmap_cubical_complex::Top_dimensional_cells_iterator it = hole.top_dimensional_cells_iterator_begin();
+ it != hole.top_dimensional_cells_iterator_end(); ++it) {
BOOST_CHECK(hole.get_cell_data(*it) == expectedFiltrationValues1[i]);
++i;
}
@@ -100,24 +98,24 @@ BOOST_AUTO_TEST_CASE(compute_boundary_test_1) {
std::vector<double> boundary8;
boundary8.push_back(1);
boundary8.push_back(15);
- boundary8.push_back(7);
boundary8.push_back(9);
+ boundary8.push_back(7);
std::vector<double> boundary9;
boundary9.push_back(2);
boundary9.push_back(16);
std::vector<double> boundary10;
boundary10.push_back(3);
boundary10.push_back(17);
- boundary10.push_back(9);
boundary10.push_back(11);
+ boundary10.push_back(9);
std::vector<double> boundary11;
boundary11.push_back(4);
boundary11.push_back(18);
std::vector<double> boundary12;
boundary12.push_back(5);
boundary12.push_back(19);
- boundary12.push_back(11);
boundary12.push_back(13);
+ boundary12.push_back(11);
std::vector<double> boundary13;
boundary13.push_back(6);
boundary13.push_back(20);
@@ -140,24 +138,24 @@ BOOST_AUTO_TEST_CASE(compute_boundary_test_1) {
std::vector<double> boundary22;
boundary22.push_back(15);
boundary22.push_back(29);
- boundary22.push_back(21);
boundary22.push_back(23);
+ boundary22.push_back(21);
std::vector<double> boundary23;
boundary23.push_back(16);
boundary23.push_back(30);
std::vector<double> boundary24;
boundary24.push_back(17);
boundary24.push_back(31);
- boundary24.push_back(23);
boundary24.push_back(25);
+ boundary24.push_back(23);
std::vector<double> boundary25;
boundary25.push_back(18);
boundary25.push_back(32);
std::vector<double> boundary26;
boundary26.push_back(19);
boundary26.push_back(33);
- boundary26.push_back(25);
boundary26.push_back(27);
+ boundary26.push_back(25);
std::vector<double> boundary27;
boundary27.push_back(20);
boundary27.push_back(34);
@@ -180,24 +178,24 @@ BOOST_AUTO_TEST_CASE(compute_boundary_test_1) {
std::vector<double> boundary36;
boundary36.push_back(29);
boundary36.push_back(43);
- boundary36.push_back(35);
boundary36.push_back(37);
+ boundary36.push_back(35);
std::vector<double> boundary37;
boundary37.push_back(30);
boundary37.push_back(44);
std::vector<double> boundary38;
boundary38.push_back(31);
boundary38.push_back(45);
- boundary38.push_back(37);
boundary38.push_back(39);
+ boundary38.push_back(37);
std::vector<double> boundary39;
boundary39.push_back(32);
boundary39.push_back(46);
std::vector<double> boundary40;
boundary40.push_back(33);
boundary40.push_back(47);
- boundary40.push_back(39);
boundary40.push_back(41);
+ boundary40.push_back(39);
std::vector<double> boundary41;
boundary41.push_back(34);
boundary41.push_back(48);
@@ -214,7 +212,7 @@ BOOST_AUTO_TEST_CASE(compute_boundary_test_1) {
boundary47.push_back(46);
boundary47.push_back(48);
std::vector<double> boundary48;
- std::vector< std::vector<double> > boundaries;
+ std::vector<std::vector<double> > boundaries;
boundaries.push_back(boundary0);
boundaries.push_back(boundary1);
boundaries.push_back(boundary2);
@@ -265,15 +263,13 @@ BOOST_AUTO_TEST_CASE(compute_boundary_test_1) {
boundaries.push_back(boundary47);
boundaries.push_back(boundary48);
-
-
- std::vector< double > increasingFiltrationOfTopDimensionalCells({1, 2, 3, 4, 5, 6, 7, 8, 9});
+ std::vector<double> increasingFiltrationOfTopDimensionalCells({1, 2, 3, 4, 5, 6, 7, 8, 9});
std::vector<unsigned> dimensions({3, 3});
Bitmap_cubical_complex increasing(dimensions, increasingFiltrationOfTopDimensionalCells);
for (size_t i = 0; i != increasing.size(); ++i) {
- std::vector< size_t > bd = increasing.get_boundary_of_a_cell(i);
+ std::vector<size_t> bd = increasing.get_boundary_of_a_cell(i);
for (size_t j = 0; j != bd.size(); ++j) {
BOOST_CHECK(boundaries[i][j] == bd[j]);
}
@@ -281,13 +277,12 @@ BOOST_AUTO_TEST_CASE(compute_boundary_test_1) {
}
BOOST_AUTO_TEST_CASE(compute_boundary_test_2) {
- std::vector< double > increasingFiltrationOfTopDimensionalCells({1, 2, 3, 4, 5, 6, 7, 8, 9});
+ std::vector<double> increasingFiltrationOfTopDimensionalCells({1, 2, 3, 4, 5, 6, 7, 8, 9});
std::vector<unsigned> dimensions({3, 3});
Bitmap_cubical_complex increasing(dimensions, increasingFiltrationOfTopDimensionalCells);
-
std::vector<double> coboundaryElements;
coboundaryElements.push_back(7);
coboundaryElements.push_back(1);
@@ -373,9 +368,10 @@ BOOST_AUTO_TEST_CASE(compute_boundary_test_2) {
coboundaryElements.push_back(40);
coboundaryElements.push_back(41);
coboundaryElements.push_back(47);
+
size_t number = 0;
for (size_t i = 0; i != increasing.size(); ++i) {
- std::vector< size_t > bd = increasing.get_coboundary_of_a_cell(i);
+ std::vector<size_t> bd = increasing.get_coboundary_of_a_cell(i);
for (size_t j = 0; j != bd.size(); ++j) {
BOOST_CHECK(coboundaryElements[number] == bd[j]);
++number;
@@ -384,7 +380,7 @@ BOOST_AUTO_TEST_CASE(compute_boundary_test_2) {
}
BOOST_AUTO_TEST_CASE(compute_boundary_test_3) {
- std::vector< double > increasingFiltrationOfTopDimensionalCells({1, 2, 3, 4, 5, 6, 7, 8, 9});
+ std::vector<double> increasingFiltrationOfTopDimensionalCells({1, 2, 3, 4, 5, 6, 7, 8, 9});
std::vector<unsigned> dimensions({3, 3});
@@ -447,13 +443,13 @@ BOOST_AUTO_TEST_CASE(compute_boundary_test_3) {
}
BOOST_AUTO_TEST_CASE(Filtration_simplex_iterator_test) {
- std::vector< double > increasingFiltrationOfTopDimensionalCells({1, 2, 3, 4, 5, 6, 7, 8, 9});
+ std::vector<double> increasingFiltrationOfTopDimensionalCells({1, 2, 3, 4, 5, 6, 7, 8, 9});
std::vector<unsigned> dimensions({3, 3});
Bitmap_cubical_complex increasing(dimensions, increasingFiltrationOfTopDimensionalCells);
- std::vector< unsigned > dim;
+ std::vector<unsigned> dim;
dim.push_back(0);
dim.push_back(0);
dim.push_back(0);
@@ -555,7 +551,6 @@ BOOST_AUTO_TEST_CASE(Filtration_simplex_iterator_test) {
fil.push_back(9);
fil.push_back(9);
-
Bitmap_cubical_complex::Filtration_simplex_range range = increasing.filtration_simplex_range();
size_t position = 0;
for (Bitmap_cubical_complex::Filtration_simplex_iterator it = range.begin(); it != range.end(); ++it) {
@@ -566,7 +561,7 @@ BOOST_AUTO_TEST_CASE(Filtration_simplex_iterator_test) {
}
BOOST_AUTO_TEST_CASE(boudary_operator_2d_bitmap_with_periodic_bcond) {
- std::vector< double > filtration({0, 0, 0, 0});
+ std::vector<double> filtration({0, 0, 0, 0});
std::vector<unsigned> dimensions({2, 2});
@@ -575,57 +570,56 @@ BOOST_AUTO_TEST_CASE(boudary_operator_2d_bitmap_with_periodic_bcond) {
Bitmap_cubical_complex_periodic_boundary_conditions cmplx(dimensions, filtration, periodic_directions);
BOOST_CHECK(cmplx.dimension() == 2);
-
std::vector<double> boundary0;
std::vector<double> boundary1;
- boundary1.push_back(0);
boundary1.push_back(2);
+ boundary1.push_back(0);
std::vector<double> boundary2;
std::vector<double> boundary3;
- boundary3.push_back(2);
boundary3.push_back(0);
+ boundary3.push_back(2);
std::vector<double> boundary4;
- boundary4.push_back(0);
boundary4.push_back(8);
+ boundary4.push_back(0);
std::vector<double> boundary5;
- boundary5.push_back(1);
boundary5.push_back(9);
+ boundary5.push_back(1);
boundary5.push_back(4);
boundary5.push_back(6);
std::vector<double> boundary6;
- boundary6.push_back(2);
boundary6.push_back(10);
+ boundary6.push_back(2);
std::vector<double> boundary7;
- boundary7.push_back(3);
boundary7.push_back(11);
+ boundary7.push_back(3);
boundary7.push_back(6);
boundary7.push_back(4);
std::vector<double> boundary8;
std::vector<double> boundary9;
- boundary9.push_back(8);
boundary9.push_back(10);
+ boundary9.push_back(8);
std::vector<double> boundary10;
std::vector<double> boundary11;
- boundary11.push_back(10);
boundary11.push_back(8);
+ boundary11.push_back(10);
std::vector<double> boundary12;
- boundary12.push_back(8);
boundary12.push_back(0);
+ boundary12.push_back(8);
std::vector<double> boundary13;
- boundary13.push_back(9);
boundary13.push_back(1);
+ boundary13.push_back(9);
boundary13.push_back(12);
boundary13.push_back(14);
std::vector<double> boundary14;
- boundary14.push_back(10);
boundary14.push_back(2);
+ boundary14.push_back(10);
std::vector<double> boundary15;
- boundary15.push_back(11);
boundary15.push_back(3);
+ boundary15.push_back(11);
boundary15.push_back(14);
boundary15.push_back(12);
- std::vector< std::vector<double> > boundaries;
+ std::vector<std::vector<double> > boundaries;
boundaries.push_back(boundary0);
boundaries.push_back(boundary1);
boundaries.push_back(boundary2);
@@ -644,7 +638,7 @@ BOOST_AUTO_TEST_CASE(boudary_operator_2d_bitmap_with_periodic_bcond) {
boundaries.push_back(boundary15);
for (size_t i = 0; i != cmplx.size(); ++i) {
- std::vector< size_t > bd = cmplx.get_boundary_of_a_cell(i);
+ std::vector<size_t> bd = cmplx.get_boundary_of_a_cell(i);
for (size_t j = 0; j != bd.size(); ++j) {
BOOST_CHECK(boundaries[i][j] == bd[j]);
}
@@ -652,7 +646,7 @@ BOOST_AUTO_TEST_CASE(boudary_operator_2d_bitmap_with_periodic_bcond) {
}
BOOST_AUTO_TEST_CASE(coboudary_operator_2d_bitmap_with_periodic_bcond) {
- std::vector< double > filtration({0, 0, 0, 0});
+ std::vector<double> filtration({0, 0, 0, 0});
std::vector<unsigned> dimensions({2, 2});
@@ -661,7 +655,6 @@ BOOST_AUTO_TEST_CASE(coboudary_operator_2d_bitmap_with_periodic_bcond) {
Bitmap_cubical_complex_periodic_boundary_conditions cmplx(dimensions, filtration, periodic_directions);
BOOST_CHECK(cmplx.dimension() == 2);
-
std::vector<double> coboundary0;
coboundary0.push_back(4);
coboundary0.push_back(12);
@@ -711,7 +704,7 @@ BOOST_AUTO_TEST_CASE(coboudary_operator_2d_bitmap_with_periodic_bcond) {
coboundary14.push_back(15);
std::vector<double> coboundary15;
- std::vector< std::vector<double> > coboundaries;
+ std::vector<std::vector<double> > coboundaries;
coboundaries.push_back(coboundary0);
coboundaries.push_back(coboundary1);
coboundaries.push_back(coboundary2);
@@ -730,7 +723,7 @@ BOOST_AUTO_TEST_CASE(coboudary_operator_2d_bitmap_with_periodic_bcond) {
coboundaries.push_back(coboundary15);
for (size_t i = 0; i != cmplx.size(); ++i) {
- std::vector< size_t > cbd = cmplx.get_coboundary_of_a_cell(i);
+ std::vector<size_t> cbd = cmplx.get_coboundary_of_a_cell(i);
for (size_t j = 0; j != cbd.size(); ++j) {
BOOST_CHECK(coboundaries[i][j] == cbd[j]);
}
@@ -738,7 +731,7 @@ BOOST_AUTO_TEST_CASE(coboudary_operator_2d_bitmap_with_periodic_bcond) {
}
BOOST_AUTO_TEST_CASE(bitmap_2d_with_periodic_bcond_filtration) {
- std::vector< double > filtrationOrg({0, 1, 2, 3});
+ std::vector<double> filtrationOrg({0, 1, 2, 3});
std::vector<unsigned> dimensions({2, 2});
@@ -747,7 +740,6 @@ BOOST_AUTO_TEST_CASE(bitmap_2d_with_periodic_bcond_filtration) {
Bitmap_cubical_complex_periodic_boundary_conditions cmplx(dimensions, filtrationOrg, periodic_directions);
BOOST_CHECK(cmplx.dimension() == 2);
-
std::vector<double> filtration;
filtration.push_back(0); // 0
filtration.push_back(0); // 1
@@ -766,613 +758,821 @@ BOOST_AUTO_TEST_CASE(bitmap_2d_with_periodic_bcond_filtration) {
filtration.push_back(2); // 14
filtration.push_back(3); // 15
-
for (size_t i = 0; i != cmplx.size(); ++i) {
BOOST_CHECK(filtration[i] == cmplx.get_cell_data(i));
}
}
-BOOST_AUTO_TEST_CASE(all_cells_iterator_and_boundary_iterators_in_Bitmap_cubical_complex_base_check)
-{
- std::vector< double > expected_filtration;
- expected_filtration.push_back(0);
- expected_filtration.push_back(0);
- expected_filtration.push_back(0);
- expected_filtration.push_back(1);
- expected_filtration.push_back(1);
- expected_filtration.push_back(0);
- expected_filtration.push_back(0);
- expected_filtration.push_back(0);
- expected_filtration.push_back(1);
- expected_filtration.push_back(1);
- expected_filtration.push_back(0);
- expected_filtration.push_back(0);
- expected_filtration.push_back(0);
- expected_filtration.push_back(1);
- expected_filtration.push_back(1);
- expected_filtration.push_back(2);
- expected_filtration.push_back(2);
- expected_filtration.push_back(2);
- expected_filtration.push_back(3);
- expected_filtration.push_back(3);
- expected_filtration.push_back(2);
- expected_filtration.push_back(2);
- expected_filtration.push_back(2);
- expected_filtration.push_back(3);
- expected_filtration.push_back(3);
-
- std::vector<unsigned> expected_dimension;
- expected_dimension.push_back(0);
- expected_dimension.push_back(1);
- expected_dimension.push_back(0);
- expected_dimension.push_back(1);
- expected_dimension.push_back(0);
- expected_dimension.push_back(1);
- expected_dimension.push_back(2);
- expected_dimension.push_back(1);
- expected_dimension.push_back(2);
- expected_dimension.push_back(1);
- expected_dimension.push_back(0);
- expected_dimension.push_back(1);
- expected_dimension.push_back(0);
- expected_dimension.push_back(1);
- expected_dimension.push_back(0);
- expected_dimension.push_back(1);
- expected_dimension.push_back(2);
- expected_dimension.push_back(1);
- expected_dimension.push_back(2);
- expected_dimension.push_back(1);
- expected_dimension.push_back(0);
- expected_dimension.push_back(1);
- expected_dimension.push_back(0);
- expected_dimension.push_back(1);
- expected_dimension.push_back(0);
-
- std::vector<size_t> expected_boundary;
- expected_boundary.push_back(0);
- expected_boundary.push_back(2);
- expected_boundary.push_back(2);
- expected_boundary.push_back(4);
- expected_boundary.push_back(0);
- expected_boundary.push_back(10);
- expected_boundary.push_back(1);
- expected_boundary.push_back(11);
- expected_boundary.push_back(5);
- expected_boundary.push_back(7);
- expected_boundary.push_back(2);
- expected_boundary.push_back(12);
- expected_boundary.push_back(3);
- expected_boundary.push_back(13);
- expected_boundary.push_back(7);
- expected_boundary.push_back(9);
- expected_boundary.push_back(4);
- expected_boundary.push_back(14);
- expected_boundary.push_back(10);
- expected_boundary.push_back(12);
- expected_boundary.push_back(12);
- expected_boundary.push_back(14);
- expected_boundary.push_back(10);
- expected_boundary.push_back(20);
- expected_boundary.push_back(11);
- expected_boundary.push_back(21);
- expected_boundary.push_back(15);
- expected_boundary.push_back(17);
- expected_boundary.push_back(12);
- expected_boundary.push_back(22);
- expected_boundary.push_back(13);
- expected_boundary.push_back(23);
- expected_boundary.push_back(17);
- expected_boundary.push_back(19);
- expected_boundary.push_back(14);
- expected_boundary.push_back(24);
- expected_boundary.push_back(20);
- expected_boundary.push_back(22);
- expected_boundary.push_back(22);
- expected_boundary.push_back(24);
-
-
- std::vector<size_t> expected_coboundary;
- expected_coboundary.push_back(5);
- expected_coboundary.push_back(1);
- expected_coboundary.push_back(6);
- expected_coboundary.push_back(7);
- expected_coboundary.push_back(1);
- expected_coboundary.push_back(3);
- expected_coboundary.push_back(8);
- expected_coboundary.push_back(9);
- expected_coboundary.push_back(3);
- expected_coboundary.push_back(6);
- expected_coboundary.push_back(6);
- expected_coboundary.push_back(8);
- expected_coboundary.push_back(8);
- expected_coboundary.push_back(5);
- expected_coboundary.push_back(15);
- expected_coboundary.push_back(11);
- expected_coboundary.push_back(6);
- expected_coboundary.push_back(16);
- expected_coboundary.push_back(7);
- expected_coboundary.push_back(17);
- expected_coboundary.push_back(11);
- expected_coboundary.push_back(13);
- expected_coboundary.push_back(8);
- expected_coboundary.push_back(18);
- expected_coboundary.push_back(9);
- expected_coboundary.push_back(19);
- expected_coboundary.push_back(13);
- expected_coboundary.push_back(16);
- expected_coboundary.push_back(16);
- expected_coboundary.push_back(18);
- expected_coboundary.push_back(18);
- expected_coboundary.push_back(15);
- expected_coboundary.push_back(21);
- expected_coboundary.push_back(16);
- expected_coboundary.push_back(17);
- expected_coboundary.push_back(21);
- expected_coboundary.push_back(23);
- expected_coboundary.push_back(18);
- expected_coboundary.push_back(19);
- expected_coboundary.push_back(23);
-
-
-
- std::vector< unsigned > sizes(2);
- sizes[0] = 2;
- sizes[1] = 2;
-
- std::vector< double > data(4);
- data[0] = 0;
- data[1] = 1;
- data[2] = 2;
- data[3] = 3;
-
- Bitmap_cubical_complex_base ba( sizes , data );
- int i = 0;
- int bd_it = 0;
- int cbd_it = 0;
- for ( Bitmap_cubical_complex_base::All_cells_iterator it = ba.all_cells_iterator_begin() ; it != ba.all_cells_iterator_end() ; ++it )
- {
- BOOST_CHECK( expected_filtration[i] == ba.get_cell_data( *it ) );
- BOOST_CHECK( expected_dimension[i] == ba.get_dimension_of_a_cell( *it ) );
-
- Bitmap_cubical_complex_base::Boundary_range bdrange = ba.boundary_range(*it);
- for ( Bitmap_cubical_complex_base::Boundary_iterator bd = bdrange.begin() ; bd != bdrange.end() ; ++bd )
- {
- BOOST_CHECK( expected_boundary[bd_it] == *bd );
- ++bd_it;
- }
-
- Bitmap_cubical_complex_base::Coboundary_range cbdrange = ba.coboundary_range(*it);
- for ( Bitmap_cubical_complex_base::Coboundary_iterator cbd = cbdrange.begin() ; cbd != cbdrange.end() ; ++cbd )
- {
- BOOST_CHECK( expected_coboundary[cbd_it] == *cbd );
- ++cbd_it;
- }
- ++i;
+
+BOOST_AUTO_TEST_CASE(all_cells_iterator_and_boundary_iterators_in_Bitmap_cubical_complex_base_check) {
+ std::vector<double> expected_filtration;
+ expected_filtration.push_back(0);
+ expected_filtration.push_back(0);
+ expected_filtration.push_back(0);
+ expected_filtration.push_back(1);
+ expected_filtration.push_back(1);
+ expected_filtration.push_back(0);
+ expected_filtration.push_back(0);
+ expected_filtration.push_back(0);
+ expected_filtration.push_back(1);
+ expected_filtration.push_back(1);
+ expected_filtration.push_back(0);
+ expected_filtration.push_back(0);
+ expected_filtration.push_back(0);
+ expected_filtration.push_back(1);
+ expected_filtration.push_back(1);
+ expected_filtration.push_back(2);
+ expected_filtration.push_back(2);
+ expected_filtration.push_back(2);
+ expected_filtration.push_back(3);
+ expected_filtration.push_back(3);
+ expected_filtration.push_back(2);
+ expected_filtration.push_back(2);
+ expected_filtration.push_back(2);
+ expected_filtration.push_back(3);
+ expected_filtration.push_back(3);
+
+ std::vector<unsigned> expected_dimension;
+ expected_dimension.push_back(0);
+ expected_dimension.push_back(1);
+ expected_dimension.push_back(0);
+ expected_dimension.push_back(1);
+ expected_dimension.push_back(0);
+ expected_dimension.push_back(1);
+ expected_dimension.push_back(2);
+ expected_dimension.push_back(1);
+ expected_dimension.push_back(2);
+ expected_dimension.push_back(1);
+ expected_dimension.push_back(0);
+ expected_dimension.push_back(1);
+ expected_dimension.push_back(0);
+ expected_dimension.push_back(1);
+ expected_dimension.push_back(0);
+ expected_dimension.push_back(1);
+ expected_dimension.push_back(2);
+ expected_dimension.push_back(1);
+ expected_dimension.push_back(2);
+ expected_dimension.push_back(1);
+ expected_dimension.push_back(0);
+ expected_dimension.push_back(1);
+ expected_dimension.push_back(0);
+ expected_dimension.push_back(1);
+ expected_dimension.push_back(0);
+
+ std::vector<size_t> expected_boundary;
+ expected_boundary.push_back(0);
+ expected_boundary.push_back(2);
+ expected_boundary.push_back(2);
+ expected_boundary.push_back(4);
+ expected_boundary.push_back(0);
+ expected_boundary.push_back(10);
+ expected_boundary.push_back(1);
+ expected_boundary.push_back(11);
+ expected_boundary.push_back(7);
+ expected_boundary.push_back(5);
+ expected_boundary.push_back(2);
+ expected_boundary.push_back(12);
+ expected_boundary.push_back(3);
+ expected_boundary.push_back(13);
+ expected_boundary.push_back(9);
+ expected_boundary.push_back(7);
+ expected_boundary.push_back(4);
+ expected_boundary.push_back(14);
+ expected_boundary.push_back(10);
+ expected_boundary.push_back(12);
+ expected_boundary.push_back(12);
+ expected_boundary.push_back(14);
+ expected_boundary.push_back(10);
+ expected_boundary.push_back(20);
+ expected_boundary.push_back(11);
+ expected_boundary.push_back(21);
+ expected_boundary.push_back(17);
+ expected_boundary.push_back(15);
+ expected_boundary.push_back(12);
+ expected_boundary.push_back(22);
+ expected_boundary.push_back(13);
+ expected_boundary.push_back(23);
+ expected_boundary.push_back(19);
+ expected_boundary.push_back(17);
+ expected_boundary.push_back(14);
+ expected_boundary.push_back(24);
+ expected_boundary.push_back(20);
+ expected_boundary.push_back(22);
+ expected_boundary.push_back(22);
+ expected_boundary.push_back(24);
+
+ std::vector<size_t> expected_coboundary;
+ expected_coboundary.push_back(5);
+ expected_coboundary.push_back(1);
+ expected_coboundary.push_back(6);
+ expected_coboundary.push_back(7);
+ expected_coboundary.push_back(1);
+ expected_coboundary.push_back(3);
+ expected_coboundary.push_back(8);
+ expected_coboundary.push_back(9);
+ expected_coboundary.push_back(3);
+ expected_coboundary.push_back(6);
+ expected_coboundary.push_back(6);
+ expected_coboundary.push_back(8);
+ expected_coboundary.push_back(8);
+ expected_coboundary.push_back(5);
+ expected_coboundary.push_back(15);
+ expected_coboundary.push_back(11);
+ expected_coboundary.push_back(6);
+ expected_coboundary.push_back(16);
+ expected_coboundary.push_back(7);
+ expected_coboundary.push_back(17);
+ expected_coboundary.push_back(11);
+ expected_coboundary.push_back(13);
+ expected_coboundary.push_back(8);
+ expected_coboundary.push_back(18);
+ expected_coboundary.push_back(9);
+ expected_coboundary.push_back(19);
+ expected_coboundary.push_back(13);
+ expected_coboundary.push_back(16);
+ expected_coboundary.push_back(16);
+ expected_coboundary.push_back(18);
+ expected_coboundary.push_back(18);
+ expected_coboundary.push_back(15);
+ expected_coboundary.push_back(21);
+ expected_coboundary.push_back(16);
+ expected_coboundary.push_back(17);
+ expected_coboundary.push_back(21);
+ expected_coboundary.push_back(23);
+ expected_coboundary.push_back(18);
+ expected_coboundary.push_back(19);
+ expected_coboundary.push_back(23);
+
+ std::vector<unsigned> sizes(2);
+ sizes[0] = 2;
+ sizes[1] = 2;
+
+ std::vector<double> data(4);
+ data[0] = 0;
+ data[1] = 1;
+ data[2] = 2;
+ data[3] = 3;
+
+ Bitmap_cubical_complex_base ba(sizes, data);
+ int i = 0;
+ int bd_it = 0;
+ int cbd_it = 0;
+ for (Bitmap_cubical_complex_base::All_cells_iterator it = ba.all_cells_iterator_begin();
+ it != ba.all_cells_iterator_end(); ++it) {
+ BOOST_CHECK(expected_filtration[i] == ba.get_cell_data(*it));
+ BOOST_CHECK(expected_dimension[i] == ba.get_dimension_of_a_cell(*it));
+
+ Bitmap_cubical_complex_base::Boundary_range bdrange = ba.boundary_range(*it);
+ for (Bitmap_cubical_complex_base::Boundary_iterator bd = bdrange.begin(); bd != bdrange.end(); ++bd) {
+ BOOST_CHECK(expected_boundary[bd_it] == *bd);
+ ++bd_it;
}
+
+ Bitmap_cubical_complex_base::Coboundary_range cbdrange = ba.coboundary_range(*it);
+ for (Bitmap_cubical_complex_base::Coboundary_iterator cbd = cbdrange.begin(); cbd != cbdrange.end(); ++cbd) {
+ BOOST_CHECK(expected_coboundary[cbd_it] == *cbd);
+ ++cbd_it;
+ }
+ ++i;
+ }
}
+BOOST_AUTO_TEST_CASE(all_cells_iterator_and_boundary_iterators_in_Bitmap_cubical_complex_base_check_range_check_2) {
+ std::vector<double> expected_filtration;
+ expected_filtration.push_back(0);
+ expected_filtration.push_back(0);
+ expected_filtration.push_back(0);
+ expected_filtration.push_back(1);
+ expected_filtration.push_back(1);
+ expected_filtration.push_back(0);
+ expected_filtration.push_back(0);
+ expected_filtration.push_back(0);
+ expected_filtration.push_back(1);
+ expected_filtration.push_back(1);
+ expected_filtration.push_back(0);
+ expected_filtration.push_back(0);
+ expected_filtration.push_back(0);
+ expected_filtration.push_back(1);
+ expected_filtration.push_back(1);
+ expected_filtration.push_back(2);
+ expected_filtration.push_back(2);
+ expected_filtration.push_back(2);
+ expected_filtration.push_back(3);
+ expected_filtration.push_back(3);
+ expected_filtration.push_back(2);
+ expected_filtration.push_back(2);
+ expected_filtration.push_back(2);
+ expected_filtration.push_back(3);
+ expected_filtration.push_back(3);
+
+ std::vector<unsigned> expected_dimension;
+ expected_dimension.push_back(0);
+ expected_dimension.push_back(1);
+ expected_dimension.push_back(0);
+ expected_dimension.push_back(1);
+ expected_dimension.push_back(0);
+ expected_dimension.push_back(1);
+ expected_dimension.push_back(2);
+ expected_dimension.push_back(1);
+ expected_dimension.push_back(2);
+ expected_dimension.push_back(1);
+ expected_dimension.push_back(0);
+ expected_dimension.push_back(1);
+ expected_dimension.push_back(0);
+ expected_dimension.push_back(1);
+ expected_dimension.push_back(0);
+ expected_dimension.push_back(1);
+ expected_dimension.push_back(2);
+ expected_dimension.push_back(1);
+ expected_dimension.push_back(2);
+ expected_dimension.push_back(1);
+ expected_dimension.push_back(0);
+ expected_dimension.push_back(1);
+ expected_dimension.push_back(0);
+ expected_dimension.push_back(1);
+ expected_dimension.push_back(0);
+
+ std::vector<size_t> expected_boundary;
+ expected_boundary.push_back(0);
+ expected_boundary.push_back(2);
+ expected_boundary.push_back(2);
+ expected_boundary.push_back(4);
+ expected_boundary.push_back(0);
+ expected_boundary.push_back(10);
+ expected_boundary.push_back(1);
+ expected_boundary.push_back(11);
+ expected_boundary.push_back(7);
+ expected_boundary.push_back(5);
+ expected_boundary.push_back(2);
+ expected_boundary.push_back(12);
+ expected_boundary.push_back(3);
+ expected_boundary.push_back(13);
+ expected_boundary.push_back(9);
+ expected_boundary.push_back(7);
+ expected_boundary.push_back(4);
+ expected_boundary.push_back(14);
+ expected_boundary.push_back(10);
+ expected_boundary.push_back(12);
+ expected_boundary.push_back(12);
+ expected_boundary.push_back(14);
+ expected_boundary.push_back(10);
+ expected_boundary.push_back(20);
+ expected_boundary.push_back(11);
+ expected_boundary.push_back(21);
+ expected_boundary.push_back(17);
+ expected_boundary.push_back(15);
+ expected_boundary.push_back(12);
+ expected_boundary.push_back(22);
+ expected_boundary.push_back(13);
+ expected_boundary.push_back(23);
+ expected_boundary.push_back(19);
+ expected_boundary.push_back(17);
+ expected_boundary.push_back(14);
+ expected_boundary.push_back(24);
+ expected_boundary.push_back(20);
+ expected_boundary.push_back(22);
+ expected_boundary.push_back(22);
+ expected_boundary.push_back(24);
+
+ std::vector<size_t> expected_coboundary;
+ expected_coboundary.push_back(5);
+ expected_coboundary.push_back(1);
+ expected_coboundary.push_back(6);
+ expected_coboundary.push_back(7);
+ expected_coboundary.push_back(1);
+ expected_coboundary.push_back(3);
+ expected_coboundary.push_back(8);
+ expected_coboundary.push_back(9);
+ expected_coboundary.push_back(3);
+ expected_coboundary.push_back(6);
+ expected_coboundary.push_back(6);
+ expected_coboundary.push_back(8);
+ expected_coboundary.push_back(8);
+ expected_coboundary.push_back(5);
+ expected_coboundary.push_back(15);
+ expected_coboundary.push_back(11);
+ expected_coboundary.push_back(6);
+ expected_coboundary.push_back(16);
+ expected_coboundary.push_back(7);
+ expected_coboundary.push_back(17);
+ expected_coboundary.push_back(11);
+ expected_coboundary.push_back(13);
+ expected_coboundary.push_back(8);
+ expected_coboundary.push_back(18);
+ expected_coboundary.push_back(9);
+ expected_coboundary.push_back(19);
+ expected_coboundary.push_back(13);
+ expected_coboundary.push_back(16);
+ expected_coboundary.push_back(16);
+ expected_coboundary.push_back(18);
+ expected_coboundary.push_back(18);
+ expected_coboundary.push_back(15);
+ expected_coboundary.push_back(21);
+ expected_coboundary.push_back(16);
+ expected_coboundary.push_back(17);
+ expected_coboundary.push_back(21);
+ expected_coboundary.push_back(23);
+ expected_coboundary.push_back(18);
+ expected_coboundary.push_back(19);
+ expected_coboundary.push_back(23);
+
+ std::vector<unsigned> sizes(2);
+ sizes[0] = 2;
+ sizes[1] = 2;
+
+ std::vector<double> data(4);
+ data[0] = 0;
+ data[1] = 1;
+ data[2] = 2;
+ data[3] = 3;
+
+ Bitmap_cubical_complex_base ba(sizes, data);
+ int i = 0;
+ int bd_it = 0;
+ int cbd_it = 0;
+
+ Bitmap_cubical_complex_base::All_cells_range range(&ba);
+ for (Bitmap_cubical_complex_base::All_cells_iterator it = range.begin(); it != range.end(); ++it) {
+ BOOST_CHECK(expected_filtration[i] == ba.get_cell_data(*it));
+ BOOST_CHECK(expected_dimension[i] == ba.get_dimension_of_a_cell(*it));
+
+ Bitmap_cubical_complex_base::Boundary_range bdrange = ba.boundary_range(*it);
+ for (Bitmap_cubical_complex_base::Boundary_iterator bd = bdrange.begin(); bd != bdrange.end(); ++bd) {
+ BOOST_CHECK(expected_boundary[bd_it] == *bd);
+ ++bd_it;
+ }
+ Bitmap_cubical_complex_base::Coboundary_range cbdrange = ba.coboundary_range(*it);
+ for (Bitmap_cubical_complex_base::Coboundary_iterator cbd = cbdrange.begin(); cbd != cbdrange.end(); ++cbd) {
+ BOOST_CHECK(expected_coboundary[cbd_it] == *cbd);
+ ++cbd_it;
+ }
+ ++i;
+ }
+}
+BOOST_AUTO_TEST_CASE(all_cells_iterator_and_boundary_iterators_in_Bitmap_cubical_complex_base_check_range_check) {
+ std::vector<double> expected_filtration;
+ expected_filtration.push_back(0);
+ expected_filtration.push_back(0);
+ expected_filtration.push_back(0);
+ expected_filtration.push_back(1);
+ expected_filtration.push_back(1);
+ expected_filtration.push_back(0);
+ expected_filtration.push_back(0);
+ expected_filtration.push_back(0);
+ expected_filtration.push_back(1);
+ expected_filtration.push_back(1);
+ expected_filtration.push_back(0);
+ expected_filtration.push_back(0);
+ expected_filtration.push_back(0);
+ expected_filtration.push_back(1);
+ expected_filtration.push_back(1);
+ expected_filtration.push_back(2);
+ expected_filtration.push_back(2);
+ expected_filtration.push_back(2);
+ expected_filtration.push_back(3);
+ expected_filtration.push_back(3);
+ expected_filtration.push_back(2);
+ expected_filtration.push_back(2);
+ expected_filtration.push_back(2);
+ expected_filtration.push_back(3);
+ expected_filtration.push_back(3);
+
+ std::vector<unsigned> expected_dimension;
+ expected_dimension.push_back(0);
+ expected_dimension.push_back(1);
+ expected_dimension.push_back(0);
+ expected_dimension.push_back(1);
+ expected_dimension.push_back(0);
+ expected_dimension.push_back(1);
+ expected_dimension.push_back(2);
+ expected_dimension.push_back(1);
+ expected_dimension.push_back(2);
+ expected_dimension.push_back(1);
+ expected_dimension.push_back(0);
+ expected_dimension.push_back(1);
+ expected_dimension.push_back(0);
+ expected_dimension.push_back(1);
+ expected_dimension.push_back(0);
+ expected_dimension.push_back(1);
+ expected_dimension.push_back(2);
+ expected_dimension.push_back(1);
+ expected_dimension.push_back(2);
+ expected_dimension.push_back(1);
+ expected_dimension.push_back(0);
+ expected_dimension.push_back(1);
+ expected_dimension.push_back(0);
+ expected_dimension.push_back(1);
+ expected_dimension.push_back(0);
+
+ std::vector<size_t> expected_boundary;
+ expected_boundary.push_back(0);
+ expected_boundary.push_back(2);
+ expected_boundary.push_back(2);
+ expected_boundary.push_back(4);
+ expected_boundary.push_back(0);
+ expected_boundary.push_back(10);
+ expected_boundary.push_back(1);
+ expected_boundary.push_back(11);
+ expected_boundary.push_back(7);
+ expected_boundary.push_back(5);
+ expected_boundary.push_back(2);
+ expected_boundary.push_back(12);
+ expected_boundary.push_back(3);
+ expected_boundary.push_back(13);
+ expected_boundary.push_back(9);
+ expected_boundary.push_back(7);
+ expected_boundary.push_back(4);
+ expected_boundary.push_back(14);
+ expected_boundary.push_back(10);
+ expected_boundary.push_back(12);
+ expected_boundary.push_back(12);
+ expected_boundary.push_back(14);
+ expected_boundary.push_back(10);
+ expected_boundary.push_back(20);
+ expected_boundary.push_back(11);
+ expected_boundary.push_back(21);
+ expected_boundary.push_back(17);
+ expected_boundary.push_back(15);
+ expected_boundary.push_back(12);
+ expected_boundary.push_back(22);
+ expected_boundary.push_back(13);
+ expected_boundary.push_back(23);
+ expected_boundary.push_back(19);
+ expected_boundary.push_back(17);
+ expected_boundary.push_back(14);
+ expected_boundary.push_back(24);
+ expected_boundary.push_back(20);
+ expected_boundary.push_back(22);
+ expected_boundary.push_back(22);
+ expected_boundary.push_back(24);
+
+ std::vector<size_t> expected_coboundary;
+ expected_coboundary.push_back(5);
+ expected_coboundary.push_back(1);
+ expected_coboundary.push_back(6);
+ expected_coboundary.push_back(7);
+ expected_coboundary.push_back(1);
+ expected_coboundary.push_back(3);
+ expected_coboundary.push_back(8);
+ expected_coboundary.push_back(9);
+ expected_coboundary.push_back(3);
+ expected_coboundary.push_back(6);
+ expected_coboundary.push_back(6);
+ expected_coboundary.push_back(8);
+ expected_coboundary.push_back(8);
+ expected_coboundary.push_back(5);
+ expected_coboundary.push_back(15);
+ expected_coboundary.push_back(11);
+ expected_coboundary.push_back(6);
+ expected_coboundary.push_back(16);
+ expected_coboundary.push_back(7);
+ expected_coboundary.push_back(17);
+ expected_coboundary.push_back(11);
+ expected_coboundary.push_back(13);
+ expected_coboundary.push_back(8);
+ expected_coboundary.push_back(18);
+ expected_coboundary.push_back(9);
+ expected_coboundary.push_back(19);
+ expected_coboundary.push_back(13);
+ expected_coboundary.push_back(16);
+ expected_coboundary.push_back(16);
+ expected_coboundary.push_back(18);
+ expected_coboundary.push_back(18);
+ expected_coboundary.push_back(15);
+ expected_coboundary.push_back(21);
+ expected_coboundary.push_back(16);
+ expected_coboundary.push_back(17);
+ expected_coboundary.push_back(21);
+ expected_coboundary.push_back(23);
+ expected_coboundary.push_back(18);
+ expected_coboundary.push_back(19);
+ expected_coboundary.push_back(23);
+
+ std::vector<unsigned> sizes(2);
+ sizes[0] = 2;
+ sizes[1] = 2;
+
+ std::vector<double> data(4);
+ data[0] = 0;
+ data[1] = 1;
+ data[2] = 2;
+ data[3] = 3;
+
+ Bitmap_cubical_complex_base ba(sizes, data);
+ int i = 0;
+ int bd_it = 0;
+ int cbd_it = 0;
+
+ Bitmap_cubical_complex_base::All_cells_range range = ba.all_cells_range();
+ for (Bitmap_cubical_complex_base::All_cells_iterator it = range.begin(); it != range.end(); ++it) {
+ BOOST_CHECK(expected_filtration[i] == ba.get_cell_data(*it));
+ BOOST_CHECK(expected_dimension[i] == ba.get_dimension_of_a_cell(*it));
+
+ Bitmap_cubical_complex_base::Boundary_range bdrange = ba.boundary_range(*it);
+ for (Bitmap_cubical_complex_base::Boundary_iterator bd = bdrange.begin(); bd != bdrange.end(); ++bd) {
+ BOOST_CHECK(expected_boundary[bd_it] == *bd);
+ ++bd_it;
+ }
+ Bitmap_cubical_complex_base::Coboundary_range cbdrange = ba.coboundary_range(*it);
+ for (Bitmap_cubical_complex_base::Coboundary_iterator cbd = cbdrange.begin(); cbd != cbdrange.end(); ++cbd) {
+ BOOST_CHECK(expected_coboundary[cbd_it] == *cbd);
+ ++cbd_it;
+ }
+ ++i;
+ }
+}
+BOOST_AUTO_TEST_CASE(Top_dimensional_cells_iterator_range_check) {
+ std::vector<double> expected_filtration;
+ expected_filtration.push_back(0);
+ expected_filtration.push_back(0);
+ expected_filtration.push_back(0);
+ expected_filtration.push_back(1);
+ expected_filtration.push_back(1);
+ expected_filtration.push_back(0);
+ expected_filtration.push_back(0);
+ expected_filtration.push_back(0);
+ expected_filtration.push_back(1);
+ expected_filtration.push_back(1);
+ expected_filtration.push_back(0);
+ expected_filtration.push_back(0);
+ expected_filtration.push_back(0);
+ expected_filtration.push_back(1);
+ expected_filtration.push_back(1);
+ expected_filtration.push_back(2);
+ expected_filtration.push_back(2);
+ expected_filtration.push_back(2);
+ expected_filtration.push_back(3);
+ expected_filtration.push_back(3);
+ expected_filtration.push_back(2);
+ expected_filtration.push_back(2);
+ expected_filtration.push_back(2);
+ expected_filtration.push_back(3);
+ expected_filtration.push_back(3);
+
+ std::vector<unsigned> sizes(2);
+ sizes[0] = 2;
+ sizes[1] = 2;
+
+ std::vector<double> data(4);
+ data[0] = 0;
+ data[1] = 1;
+ data[2] = 2;
+ data[3] = 3;
+
+ Bitmap_cubical_complex_base ba(sizes, data);
+ int i = 0;
+ Bitmap_cubical_complex_base::Top_dimensional_cells_range range = ba.top_dimensional_cells_range();
+ for (Bitmap_cubical_complex_base::Top_dimensional_cells_iterator it = range.begin(); it != range.end(); ++it) {
+ BOOST_CHECK(data[i] == ba.get_cell_data(*it));
+ BOOST_CHECK(ba.get_dimension_of_a_cell(*it) == 2);
+ ++i;
+ }
+}
+BOOST_AUTO_TEST_CASE(check_if_boundary_of_boundary_is_zero_non_periodic_case_3_d) {
+ std::vector<unsigned> sizes(3);
+ sizes[0] = 3;
+ sizes[1] = 3;
+ sizes[2] = 3;
+
+ std::vector<double> data(27, 0);
+
+ int number_of_all_elements = (2 * sizes[0] + 1) * (2 * sizes[1] + 1) * (2 * sizes[2] + 1);
+ std::vector<int> elems_in_boundary(number_of_all_elements, 0);
+ Bitmap_cubical_complex_base ba(sizes, data);
+ for (Bitmap_cubical_complex_base::All_cells_iterator it = ba.all_cells_iterator_begin();
+ it != ba.all_cells_iterator_end(); ++it) {
+ int i = 1;
+ Bitmap_cubical_complex_base::Boundary_range bdrange = ba.boundary_range(*it);
+ for (Bitmap_cubical_complex_base::Boundary_iterator bd = bdrange.begin(); bd != bdrange.end(); ++bd) {
+ Bitmap_cubical_complex_base::Boundary_range second_bdrange = ba.boundary_range(*bd);
+ int j = 1;
+ for (Bitmap_cubical_complex_base::Boundary_iterator bd2 = second_bdrange.begin(); bd2 != second_bdrange.end();
+ ++bd2) {
+ elems_in_boundary[*bd2] += i * j;
+ j *= -1;
+ }
+ i *= -1;
+ }
+ // check if there is anything nonzero in elems_in_boundary
+ for (size_t i = 0; i != elems_in_boundary.size(); ++i) {
+ BOOST_CHECK(elems_in_boundary[i] == 0);
+ }
+ }
+}
-BOOST_AUTO_TEST_CASE(all_cells_iterator_and_boundary_iterators_in_Bitmap_cubical_complex_base_check_range_check_2)
-{
- std::vector< double > expected_filtration;
- expected_filtration.push_back(0);
- expected_filtration.push_back(0);
- expected_filtration.push_back(0);
- expected_filtration.push_back(1);
- expected_filtration.push_back(1);
- expected_filtration.push_back(0);
- expected_filtration.push_back(0);
- expected_filtration.push_back(0);
- expected_filtration.push_back(1);
- expected_filtration.push_back(1);
- expected_filtration.push_back(0);
- expected_filtration.push_back(0);
- expected_filtration.push_back(0);
- expected_filtration.push_back(1);
- expected_filtration.push_back(1);
- expected_filtration.push_back(2);
- expected_filtration.push_back(2);
- expected_filtration.push_back(2);
- expected_filtration.push_back(3);
- expected_filtration.push_back(3);
- expected_filtration.push_back(2);
- expected_filtration.push_back(2);
- expected_filtration.push_back(2);
- expected_filtration.push_back(3);
- expected_filtration.push_back(3);
-
- std::vector<unsigned> expected_dimension;
- expected_dimension.push_back(0);
- expected_dimension.push_back(1);
- expected_dimension.push_back(0);
- expected_dimension.push_back(1);
- expected_dimension.push_back(0);
- expected_dimension.push_back(1);
- expected_dimension.push_back(2);
- expected_dimension.push_back(1);
- expected_dimension.push_back(2);
- expected_dimension.push_back(1);
- expected_dimension.push_back(0);
- expected_dimension.push_back(1);
- expected_dimension.push_back(0);
- expected_dimension.push_back(1);
- expected_dimension.push_back(0);
- expected_dimension.push_back(1);
- expected_dimension.push_back(2);
- expected_dimension.push_back(1);
- expected_dimension.push_back(2);
- expected_dimension.push_back(1);
- expected_dimension.push_back(0);
- expected_dimension.push_back(1);
- expected_dimension.push_back(0);
- expected_dimension.push_back(1);
- expected_dimension.push_back(0);
-
- std::vector<size_t> expected_boundary;
- expected_boundary.push_back(0);
- expected_boundary.push_back(2);
- expected_boundary.push_back(2);
- expected_boundary.push_back(4);
- expected_boundary.push_back(0);
- expected_boundary.push_back(10);
- expected_boundary.push_back(1);
- expected_boundary.push_back(11);
- expected_boundary.push_back(5);
- expected_boundary.push_back(7);
- expected_boundary.push_back(2);
- expected_boundary.push_back(12);
- expected_boundary.push_back(3);
- expected_boundary.push_back(13);
- expected_boundary.push_back(7);
- expected_boundary.push_back(9);
- expected_boundary.push_back(4);
- expected_boundary.push_back(14);
- expected_boundary.push_back(10);
- expected_boundary.push_back(12);
- expected_boundary.push_back(12);
- expected_boundary.push_back(14);
- expected_boundary.push_back(10);
- expected_boundary.push_back(20);
- expected_boundary.push_back(11);
- expected_boundary.push_back(21);
- expected_boundary.push_back(15);
- expected_boundary.push_back(17);
- expected_boundary.push_back(12);
- expected_boundary.push_back(22);
- expected_boundary.push_back(13);
- expected_boundary.push_back(23);
- expected_boundary.push_back(17);
- expected_boundary.push_back(19);
- expected_boundary.push_back(14);
- expected_boundary.push_back(24);
- expected_boundary.push_back(20);
- expected_boundary.push_back(22);
- expected_boundary.push_back(22);
- expected_boundary.push_back(24);
-
-
- std::vector<size_t> expected_coboundary;
- expected_coboundary.push_back(5);
- expected_coboundary.push_back(1);
- expected_coboundary.push_back(6);
- expected_coboundary.push_back(7);
- expected_coboundary.push_back(1);
- expected_coboundary.push_back(3);
- expected_coboundary.push_back(8);
- expected_coboundary.push_back(9);
- expected_coboundary.push_back(3);
- expected_coboundary.push_back(6);
- expected_coboundary.push_back(6);
- expected_coboundary.push_back(8);
- expected_coboundary.push_back(8);
- expected_coboundary.push_back(5);
- expected_coboundary.push_back(15);
- expected_coboundary.push_back(11);
- expected_coboundary.push_back(6);
- expected_coboundary.push_back(16);
- expected_coboundary.push_back(7);
- expected_coboundary.push_back(17);
- expected_coboundary.push_back(11);
- expected_coboundary.push_back(13);
- expected_coboundary.push_back(8);
- expected_coboundary.push_back(18);
- expected_coboundary.push_back(9);
- expected_coboundary.push_back(19);
- expected_coboundary.push_back(13);
- expected_coboundary.push_back(16);
- expected_coboundary.push_back(16);
- expected_coboundary.push_back(18);
- expected_coboundary.push_back(18);
- expected_coboundary.push_back(15);
- expected_coboundary.push_back(21);
- expected_coboundary.push_back(16);
- expected_coboundary.push_back(17);
- expected_coboundary.push_back(21);
- expected_coboundary.push_back(23);
- expected_coboundary.push_back(18);
- expected_coboundary.push_back(19);
- expected_coboundary.push_back(23);
-
-
-
- std::vector< unsigned > sizes(2);
- sizes[0] = 2;
- sizes[1] = 2;
-
- std::vector< double > data(4);
- data[0] = 0;
- data[1] = 1;
- data[2] = 2;
- data[3] = 3;
-
- Bitmap_cubical_complex_base ba( sizes , data );
- int i = 0;
- int bd_it = 0;
- int cbd_it = 0;
-
- Bitmap_cubical_complex_base::All_cells_range range(&ba);
- for ( Bitmap_cubical_complex_base::All_cells_iterator it = range.begin() ; it != range.end() ; ++it )
- {
- BOOST_CHECK( expected_filtration[i] == ba.get_cell_data( *it ) );
- BOOST_CHECK( expected_dimension[i] == ba.get_dimension_of_a_cell( *it ) );
-
- Bitmap_cubical_complex_base::Boundary_range bdrange = ba.boundary_range(*it);
- for ( Bitmap_cubical_complex_base::Boundary_iterator bd = bdrange.begin() ; bd != bdrange.end() ; ++bd )
- {
- BOOST_CHECK( expected_boundary[bd_it] == *bd );
- ++bd_it;
- }
-
- Bitmap_cubical_complex_base::Coboundary_range cbdrange = ba.coboundary_range(*it);
- for ( Bitmap_cubical_complex_base::Coboundary_iterator cbd = cbdrange.begin() ; cbd != cbdrange.end() ; ++cbd )
- {
- BOOST_CHECK( expected_coboundary[cbd_it] == *cbd );
- ++cbd_it;
- }
- ++i;
+BOOST_AUTO_TEST_CASE(check_if_boundary_of_boundary_is_zero_non_periodic_case_4_d) {
+ std::vector<unsigned> sizes(4);
+ sizes[0] = 3;
+ sizes[1] = 3;
+ sizes[2] = 3;
+ sizes[3] = 3;
+
+ std::vector<double> data(81, 0);
+
+ int number_of_all_elements = (2 * sizes[0] + 1) * (2 * sizes[1] + 1) * (2 * sizes[2] + 1) * (2 * sizes[3] + 1);
+ std::vector<int> elems_in_boundary(number_of_all_elements, 0);
+ Bitmap_cubical_complex_base ba(sizes, data);
+ for (Bitmap_cubical_complex_base::All_cells_iterator it = ba.all_cells_iterator_begin();
+ it != ba.all_cells_iterator_end(); ++it) {
+ int i = 1;
+ Bitmap_cubical_complex_base::Boundary_range bdrange = ba.boundary_range(*it);
+ for (Bitmap_cubical_complex_base::Boundary_iterator bd = bdrange.begin(); bd != bdrange.end(); ++bd) {
+ Bitmap_cubical_complex_base::Boundary_range second_bdrange = ba.boundary_range(*bd);
+ int j = 1;
+ for (Bitmap_cubical_complex_base::Boundary_iterator bd2 = second_bdrange.begin(); bd2 != second_bdrange.end();
+ ++bd2) {
+ elems_in_boundary[*bd2] += i * j;
+ j *= -1;
+ }
+ i *= -1;
}
+ // check if there is anything nonzero in elems_in_boundary
+ for (size_t i = 0; i != elems_in_boundary.size(); ++i) {
+ BOOST_CHECK(elems_in_boundary[i] == 0);
+ }
+ }
}
+BOOST_AUTO_TEST_CASE(check_if_boundary_of_boundary_is_zero_periodic_case_2d) {
+ std::vector<unsigned> sizes(2);
+ sizes[0] = 3;
+ sizes[1] = 3;
+
+ std::vector<bool> directions_of_periodicity(2, true);
+ std::vector<double> data(9, 0);
+
+  int number_of_all_elements = (2 * sizes[0]) * (2 * sizes[1]);
+ std::vector<int> elems_in_boundary(number_of_all_elements, 0);
+ Bitmap_cubical_complex_periodic_boundary_conditions ba(sizes, data, directions_of_periodicity);
+ for (Bitmap_cubical_complex_periodic_boundary_conditions::All_cells_iterator it = ba.all_cells_iterator_begin();
+ it != ba.all_cells_iterator_end(); ++it) {
+ int i = 1;
+
+ // std::cout << "Element : " << *it << std::endl;
+
+ Bitmap_cubical_complex_periodic_boundary_conditions_base::Boundary_range bdrange = ba.boundary_range(*it);
+ for (Bitmap_cubical_complex_periodic_boundary_conditions::Boundary_iterator bd = bdrange.begin();
+ bd != bdrange.end(); ++bd) {
+ // std::cout << *bd << " ";
+ Bitmap_cubical_complex_periodic_boundary_conditions::Boundary_range second_bdrange = ba.boundary_range(*bd);
+ int j = 1;
+ for (Bitmap_cubical_complex_periodic_boundary_conditions::Boundary_iterator bd2 = second_bdrange.begin();
+ bd2 != second_bdrange.end(); ++bd2) {
+ elems_in_boundary[*bd2] += i * j;
+ j *= -1;
+ }
+ i *= -1;
+ }
+ // getchar();
+ // check if there is anything nonzero in elems_in_boundary
+ for (size_t i = 0; i != elems_in_boundary.size(); ++i) {
+ BOOST_CHECK(elems_in_boundary[i] == 0);
+ }
+ }
+}
+BOOST_AUTO_TEST_CASE(check_if_boundary_of_boundary_is_zero_periodic_case_3d) {
+ std::vector<unsigned> sizes(3);
+ sizes[0] = 3;
+ sizes[1] = 3;
+ sizes[2] = 3;
+
+ std::vector<bool> directions_of_periodicity(3, true);
+
+ std::vector<double> data(27, 0);
+
+ int number_of_all_elements = (2 * sizes[0]) * (2 * sizes[1]) * (2 * sizes[2]);
+ Bitmap_cubical_complex_periodic_boundary_conditions ba(sizes, data, directions_of_periodicity);
+ std::vector<int> elems_in_boundary(number_of_all_elements, 0);
+ for (Bitmap_cubical_complex_periodic_boundary_conditions::All_cells_iterator it = ba.all_cells_iterator_begin();
+ it != ba.all_cells_iterator_end(); ++it) {
+ // std::cout << "Element : " << *it << std::endl;
+
+ int i = 1;
+
+ Bitmap_cubical_complex_periodic_boundary_conditions_base::Boundary_range bdrange = ba.boundary_range(*it);
+ for (Bitmap_cubical_complex_periodic_boundary_conditions::Boundary_iterator bd = bdrange.begin();
+ bd != bdrange.end(); ++bd) {
+ Bitmap_cubical_complex_periodic_boundary_conditions::Boundary_range second_bdrange = ba.boundary_range(*bd);
+ // std::cout << *bd << " ";
+ int j = 1;
+ for (Bitmap_cubical_complex_periodic_boundary_conditions::Boundary_iterator bd2 = second_bdrange.begin();
+ bd2 != second_bdrange.end(); ++bd2) {
+ elems_in_boundary[*bd2] += i * j;
+ j *= -1;
+ }
+ i *= -1;
+ }
+ // check if there is anything nonzero in elems_in_boundary
+ for (size_t i = 0; i != elems_in_boundary.size(); ++i) {
+ BOOST_CHECK(elems_in_boundary[i] == 0);
+ }
+ }
+}
+BOOST_AUTO_TEST_CASE(check_if_boundary_of_boundary_is_zero_periodic_case_4d) {
+ std::vector<unsigned> sizes(4);
+ sizes[0] = 3;
+ sizes[1] = 3;
+ sizes[2] = 3;
+ sizes[3] = 3;
+
+ std::vector<bool> directions_of_periodicity(4, true);
+
+ std::vector<double> data(81, 0);
+
+ int number_of_all_elements = (2 * sizes[0]) * (2 * sizes[1]) * (2 * sizes[2]) * (2 * sizes[3]);
+ std::vector<int> elems_in_boundary(number_of_all_elements, 0);
+ Bitmap_cubical_complex_periodic_boundary_conditions ba(sizes, data, directions_of_periodicity);
+ for (Bitmap_cubical_complex_periodic_boundary_conditions::All_cells_iterator it = ba.all_cells_iterator_begin();
+ it != ba.all_cells_iterator_end(); ++it) {
+ int i = 1;
+
+ Bitmap_cubical_complex_periodic_boundary_conditions_base::Boundary_range bdrange = ba.boundary_range(*it);
+ for (Bitmap_cubical_complex_periodic_boundary_conditions::Boundary_iterator bd = bdrange.begin();
+ bd != bdrange.end(); ++bd) {
+ Bitmap_cubical_complex_periodic_boundary_conditions::Boundary_range second_bdrange = ba.boundary_range(*bd);
+ int j = 1;
+ for (Bitmap_cubical_complex_periodic_boundary_conditions::Boundary_iterator bd2 = second_bdrange.begin();
+ bd2 != second_bdrange.end(); ++bd2) {
+ elems_in_boundary[*bd2] += i * j;
+ j *= -1;
+ }
+ i *= -1;
+ }
-BOOST_AUTO_TEST_CASE(all_cells_iterator_and_boundary_iterators_in_Bitmap_cubical_complex_base_check_range_check)
-{
- std::vector< double > expected_filtration;
- expected_filtration.push_back(0);
- expected_filtration.push_back(0);
- expected_filtration.push_back(0);
- expected_filtration.push_back(1);
- expected_filtration.push_back(1);
- expected_filtration.push_back(0);
- expected_filtration.push_back(0);
- expected_filtration.push_back(0);
- expected_filtration.push_back(1);
- expected_filtration.push_back(1);
- expected_filtration.push_back(0);
- expected_filtration.push_back(0);
- expected_filtration.push_back(0);
- expected_filtration.push_back(1);
- expected_filtration.push_back(1);
- expected_filtration.push_back(2);
- expected_filtration.push_back(2);
- expected_filtration.push_back(2);
- expected_filtration.push_back(3);
- expected_filtration.push_back(3);
- expected_filtration.push_back(2);
- expected_filtration.push_back(2);
- expected_filtration.push_back(2);
- expected_filtration.push_back(3);
- expected_filtration.push_back(3);
-
- std::vector<unsigned> expected_dimension;
- expected_dimension.push_back(0);
- expected_dimension.push_back(1);
- expected_dimension.push_back(0);
- expected_dimension.push_back(1);
- expected_dimension.push_back(0);
- expected_dimension.push_back(1);
- expected_dimension.push_back(2);
- expected_dimension.push_back(1);
- expected_dimension.push_back(2);
- expected_dimension.push_back(1);
- expected_dimension.push_back(0);
- expected_dimension.push_back(1);
- expected_dimension.push_back(0);
- expected_dimension.push_back(1);
- expected_dimension.push_back(0);
- expected_dimension.push_back(1);
- expected_dimension.push_back(2);
- expected_dimension.push_back(1);
- expected_dimension.push_back(2);
- expected_dimension.push_back(1);
- expected_dimension.push_back(0);
- expected_dimension.push_back(1);
- expected_dimension.push_back(0);
- expected_dimension.push_back(1);
- expected_dimension.push_back(0);
-
- std::vector<size_t> expected_boundary;
- expected_boundary.push_back(0);
- expected_boundary.push_back(2);
- expected_boundary.push_back(2);
- expected_boundary.push_back(4);
- expected_boundary.push_back(0);
- expected_boundary.push_back(10);
- expected_boundary.push_back(1);
- expected_boundary.push_back(11);
- expected_boundary.push_back(5);
- expected_boundary.push_back(7);
- expected_boundary.push_back(2);
- expected_boundary.push_back(12);
- expected_boundary.push_back(3);
- expected_boundary.push_back(13);
- expected_boundary.push_back(7);
- expected_boundary.push_back(9);
- expected_boundary.push_back(4);
- expected_boundary.push_back(14);
- expected_boundary.push_back(10);
- expected_boundary.push_back(12);
- expected_boundary.push_back(12);
- expected_boundary.push_back(14);
- expected_boundary.push_back(10);
- expected_boundary.push_back(20);
- expected_boundary.push_back(11);
- expected_boundary.push_back(21);
- expected_boundary.push_back(15);
- expected_boundary.push_back(17);
- expected_boundary.push_back(12);
- expected_boundary.push_back(22);
- expected_boundary.push_back(13);
- expected_boundary.push_back(23);
- expected_boundary.push_back(17);
- expected_boundary.push_back(19);
- expected_boundary.push_back(14);
- expected_boundary.push_back(24);
- expected_boundary.push_back(20);
- expected_boundary.push_back(22);
- expected_boundary.push_back(22);
- expected_boundary.push_back(24);
-
-
- std::vector<size_t> expected_coboundary;
- expected_coboundary.push_back(5);
- expected_coboundary.push_back(1);
- expected_coboundary.push_back(6);
- expected_coboundary.push_back(7);
- expected_coboundary.push_back(1);
- expected_coboundary.push_back(3);
- expected_coboundary.push_back(8);
- expected_coboundary.push_back(9);
- expected_coboundary.push_back(3);
- expected_coboundary.push_back(6);
- expected_coboundary.push_back(6);
- expected_coboundary.push_back(8);
- expected_coboundary.push_back(8);
- expected_coboundary.push_back(5);
- expected_coboundary.push_back(15);
- expected_coboundary.push_back(11);
- expected_coboundary.push_back(6);
- expected_coboundary.push_back(16);
- expected_coboundary.push_back(7);
- expected_coboundary.push_back(17);
- expected_coboundary.push_back(11);
- expected_coboundary.push_back(13);
- expected_coboundary.push_back(8);
- expected_coboundary.push_back(18);
- expected_coboundary.push_back(9);
- expected_coboundary.push_back(19);
- expected_coboundary.push_back(13);
- expected_coboundary.push_back(16);
- expected_coboundary.push_back(16);
- expected_coboundary.push_back(18);
- expected_coboundary.push_back(18);
- expected_coboundary.push_back(15);
- expected_coboundary.push_back(21);
- expected_coboundary.push_back(16);
- expected_coboundary.push_back(17);
- expected_coboundary.push_back(21);
- expected_coboundary.push_back(23);
- expected_coboundary.push_back(18);
- expected_coboundary.push_back(19);
- expected_coboundary.push_back(23);
-
-
-
- std::vector< unsigned > sizes(2);
- sizes[0] = 2;
- sizes[1] = 2;
-
- std::vector< double > data(4);
- data[0] = 0;
- data[1] = 1;
- data[2] = 2;
- data[3] = 3;
-
- Bitmap_cubical_complex_base ba( sizes , data );
- int i = 0;
- int bd_it = 0;
- int cbd_it = 0;
-
- Bitmap_cubical_complex_base::All_cells_range range = ba.all_cells_range();
- for ( Bitmap_cubical_complex_base::All_cells_iterator it = range.begin() ; it != range.end() ; ++it )
- {
- BOOST_CHECK( expected_filtration[i] == ba.get_cell_data( *it ) );
- BOOST_CHECK( expected_dimension[i] == ba.get_dimension_of_a_cell( *it ) );
-
- Bitmap_cubical_complex_base::Boundary_range bdrange = ba.boundary_range(*it);
- for ( Bitmap_cubical_complex_base::Boundary_iterator bd = bdrange.begin() ; bd != bdrange.end() ; ++bd )
- {
- BOOST_CHECK( expected_boundary[bd_it] == *bd );
- ++bd_it;
- }
-
- Bitmap_cubical_complex_base::Coboundary_range cbdrange = ba.coboundary_range(*it);
- for ( Bitmap_cubical_complex_base::Coboundary_iterator cbd = cbdrange.begin() ; cbd != cbdrange.end() ; ++cbd )
- {
- BOOST_CHECK( expected_coboundary[cbd_it] == *cbd );
- ++cbd_it;
- }
- ++i;
+ // check if there is anything nonzero in elems_in_boundary
+ for (size_t i = 0; i != elems_in_boundary.size(); ++i) {
+ BOOST_CHECK(elems_in_boundary[i] == 0);
}
+ }
}
-BOOST_AUTO_TEST_CASE(Top_dimensional_cells_iterator_range_check)
-{
- std::vector< double > expected_filtration;
- expected_filtration.push_back(0);
- expected_filtration.push_back(0);
- expected_filtration.push_back(0);
- expected_filtration.push_back(1);
- expected_filtration.push_back(1);
- expected_filtration.push_back(0);
- expected_filtration.push_back(0);
- expected_filtration.push_back(0);
- expected_filtration.push_back(1);
- expected_filtration.push_back(1);
- expected_filtration.push_back(0);
- expected_filtration.push_back(0);
- expected_filtration.push_back(0);
- expected_filtration.push_back(1);
- expected_filtration.push_back(1);
- expected_filtration.push_back(2);
- expected_filtration.push_back(2);
- expected_filtration.push_back(2);
- expected_filtration.push_back(3);
- expected_filtration.push_back(3);
- expected_filtration.push_back(2);
- expected_filtration.push_back(2);
- expected_filtration.push_back(2);
- expected_filtration.push_back(3);
- expected_filtration.push_back(3);
-
-
- std::vector< unsigned > sizes(2);
- sizes[0] = 2;
- sizes[1] = 2;
-
- std::vector< double > data(4);
- data[0] = 0;
- data[1] = 1;
- data[2] = 2;
- data[3] = 3;
-
- Bitmap_cubical_complex_base ba( sizes , data );
- int i = 0;
-
- Bitmap_cubical_complex_base::Top_dimensional_cells_range range = ba.top_dimensional_cells_range();
- for ( Bitmap_cubical_complex_base::Top_dimensional_cells_iterator it = range.begin() ; it != range.end() ; ++it )
- {
- BOOST_CHECK( data[i] == ba.get_cell_data( *it ) );
- BOOST_CHECK( ba.get_dimension_of_a_cell( *it ) == 2 );
- ++i;
+BOOST_AUTO_TEST_CASE(compute_incidence_between_cells_test) {
+ std::vector<unsigned> sizes(3);
+ sizes[0] = 3;
+ sizes[1] = 3;
+ sizes[2] = 3;
+
+ std::vector<double> data(27, 0);
+
+  int number_of_all_elements = (2 * sizes[0] + 1) * (2 * sizes[1] + 1) * (2 * sizes[2] + 1);
+ Bitmap_cubical_complex_base ba(sizes, data);
+ std::vector<int> elems_in_boundary(number_of_all_elements, 0);
+ for (Bitmap_cubical_complex_base::All_cells_iterator it = ba.all_cells_iterator_begin();
+ it != ba.all_cells_iterator_end(); ++it) {
+ Bitmap_cubical_complex_base::Boundary_range bdrange = ba.boundary_range(*it);
+ for (Bitmap_cubical_complex_base::Boundary_iterator bd = bdrange.begin(); bd != bdrange.end(); ++bd) {
+ Bitmap_cubical_complex_base::Boundary_range second_bdrange = ba.boundary_range(*bd);
+ for (Bitmap_cubical_complex_base::Boundary_iterator bd2 = second_bdrange.begin(); bd2 != second_bdrange.end();
+ ++bd2) {
+ elems_in_boundary[*bd2] +=
+ ba.compute_incidence_between_cells(*it, *bd) * ba.compute_incidence_between_cells(*bd, *bd2);
+ }
}
+ // the boundary of a boundary must vanish, so every entry accumulated in elems_in_boundary has to be zero
+ for (size_t i = 0; i != elems_in_boundary.size(); ++i) {
+ BOOST_CHECK(elems_in_boundary[i] == 0);
+ }
+ }
}
+BOOST_AUTO_TEST_CASE(compute_incidence_between_cells_test_periodic_boundary_conditions) {
+ std::vector<unsigned> sizes(3);
+ sizes[0] = 3;
+ sizes[1] = 3;
+ sizes[2] = 3;
+
+ std::vector<bool> directions_of_periodicity(3, true);
+ std::vector<double> data(27, 0);
+
+ int number_of_all_elements = (2 * sizes[0]) * (2 * sizes[1]) * (2 * sizes[2]);
+ Bitmap_cubical_complex_periodic_boundary_conditions ba(sizes, data, directions_of_periodicity);
+
+ std::vector<int> elems_in_boundary(number_of_all_elements, 0);
+ for (Bitmap_cubical_complex_periodic_boundary_conditions::All_cells_iterator it = ba.all_cells_iterator_begin();
+ it != ba.all_cells_iterator_end(); ++it) {
+ Bitmap_cubical_complex_periodic_boundary_conditions_base::Boundary_range bdrange = ba.boundary_range(*it);
+ for (Bitmap_cubical_complex_periodic_boundary_conditions::Boundary_iterator bd = bdrange.begin();
+ bd != bdrange.end(); ++bd) {
+ Bitmap_cubical_complex_periodic_boundary_conditions::Boundary_range second_bdrange = ba.boundary_range(*bd);
+ for (Bitmap_cubical_complex_periodic_boundary_conditions::Boundary_iterator bd2 = second_bdrange.begin();
+ bd2 != second_bdrange.end(); ++bd2) {
+ elems_in_boundary[*bd2] +=
+ ba.compute_incidence_between_cells(*it, *bd) * ba.compute_incidence_between_cells(*bd, *bd2);
+ }
+ }
+ // the boundary of a boundary must vanish, so every entry accumulated in elems_in_boundary has to be zero
+ for (size_t i = 0; i != elems_in_boundary.size(); ++i) {
+ BOOST_CHECK(elems_in_boundary[i] == 0);
+ }
+ }
+}
diff --git a/src/Bitmap_cubical_complex/utilities/CMakeLists.txt b/src/Bitmap_cubical_complex/utilities/CMakeLists.txt
new file mode 100644
index 00000000..676a730a
--- /dev/null
+++ b/src/Bitmap_cubical_complex/utilities/CMakeLists.txt
@@ -0,0 +1,29 @@
+cmake_minimum_required(VERSION 2.6)
+project(Bitmap_cubical_complex_utilities)
+
+add_executable ( cubical_complex_persistence cubical_complex_persistence.cpp )
+if (TBB_FOUND)
+ target_link_libraries(cubical_complex_persistence ${TBB_LIBRARIES})
+endif()
+
+add_test(NAME Bitmap_cubical_complex_utility_persistence_one_sphere COMMAND $<TARGET_FILE:cubical_complex_persistence>
+ "${CMAKE_SOURCE_DIR}/data/bitmap/CubicalOneSphere.txt")
+
+add_test(NAME Bitmap_cubical_complex_utility_persistence_two_sphere COMMAND $<TARGET_FILE:cubical_complex_persistence>
+ "${CMAKE_SOURCE_DIR}/data/bitmap/CubicalTwoSphere.txt")
+
+add_executable ( periodic_cubical_complex_persistence periodic_cubical_complex_persistence.cpp )
+if (TBB_FOUND)
+ target_link_libraries(periodic_cubical_complex_persistence ${TBB_LIBRARIES})
+endif()
+
+add_test(NAME Bitmap_cubical_complex_utility_periodic_boundary_conditions_2d_torus
+ COMMAND $<TARGET_FILE:periodic_cubical_complex_persistence>
+ "${CMAKE_SOURCE_DIR}/data/bitmap/2d_torus.txt")
+
+add_test(NAME Bitmap_cubical_complex_utility_periodic_boundary_conditions_3d_torus
+ COMMAND $<TARGET_FILE:periodic_cubical_complex_persistence>
+ "${CMAKE_SOURCE_DIR}/data/bitmap/3d_torus.txt")
+
+install(TARGETS cubical_complex_persistence DESTINATION bin)
+install(TARGETS periodic_cubical_complex_persistence DESTINATION bin)
diff --git a/src/Bitmap_cubical_complex/utilities/README b/src/Bitmap_cubical_complex/utilities/README
new file mode 100644
index 00000000..ddff7034
--- /dev/null
+++ b/src/Bitmap_cubical_complex/utilities/README
@@ -0,0 +1,18 @@
+# Bitmap_cubical_complex #
+
+## `cubical_complex_persistence` ##
+This program computes the persistent homology of cubical complexes provided in Perseus-style text files, using the Bitmap_cubical_complex class. See [here](http://gudhi.gforge.inria.fr/doc/latest/fileformats.html#FileFormatsPerseus) for a description of the file format.
+
+Example:
+
+* Creates a cubical complex from the Perseus-style file `CubicalTwoSphere.txt`, computes its persistent cohomology and writes the result to the persistence file `CubicalTwoSphere.txt_persistence`:
+`cubical_complex_persistence data/bitmap/CubicalTwoSphere.txt`
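+
+For reference, here is what a (hypothetical) minimal Perseus-style input could look like for a 2x2 bitmap whose four top-dimensional cells have filtration values 0, 1, 2 and 3 (first line: dimension, next lines: number of cells per direction, then the filtration values in lexicographical order):
+
+```
+2
+2
+2
+0
+1
+2
+3
+```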
+
+## `periodic_cubical_complex_persistence` ##
+
+Same as above, but with periodic boundary conditions.
+
+Example:
+
+* Creates a periodic cubical complex from the Perseus-style file `3d_torus.txt`, computes its persistent cohomology and writes the result to the persistence file `3d_torus.txt_persistence`:
+`periodic_cubical_complex_persistence data/bitmap/3d_torus.txt`
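+
+The periodicity itself is encoded in the input file: if our reading of the file-format page linked above is correct, a negative size along a direction marks that direction as periodic, so a hypothetical 2x2 bitmap periodic in both directions would start with the lines `2`, `-2`, `-2` instead of `2`, `2`, `2`.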
diff --git a/src/Bitmap_cubical_complex/example/Bitmap_cubical_complex.cpp b/src/Bitmap_cubical_complex/utilities/cubical_complex_persistence.cpp
index 67735ba1..9d1bc08c 100644
--- a/src/Bitmap_cubical_complex/example/Bitmap_cubical_complex.cpp
+++ b/src/Bitmap_cubical_complex/utilities/cubical_complex_persistence.cpp
@@ -20,7 +20,6 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-
#include <gudhi/reader_utils.h>
#include <gudhi/Bitmap_cubical_complex.h>
#include <gudhi/Persistent_cohomology.h>
@@ -32,16 +31,18 @@
#include <cstddef>
int main(int argc, char** argv) {
- std::cout << "This program computes persistent homology, by using bitmap_cubical_complex class, of cubical " <<
- "complexes provided in text files in Perseus style (the only numbered in the first line is a dimension D of a" <<
- "bitmap. In the lines I between 2 and D+1 there are numbers of top dimensional cells in the direction I. Let " <<
- "N denote product of the numbers in the lines between 2 and D. In the lines D+2 to D+2+N there are " <<
- "filtrations of top dimensional cells. We assume that the cells are in the lexicographical order. See " <<
- "CubicalOneSphere.txt or CubicalTwoSphere.txt for example.\n" << std::endl;
+ std::cout
+ << "This program computes persistent homology, by using the Bitmap_cubical_complex class, of cubical "
+ << "complexes provided in text files in Perseus style (the only number in the first line is the dimension D of "
+ << "the bitmap. In the lines I between 2 and D+1 there are numbers of top dimensional cells in the direction I. "
+ << "Let N denote the product of the numbers in the lines between 2 and D+1. In the lines D+2 to D+2+N there are "
+ << "filtrations of top dimensional cells. We assume that the cells are in the lexicographical order. See "
+ << "CubicalOneSphere.txt or CubicalTwoSphere.txt for example.\n"
+ << std::endl;
if (argc != 2) {
- std::cerr << "Wrong number of parameters. Please provide the name of a file with a Perseus style bitmap at " <<
- "the input. The program will now terminate.\n";
+ std::cerr << "Wrong number of parameters. Please provide the name of a file with a Perseus style bitmap at "
+ << "the input. The program will now terminate.\n";
return 1;
}
@@ -54,7 +55,7 @@ int main(int argc, char** argv) {
// Compute the persistence diagram of the complex
Persistent_cohomology pcoh(b);
- int p = 2;
+ int p = 11;
double min_persistence = 0;
pcoh.init_coefficients(p); // initializes the coefficient field for homology
@@ -66,7 +67,7 @@ int main(int argc, char** argv) {
std::size_t last_in_path = output_file_name.find_last_of("/\\");
if (last_in_path != std::string::npos) {
- output_file_name = output_file_name.substr(last_in_path+1);
+ output_file_name = output_file_name.substr(last_in_path + 1);
}
std::ofstream out(output_file_name.c_str());
@@ -77,4 +78,3 @@ int main(int argc, char** argv) {
return 0;
}
-
diff --git a/src/Bitmap_cubical_complex/example/Bitmap_cubical_complex_periodic_boundary_conditions.cpp b/src/Bitmap_cubical_complex/utilities/periodic_cubical_complex_persistence.cpp
index f8754345..c812cb3a 100644
--- a/src/Bitmap_cubical_complex/example/Bitmap_cubical_complex_periodic_boundary_conditions.cpp
+++ b/src/Bitmap_cubical_complex/utilities/periodic_cubical_complex_persistence.cpp
@@ -20,7 +20,6 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-
#include <gudhi/reader_utils.h>
#include <gudhi/Bitmap_cubical_complex.h>
#include <gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h>
@@ -30,24 +29,27 @@
#include <iostream>
#include <sstream>
#include <vector>
+#include <string>
int main(int argc, char** argv) {
- std::cout << "This program computes persistent homology, by using " <<
- "Bitmap_cubical_complex_periodic_boundary_conditions class, of cubical complexes provided in text files in " <<
- "Perseus style (the only numbered in the first line is a dimension D of a bitmap. In the lines I between 2 " <<
- "and D+1 there are numbers of top dimensional cells in the direction I. Let N denote product of the numbers " <<
- "in the lines between 2 and D. In the lines D+2 to D+2+N there are filtrations of top dimensional cells. We " <<
- "assume that the cells are in the lexicographical order. See CubicalOneSphere.txt or CubicalTwoSphere.txt for" <<
- " example.\n" << std::endl;
+ std::cout
+ << "This program computes persistent homology, by using the "
+ << "Bitmap_cubical_complex_periodic_boundary_conditions class, of cubical complexes provided in text files in "
+ << "Perseus style (the only number in the first line is the dimension D of the bitmap. In the lines I between 2 "
+ << "and D+1 there are numbers of top dimensional cells in the direction I. Let N denote the product of the "
+ << "numbers in the lines between 2 and D+1. In the lines D+2 to D+2+N there are filtrations of top dimensional "
+ << "cells. We assume that the cells are in the lexicographical order. See CubicalOneSphere.txt or "
+ << "CubicalTwoSphere.txt for example.\n"
+ << std::endl;
if (argc != 2) {
- std::cerr << "Wrong number of parameters. Please provide the name of a file with a Perseus style bitmap at " <<
- "the input. The program will now terminate.\n";
+ std::cerr << "Wrong number of parameters. Please provide the name of a file with a Perseus style bitmap at "
+ << "the input. The program will now terminate.\n";
return 1;
}
typedef Gudhi::cubical_complex::Bitmap_cubical_complex_periodic_boundary_conditions_base<double> Bitmap_base;
- typedef Gudhi::cubical_complex::Bitmap_cubical_complex< Bitmap_base > Bitmap_cubical_complex;
+ typedef Gudhi::cubical_complex::Bitmap_cubical_complex<Bitmap_base> Bitmap_cubical_complex;
Bitmap_cubical_complex b(argv[1]);
@@ -56,7 +58,7 @@ int main(int argc, char** argv) {
// Compute the persistence diagram of the complex
Persistent_cohomology pcoh(b, true);
- int p = 2;
+ int p = 11;
double min_persistence = 0;
pcoh.init_coefficients(p); // initializes the coefficient field for homology
pcoh.compute_persistent_cohomology(min_persistence);
@@ -67,7 +69,7 @@ int main(int argc, char** argv) {
std::size_t last_in_path = output_file_name.find_last_of("/\\");
if (last_in_path != std::string::npos) {
- output_file_name = output_file_name.substr(last_in_path+1);
+ output_file_name = output_file_name.substr(last_in_path + 1);
}
std::ofstream out(output_file_name.c_str());
@@ -78,4 +80,3 @@ int main(int argc, char** argv) {
return 0;
}
-
diff --git a/src/Bottleneck_distance/benchmark/CMakeLists.txt b/src/Bottleneck_distance/benchmark/CMakeLists.txt
index 170081ce..20a4e47b 100644
--- a/src/Bottleneck_distance/benchmark/CMakeLists.txt
+++ b/src/Bottleneck_distance/benchmark/CMakeLists.txt
@@ -1,9 +1,9 @@
cmake_minimum_required(VERSION 2.6)
project(Bottleneck_distance_benchmark)
-if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
+if (NOT CGAL_VERSION VERSION_LESS 4.8.1)
add_executable ( bottleneck_chrono bottleneck_chrono.cpp )
if (TBB_FOUND)
target_link_libraries(bottleneck_chrono ${TBB_LIBRARIES})
endif(TBB_FOUND)
-endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
+endif(NOT CGAL_VERSION VERSION_LESS 4.8.1)
diff --git a/src/Bottleneck_distance/example/CMakeLists.txt b/src/Bottleneck_distance/example/CMakeLists.txt
index dc1da31c..6095d6eb 100644
--- a/src/Bottleneck_distance/example/CMakeLists.txt
+++ b/src/Bottleneck_distance/example/CMakeLists.txt
@@ -1,30 +1,22 @@
cmake_minimum_required(VERSION 2.6)
project(Bottleneck_distance_examples)
-if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
- add_executable (bottleneck_read_file_example bottleneck_read_file_example.cpp)
+if (NOT CGAL_VERSION VERSION_LESS 4.8.1)
add_executable (bottleneck_basic_example bottleneck_basic_example.cpp)
-
add_executable (alpha_rips_persistence_bottleneck_distance alpha_rips_persistence_bottleneck_distance.cpp)
target_link_libraries(alpha_rips_persistence_bottleneck_distance ${Boost_PROGRAM_OPTIONS_LIBRARY})
+
if (TBB_FOUND)
- target_link_libraries(bottleneck_read_file_example ${TBB_LIBRARIES})
- target_link_libraries(bottleneck_basic_example ${TBB_LIBRARIES})
target_link_libraries(alpha_rips_persistence_bottleneck_distance ${TBB_LIBRARIES})
+ target_link_libraries(bottleneck_basic_example ${TBB_LIBRARIES})
endif(TBB_FOUND)
-
+
add_test(NAME Bottleneck_distance_example_basic COMMAND $<TARGET_FILE:bottleneck_basic_example>)
-
add_test(NAME Bottleneck_distance_example_alpha_rips_persistence_bottleneck
COMMAND $<TARGET_FILE:alpha_rips_persistence_bottleneck_distance>
"${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" "-r" "0.15" "-m" "0.12" "-d" "3" "-p" "3")
- add_test(NAME Bottleneck_read_file_example
- COMMAND $<TARGET_FILE:bottleneck_read_file_example>
- "${CMAKE_SOURCE_DIR}/data/persistence_diagram/first.pers" "${CMAKE_SOURCE_DIR}/data/persistence_diagram/second.pers")
-
- install(TARGETS bottleneck_read_file_example DESTINATION bin)
install(TARGETS bottleneck_basic_example DESTINATION bin)
install(TARGETS alpha_rips_persistence_bottleneck_distance DESTINATION bin)
-
-endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
+
+endif (NOT CGAL_VERSION VERSION_LESS 4.8.1)
diff --git a/src/Bottleneck_distance/example/README b/src/Bottleneck_distance/example/README
new file mode 100644
index 00000000..01bcd74a
--- /dev/null
+++ b/src/Bottleneck_distance/example/README
@@ -0,0 +1,19 @@
+# Bottleneck_distance #
+
+## `alpha_rips_persistence_bottleneck_distance` ##
+This program computes the persistent homology with coefficient field Z/pZ of a Rips complex defined on a set of input points. The output diagram contains one bar per line, written with the convention:
+
+`p dim birth death`
+
+where `dim` is the dimension of the homological feature, `birth` and `death` are respectively the birth and death of the feature, and `p` is the characteristic of the field *Z/pZ* used for homology coefficients.
+
+Usage:
+`alpha_rips_persistence_bottleneck_distance [options] <OFF input file>`
+
+Allowed options:
+
+* `-h [ --help ]` Produce help message
+* `-r [ --max-edge-length ]` (default = inf) Maximal length of an edge for the Rips complex construction.
+* `-d [ --cpx-dimension ]` (default = 1) Maximal dimension of the Rips complex we want to compute.
+* `-p [ --field-charac ]` (default = 11) Characteristic p of the coefficient field Z/pZ for computing homology.
+* `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals.
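+
+Example (the invocation used by the test added in this module's CMakeLists, with paths given relative to the source tree):
+`alpha_rips_persistence_bottleneck_distance data/points/tore3D_1307.off -r 0.15 -m 0.12 -d 3 -p 3`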
diff --git a/src/Bottleneck_distance/include/gudhi/Neighbors_finder.h b/src/Bottleneck_distance/include/gudhi/Neighbors_finder.h
index bdc47578..a6b9b021 100644
--- a/src/Bottleneck_distance/include/gudhi/Neighbors_finder.h
+++ b/src/Bottleneck_distance/include/gudhi/Neighbors_finder.h
@@ -44,16 +44,16 @@ struct Square_query {
typedef Internal_point Point_d;
typedef double FT;
bool contains(Point_d p) const {
- return std::abs(p.x()-c.x())<=size && std::abs(p.y()-c.y())<=size;
+ return std::abs(p.x()-c.x()) <= size && std::abs(p.y()-c.y()) <= size;
}
- bool inner_range_intersects(CGAL::Kd_tree_rectangle<FT,D> const&r) const {
+ bool inner_range_intersects(CGAL::Kd_tree_rectangle<FT, D> const&r) const {
return
r.max_coord(0) >= c.x() - size &&
r.min_coord(0) <= c.x() + size &&
r.max_coord(1) >= c.y() - size &&
r.min_coord(1) <= c.y() + size;
}
- bool outer_range_contains(CGAL::Kd_tree_rectangle<FT,D> const&r) const {
+ bool outer_range_contains(CGAL::Kd_tree_rectangle<FT, D> const&r) const {
return
r.min_coord(0) >= c.x() - size &&
r.max_coord(0) <= c.x() + size &&
@@ -146,7 +146,7 @@ inline int Neighbors_finder::pull_near(int u_point_index) {
// Is the query point near to a V point in the plane ?
Internal_point u_point = g.get_u_point(u_point_index);
auto neighbor = kd_t.search_any_point(Square_query{u_point, r});
- if(!neighbor)
+ if (!neighbor)
return null_point_index();
tmp = neighbor->point_index;
auto point = g.get_v_point(tmp);
diff --git a/src/Bottleneck_distance/test/CMakeLists.txt b/src/Bottleneck_distance/test/CMakeLists.txt
index a165d472..2676b82c 100644
--- a/src/Bottleneck_distance/test/CMakeLists.txt
+++ b/src/Bottleneck_distance/test/CMakeLists.txt
@@ -1,7 +1,7 @@
cmake_minimum_required(VERSION 2.6)
project(Bottleneck_distance_tests)
-if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
+if (NOT CGAL_VERSION VERSION_LESS 4.8.1)
include(GUDHI_test_coverage)
add_executable ( Bottleneck_distance_test_unit bottleneck_unit_test.cpp )
@@ -12,4 +12,4 @@ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
gudhi_add_coverage_test(Bottleneck_distance_test_unit)
-endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
+endif (NOT CGAL_VERSION VERSION_LESS 4.8.1)
diff --git a/src/Bottleneck_distance/utilities/CMakeLists.txt b/src/Bottleneck_distance/utilities/CMakeLists.txt
new file mode 100644
index 00000000..d19e3b1c
--- /dev/null
+++ b/src/Bottleneck_distance/utilities/CMakeLists.txt
@@ -0,0 +1,16 @@
+cmake_minimum_required(VERSION 2.6)
+project(Bottleneck_distance_utilities)
+
+if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
+ add_executable (bottleneck_distance bottleneck_distance.cpp)
+ if (TBB_FOUND)
+ target_link_libraries(bottleneck_distance ${TBB_LIBRARIES})
+ endif(TBB_FOUND)
+
+ add_test(NAME Bottleneck_distance_utilities_Bottleneck_read_file
+ COMMAND $<TARGET_FILE:bottleneck_distance>
+ "${CMAKE_SOURCE_DIR}/data/persistence_diagram/first.pers" "${CMAKE_SOURCE_DIR}/data/persistence_diagram/second.pers")
+
+ install(TARGETS bottleneck_distance DESTINATION bin)
+
+endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
diff --git a/src/Bottleneck_distance/utilities/README b/src/Bottleneck_distance/utilities/README
new file mode 100644
index 00000000..d9fdd252
--- /dev/null
+++ b/src/Bottleneck_distance/utilities/README
@@ -0,0 +1,10 @@
+# Bottleneck_distance #
+
+## `bottleneck_distance` ##
+This program computes the Bottleneck distance between two persistence diagrams provided as text files.
+
+Usage:
+`bottleneck_distance <file_1.pers> <file_2.pers> [<tolerance>]`
+where
+`<file_1.pers>` and `<file_2.pers>` must be in the format described [here](http://gudhi.gforge.inria.fr/doc/latest/fileformats.html#FileFormatsPers).
+`<tolerance>` is an error bound on the bottleneck distance (set by default to the smallest positive double value).
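+
+Example (matching the test added in this module's CMakeLists):
+`bottleneck_distance data/persistence_diagram/first.pers data/persistence_diagram/second.pers`
+
+Each input file lists one birth-death pair per line; a minimal, purely hypothetical diagram file could be:
+
+```
+2.7 3.7
+9.6 14.
+34.2 34.974
+```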
diff --git a/src/Bottleneck_distance/example/bottleneck_read_file_example.cpp b/src/Bottleneck_distance/utilities/bottleneck_distance.cpp
index 238d99ad..9dd52b31 100644
--- a/src/Bottleneck_distance/example/bottleneck_read_file_example.cpp
+++ b/src/Bottleneck_distance/utilities/bottleneck_distance.cpp
@@ -25,21 +25,21 @@
#include <iostream>
#include <vector>
#include <utility> // for pair
-#include <fstream>
-#include <sstream>
#include <string>
+#include <limits> // for numeric_limits
int main(int argc, char** argv) {
if (argc < 3) {
- std::cout << "To run this program please provide as an input two files with persistence diagrams. Each file " <<
- "should contain a birth-death pair per line. Third, optional parameter is an error bound on a bottleneck" <<
- " distance (set by default to zero). The program will now terminate \n";
+ std::cout << "To run this program please provide as an input two files with persistence diagrams. Each file" <<
+ " should contain a birth-death pair per line. Third, optional parameter is an error bound on the bottleneck" <<
+ " distance (set by default to the smallest positive double value). If you set the error bound to 0, be" <<
+ " aware this version is exact but expensive. The program will now terminate \n";
return -1;
}
- std::vector<std::pair<double, double>> diag1 = read_persistence_intervals_in_dimension(argv[1]);
- std::vector<std::pair<double, double>> diag2 = read_persistence_intervals_in_dimension(argv[2]);
+ std::vector<std::pair<double, double>> diag1 = Gudhi::read_persistence_intervals_in_dimension(argv[1]);
+ std::vector<std::pair<double, double>> diag2 = Gudhi::read_persistence_intervals_in_dimension(argv[2]);
- double tolerance = 0.;
+ double tolerance = std::numeric_limits<double>::min();
if (argc == 4) {
tolerance = atof(argv[3]);
}
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index e02d3a2c..94587044 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -25,6 +25,7 @@ add_gudhi_module(Spatial_searching)
add_gudhi_module(Subsampling)
add_gudhi_module(Tangential_complex)
add_gudhi_module(Witness_complex)
+add_gudhi_module(Nerve_GIC)
message("++ GUDHI_MODULES list is:\"${GUDHI_MODULES}\"")
diff --git a/src/Contraction/example/CMakeLists.txt b/src/Contraction/example/CMakeLists.txt
index 83594c0e..a92d1685 100644
--- a/src/Contraction/example/CMakeLists.txt
+++ b/src/Contraction/example/CMakeLists.txt
@@ -1,9 +1,10 @@
cmake_minimum_required(VERSION 2.6)
project(Contraction_examples)
-
add_executable(RipsContraction Rips_contraction.cpp)
+
add_executable(GarlandHeckbert Garland_heckbert.cpp)
+target_link_libraries(GarlandHeckbert ${Boost_TIMER_LIBRARY})
add_test(NAME Contraction_example_tore3D_0.2 COMMAND $<TARGET_FILE:RipsContraction>
"${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" "0.2")
diff --git a/src/Contraction/example/Garland_heckbert.cpp b/src/Contraction/example/Garland_heckbert.cpp
index f0cde95e..2b0dc973 100644
--- a/src/Contraction/example/Garland_heckbert.cpp
+++ b/src/Contraction/example/Garland_heckbert.cpp
@@ -29,7 +29,6 @@
#include <gudhi/Edge_contraction.h>
#include <gudhi/Skeleton_blocker.h>
#include <gudhi/Off_reader.h>
-#include <gudhi/Clock.h>
#include <iostream>
@@ -165,8 +164,6 @@ int main(int argc, char *argv[]) {
int num_contractions = atoi(argv[3]);
- Gudhi::Clock contraction_chrono("Time to simplify and enumerate simplices");
-
// constructs the contractor object with Garland Heckbert policies.
Complex_contractor contractor(complex,
new GH_cost(complex),
@@ -182,8 +179,6 @@ int main(int argc, char *argv[]) {
complex.num_edges() << " edges and " <<
complex.num_triangles() << " triangles." << std::endl;
- std::cout << contraction_chrono;
-
// write simplified complex
Gudhi::skeleton_blocker::Skeleton_blocker_off_writer<Complex> off_writer(argv[2], complex);
diff --git a/src/Doxyfile b/src/Doxyfile
index eb0b3e9e..429bf6a1 100644
--- a/src/Doxyfile
+++ b/src/Doxyfile
@@ -38,7 +38,7 @@ PROJECT_NAME = "GUDHI"
# could be handy for archiving the generated documentation or if some version
# control system is used.
-PROJECT_NUMBER = "2.0.0"
+PROJECT_NUMBER = "2.0.1"
# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a
@@ -819,7 +819,8 @@ EXCLUDE_SYMBOLS =
# command).
EXAMPLE_PATH = biblio/ \
- example/
+ example/ \
+ utilities/
# If the value of the EXAMPLE_PATH tag contains directories, you can use the
# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and
@@ -852,6 +853,7 @@ IMAGE_PATH = doc/Skeleton_blocker/ \
doc/Spatial_searching/ \
doc/Tangential_complex/ \
doc/Bottleneck_distance/ \
+ doc/Nerve_GIC/ \
doc/Persistence_representations/
# The INPUT_FILTER tag can be used to specify a program that doxygen should
@@ -2116,7 +2118,7 @@ COLLABORATION_GRAPH = NO
# The default value is: YES.
# This tag requires that the tag HAVE_DOT is set to YES.
-GROUP_GRAPHS = NO
+GROUP_GRAPHS = YES
# If the UML_LOOK tag is set to YES doxygen will generate inheritance and
# collaboration diagrams in a style similar to the OMG's Unified Modeling
diff --git a/src/GudhUI/CMakeLists.txt b/src/GudhUI/CMakeLists.txt
index 57861946..374195d0 100644
--- a/src/GudhUI/CMakeLists.txt
+++ b/src/GudhUI/CMakeLists.txt
@@ -5,28 +5,6 @@ find_package(Qt5 COMPONENTS Widgets Xml OpenGL)
find_package(QGLViewer)
find_package(OpenGL)
-if (CGAL_VERSION VERSION_EQUAL 4.8.0)
- message(ERROR " GudhUI does not compile with CGAL 4.8.0. 4.8.1, 4.8.2 and 4.9 are OK.")
-endif()
-
-if (NOT CGAL_FOUND)
- message(ERROR " GudhUI requires CGAL and will not be compiled.")
-endif()
-
-if (NOT Qt5_FOUND)
- message(ERROR " GudhUI requires Qt5 and will not be compiled.")
-endif()
-
-if (NOT OPENGL_FOUND)
- message(ERROR " GudhUI requires OpenGL and will not be compiled.")
-endif()
-
-if (NOT QGLVIEWER_FOUND)
- message(ERROR " GudhUI requires QGLViewer and will not be compiled.")
-endif()
-
-
-
if ( CGAL_FOUND AND Qt5_FOUND AND OPENGL_FOUND AND QGLVIEWER_FOUND AND NOT CGAL_VERSION VERSION_EQUAL 4.8.0)
set(CMAKE_AUTOMOC ON)
@@ -37,8 +15,6 @@ if ( CGAL_FOUND AND Qt5_FOUND AND OPENGL_FOUND AND QGLVIEWER_FOUND AND NOT CGAL_
SET(Boost_USE_MULTITHREAD OFF)
include_directories (${QGLVIEWER_INCLUDE_DIR})
- #####################################################################
-
add_executable ( GudhUI
gui/gudhui.cpp
gui/MainWindow.cpp
@@ -52,14 +28,10 @@ if ( CGAL_FOUND AND Qt5_FOUND AND OPENGL_FOUND AND QGLVIEWER_FOUND AND NOT CGAL_
target_link_libraries( GudhUI Qt5::Widgets Qt5::Xml Qt5::OpenGL )
target_link_libraries( GudhUI ${QGLVIEWER_LIBRARIES} )
target_link_libraries( GudhUI ${OPENGL_gl_LIBRARY} ${OPENGL_glu_LIBRARY} )
-if (TBB_FOUND)
- target_link_libraries( GudhUI ${TBB_LIBRARIES})
-endif()
+ if (TBB_FOUND)
+ target_link_libraries( GudhUI ${TBB_LIBRARIES})
+ endif()
install(TARGETS GudhUI DESTINATION bin)
-###############################################################################
-
-else()
- message(STATUS "NOTICE: GudhUI requires CGAL, the QGLViewer, OpenGL and Qt5, and will not be compiled.")
endif()
diff --git a/src/Hasse_complex/include/gudhi/Hasse_complex.h b/src/Hasse_complex/include/gudhi/Hasse_complex.h
index 8b06b771..e67f7609 100644
--- a/src/Hasse_complex/include/gudhi/Hasse_complex.h
+++ b/src/Hasse_complex/include/gudhi/Hasse_complex.h
@@ -30,6 +30,7 @@
#include <algorithm>
#include <utility> // for pair
#include <vector>
+#include <limits> // for infinity value
#ifdef GUDHI_USE_TBB
#include <tbb/parallel_for.h>
@@ -104,7 +105,6 @@ class Hasse_complex {
Hasse_complex(Complex_ds & cpx)
: complex_(cpx.num_simplices())
, vertices_()
- , threshold_(cpx.filtration())
, num_vertices_()
, dim_max_(cpx.dimension()) {
int size = complex_.size();
@@ -125,7 +125,6 @@ class Hasse_complex {
Hasse_complex()
: complex_()
, vertices_()
- , threshold_(0)
, num_vertices_(0)
, dim_max_(-1) { }
@@ -157,15 +156,11 @@ class Hasse_complex {
Filtration_value filtration(Simplex_handle sh) {
if (sh == null_simplex()) {
- return filtration();
+ return std::numeric_limits<Filtration_value>::infinity();
}
return complex_[sh].filtration_;
}
- Filtration_value filtration() {
- return threshold_;
- }
-
int dimension(Simplex_handle sh) {
if (complex_[sh].boundary_.empty()) return 0;
return complex_[sh].boundary_.size() - 1;
@@ -206,7 +201,6 @@ class Hasse_complex {
std::vector< Hasse_simp, Gudhi::no_init_allocator<Hasse_simp> > complex_;
std::vector<Simplex_handle> vertices_;
- Filtration_value threshold_;
size_t num_vertices_;
int dim_max_;
};
@@ -245,7 +239,6 @@ std::istream& operator>>(std::istream & is
}
hcpx.dim_max_ = max_dim;
- hcpx.threshold_ = max_fil;
return is;
}
diff --git a/src/Nerve_GIC/doc/COPYRIGHT b/src/Nerve_GIC/doc/COPYRIGHT
new file mode 100644
index 00000000..0c36a526
--- /dev/null
+++ b/src/Nerve_GIC/doc/COPYRIGHT
@@ -0,0 +1,19 @@
+The files of this directory are part of the Gudhi Library. The Gudhi library
+(Geometric Understanding in Higher Dimensions) is a generic C++ library for
+computational topology.
+
+Author(s): Mathieu Carrière
+
+Copyright (C) 2017 INRIA
+
+This program is free software: you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free Software
+Foundation, either version 3 of the License, or (at your option) any later
+version.
+
+This program is distributed in the hope that it will be useful, but WITHOUT
+ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License along with
+this program. If not, see <http://www.gnu.org/licenses/>.
diff --git a/src/Nerve_GIC/doc/GIC.jpg b/src/Nerve_GIC/doc/GIC.jpg
new file mode 100644
index 00000000..cb1b9b7f
--- /dev/null
+++ b/src/Nerve_GIC/doc/GIC.jpg
Binary files differ
diff --git a/src/Nerve_GIC/doc/GIC.pdf b/src/Nerve_GIC/doc/GIC.pdf
new file mode 100644
index 00000000..30525745
--- /dev/null
+++ b/src/Nerve_GIC/doc/GIC.pdf
Binary files differ
diff --git a/src/Nerve_GIC/doc/Intro_graph_induced_complex.h b/src/Nerve_GIC/doc/Intro_graph_induced_complex.h
new file mode 100644
index 00000000..3a0d8154
--- /dev/null
+++ b/src/Nerve_GIC/doc/Intro_graph_induced_complex.h
@@ -0,0 +1,216 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Mathieu Carriere
+ *
+ * Copyright (C) 2017 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef DOC_COVER_COMPLEX_INTRO_COVER_COMPLEX_H_
+#define DOC_COVER_COMPLEX_INTRO_COVER_COMPLEX_H_
+
+namespace Gudhi {
+
+namespace cover_complex {
+
+/** \defgroup cover_complex Cover complex
+ *
+ * \author Mathieu Carrière
+ *
+ * @{
+ *
+ * Visualizations of the simplicial complexes can be done with
+ * neato (from <a target="_blank" href="http://www.graphviz.org/">graphviz</a>),
+ * <a target="_blank" href="http://www.geomview.org/">geomview</a>, or
+ * <a target="_blank" href="https://github.com/MLWave/kepler-mapper">KeplerMapper</a>.
+ * Input point clouds are assumed to be
+ * <a target="_blank" href="http://www.geomview.org/docs/html/OFF.html">OFF files</a>.
+ *
+ * \section covers Covers
+ *
+ * Nerves and Graph Induced Complexes require a cover C of the input point cloud P,
+ * that is a set of subsets of P whose union is P itself.
+ * Very often, this cover is obtained from the preimage of a family of intervals covering
+ * the image of some scalar-valued function f defined on P. This family is parameterized
+ * by its resolution, which can be either the number or the length of the intervals,
+ * and its gain, which is the overlap percentage between consecutive intervals (ordered by their first values).
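+ *
+ * With the C++ interface used in the examples below, such a function cover is typically built as in the
+ * following partial sketch (adapted from the Nerve example of this module; the file name and parameter
+ * values are only illustrative, and the complex type and graph still have to be set as in the full examples):
+ *
+ * \code Gudhi::cover_complex::Cover_complex<std::vector<float> > SC;
+ * SC.read_point_cloud("human.off");            // input point cloud P
+ * SC.set_function_from_coordinate(2);          // scalar-valued function f = height (coordinate 2)
+ * SC.set_resolution_with_interval_number(10);  // resolution: 10 intervals
+ * SC.set_gain(0.3);                            // gain: 30% overlap between consecutive intervals
+ * SC.set_cover_from_function();                // cover C = preimages of the intervals
+ * \endcode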
+ *
+ * \section nerves Nerves
+ *
+ * \subsection nervedefinition Nerve definition
+ *
+ * Assume you are given a cover C of your point cloud P. Then, the Nerve of this cover
+ * is the simplicial complex with one vertex per cover element, and one k-simplex for every
+ * k+1 cover elements whose common intersection is non-empty.
+ * See also <a target="_blank" href="https://en.wikipedia.org/wiki/Nerve_of_a_covering"> Wikipedia </a>.
+ *
+ * \image html "nerve.png" "Nerve of a double torus"
+ *
+ * \subsection nerveexample Example
+ *
+ * This example builds the Nerve of a point cloud sampled on a 3D human shape (human.off).
+ * The cover C comes from the preimages of intervals (10 intervals with gain 0.3)
+ * covering the height function (coordinate 2),
+ * which are then refined into their connected components using the triangulation of the .OFF file.
+ *
+ * \include Nerve_GIC/Nerve.cpp
+ *
+ * When launching:
+ *
+ * \code $> ./Nerve ../../../../data/points/human.off 2 10 0.3 --v
+ * \endcode
+ *
+ * the program output is:
+ *
+ * \include Nerve_GIC/Nerve.txt
+ *
+ * The program also writes a file SC.txt. The first three lines in this file are the location of the input point cloud
+ * and the function used to compute the cover.
+ * The fourth line contains the number of vertices nv and edges ne of the Nerve.
+ * The next nv lines represent the vertices. Each line contains the vertex ID,
+ * the number of data points it contains, and their average color function value.
+ * Finally, the next ne lines represent the edges, characterized by the ID of their vertices.
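+ *
+ * For instance (with purely illustrative values), a vertex line and an edge line of SC.txt could read:
+ *
+ * \code 12 34 -0.75
+ * 12 13
+ * \endcode
+ *
+ * meaning that vertex 12 contains 34 data points whose average color value is -0.75, and that vertices 12
+ * and 13 are linked by an edge.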
+ *
+ * Using KeplerMapper, one can obtain the following visualization:
+ *
+ * \image html "nervevisu.jpg" "Visualization with KeplerMapper"
+ *
+ * \section gic Graph Induced Complexes (GIC)
+ *
+ * \subsection gicdefinition GIC definition
+ *
+ * Again, assume you are given a cover C of your point cloud P. Moreover, assume
+ * you are also given a graph G built on top of P. Then, for any clique in G
+ * whose nodes all belong to different elements of C, the GIC includes a corresponding
+ * simplex, whose dimension is the number of nodes in the clique minus one.
+ * See \cite Dey13 for more details.
+ *
+ * \image html "GIC.jpg" "GIC of a point cloud."
+ *
+ * \subsection gicexamplevor Example with cover from Voronoï
+ *
+ * This example builds the GIC of a point cloud sampled on a 3D human shape (human.off).
+ * We randomly subsampled 100 points in the point cloud, which act as seeds of
+ * a geodesic Voronoï diagram. Each cell of the diagram is then an element of C.
+ * The graph G (used to compute both the geodesics for Voronoï and the GIC)
+ * comes from the triangulation of the human shape. Note that the resulting simplicial complex is in dimension 3
+ * in this example.
+ *
+ * \include Nerve_GIC/VoronoiGIC.cpp
+ *
+ * When launching:
+ *
+ * \code $> ./VoronoiGIC ../../../../data/points/human.off 700 --v
+ * \endcode
+ *
+ * the program outputs SC.off. Using e.g.
+ *
+ * \code $> geomview SC.off
+ * \endcode
+ *
+ * one can obtain the following visualization:
+ *
+ * \image html "gicvoronoivisu.jpg" "Visualization with Geomview"
+ *
+ * \subsection functionalGICdefinition Functional GIC
+ *
+ * If one restricts to the cliques in G whose nodes all belong to preimages of consecutive
+ * intervals (assuming the cover of the height function is minimal, i.e. no more than
+ * two intervals can intersect at a time), the GIC is of dimension one, i.e. a graph.
+ * We call this graph the functional GIC. See \cite Carriere16 for more details.
+ *
+ * \subsection functionalGICexample Example
+ *
+ * Functional GIC comes with automatic selection of the Rips threshold,
+ * the resolution and the gain of the function cover. See \cite Carriere17c for more details. In this example,
+ * we compute the functional GIC of a Klein bottle embedded in R^5,
+ * where the graph G comes from a Rips complex with automatic threshold,
+ * and the cover C comes from the preimages of intervals covering the first coordinate,
+ * with automatic resolution and gain. Note that automatic threshold, resolution and gain
+ * can be computed as well for the Nerve.
+ *
+ * \include Nerve_GIC/CoordGIC.cpp
+ *
+ * When launching:
+ *
+ * \code $> ./CoordGIC ../../../../data/points/KleinBottle5D.off 0 --v
+ * \endcode
+ *
+ * the program outputs SC.dot. Using e.g.
+ *
+ * \code $> neato SC.dot -Tpdf -o SC.pdf
+ * \endcode
+ *
+ * one can obtain the following visualization:
+ *
+ * \image html "coordGICvisu2.jpg" "Visualization with Neato"
+ *
+ * where nodes are colored by the filter function values and, for each node, the first number is its ID
+ * and the second is the number of data points that it contains.
+ *
+ * We also provide an example on a set of 72 pictures taken around the same object (lucky_cat.off).
+ * The function is now the first eigenfunction given by PCA, whose values
+ * are written in a file (lucky_cat_PCA1). Threshold, resolution and gain are automatically selected as before.
+ *
+ * \include Nerve_GIC/FuncGIC.cpp
+ *
+ * When launching:
+ *
+ * \code $> ./FuncGIC ../../data/points/COIL_database/lucky_cat.off ../../data/points/COIL_database/lucky_cat_PCA1 --v
+ * \endcode
+ *
+ * the program outputs again SC.dot which gives the following visualization after using neato:
+ *
+ * \image html "funcGICvisu.jpg" "Visualization with neato"
+ *
+ * \copyright GNU General Public License v3.
+ * \verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim
+ */
+/** @} */ // end defgroup cover_complex
+
+} // namespace cover_complex
+
+} // namespace Gudhi
+
+#endif // DOC_COVER_COMPLEX_INTRO_COVER_COMPLEX_H_
+
+
+/* * \subsection gicexample Example with cover from function
+ *
+ * This example builds the GIC of a point cloud sampled on a 3D human shape (human.off).
+ * The cover C comes from the preimages of intervals (with length 0.075 and gain 0)
+ * covering the height function (coordinate 2),
+ * and the graph G comes from a Rips complex built with threshold 0.075.
+ * Note that if the gain is too big, the number of cliques increases a lot,
+ * which makes the computation time much larger.
+ *
+ * \include Nerve_GIC/GIC.cpp
+ *
+ * When launching:
+ *
+ * \code $> ./GIC ../../../../data/points/human.off 0.075 2 0.075 0 --v
+ * \endcode
+ *
+ * the program outputs SC.txt, which can be visualized with python and firefox as before:
+ *
+ * \image html "gicvisu.jpg" "Visualization with KeplerMapper"
+ * */
+
+
+/* * Using e.g.
+ *
+ * \code $> python KeplerMapperVisuFromTxtFile.py && firefox SC.html
+ * \endcode */
diff --git a/src/Nerve_GIC/doc/coordGICvisu.pdf b/src/Nerve_GIC/doc/coordGICvisu.pdf
new file mode 100644
index 00000000..313aa1b5
--- /dev/null
+++ b/src/Nerve_GIC/doc/coordGICvisu.pdf
Binary files differ
diff --git a/src/Nerve_GIC/doc/coordGICvisu2.jpg b/src/Nerve_GIC/doc/coordGICvisu2.jpg
new file mode 100644
index 00000000..046feb2a
--- /dev/null
+++ b/src/Nerve_GIC/doc/coordGICvisu2.jpg
Binary files differ
diff --git a/src/Nerve_GIC/doc/funcGICvisu.jpg b/src/Nerve_GIC/doc/funcGICvisu.jpg
new file mode 100644
index 00000000..f3da45ac
--- /dev/null
+++ b/src/Nerve_GIC/doc/funcGICvisu.jpg
Binary files differ
diff --git a/src/Nerve_GIC/doc/gicvisu.jpg b/src/Nerve_GIC/doc/gicvisu.jpg
new file mode 100644
index 00000000..576dae47
--- /dev/null
+++ b/src/Nerve_GIC/doc/gicvisu.jpg
Binary files differ
diff --git a/src/Nerve_GIC/doc/gicvoronoivisu.jpg b/src/Nerve_GIC/doc/gicvoronoivisu.jpg
new file mode 100644
index 00000000..cd86c411
--- /dev/null
+++ b/src/Nerve_GIC/doc/gicvoronoivisu.jpg
Binary files differ
diff --git a/src/Nerve_GIC/doc/nerve.png b/src/Nerve_GIC/doc/nerve.png
new file mode 100644
index 00000000..b66da4a4
--- /dev/null
+++ b/src/Nerve_GIC/doc/nerve.png
Binary files differ
diff --git a/src/Nerve_GIC/doc/nervevisu.jpg b/src/Nerve_GIC/doc/nervevisu.jpg
new file mode 100644
index 00000000..67ae1d7e
--- /dev/null
+++ b/src/Nerve_GIC/doc/nervevisu.jpg
Binary files differ
diff --git a/src/Nerve_GIC/example/CMakeLists.txt b/src/Nerve_GIC/example/CMakeLists.txt
new file mode 100644
index 00000000..461b6db2
--- /dev/null
+++ b/src/Nerve_GIC/example/CMakeLists.txt
@@ -0,0 +1,29 @@
+cmake_minimum_required(VERSION 2.6)
+project(Nerve_GIC_examples)
+
+add_executable ( Nerve Nerve.cpp )
+add_executable ( CoordGIC CoordGIC.cpp )
+add_executable ( FuncGIC FuncGIC.cpp )
+add_executable ( VoronoiGIC VoronoiGIC.cpp )
+
+if (TBB_FOUND)
+ target_link_libraries(Nerve ${TBB_LIBRARIES})
+ target_link_libraries(CoordGIC ${TBB_LIBRARIES})
+ target_link_libraries(FuncGIC ${TBB_LIBRARIES})
+ target_link_libraries(VoronoiGIC ${TBB_LIBRARIES})
+endif()
+
+file(COPY KeplerMapperVisuFromTxtFile.py km.py DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+
+add_test(NAME Nerve_GIC_example_nerve COMMAND $<TARGET_FILE:Nerve>
+ "${CMAKE_SOURCE_DIR}/data/points/human.off" "2" "10" "0.3")
+
+add_test(NAME Nerve_GIC_example_VoronoiGIC COMMAND $<TARGET_FILE:VoronoiGIC>
+ "${CMAKE_SOURCE_DIR}/data/points/human.off" "100")
+
+add_test(NAME Nerve_GIC_example_CoordGIC COMMAND $<TARGET_FILE:CoordGIC>
+ "${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" "0")
+
+add_test(NAME Nerve_GIC_example_FuncGIC COMMAND $<TARGET_FILE:FuncGIC>
+ "${CMAKE_SOURCE_DIR}/data/points/COIL_database/lucky_cat.off"
+ "${CMAKE_SOURCE_DIR}/data/points/COIL_database/lucky_cat_PCA1")
diff --git a/src/Nerve_GIC/example/CoordGIC.cpp b/src/Nerve_GIC/example/CoordGIC.cpp
new file mode 100644
index 00000000..c03fcbb3
--- /dev/null
+++ b/src/Nerve_GIC/example/CoordGIC.cpp
@@ -0,0 +1,93 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Mathieu Carrière
+ *
+ * Copyright (C) 2017 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <gudhi/GIC.h>
+
+#include <string>
+#include <vector>
+
+void usage(int nbArgs, char *const progName) {
+ std::cerr << "Error: Number of arguments (" << nbArgs << ") is not correct\n";
+ std::cerr << "Usage: " << progName << " filename.off coordinate [--v] \n";
+ std::cerr << " i.e.: " << progName << " ../../data/points/human.off 2 --v \n";
+ exit(-1); // ----- >>
+}
+
+int main(int argc, char **argv) {
+ if ((argc != 3) && (argc != 4)) usage(argc, argv[0]);
+
+ using Point = std::vector<float>;
+
+ std::string off_file_name(argv[1]);
+ int coord = atoi(argv[2]);
+ bool verb = 0;
+ if (argc == 4) verb = 1;
+
+ // -----------------------------------------
+ // Init of a functional GIC from an OFF file
+ // -----------------------------------------
+
+ Gudhi::cover_complex::Cover_complex<Point> GIC;
+ GIC.set_verbose(verb);
+
+ bool check = GIC.read_point_cloud(off_file_name);
+
+ if (!check) {
+ std::cout << "Incorrect OFF file." << std::endl;
+ } else {
+ GIC.set_type("GIC");
+
+ GIC.set_color_from_coordinate(coord);
+ GIC.set_function_from_coordinate(coord);
+
+ GIC.set_graph_from_automatic_rips(Gudhi::Euclidean_distance());
+ GIC.set_automatic_resolution();
+ GIC.set_gain();
+ GIC.set_cover_from_function();
+
+ GIC.find_simplices();
+
+ GIC.plot_DOT();
+
+ Gudhi::Simplex_tree<> stree;
+ GIC.create_complex(stree);
+
+ // --------------------------------------------
+ // Display information about the functional GIC
+ // --------------------------------------------
+
+ if (verb) {
+ std::cout << "Functional GIC is of dimension " << stree.dimension() << " - " << stree.num_simplices()
+ << " simplices - " << stree.num_vertices() << " vertices." << std::endl;
+
+ std::cout << "Iterator on functional GIC simplices" << std::endl;
+ for (auto f_simplex : stree.filtration_simplex_range()) {
+ for (auto vertex : stree.simplex_vertex_range(f_simplex)) {
+ std::cout << vertex << " ";
+ }
+ std::cout << std::endl;
+ }
+ }
+ }
+
+ return 0;
+}
diff --git a/src/Nerve_GIC/example/FuncGIC.cpp b/src/Nerve_GIC/example/FuncGIC.cpp
new file mode 100644
index 00000000..3762db4e
--- /dev/null
+++ b/src/Nerve_GIC/example/FuncGIC.cpp
@@ -0,0 +1,94 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Mathieu Carrière
+ *
+ * Copyright (C) 2017 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <gudhi/GIC.h>
+
+#include <string>
+#include <vector>
+
+void usage(int nbArgs, char *const progName) {
+ std::cerr << "Error: Number of arguments (" << nbArgs << ") is not correct\n";
+ std::cerr << "Usage: " << progName << " filename.off function [--v] \n";
+ std::cerr << " i.e.: " << progName << " ../../data/points/COIL_database/lucky_cat.off "
+ "../../data/points/COIL_database/lucky_cat_PCA1 --v \n";
+ exit(-1); // ----- >>
+}
+
+int main(int argc, char **argv) {
+ if ((argc != 3) && (argc != 4)) usage(argc, argv[0]);
+
+ using Point = std::vector<float>;
+
+ std::string off_file_name(argv[1]);
+ std::string func_file_name = argv[2];
+ bool verb = 0;
+ if (argc == 4) verb = 1;
+
+ // -----------------------------------------
+ // Init of a functional GIC from an OFF file
+ // -----------------------------------------
+
+ Gudhi::cover_complex::Cover_complex<Point> GIC;
+ GIC.set_verbose(verb);
+
+ bool check = GIC.read_point_cloud(off_file_name);
+
+ if (!check) {
+ std::cout << "Incorrect OFF file." << std::endl;
+ } else {
+ GIC.set_type("GIC");
+
+ GIC.set_color_from_file(func_file_name);
+ GIC.set_function_from_file(func_file_name);
+
+ GIC.set_graph_from_automatic_rips(Gudhi::Euclidean_distance());
+ GIC.set_automatic_resolution();
+ GIC.set_gain();
+ GIC.set_cover_from_function();
+
+ GIC.find_simplices();
+
+ GIC.plot_DOT();
+
+ Gudhi::Simplex_tree<> stree;
+ GIC.create_complex(stree);
+
+ // --------------------------------------------
+ // Display information about the functional GIC
+ // --------------------------------------------
+
+ if (verb) {
+ std::cout << "Functional GIC is of dimension " << stree.dimension() << " - " << stree.num_simplices()
+ << " simplices - " << stree.num_vertices() << " vertices." << std::endl;
+
+ std::cout << "Iterator on functional GIC simplices" << std::endl;
+ for (auto f_simplex : stree.filtration_simplex_range()) {
+ for (auto vertex : stree.simplex_vertex_range(f_simplex)) {
+ std::cout << vertex << " ";
+ }
+ std::cout << std::endl;
+ }
+ }
+ }
+
+ return 0;
+}
diff --git a/src/Nerve_GIC/example/GIC.cpp b/src/Nerve_GIC/example/GIC.cpp
new file mode 100644
index 00000000..2bc24a4d
--- /dev/null
+++ b/src/Nerve_GIC/example/GIC.cpp
@@ -0,0 +1,95 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Mathieu Carrière
+ *
+ * Copyright (C) 2017 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <gudhi/GIC.h>
+
+#include <string>
+#include <vector>
+
+void usage(int nbArgs, char *const progName) {
+ std::cerr << "Error: Number of arguments (" << nbArgs << ") is not correct\n";
+ std::cerr << "Usage: " << progName << " filename.off threshold coordinate resolution gain [--v] \n";
+ std::cerr << " i.e.: " << progName << " ../../data/points/human.off 0.075 2 0.075 0 --v \n";
+ exit(-1); // ----- >>
+}
+
+int main(int argc, char **argv) {
+ if ((argc != 6) && (argc != 7)) usage(argc, argv[0]);
+
+ using Point = std::vector<float>;
+
+ std::string off_file_name(argv[1]);
+ double threshold = atof(argv[2]);
+ int coord = atoi(argv[3]);
+ double resolution = atof(argv[4]);
+ double gain = atof(argv[5]);
+ bool verb = 0;
+ if (argc == 7) verb = 1;
+
+ // ----------------------------------------------------------------------------
+ // Init of a graph induced complex from an OFF file
+ // ----------------------------------------------------------------------------
+
+ Gudhi::graph_induced_complex::Graph_induced_complex<Point> GIC;
+ GIC.set_verbose(verb);
+
+ bool check = GIC.read_point_cloud(off_file_name);
+
+ if (!check) {
+ std::cout << "Incorrect OFF file." << std::endl;
+ } else {
+ GIC.set_color_from_coordinate(coord);
+ GIC.set_function_from_coordinate(coord);
+
+ GIC.set_graph_from_rips(threshold, Gudhi::Euclidean_distance());
+
+ GIC.set_resolution_with_interval_length(resolution);
+ GIC.set_gain(gain);
+ GIC.set_cover_from_function();
+
+ GIC.find_GIC_simplices();
+
+ GIC.plot_TXT_for_KeplerMapper();
+
+ Gudhi::Simplex_tree<> stree;
+ GIC.create_complex(stree);
+
+ // ----------------------------------------------------------------------------
+ // Display information about the graph induced complex
+ // ----------------------------------------------------------------------------
+
+ if (verb) {
+ std::cout << "Graph induced complex is of dimension " << stree.dimension() << " - " << stree.num_simplices()
+ << " simplices - " << stree.num_vertices() << " vertices." << std::endl;
+
+ std::cout << "Iterator on graph induced complex simplices" << std::endl;
+ for (auto f_simplex : stree.filtration_simplex_range()) {
+ for (auto vertex : stree.simplex_vertex_range(f_simplex)) {
+ std::cout << vertex << " ";
+ }
+ std::cout << std::endl;
+ }
+ }
+ }
+
+ return 0;
+}
diff --git a/src/Nerve_GIC/example/KeplerMapperVisuFromTxtFile.py b/src/Nerve_GIC/example/KeplerMapperVisuFromTxtFile.py
new file mode 100755
index 00000000..d2897774
--- /dev/null
+++ b/src/Nerve_GIC/example/KeplerMapperVisuFromTxtFile.py
@@ -0,0 +1,72 @@
+#!/usr/bin/env python
+
+import km
+import numpy as np
+from collections import defaultdict
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Mathieu Carriere
+
+ Copyright (C) 2017 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Mathieu Carriere"
+__copyright__ = "Copyright (C) 2017 INRIA"
+__license__ = "GPL v3"
+
+network = {}
+mapper = km.KeplerMapper(verbose=0)
+data = np.zeros((3,3))
+projected_data = mapper.fit_transform( data, projection="sum", scaler=None )
+
+f = open('SC.txt','r')
+nodes = defaultdict(list)
+links = defaultdict(list)
+custom = defaultdict(list)
+
+dat = f.readline()
+lens = f.readline()
+color = f.readline()
+param = [float(i) for i in f.readline().split(" ")]
+
+nums = [int(i) for i in f.readline().split(" ")]
+num_nodes = nums[0]
+num_edges = nums[1]
+
+for i in range(0,num_nodes):
+ point = [float(j) for j in f.readline().split(" ")]
+ nodes[ str(int(point[0])) ] = [ int(point[0]), point[1], int(point[2]) ]
+ links[ str(int(point[0])) ] = []
+ custom[ int(point[0]) ] = point[1]
+
+m = min([custom[i] for i in range(0,num_nodes)])
+M = max([custom[i] for i in range(0,num_nodes)])
+
+for i in range(0,num_edges):
+ edge = [int(j) for j in f.readline().split(" ")]
+ links[ str(edge[0]) ].append( str(edge[1]) )
+ links[ str(edge[1]) ].append( str(edge[0]) )
+
+network["nodes"] = nodes
+network["links"] = links
+network["meta"] = lens
+
+mapper.visualize(network, color_function = color, path_html="SC.html", title=dat,
+graph_link_distance=30, graph_gravity=0.1, graph_charge=-120, custom_tooltips=custom, width_html=0,
+height_html=0, show_tooltips=True, show_title=True, show_meta=True, res=param[0],gain=param[1], minimum=m,maximum=M)
diff --git a/src/Nerve_GIC/example/Nerve.cpp b/src/Nerve_GIC/example/Nerve.cpp
new file mode 100644
index 00000000..4d5b009b
--- /dev/null
+++ b/src/Nerve_GIC/example/Nerve.cpp
@@ -0,0 +1,95 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Mathieu Carrière
+ *
+ * Copyright (C) 2017 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <gudhi/GIC.h>
+
+#include <string>
+#include <vector>
+
+void usage(int nbArgs, char *const progName) {
+ std::cerr << "Error: Number of arguments (" << nbArgs << ") is not correct\n";
+ std::cerr << "Usage: " << progName << " filename.off coordinate resolution gain [--v] \n";
+ std::cerr << " i.e.: " << progName << " ../../data/points/human.off 2 10 0.3 --v \n";
+ exit(-1); // ----- >>
+}
+
+int main(int argc, char **argv) {
+ if ((argc != 5) && (argc != 6)) usage(argc, argv[0]);
+
+ using Point = std::vector<float>;
+
+ std::string off_file_name(argv[1]);
+ int coord = atoi(argv[2]);
+ int resolution = atoi(argv[3]);
+ double gain = atof(argv[4]);
+ bool verb = 0;
+ if (argc == 6) verb = 1;
+
+ // --------------------------------
+ // Init of a Nerve from an OFF file
+ // --------------------------------
+
+ Gudhi::cover_complex::Cover_complex<Point> SC;
+ SC.set_verbose(verb);
+
+ bool check = SC.read_point_cloud(off_file_name);
+
+ if (!check) {
+ std::cout << "Incorrect OFF file." << std::endl;
+ } else {
+ SC.set_type("Nerve");
+
+ SC.set_color_from_coordinate(coord);
+ SC.set_function_from_coordinate(coord);
+
+ SC.set_graph_from_OFF();
+ SC.set_resolution_with_interval_number(resolution);
+ SC.set_gain(gain);
+ SC.set_cover_from_function();
+
+ SC.find_simplices();
+
+ SC.write_info();
+
+ Gudhi::Simplex_tree<> stree;
+ SC.create_complex(stree);
+
+ // ----------------------------------------------------------------------------
+ // Display information about the graph induced complex
+ // ----------------------------------------------------------------------------
+
+ if (verb) {
+ std::cout << "Nerve is of dimension " << stree.dimension() << " - " << stree.num_simplices() << " simplices - "
+ << stree.num_vertices() << " vertices." << std::endl;
+
+ std::cout << "Iterator on Nerve simplices" << std::endl;
+ for (auto f_simplex : stree.filtration_simplex_range()) {
+ for (auto vertex : stree.simplex_vertex_range(f_simplex)) {
+ std::cout << vertex << " ";
+ }
+ std::cout << std::endl;
+ }
+ }
+ }
+
+ return 0;
+}
diff --git a/src/Nerve_GIC/example/Nerve.txt b/src/Nerve_GIC/example/Nerve.txt
new file mode 100644
index 00000000..2a861c5f
--- /dev/null
+++ b/src/Nerve_GIC/example/Nerve.txt
@@ -0,0 +1,43 @@
+Nerve is of dimension 1 - 41 simplices - 21 vertices.
+Iterator on Nerve simplices
+0
+1
+2
+2 0
+3
+3 1
+4
+4 3
+5
+5 2
+6
+6 4
+7
+7 5
+8
+9
+9 6
+10
+10 7
+11
+12
+12 8
+13
+13 9
+13 10
+14
+14 11
+15
+15 13
+16
+16 12
+17
+17 14
+18
+18 15
+18 16
+18 17
+19
+19 18
+20
+20 19
diff --git a/src/Nerve_GIC/example/VoronoiGIC.cpp b/src/Nerve_GIC/example/VoronoiGIC.cpp
new file mode 100644
index 00000000..32431cc2
--- /dev/null
+++ b/src/Nerve_GIC/example/VoronoiGIC.cpp
@@ -0,0 +1,90 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Mathieu Carrière
+ *
+ * Copyright (C) 2017 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <gudhi/GIC.h>
+
+#include <iostream>  // for std::cout, std::cerr
+#include <cstdlib>   // for atoi, exit
+#include <string>
+#include <vector>
+
+void usage(int nbArgs, char *const progName) {
+ std::cerr << "Error: Number of arguments (" << nbArgs << ") is not correct\n";
+ std::cerr << "Usage: " << progName << " filename.off N [--v] \n";
+ std::cerr << " i.e.: " << progName << " ../../data/points/human.off 100 --v \n";
+ exit(-1); // ----- >>
+}
+
+int main(int argc, char **argv) {
+ if ((argc != 3) && (argc != 4)) usage(argc, argv[0]);
+
+ using Point = std::vector<float>;
+
+ std::string off_file_name(argv[1]);
+ int m = atoi(argv[2]);
+ bool verb = false;
+ if (argc == 4) verb = true;
+
+ // ----------------------------------------------------------------------------
+ // Init of a graph induced complex from an OFF file
+ // ----------------------------------------------------------------------------
+
+ Gudhi::cover_complex::Cover_complex<Point> GIC;
+ GIC.set_verbose(verb);
+
+ bool check = GIC.read_point_cloud(off_file_name);
+
+ if (!check) {
+ std::cout << "Incorrect OFF file." << std::endl;
+ } else {
+ GIC.set_type("GIC");
+
+ GIC.set_color_from_coordinate();
+
+ GIC.set_graph_from_OFF();
+ GIC.set_cover_from_Voronoi(Gudhi::Euclidean_distance(), m);
+
+ GIC.find_simplices();
+
+ GIC.plot_OFF();
+
+ Gudhi::Simplex_tree<> stree;
+ GIC.create_complex(stree);
+
+ // ----------------------------------------------------------------------------
+ // Display information about the graph induced complex
+ // ----------------------------------------------------------------------------
+
+ if (verb) {
+ std::cout << "Graph induced complex is of dimension " << stree.dimension() << " - " << stree.num_simplices()
+ << " simplices - " << stree.num_vertices() << " vertices." << std::endl;
+
+ std::cout << "Iterator on graph induced complex simplices" << std::endl;
+ for (auto f_simplex : stree.filtration_simplex_range()) {
+ for (auto vertex : stree.simplex_vertex_range(f_simplex)) {
+ std::cout << vertex << " ";
+ }
+ std::cout << std::endl;
+ }
+ }
+ }
+
+ return 0;
+}
diff --git a/src/Nerve_GIC/example/km.py b/src/Nerve_GIC/example/km.py
new file mode 100755
index 00000000..53024aab
--- /dev/null
+++ b/src/Nerve_GIC/example/km.py
@@ -0,0 +1,390 @@
+from __future__ import division
+import numpy as np
+from collections import defaultdict
+import json
+import itertools
+from sklearn import cluster, preprocessing, manifold
+from datetime import datetime
+import sys
+
+class KeplerMapper(object):
+ # With this class you can build topological networks from (high-dimensional) data.
+ #
+ # 1) Fit a projection/lens/function to a dataset and transform it.
+ # For instance "mean_of_row(x) for x in X"
+ # 2) Map this projection with overlapping intervals/hypercubes.
+ # Cluster the points inside the interval
+ # (Note: we cluster on the inverse image/original data to lessen projection loss).
+ # If two clusters/nodes have the same members (due to the overlap), then:
+ # connect these with an edge.
+ # 3) Visualize the network using HTML and D3.js.
+ #
+ # functions
+ # ---------
+ # fit_transform: Create a projection (lens) from a dataset
+ # map: Apply Mapper algorithm on this projection and build a simplicial complex
+ # visualize: Turns the complex dictionary into a HTML/D3.js visualization
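+ #
+ # A minimal usage sketch (illustration only; `X` is assumed to be a 2D numpy array of samples):
+ #   mapper = KeplerMapper(verbose=1)
+ #   lens = mapper.fit_transform(X, projection="sum")
+ #   graph = mapper.map(lens, inverse_X=X, nr_cubes=10, overlap_perc=0.1)
+ #   mapper.visualize(graph, path_html="output.html", title="Example")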
+
+ def __init__(self, verbose=2):
+ self.verbose = verbose
+
+ self.chunk_dist = []
+ self.overlap_dist = []
+ self.d = []
+ self.nr_cubes = 0
+ self.overlap_perc = 0
+ self.clusterer = False
+
+ def fit_transform(self, X, projection="sum", scaler=preprocessing.MinMaxScaler()):
+ # Creates the projection/lens from X.
+ #
+ # Input: X. Input features as a numpy array.
+ # Output: projected_X. original data transformed to a projection (lens).
+ #
+ # parameters
+ # ----------
+ # projection: Projection parameter is either a string,
+ # a scikit class with fit_transform, like manifold.TSNE(),
+ # or a list of dimension indices.
+ # scaler: if None, do no scaling, else apply scaling to the projection
+ # Default: Min-Max scaling
+
+ self.scaler = scaler
+ self.projection = str(projection)
+
+ # Detect if projection is a class (for scikit-learn)
+ #if str(type(projection))[1:6] == "class": #TODO: de-ugly-fy
+ # reducer = projection
+ # if self.verbose > 0:
+ # try:
+ # projection.set_params(**{"verbose":self.verbose})
+ # except:
+ # pass
+ # print("\n..Projecting data using: \n\t%s\n"%str(projection))
+ # X = reducer.fit_transform(X)
+
+ # Detect if projection is a string (for standard functions)
+ if isinstance(projection, str):
+ if self.verbose > 0:
+ print("\n..Projecting data using: %s"%(projection))
+ # Stats lenses
+ if projection == "sum": # sum of row
+ X = np.sum(X, axis=1).reshape((X.shape[0],1))
+ if projection == "mean": # mean of row
+ X = np.mean(X, axis=1).reshape((X.shape[0],1))
+ if projection == "median": # mean of row
+ X = np.median(X, axis=1).reshape((X.shape[0],1))
+ if projection == "max": # max of row
+ X = np.max(X, axis=1).reshape((X.shape[0],1))
+ if projection == "min": # min of row
+ X = np.min(X, axis=1).reshape((X.shape[0],1))
+ if projection == "std": # std of row
+ X = np.std(X, axis=1).reshape((X.shape[0],1))
+
+ if projection == "dist_mean": # Distance of x to mean of X
+ X_mean = np.mean(X, axis=0)
+ X = np.sum(np.sqrt((X - X_mean)**2), axis=1).reshape((X.shape[0],1))
+
+ # Detect if projection is a list (with dimension indices)
+ if isinstance(projection, list):
+ if self.verbose > 0:
+ print("\n..Projecting data using: %s"%(str(projection)))
+ X = X[:,np.array(projection)]
+
+ # Scaling
+ if scaler is not None:
+ if self.verbose > 0:
+ print("\n..Scaling with: %s\n"%str(scaler))
+ X = scaler.fit_transform(X)
+
+ return X
+
+ def map(self, projected_X, inverse_X=None, clusterer=cluster.DBSCAN(eps=0.5,min_samples=3), nr_cubes=10, overlap_perc=0.1):
+ # This maps the data to a simplicial complex. Returns a dictionary with nodes and links.
+ #
+ # Input: projected_X. A Numpy array with the projection/lens.
+ # Output: complex. A dictionary with "nodes", "links" and "meta information"
+ #
+ # parameters
+ # ----------
+ # projected_X projected_X. A Numpy array with the projection/lens. Required.
+ # inverse_X Numpy array or None. If None then the projection itself is used for clustering.
+ # clusterer Scikit-learn API compatible clustering algorithm. Default: DBSCAN
+ # nr_cubes Int. The number of intervals/hypercubes to create.
+ # overlap_perc Float. The percentage of overlap "between" the intervals/hypercubes.
+
+ start = datetime.now()
+
+ # Helper function
+ def cube_coordinates_all(nr_cubes, nr_dimensions):
+ # Helper function to get origin coordinates for our intervals/hypercubes
+ # Useful for looping no matter the number of cubes or dimensions
+ # Example: if there are 4 cubes per dimension and 3 dimensions
+ # return the bottom left (origin) coordinates of 64 hypercubes,
+ # as a sorted list of Numpy arrays
+ # TODO: elegance-ify...
+ l = []
+ for x in range(nr_cubes):
+ l += [x] * nr_dimensions
+ return [np.array(list(f)) for f in sorted(set(itertools.permutations(l,nr_dimensions)))]
+
+ nodes = defaultdict(list)
+ links = defaultdict(list)
+ complex = {}
+ self.nr_cubes = nr_cubes
+ self.clusterer = clusterer
+ self.overlap_perc = overlap_perc
+
+ if self.verbose > 0:
+ print("Mapping on data shaped %s using dimensions\n"%(str(projected_X.shape)))
+
+ # If inverse image is not provided, we use the projection as the inverse image (suffer projection loss)
+ if inverse_X is None:
+ inverse_X = projected_X
+
+ # We chop up the min-max column ranges into 'nr_cubes' parts
+ self.chunk_dist = (np.max(projected_X, axis=0) - np.min(projected_X, axis=0))/nr_cubes
+
+ # We calculate the overlapping windows distance
+ self.overlap_dist = self.overlap_perc * self.chunk_dist
+
+ # We find our starting point
+ self.d = np.min(projected_X, axis=0)
+
+ # Use a dimension index array on the projected X
+ # (For now this uses the entire dimensionality, but we keep for experimentation)
+ di = np.array([x for x in range(projected_X.shape[1])])
+
+ # Prefix'ing the data with ID's
+ ids = np.array([x for x in range(projected_X.shape[0])])
+ projected_X = np.c_[ids,projected_X]
+ inverse_X = np.c_[ids,inverse_X]
+
+ # Subdivide the projected data X in intervals/hypercubes with overlap
+ if self.verbose > 0:
+ total_cubes = len(cube_coordinates_all(nr_cubes,projected_X.shape[1]))
+ print("Creating %s hypercubes."%total_cubes)
+
+ for i, coor in enumerate(cube_coordinates_all(nr_cubes,di.shape[0])):
+ # Slice the hypercube
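+ # (keep the rows of projected_X whose coordinates, in every projected dimension, fall in
+ #  [origin, origin + chunk_dist + overlap_dist), where origin = d + coor * chunk_dist)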
+ hypercube = projected_X[ np.invert(np.any((projected_X[:,di+1] >= self.d[di] + (coor * self.chunk_dist[di])) &
+ (projected_X[:,di+1] < self.d[di] + (coor * self.chunk_dist[di]) + self.chunk_dist[di] + self.overlap_dist[di]) == False, axis=1 )) ]
+
+ if self.verbose > 1:
+ print("There are %s points in cube_%s / %s with starting range %s"%
+ (hypercube.shape[0],i,total_cubes,self.d[di] + (coor * self.chunk_dist[di])))
+
+ # If at least one sample inside the hypercube
+ if hypercube.shape[0] > 0:
+ # Cluster the data point(s) in the cube, skipping the id-column
+ # Note that we apply clustering on the inverse image (original data samples) that fall inside the cube.
+ inverse_x = inverse_X[[int(nn) for nn in hypercube[:,0]]]
+
+ clusterer.fit(inverse_x[:,1:])
+
+ if self.verbose > 1:
+ print("Found %s clusters in cube_%s\n"%(np.unique(clusterer.labels_[clusterer.labels_ > -1]).shape[0],i))
+
+ #Now for every (sample id in cube, predicted cluster label)
+ for a in np.c_[hypercube[:,0],clusterer.labels_]:
+ if a[1] != -1: #if not predicted as noise
+ cluster_id = str(coor[0])+"_"+str(i)+"_"+str(a[1])+"_"+str(coor)+"_"+str(self.d[di] + (coor * self.chunk_dist[di])) # TODO: de-rudimentary-ify
+ nodes[cluster_id].append( int(a[0]) ) # Append the member id's as integers
+ else:
+ if self.verbose > 1:
+ print("Cube_%s is empty.\n"%(i))
+
+ # Create links when clusters from different hypercubes have members with the same sample id.
+ candidates = itertools.combinations(nodes.keys(),2)
+ for candidate in candidates:
+ # if there are non-unique members in the union
+ if len(nodes[candidate[0]]+nodes[candidate[1]]) != len(set(nodes[candidate[0]]+nodes[candidate[1]])):
+ links[candidate[0]].append( candidate[1] )
+
+ # Reporting
+ if self.verbose > 0:
+ nr_links = 0
+ for k in links:
+ nr_links += len(links[k])
+ print("\ncreated %s edges and %s nodes in %s."%(nr_links,len(nodes),str(datetime.now()-start)))
+
+ complex["nodes"] = nodes
+ complex["links"] = links
+ complex["meta"] = self.projection
+
+ return complex
+
+ def visualize(self, complex, color_function="", path_html="mapper_visualization_output.html", title="My Data",
+ graph_link_distance=30, graph_gravity=0.1, graph_charge=-120, custom_tooltips=None, width_html=0,
+ height_html=0, show_tooltips=True, show_title=True, show_meta=True, res=0,gain=0,minimum=0,maximum=0):
+ # Turns the dictionary 'complex' in a html file with d3.js
+ #
+ # Input: complex. Dictionary (output from calling .map())
+ # Output: a HTML page saved as a file in 'path_html'.
+ #
+ # parameters
+ # ----------
+ # color_function string. Not fully implemented. Default: "" (distance to origin)
+ # path_html file path as string. Where to save the HTML page.
+ # title string. HTML page document title and first heading.
+ # graph_link_distance int. Edge length.
+ # graph_gravity float. "Gravity" to center of layout.
+ # graph_charge int. charge between nodes.
+ # custom_tooltips None or Numpy Array. You could use "y"-label array for this.
+ # width_html int. Width of canvas. Default: 0 (full width)
+ # height_html int. Height of canvas. Default: 0 (full height)
+ # show_tooltips bool. default:True
+ # show_title bool. default:True
+ # show_meta bool. default:True
+
+ # Format JSON for D3 graph
+ json_s = {}
+ json_s["nodes"] = []
+ json_s["links"] = []
+ k2e = {} # a key to incremental int dict, used for id's when linking
+
+ for e, k in enumerate(complex["nodes"]):
+ # Tooltip and node color formatting, TODO: de-mess-ify
+ if custom_tooltips is not None:
+ tooltip_s = "<h2>Cluster %s</h2>"%k + " ".join(str(custom_tooltips[complex["nodes"][k][0]]).split(" "))
+ if maximum == minimum:
+ tooltip_i = 0
+ else:
+ tooltip_i = int(30*(custom_tooltips[complex["nodes"][k][0]]-minimum)/(maximum-minimum))
+ json_s["nodes"].append({"name": str(k), "tooltip": tooltip_s, "group": 2 * int(np.log(complex["nodes"][k][2])), "color": tooltip_i})
+ else:
+ tooltip_s = "<h2>Cluster %s</h2>Contains %s members."%(k,len(complex["nodes"][k]))
+ json_s["nodes"].append({"name": str(k), "tooltip": tooltip_s, "group": 2 * int(np.log(len(complex["nodes"][k]))), "color": str(k.split("_")[0])})
+ k2e[k] = e
+ for k in complex["links"]:
+ for link in complex["links"][k]:
+ json_s["links"].append({"source": k2e[k], "target":k2e[link],"value":1})
+
+ # Width and height of graph in HTML output
+ if width_html == 0:
+ width_css = "100%"
+ width_js = 'document.getElementById("holder").offsetWidth-20'
+ else:
+ width_css = "%spx" % width_html
+ width_js = "%s" % width_html
+ if height_html == 0:
+ height_css = "100%"
+ height_js = 'document.getElementById("holder").offsetHeight-20'
+ else:
+ height_css = "%spx" % height_html
+ height_js = "%s" % height_html
+
+ # Whether to show certain UI elements or not
+ if show_tooltips == False:
+ tooltips_display = "display: none;"
+ else:
+ tooltips_display = ""
+
+ if show_meta == False:
+ meta_display = "display: none;"
+ else:
+ meta_display = ""
+
+ if show_title == False:
+ title_display = "display: none;"
+ else:
+ title_display = ""
+
+ with open(path_html,"wb") as outfile:
+ html = """<!DOCTYPE html>
+ <meta charset="utf-8">
+ <meta name="generator" content="KeplerMapper">
+ <title>%s | KeplerMapper</title>
+ <link href='https://fonts.googleapis.com/css?family=Roboto:700,300' rel='stylesheet' type='text/css'>
+ <style>
+ * {margin: 0; padding: 0;}
+ html { height: 100%%;}
+ body {background: #111; height: 100%%; font: 100 16px Roboto, Sans-serif;}
+ .link { stroke: #999; stroke-opacity: .333; }
+ .divs div { border-radius: 50%%; background: red; position: absolute; }
+ .divs { position: absolute; top: 0; left: 0; }
+ #holder { position: relative; width: %s; height: %s; background: #111; display: block;}
+ h1 { %s padding: 20px; color: #fafafa; text-shadow: 0px 1px #000,0px -1px #000; position: absolute; font: 300 30px Roboto, Sans-serif;}
+ h2 { text-shadow: 0px 1px #000,0px -1px #000; font: 700 16px Roboto, Sans-serif;}
+ .meta { position: absolute; opacity: 0.9; width: 220px; top: 80px; left: 20px; display: block; %s background: #000; line-height: 25px; color: #fafafa; border: 20px solid #000; font: 100 16px Roboto, Sans-serif;}
+ div.tooltip { position: absolute; width: 380px; display: block; %s padding: 20px; background: #000; border: 0px; border-radius: 3px; pointer-events: none; z-index: 999; color: #FAFAFA;}
+ </style>
+ <body>
+ <div id="holder">
+ <h1>%s</h1>
+ <p class="meta">
+ <b>Lens</b><br>%s<br><br>
+ <b>Length of intervals</b><br>%s<br><br>
+ <b>Overlap percentage</b><br>%s%%<br><br>
+ <b>Color Function</b><br>%s
+ </p>
+ </div>
+ <script src="https://cdnjs.cloudflare.com/ajax/libs/d3/3.5.5/d3.min.js"></script>
+ <script>
+ var width = %s,
+ height = %s;
+ var color = d3.scale.ordinal()
+ .domain(["0","1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13","14","15","16","17","18","19","20","21","22","23","24","25","26","27","28","29","30"])
+ .range(["#FF0000","#FF1400","#FF2800","#FF3c00","#FF5000","#FF6400","#FF7800","#FF8c00","#FFa000","#FFb400","#FFc800","#FFdc00","#FFf000","#fdff00","#b0ff00","#65ff00","#17ff00","#00ff36","#00ff83","#00ffd0","#00e4ff","#00c4ff","#00a4ff","#00a4ff","#0084ff","#0064ff","#0044ff","#0022ff","#0002ff","#0100ff","#0300ff","#0500ff"]);
+ var force = d3.layout.force()
+ .charge(%s)
+ .linkDistance(%s)
+ .gravity(%s)
+ .size([width, height]);
+ var svg = d3.select("#holder").append("svg")
+ .attr("width", width)
+ .attr("height", height);
+
+ var div = d3.select("#holder").append("div")
+ .attr("class", "tooltip")
+ .style("opacity", 0.0);
+
+ var divs = d3.select('#holder').append('div')
+ .attr('class', 'divs')
+ .attr('style', function(d) { return 'overflow: hidden; width: ' + width + 'px; height: ' + height + 'px;'; });
+
+ graph = %s;
+ force
+ .nodes(graph.nodes)
+ .links(graph.links)
+ .start();
+ var link = svg.selectAll(".link")
+ .data(graph.links)
+ .enter().append("line")
+ .attr("class", "link")
+ .style("stroke-width", function(d) { return Math.sqrt(d.value); });
+ var node = divs.selectAll('div')
+ .data(graph.nodes)
+ .enter().append('div')
+ .on("mouseover", function(d) {
+ div.transition()
+ .duration(200)
+ .style("opacity", .9);
+ div .html(d.tooltip + "<br/>")
+ .style("left", (d3.event.pageX + 100) + "px")
+ .style("top", (d3.event.pageY - 28) + "px");
+ })
+ .on("mouseout", function(d) {
+ div.transition()
+ .duration(500)
+ .style("opacity", 0);
+ })
+ .call(force.drag);
+
+ node.append("title")
+ .text(function(d) { return d.name; });
+ force.on("tick", function() {
+ link.attr("x1", function(d) { return d.source.x; })
+ .attr("y1", function(d) { return d.source.y; })
+ .attr("x2", function(d) { return d.target.x; })
+ .attr("y2", function(d) { return d.target.y; });
+ node.attr("cx", function(d) { return d.x; })
+ .attr("cy", function(d) { return d.y; })
+ .attr('style', function(d) { return 'width: ' + (d.group * 2) + 'px; height: ' + (d.group * 2) + 'px; ' + 'left: '+(d.x-(d.group))+'px; ' + 'top: '+(d.y-(d.group))+'px; background: '+color(d.color)+'; box-shadow: 0px 0px 3px #111; box-shadow: 0px 0px 33px '+color(d.color)+', inset 0px 0px 5px rgba(0, 0, 0, 0.2);'})
+ ;
+ });
+ </script>"""%(title,width_css, height_css, title_display, meta_display, tooltips_display, title,complex["meta"],res,gain*100,color_function,width_js,height_js,graph_charge,graph_link_distance,graph_gravity,json.dumps(json_s))
+ outfile.write(html.encode("utf-8"))
+ if self.verbose > 0:
+ print("\nWrote d3.js graph to '%s'"%path_html)
diff --git a/src/Nerve_GIC/example/km.py.COPYRIGHT b/src/Nerve_GIC/example/km.py.COPYRIGHT
new file mode 100644
index 00000000..bef7b121
--- /dev/null
+++ b/src/Nerve_GIC/example/km.py.COPYRIGHT
@@ -0,0 +1,26 @@
+km.py is a fork of https://github.com/MLWave/kepler-mapper.
+Only the visualization part has been kept (Mapper part has been removed).
+
+This file has the following copyright:
+
+The MIT License (MIT)
+
+Copyright (c) 2015 Triskelion - HJ van Veen - info@mlwave.com
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/src/Nerve_GIC/include/gudhi/GIC.h b/src/Nerve_GIC/include/gudhi/GIC.h
new file mode 100644
index 00000000..9f107a7e
--- /dev/null
+++ b/src/Nerve_GIC/include/gudhi/GIC.h
@@ -0,0 +1,1166 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author: Mathieu Carriere
+ *
+ * Copyright (C) 2017 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef GIC_H_
+#define GIC_H_
+
+#include <gudhi/Debug_utils.h>
+#include <gudhi/graph_simplicial_complex.h>
+#include <gudhi/reader_utils.h>
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Rips_complex.h>
+#include <gudhi/Points_off_io.h>
+#include <gudhi/distance_functions.h>
+
+#include <iostream>
+#include <fstream>    // for ifstream, ofstream
+#include <sstream>    // for stringstream
+#include <iterator>   // for istream_iterator
+#include <vector>
+#include <map>
+#include <string>
+#include <limits>     // for numeric_limits
+#include <utility>    // for pair<>
+#include <algorithm>  // for std::max
+#include <random>
+#include <cassert>
+#include <cmath>      // for std::floor, std::log, std::abs
+#include <cstring>    // for strcmp
+#include <cstdio>     // for sprintf
+#include <cctype>     // for isspace
+#include <stdexcept>  // for std::invalid_argument
+
+namespace Gudhi {
+
+namespace cover_complex {
+
+using Simplex_tree = Gudhi::Simplex_tree<>;
+using Filtration_value = Simplex_tree::Filtration_value;
+using Rips_complex = Gudhi::rips_complex::Rips_complex<Filtration_value>;
+
+/**
+ * \class Cover_complex
+ * \brief Cover complex data structure.
+ *
+ * \ingroup cover_complex
+ *
+ * \details
+ * The data structure is a simplicial complex, representing a
+ * Graph Induced simplicial Complex (GIC) or a Nerve,
+ * and whose simplices are computed with a cover C of a point
+ * cloud P, which often comes from the preimages of intervals
+ * covering the image of a function f defined on P.
+ * These intervals are parameterized by their resolution
+ * (either their length or their number)
+ * and their gain (percentage of overlap).
+ * To compute a GIC, one also needs a graph G built on top of P,
+ * whose cliques with vertices belonging to different elements of C
+ * correspond to the simplices of the GIC.
+ *
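+ * A minimal usage sketch, mirroring the Nerve example shipped with this module
+ * (the .OFF file name is a placeholder):
+ * \code
+ * Gudhi::cover_complex::Cover_complex<std::vector<float> > SC;
+ * SC.read_point_cloud("cloud.off");
+ * SC.set_type("Nerve");
+ * SC.set_color_from_coordinate(0);
+ * SC.set_function_from_coordinate(0);
+ * SC.set_graph_from_OFF();
+ * SC.set_resolution_with_interval_number(10);
+ * SC.set_gain(0.3);
+ * SC.set_cover_from_function();
+ * SC.find_simplices();
+ * Gudhi::Simplex_tree<> stree;
+ * SC.create_complex(stree);
+ * \endcode
+ *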
+ */
+template <typename Point>
+class Cover_complex {
+ private:
+ // Graph_induced_complex(std::map<int, double> fun){func = fun;}
+ bool verbose = false; // whether to display information.
+ std::vector<Point> point_cloud;
+ std::vector<std::vector<int> > one_skeleton;
+ typedef int Cover_t; // elements of cover C are indexed by integers.
+ std::vector<std::vector<Cover_t> > simplices;
+ std::map<int, std::vector<Cover_t> > cover;
+ std::map<Cover_t, std::vector<int> > cover_back;
+ int maximal_dim; // maximal dimension of output simplicial complex.
+ int data_dimension; // dimension of input data.
+ int n; // number of points.
+ std::map<Cover_t, int>
+ cover_fct; // integer-valued function that allows to state if two elements of the cover are consecutive or not.
+ std::map<Cover_t, std::pair<int, double> >
+ cover_color; // size and coloring of the vertices of the output simplicial complex.
+ Simplex_tree st;
+
+ std::map<int, std::vector<int> > adjacency_matrix;
+ std::vector<std::vector<double> > distances;
+
+ int resolution_int = -1;
+ double resolution_double = -1;
+ double gain = -1;
+ double rate_constant = 10; // Constant in the subsampling.
+ double rate_power = 0.001; // Power in the subsampling.
+ int mask = 0; // Ignore nodes containing less than mask points.
+
+ std::map<int, double> func;
+ std::map<int, double> func_color;
+ std::vector<int> voronoi_subsamples;
+ std::string cover_name;
+ std::string point_cloud_name;
+ std::string color_name;
+ std::string type; // Nerve or GIC
+ bool functional_cover = false; // whether we use a cover with preimages of a function or not
+
+ // Point comparator
+ struct Less {
+ Less(std::map<int, double> func) { Fct = func; }
+ std::map<int, double> Fct;
+ bool operator()(int a, int b) {
+ if (Fct[a] == Fct[b])
+ return a < b;
+ else
+ return Fct[a] < Fct[b];
+ }
+ };
+
+ // DFS
+ private:
+ void dfs(std::map<int, std::vector<int> >& G, int p, std::vector<int>& cc, std::map<int, bool>& visit) {
+ cc.push_back(p);
+ visit[p] = true;
+ int neighb = G[p].size();
+ for (int i = 0; i < neighb; i++)
+ if (visit.find(G[p][i]) != visit.end())
+ if (!(visit[G[p][i]])) dfs(G, G[p][i], cc, visit);
+ }
+
+ // Find random number in [0,1].
+ double GetUniform() {
+ static std::default_random_engine re;
+ static std::uniform_real_distribution<double> Dist(0, 1);
+ return Dist(re);
+ }
+
+ // Subsample points.
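+ // (selection sampling: scan indices 0..populationSize-1 once and keep index t with
+ // probability (sampleSize - m) / (populationSize - t), which yields exactly sampleSize
+ // indices in increasing order)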
+ void SampleWithoutReplacement(int populationSize, int sampleSize, std::vector<int>& samples) {
+ int t = 0;
+ int m = 0;
+ double u;
+ while (m < sampleSize) {
+ u = GetUniform();
+ if ((populationSize - t) * u >= sampleSize - m) {
+ t++;
+ } else {
+ samples[m] = t;
+ t++;
+ m++;
+ }
+ }
+ }
+
+ private:
+ void fill_adjacency_matrix_from_st() {
+ std::vector<int> empty;
+ for (int i = 0; i < n; i++) adjacency_matrix[i] = empty;
+ for (auto simplex : st.complex_simplex_range()) {
+ if (st.dimension(simplex) == 1) {
+ std::vector<int> vertices;
+ for (auto vertex : st.simplex_vertex_range(simplex)) vertices.push_back(vertex);
+ adjacency_matrix[vertices[0]].push_back(vertices[1]);
+ adjacency_matrix[vertices[1]].push_back(vertices[0]);
+ }
+ }
+ }
+
+ public:
+ /** \brief Specifies the type of the output simplicial complex.
+ *
+ * @param[in] t string (either "GIC" or "Nerve").
+ *
+ */
+ void set_type(const std::string& t) { type = t; }
+
+ public:
+ /** \brief Specifies whether the program should display information or not.
+ *
+ * @param[in] verb boolean (true = display info, false = do not display info).
+ *
+ */
+ void set_verbose(bool verb = false) { verbose = verb; }
+
+ public:
+ /** \brief Sets the constants used to subsample the data set. These constants are
+ * explained in \cite Carriere17c.
+ *
+ * @param[in] constant double.
+ * @param[in] power double.
+ *
+ */
+ void set_subsampling(double constant, double power) {
+ rate_constant = constant;
+ rate_power = power;
+ }
+
+ public:
+ /** \brief Sets the mask, i.e. a threshold integer such that nodes of the complex containing a number of
+ * data points less than or equal to this threshold are not displayed.
+ *
+ * @param[in] nodemask integer.
+ *
+ */
+ void set_mask(int nodemask) { mask = nodemask; }
+
+ public:
+ /** \brief Reads and stores the input point cloud.
+ *
+ * @param[in] off_file_name name of the input .OFF or .nOFF file.
+ *
+ */
+ bool read_point_cloud(const std::string& off_file_name) {
+ point_cloud_name = off_file_name;
+ std::ifstream input(off_file_name);
+ std::string line;
+
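+ // The first non-comment line is either "OFF" (points in R^3) or "nOFF" (the dimension is then
+ // read from the next non-comment line).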
+ char comment = '#';
+ while (comment == '#') {
+ getline(input, line);
+ if (!line.empty() && !std::all_of(line.begin(), line.end(), isspace)) comment = line[line.find_first_not_of(' ')];
+ }
+ if (std::strcmp(line.c_str(), "nOFF") == 0) {
+ comment = '#';
+ while (comment == '#') {
+ getline(input, line);
+ if (!line.empty() && !std::all_of(line.begin(), line.end(), isspace))
+ comment = line[line.find_first_not_of(' ')];
+ }
+ std::stringstream stream(line);
+ stream >> data_dimension;
+ } else {
+ data_dimension = 3;
+ }
+
+ comment = '#';
+ int numedges, numfaces, i, num;
+ while (comment == '#') {
+ getline(input, line);
+ if (!line.empty() && !std::all_of(line.begin(), line.end(), isspace)) comment = line[line.find_first_not_of(' ')];
+ }
+ std::stringstream stream(line);
+ stream >> n;
+ stream >> numfaces;
+ stream >> numedges;
+
+ i = 0;
+ while (i < n) {
+ getline(input, line);
+ if (!line.empty() && line[line.find_first_not_of(' ')] != '#' &&
+ !std::all_of(line.begin(), line.end(), isspace)) {
+ std::vector<double> point;
+ std::istringstream iss(line);
+ point.assign(std::istream_iterator<double>(iss), std::istream_iterator<double>());
+ point_cloud.emplace_back(point.begin(), point.begin() + data_dimension);
+ i++;
+ }
+ }
+
+ i = 0;
+ while (i < numfaces) {
+ getline(input, line);
+ if (!line.empty() && line[line.find_first_not_of(' ')] != '#' &&
+ !std::all_of(line.begin(), line.end(), isspace)) {
+ std::vector<int> simplex;
+ std::istringstream iss(line);
+ simplex.assign(std::istream_iterator<int>(iss), std::istream_iterator<int>());
+ num = simplex[0];
+ std::vector<int> edge(2);
+ for (int j = 1; j <= num; j++) {
+ for (int k = j + 1; k <= num; k++) {
+ edge[0] = simplex[j];
+ edge[1] = simplex[k];
+ one_skeleton.push_back(edge);
+ }
+ }
+ i++;
+ }
+ }
+
+ return input.is_open();
+ }
+
+ // *******************************************************************************************************************
+ // Graphs.
+ // *******************************************************************************************************************
+
+ public: // Set graph from file.
+ /** \brief Creates a graph G from a file containing the edges.
+ *
+ * @param[in] graph_file_name name of the input graph file.
+ * The graph file contains one edge per line,
+ * each edge being represented by the IDs of its two nodes.
+ *
+ */
+ void set_graph_from_file(const std::string& graph_file_name) {
+ int neighb;
+ std::ifstream input(graph_file_name);
+ std::string line;
+ int edge[2];
+ int n = 0;
+ while (std::getline(input, line)) {
+ std::stringstream stream(line);
+ stream >> edge[0];
+ while (stream >> neighb) {
+ edge[1] = neighb;
+ st.insert_simplex_and_subfaces(edge);
+ }
+ n++;
+ }
+
+ fill_adjacency_matrix_from_st();
+ }
+
+ public: // Set graph from OFF file.
+ /** \brief Creates a graph G from the triangulation given by the input .OFF file.
+ *
+ */
+ void set_graph_from_OFF() {
+ int num_edges = one_skeleton.size();
+ if (num_edges > 0) {
+ for (int i = 0; i < num_edges; i++) st.insert_simplex_and_subfaces(one_skeleton[i]);
+ fill_adjacency_matrix_from_st();
+ } else {
+ std::cout << "No triangulation read in OFF file!" << std::endl;
+ }
+ }
+
+ public: // Set graph from Rips complex.
+ /** \brief Creates a graph G from a Rips complex.
+ *
+ * @param[in] threshold threshold value for the Rips complex.
+ * @param[in] distance distance used to compute the Rips complex.
+ *
+ */
+ template <typename Distance>
+ void set_graph_from_rips(double threshold, Distance distance) {
+ Rips_complex rips_complex_from_points(point_cloud, threshold, distance);
+ rips_complex_from_points.create_complex(st, 1);
+ fill_adjacency_matrix_from_st();
+ }
+
+ public: // Pairwise distances.
+ /** \private \brief Computes all pairwise distances.
+ */
+ template <typename Distance>
+ void compute_pairwise_distances(Distance ref_distance) {
+ double d;
+ std::vector<double> zeros(n);
+ for (int i = 0; i < n; i++) distances.push_back(zeros);
+ std::string distance = point_cloud_name;
+ distance.append("_dist");
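+ // Pairwise distances are cached in a binary file named <point cloud name>_dist:
+ // read it if it already exists, otherwise compute the distances and write them out.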
+ std::ifstream input(distance.c_str(), std::ios::in | std::ios::binary);
+
+ if (input.good()) {
+ if (verbose) std::cout << "Reading distances..." << std::endl;
+ for (int i = 0; i < n; i++) {
+ for (int j = i; j < n; j++) {
+ input.read((char*)&d, 8);
+ distances[i][j] = d;
+ distances[j][i] = d;
+ }
+ }
+ input.close();
+ } else {
+ if (verbose) std::cout << "Computing distances..." << std::endl;
+ input.close();
+ std::ofstream output(distance, std::ios::out | std::ios::binary);
+ for (int i = 0; i < n; i++) {
+ int state = (int)floor(100 * (i * 1.0 + 1) / n) % 10;
+ if (state == 0 && verbose) std::cout << "\r" << state << "%" << std::flush;
+ for (int j = i; j < n; j++) {
+ double dis = ref_distance(point_cloud[i], point_cloud[j]);
+ distances[i][j] = dis;
+ distances[j][i] = dis;
+ output.write((char*)&dis, 8);
+ }
+ }
+ output.close();
+ if (verbose) std::cout << std::endl;
+ }
+ }
+
+ public: // Automatic tuning of Rips complex.
+ /** \brief Creates a graph G from a Rips complex whose threshold value is automatically tuned with subsampling---see
+ * \cite Carriere17c.
+ *
+ * @param[in] distance distance between data points.
+ * @param[in] N number of subsampling iterations (the default value of 100 is usually reasonable, but there is no
+ * general rule for choosing it).
+ * @result delta threshold used for computing the Rips complex.
+ *
+ */
+ template <typename Distance>
+ double set_graph_from_automatic_rips(Distance distance, int N = 100) {
+ int m = floor(n / std::exp((1 + rate_power) * std::log(std::log(n) / std::log(rate_constant))));
+ m = std::min(m, n - 1);
+ std::vector<int> samples(m);
+ double delta = 0;
+
+ if (verbose) std::cout << n << " points in R^" << data_dimension << std::endl;
+ if (verbose) std::cout << "Subsampling " << m << " points" << std::endl;
+
+ if (distances.size() == 0) compute_pairwise_distances(distance);
+
+ // #pragma omp parallel for
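+ // delta is estimated as the average, over N random subsamples of size m, of the
+ // Hausdorff distance between the subsample and the whole point cloud.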
+ for (int i = 0; i < N; i++) {
+ SampleWithoutReplacement(n, m, samples);
+ double hausdorff_dist = 0;
+ for (int j = 0; j < n; j++) {
+ double mj = distances[j][samples[0]];
+ for (int k = 1; k < m; k++) mj = std::min(mj, distances[j][samples[k]]);
+ hausdorff_dist = std::max(hausdorff_dist, mj);
+ }
+ delta += hausdorff_dist / N;
+ }
+
+ if (verbose) std::cout << "delta = " << delta << std::endl;
+ Rips_complex rips_complex_from_points(point_cloud, delta, distance);
+ rips_complex_from_points.create_complex(st, 1);
+ fill_adjacency_matrix_from_st();
+
+ return delta;
+ }
+
+ // *******************************************************************************************************************
+ // Functions.
+ // *******************************************************************************************************************
+
+ public: // Set function from file.
+ /** \brief Creates the function f from a file containing the function values.
+ *
+ * @param[in] func_file_name name of the input function file.
+ *
+ */
+ void set_function_from_file(const std::string& func_file_name) {
+ int vertex_id = 0;
+ std::ifstream input(func_file_name);
+ std::string line;
+ double f;
+ while (std::getline(input, line)) {
+ std::stringstream stream(line);
+ stream >> f;
+ func.emplace(vertex_id, f);
+ vertex_id++;
+ }
+ functional_cover = true;
+ cover_name = func_file_name;
+ }
+
+ public: // Set function from kth coordinate
+ /** \brief Creates the function f from the k-th coordinate of the point cloud P.
+ *
+ * @param[in] k coordinate to use (start at 0).
+ *
+ */
+ void set_function_from_coordinate(int k) {
+ for (int i = 0; i < n; i++) func.emplace(i, point_cloud[i][k]);
+ char coordinate[100];
+ sprintf(coordinate, "coordinate %d", k);
+ functional_cover = true;
+ cover_name = coordinate;
+ }
+
+ public: // Set function from vector.
+ /** \brief Creates the function f from a vector stored in memory.
+ *
+ * @param[in] function input vector of values.
+ *
+ */
+ template <class InputRange>
+ void set_function_from_range(InputRange const& function) {
+ functional_cover = true;
+ int index = 0;
+ for (auto v : function) {
+ func.emplace(index, v);
+ index++;
+ }
+ }
+
+ // *******************************************************************************************************************
+ // Covers.
+ // *******************************************************************************************************************
+
+ public: // Automatic tuning of resolution.
+ /** \brief Computes the optimal length of intervals
+ * (i.e. the smallest interval length avoiding discretization artifacts---see \cite Carriere17c) for a functional
+ * cover.
+ *
+ * @result reso interval length used to compute the cover.
+ *
+ */
+ double set_automatic_resolution() {
+ if (!functional_cover) {
+ std::cout << "Cover needs to come from the preimages of a function." << std::endl;
+ return 0;
+ }
+ if (type != "Nerve" && type != "GIC") {
+ std::cout << "Type of complex needs to be specified." << std::endl;
+ return 0;
+ }
+
+ double reso = 0;
+
+ if (type == "GIC") {
+ for (auto simplex : st.complex_simplex_range()) {
+ if (st.dimension(simplex) == 1) {
+ std::vector<int> vertices;
+ for (auto vertex : st.simplex_vertex_range(simplex)) vertices.push_back(vertex);
+ reso = std::max(reso, std::abs(func[vertices[0]] - func[vertices[1]]));
+ }
+ }
+ if (verbose) std::cout << "resolution = " << reso << std::endl;
+ resolution_double = reso;
+ }
+
+ if (type == "Nerve") {
+ for (auto simplex : st.complex_simplex_range()) {
+ if (st.dimension(simplex) == 1) {
+ std::vector<int> vertices;
+ for (auto vertex : st.simplex_vertex_range(simplex)) vertices.push_back(vertex);
+ reso = std::max(reso, (std::abs(func[vertices[0]] - func[vertices[1]])) / gain);
+ }
+ }
+ if (verbose) std::cout << "resolution = " << reso << std::endl;
+ resolution_double = reso;
+ }
+
+ return reso;
+ }
+
+ public:
+ /** \brief Sets a length of intervals from a value stored in memory.
+ *
+ * @param[in] reso length of intervals.
+ *
+ */
+ void set_resolution_with_interval_length(double reso) { resolution_double = reso; }
+ /** \brief Sets a number of intervals from a value stored in memory.
+ *
+ * @param[in] reso number of intervals.
+ *
+ */
+ void set_resolution_with_interval_number(int reso) { resolution_int = reso; }
+ /** \brief Sets a gain from a value stored in memory (default value 0.3).
+ *
+ * @param[in] g gain.
+ *
+ */
+ void set_gain(double g = 0.3) { gain = g; }
+
+ public: // Set cover with preimages of function.
+ /** \brief Creates a cover C from the preimages of the function f.
+ *
+ */
+ void set_cover_from_function() {
+ if (resolution_double == -1 && resolution_int == -1) {
+ std::cout << "Number and/or length of intervals not specified" << std::endl;
+ return;
+ }
+ if (gain == -1) {
+ std::cout << "Gain not specified" << std::endl;
+ return;
+ }
+
+ // Read function values and compute min and max
+ std::map<int, double>::iterator it;
+ double maxf, minf;
+ minf = std::numeric_limits<double>::max();
+ maxf = std::numeric_limits<double>::lowest();
+ for (it = func.begin(); it != func.end(); it++) {
+ minf = std::min(minf, it->second);
+ maxf = std::max(maxf, it->second);
+ }
+ int n = func.size();
+ if (verbose) std::cout << "Min function value = " << minf << " and Max function value = " << maxf << std::endl;
+
+ // Compute cover of im(f)
+ std::vector<std::pair<double, double> > intervals;
+ int res;
+
+ if (resolution_double == -1) { // Case we use an integer for the number of intervals.
+ double incr = (maxf - minf) / resolution_int;
+ double x = minf;
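+ // Each interval of length incr is enlarged by alpha on both sides, so that two
+ // consecutive intervals overlap on a fraction gain of their length.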
+ double alpha = (incr * gain) / (2 - 2 * gain);
+ double y = minf + incr + alpha;
+ std::pair<double, double> interm(x, y);
+ intervals.push_back(interm);
+ for (int i = 1; i < resolution_int - 1; i++) {
+ x = minf + i * incr - alpha;
+ y = minf + (i + 1) * incr + alpha;
+ std::pair<double, double> inter(x, y);
+ intervals.push_back(inter);
+ }
+ x = minf + (resolution_int - 1) * incr - alpha;
+ y = maxf;
+ std::pair<double, double> interM(x, y);
+ intervals.push_back(interM);
+ res = intervals.size();
+ if (verbose) {
+ for (int i = 0; i < res; i++)
+ std::cout << "Interval " << i << " = [" << intervals[i].first << ", " << intervals[i].second << "]"
+ << std::endl;
+ }
+ } else {
+ if (resolution_int == -1) { // Case we use a double for the length of the intervals.
+ double x = minf;
+ double y = x + resolution_double;
+ while (y <= maxf && maxf - (y - gain * resolution_double) >= resolution_double) {
+ std::pair<double, double> inter(x, y);
+ intervals.push_back(inter);
+ x = y - gain * resolution_double;
+ y = x + resolution_double;
+ }
+ std::pair<double, double> interM(x, maxf);
+ intervals.push_back(interM);
+ res = intervals.size();
+ if (verbose) {
+ for (int i = 0; i < res; i++)
+ std::cout << "Interval " << i << " = [" << intervals[i].first << ", " << intervals[i].second << "]"
+ << std::endl;
+ }
+ } else { // Case we use an integer and a double for the length of the intervals.
+ double x = minf;
+ double y = x + resolution_double;
+ int count = 0;
+ while (count < resolution_int && y <= maxf && maxf - (y - gain * resolution_double) >= resolution_double) {
+ std::pair<double, double> inter(x, y);
+ intervals.push_back(inter);
+ count++;
+ x = y - gain * resolution_double;
+ y = x + resolution_double;
+ }
+ res = intervals.size();
+ if (verbose) {
+ for (int i = 0; i < res; i++)
+ std::cout << "Interval " << i << " = [" << intervals[i].first << ", " << intervals[i].second << "]"
+ << std::endl;
+ }
+ }
+ }
+
+ // Sort points according to function values
+ std::vector<int> points(n);
+ for (int i = 0; i < n; i++) points[i] = i;
+ std::sort(points.begin(), points.end(), Less(this->func));
+ int id = 0;
+ int pos = 0;
+
+ for (int i = 0; i < res; i++) {
+ // Find points in the preimage
+ std::map<int, std::vector<int> > prop;
+ std::pair<double, double> inter1 = intervals[i];
+ int tmp = pos;
+
+ if (i != res - 1) {
+ if (i != 0) {
+ std::pair<double, double> inter3 = intervals[i - 1];
+ while (tmp != n && func[points[tmp]] < inter3.second) {
+ prop[points[tmp]] = adjacency_matrix[points[tmp]];
+ tmp++;
+ }
+ }
+
+ std::pair<double, double> inter2 = intervals[i + 1];
+ while (tmp != n && func[points[tmp]] < inter2.first) {
+ prop[points[tmp]] = adjacency_matrix[points[tmp]];
+ tmp++;
+ }
+
+ pos = tmp;
+ while (tmp != n && func[points[tmp]] < inter1.second) {
+ prop[points[tmp]] = adjacency_matrix[points[tmp]];
+ tmp++;
+ }
+
+ } else {
+ std::pair<double, double> inter3 = intervals[i - 1];
+ while (tmp != n && func[points[tmp]] < inter3.second) {
+ prop[points[tmp]] = adjacency_matrix[points[tmp]];
+ tmp++;
+ }
+
+ while (tmp != n) {
+ prop[points[tmp]] = adjacency_matrix[points[tmp]];
+ tmp++;
+ }
+ }
+
+ // Compute the connected components with DFS
+ std::map<int, bool> visit;
+ if (verbose) std::cout << "Preimage of interval " << i << std::endl;
+ for (std::map<int, std::vector<int> >::iterator it = prop.begin(); it != prop.end(); it++)
+ visit[it->first] = false;
+ if (!(prop.empty())) {
+ for (std::map<int, std::vector<int> >::iterator it = prop.begin(); it != prop.end(); it++) {
+ if (!(visit[it->first])) {
+ std::vector<int> cc;
+ cc.clear();
+ dfs(prop, it->first, cc, visit);
+ int cci = cc.size();
+ if (verbose) std::cout << "one CC with " << cci << " points, ";
+ double average_col = 0;
+ for (int j = 0; j < cci; j++) {
+ cover[cc[j]].push_back(id);
+ cover_back[id].push_back(cc[j]);
+ average_col += func_color[cc[j]] / cci;
+ }
+ cover_fct[id] = i;
+ cover_color[id] = std::pair<int, double>(cci, average_col);
+ id++;
+ }
+ }
+ }
+ if (verbose) std::cout << std::endl;
+ }
+
+ maximal_dim = id - 1;
+ }
+
+ public: // Set cover from file.
+ /** \brief Creates the cover C from a file containing the cover elements of each point (the order has to be the same
+ * as in the input file!).
+ *
+ * @param[in] cover_file_name name of the input cover file.
+ *
+ */
+ void set_cover_from_file(const std::string& cover_file_name) {
+ int vertex_id = 0;
+ Cover_t cov;
+ std::vector<Cover_t> cov_elts, cov_number;
+ std::ifstream input(cover_file_name);
+ std::string line;
+ while (std::getline(input, line)) {
+ cov_elts.clear();
+ std::stringstream stream(line);
+ while (stream >> cov) {
+ cov_elts.push_back(cov);
+ cov_number.push_back(cov);
+ cover_fct[cov] = cov;
+ cover_color[cov].second += func_color[vertex_id];
+ cover_color[cov].first++;
+ cover_back[cov].push_back(vertex_id);
+ }
+ cover[vertex_id] = cov_elts;
+ vertex_id++;
+ }
+ std::vector<Cover_t>::iterator it;
+ std::sort(cov_number.begin(), cov_number.end());
+ it = std::unique(cov_number.begin(), cov_number.end());
+ cov_number.resize(std::distance(cov_number.begin(), it));
+ maximal_dim = cov_number.size() - 1;
+ for (int i = 0; i <= maximal_dim; i++) cover_color[i].second /= cover_color[i].first;
+ cover_name = cover_file_name;
+ }
+
+ public: // Set cover from Voronoi
+ /** \brief Creates the cover C from the Voronoï cells of a subsampling of the point cloud.
+ *
+ * @param[in] distance distance between the points.
+ * @param[in] m number of points in the subsample.
+ *
+ */
+ template <typename Distance>
+ void set_cover_from_Voronoi(Distance distance, int m = 100) {
+ voronoi_subsamples.resize(m);
+ SampleWithoutReplacement(n, m, voronoi_subsamples);
+ if (distances.size() == 0) compute_pairwise_distances(distance);
+ std::vector<double> mindist(n);
+ for (int j = 0; j < n; j++) mindist[j] = std::numeric_limits<double>::max();
+
+ // Compute the geodesic distances to subsamples with Dijkstra
+ for (int i = 0; i < m; i++) {
+ if (verbose) std::cout << "Computing geodesic distances to seed " << i << "..." << std::endl;
+ int seed = voronoi_subsamples[i];
+ std::vector<double> dist(n);
+ std::vector<int> process(n);
+ for (int j = 0; j < n; j++) {
+ dist[j] = std::numeric_limits<double>::max();
+ process[j] = j;
+ }
+ dist[seed] = 0;
+ int curr_size = process.size();
+ int min_point, min_index;
+ double min_dist;
+ std::vector<int> neighbors;
+ int num_neighbors;
+
+ while (curr_size > 0) {
+ min_dist = std::numeric_limits<double>::max();
+ min_index = -1;
+ min_point = -1;
+ for (int j = 0; j < curr_size; j++) {
+ if (dist[process[j]] < min_dist) {
+ min_point = process[j];
+ min_dist = dist[process[j]];
+ min_index = j;
+ }
+ }
+ assert(min_index != -1);
+ process.erase(process.begin() + min_index);
+ assert(min_point != -1);
+ neighbors = adjacency_matrix[min_point];
+ num_neighbors = neighbors.size();
+ for (int j = 0; j < num_neighbors; j++) {
+ double d = dist[min_point] + distances[min_point][neighbors[j]];
+ dist[neighbors[j]] = std::min(dist[neighbors[j]], d);
+ }
+ curr_size = process.size();
+ }
+
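+ // Assign each point to the closest seed found so far (w.r.t. the graph geodesic distance);
+ // the resulting cells are the elements of the Voronoi cover.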
+ for (int j = 0; j < n; j++)
+ if (mindist[j] > dist[j]) {
+ mindist[j] = dist[j];
+ if (cover[j].size() == 0)
+ cover[j].push_back(i);
+ else
+ cover[j][0] = i;
+ }
+ }
+
+ for (int i = 0; i < n; i++) {
+ cover_back[cover[i][0]].push_back(i);
+ cover_color[cover[i][0]].second += func_color[i];
+ cover_color[cover[i][0]].first++;
+ }
+ for (int i = 0; i < m; i++) cover_color[i].second /= cover_color[i].first;
+ maximal_dim = m - 1;
+ cover_name = "Voronoi";
+ }
+
+ public: // return subset of data corresponding to a node
+ /** \brief Returns the data subset corresponding to a specific node of the created complex.
+ *
+ * @param[in] c ID of the node.
+ * @result cover_back(c) vector of IDs of data points.
+ *
+ */
+ const std::vector<int>& subpopulation(Cover_t c) { return cover_back[c]; }
+
+ // *******************************************************************************************************************
+ // Visualization.
+ // *******************************************************************************************************************
+
+ public: // Set color from file.
+ /** \brief Computes the function used to color the nodes of the simplicial complex from a file containing the function
+ * values.
+ *
+ * @param[in] color_file_name name of the input color file.
+ *
+ */
+ void set_color_from_file(const std::string& color_file_name) {
+ int vertex_id = 0;
+ std::ifstream input(color_file_name);
+ std::string line;
+ double f;
+ while (std::getline(input, line)) {
+ std::stringstream stream(line);
+ stream >> f;
+ func_color.emplace(vertex_id, f);
+ vertex_id++;
+ }
+ color_name = color_file_name;
+ }
+
+ public: // Set color from kth coordinate
+ /** \brief Computes the function used to color the nodes of the simplicial complex from the k-th coordinate.
+ *
+ * @param[in] k coordinate to use (start at 0).
+ *
+ */
+ void set_color_from_coordinate(int k = 0) {
+ for (int i = 0; i < n; i++) func_color.emplace(i, point_cloud[i][k]);
+ color_name = "coordinate ";
+ color_name.append(std::to_string(k));
+ }
+
+ public: // Set color from vector.
+ /** \brief Computes the function used to color the nodes of the simplicial complex from a vector stored in memory.
+ *
+ * @param[in] color input vector of values.
+ *
+ */
+ void set_color_from_vector(std::vector<double> color) {
+ for (unsigned int i = 0; i < color.size(); i++) func_color.emplace(i, color[i]);
+ }
+
+ public: // Create a .dot file that can be compiled with neato to produce a .pdf file.
+ /** \brief Creates a .dot file called SC.dot once the simplicial complex is computed; it can be processed
+ * with neato (part of the graphviz package) to produce a .pdf visualization of its 1-skeleton.
+ */
+ void plot_DOT() {
+ char mapp[11] = "SC.dot";
+ std::ofstream graphic(mapp);
+ graphic << "graph GIC {" << std::endl;
+ double maxv, minv;
+ maxv = std::numeric_limits<double>::lowest();
+ minv = std::numeric_limits<double>::max();
+ for (std::map<Cover_t, std::pair<int, double> >::iterator iit = cover_color.begin(); iit != cover_color.end();
+ iit++) {
+ maxv = std::max(maxv, iit->second.second);
+ minv = std::min(minv, iit->second.second);
+ }
+ int k = 0;
+ std::vector<int> nodes;
+ nodes.clear();
+ for (std::map<Cover_t, std::pair<int, double> >::iterator iit = cover_color.begin(); iit != cover_color.end();
+ iit++) {
+ if (iit->second.first > mask) {
+ nodes.push_back(iit->first);
+ graphic << iit->first << "[shape=circle fontcolor=black color=black label=\"" << iit->first << ":"
+ << iit->second.first << "\" style=filled fillcolor=\""
+ << (1 - (maxv - iit->second.second) / (maxv - minv)) * 0.6 << ", 1, 1\"]" << std::endl;
+ k++;
+ }
+ }
+ int ke = 0;
+ int num_simplices = simplices.size();
+ for (int i = 0; i < num_simplices; i++)
+ if (simplices[i].size() == 2) {
+ if (cover_color[simplices[i][0]].first > mask && cover_color[simplices[i][1]].first > mask) {
+ graphic << " " << simplices[i][0] << " -- " << simplices[i][1] << " [weight=15];" << std::endl;
+ ke++;
+ }
+ }
+ graphic << "}";
+ graphic.close();
+ std::cout << "SC.dot generated. It can be visualized with e.g. neato." << std::endl;
+ }
+
+ public: // Create a .txt file that can be compiled with KeplerMapper.
+ /** \brief Creates a .txt file called SC.txt describing the 1-skeleton, which can then be plotted with e.g.
+ * KeplerMapper.
+ */
+ void write_info() {
+ int num_simplices = simplices.size();
+ int num_edges = 0;
+ char mapp[11] = "SC.txt";
+ std::ofstream graphic(mapp);
+ for (int i = 0; i < num_simplices; i++)
+ if (simplices[i].size() == 2)
+ if (cover_color[simplices[i][0]].first > mask && cover_color[simplices[i][1]].first > mask) num_edges++;
+
+ graphic << point_cloud_name << std::endl;
+ graphic << cover_name << std::endl;
+ graphic << color_name << std::endl;
+ graphic << resolution_double << " " << gain << std::endl;
+ graphic << cover_color.size() << " " << num_edges << std::endl;
+
+ for (std::map<Cover_t, std::pair<int, double> >::iterator iit = cover_color.begin(); iit != cover_color.end();
+ iit++)
+ graphic << iit->first << " " << iit->second.second << " " << iit->second.first << std::endl;
+
+ for (int i = 0; i < num_simplices; i++)
+ if (simplices[i].size() == 2)
+ if (cover_color[simplices[i][0]].first > mask && cover_color[simplices[i][1]].first > mask)
+ graphic << simplices[i][0] << " " << simplices[i][1] << std::endl;
+ graphic.close();
+ std::cout << "SC.txt generated. It can be visualized with e.g. python KeplerMapperVisuFromTxtFile.py and firefox."
+ << std::endl;
+ }
+
+ public: // Create a .off file that can be visualized (e.g. with Geomview).
+ /** \brief Creates a .off file called SC.off for 3D visualization, which contains the 2-skeleton of the GIC.
+ * This function assumes that the cover has been computed with Voronoi. If data points are in 1D or 2D,
+ * the remaining coordinates of the points embedded in 3D are set to 0.
+ */
+ void plot_OFF() {
+ assert(cover_name == "Voronoi");
+ char gic[11] = "SC.off";
+ std::ofstream graphic(gic);
+ graphic << "OFF" << std::endl;
+ int m = voronoi_subsamples.size();
+ int numedges = 0;
+ int numfaces = 0;
+ std::vector<std::vector<int> > edges, faces;
+ int numsimplices = simplices.size();
+ for (int i = 0; i < numsimplices; i++) {
+ if (simplices[i].size() == 2) {
+ numedges++;
+ edges.push_back(simplices[i]);
+ }
+ if (simplices[i].size() == 3) {
+ numfaces++;
+ faces.push_back(simplices[i]);
+ }
+ }
+ graphic << m << " " << numedges + numfaces << std::endl;
+ for (int i = 0; i < m; i++) {
+ if (data_dimension <= 3) {
+ for (int j = 0; j < data_dimension; j++) graphic << point_cloud[voronoi_subsamples[i]][j] << " ";
+ for (int j = data_dimension; j < 3; j++) graphic << 0 << " ";
+ graphic << std::endl;
+ } else {
+ for (int j = 0; j < 3; j++) graphic << point_cloud[voronoi_subsamples[i]][j] << " ";
+ }
+ }
+ for (int i = 0; i < numedges; i++) graphic << 2 << " " << edges[i][0] << " " << edges[i][1] << std::endl;
+ for (int i = 0; i < numfaces; i++)
+ graphic << 3 << " " << faces[i][0] << " " << faces[i][1] << " " << faces[i][2] << std::endl;
+ graphic.close();
+ std::cout << "SC.off generated. It can be visualized with e.g. geomview." << std::endl;
+ }
+
+ // *******************************************************************************************************************
+ // *******************************************************************************************************************
+
+ public:
+ /** \brief Creates the simplicial complex.
+ *
+ * @param[in] complex SimplicialComplexForRips to be created.
+ *
+ */
+ template <typename SimplicialComplexForRips>
+ void create_complex(SimplicialComplexForRips& complex) {
+ unsigned int dimension = 0;
+ for (auto const& simplex : simplices) {
+ complex.insert_simplex_and_subfaces(simplex);
+ if (dimension < simplex.size() - 1) dimension = simplex.size() - 1;
+ }
+ complex.set_dimension(dimension);
+ }
+
+ public:
+ /** \brief Computes the simplices of the simplicial complex.
+ */
+ void find_simplices() {
+ if (type != "Nerve" && type != "GIC") {
+ std::cout << "Type of complex needs to be specified." << std::endl;
+ return;
+ }
+
+ if (type == "Nerve") {
+ for (std::map<int, std::vector<Cover_t> >::iterator it = cover.begin(); it != cover.end(); it++)
+ simplices.push_back(it->second);
+ std::vector<std::vector<Cover_t> >::iterator it;
+ std::sort(simplices.begin(), simplices.end());
+ it = std::unique(simplices.begin(), simplices.end());
+ simplices.resize(std::distance(simplices.begin(), it));
+ }
+
+ if (type == "GIC") {
+ if (functional_cover) {
+ // Computes the simplices in the GIC by looking at all the edges of the graph and adding the
+ // corresponding edges in the GIC if the images of the endpoints belong to consecutive intervals.
+
+ if (gain >= 0.5)
+ throw std::invalid_argument(
+ "the output of this function is correct ONLY if the cover is minimal, i.e. the gain is less than 0.5.");
+
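+ // Since the cover is minimal (gain < 0.5), each point belongs to at most two consecutive
+ // cover elements: v1 is the smallest element containing the current point, v2 the largest
+ // element containing its neighbor, and an edge is added whenever they are consecutive.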
+ int v1, v2;
+
+ // Loop on all points.
+ for (std::map<int, std::vector<Cover_t> >::iterator it = cover.begin(); it != cover.end(); it++) {
+ int vid = it->first;
+ std::vector<int> neighbors = adjacency_matrix[vid];
+ int num_neighb = neighbors.size();
+
+ // Find cover of current point (vid).
+ if (cover[vid].size() == 2)
+ v1 = std::min(cover[vid][0], cover[vid][1]);
+ else
+ v1 = cover[vid][0];
+ std::vector<int> node(1);
+ node[0] = v1;
+ simplices.push_back(node);
+
+ // Loop on neighbors.
+ for (int i = 0; i < num_neighb; i++) {
+ int neighb = neighbors[i];
+
+ // Find cover of neighbor (neighb).
+ if (cover[neighb].size() == 2)
+ v2 = std::max(cover[neighb][0], cover[neighb][1]);
+ else
+ v2 = cover[neighb][0];
+
+ // If neighbor is in next interval, add edge.
+ if (cover_fct[v2] == cover_fct[v1] + 1) {
+ std::vector<int> edge(2);
+ edge[0] = v1;
+ edge[1] = v2;
+ simplices.push_back(edge);
+ }
+ }
+ }
+ std::vector<std::vector<Cover_t> >::iterator it;
+ std::sort(simplices.begin(), simplices.end());
+ it = std::unique(simplices.begin(), simplices.end());
+ simplices.resize(std::distance(simplices.begin(), it));
+
+ } else {
+ // Find IDs of edges to remove
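+ // (i.e. the edges whose two endpoints belong to exactly the same single cover element)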
+ std::vector<int> simplex_to_remove;
+ int simplex_id = 0;
+ for (auto simplex : st.complex_simplex_range()) {
+ if (st.dimension(simplex) == 1) {
+ std::vector<std::vector<Cover_t> > comp;
+ for (auto vertex : st.simplex_vertex_range(simplex)) comp.push_back(cover[vertex]);
+ if (comp[0].size() == 1 && comp[0] == comp[1]) simplex_to_remove.push_back(simplex_id);
+ }
+ simplex_id++;
+ }
+
+ // Remove edges
+ if (simplex_to_remove.size() > 1) {
+ int current_id = 1;
+ auto simplex = st.complex_simplex_range().begin();
+ int num_rem = 0;
+ for (int i = 0; i < simplex_id - 1; i++) {
+ int j = i + 1;
+ auto simplex_tmp = simplex;
+ simplex_tmp++;
+ if (j == simplex_to_remove[current_id]) {
+ st.remove_maximal_simplex(*simplex_tmp);
+ current_id++;
+ num_rem++;
+ } else {
+ simplex++;
+ }
+ }
+ simplex = st.complex_simplex_range().begin();
+ for (int i = 0; i < simplex_to_remove[0]; i++) simplex++;
+ st.remove_maximal_simplex(*simplex);
+ }
+
+ // Build the Simplex Tree corresponding to the graph
+ st.expansion(maximal_dim);
+
+ // Find simplices of GIC
+ simplices.clear();
+ for (auto simplex : st.complex_simplex_range()) {
+ if (!st.has_children(simplex)) {
+ std::vector<Cover_t> simplx;
+ for (auto vertex : st.simplex_vertex_range(simplex)) {
+ unsigned int sz = cover[vertex].size();
+ for (unsigned int i = 0; i < sz; i++) {
+ simplx.push_back(cover[vertex][i]);
+ }
+ }
+
+ std::sort(simplx.begin(), simplx.end());
+ std::vector<Cover_t>::iterator it = std::unique(simplx.begin(), simplx.end());
+ simplx.resize(std::distance(simplx.begin(), it));
+ simplices.push_back(simplx);
+ }
+ }
+ std::vector<std::vector<Cover_t> >::iterator it;
+ std::sort(simplices.begin(), simplices.end());
+ it = std::unique(simplices.begin(), simplices.end());
+ simplices.resize(std::distance(simplices.begin(), it));
+ }
+ }
+ }
+};
+
+} // namespace cover_complex
+
+} // namespace Gudhi
+
+#endif // GIC_H_
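
For orientation, here is a minimal usage sketch (not part of the patch itself) of how the two methods documented above are driven in practice. It mirrors the check_nerve unit test added further down in this diff, and the data paths are the small test files that the patch adds under src/Nerve_GIC/test/data/.

```
// Minimal sketch: build a Nerve with Cover_complex, then export it to a Simplex_tree.
// Switch set_type to "GIC" to build the graph induced complex instead.
#include <gudhi/GIC.h>
#include <gudhi/distance_functions.h>
#include <gudhi/reader_utils.h>

#include <iostream>
#include <string>
#include <vector>

int main() {
  using Point = std::vector<float>;
  Gudhi::cover_complex::Cover_complex<Point> nerve;
  nerve.set_type("Nerve");                  // "Nerve" or "GIC"
  nerve.read_point_cloud("data/cloud");     // nOFF point cloud
  nerve.set_graph_from_file("data/graph");  // one edge "u v" per line
  nerve.set_cover_from_file("data/cover");  // cover assignment of each point, read from file
  nerve.find_simplices();                   // computes the list of simplices

  Gudhi::Simplex_tree<> stree;
  nerve.create_complex(stree);              // inserts the simplices and sets the dimension
  std::cout << stree.num_simplices() << " simplices, dimension " << stree.dimension() << std::endl;
  return 0;
}
```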
diff --git a/src/Nerve_GIC/test/CMakeLists.txt b/src/Nerve_GIC/test/CMakeLists.txt
new file mode 100644
index 00000000..03fe47ca
--- /dev/null
+++ b/src/Nerve_GIC/test/CMakeLists.txt
@@ -0,0 +1,14 @@
+cmake_minimum_required(VERSION 2.6)
+project(Graph_induced_complex_tests)
+
+include(GUDHI_test_coverage)
+
+add_executable ( Nerve_GIC_test_unit test_GIC.cpp )
+target_link_libraries(Nerve_GIC_test_unit ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
+if (TBB_FOUND)
+ target_link_libraries(Nerve_GIC_test_unit ${TBB_LIBRARIES})
+endif()
+
+file(COPY data DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+
+gudhi_add_coverage_test(Nerve_GIC_test_unit)
diff --git a/src/Nerve_GIC/test/data/cloud b/src/Nerve_GIC/test/data/cloud
new file mode 100644
index 00000000..4a0c170d
--- /dev/null
+++ b/src/Nerve_GIC/test/data/cloud
@@ -0,0 +1,6 @@
+nOFF
+3
+3 0 0
+0 0 0
+2 1 0
+4 0 0 \ No newline at end of file
diff --git a/src/Nerve_GIC/test/data/cover b/src/Nerve_GIC/test/data/cover
new file mode 100644
index 00000000..5f5fbe75
--- /dev/null
+++ b/src/Nerve_GIC/test/data/cover
@@ -0,0 +1,3 @@
+1
+2
+3 \ No newline at end of file
diff --git a/src/Nerve_GIC/test/data/graph b/src/Nerve_GIC/test/data/graph
new file mode 100644
index 00000000..37142800
--- /dev/null
+++ b/src/Nerve_GIC/test/data/graph
@@ -0,0 +1,3 @@
+0 1
+0 2
+1 2 \ No newline at end of file
diff --git a/src/Nerve_GIC/test/test_GIC.cpp b/src/Nerve_GIC/test/test_GIC.cpp
new file mode 100644
index 00000000..a8b1e7f7
--- /dev/null
+++ b/src/Nerve_GIC/test/test_GIC.cpp
@@ -0,0 +1,90 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Mathieu Carrière
+ *
+ * Copyright (C) 2017 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#define BOOST_TEST_DYN_LINK
+#define BOOST_TEST_MODULE "graph_induced_complex"
+
+#include <boost/test/unit_test.hpp>
+#include <cmath> // float comparison
+#include <limits>
+#include <string>
+#include <vector>
+#include <algorithm> // std::max
+#include <gudhi/GIC.h>
+#include <gudhi/distance_functions.h>
+#include <gudhi/reader_utils.h>
+
+BOOST_AUTO_TEST_CASE(check_nerve) {
+ using Point = std::vector<float>;
+ Gudhi::cover_complex::Cover_complex<Point> N;
+ N.set_type("Nerve");
+ std::string cloud_file_name("data/cloud");
+ N.read_point_cloud(cloud_file_name);
+ std::string graph_file_name("data/graph");
+ N.set_graph_from_file(graph_file_name);
+ std::string cover_file_name("data/cover");
+ N.set_cover_from_file(cover_file_name);
+ N.find_simplices();
+ Gudhi::Simplex_tree<> stree;
+ N.create_complex(stree);
+
+ BOOST_CHECK(stree.num_vertices() == 3);
+ BOOST_CHECK((stree.num_simplices() - stree.num_vertices()) == 0);
+ BOOST_CHECK(stree.dimension() == 0);
+}
+
+BOOST_AUTO_TEST_CASE(check_GIC) {
+ using Point = std::vector<float>;
+ Gudhi::cover_complex::Cover_complex<Point> GIC;
+ GIC.set_type("GIC");
+ std::string cloud_file_name("data/cloud");
+ GIC.read_point_cloud(cloud_file_name);
+ std::string graph_file_name("data/graph");
+ GIC.set_graph_from_file(graph_file_name);
+ std::string cover_file_name("data/cover");
+ GIC.set_cover_from_file(cover_file_name);
+ GIC.find_simplices();
+ Gudhi::Simplex_tree<> stree;
+ GIC.create_complex(stree);
+
+ BOOST_CHECK(stree.num_vertices() == 3);
+ BOOST_CHECK((stree.num_simplices() - stree.num_vertices()) == 4);
+ BOOST_CHECK(stree.dimension() == 2);
+}
+
+BOOST_AUTO_TEST_CASE(check_voronoiGIC) {
+ using Point = std::vector<float>;
+ Gudhi::cover_complex::Cover_complex<Point> GIC;
+ GIC.set_type("GIC");
+ std::string cloud_file_name("data/cloud");
+ GIC.read_point_cloud(cloud_file_name);
+ std::string graph_file_name("data/graph");
+ GIC.set_graph_from_file(graph_file_name);
+ GIC.set_cover_from_Voronoi(Gudhi::Euclidean_distance(), 2);
+ GIC.find_simplices();
+ Gudhi::Simplex_tree<> stree;
+ GIC.create_complex(stree);
+
+ BOOST_CHECK(stree.num_vertices() == 2);
+ BOOST_CHECK((stree.num_simplices() - stree.num_vertices()) == 1);
+ BOOST_CHECK(stree.dimension() == 1);
+}
diff --git a/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h b/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h
index e17e5926..62bbbfc5 100644
--- a/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h
+++ b/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h
@@ -143,8 +143,8 @@ namespace persistent_cohomology {
We provide several example files: run these examples with -h for details on their use, and read the README file.
-\li <a href="_persistent_cohomology_2rips_persistence_8cpp-example.html">
-Persistent_cohomology/rips_persistence.cpp</a> computes the Rips complex of a point cloud and outputs its persistence
+\li <a href="_rips_complex_2rips_persistence_8cpp-example.html">
+Rips_complex/rips_persistence.cpp</a> computes the Rips complex of a point cloud and outputs its persistence
diagram.
\code $> ./rips_persistence ../../data/points/tore3D_1307.off -r 0.25 -m 0.5 -d 3 -p 3 \endcode
\code The complex contains 177838 simplices
@@ -158,12 +158,12 @@ diagram.
Persistent_cohomology/rips_multifield_persistence.cpp</a> computes the Rips complex of a point cloud and outputs its
persistence diagram with a family of field coefficients.
-\li <a href="_persistent_cohomology_2rips_distance_matrix_persistence_8cpp-example.html">
-Persistent_cohomology/rips_distance_matrix_persistence.cpp</a> computes the Rips complex of a distance matrix and
+\li <a href="_rips_complex_2rips_distance_matrix_persistence_8cpp-example.html">
+Rips_complex/rips_distance_matrix_persistence.cpp</a> computes the Rips complex of a distance matrix and
outputs its persistence diagram.
-\li <a href="_persistent_cohomology_2alpha_complex_3d_persistence_8cpp-example.html">
-Persistent_cohomology/alpha_complex_3d_persistence.cpp</a> computes the persistent homology with
+\li <a href="_alpha_complex_2alpha_complex_3d_persistence_8cpp-example.html">
+Alpha_complex/alpha_complex_3d_persistence.cpp</a> computes the persistent homology with
\f$\mathbb{Z}/2\mathbb{Z}\f$ coefficients of the alpha complex on points sampling from an OFF file.
\code $> ./alpha_complex_3d_persistence ../../data/points/tore3D_300.off 2 0.45 \endcode
\code Simplex_tree dim: 3
@@ -172,8 +172,8 @@ Persistent_cohomology/alpha_complex_3d_persistence.cpp</a> computes the persiste
2 1 0.0934117 1.00003
2 2 0.56444 1.03938 \endcode
-\li <a href="_persistent_cohomology_2exact_alpha_complex_3d_persistence_8cpp-example.html">
-Persistent_cohomology/exact_alpha_complex_3d_persistence.cpp</a> computes the persistent homology with
+\li <a href="_alpha_complex_2exact_alpha_complex_3d_persistence_8cpp-example.html">
+Alpha_complex/exact_alpha_complex_3d_persistence.cpp</a> computes the persistent homology with
\f$\mathbb{Z}/2\mathbb{Z}\f$ coefficients of the alpha complex on points sampling from an OFF file.
Here, as CGAL computes the exact values, it is slower, but it is necessary when points are on a grid
for instance.
@@ -182,20 +182,20 @@ for instance.
2 0 0 inf
2 2 0.0002 0.2028 \endcode
-\li <a href="_persistent_cohomology_2weighted_alpha_complex_3d_persistence_8cpp-example.html">
-Persistent_cohomology/weighted_alpha_complex_3d_persistence.cpp</a> computes the persistent homology with
+\li <a href="_alpha_complex_2weighted_alpha_complex_3d_persistence_8cpp-example.html">
+Alpha_complex/weighted_alpha_complex_3d_persistence.cpp</a> computes the persistent homology with
\f$\mathbb{Z}/2\mathbb{Z}\f$ coefficients of the weighted alpha complex on points sampling from an OFF file
and a weights file.
\code $> ./weighted_alpha_complex_3d_persistence ../../data/points/tore3D_300.off
../../data/points/tore3D_300.weights 2 0.45 \endcode
\code Simplex_tree dim: 3
-2 -0 0 inf
-2 1 0.0682162 1.0001
-2 1 0.0934117 1.00003
-2 2 0.56444 1.03938 \endcode
+2 0 -1 inf
+2 1 -0.931784 0.000103311
+2 1 -0.906588 2.60165e-05
+2 2 -0.43556 0.0393798 \endcode
-\li <a href="_persistent_cohomology_2alpha_complex_persistence_8cpp-example.html">
-Persistent_cohomology/alpha_complex_persistence.cpp</a> computes the persistent homology with
+\li <a href="_alpha_complex_2alpha_complex_persistence_8cpp-example.html">
+Alpha_complex/alpha_complex_persistence.cpp</a> computes the persistent homology with
\f$\mathbb{Z}/p\mathbb{Z}\f$ coefficients of the alpha complex on points sampling from an OFF file.
\code $> ./alpha_complex_persistence -r 32 -p 2 -m 0.45 ../../data/points/tore3D_300.off \endcode
\code Alpha complex is of dimension 3 - 9273 simplices - 300 vertices.
@@ -205,10 +205,11 @@ Simplex_tree dim: 3
2 1 0.0934117 1.00003
2 2 0.56444 1.03938 \endcode
-\li <a href="_persistent_cohomology_2periodic_alpha_complex_3d_persistence_8cpp-example.html">
-Persistent_cohomology/periodic_alpha_complex_3d_persistence.cpp</a> computes the persistent homology with
+\li <a href="_alpha_complex_2periodic_alpha_complex_3d_persistence_8cpp-example.html">
+Alpha_complex/periodic_alpha_complex_3d_persistence.cpp</a> computes the persistent homology with
\f$\mathbb{Z}/2\mathbb{Z}\f$ coefficients of the periodic alpha complex on points sampling from an OFF file.
-\code $> ./periodic_alpha_complex_3d_persistence ../../data/points/grid_10_10_10_in_0_1.off 3 1.0 \endcode
+\code $> ./periodic_alpha_complex_3d_persistence ../../data/points/grid_10_10_10_in_0_1.off
+../../data/points/iso_cuboid_3_in_0_1.txt 3 1.0 \endcode
\code Periodic Delaunay computed.
Simplex_tree dim: 3
3 0 0 inf
diff --git a/src/Persistent_cohomology/example/CMakeLists.txt b/src/Persistent_cohomology/example/CMakeLists.txt
index f47de4c3..18e2913b 100644
--- a/src/Persistent_cohomology/example/CMakeLists.txt
+++ b/src/Persistent_cohomology/example/CMakeLists.txt
@@ -5,12 +5,6 @@ add_executable(plain_homology plain_homology.cpp)
add_executable(persistence_from_simple_simplex_tree persistence_from_simple_simplex_tree.cpp)
-add_executable(rips_distance_matrix_persistence rips_distance_matrix_persistence.cpp)
-target_link_libraries(rips_distance_matrix_persistence ${Boost_PROGRAM_OPTIONS_LIBRARY})
-
-add_executable(rips_persistence rips_persistence.cpp)
-target_link_libraries(rips_persistence ${Boost_PROGRAM_OPTIONS_LIBRARY})
-
add_executable(rips_persistence_step_by_step rips_persistence_step_by_step.cpp)
target_link_libraries(rips_persistence_step_by_step ${Boost_PROGRAM_OPTIONS_LIBRARY})
@@ -23,8 +17,6 @@ target_link_libraries(persistence_from_file ${Boost_PROGRAM_OPTIONS_LIBRARY})
if (TBB_FOUND)
target_link_libraries(plain_homology ${TBB_LIBRARIES})
target_link_libraries(persistence_from_simple_simplex_tree ${TBB_LIBRARIES})
- target_link_libraries(rips_distance_matrix_persistence ${TBB_LIBRARIES})
- target_link_libraries(rips_persistence ${TBB_LIBRARIES})
target_link_libraries(rips_persistence_step_by_step ${TBB_LIBRARIES})
target_link_libraries(rips_persistence_via_boundary_matrix ${TBB_LIBRARIES})
target_link_libraries(persistence_from_file ${TBB_LIBRARIES})
@@ -33,10 +25,6 @@ endif()
add_test(NAME Persistent_cohomology_example_plain_homology COMMAND $<TARGET_FILE:plain_homology>)
add_test(NAME Persistent_cohomology_example_from_simple_simplex_tree COMMAND $<TARGET_FILE:persistence_from_simple_simplex_tree>
"1" "0")
-add_test(NAME Persistent_cohomology_example_from_rips_distance_matrix COMMAND $<TARGET_FILE:rips_distance_matrix_persistence>
- "${CMAKE_SOURCE_DIR}/data/distance_matrix/full_square_distance_matrix.csv" "-r" "1.0" "-d" "3" "-p" "3" "-m" "0")
-add_test(NAME Persistent_cohomology_example_from_rips_on_tore_3D COMMAND $<TARGET_FILE:rips_persistence>
- "${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" "-r" "0.25" "-m" "0.5" "-d" "3" "-p" "3")
add_test(NAME Persistent_cohomology_example_from_rips_step_by_step_on_tore_3D COMMAND $<TARGET_FILE:rips_persistence_step_by_step>
"${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" "-r" "0.25" "-m" "0.5" "-d" "3" "-p" "3")
add_test(NAME Persistent_cohomology_example_via_boundary_matrix COMMAND $<TARGET_FILE:rips_persistence_via_boundary_matrix>
@@ -48,8 +36,6 @@ add_test(NAME Persistent_cohomology_example_from_file_3_3_100 COMMAND $<TARGET_F
install(TARGETS plain_homology DESTINATION bin)
install(TARGETS persistence_from_simple_simplex_tree DESTINATION bin)
-install(TARGETS rips_distance_matrix_persistence DESTINATION bin)
-install(TARGETS rips_persistence DESTINATION bin)
install(TARGETS rips_persistence_step_by_step DESTINATION bin)
install(TARGETS rips_persistence_via_boundary_matrix DESTINATION bin)
install(TARGETS persistence_from_file DESTINATION bin)
@@ -69,53 +55,15 @@ if(GMP_FOUND)
endif(GMP_FOUND)
if(CGAL_FOUND)
- add_executable(alpha_complex_3d_persistence alpha_complex_3d_persistence.cpp)
- target_link_libraries(alpha_complex_3d_persistence ${CGAL_LIBRARY})
- add_executable(exact_alpha_complex_3d_persistence exact_alpha_complex_3d_persistence.cpp)
- target_link_libraries(exact_alpha_complex_3d_persistence ${CGAL_LIBRARY})
- add_executable(weighted_alpha_complex_3d_persistence weighted_alpha_complex_3d_persistence.cpp)
- target_link_libraries(weighted_alpha_complex_3d_persistence ${CGAL_LIBRARY})
-
- if (TBB_FOUND)
- target_link_libraries(alpha_complex_3d_persistence ${TBB_LIBRARIES})
- target_link_libraries(exact_alpha_complex_3d_persistence ${TBB_LIBRARIES})
- target_link_libraries(weighted_alpha_complex_3d_persistence ${TBB_LIBRARIES})
- endif(TBB_FOUND)
- add_test(NAME Persistent_cohomology_example_alpha_complex_3d COMMAND $<TARGET_FILE:alpha_complex_3d_persistence>
- "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off" "2" "0.45")
- add_test(NAME Persistent_cohomology_example_exact_alpha_complex_3d COMMAND $<TARGET_FILE:exact_alpha_complex_3d_persistence>
- "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off" "2" "0.45")
- add_test(NAME Persistent_cohomology_example_weighted_alpha_complex_3d COMMAND $<TARGET_FILE:weighted_alpha_complex_3d_persistence>
- "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off" "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.weights" "2" "0.45")
-
- install(TARGETS alpha_complex_3d_persistence DESTINATION bin)
- install(TARGETS exact_alpha_complex_3d_persistence DESTINATION bin)
- install(TARGETS weighted_alpha_complex_3d_persistence DESTINATION bin)
-
if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
- add_executable (alpha_complex_persistence alpha_complex_persistence.cpp)
- target_link_libraries(alpha_complex_persistence
- ${CGAL_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY})
-
- add_executable(periodic_alpha_complex_3d_persistence periodic_alpha_complex_3d_persistence.cpp)
- target_link_libraries(periodic_alpha_complex_3d_persistence ${CGAL_LIBRARY})
-
add_executable(custom_persistence_sort custom_persistence_sort.cpp)
target_link_libraries(custom_persistence_sort ${CGAL_LIBRARY})
if (TBB_FOUND)
- target_link_libraries(alpha_complex_persistence ${TBB_LIBRARIES})
- target_link_libraries(periodic_alpha_complex_3d_persistence ${TBB_LIBRARIES})
target_link_libraries(custom_persistence_sort ${TBB_LIBRARIES})
endif(TBB_FOUND)
- add_test(NAME Persistent_cohomology_example_alpha_complex COMMAND $<TARGET_FILE:alpha_complex_persistence>
- "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off" "-p" "2" "-m" "0.45")
- add_test(NAME Persistent_cohomology_example_periodic_alpha_complex_3d COMMAND $<TARGET_FILE:periodic_alpha_complex_3d_persistence>
- "${CMAKE_SOURCE_DIR}/data/points/grid_10_10_10_in_0_1.off" "${CMAKE_SOURCE_DIR}/data/points/iso_cuboid_3_in_0_1.txt" "2" "0")
add_test(NAME Persistent_cohomology_example_custom_persistence_sort COMMAND $<TARGET_FILE:custom_persistence_sort>)
- install(TARGETS alpha_complex_persistence DESTINATION bin)
- install(TARGETS periodic_alpha_complex_3d_persistence DESTINATION bin)
install(TARGETS custom_persistence_sort DESTINATION bin)
endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
diff --git a/src/Persistent_cohomology/example/README b/src/Persistent_cohomology/example/README
index 794b94ae..f39d9584 100644
--- a/src/Persistent_cohomology/example/README
+++ b/src/Persistent_cohomology/example/README
@@ -1,43 +1,14 @@
-To build the example, run in a Terminal:
+To build the examples, run in a Terminal:
-cd /path-to-example/
+cd /path-to-examples/
cmake .
make
***********************************************************************************************************************
Example of use of RIPS:
-Computation of the persistent homology with Z/2Z coefficients of the Rips complex on points
-sampling a Klein bottle:
-
-./rips_persistence ../../data/points/tore3D_1307.off -r 0.25 -m 0.5 -d 3 -p 2
-
-output:
-2 0 0 inf
-2 1 0.0983494 inf
-2 1 0.104347 inf
-2 2 0.138335 inf
-
-
-Every line is of this format: p1*...*pr dim b d
-where
- p1*...*pr is the product of prime numbers pi such that the homology feature exists in homology with Z/piZ coefficients.
- dim is the dimension of the homological feature,
- b and d are respectively the birth and death of the feature and
-
-
-
-with Z/3Z coefficients:
-
-./rips_persistence ../../data/points/tore3D_1307.off -r 0.25 -m 0.5 -d 3 -p 3
-
-output:
-3 0 0 inf
-3 1 0.0983494 inf
-3 1 0.104347 inf
-3 2 0.138335 inf
-
-and the computation with Z/2Z and Z/3Z coefficients simultaneously:
+Computation, with Z/2Z and Z/3Z coefficients simultaneously, of the persistent homology of the Rips complex
+on points sampling a 3D torus:
./rips_multifield_persistence ../../data/points/tore3D_1307.off -r 0.25 -m 0.12 -d 3 -p 2 -q 3
@@ -53,7 +24,13 @@ output:
6 0 0 0.12047
6 0 0 0.120414
-and finally the computation with all Z/pZ for 2 <= p <= 71 (20 first prime numbers):
+Every line is of this format: p1*...*pr dim b d
+where
+ p1*...*pr is the product of the prime numbers pi such that the homology feature exists in homology with Z/piZ coefficients,
+ dim is the dimension of the homological feature, and
+ b and d are respectively the birth and death of the feature.
+
+and the computation with all Z/pZ for 2 <= p <= 71 (the first 20 prime numbers):
./rips_multifield_persistence ../../data/points/Kl.off -r 0.25 -m 0.5 -d 3 -p 2 -q 71
@@ -70,82 +47,6 @@ output:
557940830126698960967415390 0 0 0.120414
***********************************************************************************************************************
-Example of use of ALPHA:
-
-For a more verbose mode, please run cmake with option "DEBUG_TRACES=TRUE" and recompile the programs.
-
-1) 3D special case
-------------------
-Computation of the persistent homology with Z/2Z coefficients of the alpha complex on points
-sampling a torus 3D:
-
-./alpha_complex_3d_persistence ../../data/points/tore3D_300.off 2 0.45
-
-output:
-Simplex_tree dim: 3
-2 0 0 inf
-2 1 0.0682162 1.0001
-2 1 0.0934117 1.00003
-2 2 0.56444 1.03938
-
-Here we retrieve expected Betti numbers on a tore 3D:
-Betti numbers[0] = 1
-Betti numbers[1] = 2
-Betti numbers[2] = 1
-
-N.B.: - alpha_complex_3d_persistence accepts only OFF files in 3D dimension.
- - filtration values are alpha square values
-
-2) d-Dimension case
--------------------
-Computation of the persistent homology with Z/2Z coefficients of the alpha complex on points
-sampling a torus 3D:
-
-./alpha_complex_persistence -r 32 -p 2 -m 0.45 ../../data/points/tore3D_300.off
-
-output:
-Alpha complex is of dimension 3 - 9273 simplices - 300 vertices.
-Simplex_tree dim: 3
-2 0 0 inf
-2 1 0.0682162 1.0001
-2 1 0.0934117 1.00003
-2 2 0.56444 1.03938
-
-Here we retrieve expected Betti numbers on a tore 3D:
-Betti numbers[0] = 1
-Betti numbers[1] = 2
-Betti numbers[2] = 1
-
-N.B.: - alpha_complex_persistence accepts OFF files in d-Dimension.
- - filtration values are alpha square values
-
-3) 3D periodic special case
----------------------------
-./periodic_alpha_complex_3d_persistence ../../data/points/grid_10_10_10_in_0_1.off ../../data/points/iso_cuboid_3_in_0_1.txt 3 1.0
-
-output:
-Periodic Delaunay computed.
-Simplex_tree dim: 3
-3 0 0 inf
-3 1 0.0025 inf
-3 1 0.0025 inf
-3 1 0.0025 inf
-3 2 0.005 inf
-3 2 0.005 inf
-3 2 0.005 inf
-3 3 0.0075 inf
-
-Here we retrieve expected Betti numbers on a tore 3D:
-Betti numbers[0] = 1
-Betti numbers[1] = 3
-Betti numbers[2] = 3
-Betti numbers[3] = 1
-
-N.B.: - periodic_alpha_complex_3d_persistence accepts only OFF files in 3D dimension. In this example, the periodic cube
-is hard coded to { x = [0,1]; y = [0,1]; z = [0,1] }
- - filtration values are alpha square values
-
-***********************************************************************************************************************
Example of use of PLAIN HOMOLOGY:
This example computes the plain homology of the following simplicial complex without filtration values:
diff --git a/src/Persistent_cohomology/example/persistence_from_file.cpp b/src/Persistent_cohomology/example/persistence_from_file.cpp
index 67235467..eafa3fd5 100644
--- a/src/Persistent_cohomology/example/persistence_from_file.cpp
+++ b/src/Persistent_cohomology/example/persistence_from_file.cpp
@@ -61,8 +61,7 @@ int main(int argc, char * argv[]) {
simplex_tree_stream >> simplex_tree;
std::cout << "The complex contains " << simplex_tree.num_simplices() << " simplices" << std::endl;
- std::cout << " - dimension " << simplex_tree.dimension() << " - filtration " << simplex_tree.filtration()
- << std::endl;
+ std::cout << " - dimension " << simplex_tree.dimension() << std::endl;
/*
std::cout << std::endl << std::endl << "Iterator on Simplices in the filtration, with [filtration value]:" << std::endl;
diff --git a/src/Persistent_cohomology/example/persistence_from_simple_simplex_tree.cpp b/src/Persistent_cohomology/example/persistence_from_simple_simplex_tree.cpp
index 7ca9410a..8ef479d4 100644
--- a/src/Persistent_cohomology/example/persistence_from_simple_simplex_tree.cpp
+++ b/src/Persistent_cohomology/example/persistence_from_simple_simplex_tree.cpp
@@ -142,12 +142,10 @@ int main(int argc, char * const argv[]) {
/* An edge [11,6] */
/* An edge [10,12,2] */
- st.set_dimension(2);
- st.set_filtration(0.4);
std::cout << "The complex contains " << st.num_simplices() << " simplices - " << st.num_vertices() << " vertices "
<< std::endl;
- std::cout << " - dimension " << st.dimension() << " - filtration " << st.filtration() << std::endl;
+ std::cout << " - dimension " << st.dimension() << std::endl;
std::cout << std::endl << std::endl << "Iterator on Simplices in the filtration, with [filtration value]:"
<< std::endl;
std::cout << "**************************************************************" << std::endl;
diff --git a/src/Persistent_cohomology/example/plain_homology.cpp b/src/Persistent_cohomology/example/plain_homology.cpp
index 50f692f2..a5ae09c8 100644
--- a/src/Persistent_cohomology/example/plain_homology.cpp
+++ b/src/Persistent_cohomology/example/plain_homology.cpp
@@ -64,8 +64,6 @@ int main() {
st.insert_simplex_and_subfaces(edge03);
st.insert_simplex(edge13);
st.insert_simplex(vertex4);
- // FIXME: Remove this line
- st.set_dimension(2);
// Sort the simplices in the order of the filtration
st.initialize_filtration();
diff --git a/src/Persistent_cohomology/example/rips_persistence_step_by_step.cpp b/src/Persistent_cohomology/example/rips_persistence_step_by_step.cpp
index 554eeba6..c1de0ef8 100644
--- a/src/Persistent_cohomology/example/rips_persistence_step_by_step.cpp
+++ b/src/Persistent_cohomology/example/rips_persistence_step_by_step.cpp
@@ -45,14 +45,7 @@
using Simplex_tree = Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_fast_persistence>;
using Vertex_handle = Simplex_tree::Vertex_handle;
using Filtration_value = Simplex_tree::Filtration_value;
-using Graph_t = boost::adjacency_list < boost::vecS, boost::vecS, boost::undirectedS
-, boost::property < vertex_filtration_t, Filtration_value >
-, boost::property < edge_filtration_t, Filtration_value >
->;
-using Edge_t = std::pair< Vertex_handle, Vertex_handle >;
-
-template< typename InputPointRange, typename Distance >
-Graph_t compute_proximity_graph(InputPointRange &points, Filtration_value threshold, Distance distance);
+using Proximity_graph = Gudhi::Proximity_graph<Simplex_tree>;
using Field_Zp = Gudhi::persistent_cohomology::Field_Zp;
using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology<Simplex_tree, Field_Zp >;
@@ -81,8 +74,9 @@ int main(int argc, char * argv[]) {
Points_off_reader off_reader(off_file_points);
// Compute the proximity graph of the points
- Graph_t prox_graph = compute_proximity_graph(off_reader.get_point_cloud(), threshold
- , Gudhi::Euclidean_distance());
+ Proximity_graph prox_graph = Gudhi::compute_proximity_graph<Simplex_tree>(off_reader.get_point_cloud(),
+ threshold,
+ Gudhi::Euclidean_distance());
// Construct the Rips complex in a Simplex Tree
Simplex_tree st;
@@ -170,48 +164,3 @@ void program_options(int argc, char * argv[]
std::abort();
}
}
-
-/** Output the proximity graph of the points.
- *
- * If points contains n elements, the proximity graph is the graph
- * with n vertices, and an edge [u,v] iff the distance function between
- * points u and v is smaller than threshold.
- *
- * The type PointCloud furnishes .begin() and .end() methods, that return
- * iterators with value_type Point.
- */
-template< typename InputPointRange, typename Distance >
-Graph_t compute_proximity_graph(InputPointRange &points, Filtration_value threshold, Distance distance) {
- std::vector< Edge_t > edges;
- std::vector< Filtration_value > edges_fil;
-
- Vertex_handle idx_u, idx_v;
- Filtration_value fil;
- idx_u = 0;
- for (auto it_u = points.begin(); it_u != points.end(); ++it_u) {
- idx_v = idx_u + 1;
- for (auto it_v = it_u + 1; it_v != points.end(); ++it_v, ++idx_v) {
- fil = distance(*it_u, *it_v);
- if (fil <= threshold) {
- edges.emplace_back(idx_u, idx_v);
- edges_fil.push_back(fil);
- }
- }
- ++idx_u;
- }
-
- Graph_t skel_graph(edges.begin()
- , edges.end()
- , edges_fil.begin()
- , idx_u); // number of points labeled from 0 to idx_u-1
-
- auto vertex_prop = boost::get(vertex_filtration_t(), skel_graph);
-
- boost::graph_traits<Graph_t>::vertex_iterator vi, vi_end;
- for (std::tie(vi, vi_end) = boost::vertices(skel_graph);
- vi != vi_end; ++vi) {
- boost::put(vertex_prop, *vi, 0.);
- }
-
- return skel_graph;
-}
diff --git a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h
index 672fda48..e0a147b3 100644
--- a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h
+++ b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h
@@ -591,10 +591,17 @@ class Persistent_cohomology {
std::ofstream diagram_out(diagram_name.c_str());
cmp_intervals_by_length cmp(cpx_);
std::sort(std::begin(persistent_pairs_), std::end(persistent_pairs_), cmp);
+ bool has_infinity = std::numeric_limits<Filtration_value>::has_infinity;
for (auto pair : persistent_pairs_) {
- diagram_out << cpx_->dimension(get<0>(pair)) << " "
- << cpx_->filtration(get<0>(pair)) << " "
- << cpx_->filtration(get<1>(pair)) << std::endl;
+      // Special case on Windows, where infinity is printed as "1.#INF"
+ if (has_infinity && cpx_->filtration(get<1>(pair)) == std::numeric_limits<Filtration_value>::infinity()) {
+ diagram_out << cpx_->dimension(get<0>(pair)) << " "
+ << cpx_->filtration(get<0>(pair)) << " inf" << std::endl;
+ } else {
+ diagram_out << cpx_->dimension(get<0>(pair)) << " "
+ << cpx_->filtration(get<0>(pair)) << " "
+ << cpx_->filtration(get<1>(pair)) << std::endl;
+ }
}
}
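
A standalone illustration of the pattern applied in the hunk above (not GUDHI code, just the bare idiom, with arbitrary numbers): streaming an infinite filtration value directly is not portable, since some runtimes print it as "1.#INF", so the diagram writer emits the literal string "inf" instead.

```
#include <iostream>
#include <limits>

int main() {
  double death = std::numeric_limits<double>::infinity();
  // Same test as the diagram writer above: fall back to a literal "inf"
  // whenever the death value is the representable infinity.
  if (std::numeric_limits<double>::has_infinity &&
      death == std::numeric_limits<double>::infinity())
    std::cout << "1 0.25 inf" << std::endl;       // dimension, birth, literal "inf"
  else
    std::cout << "1 0.25 " << death << std::endl;  // dimension, birth, finite death
  return 0;
}
```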
diff --git a/src/Persistent_cohomology/test/betti_numbers_unit_test.cpp b/src/Persistent_cohomology/test/betti_numbers_unit_test.cpp
index da418034..0a08d200 100644
--- a/src/Persistent_cohomology/test/betti_numbers_unit_test.cpp
+++ b/src/Persistent_cohomology/test/betti_numbers_unit_test.cpp
@@ -62,8 +62,6 @@ BOOST_AUTO_TEST_CASE( plain_homology_betti_numbers )
st.insert_simplex_and_subfaces(edge04);
st.insert_simplex(edge14);
st.insert_simplex(vertex5);
- // FIXME: Remove this line
- st.set_dimension(3);
// Sort the simplices in the order of the filtration
st.initialize_filtration();
@@ -170,8 +168,6 @@ BOOST_AUTO_TEST_CASE( betti_numbers )
st.insert_simplex_and_subfaces(edge04, 2.0);
st.insert_simplex(edge14, 2.0);
st.insert_simplex(vertex5, 1.0);
- // FIXME: Remove this line
- st.set_dimension(3);
// Sort the simplices in the order of the filtration
st.initialize_filtration();
diff --git a/src/Persistent_cohomology/test/persistent_cohomology_unit_test.cpp b/src/Persistent_cohomology/test/persistent_cohomology_unit_test.cpp
index f8174020..a1c106d5 100644
--- a/src/Persistent_cohomology/test/persistent_cohomology_unit_test.cpp
+++ b/src/Persistent_cohomology/test/persistent_cohomology_unit_test.cpp
@@ -31,12 +31,11 @@ std::string test_rips_persistence(int coefficient, int min_persistence) {
// Display the Simplex_tree
std::cout << "The complex contains " << st.num_simplices() << " simplices" << " - dimension= " << st.dimension()
- << " - filtration= " << st.filtration() << std::endl;
+ << std::endl;
// Check
BOOST_CHECK(st.num_simplices() == 98);
BOOST_CHECK(st.dimension() == 3);
- BOOST_CHECK(st.filtration() == 1.89);
// Sort the simplices in the order of the filtration
st.initialize_filtration();
@@ -197,8 +196,6 @@ BOOST_AUTO_TEST_CASE( persistence_constructor_exception )
// To make number of simplices = 255
const short simplex_0[] = {0, 1, 2, 3, 4, 5, 6, 7};
st.insert_simplex_and_subfaces(simplex_0);
- // FIXME: Remove this line
- st.set_dimension(8);
// Sort the simplices in the order of the filtration
st.initialize_filtration();
diff --git a/src/Persistent_cohomology/test/persistent_cohomology_unit_test_multi_field.cpp b/src/Persistent_cohomology/test/persistent_cohomology_unit_test_multi_field.cpp
index 3537cfa4..9e767943 100644
--- a/src/Persistent_cohomology/test/persistent_cohomology_unit_test_multi_field.cpp
+++ b/src/Persistent_cohomology/test/persistent_cohomology_unit_test_multi_field.cpp
@@ -31,12 +31,11 @@ std::string test_rips_persistence(int min_coefficient, int max_coefficient, doub
// Display the Simplex_tree
std::cout << "The complex contains " << st.num_simplices() << " simplices" << " - dimension= " << st.dimension()
- << " - filtration= " << st.filtration() << std::endl;
+ << std::endl;
// Check
BOOST_CHECK(st.num_simplices() == 58);
BOOST_CHECK(st.dimension() == 3);
- BOOST_CHECK(st.filtration() == 0.4);
// Sort the simplices in the order of the filtration
st.initialize_filtration();
diff --git a/src/Rips_complex/example/example_rips_complex_from_csv_distance_matrix_file.cpp b/src/Rips_complex/example/example_rips_complex_from_csv_distance_matrix_file.cpp
index 7ae8126f..9e182f1e 100644
--- a/src/Rips_complex/example/example_rips_complex_from_csv_distance_matrix_file.cpp
+++ b/src/Rips_complex/example/example_rips_complex_from_csv_distance_matrix_file.cpp
@@ -32,7 +32,7 @@ int main(int argc, char **argv) {
// Init of a Rips complex from a distance matrix in a csv file
// Default separator is ';'
// ----------------------------------------------------------------------------
- Distance_matrix distances = read_lower_triangular_matrix_from_csv_file<Filtration_value>(csv_file_name);
+ Distance_matrix distances = Gudhi::read_lower_triangular_matrix_from_csv_file<Filtration_value>(csv_file_name);
Rips_complex rips_complex_from_file(distances, threshold);
std::streambuf* streambufffer;
diff --git a/src/Rips_complex/test/test_rips_complex.cpp b/src/Rips_complex/test/test_rips_complex.cpp
index fc2179f2..fc83f5f7 100644
--- a/src/Rips_complex/test/test_rips_complex.cpp
+++ b/src/Rips_complex/test/test_rips_complex.cpp
@@ -244,7 +244,7 @@ BOOST_AUTO_TEST_CASE(Rips_doc_csv_file) {
std::cout << "========== CSV FILE NAME = " << csv_file_name << " - Rips threshold=" <<
rips_threshold << "==========" << std::endl;
- Distance_matrix distances = read_lower_triangular_matrix_from_csv_file<Filtration_value>(csv_file_name);
+ Distance_matrix distances = Gudhi::read_lower_triangular_matrix_from_csv_file<Filtration_value>(csv_file_name);
Rips_complex rips_complex_from_file(distances, rips_threshold);
const int DIMENSION_1 = 1;
diff --git a/src/Rips_complex/utilities/CMakeLists.txt b/src/Rips_complex/utilities/CMakeLists.txt
new file mode 100644
index 00000000..baa571fa
--- /dev/null
+++ b/src/Rips_complex/utilities/CMakeLists.txt
@@ -0,0 +1,21 @@
+cmake_minimum_required(VERSION 2.6)
+project(Rips_complex_utilities)
+
+add_executable(rips_distance_matrix_persistence rips_distance_matrix_persistence.cpp)
+target_link_libraries(rips_distance_matrix_persistence ${Boost_PROGRAM_OPTIONS_LIBRARY})
+
+add_executable(rips_persistence rips_persistence.cpp)
+target_link_libraries(rips_persistence ${Boost_PROGRAM_OPTIONS_LIBRARY})
+
+if (TBB_FOUND)
+ target_link_libraries(rips_distance_matrix_persistence ${TBB_LIBRARIES})
+ target_link_libraries(rips_persistence ${TBB_LIBRARIES})
+endif()
+
+add_test(NAME Rips_complex_utility_from_rips_distance_matrix COMMAND $<TARGET_FILE:rips_distance_matrix_persistence>
+ "${CMAKE_SOURCE_DIR}/data/distance_matrix/full_square_distance_matrix.csv" "-r" "1.0" "-d" "3" "-p" "3" "-m" "0")
+add_test(NAME Rips_complex_utility_from_rips_on_tore_3D COMMAND $<TARGET_FILE:rips_persistence>
+ "${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" "-r" "0.25" "-m" "0.5" "-d" "3" "-p" "3")
+
+install(TARGETS rips_distance_matrix_persistence DESTINATION bin)
+install(TARGETS rips_persistence DESTINATION bin)
diff --git a/src/Rips_complex/utilities/README b/src/Rips_complex/utilities/README
new file mode 100644
index 00000000..4d20c806
--- /dev/null
+++ b/src/Rips_complex/utilities/README
@@ -0,0 +1,74 @@
+# Rips_complex #
+
+## `rips_persistence` ##
+This program computes the persistent homology with coefficient field *Z/pZ* of a Rips complex defined on a set of input points. The output diagram contains one bar per line, written with the convention:
+
+`p dim birth death`
+
+where `dim` is the dimension of the homological feature, `birth` and `death` are respectively the birth and death of the feature, and `p` is the characteristic of the field *Z/pZ* used for homology coefficients (`p` must be a prime number).
+
+**Usage**
+`rips_persistence [options] <OFF input file>`
+
+**Allowed options**
+
+* `-h [ --help ]` Produce help message
+* `-o [ --output-file ]` Name of file in which the persistence diagram is written. By default, the diagram is printed to the standard output.
+* `-r [ --max-edge-length ]` (default = inf) Maximal length of an edge for the Rips complex construction.
+* `-d [ --cpx-dimension ]` (default = 1) Maximal dimension of the Rips complex we want to compute.
+* `-p [ --field-charac ]` (default = 11) Characteristic p of the coefficient field Z/pZ for computing homology.
+* `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals.
+
+Beware: this program may use a lot of RAM and take a lot of time if `max-edge-length` is set to a large value.
+
+**Example 1 with Z/2Z coefficients**
+`rips_persistence ../../data/points/tore3D_1307.off -r 0.25 -m 0.5 -d 3 -p 2`
+
+outputs:
+```
+2 0 0 inf
+2 1 0.0983494 inf
+2 1 0.104347 inf
+2 2 0.138335 inf
+```
+
+**Example 2 with Z/3Z coefficients**
+
+`rips_persistence ../../data/points/tore3D_1307.off -r 0.25 -m 0.5 -d 3 -p 3`
+
+outputs:
+```
+3 0 0 inf
+3 1 0.0983494 inf
+3 1 0.104347 inf
+3 2 0.138335 inf
+```
+
+
+
+
+## `rips_distance_matrix_persistence` ##
+Same as `rips_persistence` but taking a distance matrix as input.
+
+**Usage**
+`rips_distance_matrix_persistence [options] <CSV input file>`
+where
+`<CSV input file>` is the path to the file containing a distance matrix. It can be a square or a lower-triangular matrix; the separator is ';' (an illustrative matrix is sketched after this README).
+
+**Example**
+`rips_distance_matrix_persistence data/distance_matrix/full_square_distance_matrix.csv -r 15 -d 3 -p 3 -m 0`
+
+outputs:
+```
+The complex contains 46 simplices
+ and has dimension 3
+3 0 0 inf
+3 0 0 8.94427
+3 0 0 7.28011
+3 0 0 6.08276
+3 0 0 5.83095
+3 0 0 5.38516
+3 0 0 5
+3 1 11 12.0416
+3 1 6.32456 6.7082
+```
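
Purely to illustrate the CSV layout described in this README (the numbers below are made up), a square distance matrix for three points with the default ';' separator would look like the following; since the reader used by this utility is read_lower_triangular_matrix_from_csv_file, presumably only the lower-triangular part is actually consumed.

```
0;8.94;7.28
8.94;0;5.38
7.28;5.38;0
```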
diff --git a/src/Persistent_cohomology/example/rips_distance_matrix_persistence.cpp b/src/Rips_complex/utilities/rips_distance_matrix_persistence.cpp
index 8517e7f6..d38808c7 100644
--- a/src/Persistent_cohomology/example/rips_distance_matrix_persistence.cpp
+++ b/src/Rips_complex/utilities/rips_distance_matrix_persistence.cpp
@@ -57,7 +57,7 @@ int main(int argc, char * argv[]) {
program_options(argc, argv, csv_matrix_file, filediag, threshold, dim_max, p, min_persistence);
- Distance_matrix distances = read_lower_triangular_matrix_from_csv_file<Filtration_value>(csv_matrix_file);
+ Distance_matrix distances = Gudhi::read_lower_triangular_matrix_from_csv_file<Filtration_value>(csv_matrix_file);
Rips_complex rips_complex_from_file(distances, threshold);
// Construct the Rips complex in a Simplex Tree
diff --git a/src/Persistent_cohomology/example/rips_persistence.cpp b/src/Rips_complex/utilities/rips_persistence.cpp
index d504798b..d504798b 100644
--- a/src/Persistent_cohomology/example/rips_persistence.cpp
+++ b/src/Rips_complex/utilities/rips_persistence.cpp
diff --git a/src/Simplex_tree/doc/Intro_simplex_tree.h b/src/Simplex_tree/doc/Intro_simplex_tree.h
index f5b72ff6..769491d9 100644
--- a/src/Simplex_tree/doc/Intro_simplex_tree.h
+++ b/src/Simplex_tree/doc/Intro_simplex_tree.h
@@ -67,10 +67,13 @@ Information of the Simplex Tree:
Number of vertices = 10 Number of simplices = 98 \endcode
*
* \li <a href="_simplex_tree_2example_alpha_shapes_3_simplex_tree_from_off_file_8cpp-example.html">
- * Simplex_tree/example_alpha_shapes_3_simplex_tree_from_off_file.cpp</a> - Simplex tree is computed and displayed from a 3D alpha
- * complex (Requires CGAL, GMP and GMPXX to be installed)
- *
+ * Simplex_tree/example_alpha_shapes_3_simplex_tree_from_off_file.cpp</a> - Simplex tree is computed and displayed
+ * from a 3D alpha complex (Requires CGAL, GMP and GMPXX to be installed).
*
+ * \li <a href="_simplex_tree_2graph_expansion_with_blocker_8cpp-example.html">
+ * Simplex_tree/graph_expansion_with_blocker.cpp</a> - Simple simplex tree construction from a one-skeleton graph with
+ * a simple blocker expansion method.
+ *
* \subsection filteredcomplexeshassecomplex Hasse complex
* The second one is the Hasse_complex. The Hasse complex is a data structure representing explicitly all co-dimension
* 1 incidence relations in a complex. It is consequently faster when accessing the boundary of a simplex, but is less
diff --git a/src/Simplex_tree/example/CMakeLists.txt b/src/Simplex_tree/example/CMakeLists.txt
index e22cc92c..b33b2d05 100644
--- a/src/Simplex_tree/example/CMakeLists.txt
+++ b/src/Simplex_tree/example/CMakeLists.txt
@@ -35,4 +35,21 @@ if(GMP_FOUND AND CGAL_FOUND)
install(TARGETS Simplex_tree_example_alpha_shapes_3_from_off DESTINATION bin)
+ add_executable ( Simplex_tree_example_cech_complex_cgal_mini_sphere_3d cech_complex_cgal_mini_sphere_3d.cpp )
+ target_link_libraries(Simplex_tree_example_cech_complex_cgal_mini_sphere_3d ${Boost_PROGRAM_OPTIONS_LIBRARY} ${CGAL_LIBRARY})
+ if (TBB_FOUND)
+ target_link_libraries(Simplex_tree_example_cech_complex_cgal_mini_sphere_3d ${TBB_LIBRARIES})
+ endif()
+ add_test(NAME Simplex_tree_example_cech_complex_cgal_mini_sphere_3d COMMAND $<TARGET_FILE:Simplex_tree_example_cech_complex_cgal_mini_sphere_3d>
+ "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off" -r 0.3 -d 3)
+
+  install(TARGETS Simplex_tree_example_cech_complex_cgal_mini_sphere_3d DESTINATION bin)
endif()
+
+add_executable ( Simplex_tree_example_graph_expansion_with_blocker graph_expansion_with_blocker.cpp )
+if (TBB_FOUND)
+ target_link_libraries(Simplex_tree_example_graph_expansion_with_blocker ${TBB_LIBRARIES})
+endif()
+add_test(NAME Simplex_tree_example_graph_expansion_with_blocker COMMAND $<TARGET_FILE:Simplex_tree_example_graph_expansion_with_blocker>)
+
+install(TARGETS Simplex_tree_example_graph_expansion_with_blocker DESTINATION bin)
diff --git a/src/Simplex_tree/example/cech_complex_cgal_mini_sphere_3d.cpp b/src/Simplex_tree/example/cech_complex_cgal_mini_sphere_3d.cpp
new file mode 100644
index 00000000..217e251f
--- /dev/null
+++ b/src/Simplex_tree/example/cech_complex_cgal_mini_sphere_3d.cpp
@@ -0,0 +1,234 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Clément Maria
+ *
+ * Copyright (C) 2014 INRIA Sophia Antipolis-Méditerranée (France)
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <gudhi/graph_simplicial_complex.h>
+#include <gudhi/distance_functions.h>
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Points_off_io.h>
+
+#include <CGAL/Epick_d.h>
+#include <CGAL/Min_sphere_of_spheres_d.h>
+#include <CGAL/Min_sphere_of_points_d_traits_d.h>
+
+#include <boost/program_options.hpp>
+
+#include <string>
+#include <vector>
+#include <limits> // infinity
+#include <utility> // for pair
+#include <map>
+
+// -------------------------------------------------------------------------------
+// cech_complex_cgal_mini_sphere_3d is an example of each step required to
+// build a Cech complex over a Simplex_tree. Please refer to cech_persistence
+// to see how to do the same thing with the Cech_complex wrapper, in fewer and
+// less detailed steps.
+// -------------------------------------------------------------------------------
+
+// Types definition
+using Simplex_tree = Gudhi::Simplex_tree<>;
+using Vertex_handle = Simplex_tree::Vertex_handle;
+using Simplex_handle = Simplex_tree::Simplex_handle;
+using Filtration_value = Simplex_tree::Filtration_value;
+using Siblings = Simplex_tree::Siblings;
+using Graph_t = boost::adjacency_list < boost::vecS, boost::vecS, boost::undirectedS
+, boost::property < Gudhi::vertex_filtration_t, Filtration_value >
+, boost::property < Gudhi::edge_filtration_t, Filtration_value >
+>;
+using Edge_t = std::pair< Vertex_handle, Vertex_handle >;
+
+using Kernel = CGAL::Epick_d< CGAL::Dimension_tag<3> >;
+using Point = Kernel::Point_d;
+using Traits = CGAL::Min_sphere_of_points_d_traits_d<Kernel,Filtration_value,3>;
+using Min_sphere = CGAL::Min_sphere_of_spheres_d<Traits>;
+
+using Points_off_reader = Gudhi::Points_off_reader<Point>;
+
+class Cech_blocker {
+ public:
+ bool operator()(Simplex_handle sh) {
+ std::vector<Point> points;
+#if DEBUG_TRACES
+ std::cout << "Cech_blocker on [";
+#endif // DEBUG_TRACES
+ for (auto vertex : simplex_tree_.simplex_vertex_range(sh)) {
+ points.push_back(point_cloud_[vertex]);
+#if DEBUG_TRACES
+ std::cout << vertex << ", ";
+#endif // DEBUG_TRACES
+ }
+ Min_sphere ms(points.begin(),points.end());
+ Filtration_value radius = ms.radius();
+#if DEBUG_TRACES
+ std::cout << "] - radius = " << radius << " - returns " << (radius > threshold_) << std::endl;
+#endif // DEBUG_TRACES
+ simplex_tree_.assign_filtration(sh, radius);
+ return (radius > threshold_);
+ }
+ Cech_blocker(Simplex_tree& simplex_tree, Filtration_value threshold, const std::vector<Point>& point_cloud)
+ : simplex_tree_(simplex_tree),
+ threshold_(threshold),
+ point_cloud_(point_cloud) { }
+ private:
+ Simplex_tree simplex_tree_;
+ Filtration_value threshold_;
+ std::vector<Point> point_cloud_;
+};
+
+template< typename InputPointRange>
+Graph_t compute_proximity_graph(InputPointRange &points, Filtration_value threshold);
+
+void program_options(int argc, char * argv[]
+ , std::string & off_file_points
+ , Filtration_value & threshold
+ , int & dim_max);
+
+int main(int argc, char * argv[]) {
+ std::string off_file_points;
+ Filtration_value threshold;
+ int dim_max;
+
+ program_options(argc, argv, off_file_points, threshold, dim_max);
+
+ // Extract the points from the file filepoints
+ Points_off_reader off_reader(off_file_points);
+
+ // Compute the proximity graph of the points
+ Graph_t prox_graph = compute_proximity_graph(off_reader.get_point_cloud(), threshold);
+
+ //Min_sphere sph1(off_reader.get_point_cloud()[0], off_reader.get_point_cloud()[1], off_reader.get_point_cloud()[2]);
+ // Construct the Rips complex in a Simplex Tree
+ Simplex_tree st;
+ // insert the proximity graph in the simplex tree
+ st.insert_graph(prox_graph);
+ // expand the graph until dimension dim_max
+ st.expansion_with_blockers(dim_max, Cech_blocker(st, threshold, off_reader.get_point_cloud()));
+
+ std::cout << "The complex contains " << st.num_simplices() << " simplices \n";
+ std::cout << " and has dimension " << st.dimension() << " \n";
+
+ // Sort the simplices in the order of the filtration
+ st.initialize_filtration();
+
+#if DEBUG_TRACES
+ std::cout << "********************************************************************\n";
+ // Display the Simplex_tree - Can not be done in the middle of 2 inserts
+ std::cout << "* The complex contains " << st.num_simplices() << " simplices - dimension=" << st.dimension() << "\n";
+ std::cout << "* Iterator on Simplices in the filtration, with [filtration value]:\n";
+ for (auto f_simplex : st.filtration_simplex_range()) {
+ std::cout << " " << "[" << st.filtration(f_simplex) << "] ";
+ for (auto vertex : st.simplex_vertex_range(f_simplex)) {
+ std::cout << static_cast<int>(vertex) << " ";
+ }
+ std::cout << std::endl;
+ }
+#endif // DEBUG_TRACES
+ return 0;
+}
+
+void program_options(int argc, char * argv[]
+ , std::string & off_file_points
+ , Filtration_value & threshold
+ , int & dim_max) {
+ namespace po = boost::program_options;
+ po::options_description hidden("Hidden options");
+ hidden.add_options()
+ ("input-file", po::value<std::string>(&off_file_points),
+ "Name of an OFF file containing a 3d point set.\n");
+
+ po::options_description visible("Allowed options", 100);
+ visible.add_options()
+ ("help,h", "produce help message")
+ ("max-edge-length,r",
+ po::value<Filtration_value>(&threshold)->default_value(std::numeric_limits<Filtration_value>::infinity()),
+ "Maximal length of an edge for the Cech complex construction.")
+ ("cpx-dimension,d", po::value<int>(&dim_max)->default_value(1),
+ "Maximal dimension of the Cech complex we want to compute.");
+
+ po::positional_options_description pos;
+ pos.add("input-file", 1);
+
+ po::options_description all;
+ all.add(visible).add(hidden);
+
+ po::variables_map vm;
+ po::store(po::command_line_parser(argc, argv).
+ options(all).positional(pos).run(), vm);
+ po::notify(vm);
+
+ if (vm.count("help") || !vm.count("input-file")) {
+ std::cout << std::endl;
+ std::cout << "Construct a Cech complex defined on a set of input points.\n \n";
+
+ std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
+ std::cout << visible << std::endl;
+ std::abort();
+ }
+}
+
+/** Output the proximity graph of the points.
+ *
+ * If points contains n elements, the proximity graph is the graph
+ * with n vertices, and an edge [u,v] iff the distance function between
+ * points u and v is smaller than threshold.
+ *
+ * The type PointCloud furnishes .begin() and .end() methods, that return
+ * iterators with value_type Point.
+ */
+template< typename InputPointRange>
+Graph_t compute_proximity_graph(InputPointRange &points, Filtration_value threshold) {
+ std::vector< Edge_t > edges;
+ std::vector< Filtration_value > edges_fil;
+
+ Kernel k;
+ Vertex_handle idx_u, idx_v;
+ Filtration_value fil;
+ idx_u = 0;
+ for (auto it_u = points.begin(); it_u != points.end(); ++it_u) {
+ idx_v = idx_u + 1;
+ for (auto it_v = it_u + 1; it_v != points.end(); ++it_v, ++idx_v) {
+ fil = k.squared_distance_d_object()(*it_u, *it_v);
+ // For Cech Complex, threshold is a radius (distance /2)
+ fil = std::sqrt(fil) / 2.;
+ if (fil <= threshold) {
+ edges.emplace_back(idx_u, idx_v);
+ edges_fil.push_back(fil);
+ }
+ }
+ ++idx_u;
+ }
+
+ Graph_t skel_graph(edges.begin()
+ , edges.end()
+ , edges_fil.begin()
+ , idx_u); // number of points labeled from 0 to idx_u-1
+
+ auto vertex_prop = boost::get(Gudhi::vertex_filtration_t(), skel_graph);
+
+ boost::graph_traits<Graph_t>::vertex_iterator vi, vi_end;
+ for (std::tie(vi, vi_end) = boost::vertices(skel_graph);
+ vi != vi_end; ++vi) {
+ boost::put(vertex_prop, *vi, 0.);
+ }
+
+ return skel_graph;
+}
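
As a side note on the `fil = std::sqrt(fil) / 2.` line above: for a single edge, the smallest enclosing ball is centred at the midpoint of its endpoints, so the edge enters the Cech filtration at half the Euclidean distance between them. A tiny self-contained check of that arithmetic (independent of CGAL, with values chosen for the example):

```
#include <cassert>
#include <cmath>

int main() {
  // Two points at Euclidean distance 2: the smallest ball containing both
  // is centred at their midpoint and has radius 1, i.e. distance / 2.
  double p[3] = {0., 0., 0.};
  double q[3] = {2., 0., 0.};
  double sq_dist = 0.;
  for (int i = 0; i < 3; ++i) sq_dist += (p[i] - q[i]) * (p[i] - q[i]);
  double edge_filtration = std::sqrt(sq_dist) / 2.;  // same formula as compute_proximity_graph
  assert(std::fabs(edge_filtration - 1.) < 1e-12);
  return 0;
}
```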
diff --git a/src/Simplex_tree/example/example_alpha_shapes_3_simplex_tree_from_off_file.cpp b/src/Simplex_tree/example/example_alpha_shapes_3_simplex_tree_from_off_file.cpp
index ff2eebcb..d8289ba9 100644
--- a/src/Simplex_tree/example/example_alpha_shapes_3_simplex_tree_from_off_file.cpp
+++ b/src/Simplex_tree/example/example_alpha_shapes_3_simplex_tree_from_off_file.cpp
@@ -28,6 +28,8 @@
#include <CGAL/Exact_predicates_inexact_constructions_kernel.h>
#include <CGAL/Delaunay_triangulation_3.h>
#include <CGAL/Alpha_shape_3.h>
+#include <CGAL/Alpha_shape_vertex_base_3.h>
+#include <CGAL/Alpha_shape_cell_base_3.h>
#include <CGAL/iterator.h>
#include <fstream>
diff --git a/src/Simplex_tree/example/graph_expansion_with_blocker.cpp b/src/Simplex_tree/example/graph_expansion_with_blocker.cpp
new file mode 100644
index 00000000..86bfb8cb
--- /dev/null
+++ b/src/Simplex_tree/example/graph_expansion_with_blocker.cpp
@@ -0,0 +1,79 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2014
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <gudhi/Simplex_tree.h>
+
+#include <iostream>
+
+using Simplex_tree = Gudhi::Simplex_tree<>;
+using Simplex_handle = Simplex_tree::Simplex_handle;
+
+int main(int argc, char * const argv[]) {
+
+ // Construct the Simplex Tree with a 1-skeleton graph example
+ Simplex_tree simplexTree;
+
+ simplexTree.insert_simplex({0, 1}, 0.);
+ simplexTree.insert_simplex({0, 2}, 1.);
+ simplexTree.insert_simplex({0, 3}, 2.);
+ simplexTree.insert_simplex({1, 2}, 3.);
+ simplexTree.insert_simplex({1, 3}, 4.);
+ simplexTree.insert_simplex({2, 3}, 5.);
+ simplexTree.insert_simplex({2, 4}, 6.);
+ simplexTree.insert_simplex({3, 6}, 7.);
+ simplexTree.insert_simplex({4, 5}, 8.);
+ simplexTree.insert_simplex({4, 6}, 9.);
+ simplexTree.insert_simplex({5, 6}, 10.);
+ simplexTree.insert_simplex({6}, 10.);
+
+ simplexTree.expansion_with_blockers(3, [&](Simplex_handle sh){
+ bool result = false;
+ std::cout << "Blocker on [";
+    // The user can loop over the vertices of the given simplex_handle, e.g.:
+ for (auto vertex : simplexTree.simplex_vertex_range(sh)) {
+      // We block the expansion if the vertex '6' is among the vertices of the given simplex
+ if (vertex == 6)
+ result = true;
+ std::cout << vertex << ", ";
+ }
+ std::cout << "] ( " << simplexTree.filtration(sh);
+    // User can re-assign a new filtration value directly in the blocker (default is the maximal filtration value of the boundary faces)
+ simplexTree.assign_filtration(sh, simplexTree.filtration(sh) + 1.);
+
+ std::cout << " + 1. ) = " << result << std::endl;
+
+ return result;
+ });
+
+ std::cout << "********************************************************************\n";
+ std::cout << "* The complex contains " << simplexTree.num_simplices() << " simplices";
+ std::cout << " - dimension " << simplexTree.dimension() << "\n";
+ std::cout << "* Iterator on Simplices in the filtration, with [filtration value]:\n";
+ for (auto f_simplex : simplexTree.filtration_simplex_range()) {
+ std::cout << " " << "[" << simplexTree.filtration(f_simplex) << "] ";
+ for (auto vertex : simplexTree.simplex_vertex_range(f_simplex))
+ std::cout << "(" << vertex << ")";
+ std::cout << std::endl;
+ }
+
+ return 0;
+}
diff --git a/src/Simplex_tree/example/mini_simplex_tree.cpp b/src/Simplex_tree/example/mini_simplex_tree.cpp
index ad99df23..19e45361 100644
--- a/src/Simplex_tree/example/mini_simplex_tree.cpp
+++ b/src/Simplex_tree/example/mini_simplex_tree.cpp
@@ -52,8 +52,6 @@ int main() {
auto edge03 = {0, 3};
st.insert_simplex_and_subfaces(triangle012);
st.insert_simplex_and_subfaces(edge03);
- // FIXME: Remove this line
- st.set_dimension(2);
auto edge02 = {0, 2};
ST::Simplex_handle e = st.find(edge02);
diff --git a/src/Simplex_tree/example/simple_simplex_tree.cpp b/src/Simplex_tree/example/simple_simplex_tree.cpp
index 60f9a35e..b6b65b88 100644
--- a/src/Simplex_tree/example/simple_simplex_tree.cpp
+++ b/src/Simplex_tree/example/simple_simplex_tree.cpp
@@ -185,19 +185,16 @@ int main(int argc, char * const argv[]) {
}
// ++ GENERAL VARIABLE SET
- simplexTree.set_filtration(FOURTH_FILTRATION_VALUE); // Max filtration value
- simplexTree.set_dimension(2); // Max dimension = 2 -> (2,1,0)
std::cout << "********************************************************************\n";
// Display the Simplex_tree - Can not be done in the middle of 2 inserts
std::cout << "* The complex contains " << simplexTree.num_simplices() << " simplices\n";
- std::cout << " - dimension " << simplexTree.dimension() << " - filtration " << simplexTree.filtration() << "\n";
+ std::cout << " - dimension " << simplexTree.dimension() << "\n";
std::cout << "* Iterator on Simplices in the filtration, with [filtration value]:\n";
for (auto f_simplex : simplexTree.filtration_simplex_range()) {
std::cout << " " << "[" << simplexTree.filtration(f_simplex) << "] ";
- for (auto vertex : simplexTree.simplex_vertex_range(f_simplex)) {
- std::cout << static_cast<int>(vertex) << " ";
- }
+ for (auto vertex : simplexTree.simplex_vertex_range(f_simplex))
+ std::cout << "(" << vertex << ")";
std::cout << std::endl;
}
// [0.1] 0
@@ -250,5 +247,34 @@ int main(int argc, char * const argv[]) {
std::cout << "***+ YES IT IS!\n";
else
std::cout << "***- NO IT ISN'T\n";
+
+ simplexFound = simplexTree.find({ 0, 1 });
+ std::cout << "**************IS THE SIMPLEX {0,1} IN THE SIMPLEX TREE ?\n";
+ if (simplexFound != simplexTree.null_simplex())
+ std::cout << "***+ YES IT IS!\n";
+ else
+ std::cout << "***- NO IT ISN'T\n";
+
+ std::cout << "**************COFACES OF {0,1} IN CODIMENSION 1 ARE\n";
+ for (auto& simplex : simplexTree.cofaces_simplex_range(simplexTree.find({0,1}), 1)) {
+ for (auto vertex : simplexTree.simplex_vertex_range(simplex))
+ std::cout << "(" << vertex << ")";
+ std::cout << std::endl;
+ }
+
+ std::cout << "**************SIMPLICES IN THE STAR OF {0,1} ARE\n";
+ for (auto& simplex : simplexTree.star_simplex_range(simplexTree.find({0,1}))) {
+ for (auto vertex : simplexTree.simplex_vertex_range(simplex))
+ std::cout << "(" << vertex << ")";
+ std::cout << std::endl;
+ }
+
+ std::cout << "**************SIMPLICES IN THE BOUNDARY OF {0,1,2} ARE\n";
+ for (auto& simplex : simplexTree.boundary_simplex_range(simplexTree.find({0,1,2}))) {
+ for (auto vertex : simplexTree.simplex_vertex_range(simplex))
+ std::cout << "(" << vertex << ")";
+ std::cout << std::endl;
+ }
+
return 0;
}
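Since a codimension argument of 0 asks cofaces_simplex_range for all cofaces (i.e. the star, assuming the usual GUDHI meaning of that parameter), the star loop above can equivalently be written through the coface interface. A small sketch reusing the simplexTree built in this example:

    std::cout << "**************COFACES OF {0,1} IN CODIMENSION 0 (THE STAR) ARE\n";
    for (auto& simplex : simplexTree.cofaces_simplex_range(simplexTree.find({0, 1}), 0)) {
      for (auto vertex : simplexTree.simplex_vertex_range(simplex))
        std::cout << "(" << vertex << ")";
      std::cout << std::endl;
    }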
diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree.h b/src/Simplex_tree/include/gudhi/Simplex_tree.h
index 317bce23..cb6ab309 100644
--- a/src/Simplex_tree/include/gudhi/Simplex_tree.h
+++ b/src/Simplex_tree/include/gudhi/Simplex_tree.h
@@ -289,7 +289,6 @@ class Simplex_tree {
/** \brief Constructs an empty simplex tree. */
Simplex_tree()
: null_vertex_(-1),
- threshold_(0),
root_(nullptr, null_vertex_),
filtration_vect_(),
dimension_(-1) { }
@@ -297,7 +296,6 @@ class Simplex_tree {
/** \brief User-defined copy constructor reproduces the whole tree structure. */
Simplex_tree(const Simplex_tree& simplex_source)
: null_vertex_(simplex_source.null_vertex_),
- threshold_(simplex_source.threshold_),
root_(nullptr, null_vertex_ , simplex_source.root_.members_),
filtration_vect_(),
dimension_(simplex_source.dimension_) {
@@ -323,12 +321,10 @@ class Simplex_tree {
/** \brief User-defined move constructor moves the whole tree structure. */
Simplex_tree(Simplex_tree && old)
: null_vertex_(std::move(old.null_vertex_)),
- threshold_(std::move(old.threshold_)),
root_(std::move(old.root_)),
filtration_vect_(std::move(old.filtration_vect_)),
dimension_(std::move(old.dimension_)) {
old.dimension_ = -1;
- old.threshold_ = 0;
old.root_ = Siblings(nullptr, null_vertex_);
}
@@ -356,7 +352,6 @@ class Simplex_tree {
/** \brief Checks if two simplex trees are equal. */
bool operator==(Simplex_tree& st2) {
if ((null_vertex_ != st2.null_vertex_) ||
- (threshold_ != st2.threshold_) ||
(dimension_ != st2.dimension_))
return false;
return rec_equal(&root_, &st2.root_);
@@ -396,25 +391,25 @@ class Simplex_tree {
return sh->second.key();
}
- /** \brief Returns the simplex associated to a key.
+ /** \brief Returns the simplex that has index idx in the filtration.
*
* The filtration must be initialized.
* \pre SimplexTreeOptions::store_key
*/
- Simplex_handle simplex(Simplex_key key) const {
- return filtration_vect_[key];
+ Simplex_handle simplex(Simplex_key idx) const {
+ return filtration_vect_[idx];
}
/** \brief Returns the filtration value of a simplex.
*
- * Called on the null_simplex, returns INFINITY.
+ * Called on the null_simplex, it returns infinity.
* If SimplexTreeOptions::store_filtration is false, returns 0.
*/
static Filtration_value filtration(Simplex_handle sh) {
if (sh != null_simplex()) {
return sh->second.filtration();
} else {
- return INFINITY;
+ return std::numeric_limits<Filtration_value>::infinity();
}
}
@@ -427,11 +422,6 @@ class Simplex_tree {
sh->second.assign_filtration(fv);
}
- /** \brief Returns an upper bound of the filtration values of the simplices. */
- Filtration_value filtration() const {
- return threshold_;
- }
-
/** \brief Returns a Simplex_handle different from all Simplex_handles
* associated to the simplices in the simplicial complex.
*
@@ -492,7 +482,17 @@ class Simplex_tree {
}
/** \brief Returns an upper bound on the dimension of the simplicial complex. */
- int dimension() const {
+ int upper_bound_dimension() const {
+ return dimension_;
+ }
+
+ /** \brief Returns the dimension of the simplicial complex.
+ \details This function is not constant time because it may recompute the dimension when required (such a
+ recomputation can be triggered by `remove_maximal_simplex()` or `prune_above_filtration()`).
+ */
+ int dimension() {
+ if (dimension_to_be_lowered_)
+ lower_upper_bound_dimension();
return dimension_;
}
@@ -601,7 +601,11 @@ class Simplex_tree {
// if filtration value unchanged
return std::pair<Simplex_handle, bool>(null_simplex(), false);
}
- // otherwise the insertion has succeeded
+ // otherwise the insertion has succeeded - simplex.size() returns a size_type, hence the casts below
+ if (static_cast<int>(simplex.size()) - 1 > dimension_) {
+ // Update dimension if needed
+ dimension_ = static_cast<int>(simplex.size()) - 1;
+ }
return res_insert;
}
@@ -757,13 +761,12 @@ class Simplex_tree {
return &root_;
}
- /** Set an upper bound for the filtration values. */
- void set_filtration(Filtration_value fil) {
- threshold_ = fil;
- }
-
- /** Set a dimension for the simplicial complex. */
+ /** \brief Set a dimension for the simplicial complex.
+ * \details This function must be used with caution because it overrides the stored dimension and cancels any
+ * pending dimension recomputation (a recomputation can be triggered by `remove_maximal_simplex()` or
+ * `prune_above_filtration()`).
+ */
void set_dimension(int dimension) {
+ dimension_to_be_lowered_ = false;
dimension_ = dimension;
}
@@ -1082,6 +1085,120 @@ class Simplex_tree {
}
public:
+ /** \brief Expands a simplex tree containing only a graph. Simplices corresponding to cliques in the graph are added
+ * incrementally, faces before cofaces, unless the simplex has dimension larger than `max_dim` or `block_simplex`
+ * returns true for this simplex.
+ *
+ * @param[in] max_dim Maximal dimension of the expansion.
+ * @param[in] block_simplex Blocker oracle. Its concept is <CODE>bool block_simplex(Simplex_handle sh)</CODE>
+ *
+ * The function identifies a candidate simplex whose faces are all already in the complex, inserts
+ * it with a filtration value corresponding to the maximum of the filtration values of the faces, then calls
+ * `block_simplex` on a `Simplex_handle` for this new simplex. If `block_simplex` returns true, the simplex is
+ * removed, otherwise it is kept. Note that the evaluation of `block_simplex` is a good time to update the
+ * filtration value of the simplex if you want a customized value. The algorithm then proceeds with the next
+ * candidate.
+ *
+ * @warning several candidates of the same dimension may be inserted simultaneously before calling `block_simplex`,
+ * so if you examine the complex in `block_simplex`, you may hit a few simplices of the same dimension that have not
+ * been vetted by `block_simplex` yet, or have already been rejected but not yet removed.
+ */
+ template< typename Blocker >
+ void expansion_with_blockers(int max_dim, Blocker block_simplex) {
+ // The loop must run from the end to the beginning, as higher dimensional simplices are always in the left part of the tree
+ for (auto& simplex : boost::adaptors::reverse(root_.members())) {
+ if (has_children(&simplex)) {
+ siblings_expansion_with_blockers(simplex.second.children(), max_dim, max_dim - 1, block_simplex);
+ }
+ }
+ }
+
+ private:
+ /** \brief Recursive expansion with blockers of the simplex tree.*/
+ template< typename Blocker >
+ void siblings_expansion_with_blockers(Siblings* siblings, int max_dim, int k, Blocker block_simplex) {
+ if (dimension_ < max_dim - k) {
+ dimension_ = max_dim - k;
+ }
+ if (k == 0)
+ return;
+ // No need to go deeper
+ if (siblings->members().size() < 2)
+ return;
+ // Reverse loop starting just before the last member, so that 'next' can start at the last one
+ for (auto simplex = siblings->members().rbegin() + 1; simplex != siblings->members().rend(); simplex++) {
+ std::vector<std::pair<Vertex_handle, Node> > intersection;
+ for(auto next = siblings->members().rbegin(); next != simplex; next++) {
+ bool to_be_inserted = true;
+ Filtration_value filt = simplex->second.filtration();
+ // If all the facets of the candidate simplex ('simplex' with 'next' appended) are present, 'next' needs to be inserted
+ for (Simplex_handle border : boundary_simplex_range(simplex)) {
+ Simplex_handle border_child = find_child(border, next->first);
+ if (border_child == null_simplex()) {
+ to_be_inserted = false;
+ break;
+ }
+ filt = std::max(filt, filtration(border_child));
+ }
+ if (to_be_inserted) {
+ intersection.emplace_back(next->first, Node(nullptr, filt));
+ }
+ }
+ if (intersection.size() != 0) {
+ // Reverse the order to insert
+ Siblings * new_sib = new Siblings(siblings, // oncles
+ simplex->first, // parent
+ boost::adaptors::reverse(intersection)); // boost::container::ordered_unique_range_t
+ std::vector<Vertex_handle> blocked_new_sib_vertex_list;
+ // As all intersections are inserted, we can call the blocker function on all new_sib members
+ for (auto new_sib_member = new_sib->members().begin();
+ new_sib_member != new_sib->members().end();
+ new_sib_member++) {
+ bool blocker_result = block_simplex(new_sib_member);
+ // The new_sib member has been blocked by the blocker function:
+ // add it to the removal list - do not erase it while looping over the members
+ if (blocker_result) {
+ blocked_new_sib_vertex_list.push_back(new_sib_member->first);
+ }
+ }
+ if (blocked_new_sib_vertex_list.size() == new_sib->members().size()) {
+ // Specific case where all have to be deleted
+ delete new_sib;
+ // ensure the children property
+ simplex->second.assign_children(siblings);
+ } else {
+ for (auto& blocked_new_sib_member : blocked_new_sib_vertex_list) {
+ new_sib->members().erase(blocked_new_sib_member);
+ }
+ // ensure recursive call
+ simplex->second.assign_children(new_sib);
+ siblings_expansion_with_blockers(new_sib, max_dim, k - 1, block_simplex);
+ }
+ } else {
+ // ensure the children property
+ simplex->second.assign_children(siblings);
+ }
+ }
+ }
+
+ /* \private Returns the Simplex_handle made of the vertex list of the given Simplex_handle, plus the given
+ * Vertex_handle, provided this Vertex_handle is found among the children of the Simplex_handle.
+ * Returns null_simplex() otherwise.
+ */
+ Simplex_handle find_child(Simplex_handle sh, Vertex_handle vh) const {
+ if (!has_children(sh))
+ return null_simplex();
+
+ Simplex_handle child = sh->second.children()->find(vh);
+ // When boost::flat_map does not find the key, it returns boost::flat_map::end();
+ // in the simplex tree we want a null_simplex() instead
+ if (child == sh->second.children()->members().end())
+ return null_simplex();
+
+ return child;
+ }
+
+ public:
/** \brief Write the hasse diagram of the simplicial complex in os.
*
* Each row in the file correspond to a simplex. A line is written:
@@ -1157,6 +1274,9 @@ class Simplex_tree {
* \post Some simplex tree functions require the filtration to be valid. `prune_above_filtration()`
* function is not launching `initialize_filtration()` but returns the filtration modification information. If the
* complex has changed , please call `initialize_filtration()` to recompute it.
+ * \post Note that the dimension of the simplicial complex may be lower after calling `prune_above_filtration()`
+ * than it was before. However, `upper_bound_dimension()` will return the old value, which remains a valid upper
+ * bound. If you care, you can call `dimension()` to recompute the exact dimension.
*/
bool prune_above_filtration(Filtration_value filtration) {
return rec_prune_above_filtration(root(), filtration);
@@ -1168,6 +1288,8 @@ class Simplex_tree {
auto last = std::remove_if(list.begin(), list.end(), [=](Dit_value_t& simplex) {
if (simplex.second.filtration() <= filt) return false;
if (has_children(&simplex)) rec_delete(simplex.second.children());
+ // dimension may need to be lowered
+ dimension_to_be_lowered_ = true;
return true;
});
@@ -1176,6 +1298,8 @@ class Simplex_tree {
// Removing the whole siblings, parent becomes a leaf.
sib->oncles()->members()[sib->parent()].assign_children(sib->oncles());
delete sib;
+ // dimension may need to be lowered
+ dimension_to_be_lowered_ = true;
return true;
} else {
// Keeping some elements of siblings. Remove the others, and recurse in the remaining ones.
@@ -1187,12 +1311,45 @@ class Simplex_tree {
return modified;
}
+ private:
+ /** \brief Recomputes the dimension of the simplex tree by a deep search.
+ * @return True if the dimension was modified, false otherwise.
+ * \pre Make sure the stored dimension is not too low, as the deep search stops as soon as the former dimension
+ * has been reached (cf. `upper_bound_dimension()` and `set_dimension()` methods).
+ */
+ bool lower_upper_bound_dimension() {
+ // reset automatic detection to recompute
+ dimension_to_be_lowered_ = false;
+ int new_dimension = -1;
+ // Browse the tree from left to right, as higher dimensional cells are more likely in the left part of the tree
+ for (Simplex_handle sh : complex_simplex_range()) {
+#ifdef DEBUG_TRACES
+ for (auto vertex : simplex_vertex_range(sh)) {
+ std::cout << " " << vertex;
+ }
+ std::cout << std::endl;
+#endif // DEBUG_TRACES
+
+ int sh_dimension = dimension(sh);
+ if (sh_dimension >= dimension_)
+ // Stop browsing as soon as the dimension is reached, no need to go further
+ return false;
+ new_dimension = std::max(new_dimension, sh_dimension);
+ }
+ dimension_ = new_dimension;
+ return true;
+ }
+
+
public:
/** \brief Remove a maximal simplex.
* @param[in] sh Simplex handle on the maximal simplex to remove.
* \pre Please check the simplex has no coface before removing it.
* \exception std::invalid_argument In debug mode, if sh has children.
* \post Be aware that removing is shifting data in a flat_map (initialize_filtration to be done).
+ * \post Note that the dimension of the simplicial complex may be lower after calling `remove_maximal_simplex()`
+ * than it was before. However, `upper_bound_dimension()` will return the old value, which remains a valid upper
+ * bound. If you care, you can call `dimension()` to recompute the exact dimension.
*/
void remove_maximal_simplex(Simplex_handle sh) {
// Guarantee the simplex has no children
@@ -1210,13 +1367,13 @@ class Simplex_tree {
// Sibling is emptied : must be deleted, and its parent must point on his own Sibling
child->oncles()->members().at(child->parent()).assign_children(child->oncles());
delete child;
+ // dimension may need to be lowered
+ dimension_to_be_lowered_ = true;
}
}
private:
Vertex_handle null_vertex_;
- /** \brief Upper bound on the filtration values of the simplices.*/
- Filtration_value threshold_;
/** \brief Total number of simplices in the complex, without the empty simplex.*/
/** \brief Set of simplex tree Nodes representing the vertices.*/
Siblings root_;
@@ -1224,6 +1381,7 @@ class Simplex_tree {
std::vector<Simplex_handle> filtration_vect_;
/** \brief Upper bound on the dimension of the simplicial complex.*/
int dimension_;
+ bool dimension_to_be_lowered_ = false;
};
// Print a Simplex_tree in os.
@@ -1244,7 +1402,6 @@ std::istream& operator>>(std::istream & is, Simplex_tree<T...> & st) {
typedef Simplex_tree<T...> ST;
std::vector<typename ST::Vertex_handle> simplex;
typename ST::Filtration_value fil;
- typename ST::Filtration_value max_fil = 0;
int max_dim = -1;
while (read_simplex(is, simplex, fil)) {
// read all simplices in the file as a list of vertices
@@ -1253,15 +1410,11 @@ std::istream& operator>>(std::istream & is, Simplex_tree<T...> & st) {
if (max_dim < dim) {
max_dim = dim;
}
- if (max_fil < fil) {
- max_fil = fil;
- }
// insert every simplex in the simplex tree
st.insert_simplex(simplex, fil);
simplex.clear();
}
st.set_dimension(max_dim);
- st.set_filtration(max_fil);
return is;
}
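To make the new dimension bookkeeping concrete, here is a minimal usage sketch. It assumes a default Gudhi::Simplex_tree<>; the simplices, filtration values, and the 1.5 pruning threshold are illustrative, and the printed results mirror the behaviour exercised by the unit tests added below:

    #include <gudhi/Simplex_tree.h>
    #include <iostream>

    int main() {
      Gudhi::Simplex_tree<> st;
      st.insert_simplex_and_subfaces({0, 1, 2}, 1.0);  // insertion raises the stored dimension to 2
      st.insert_simplex_and_subfaces({3}, 2.0);
      st.remove_maximal_simplex(st.find({0, 1, 2}));   // only the triangle itself is removed
      // The cached bound is not lowered automatically...
      std::cout << st.upper_bound_dimension() << std::endl;  // still 2
      // ...but dimension() recomputes the exact value when needed.
      std::cout << st.dimension() << std::endl;               // 1
      // Pruning follows the same pattern: rebuild the filtration if the complex changed.
      if (st.prune_above_filtration(1.5))
        st.initialize_filtration();
      return 0;
    }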
diff --git a/src/Simplex_tree/test/CMakeLists.txt b/src/Simplex_tree/test/CMakeLists.txt
index 81999de6..8684ad2a 100644
--- a/src/Simplex_tree/test/CMakeLists.txt
+++ b/src/Simplex_tree/test/CMakeLists.txt
@@ -3,13 +3,29 @@ project(Simplex_tree_tests)
include(GUDHI_test_coverage)
+# Do not forget to copy test files in current binary dir
+file(COPY "simplex_tree_for_unit_test.txt" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+
add_executable ( Simplex_tree_test_unit simplex_tree_unit_test.cpp )
target_link_libraries(Simplex_tree_test_unit ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
if (TBB_FOUND)
target_link_libraries(Simplex_tree_test_unit ${TBB_LIBRARIES})
endif()
-# Do not forget to copy test files in current binary dir
-file(COPY "simplex_tree_for_unit_test.txt" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
-
gudhi_add_coverage_test(Simplex_tree_test_unit)
+
+add_executable ( Simplex_tree_remove_test_unit simplex_tree_remove_unit_test.cpp )
+target_link_libraries(Simplex_tree_remove_test_unit ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
+if (TBB_FOUND)
+ target_link_libraries(Simplex_tree_remove_test_unit ${TBB_LIBRARIES})
+endif()
+
+gudhi_add_coverage_test(Simplex_tree_remove_test_unit)
+
+add_executable ( Simplex_tree_iostream_operator_test_unit simplex_tree_iostream_operator_unit_test.cpp )
+target_link_libraries(Simplex_tree_iostream_operator_test_unit ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
+if (TBB_FOUND)
+ target_link_libraries(Simplex_tree_iostream_operator_test_unit ${TBB_LIBRARIES})
+endif()
+
+gudhi_add_coverage_test(Simplex_tree_iostream_operator_test_unit)
diff --git a/src/Simplex_tree/test/README b/src/Simplex_tree/test/README
index 21c3d871..df2ab89a 100644
--- a/src/Simplex_tree/test/README
+++ b/src/Simplex_tree/test/README
@@ -9,6 +9,6 @@ make
To launch with details:
***********************
-./SimplexTreeUT --report_level=detailed --log_level=all
+./Simplex_tree_test_unit --report_level=detailed --log_level=all
==> echo $? returns 0 in case of success (non-zero otherwise)
diff --git a/src/Simplex_tree/test/simplex_tree_graph_expansion_unit_test.cpp b/src/Simplex_tree/test/simplex_tree_graph_expansion_unit_test.cpp
new file mode 100644
index 00000000..19ce3321
--- /dev/null
+++ b/src/Simplex_tree/test/simplex_tree_graph_expansion_unit_test.cpp
@@ -0,0 +1,235 @@
+#include <iostream>
+#include <fstream>
+#include <string>
+#include <algorithm>
+#include <utility> // std::pair, std::make_pair
+#include <cmath> // float comparison
+#include <limits>
+#include <functional> // greater
+
+#define BOOST_TEST_DYN_LINK
+#define BOOST_TEST_MODULE "simplex_tree"
+#include <boost/test/unit_test.hpp>
+#include <boost/mpl/list.hpp>
+
+// ^
+// /!\ Nothing else from Simplex_tree shall be included to test includes are well defined.
+#include "gudhi/Simplex_tree.h"
+
+using namespace Gudhi;
+
+typedef boost::mpl::list<Simplex_tree<>, Simplex_tree<Simplex_tree_options_fast_persistence>> list_of_tested_variants;
+
+
+bool AreAlmostTheSame(float a, float b) {
+ return std::fabs(a - b) < std::numeric_limits<float>::epsilon();
+}
+
+BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_expansion_with_blockers_3, typeST, list_of_tested_variants) {
+ using Simplex_handle = typename typeST::Simplex_handle;
+ // Construct the Simplex Tree with a 1-skeleton graph example
+ typeST simplex_tree;
+
+ simplex_tree.insert_simplex({0, 1}, 0.);
+ simplex_tree.insert_simplex({0, 2}, 1.);
+ simplex_tree.insert_simplex({0, 3}, 2.);
+ simplex_tree.insert_simplex({1, 2}, 3.);
+ simplex_tree.insert_simplex({1, 3}, 4.);
+ simplex_tree.insert_simplex({2, 3}, 5.);
+ simplex_tree.insert_simplex({2, 4}, 6.);
+ simplex_tree.insert_simplex({3, 6}, 7.);
+ simplex_tree.insert_simplex({4, 5}, 8.);
+ simplex_tree.insert_simplex({4, 6}, 9.);
+ simplex_tree.insert_simplex({5, 6}, 10.);
+ simplex_tree.insert_simplex({6}, 10.);
+
+ simplex_tree.expansion_with_blockers(3, [&](Simplex_handle sh){
+ bool result = false;
+ std::cout << "Blocker on [";
+ // User can loop on the vertices of the given simplex_handle, e.g.:
+ for (auto vertex : simplex_tree.simplex_vertex_range(sh)) {
+ // We block the expansion if the vertex '6' is in the given list of vertices
+ if (vertex == 6)
+ result = true;
+ std::cout << vertex << ", ";
+ }
+ std::cout << "] ( " << simplex_tree.filtration(sh);
+ // User can re-assign a new filtration value directly in the blocker (default is the maximal value of the boundary faces)
+ simplex_tree.assign_filtration(sh, simplex_tree.filtration(sh) + 1.);
+
+ std::cout << " + 1. ) = " << result << std::endl;
+
+ return result;
+ });
+
+ std::cout << "********************************************************************\n";
+ std::cout << "simplex_tree_expansion_with_blockers_3\n";
+ std::cout << "********************************************************************\n";
+ std::cout << "* The complex contains " << simplex_tree.num_simplices() << " simplices";
+ std::cout << " - dimension " << simplex_tree.dimension() << "\n";
+ std::cout << "* Iterator on Simplices in the filtration, with [filtration value]:\n";
+ for (auto f_simplex : simplex_tree.filtration_simplex_range()) {
+ std::cout << " " << "[" << simplex_tree.filtration(f_simplex) << "] ";
+ for (auto vertex : simplex_tree.simplex_vertex_range(f_simplex))
+ std::cout << "(" << vertex << ")";
+ std::cout << std::endl;
+ }
+
+ BOOST_CHECK(simplex_tree.num_simplices() == 23);
+ BOOST_CHECK(simplex_tree.dimension() == 3);
+ // {4, 5, 6} shall be blocked
+ BOOST_CHECK(simplex_tree.find({4, 5, 6}) == simplex_tree.null_simplex());
+ BOOST_CHECK(AreAlmostTheSame(simplex_tree.filtration(simplex_tree.find({0,1,2})), 4.));
+ BOOST_CHECK(AreAlmostTheSame(simplex_tree.filtration(simplex_tree.find({0,1,3})), 5.));
+ BOOST_CHECK(AreAlmostTheSame(simplex_tree.filtration(simplex_tree.find({0,2,3})), 6.));
+ BOOST_CHECK(AreAlmostTheSame(simplex_tree.filtration(simplex_tree.find({1,2,3})), 6.));
+ BOOST_CHECK(AreAlmostTheSame(simplex_tree.filtration(simplex_tree.find({0,1,2,3})), 7.));
+
+}
+
+BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_expansion_with_blockers_2, typeST, list_of_tested_variants) {
+ using Simplex_handle = typename typeST::Simplex_handle;
+ // Construct the Simplex Tree with a 1-skeleton graph example
+ typeST simplex_tree;
+
+ simplex_tree.insert_simplex({0, 1}, 0.);
+ simplex_tree.insert_simplex({0, 2}, 1.);
+ simplex_tree.insert_simplex({0, 3}, 2.);
+ simplex_tree.insert_simplex({1, 2}, 3.);
+ simplex_tree.insert_simplex({1, 3}, 4.);
+ simplex_tree.insert_simplex({2, 3}, 5.);
+ simplex_tree.insert_simplex({2, 4}, 6.);
+ simplex_tree.insert_simplex({3, 6}, 7.);
+ simplex_tree.insert_simplex({4, 5}, 8.);
+ simplex_tree.insert_simplex({4, 6}, 9.);
+ simplex_tree.insert_simplex({5, 6}, 10.);
+ simplex_tree.insert_simplex({6}, 10.);
+
+ simplex_tree.expansion_with_blockers(2, [&](Simplex_handle sh){
+ bool result = false;
+ std::cout << "Blocker on [";
+ // User can loop on the vertices of the given simplex_handle, e.g.:
+ for (auto vertex : simplex_tree.simplex_vertex_range(sh)) {
+ // We block the expansion if the vertex '6' is in the given list of vertices
+ if (vertex == 6)
+ result = true;
+ std::cout << vertex << ", ";
+ }
+ std::cout << "] ( " << simplex_tree.filtration(sh);
+ // User can re-assign a new filtration value directly in the blocker (default is the maximal value of the boundary faces)
+ simplex_tree.assign_filtration(sh, simplex_tree.filtration(sh) + 1.);
+
+ std::cout << " + 1. ) = " << result << std::endl;
+
+ return result;
+ });
+
+ std::cout << "********************************************************************\n";
+ std::cout << "simplex_tree_expansion_with_blockers_2\n";
+ std::cout << "********************************************************************\n";
+ std::cout << "* The complex contains " << simplex_tree.num_simplices() << " simplices";
+ std::cout << " - dimension " << simplex_tree.dimension() << "\n";
+ std::cout << "* Iterator on Simplices in the filtration, with [filtration value]:\n";
+ for (auto f_simplex : simplex_tree.filtration_simplex_range()) {
+ std::cout << " " << "[" << simplex_tree.filtration(f_simplex) << "] ";
+ for (auto vertex : simplex_tree.simplex_vertex_range(f_simplex))
+ std::cout << "(" << vertex << ")";
+ std::cout << std::endl;
+ }
+
+ BOOST_CHECK(simplex_tree.num_simplices() == 22);
+ BOOST_CHECK(simplex_tree.dimension() == 2);
+ // {4, 5, 6} shall be blocked
+ BOOST_CHECK(simplex_tree.find({4, 5, 6}) == simplex_tree.null_simplex());
+ BOOST_CHECK(AreAlmostTheSame(simplex_tree.filtration(simplex_tree.find({0,1,2})), 4.));
+ BOOST_CHECK(AreAlmostTheSame(simplex_tree.filtration(simplex_tree.find({0,1,3})), 5.));
+ BOOST_CHECK(AreAlmostTheSame(simplex_tree.filtration(simplex_tree.find({0,2,3})), 6.));
+ BOOST_CHECK(AreAlmostTheSame(simplex_tree.filtration(simplex_tree.find({1,2,3})), 6.));
+ BOOST_CHECK(simplex_tree.find({0,1,2,3}) == simplex_tree.null_simplex());
+}
+
+BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_expansion, typeST, list_of_tested_variants) {
+ // Construct the Simplex Tree with a 1-skeleton graph example
+ typeST simplex_tree;
+
+ simplex_tree.insert_simplex({0, 1}, 0.);
+ simplex_tree.insert_simplex({0, 2}, 1.);
+ simplex_tree.insert_simplex({0, 3}, 2.);
+ simplex_tree.insert_simplex({1, 2}, 3.);
+ simplex_tree.insert_simplex({1, 3}, 4.);
+ simplex_tree.insert_simplex({2, 3}, 5.);
+ simplex_tree.insert_simplex({2, 4}, 6.);
+ simplex_tree.insert_simplex({3, 6}, 7.);
+ simplex_tree.insert_simplex({4, 5}, 8.);
+ simplex_tree.insert_simplex({4, 6}, 9.);
+ simplex_tree.insert_simplex({5, 6}, 10.);
+ simplex_tree.insert_simplex({6}, 10.);
+
+ simplex_tree.expansion(3);
+ std::cout << "********************************************************************\n";
+ std::cout << "simplex_tree_expansion_3\n";
+ std::cout << "********************************************************************\n";
+ std::cout << "* The complex contains " << simplex_tree.num_simplices() << " simplices";
+ std::cout << " - dimension " << simplex_tree.dimension() << "\n";
+ std::cout << "* Iterator on Simplices in the filtration, with [filtration value]:\n";
+ for (auto f_simplex : simplex_tree.filtration_simplex_range()) {
+ std::cout << " " << "[" << simplex_tree.filtration(f_simplex) << "] ";
+ for (auto vertex : simplex_tree.simplex_vertex_range(f_simplex))
+ std::cout << "(" << vertex << ")";
+ std::cout << std::endl;
+ }
+
+ BOOST_CHECK(simplex_tree.num_simplices() == 24);
+ BOOST_CHECK(simplex_tree.dimension() == 3);
+
+ BOOST_CHECK(AreAlmostTheSame(simplex_tree.filtration(simplex_tree.find({4,5,6})), 10.));
+ BOOST_CHECK(AreAlmostTheSame(simplex_tree.filtration(simplex_tree.find({0,1,2})), 3.));
+ BOOST_CHECK(AreAlmostTheSame(simplex_tree.filtration(simplex_tree.find({0,1,3})), 4.));
+ BOOST_CHECK(AreAlmostTheSame(simplex_tree.filtration(simplex_tree.find({0,2,3})), 5.));
+ BOOST_CHECK(AreAlmostTheSame(simplex_tree.filtration(simplex_tree.find({1,2,3})), 5.));
+ BOOST_CHECK(AreAlmostTheSame(simplex_tree.filtration(simplex_tree.find({0,1,2,3})), 5.));
+
+}
+
+BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_expansion_2, typeST, list_of_tested_variants) {
+ // Construct the Simplex Tree with a 1-skeleton graph example
+ typeST simplex_tree;
+
+ simplex_tree.insert_simplex({0, 1}, 0.);
+ simplex_tree.insert_simplex({0, 2}, 1.);
+ simplex_tree.insert_simplex({0, 3}, 2.);
+ simplex_tree.insert_simplex({1, 2}, 3.);
+ simplex_tree.insert_simplex({1, 3}, 4.);
+ simplex_tree.insert_simplex({2, 3}, 5.);
+ simplex_tree.insert_simplex({2, 4}, 6.);
+ simplex_tree.insert_simplex({3, 6}, 7.);
+ simplex_tree.insert_simplex({4, 5}, 8.);
+ simplex_tree.insert_simplex({4, 6}, 9.);
+ simplex_tree.insert_simplex({5, 6}, 10.);
+ simplex_tree.insert_simplex({6}, 10.);
+
+ simplex_tree.expansion(2);
+
+ std::cout << "********************************************************************\n";
+ std::cout << "simplex_tree_expansion_2\n";
+ std::cout << "********************************************************************\n";
+ std::cout << "* The complex contains " << simplex_tree.num_simplices() << " simplices";
+ std::cout << " - dimension " << simplex_tree.dimension() << "\n";
+ std::cout << "* Iterator on Simplices in the filtration, with [filtration value]:\n";
+ for (auto f_simplex : simplex_tree.filtration_simplex_range()) {
+ std::cout << " " << "[" << simplex_tree.filtration(f_simplex) << "] ";
+ for (auto vertex : simplex_tree.simplex_vertex_range(f_simplex))
+ std::cout << "(" << vertex << ")";
+ std::cout << std::endl;
+ }
+
+ BOOST_CHECK(simplex_tree.num_simplices() == 23);
+ BOOST_CHECK(simplex_tree.dimension() == 2);
+
+ BOOST_CHECK(AreAlmostTheSame(simplex_tree.filtration(simplex_tree.find({4,5,6})), 10.));
+ BOOST_CHECK(AreAlmostTheSame(simplex_tree.filtration(simplex_tree.find({0,1,2})), 3.));
+ BOOST_CHECK(AreAlmostTheSame(simplex_tree.filtration(simplex_tree.find({0,1,3})), 4.));
+ BOOST_CHECK(AreAlmostTheSame(simplex_tree.filtration(simplex_tree.find({0,2,3})), 5.));
+ BOOST_CHECK(AreAlmostTheSame(simplex_tree.filtration(simplex_tree.find({1,2,3})), 5.));
+ BOOST_CHECK(simplex_tree.find({0,1,2,3}) == simplex_tree.null_simplex());
+}
diff --git a/src/Simplex_tree/test/simplex_tree_iostream_operator_unit_test.cpp b/src/Simplex_tree/test/simplex_tree_iostream_operator_unit_test.cpp
new file mode 100644
index 00000000..ecb9f025
--- /dev/null
+++ b/src/Simplex_tree/test/simplex_tree_iostream_operator_unit_test.cpp
@@ -0,0 +1,136 @@
+#include <iostream>
+#include <fstream>  // for std::ofstream and std::ifstream
+#include <string>
+
+#define BOOST_TEST_DYN_LINK
+#define BOOST_TEST_MODULE "simplex_tree_iostream_operator"
+#include <boost/test/unit_test.hpp>
+#include <boost/mpl/list.hpp>
+
+// ^
+// /!\ Nothing else from Simplex_tree shall be included to test includes are well defined.
+#include "gudhi/Simplex_tree.h"
+
+using namespace Gudhi;
+
+struct MyOptions : Simplex_tree_options_full_featured {
+ // Not doing persistence, so we don't need those
+ static const bool store_key = false;
+ static const bool store_filtration = false;
+ // I have few vertices
+ typedef short Vertex_handle;
+};
+
+typedef boost::mpl::list<Simplex_tree<>,
+ Simplex_tree<Simplex_tree_options_fast_persistence>
+ > list_of_tested_variants;
+
+BOOST_AUTO_TEST_CASE_TEMPLATE(iostream_operator, Stree_type, list_of_tested_variants) {
+ std::cout << "********************************************************************" << std::endl;
+ std::cout << "SIMPLEX TREE IOSTREAM OPERATOR" << std::endl;
+
+ Stree_type st;
+
+ st.insert_simplex_and_subfaces({0, 1, 6, 7}, 4.0);
+ st.insert_simplex_and_subfaces({3, 4, 5}, 3.0);
+ st.insert_simplex_and_subfaces({3, 0}, 2.0);
+ st.insert_simplex_and_subfaces({2, 1, 0}, 3.0);
+
+ st.initialize_filtration();
+ // Display the Simplex_tree
+ std::cout << "The ORIGINAL complex contains " << st.num_simplices() << " simplices - dimension = "
+ << st.dimension() << std::endl;
+ std::cout << std::endl << std::endl << "Iterator on Simplices in the filtration, with [filtration value]:" << std::endl;
+ for (auto f_simplex : st.filtration_simplex_range()) {
+ std::cout << " " << "[" << st.filtration(f_simplex) << "] ";
+ for (auto vertex : st.simplex_vertex_range(f_simplex)) {
+ std::cout << (int) vertex << " ";
+ }
+ std::cout << std::endl;
+ }
+
+ // st:
+ // 1 6
+ // o---o
+ // /X\7/
+ // o---o---o---o
+ // 2 0 3\X/4
+ // o
+ // 5
+ std::string iostream_file("simplex_tree_for_iostream_operator_unit_test.txt");
+ std::ofstream simplex_tree_ostream(iostream_file.c_str());
+ simplex_tree_ostream << st;
+ simplex_tree_ostream.close();
+
+ Stree_type read_st;
+ std::ifstream simplex_tree_istream(iostream_file.c_str());
+ simplex_tree_istream >> read_st;
+
+ // Display the Simplex_tree
+ std::cout << "The READ complex contains " << read_st.num_simplices() << " simplices - dimension = "
+ << read_st.dimension() << std::endl;
+ std::cout << std::endl << std::endl << "Iterator on Simplices in the filtration, with [filtration value]:" << std::endl;
+ for (auto f_simplex : read_st.filtration_simplex_range()) {
+ std::cout << " " << "[" << read_st.filtration(f_simplex) << "] ";
+ for (auto vertex : read_st.simplex_vertex_range(f_simplex)) {
+ std::cout << (int) vertex << " ";
+ }
+ std::cout << std::endl;
+ }
+
+ BOOST_CHECK(st == read_st);
+}
+
+
+BOOST_AUTO_TEST_CASE(mini_iostream_operator) {
+ std::cout << "********************************************************************" << std::endl;
+ std::cout << "MINI SIMPLEX TREE IOSTREAM OPERATOR" << std::endl;
+
+ Simplex_tree<MyOptions> st;
+
+ st.insert_simplex_and_subfaces({0, 1, 6, 7});
+ st.insert_simplex_and_subfaces({3, 4, 5});
+ st.insert_simplex_and_subfaces({3, 0});
+ st.insert_simplex_and_subfaces({2, 1, 0});
+
+ st.initialize_filtration();
+ // Display the Simplex_tree
+ std::cout << "The ORIGINAL complex contains " << st.num_simplices() << " simplices - dimension = "
+ << st.dimension() << std::endl;
+ for (auto f_simplex : st.filtration_simplex_range()) {
+ std::cout << " " << "[" << st.filtration(f_simplex) << "] ";
+ for (auto vertex : st.simplex_vertex_range(f_simplex)) {
+ std::cout << (int) vertex << " ";
+ }
+ std::cout << std::endl;
+ }
+
+ // st:
+ // 1 6
+ // o---o
+ // /X\7/
+ // o---o---o---o
+ // 2 0 3\X/4
+ // o
+ // 5
+ std::string iostream_file("simplex_tree_for_iostream_operator_unit_test.txt");
+ std::ofstream simplex_tree_ostream(iostream_file.c_str());
+ simplex_tree_ostream << st;
+ simplex_tree_ostream.close();
+
+ Simplex_tree<MyOptions> read_st;
+ std::ifstream simplex_tree_istream(iostream_file.c_str());
+ simplex_tree_istream >> read_st;
+
+ // Display the Simplex_tree
+ std::cout << "The READ complex contains " << read_st.num_simplices() << " simplices - dimension = "
+ << read_st.dimension() << std::endl;
+ std::cout << std::endl << std::endl << "Iterator on Simplices in the filtration, with [filtration value]:" << std::endl;
+ for (auto f_simplex : read_st.filtration_simplex_range()) {
+ std::cout << " " << "[" << read_st.filtration(f_simplex) << "] ";
+ for (auto vertex : read_st.simplex_vertex_range(f_simplex)) {
+ std::cout << (int) vertex << " ";
+ }
+ std::cout << std::endl;
+ }
+
+ BOOST_CHECK(st == read_st);
+}
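The same operators accept any std::iostream, so the round trip does not require a file. A minimal in-memory sketch, assuming std::stringstream behaves like the file streams used in the test above; the simplex and variable names are illustrative:

    #include <gudhi/Simplex_tree.h>
    #include <cassert>
    #include <sstream>

    int main() {
      Gudhi::Simplex_tree<> st;
      st.insert_simplex_and_subfaces({0, 1, 2}, 1.0);
      st.initialize_filtration();  // operator<< iterates over the filtration

      std::stringstream buffer;
      buffer << st;                // serialize the simplex tree into the buffer
      Gudhi::Simplex_tree<> read_st;
      buffer >> read_st;           // read it back into a fresh tree

      assert(st == read_st);
      return 0;
    }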
diff --git a/src/Simplex_tree/test/simplex_tree_remove_unit_test.cpp b/src/Simplex_tree/test/simplex_tree_remove_unit_test.cpp
new file mode 100644
index 00000000..dc37375c
--- /dev/null
+++ b/src/Simplex_tree/test/simplex_tree_remove_unit_test.cpp
@@ -0,0 +1,427 @@
+#include <iostream>
+
+#define BOOST_TEST_DYN_LINK
+#define BOOST_TEST_MODULE "simplex_tree_remove"
+#include <boost/test/unit_test.hpp>
+
+// ^
+// /!\ Nothing else from Simplex_tree shall be included to test includes are well defined.
+#include "gudhi/Simplex_tree.h"
+
+using namespace Gudhi;
+
+struct MyOptions : Simplex_tree_options_full_featured {
+ // Not doing persistence, so we don't need those
+ static const bool store_key = false;
+ static const bool store_filtration = false;
+ // I have few vertices
+ typedef short Vertex_handle;
+};
+
+using Mini_stree = Simplex_tree<MyOptions>;
+using Stree = Simplex_tree<>;
+
+BOOST_AUTO_TEST_CASE(remove_maximal_simplex) {
+ std::cout << "********************************************************************" << std::endl;
+ std::cout << "REMOVE MAXIMAL SIMPLEX" << std::endl;
+
+ Mini_stree st;
+
+ st.insert_simplex_and_subfaces({0, 1, 6, 7});
+ st.insert_simplex_and_subfaces({3, 4, 5});
+
+ // Constructs a copy at this state for further test purpose
+ Mini_stree st_pruned = st;
+
+ st.insert_simplex_and_subfaces({3, 0});
+ st.insert_simplex_and_subfaces({2, 1, 0});
+
+ // Constructs a copy at this state for further test purpose
+ Mini_stree st_complete = st;
+ // st_complete and st:
+ // 1 6
+ // o---o
+ // /X\7/
+ // o---o---o---o
+ // 2 0 3\X/4
+ // o
+ // 5
+ // st_pruned:
+ // 1 6
+ // o---o
+ // \7/
+ // o o---o
+ // 0 3\X/4
+ // o
+ // 5
+
+#ifdef GUDHI_DEBUG
+ std::cout << "Check exception throw in debug mode" << std::endl;
+ // throws an exception because sh has children
+ BOOST_CHECK_THROW (st.remove_maximal_simplex(st.find({0, 1, 6})), std::invalid_argument);
+ BOOST_CHECK_THROW (st.remove_maximal_simplex(st.find({3})), std::invalid_argument);
+ BOOST_CHECK(st == st_complete);
+#endif
+ std::cout << "st.remove_maximal_simplex({0, 2})" << std::endl;
+ st.remove_maximal_simplex(st.find({0, 2}));
+ std::cout << "st.remove_maximal_simplex({0, 1, 2})" << std::endl;
+ st.remove_maximal_simplex(st.find({0, 1, 2}));
+ std::cout << "st.remove_maximal_simplex({1, 2})" << std::endl;
+ st.remove_maximal_simplex(st.find({1, 2}));
+ std::cout << "st.remove_maximal_simplex({2})" << std::endl;
+ st.remove_maximal_simplex(st.find({2}));
+ std::cout << "st.remove_maximal_simplex({3})" << std::endl;
+ st.remove_maximal_simplex(st.find({0, 3}));
+
+ BOOST_CHECK(st == st_pruned);
+ // Remove all, but as the simplex tree is not storing filtration, there is no modification
+ st.prune_above_filtration(0.0);
+ BOOST_CHECK(st == st_pruned);
+
+ Mini_stree st_wo_seven;
+
+ st_wo_seven.insert_simplex_and_subfaces({0, 1, 6});
+ st_wo_seven.insert_simplex_and_subfaces({3, 4, 5});
+ // st_wo_seven:
+ // 1 6
+ // o---o
+ // \X/
+ // o o---o
+ // 0 3\X/4
+ // o
+ // 5
+
+ // Remove all simplices containing vertex 7, to test both remove_maximal_simplex cases (when _members becomes empty or not)
+ std::cout << "st.remove_maximal_simplex({0, 1, 6, 7})" << std::endl;
+ st.remove_maximal_simplex(st.find({0, 1, 6, 7}));
+ std::cout << "st.remove_maximal_simplex({0, 1, 7})" << std::endl;
+ st.remove_maximal_simplex(st.find({0, 1, 7}));
+ std::cout << "st.remove_maximal_simplex({0, 6, 7})" << std::endl;
+ st.remove_maximal_simplex(st.find({0, 6, 7}));
+ std::cout << "st.remove_maximal_simplex({0, 7})" << std::endl;
+ st.remove_maximal_simplex(st.find({0, 7}));
+ std::cout << "st.remove_maximal_simplex({1, 6, 7})" << std::endl;
+ st.remove_maximal_simplex(st.find({1, 6, 7}));
+ std::cout << "st.remove_maximal_simplex({1, 7})" << std::endl;
+ st.remove_maximal_simplex(st.find({1, 7}));
+ std::cout << "st.remove_maximal_simplex({6, 7})" << std::endl;
+ st.remove_maximal_simplex(st.find({6, 7}));
+ std::cout << "st.remove_maximal_simplex({7})" << std::endl;
+ st.remove_maximal_simplex(st.find({7}));
+
+ std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
+ BOOST_CHECK(st.upper_bound_dimension() == 3);
+
+ // Check that dimension() calls lower_upper_bound_dimension() to recompute the dimension
+ BOOST_CHECK(st.dimension() == 2);
+ BOOST_CHECK(st.upper_bound_dimension() == 2);
+
+ std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension()
+ << " | st_wo_seven.upper_bound_dimension()=" << st_wo_seven.upper_bound_dimension() << std::endl;
+ std::cout << "st.dimension()=" << st.dimension() << " | st_wo_seven.dimension()=" << st_wo_seven.dimension() << std::endl;
+ BOOST_CHECK(st == st_wo_seven);
+}
+
+BOOST_AUTO_TEST_CASE(auto_dimension_set) {
+ std::cout << "********************************************************************" << std::endl;
+ std::cout << "DIMENSION ON REMOVE MAXIMAL SIMPLEX" << std::endl;
+
+ Mini_stree st;
+
+ st.insert_simplex_and_subfaces({0, 1, 2});
+ st.insert_simplex_and_subfaces({0, 1, 3});
+ st.insert_simplex_and_subfaces({1, 2, 3, 4});
+ st.insert_simplex_and_subfaces({1, 2, 3, 5});
+ st.insert_simplex_and_subfaces({6, 7, 8, 9});
+ st.insert_simplex_and_subfaces({6, 7, 8, 10});
+
+ BOOST_CHECK(st.upper_bound_dimension() == 3);
+ BOOST_CHECK(st.dimension() == 3);
+
+ std::cout << "st.remove_maximal_simplex({6, 7, 8, 10})" << std::endl;
+ st.remove_maximal_simplex(st.find({6, 7, 8, 10}));
+ std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
+ BOOST_CHECK(st.upper_bound_dimension() == 3);
+ BOOST_CHECK(st.dimension() == 3);
+
+ std::cout << "st.remove_maximal_simplex({6, 7, 8, 9})" << std::endl;
+ st.remove_maximal_simplex(st.find({6, 7, 8, 9}));
+ std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
+ BOOST_CHECK(st.upper_bound_dimension() == 3);
+ BOOST_CHECK(st.dimension() == 3);
+
+ std::cout << "st.remove_maximal_simplex({1, 2, 3, 4})" << std::endl;
+ st.remove_maximal_simplex(st.find({1, 2, 3, 4}));
+ std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
+ BOOST_CHECK(st.upper_bound_dimension() == 3);
+ BOOST_CHECK(st.dimension() == 3);
+
+ std::cout << "st.remove_maximal_simplex({1, 2, 3, 5})" << std::endl;
+ st.remove_maximal_simplex(st.find({1, 2, 3, 5}));
+ std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
+ BOOST_CHECK(st.upper_bound_dimension() == 3);
+ BOOST_CHECK(st.dimension() == 2);
+ std::cout << "st.dimension()=" << st.dimension() << std::endl;
+
+ std::cout << "st.insert_simplex_and_subfaces({1, 2, 3, 5})" << std::endl;
+ st.insert_simplex_and_subfaces({1, 2, 3, 5});
+ std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
+ BOOST_CHECK(st.upper_bound_dimension() == 3);
+ BOOST_CHECK(st.dimension() == 3);
+
+ std::cout << "st.insert_simplex_and_subfaces({1, 2, 3, 4})" << std::endl;
+ st.insert_simplex_and_subfaces({1, 2, 3, 4});
+ std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
+ BOOST_CHECK(st.upper_bound_dimension() == 3);
+ BOOST_CHECK(st.dimension() == 3);
+
+
+ std::cout << "st.remove_maximal_simplex({1, 2, 3, 5})" << std::endl;
+ st.remove_maximal_simplex(st.find({1, 2, 3, 5}));
+ std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
+ BOOST_CHECK(st.upper_bound_dimension() == 3);
+ BOOST_CHECK(st.dimension() == 3);
+
+
+ std::cout << "st.remove_maximal_simplex({1, 2, 3, 4})" << std::endl;
+ st.remove_maximal_simplex(st.find({1, 2, 3, 4}));
+ std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
+ BOOST_CHECK(st.upper_bound_dimension() == 3);
+ BOOST_CHECK(st.dimension() == 2);
+ std::cout << "st.dimension()=" << st.dimension() << std::endl;
+
+ std::cout << "st.insert_simplex_and_subfaces({0, 1, 3, 4})" << std::endl;
+ st.insert_simplex_and_subfaces({0, 1, 3, 4});
+ std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
+ BOOST_CHECK(st.upper_bound_dimension() == 3);
+ BOOST_CHECK(st.dimension() == 3);
+
+ std::cout << "st.remove_maximal_simplex({0, 1, 3, 4})" << std::endl;
+ st.remove_maximal_simplex(st.find({0, 1, 3, 4}));
+ std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
+ BOOST_CHECK(st.upper_bound_dimension() == 3);
+ BOOST_CHECK(st.dimension() == 2);
+ std::cout << "st.dimension()=" << st.dimension() << std::endl;
+
+ std::cout << "st.insert_simplex_and_subfaces({1, 2, 3, 5})" << std::endl;
+ st.insert_simplex_and_subfaces({1, 2, 3, 5});
+ std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
+ BOOST_CHECK(st.upper_bound_dimension() == 3);
+ BOOST_CHECK(st.dimension() == 3);
+
+ std::cout << "st.insert_simplex_and_subfaces({1, 2, 3, 4})" << std::endl;
+ st.insert_simplex_and_subfaces({1, 2, 3, 4});
+ std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
+ BOOST_CHECK(st.upper_bound_dimension() == 3);
+ BOOST_CHECK(st.dimension() == 3);
+
+
+ // Check you can override the dimension
+ // This is a limit test case - shall not happen
+ st.set_dimension(1);
+ std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
+ BOOST_CHECK(st.upper_bound_dimension() == 1);
+ // check that dimension() and lower_upper_bound_dimension() do not give the right answer because the stored dimension is too low
+ BOOST_CHECK(st.dimension() == 1);
+
+
+ // Check you can override the dimension
+ // This is a limit test case - shall not happen
+ st.set_dimension(6);
+ std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
+ BOOST_CHECK(st.upper_bound_dimension() == 6);
+ // check that dimension() does not launch lower_upper_bound_dimension()
+ BOOST_CHECK(st.dimension() == 6);
+
+
+ // Reset with the correct value
+ st.set_dimension(3);
+ std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
+ BOOST_CHECK(st.upper_bound_dimension() == 3);
+ BOOST_CHECK(st.dimension() == 3);
+
+ std::cout << "st.insert_simplex_and_subfaces({0, 1, 2, 3, 4, 5, 6})" << std::endl;
+ st.insert_simplex_and_subfaces({0, 1, 2, 3, 4, 5, 6});
+ std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
+ BOOST_CHECK(st.upper_bound_dimension() == 6);
+ BOOST_CHECK(st.dimension() == 6);
+
+ std::cout << "st.remove_maximal_simplex({0, 1, 2, 3, 4, 5, 6})" << std::endl;
+ st.remove_maximal_simplex(st.find({0, 1, 2, 3, 4, 5, 6}));
+ std::cout << "st.upper_bound_dimension()=" << st.upper_bound_dimension() << std::endl;
+ BOOST_CHECK(st.upper_bound_dimension() == 6);
+ BOOST_CHECK(st.dimension() == 5);
+
+}
+
+BOOST_AUTO_TEST_CASE(prune_above_filtration) {
+ std::cout << "********************************************************************" << std::endl;
+ std::cout << "PRUNE ABOVE FILTRATION" << std::endl;
+
+ Stree st;
+
+ st.insert_simplex_and_subfaces({0, 1, 6, 7}, 1.0);
+ st.insert_simplex_and_subfaces({3, 4, 5}, 2.0);
+
+ // Constructs a copy at this state for further test purpose
+ Stree st_pruned = st;
+ st_pruned.initialize_filtration(); // reset
+
+ st.insert_simplex_and_subfaces({3, 0}, 3.0);
+ st.insert_simplex_and_subfaces({2, 1, 0}, 4.0);
+
+ // Constructs a copy at this state for further test purpose
+ Stree st_complete = st;
+ // st_complete and st:
+ // 1 6
+ // o---o
+ // /X\7/
+ // o---o---o---o
+ // 2 0 3\X/4
+ // o
+ // 5
+ // st_pruned:
+ // 1 6
+ // o---o
+ // \7/
+ // o o---o
+ // 0 3\X/4
+ // o
+ // 5
+
+ bool simplex_is_changed = false;
+ // Check the no action cases
+ // greater than initial filtration value
+ simplex_is_changed = st.prune_above_filtration(10.0);
+ if (simplex_is_changed)
+ st.initialize_filtration();
+ BOOST_CHECK(st == st_complete);
+ BOOST_CHECK(!simplex_is_changed);
+ // equal to initial filtration value
+ simplex_is_changed = st.prune_above_filtration(6.0);
+ if (simplex_is_changed)
+ st.initialize_filtration();
+ BOOST_CHECK(st == st_complete);
+ BOOST_CHECK(!simplex_is_changed);
+ // lower than initial filtration value, but still greater than the maximum filtration value
+ simplex_is_changed = st.prune_above_filtration(5.0);
+ if (simplex_is_changed)
+ st.initialize_filtration();
+ BOOST_CHECK(st == st_complete);
+ BOOST_CHECK(!simplex_is_changed);
+
+ // Display the Simplex_tree
+ std::cout << "The complex contains " << st.num_simplices() << " simplices";
+ std::cout << " - dimension " << st.dimension() << std::endl;
+ std::cout << "Iterator on Simplices in the filtration, with [filtration value]:" << std::endl;
+ for (auto f_simplex : st.filtration_simplex_range()) {
+ std::cout << " " << "[" << st.filtration(f_simplex) << "] ";
+ for (auto vertex : st.simplex_vertex_range(f_simplex)) {
+ std::cout << (int) vertex << " ";
+ }
+ std::cout << std::endl;
+ }
+
+ // Check the pruned cases
+ simplex_is_changed = st.prune_above_filtration(2.5);
+ if (simplex_is_changed)
+ st.initialize_filtration();
+ BOOST_CHECK(st == st_pruned);
+ BOOST_CHECK(simplex_is_changed);
+
+ // Display the Simplex_tree
+ std::cout << "The complex pruned at 2.5 contains " << st.num_simplices() << " simplices";
+ std::cout << " - dimension " << st.dimension() << std::endl;
+
+ simplex_is_changed = st.prune_above_filtration(2.0);
+ if (simplex_is_changed)
+ st.initialize_filtration();
+
+ std::cout << "The complex pruned at 2.0 contains " << st.num_simplices() << " simplices";
+ std::cout << " - dimension " << st.dimension() << std::endl;
+
+ BOOST_CHECK(st == st_pruned);
+ BOOST_CHECK(!simplex_is_changed);
+
+ Stree st_empty;
+ simplex_is_changed = st.prune_above_filtration(0.0);
+ BOOST_CHECK(simplex_is_changed == true);
+ if (simplex_is_changed)
+ st.initialize_filtration();
+
+ // Display the Simplex_tree
+ std::cout << "The complex pruned at 0.0 contains " << st.num_simplices() << " simplices";
+ std::cout << " - upper_bound_dimension " << st.upper_bound_dimension() << std::endl;
+ BOOST_CHECK(st.upper_bound_dimension() == 3);
+
+ BOOST_CHECK(st.dimension() == -1);
+ std::cout << "upper_bound_dimension=" << st.upper_bound_dimension() << std::endl;
+ BOOST_CHECK(st.upper_bound_dimension() == -1);
+
+ BOOST_CHECK(st == st_empty);
+ BOOST_CHECK(simplex_is_changed);
+
+ // Test case to the limit
+ simplex_is_changed = st.prune_above_filtration(-1.0);
+ if (simplex_is_changed)
+ st.initialize_filtration();
+ BOOST_CHECK(st == st_empty);
+ BOOST_CHECK(!simplex_is_changed);
+}
+
+BOOST_AUTO_TEST_CASE(mini_prune_above_filtration) {
+ std::cout << "********************************************************************" << std::endl;
+ std::cout << "MINI PRUNE ABOVE FILTRATION" << std::endl;
+
+ Mini_stree st;
+
+ st.insert_simplex_and_subfaces({0, 1, 6, 7});
+ st.insert_simplex_and_subfaces({3, 4, 5});
+ st.insert_simplex_and_subfaces({3, 0});
+ st.insert_simplex_and_subfaces({2, 1, 0});
+
+ // st:
+ // 1 6
+ // o---o
+ // /X\7/
+ // o---o---o---o
+ // 2 0 3\X/4
+ // o
+ // 5
+
+ st.initialize_filtration();
+
+ // Display the Simplex_tree
+ std::cout << "The complex contains " << st.num_simplices() << " simplices" << std::endl;
+ BOOST_CHECK(st.num_simplices() == 27);
+
+ // Test case to the limit - with these options, filtration values are not stored, so every filtration value is 0
+ bool simplex_is_changed = st.prune_above_filtration(1.0);
+ if (simplex_is_changed)
+ st.initialize_filtration();
+ // Display the Simplex_tree
+ std::cout << "The complex pruned at 1.0 contains " << st.num_simplices() << " simplices" << std::endl;
+ BOOST_CHECK(!simplex_is_changed);
+ BOOST_CHECK(st.num_simplices() == 27);
+
+ simplex_is_changed = st.prune_above_filtration(0.0);
+ if (simplex_is_changed)
+ st.initialize_filtration();
+ // Display the Simplex_tree
+ std::cout << "The complex pruned at 0.0 contains " << st.num_simplices() << " simplices" << std::endl;
+ BOOST_CHECK(!simplex_is_changed);
+ BOOST_CHECK(st.num_simplices() == 27);
+
+ // Test case to the limit
+ simplex_is_changed = st.prune_above_filtration(-1.0);
+ if (simplex_is_changed)
+ st.initialize_filtration();
+ // Display the Simplex_tree
+ std::cout << "The complex pruned at -1.0 contains " << st.num_simplices() << " simplices" << std::endl;
+ BOOST_CHECK(simplex_is_changed);
+ BOOST_CHECK(st.num_simplices() == 0);
+
+ // Display the Simplex_tree
+ std::cout << "The complex contains " << st.num_simplices() << " simplices" << std::endl;
+
+}
diff --git a/src/Simplex_tree/test/simplex_tree_unit_test.cpp b/src/Simplex_tree/test/simplex_tree_unit_test.cpp
index b06d7ec9..580d610a 100644
--- a/src/Simplex_tree/test/simplex_tree_unit_test.cpp
+++ b/src/Simplex_tree/test/simplex_tree_unit_test.cpp
@@ -26,7 +26,6 @@ void test_empty_simplex_tree(typeST& tst) {
typedef typename typeST::Vertex_handle Vertex_handle;
const Vertex_handle DEFAULT_VERTEX_VALUE = Vertex_handle(- 1);
BOOST_CHECK(tst.null_vertex() == DEFAULT_VERTEX_VALUE);
- BOOST_CHECK(tst.filtration() == 0.0);
BOOST_CHECK(tst.num_vertices() == (size_t) 0);
BOOST_CHECK(tst.num_simplices() == (size_t) 0);
typename typeST::Siblings* STRoot = tst.root();
@@ -98,12 +97,11 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_from_file, typeST, list_of_tested_var
// Display the Simplex_tree
std::cout << "The complex contains " << st.num_simplices() << " simplices" << std::endl;
- std::cout << " - dimension " << st.dimension() << " - filtration " << st.filtration() << std::endl;
+ std::cout << " - dimension " << st.dimension() << std::endl;
// Check
BOOST_CHECK(st.num_simplices() == 143353);
BOOST_CHECK(st.dimension() == 3);
- BOOST_CHECK(AreAlmostTheSame(st.filtration(), 0.4));
int previous_size = 0;
for (auto f_simplex : st.filtration_simplex_range()) {
@@ -147,7 +145,6 @@ void test_simplex_tree_insert_returns_true(const typePairSimplexBool& returnValu
}
// Global variables
-double max_fil = 0.0;
int dim_max = -1;
template<class typeST, class Filtration_value>
@@ -158,15 +155,8 @@ void set_and_test_simplex_tree_dim_fil(typeST& simplexTree, int vectorSize, cons
std::cout << " set_and_test_simplex_tree_dim_fil - dim_max=" << dim_max
<< std::endl;
}
- if (fil > max_fil) {
- max_fil = fil;
- simplexTree.set_filtration(max_fil);
- std::cout << " set_and_test_simplex_tree_dim_fil - max_fil=" << max_fil
- << std::endl;
- }
BOOST_CHECK(simplexTree.dimension() == dim_max);
- BOOST_CHECK(AreAlmostTheSame(simplexTree.filtration(), max_fil));
// Another way to count simplices:
size_t num_simp = 0;
@@ -190,7 +180,6 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_insertion, typeST, list_of_tested_var
const Filtration_value FOURTH_FILTRATION_VALUE = 0.4;
// reset since we run the test several times
dim_max = -1;
- max_fil = 0.0;
// TEST OF INSERTION
std::cout << "********************************************************************" << std::endl;
@@ -308,9 +297,9 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_insertion, typeST, list_of_tested_var
// Simplex_handle = boost::container::flat_map< typeST::Vertex_handle, Node >::iterator
typename typeST::Simplex_handle shReturned = returnValue.first;
BOOST_CHECK(shReturned == typename typeST::Simplex_handle(nullptr));
+ std::cout << "st.num_vertices()=" << st.num_vertices() << std::endl;
BOOST_CHECK(st.num_vertices() == (size_t) 4); // Not incremented !!
BOOST_CHECK(st.dimension() == dim_max);
- BOOST_CHECK(AreAlmostTheSame(st.filtration(), max_fil));
// ++ ELEVENTH
std::cout << " - INSERT (2,1,0) (already inserted)" << std::endl;
@@ -325,7 +314,6 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_insertion, typeST, list_of_tested_var
BOOST_CHECK(shReturned == typename typeST::Simplex_handle(nullptr));
BOOST_CHECK(st.num_vertices() == (size_t) 4); // Not incremented !!
BOOST_CHECK(st.dimension() == dim_max);
- BOOST_CHECK(AreAlmostTheSame(st.filtration(), max_fil));
/* Inserted simplex: */
/* 1 */
@@ -365,7 +353,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_insertion, typeST, list_of_tested_var
// Display the Simplex_tree - Can not be done in the middle of 2 inserts
std::cout << "The complex contains " << st.num_simplices() << " simplices" << std::endl;
- std::cout << " - dimension " << st.dimension() << " - filtration " << st.filtration() << std::endl;
+ std::cout << " - dimension " << st.dimension() << std::endl;
std::cout << std::endl << std::endl << "Iterator on Simplices in the filtration, with [filtration value]:" << std::endl;
for (auto f_simplex : st.filtration_simplex_range()) {
std::cout << " " << "[" << st.filtration(f_simplex) << "] ";
@@ -575,7 +563,7 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(NSimplexAndSubfaces_tree_insertion, typeST, list_o
// Display the Simplex_tree - Can not be done in the middle of 2 inserts
std::cout << "The complex contains " << st.num_simplices() << " simplices" << std::endl;
- std::cout << " - dimension " << st.dimension() << " - filtration " << st.filtration() << std::endl;
+ std::cout << " - dimension " << st.dimension() << std::endl;
std::cout << std::endl << std::endl << "Iterator on Simplices in the filtration, with [filtration value]:" << std::endl;
for (auto f_simplex : st.filtration_simplex_range()) {
std::cout << " " << "[" << st.filtration(f_simplex) << "] ";
@@ -630,9 +618,6 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(coface_on_simplex_tree, typeST, list_of_tested_var
/* o */
/* 5 */
- // FIXME
- st.set_dimension(3);
-
std::vector<typename typeST::Vertex_handle> simplex_result;
std::vector<typename typeST::Simplex_handle> result;
std::cout << "First test - Star of (3):" << std::endl;
@@ -729,9 +714,6 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(copy_move_on_simplex_tree, typeST, list_of_tested_
/* o */
/* 5 */
- // FIXME
- st.set_dimension(3);
-
std::cout << "Printing st - address = " << &st << std::endl;
// Copy constructor
@@ -756,7 +738,6 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(copy_move_on_simplex_tree, typeST, list_of_tested_
typeST st_empty;
// Check st has been emptied by the move
BOOST_CHECK(st == st_empty);
- BOOST_CHECK(st.filtration() == 0);
BOOST_CHECK(st.dimension() == -1);
BOOST_CHECK(st.num_simplices() == 0);
BOOST_CHECK(st.num_vertices() == (size_t)0);
@@ -882,271 +863,3 @@ BOOST_AUTO_TEST_CASE(make_filtration_non_decreasing) {
BOOST_CHECK(st == st_other_copy);
}
-
-struct MyOptions : Simplex_tree_options_full_featured {
- // Not doing persistence, so we don't need those
- static const bool store_key = false;
- static const bool store_filtration = false;
- // I have few vertices
- typedef short Vertex_handle;
-};
-
-BOOST_AUTO_TEST_CASE(remove_maximal_simplex) {
- std::cout << "********************************************************************" << std::endl;
- std::cout << "REMOVE MAXIMAL SIMPLEX" << std::endl;
-
-
- typedef Simplex_tree<MyOptions> miniST;
- miniST st;
-
- // FIXME
- st.set_dimension(3);
-
- st.insert_simplex_and_subfaces({0, 1, 6, 7});
- st.insert_simplex_and_subfaces({3, 4, 5});
-
- // Constructs a copy at this state for further test purpose
- miniST st_pruned = st;
-
- st.insert_simplex_and_subfaces({3, 0});
- st.insert_simplex_and_subfaces({2, 1, 0});
-
- // Constructs a copy at this state for further test purpose
- miniST st_complete = st;
- // st_complete and st:
- // 1 6
- // o---o
- // /X\7/
- // o---o---o---o
- // 2 0 3\X/4
- // o
- // 5
- // st_pruned:
- // 1 6
- // o---o
- // \7/
- // o o---o
- // 0 3\X/4
- // o
- // 5
-
-#ifdef GUDHI_DEBUG
- std::cout << "Check exception throw in debug mode" << std::endl;
- // throw excpt because sh has children
- BOOST_CHECK_THROW (st.remove_maximal_simplex(st.find({0, 1, 6})), std::invalid_argument);
- BOOST_CHECK_THROW (st.remove_maximal_simplex(st.find({3})), std::invalid_argument);
- BOOST_CHECK(st == st_complete);
-#endif
-
- st.remove_maximal_simplex(st.find({0, 2}));
- st.remove_maximal_simplex(st.find({0, 1, 2}));
- st.remove_maximal_simplex(st.find({1, 2}));
- st.remove_maximal_simplex(st.find({2}));
- st.remove_maximal_simplex(st.find({0, 3}));
-
- BOOST_CHECK(st == st_pruned);
- // Remove all, but as the simplex tree is not storing filtration, there is no modification
- st.prune_above_filtration(0.0);
- BOOST_CHECK(st == st_pruned);
-
- miniST st_wo_seven;
- // FIXME
- st_wo_seven.set_dimension(3);
-
- st_wo_seven.insert_simplex_and_subfaces({0, 1, 6});
- st_wo_seven.insert_simplex_and_subfaces({3, 4, 5});
- // st_wo_seven:
- // 1 6
- // o---o
- // \X/
- // o o---o
- // 0 3\X/4
- // o
- // 5
-
- // Remove all 7 to test the both remove_maximal_simplex cases (when _members is empty or not)
- st.remove_maximal_simplex(st.find({0, 1, 6, 7}));
- st.remove_maximal_simplex(st.find({0, 1, 7}));
- st.remove_maximal_simplex(st.find({0, 6, 7}));
- st.remove_maximal_simplex(st.find({0, 7}));
- st.remove_maximal_simplex(st.find({1, 6, 7}));
- st.remove_maximal_simplex(st.find({1, 7}));
- st.remove_maximal_simplex(st.find({6, 7}));
- st.remove_maximal_simplex(st.find({7}));
-
- BOOST_CHECK(st == st_wo_seven);
-}
-
-BOOST_AUTO_TEST_CASE(prune_above_filtration) {
- std::cout << "********************************************************************" << std::endl;
- std::cout << "PRUNE ABOVE FILTRATION" << std::endl;
- typedef Simplex_tree<> typeST;
- typeST st;
-
- // FIXME
- st.set_dimension(3);
-
- st.insert_simplex_and_subfaces({0, 1, 6, 7}, 1.0);
- st.insert_simplex_and_subfaces({3, 4, 5}, 2.0);
-
- // Constructs a copy at this state for further test purpose
- typeST st_pruned = st;
- st_pruned.initialize_filtration(); // reset
-
- st.insert_simplex_and_subfaces({3, 0}, 3.0);
- st.insert_simplex_and_subfaces({2, 1, 0}, 4.0);
-
- // Constructs a copy at this state for further test purpose
- typeST st_complete = st;
- // st_complete and st:
- // 1 6
- // o---o
- // /X\7/
- // o---o---o---o
- // 2 0 3\X/4
- // o
- // 5
- // st_pruned:
- // 1 6
- // o---o
- // \7/
- // o o---o
- // 0 3\X/4
- // o
- // 5
-
- bool simplex_is_changed = false;
- // Check the no action cases
- // greater than initial filtration value
- simplex_is_changed = st.prune_above_filtration(10.0);
- if (simplex_is_changed)
- st.initialize_filtration();
- BOOST_CHECK(st == st_complete);
- BOOST_CHECK(!simplex_is_changed);
- // equal to initial filtration value
- simplex_is_changed = st.prune_above_filtration(6.0);
- if (simplex_is_changed)
- st.initialize_filtration();
- BOOST_CHECK(st == st_complete);
- BOOST_CHECK(!simplex_is_changed);
- // lower than initial filtration value, but still greater than the maximum filtration value
- simplex_is_changed = st.prune_above_filtration(5.0);
- if (simplex_is_changed)
- st.initialize_filtration();
- BOOST_CHECK(st == st_complete);
- BOOST_CHECK(!simplex_is_changed);
-
- // Display the Simplex_tree
- std::cout << "The complex contains " << st.num_simplices() << " simplices";
- std::cout << " - dimension " << st.dimension() << std::endl;
- std::cout << "Iterator on Simplices in the filtration, with [filtration value]:" << std::endl;
- for (auto f_simplex : st.filtration_simplex_range()) {
- std::cout << " " << "[" << st.filtration(f_simplex) << "] ";
- for (auto vertex : st.simplex_vertex_range(f_simplex)) {
- std::cout << (int) vertex << " ";
- }
- std::cout << std::endl;
- }
-
- // Check the pruned cases
- simplex_is_changed = st.prune_above_filtration(2.5);
- if (simplex_is_changed)
- st.initialize_filtration();
- BOOST_CHECK(st == st_pruned);
- BOOST_CHECK(simplex_is_changed);
-
- // Display the Simplex_tree
- std::cout << "The complex pruned at 2.5 contains " << st.num_simplices() << " simplices";
- std::cout << " - dimension " << st.dimension() << std::endl;
-
- simplex_is_changed = st.prune_above_filtration(2.0);
- if (simplex_is_changed)
- st.initialize_filtration();
-
- std::cout << "The complex pruned at 2.0 contains " << st.num_simplices() << " simplices";
- std::cout << " - dimension " << st.dimension() << std::endl;
-
- BOOST_CHECK(st == st_pruned);
- BOOST_CHECK(!simplex_is_changed);
-
- typeST st_empty;
- // FIXME
- st_empty.set_dimension(3);
- simplex_is_changed = st.prune_above_filtration(0.0);
- if (simplex_is_changed)
- st.initialize_filtration();
-
- // Display the Simplex_tree
- std::cout << "The complex pruned at 0.0 contains " << st.num_simplices() << " simplices";
- std::cout << " - dimension " << st.dimension() << std::endl;
-
- BOOST_CHECK(st == st_empty);
- BOOST_CHECK(simplex_is_changed);
-
- // Test case to the limit
- simplex_is_changed = st.prune_above_filtration(-1.0);
- if (simplex_is_changed)
- st.initialize_filtration();
- BOOST_CHECK(st == st_empty);
- BOOST_CHECK(!simplex_is_changed);
-}
-
-BOOST_AUTO_TEST_CASE(mini_prune_above_filtration) {
- std::cout << "********************************************************************" << std::endl;
- std::cout << "MINI PRUNE ABOVE FILTRATION" << std::endl;
- typedef Simplex_tree<MyOptions> typeST;
- typeST st;
-
- // FIXME
- st.set_dimension(3);
-
- st.insert_simplex_and_subfaces({0, 1, 6, 7});
- st.insert_simplex_and_subfaces({3, 4, 5});
- st.insert_simplex_and_subfaces({3, 0});
- st.insert_simplex_and_subfaces({2, 1, 0});
-
- // st:
- // 1 6
- // o---o
- // /X\7/
- // o---o---o---o
- // 2 0 3\X/4
- // o
- // 5
-
- st.initialize_filtration();
-
- // Display the Simplex_tree
- std::cout << "The complex contains " << st.num_simplices() << " simplices" << std::endl;
- BOOST_CHECK(st.num_simplices() == 27);
-
- // Test case to the limit - With these options, there is no filtration, which means filtration is 0
- bool simplex_is_changed = st.prune_above_filtration(1.0);
- if (simplex_is_changed)
- st.initialize_filtration();
- // Display the Simplex_tree
- std::cout << "The complex pruned at 1.0 contains " << st.num_simplices() << " simplices" << std::endl;
- BOOST_CHECK(!simplex_is_changed);
- BOOST_CHECK(st.num_simplices() == 27);
-
- simplex_is_changed = st.prune_above_filtration(0.0);
- if (simplex_is_changed)
- st.initialize_filtration();
- // Display the Simplex_tree
- std::cout << "The complex pruned at 0.0 contains " << st.num_simplices() << " simplices" << std::endl;
- BOOST_CHECK(!simplex_is_changed);
- BOOST_CHECK(st.num_simplices() == 27);
-
- // Test case to the limit
- simplex_is_changed = st.prune_above_filtration(-1.0);
- if (simplex_is_changed)
- st.initialize_filtration();
- // Display the Simplex_tree
- std::cout << "The complex pruned at -1.0 contains " << st.num_simplices() << " simplices" << std::endl;
- BOOST_CHECK(simplex_is_changed);
- BOOST_CHECK(st.num_simplices() == 0);
-
- // Display the Simplex_tree
- std::cout << "The complex contains " << st.num_simplices() << " simplices" << std::endl;
-
-} \ No newline at end of file
diff --git a/src/Spatial_searching/doc/Intro_spatial_searching.h b/src/Spatial_searching/doc/Intro_spatial_searching.h
index 9a3c1b65..1ee5e92e 100644
--- a/src/Spatial_searching/doc/Intro_spatial_searching.h
+++ b/src/Spatial_searching/doc/Intro_spatial_searching.h
@@ -46,7 +46,7 @@ namespace spatial_searching {
*
* \section spatial_searching_examples Example
*
- * This example generates 500 random points, then performs radius search, and queries for nearest and farthest points using different methods.
+ * This example generates 500 random points, then performs all-near-neighbors searches, and queries for nearest and furthest neighbors using different methods.
*
* \include Spatial_searching/example_spatial_searching.cpp
*
diff --git a/src/Spatial_searching/example/example_spatial_searching.cpp b/src/Spatial_searching/example/example_spatial_searching.cpp
index 9e6a8f32..034ad24a 100644
--- a/src/Spatial_searching/example/example_spatial_searching.cpp
+++ b/src/Spatial_searching/example/example_spatial_searching.cpp
@@ -24,34 +24,34 @@ int main(void) {
// 10-nearest neighbor query
std::cout << "10 nearest neighbors from points[20]:\n";
- auto knn_range = points_ds.query_k_nearest_neighbors(points[20], 10, true);
+ auto knn_range = points_ds.k_nearest_neighbors(points[20], 10, true);
for (auto const& nghb : knn_range)
std::cout << nghb.first << " (sq. dist. = " << nghb.second << ")\n";
// Incremental nearest neighbor query
std::cout << "Incremental nearest neighbors:\n";
- auto inn_range = points_ds.query_incremental_nearest_neighbors(points[45]);
+ auto inn_range = points_ds.incremental_nearest_neighbors(points[45]);
// Get the neighbors in distance order until we hit the first point
for (auto ins_iterator = inn_range.begin(); ins_iterator->first != 0; ++ins_iterator)
std::cout << ins_iterator->first << " (sq. dist. = " << ins_iterator->second << ")\n";
- // 10-farthest neighbor query
- std::cout << "10 farthest neighbors from points[20]:\n";
- auto kfn_range = points_ds.query_k_farthest_neighbors(points[20], 10, true);
+ // 10-furthest neighbor query
+ std::cout << "10 furthest neighbors from points[20]:\n";
+ auto kfn_range = points_ds.k_furthest_neighbors(points[20], 10, true);
for (auto const& nghb : kfn_range)
std::cout << nghb.first << " (sq. dist. = " << nghb.second << ")\n";
- // Incremental farthest neighbor query
- std::cout << "Incremental farthest neighbors:\n";
- auto ifn_range = points_ds.query_incremental_farthest_neighbors(points[45]);
+ // Incremental furthest neighbor query
+ std::cout << "Incremental furthest neighbors:\n";
+ auto ifn_range = points_ds.incremental_furthest_neighbors(points[45]);
// Get the neighbors in distance reverse order until we hit the first point
for (auto ifs_iterator = ifn_range.begin(); ifs_iterator->first != 0; ++ifs_iterator)
std::cout << ifs_iterator->first << " (sq. dist. = " << ifs_iterator->second << ")\n";
- // Radius search
- std::cout << "Radius search:\n";
+ // All-near-neighbors search
+ std::cout << "All-near-neighbors search:\n";
std::vector<std::size_t> rs_result;
- points_ds.radius_search(points[45], 0.5, std::back_inserter(rs_result));
+ points_ds.all_near_neighbors(points[45], 0.5, std::back_inserter(rs_result));
K k;
for (auto const& p_idx : rs_result)
std::cout << p_idx << " (sq. dist. = " << k.squared_distance_d_object()(points[p_idx], points[45]) << ")\n";
diff --git a/src/Spatial_searching/include/gudhi/Kd_tree_search.h b/src/Spatial_searching/include/gudhi/Kd_tree_search.h
index f13a98f7..ef428002 100644
--- a/src/Spatial_searching/include/gudhi/Kd_tree_search.h
+++ b/src/Spatial_searching/include/gudhi/Kd_tree_search.h
@@ -42,19 +42,19 @@ namespace spatial_searching {
/**
* \class Kd_tree_search Kd_tree_search.h gudhi/Kd_tree_search.h
- * \brief Spatial tree data structure to perform (approximate) nearest and farthest neighbor search.
+ * \brief Spatial tree data structure to perform (approximate) nearest and furthest neighbor search.
*
* \ingroup spatial_searching
*
* \details
* The class Kd_tree_search is a tree-based data structure, based on
* <a target="_blank" href="http://doc.cgal.org/latest/Spatial_searching/index.html">CGAL dD spatial searching data structures</a>.
- * It provides a simplified API to perform (approximate) nearest and farthest neighbor searches. Contrary to CGAL default behavior, the tree
+ * It provides a simplified API to perform (approximate) nearest and furthest neighbor searches. Contrary to CGAL default behavior, the tree
* does not store the points themselves, but stores indices.
*
- * There are two types of queries: the <i>k-nearest or k-farthest neighbor query</i>, where <i>k</i> is fixed and the <i>k</i> nearest
- * or farthest points are computed right away,
- * and the <i>incremental nearest or farthest neighbor query</i>, where no number of neighbors is provided during the call, as the
+ * There are two types of queries: the <i>k-nearest or k-furthest neighbor query</i>, where <i>k</i> is fixed and the <i>k</i> nearest
+ * or furthest points are computed right away,
+ * and the <i>incremental nearest or furthest neighbor query</i>, where no number of neighbors is provided during the call, as the
* neighbors will be computed incrementally when the iterator on the range is incremented.
*
* \tparam Search_traits must be a model of the <a target="_blank"
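As a reading aid for the renames above, here is a minimal sketch of the new Kd_tree_search API, modelled on example_spatial_searching.cpp and test_Kd_tree_search.cpp from this patch; the kernel and typedefs are assumptions matching those files rather than a prescribed setup.

```
#include <gudhi/Kd_tree_search.h>

#include <CGAL/Epick_d.h>
#include <CGAL/Random.h>

#include <cstddef>
#include <iostream>
#include <iterator>
#include <vector>

int main() {
  typedef CGAL::Epick_d<CGAL::Dimension_tag<4> > K;
  typedef K::Point_d Point;
  typedef std::vector<Point> Points;
  typedef Gudhi::spatial_searching::Kd_tree_search<K, Points> Points_ds;

  // 500 random points in [-1, 1]^4, as in the example.
  CGAL::Random rd;
  Points points;
  for (int i = 0; i < 500; ++i)
    points.push_back(Point(rd.get_double(-1., 1.), rd.get_double(-1., 1.),
                           rd.get_double(-1., 1.), rd.get_double(-1., 1.)));
  Points_ds points_ds(points);

  // k-nearest / k-furthest queries return (index, squared distance) pairs.
  for (auto const& nghb : points_ds.k_nearest_neighbors(points[20], 10, true))
    std::cout << nghb.first << " (sq. dist. = " << nghb.second << ")\n";
  for (auto const& nghb : points_ds.k_furthest_neighbors(points[20], 10, true))
    std::cout << nghb.first << " (sq. dist. = " << nghb.second << ")\n";

  // Incremental queries compute neighbors lazily as the iterator advances.
  auto inn_range = points_ds.incremental_nearest_neighbors(points[45]);
  std::cout << "closest to points[45]: " << inn_range.begin()->first << "\n";

  // all_near_neighbors (formerly radius_search) writes the indices of all
  // points lying within the given radius to an output iterator.
  std::vector<std::size_t> near;
  points_ds.all_near_neighbors(points[45], 0.5, std::back_inserter(near));
  std::cout << near.size() << " points found near points[45]\n";
  return 0;
}
```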
@@ -96,7 +96,7 @@ class Kd_tree_search {
typedef CGAL::Orthogonal_k_neighbor_search<STraits> K_neighbor_search;
typedef typename K_neighbor_search::Tree Tree;
typedef typename K_neighbor_search::Distance Distance;
- /// \brief The range returned by a k-nearest or k-farthest neighbor search.
+ /// \brief The range returned by a k-nearest or k-furthest neighbor search.
/// Its value type is `std::pair<std::size_t, FT>` where `first` is the index
/// of a point P and `second` is the squared distance between P and the query point.
typedef K_neighbor_search KNS_range;
@@ -104,7 +104,7 @@ class Kd_tree_search {
typedef CGAL::Orthogonal_incremental_neighbor_search<
STraits, Distance, CGAL::Sliding_midpoint<STraits>, Tree>
Incremental_neighbor_search;
- /// \brief The range returned by an incremental nearest or farthest neighbor search.
+ /// \brief The range returned by an incremental nearest or furthest neighbor search.
/// Its value type is `std::pair<std::size_t, FT>` where `first` is the index
/// of a point P and `second` is the squared distance between P and the query point.
typedef Incremental_neighbor_search INS_range;
@@ -171,7 +171,7 @@ class Kd_tree_search {
/// @param[in] sorted Indicates if the computed sequence of k-nearest neighbors needs to be sorted.
/// @param[in] eps Approximation factor.
/// @return A range (whose `value_type` is `std::size_t`) containing the k-nearest neighbors.
- KNS_range query_k_nearest_neighbors(
+ KNS_range k_nearest_neighbors(
Point const& p,
unsigned int k,
bool sorted = true,
@@ -193,11 +193,11 @@ class Kd_tree_search {
/// \brief Search incrementally for the nearest neighbors from a query point.
/// @param[in] p The query point.
/// @param[in] eps Approximation factor.
- /// @return A range (whose `value_type` is `std::size_t`) containing the
+ /// @return A range (whose `value_type` is `std::size_t`) containing the
/// neighbors sorted by their distance to p.
/// All the neighbors are not computed by this function, but they will be
/// computed incrementally when the iterator on the range is incremented.
- INS_range query_incremental_nearest_neighbors(Point const& p, FT eps = FT(0)) const {
+ INS_range incremental_nearest_neighbors(Point const& p, FT eps = FT(0)) const {
// Initialize the search structure, and search all N points
// Note that we need to pass the Distance explicitly since it needs to
// know the property map
@@ -211,13 +211,13 @@ class Kd_tree_search {
return search;
}
- /// \brief Search for the k-farthest points from a query point.
+ /// \brief Search for the k-furthest points from a query point.
/// @param[in] p The query point.
- /// @param[in] k Number of farthest points to search.
- /// @param[in] sorted Indicates if the computed sequence of k-farthest neighbors needs to be sorted.
+ /// @param[in] k Number of furthest points to search.
+ /// @param[in] sorted Indicates if the computed sequence of k-furthest neighbors needs to be sorted.
/// @param[in] eps Approximation factor.
- /// @return A range (whose `value_type` is `std::size_t`) containing the k-farthest neighbors.
- KNS_range query_k_farthest_neighbors(
+ /// @return A range (whose `value_type` is `std::size_t`) containing the k-furthest neighbors.
+ KNS_range k_furthest_neighbors(
Point const& p,
unsigned int k,
bool sorted = true,
@@ -236,14 +236,14 @@ class Kd_tree_search {
return search;
}
- /// \brief Search incrementally for the farthest neighbors from a query point.
+ /// \brief Search incrementally for the furthest neighbors from a query point.
/// @param[in] p The query point.
/// @param[in] eps Approximation factor.
- /// @return A range (whose `value_type` is `std::size_t`)
+ /// @return A range (whose `value_type` is `std::size_t`)
/// containing the neighbors sorted by their distance to p.
/// All the neighbors are not computed by this function, but they will be
/// computed incrementally when the iterator on the range is incremented.
- INS_range query_incremental_farthest_neighbors(Point const& p, FT eps = FT(0)) const {
+ INS_range incremental_furthest_neighbors(Point const& p, FT eps = FT(0)) const {
// Initialize the search structure, and search all N points
// Note that we need to pass the Distance explicitly since it needs to
// know the property map
@@ -264,12 +264,10 @@ class Kd_tree_search {
/// Note: `it` is used this way: `*it++ = each_point`.
/// @param[in] eps Approximation factor.
template <typename OutputIterator>
- void radius_search(
- Point const& p,
- FT radius,
- OutputIterator it,
- FT eps = FT(0)) const {
-
+ void all_near_neighbors(Point const& p,
+ FT radius,
+ OutputIterator it,
+ FT eps = FT(0)) const {
m_tree.search(it, Fuzzy_sphere(p, radius, eps, m_tree.traits()));
}
diff --git a/src/Spatial_searching/test/test_Kd_tree_search.cpp b/src/Spatial_searching/test/test_Kd_tree_search.cpp
index f79114bc..8a8334c3 100644
--- a/src/Spatial_searching/test/test_Kd_tree_search.cpp
+++ b/src/Spatial_searching/test/test_Kd_tree_search.cpp
@@ -48,12 +48,12 @@ BOOST_AUTO_TEST_CASE(test_Kd_tree_search) {
Points_ds points_ds(points);
- // Test query_k_nearest_neighbors
+ // Test k_nearest_neighbors
std::size_t closest_pt_index =
- points_ds.query_k_nearest_neighbors(points[10], 1, false).begin()->first;
+ points_ds.k_nearest_neighbors(points[10], 1, false).begin()->first;
BOOST_CHECK(closest_pt_index == 10);
- auto kns_range = points_ds.query_k_nearest_neighbors(points[20], 10, true);
+ auto kns_range = points_ds.k_nearest_neighbors(points[20], 10, true);
std::vector<std::size_t> knn_result;
FT last_dist = -1.;
@@ -63,12 +63,12 @@ BOOST_AUTO_TEST_CASE(test_Kd_tree_search) {
last_dist = nghb.second;
}
- // Test query_incremental_nearest_neighbors
+ // Test incremental_nearest_neighbors
closest_pt_index =
- points_ds.query_incremental_nearest_neighbors(points[10]).begin()->first;
+ points_ds.incremental_nearest_neighbors(points[10]).begin()->first;
BOOST_CHECK(closest_pt_index == 10);
- auto inn_range = points_ds.query_incremental_nearest_neighbors(points[20]);
+ auto inn_range = points_ds.incremental_nearest_neighbors(points[20]);
std::vector<std::size_t> inn_result;
last_dist = -1.;
@@ -83,8 +83,8 @@ BOOST_AUTO_TEST_CASE(test_Kd_tree_search) {
// Same result for KNN and INN?
BOOST_CHECK(knn_result == inn_result);
- // Test query_k_farthest_neighbors
- auto kfn_range = points_ds.query_k_farthest_neighbors(points[20], 10, true);
+ // Test k_furthest_neighbors
+ auto kfn_range = points_ds.k_furthest_neighbors(points[20], 10, true);
std::vector<std::size_t> kfn_result;
last_dist = kfn_range.begin()->second;
@@ -94,8 +94,8 @@ BOOST_AUTO_TEST_CASE(test_Kd_tree_search) {
last_dist = nghb.second;
}
- // Test query_k_farthest_neighbors
- auto ifn_range = points_ds.query_incremental_farthest_neighbors(points[20]);
+ // Test incremental_furthest_neighbors
+ auto ifn_range = points_ds.incremental_furthest_neighbors(points[20]);
std::vector<std::size_t> ifn_result;
last_dist = ifn_range.begin()->second;
@@ -110,10 +110,10 @@ BOOST_AUTO_TEST_CASE(test_Kd_tree_search) {
// Same result for KFN and IFN?
BOOST_CHECK(kfn_result == ifn_result);
- // Test radius search
+ // Test all_near_neighbors
Point rs_q(rd.get_double(-1., 1), rd.get_double(-1., 1), rd.get_double(-1., 1), rd.get_double(-1., 1));
std::vector<std::size_t> rs_result;
- points_ds.radius_search(rs_q, 0.5, std::back_inserter(rs_result));
+ points_ds.all_near_neighbors(rs_q, 0.5, std::back_inserter(rs_result));
K k;
for (auto const& p_idx : rs_result)
BOOST_CHECK(k.squared_distance_d_object()(points[p_idx], rs_q) <= 0.5);
diff --git a/src/Subsampling/include/gudhi/sparsify_point_set.h b/src/Subsampling/include/gudhi/sparsify_point_set.h
index 507f8c79..7d3b97fb 100644
--- a/src/Subsampling/include/gudhi/sparsify_point_set.h
+++ b/src/Subsampling/include/gudhi/sparsify_point_set.h
@@ -83,7 +83,7 @@ sparsify_point_set(
*output_it++ = *it_pt;
- auto ins_range = points_ds.query_incremental_nearest_neighbors(*it_pt);
+ auto ins_range = points_ds.incremental_nearest_neighbors(*it_pt);
// If another point Q is closer than min_squared_dist, mark Q to be dropped
for (auto const& neighbor : ins_range) {
diff --git a/src/Tangential_complex/include/gudhi/Tangential_complex.h b/src/Tangential_complex/include/gudhi/Tangential_complex.h
index 9fa7c825..6f061922 100644
--- a/src/Tangential_complex/include/gudhi/Tangential_complex.h
+++ b/src/Tangential_complex/include/gudhi/Tangential_complex.h
@@ -155,7 +155,7 @@ class Tangential_complex {
>::type Triangulation;
typedef typename Triangulation::Geom_traits Tr_traits;
typedef typename Triangulation::Weighted_point Tr_point;
- typedef typename Triangulation::Bare_point Tr_bare_point;
+ typedef typename Tr_traits::Base::Point_d Tr_bare_point;
typedef typename Triangulation::Vertex_handle Tr_vertex_handle;
typedef typename Triangulation::Full_cell_handle Tr_full_cell_handle;
typedef typename Tr_traits::Vector_d Tr_vector;
@@ -1093,8 +1093,8 @@ class Tangential_complex {
std::size_t num_inserted_points = 1;
#endif
// const int NUM_NEIGHBORS = 150;
- // KNS_range ins_range = m_points_ds.query_k_nearest_neighbors(center_pt, NUM_NEIGHBORS);
- INS_range ins_range = m_points_ds.query_incremental_nearest_neighbors(center_pt);
+ // KNS_range ins_range = m_points_ds.k_nearest_neighbors(center_pt, NUM_NEIGHBORS);
+ INS_range ins_range = m_points_ds.incremental_nearest_neighbors(center_pt);
// While building the local triangulation, we keep the radius
// of the sphere "star sphere" centered at "center_vertex"
@@ -1203,7 +1203,7 @@ class Tangential_complex {
Point center_point = compute_perturbed_point(i);
// Among updated point, what is the closer from our center point?
std::size_t closest_pt_index =
- updated_pts_ds.query_k_nearest_neighbors(center_point, 1, false).begin()->first;
+ updated_pts_ds.k_nearest_neighbors(center_point, 1, false).begin()->first;
typename K::Construct_weighted_point_d k_constr_wp =
m_k.construct_weighted_point_d_object();
@@ -1315,11 +1315,10 @@ class Tangential_complex {
m_k.compute_coordinate_d_object();
#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM
- KNS_range kns_range = m_points_ds_for_tse.query_k_nearest_neighbors(
- p, num_pts_for_pca, false);
+ KNS_range kns_range = m_points_ds_for_tse.k_nearest_neighbors(p, num_pts_for_pca, false);
const Points &points_for_pca = m_points_for_tse;
#else
- KNS_range kns_range = m_points_ds.query_k_nearest_neighbors(p, num_pts_for_pca, false);
+ KNS_range kns_range = m_points_ds.k_nearest_neighbors(p, num_pts_for_pca, false);
const Points &points_for_pca = m_points;
#endif
@@ -1413,11 +1412,10 @@ class Tangential_complex {
const Point &p = m_points[*it_index];
#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM
- KNS_range kns_range = m_points_ds_for_tse.query_k_nearest_neighbors(
- p, num_pts_for_pca, false);
+ KNS_range kns_range = m_points_ds_for_tse.k_nearest_neighbors(p, num_pts_for_pca, false);
const Points &points_for_pca = m_points_for_tse;
#else
- KNS_range kns_range = m_points_ds.query_k_nearest_neighbors(p, num_pts_for_pca, false);
+ KNS_range kns_range = m_points_ds.k_nearest_neighbors(p, num_pts_for_pca, false);
const Points &points_for_pca = m_points;
#endif
diff --git a/src/Witness_complex/doc/Witness_complex_doc.h b/src/Witness_complex/doc/Witness_complex_doc.h
index 171a185f..5d5c0735 100644
--- a/src/Witness_complex/doc/Witness_complex_doc.h
+++ b/src/Witness_complex/doc/Witness_complex_doc.h
@@ -90,8 +90,9 @@ int main(int argc, char * const argv[]) {
Gudhi::Points_off_reader<Point_d> off_reader(file_name);
point_vector = Point_vector(off_reader.get_point_cloud());
- // Choose landmarks
- Gudhi::subsampling::pick_n_random_points(point_vector, nbL, std::back_inserter(landmarks));
+ // Choose landmarks (one can choose either of the two methods below)
+ // Gudhi::subsampling::pick_n_random_points(point_vector, nbL, std::back_inserter(landmarks));
+ Gudhi::subsampling::choose_n_farthest_points(K(), point_vector, nbL, Gudhi::subsampling::random_starting_point, std::back_inserter(landmarks));
// Compute witness complex
Witness_complex witness_complex(landmarks,
@@ -107,7 +108,14 @@ int main(int argc, char * const argv[]) {
Here is an example of constructing a strong witness complex filtration and computing persistence on it:
- \include Witness_complex/example_strong_witness_persistence.cpp
+ \include Witness_complex/strong_witness_persistence.cpp
+
+ \section witnessexample3 Example 3: Computing relaxed witness complex persistence from a distance matrix
+
+ In this example, we compute the persistence of a relaxed witness complex built from a given table of the closest landmarks to each witness.
+ Each landmark is given as a pair (index, distance).
+
+ \include Witness_complex/example_nearest_landmark_table.cpp
\copyright GNU General Public License v3.
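To make Example 3 concrete: the "matrix of closest landmarks" is just a range of rows, one per witness, whose entries are (landmark index, distance) pairs listed by increasing distance. The sketch below only builds such a table by hand; the precise range types accepted by the witness complex classes are those documented in the reference manual, so treat the typedefs as an assumption.

```
#include <cstddef>
#include <utility>
#include <vector>

// One row per witness; each entry is a (landmark index, distance) pair,
// listed by increasing distance (hypothetical typedefs for illustration).
typedef std::pair<std::size_t, double> Id_distance_pair;
typedef std::vector<Id_distance_pair> Witness_row;
typedef std::vector<Witness_row> Nearest_landmark_table;

int main() {
  Nearest_landmark_table nlt;
  // Witness 0: landmark 2 at distance 0, landmark 0 at 0.1, landmark 1 at 0.3.
  nlt.push_back({{2, 0.0}, {0, 0.1}, {1, 0.3}});
  // Witness 1: landmark 0 at distance 0, landmark 2 at 0.2, landmark 1 at 0.4.
  nlt.push_back({{0, 0.0}, {2, 0.2}, {1, 0.4}});
  // A table of this shape is what example_nearest_landmark_table.cpp feeds to
  // the witness complex instead of Euclidean point coordinates.
  return 0;
}
```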
diff --git a/src/Witness_complex/example/CMakeLists.txt b/src/Witness_complex/example/CMakeLists.txt
index cbc53902..a8231392 100644
--- a/src/Witness_complex/example/CMakeLists.txt
+++ b/src/Witness_complex/example/CMakeLists.txt
@@ -13,39 +13,23 @@ install(TARGETS Witness_complex_example_nearest_landmark_table DESTINATION bin)
# CGAL and Eigen3 are required for Euclidean version of Witness
if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0)
add_executable( Witness_complex_example_off example_witness_complex_off.cpp )
- add_executable( Witness_complex_example_strong_off example_strong_witness_complex_off.cpp )
add_executable ( Witness_complex_example_sphere example_witness_complex_sphere.cpp )
-
- add_executable ( Witness_complex_example_witness_persistence example_witness_complex_persistence.cpp )
- target_link_libraries(Witness_complex_example_witness_persistence ${Boost_PROGRAM_OPTIONS_LIBRARY})
-
- add_executable ( Witness_complex_example_strong_witness_persistence example_strong_witness_persistence.cpp )
- target_link_libraries(Witness_complex_example_strong_witness_persistence ${Boost_PROGRAM_OPTIONS_LIBRARY})
-
- if (TBB_FOUND)
- target_link_libraries(Witness_complex_example_witness_persistence ${TBB_LIBRARIES})
- target_link_libraries(Witness_complex_example_strong_witness_persistence ${TBB_LIBRARIES})
- endif()
+
+ add_executable( Witness_complex_example_strong_off example_strong_witness_complex_off.cpp )
+ target_link_libraries(Witness_complex_example_strong_off)
add_test(NAME Witness_complex_example_off_test_torus
COMMAND $<TARGET_FILE:Witness_complex_example_off>
"${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" "20" "1.0" "3")
+ add_test(NAME Witness_complex_example_test_sphere_10
+ COMMAND $<TARGET_FILE:Witness_complex_example_sphere> "10")
add_test(NAME Witness_complex_example_strong_off_test_torus
COMMAND $<TARGET_FILE:Witness_complex_example_strong_off>
"${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" "20" "1.0" "3")
- add_test(NAME Witness_complex_example_test_sphere_10
- COMMAND $<TARGET_FILE:Witness_complex_example_sphere> "10")
- add_test(NAME Witness_complex_example_test_torus_persistence
- COMMAND $<TARGET_FILE:Witness_complex_example_witness_persistence>
- "${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" "-l" "20" "-a" "0.5")
- add_test(NAME Witness_complex_example_strong_test_torus_persistence
- COMMAND $<TARGET_FILE:Witness_complex_example_strong_witness_persistence>
- "${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" "-l" "20" "-a" "0.5")
-
+
install(TARGETS Witness_complex_example_off DESTINATION bin)
- install(TARGETS Witness_complex_example_strong_off DESTINATION bin)
install(TARGETS Witness_complex_example_sphere DESTINATION bin)
- install(TARGETS Witness_complex_example_witness_persistence DESTINATION bin)
- install(TARGETS Witness_complex_example_strong_witness_persistence DESTINATION bin)
+ install(TARGETS Witness_complex_example_strong_off DESTINATION bin)
+
endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0)
diff --git a/src/Witness_complex/example/example_strong_witness_complex_off.cpp b/src/Witness_complex/example/example_strong_witness_complex_off.cpp
index 0ee9ee90..bc069654 100644
--- a/src/Witness_complex/example/example_strong_witness_complex_off.cpp
+++ b/src/Witness_complex/example/example_strong_witness_complex_off.cpp
@@ -23,6 +23,7 @@
#include <gudhi/Simplex_tree.h>
#include <gudhi/Euclidean_strong_witness_complex.h>
#include <gudhi/pick_n_random_points.h>
+#include <gudhi/choose_n_farthest_points.h>
#include <gudhi/Points_off_io.h>
#include <CGAL/Epick_d.h>
@@ -63,9 +64,10 @@ int main(int argc, char * const argv[]) {
std::cout << "Successfully read " << point_vector.size() << " points.\n";
std::cout << "Ambient dimension is " << point_vector[0].dimension() << ".\n";
- // Choose landmarks
- Gudhi::subsampling::pick_n_random_points(point_vector, nbL, std::back_inserter(landmarks));
-
+ // Choose landmarks (uncomment one of the following two lines)
+ // Gudhi::subsampling::pick_n_random_points(point_vector, nbL, std::back_inserter(landmarks));
+ Gudhi::subsampling::choose_n_farthest_points(K(), point_vector, nbL, Gudhi::subsampling::random_starting_point, std::back_inserter(landmarks));
+
// Compute witness complex
start = clock();
Witness_complex witness_complex(landmarks,
diff --git a/src/Witness_complex/example/example_witness_complex_off.cpp b/src/Witness_complex/example/example_witness_complex_off.cpp
index b36dac0d..be11c955 100644
--- a/src/Witness_complex/example/example_witness_complex_off.cpp
+++ b/src/Witness_complex/example/example_witness_complex_off.cpp
@@ -4,6 +4,7 @@
#include <gudhi/Simplex_tree.h>
#include <gudhi/Euclidean_witness_complex.h>
#include <gudhi/pick_n_random_points.h>
+#include <gudhi/choose_n_farthest_points.h>
#include <gudhi/Points_off_io.h>
#include <CGAL/Epick_d.h>
@@ -44,8 +45,9 @@ int main(int argc, char * const argv[]) {
std::cout << "Successfully read " << point_vector.size() << " points.\n";
std::cout << "Ambient dimension is " << point_vector[0].dimension() << ".\n";
- // Choose landmarks
- Gudhi::subsampling::pick_n_random_points(point_vector, nbL, std::back_inserter(landmarks));
+ // Choose landmarks (uncomment one of the following two lines)
+ // Gudhi::subsampling::pick_n_random_points(point_vector, nbL, std::back_inserter(landmarks));
+ Gudhi::subsampling::choose_n_farthest_points(K(), point_vector, nbL, Gudhi::subsampling::random_starting_point, std::back_inserter(landmarks));
// Compute witness complex
start = clock();
diff --git a/src/Witness_complex/example/example_witness_complex_sphere.cpp b/src/Witness_complex/example/example_witness_complex_sphere.cpp
index 124fd99b..a66da3f9 100644
--- a/src/Witness_complex/example/example_witness_complex_sphere.cpp
+++ b/src/Witness_complex/example/example_witness_complex_sphere.cpp
@@ -25,6 +25,7 @@
#include <gudhi/Simplex_tree.h>
#include <gudhi/Euclidean_witness_complex.h>
#include <gudhi/pick_n_random_points.h>
+#include <gudhi/choose_n_farthest_points.h>
#include <gudhi/reader_utils.h>
#include <CGAL/Epick_d.h>
@@ -75,7 +76,8 @@ int main(int argc, char * const argv[]) {
// Choose landmarks
start = clock();
- Gudhi::subsampling::pick_n_random_points(point_vector, number_of_landmarks, std::back_inserter(landmarks));
+ // Gudhi::subsampling::pick_n_random_points(point_vector, number_of_landmarks, std::back_inserter(landmarks));
+ Gudhi::subsampling::choose_n_farthest_points(K(), point_vector, number_of_landmarks, Gudhi::subsampling::random_starting_point, std::back_inserter(landmarks));
// Compute witness complex
Witness_complex witness_complex(landmarks,
diff --git a/src/Witness_complex/include/gudhi/Euclidean_strong_witness_complex.h b/src/Witness_complex/include/gudhi/Euclidean_strong_witness_complex.h
index fb669ef8..4f3cef4f 100644
--- a/src/Witness_complex/include/gudhi/Euclidean_strong_witness_complex.h
+++ b/src/Witness_complex/include/gudhi/Euclidean_strong_witness_complex.h
@@ -84,7 +84,7 @@ class Euclidean_strong_witness_complex
: landmarks_(std::begin(landmarks), std::end(landmarks)), landmark_tree_(landmarks_) {
nearest_landmark_table_.reserve(boost::size(witnesses));
for (auto w : witnesses)
- nearest_landmark_table_.push_back(landmark_tree_.query_incremental_nearest_neighbors(w));
+ nearest_landmark_table_.push_back(landmark_tree_.incremental_nearest_neighbors(w));
}
/** \brief Returns the point corresponding to the given vertex.
diff --git a/src/Witness_complex/include/gudhi/Euclidean_witness_complex.h b/src/Witness_complex/include/gudhi/Euclidean_witness_complex.h
index 6afe9a5d..ff8bb139 100644
--- a/src/Witness_complex/include/gudhi/Euclidean_witness_complex.h
+++ b/src/Witness_complex/include/gudhi/Euclidean_witness_complex.h
@@ -86,7 +86,7 @@ class Euclidean_witness_complex
: landmarks_(std::begin(landmarks), std::end(landmarks)), landmark_tree_(landmarks) {
nearest_landmark_table_.reserve(boost::size(witnesses));
for (auto w : witnesses)
- nearest_landmark_table_.push_back(landmark_tree_.query_incremental_nearest_neighbors(w));
+ nearest_landmark_table_.push_back(landmark_tree_.incremental_nearest_neighbors(w));
}
/** \brief Returns the point corresponding to the given vertex.
diff --git a/src/Witness_complex/include/gudhi/Strong_witness_complex.h b/src/Witness_complex/include/gudhi/Strong_witness_complex.h
index 6f4bcf60..b3d00b11 100644
--- a/src/Witness_complex/include/gudhi/Strong_witness_complex.h
+++ b/src/Witness_complex/include/gudhi/Strong_witness_complex.h
@@ -34,7 +34,8 @@ namespace Gudhi {
namespace witness_complex {
-/* \private
+ /**
+ * \private
* \class Strong_witness_complex
* \brief Constructs strong witness complex for a given table of nearest landmarks with respect to witnesses.
* \ingroup witness_complex
@@ -127,10 +128,11 @@ class Strong_witness_complex {
if ((Landmark_id)simplex.size() - 1 > complex_dim)
complex_dim = simplex.size() - 1;
}
- complex.set_dimension(complex_dim);
return true;
}
+ //@}
+
private:
/* \brief Adds recursively all the faces of a certain dimension dim-1 witnessed by the same witness.
* Iterator is needed to know until how far we can take landmarks to form simplexes.
@@ -171,7 +173,6 @@ class Strong_witness_complex {
simplex.pop_back();
}
}
- //@}
};
} // namespace witness_complex
diff --git a/src/Witness_complex/include/gudhi/Witness_complex.h b/src/Witness_complex/include/gudhi/Witness_complex.h
index bcfe8484..53c38520 100644
--- a/src/Witness_complex/include/gudhi/Witness_complex.h
+++ b/src/Witness_complex/include/gudhi/Witness_complex.h
@@ -130,7 +130,6 @@ class Witness_complex {
}
k++;
}
- complex.set_dimension(k-1);
return true;
}
diff --git a/src/Witness_complex/test/test_euclidean_simple_witness_complex.cpp b/src/Witness_complex/test/test_euclidean_simple_witness_complex.cpp
index 62fd1157..4f718203 100644
--- a/src/Witness_complex/test/test_euclidean_simple_witness_complex.cpp
+++ b/src/Witness_complex/test/test_euclidean_simple_witness_complex.cpp
@@ -75,7 +75,7 @@ BOOST_AUTO_TEST_CASE(simple_witness_complex) {
Kd_tree landmark_tree(landmarks);
Nearest_landmark_table nearest_landmark_table;
for (auto w: witnesses)
- nearest_landmark_table.push_back(landmark_tree.query_incremental_nearest_neighbors(w));
+ nearest_landmark_table.push_back(landmark_tree.incremental_nearest_neighbors(w));
// Weak witness complex: Euclidean version
EuclideanWitnessComplex eucl_witness_complex(landmarks,
diff --git a/src/Witness_complex/utilities/CMakeLists.txt b/src/Witness_complex/utilities/CMakeLists.txt
new file mode 100644
index 00000000..125a41ff
--- /dev/null
+++ b/src/Witness_complex/utilities/CMakeLists.txt
@@ -0,0 +1,28 @@
+cmake_minimum_required(VERSION 2.6)
+project(Witness_complex_utilities)
+
+# CGAL and Eigen3 are required for Euclidean version of Witness
+if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0)
+
+ add_executable ( Witness_complex_strong_witness_persistence strong_witness_persistence.cpp )
+ target_link_libraries(Witness_complex_strong_witness_persistence ${Boost_PROGRAM_OPTIONS_LIBRARY})
+
+ add_executable ( Witness_complex_weak_witness_persistence weak_witness_persistence.cpp )
+ target_link_libraries(Witness_complex_weak_witness_persistence ${Boost_PROGRAM_OPTIONS_LIBRARY})
+
+ if (TBB_FOUND)
+ target_link_libraries(Witness_complex_strong_witness_persistence ${TBB_LIBRARIES})
+ target_link_libraries(Witness_complex_weak_witness_persistence ${TBB_LIBRARIES})
+ endif()
+
+ add_test(NAME Witness_complex_strong_test_torus_persistence
+ COMMAND $<TARGET_FILE:Witness_complex_strong_witness_persistence>
+ "${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" "-l" "20" "-a" "0.5")
+ add_test(NAME Witness_complex_weak_test_torus_persistence
+ COMMAND $<TARGET_FILE:Witness_complex_weak_witness_persistence>
+ "${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" "-l" "20" "-a" "0.5")
+
+ install(TARGETS Witness_complex_strong_witness_persistence DESTINATION bin)
+ install(TARGETS Witness_complex_weak_witness_persistence DESTINATION bin)
+
+endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0)
diff --git a/src/Witness_complex/utilities/README b/src/Witness_complex/utilities/README
new file mode 100644
index 00000000..1141033e
--- /dev/null
+++ b/src/Witness_complex/utilities/README
@@ -0,0 +1,74 @@
+# Witness_complex #
+
+For more details about the witness complex, please read the [user manual of the package](http://gudhi.gforge.inria.fr/doc/latest/group__witness__complex.html).
+
+## `weak_witness_persistence` ##
+This program computes the persistent homology with coefficient field *Z/pZ* of a Weak witness complex defined on a set of input points. The output diagram contains one bar per line, written with the convention:
+
+`p dim birth death`
+
+where `dim` is the dimension of the homological feature, `birth` and `death` are respectively the birth and death of the feature, and `p` is the characteristic of the field *Z/pZ* used for homology coefficients.
+
+*Usage*
+`weak_witness_persistence [options] <OFF input file>`
+
+*Allowed options*
+
+* `-h [ --help ]` Produce help message
+* `-l [ --landmarks ]` Number of landmarks to choose from the point cloud.
+* `-o [ --output-file ]` Name of file in which the persistence diagram is written. By default, the diagram is printed to std::cout.
+* `-a [ --max-sq-alpha ]` (default = inf) Maximal squared relaxation parameter.
+* `-p [ --field-charac ]` (default = 11) Characteristic p of the coefficient field Z/pZ for computing homology.
+* `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals.
+* `-d [ --cpx-dimension ]` (default = 2147483647) Maximal dimension of the weak witness complex we want to compute.
+
+*Example*
+`weak_witness_persistence data/points/tore3D_1307.off -l 20 -a 0.5 -m 0.006`
+
+outputs:
+```
+Successfully read 1307 points.
+Ambient dimension is 3.
+The complex contains 732 simplices and has dimension 8
+11 0 0 inf
+11 1 0 inf
+11 2 0.0275251 0.0534586
+11 1 0 0.0239952
+```
+
+N.B.: output is random as the 20 landmarks are chosen randomly.
+
+## `strong_witness_persistence` ##
+This program computes the persistent homology with coefficient field *Z/pZ* of a Strong witness complex defined on a set of input points. The output diagram contains one bar per line, written with the convention:
+
+`p dim birth death`
+
+where `dim` is the dimension of the homological feature, `birth` and `death` are respectively the birth and death of the feature, and `p` is the characteristic of the field *Z/pZ* used for homology coefficients.
+
+*Usage*
+`strong_witness_persistence [options] <OFF input file>`
+
+*Allowed options*
+
+* `-h [ --help ]` Produce help message
+* `-l [ --landmarks ]` Number of landmarks to choose from the point cloud.
+* `-o [ --output-file ]` Name of file in which the persistence diagram is written. By default, the diagram is printed to std::cout.
+* `-a [ --max-sq-alpha ]` (default = inf) Maximal squared relaxation parameter.
+* `-p [ --field-charac ]` (default = 11) Characteristic p of the coefficient field Z/pZ for computing homology.
+* `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals.
+* `-d [ --cpx-dimension ]` (default = 2147483647) Maximal dimension of the strong witness complex we want to compute.
+
+*Example*
+`strong_witness_persistence data/points/tore3D_1307.off -l 20 -a 0.5 -m 0.06`
+
+outputs:
+```
+Successfully read 1307 points.
+Ambient dimension is 3.
+The complex contains 1836 simplices and has dimension 8
+11 0 0 inf
+11 1 0.00674748 inf
+11 2 0.0937751 0.235354
+```
+
+N.B.: output is random as the 20 landmarks are chosen randomly.
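
Both utilities emit one bar per line in the `p dim birth death` convention described above. As a small post-processing illustration (a hypothetical helper, not shipped with GUDHI), the snippet below reads such a diagram from standard input and keeps only the bars of a requested dimension.

```
#include <iostream>
#include <sstream>
#include <string>

int main(int argc, char* argv[]) {
  const int wanted_dim = (argc > 1) ? std::stoi(argv[1]) : 1;
  std::string line;
  while (std::getline(std::cin, line)) {
    if (line.empty() || line[0] == '#') continue;  // skip blanks and comments, if any
    std::istringstream iss(line);
    int p, dim;
    std::string birth, death;  // kept as strings so "inf" passes through unchanged
    if (iss >> p >> dim >> birth >> death && dim == wanted_dim)
      std::cout << birth << " " << death << "\n";
  }
  return 0;
}
```

For instance, piping the output of `weak_witness_persistence data/points/tore3D_1307.off -l 20 -a 0.5` into this filter with argument `1` keeps only the 1-dimensional bars.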
diff --git a/src/Witness_complex/example/example_strong_witness_persistence.cpp b/src/Witness_complex/utilities/strong_witness_persistence.cpp
index f786fe7b..e3e0c1ee 100644
--- a/src/Witness_complex/example/example_strong_witness_persistence.cpp
+++ b/src/Witness_complex/utilities/strong_witness_persistence.cpp
@@ -25,6 +25,7 @@
#include <gudhi/Persistent_cohomology.h>
#include <gudhi/Points_off_io.h>
#include <gudhi/pick_n_random_points.h>
+#include <gudhi/choose_n_farthest_points.h>
#include <boost/program_options.hpp>
@@ -76,8 +77,9 @@ int main(int argc, char * argv[]) {
std::cout << "Successfully read " << witnesses.size() << " points.\n";
std::cout << "Ambient dimension is " << witnesses[0].dimension() << ".\n";
- // Choose landmarks from witnesses
- Gudhi::subsampling::pick_n_random_points(witnesses, nbL, std::back_inserter(landmarks));
+ // Choose landmarks (uncomment one of the following two lines)
+ // Gudhi::subsampling::pick_n_random_points(witnesses, nbL, std::back_inserter(landmarks));
+ Gudhi::subsampling::choose_n_farthest_points(K(), witnesses, nbL, Gudhi::subsampling::random_starting_point, std::back_inserter(landmarks));
// Compute witness complex
Strong_witness_complex strong_witness_complex(landmarks,
diff --git a/src/Witness_complex/example/example_witness_complex_persistence.cpp b/src/Witness_complex/utilities/weak_witness_persistence.cpp
index a1146922..a63b0837 100644
--- a/src/Witness_complex/example/example_witness_complex_persistence.cpp
+++ b/src/Witness_complex/utilities/weak_witness_persistence.cpp
@@ -25,6 +25,7 @@
#include <gudhi/Persistent_cohomology.h>
#include <gudhi/Points_off_io.h>
#include <gudhi/pick_n_random_points.h>
+#include <gudhi/choose_n_farthest_points.h>
#include <boost/program_options.hpp>
@@ -76,8 +77,9 @@ int main(int argc, char * argv[]) {
std::cout << "Successfully read " << witnesses.size() << " points.\n";
std::cout << "Ambient dimension is " << witnesses[0].dimension() << ".\n";
- // Choose landmarks from witnesses
- Gudhi::subsampling::pick_n_random_points(witnesses, nbL, std::back_inserter(landmarks));
+ // Choose landmarks (uncomment one of the following two lines)
+ // Gudhi::subsampling::pick_n_random_points(witnesses, nbL, std::back_inserter(landmarks));
+ Gudhi::subsampling::choose_n_farthest_points(K(), witnesses, nbL, Gudhi::subsampling::random_starting_point, std::back_inserter(landmarks));
// Compute witness complex
Witness_complex witness_complex(landmarks,
diff --git a/src/cmake/modules/GUDHI_doxygen_target.cmake b/src/cmake/modules/GUDHI_doxygen_target.cmake
index d2cb952d..f3e2d9f5 100644
--- a/src/cmake/modules/GUDHI_doxygen_target.cmake
+++ b/src/cmake/modules/GUDHI_doxygen_target.cmake
@@ -3,6 +3,11 @@ find_package(Doxygen)
if(DOXYGEN_FOUND)
# configure_file(${CMAKE_CURRENT_SOURCE_DIR}/Doxyfile.in ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile @ONLY)
+ # Starting from CMake 3.9, the use of DOXYGEN_EXECUTABLE is deprecated
+ if(TARGET Doxygen::doxygen)
+ get_property(DOXYGEN_EXECUTABLE TARGET Doxygen::doxygen PROPERTY IMPORTED_LOCATION)
+ endif()
+
add_custom_target(doxygen ${DOXYGEN_EXECUTABLE} ${GUDHI_USER_VERSION_DIR}/Doxyfile
WORKING_DIRECTORY ${GUDHI_USER_VERSION_DIR}
DEPENDS ${GUDHI_USER_VERSION_DIR}/Doxyfile ${GUDHI_DOXYGEN_DEPENDENCY}
diff --git a/src/cmake/modules/GUDHI_third_party_libraries.cmake b/src/cmake/modules/GUDHI_third_party_libraries.cmake
index 8f486118..8269c3bf 100644
--- a/src/cmake/modules/GUDHI_third_party_libraries.cmake
+++ b/src/cmake/modules/GUDHI_third_party_libraries.cmake
@@ -54,10 +54,12 @@ if(CGAL_FOUND)
endforeach(CGAL_INCLUDE_DIR ${CGAL_INCLUDE_DIRS})
endif(NOT CGAL_VERSION VERSION_GREATER 4.9.0)
- # For dev version
- include_directories(BEFORE "src/common/include/gudhi_patches")
- # For user version
- include_directories(BEFORE "include/gudhi_patches")
+ if (NOT CGAL_VERSION VERSION_GREATER 4.11.0)
+ # For dev version
+ include_directories(BEFORE "src/common/include/gudhi_patches")
+ # For user version
+ include_directories(BEFORE "include/gudhi_patches")
+ endif (NOT CGAL_VERSION VERSION_GREATER 4.11.0)
endif()
endif()
@@ -117,15 +119,13 @@ if(NOT GUDHI_CYTHON_PATH)
endif(NOT GUDHI_CYTHON_PATH)
if(PYTHONINTERP_FOUND AND CYTHON_FOUND)
- # Unitary tests are available through py.test
- find_program( PYTEST_PATH py.test )
# Default found version 2
if(PYTHON_VERSION_MAJOR EQUAL 2)
# Documentation generation is available through sphinx
find_program( SPHINX_PATH sphinx-build )
elseif(PYTHON_VERSION_MAJOR EQUAL 3)
# No sphinx-build in Python3, just hack it
- set(SPHINX_PATH "${CMAKE_SOURCE_DIR}/${GUDHI_CYTHON_PATH}/doc/python3-sphinx-build")
+ set(SPHINX_PATH "${PYTHON_EXECUTABLE}" "${CMAKE_CURRENT_SOURCE_DIR}/${GUDHI_CYTHON_PATH}/doc/python3-sphinx-build.py")
else()
message(FATAL_ERROR "ERROR: Try to compile the Cython interface. Python version ${PYTHON_VERSION_STRING} is not valid.")
endif(PYTHON_VERSION_MAJOR EQUAL 2)
diff --git a/src/cmake/modules/GUDHI_user_version_target.cmake b/src/cmake/modules/GUDHI_user_version_target.cmake
index cff64ad2..4abc2574 100644
--- a/src/cmake/modules/GUDHI_user_version_target.cmake
+++ b/src/cmake/modules/GUDHI_user_version_target.cmake
@@ -48,7 +48,11 @@ if (NOT CMAKE_VERSION VERSION_LESS 2.8.11)
copy_directory ${CMAKE_SOURCE_DIR}/src/GudhUI ${GUDHI_USER_VERSION_DIR}/GudhUI)
set(GUDHI_DIRECTORIES "doc;example;concept;utilities")
- set(GUDHI_INCLUDE_DIRECTORIES "include/gudhi;include/gudhi_patches")
+ if (NOT CGAL_VERSION VERSION_GREATER 4.11.0)
+ set(GUDHI_INCLUDE_DIRECTORIES "include/gudhi;include/gudhi_patches")
+ else ()
+ set(GUDHI_INCLUDE_DIRECTORIES "include/gudhi")
+ endif ()
foreach(GUDHI_MODULE ${GUDHI_MODULES_FULL_LIST})
foreach(GUDHI_DIRECTORY ${GUDHI_DIRECTORIES})
diff --git a/src/common/doc/file_formats.h b/src/common/doc/file_formats.h
index c145b271..d06b81f5 100644
--- a/src/common/doc/file_formats.h
+++ b/src/common/doc/file_formats.h
@@ -2,7 +2,7 @@
* (Geometric Understanding in Higher Dimensions) is a generic C++
* library for computational topology.
*
-* Author(s): Clément Jamin
+* Author(s): Clément Jamin
*
* Copyright (C) 2017 INRIA
*
@@ -26,7 +26,7 @@
namespace Gudhi {
/*! \page fileformats File formats
-
+
\tableofcontents
\section FileFormatsPers Persistence Diagram
@@ -34,20 +34,89 @@ namespace Gudhi {
Such a file, whose extension is usually `.pers`, contains a list of persistence intervals.<br>
Lines starting with `#` are ignored (comments).<br>
Other lines might contain 2, 3 or 4 values (the number of values on each line must be the same for all lines):
- \code{.unparsed}
+ \verbatim
[[field] dimension] birth death
- \endcode
+ \endverbatim
Here is a simple sample file:
- \code{.unparsed}
- # Beautiful persistence diagram
+ \verbatim
+ # Persistence diagram example
2 2.7 3.7
2 9.6 14.
+ # Some comments
3 34.2 34.974
4 3. inf
- \endcode
+ \endverbatim
Other sample files can be found in the `data/persistence_diagram` folder.
+
+ Such files can be generated with `Gudhi::persistent_cohomology::Persistent_cohomology::output_diagram()` and read with
+ `Gudhi::read_persistence_intervals_and_dimension()`, `Gudhi::read_persistence_intervals_grouped_by_dimension()` or
+ `Gudhi::read_persistence_intervals_in_dimension()`.
+
+
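
 Since a data line may carry 2, 3 or 4 values, a reader has to work out which optional columns are present. In practice the `Gudhi::read_persistence_intervals_*` functions above should be used; the stand-alone sketch below only illustrates the layout, under the stated assumption that every data line has the same number of values.

```
#include <fstream>
#include <iostream>
#include <limits>
#include <sstream>
#include <string>
#include <vector>

int main(int argc, char* argv[]) {
  if (argc < 2) { std::cerr << "usage: read_pers file.pers\n"; return 1; }
  std::ifstream in(argv[1]);
  std::string line;
  while (std::getline(in, line)) {
    if (line.empty() || line[0] == '#') continue;  // '#' starts a comment
    std::istringstream iss(line);
    std::vector<std::string> tokens;
    for (std::string tok; iss >> tok;) tokens.push_back(tok);
    if (tokens.size() < 2) continue;  // malformed line, skip
    // The last two values are always birth and death; "inf" means +infinity.
    double birth = std::stod(tokens[tokens.size() - 2]);
    double death = (tokens.back() == "inf") ? std::numeric_limits<double>::infinity()
                                            : std::stod(tokens.back());
    int dim = (tokens.size() >= 3) ? std::stoi(tokens[tokens.size() - 3]) : -1;
    int field = (tokens.size() == 4) ? std::stoi(tokens[0]) : -1;
    std::cout << "field=" << field << " dim=" << dim
              << " birth=" << birth << " death=" << death << "\n";
  }
  return 0;
}
```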
+ \section FileFormatsIsoCuboid Iso-cuboid
+
+ Such a file describes an iso-oriented cuboid with diagonally opposite vertices (min_x, min_y, min_z,...) and (max_x, max_y, max_z, ...). The format is:<br>
+ \verbatim
+ min_x min_y [min_z ...]
+ max_x max_y [max_z ...]
+ \endverbatim
+
+ Here is a simple sample file in the 3D case:
+ \verbatim
+ -1. -1. -1.
+ 1. 1. 1.
+ \endverbatim
+
+
+ \section FileFormatsPerseus Perseus
+
+ This file format is the format used by the Perseus software
+ (http://www.sas.upenn.edu/~vnanda/perseus/) by Vidit Nanda.
+ The first line contains a number d, the dimension of the
+ bitmap (2 in the example below). The next d lines give the numbers of top-dimensional cubes in each dimension (3 and 3
+ in the example below). Then, in lexicographical order, the filtration values of the top-dimensional cubes are given (1 4 6 8
+ 20 4 7 6 5 in the example below).
+
+ \image html "exampleBitmap.png" "Example of input data."
+
+ The input file for the following complex is:
+ \verbatim
+ 2
+ 3
+ 3
+ 1
+ 4
+ 6
+ 8
+ 20
+ 4
+ 7
+ 6
+ 5
+ \endverbatim
+
+ To indicate periodic boundary conditions in a
+ given direction, the number of top-dimensional cells in this direction has to be multiplied by -1. For instance:
+
+ \verbatim
+ 2
+ -3
+ 3
+ 1
+ 4
+ 6
+ 8
+ 20
+ 4
+ 7
+ 6
+ 5
+ \endverbatim
+
+ This indicates that periodic boundary conditions are imposed in the x direction, but not in the y direction.
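
 To make the layout concrete, here is a small, hypothetical reader for this format (the dimension d, then the d sizes, negative in periodic directions, then the filtration values of the top-dimensional cubes); it only illustrates the convention above and is not the reader GUDHI actually uses.

```
#include <cmath>
#include <cstddef>
#include <fstream>
#include <iostream>
#include <vector>

int main(int argc, char* argv[]) {
  if (argc < 2) { std::cerr << "usage: read_perseus file\n"; return 1; }
  std::ifstream in(argv[1]);
  int dimension = 0;
  in >> dimension;                        // first value: dimension d of the bitmap
  std::vector<int> sizes(dimension);
  std::vector<bool> periodic(dimension);
  std::size_t num_cells = 1;
  for (int i = 0; i < dimension; ++i) {   // next d values: sizes, negative = periodic
    in >> sizes[i];
    periodic[i] = (sizes[i] < 0);
    sizes[i] = std::abs(sizes[i]);
    num_cells *= sizes[i];
  }
  std::vector<double> filtration(num_cells);
  for (std::size_t i = 0; i < num_cells; ++i)  // then the top-cube filtration values,
    in >> filtration[i];                       // in lexicographical order
  std::cout << dimension << "-dimensional bitmap, " << num_cells
            << " top-dimensional cubes read\n";
  return 0;
}
```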
+
*/
} // namespace Gudhi
diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h
index e5f135db..108cf6e3 100644
--- a/src/common/doc/main_page.h
+++ b/src/common/doc/main_page.h
@@ -93,6 +93,24 @@
</td>
</tr>
</table>
+ \subsection CoverComplexDataStructure Cover Complexes: Nerves and Graph Induced Complexes
+ \image html "gicvisu.jpg" "Graph Induced Complex of a point cloud."
+<table border="0">
+ <tr>
+ <td width="25%">
+ <b>Author:</b> Mathieu Carri&egrave;re<br>
+ <b>Introduced in:</b> GUDHI 2.0.1<br>
+ <b>Copyright:</b> GPL v3<br>
+ </td>
+ <td width="75%">
+ Nerves and Graph Induced Complexes are cover complexes, i.e. simplicial complexes that provably contain
+ topological information about the input data. They can be computed from a cover of the
+ data that comes, for instance, from the preimage of a family of intervals covering the image
+ of a scalar-valued function defined on the data. <br>
+ <b>User manual:</b> \ref cover_complex - <b>Reference manual:</b> Gudhi::cover_complex::Cover_complex
+ </td>
+ </tr>
+</table>
\subsection SkeletonBlockerDataStructure Skeleton blocker
\image html "ds_representation.png" "Skeleton blocker representation"
<table border="0">
@@ -161,7 +179,7 @@
<b>Author:</b> Fran&ccedil;ois Godi<br>
<b>Introduced in:</b> GUDHI 2.0.0<br>
<b>Copyright:</b> GPL v3<br>
- <b>Requires:</b> \ref cgal &ge; 4.8.1 and \ref eigen3
+ <b>Requires:</b> \ref cgal &ge; 4.8.1
</td>
<td width="75%">
Bottleneck distance measures the similarity between two persistence diagrams.
@@ -288,30 +306,38 @@ make doxygen
* Having CGAL version 4.4.0 or higher installed is recommended. The procedure to install this library according to
* your operating system is detailed here http://doc.cgal.org/latest/Manual/installation.html
*
- * The following examples require the <a target="_blank" href="http://www.cgal.org/">Computational Geometry Algorithms
+ * The following examples/utilities require the <a target="_blank" href="http://www.cgal.org/">Computational Geometry Algorithms
* Library</a> (CGAL \cite cgal:eb-15b) and will not be built if CGAL is not installed:
- * \li <a href="_persistent_cohomology_2alpha_complex_3d_persistence_8cpp-example.html">
- * Persistent_cohomology/alpha_complex_3d_persistence.cpp</a>
- * \li <a href="_persistent_cohomology_2exact_alpha_complex_3d_persistence_8cpp-example.html">
- * Persistent_cohomology/exact_alpha_complex_3d_persistence.cpp</a>
- * \li <a href="_persistent_cohomology_2weighted_alpha_complex_3d_persistence_8cpp-example.html">
- * Persistent_cohomology/weighted_alpha_complex_3d_persistence.cpp</a>
+ * \li <a href="_alpha_complex_2alpha_complex_3d_persistence_8cpp-example.html">
+ * Alpha_complex/alpha_complex_3d_persistence.cpp</a>
+ * \li <a href="_alpha_complex_2exact_alpha_complex_3d_persistence_8cpp-example.html">
+ * Alpha_complex/exact_alpha_complex_3d_persistence.cpp</a>
+ * \li <a href="_alpha_complex_2weighted_alpha_complex_3d_persistence_8cpp-example.html">
+ * Alpha_complex/weighted_alpha_complex_3d_persistence.cpp</a>
* \li <a href="_simplex_tree_2example_alpha_shapes_3_simplex_tree_from_off_file_8cpp-example.html">
* Simplex_tree/example_alpha_shapes_3_simplex_tree_from_off_file.cpp</a>
*
- * The following example requires CGAL version &ge; 4.6.0:
- * \li <a href="_witness_complex_2witness_complex_sphere_8cpp-example.html">
- * Witness_complex/witness_complex_sphere.cpp</a>
- *
+ * The following examples/utilities require CGAL version &ge; 4.6.0:
+ * \li <a href="_witness_complex_2strong_witness_persistence_8cpp-example.html">
+ * Witness_complex/strong_witness_persistence.cpp</a>
+ * \li <a href="_witness_complex_2weak_witness_persistence_8cpp-example.html">
+ * Witness_complex/weak_witness_persistence.cpp</a>
+ * \li <a href="_witness_complex_2example_strong_witness_complex_off_8cpp-example.html">
+ * Witness_complex/example_strong_witness_complex_off.cpp</a>
+ * \li <a href="_witness_complex_2example_witness_complex_off_8cpp-example.html">
+ * Witness_complex/example_witness_complex_off.cpp</a>
+ * \li <a href="_witness_complex_2example_witness_complex_sphere_8cpp-example.html">
+ * Witness_complex/example_witness_complex_sphere.cpp</a>
+ *
* The following example requires CGAL version &ge; 4.7.0:
* \li <a href="_alpha_complex_2_alpha_complex_from_off_8cpp-example.html">
* Alpha_complex/Alpha_complex_from_off.cpp</a>
* \li <a href="_alpha_complex_2_alpha_complex_from_points_8cpp-example.html">
* Alpha_complex/Alpha_complex_from_points.cpp</a>
- * \li <a href="_persistent_cohomology_2alpha_complex_persistence_8cpp-example.html">
- * Persistent_cohomology/alpha_complex_persistence.cpp</a>
- * \li <a href="_persistent_cohomology_2periodic_alpha_complex_3d_persistence_8cpp-example.html">
- * Persistent_cohomology/periodic_alpha_complex_3d_persistence.cpp</a>
+ * \li <a href="_alpha_complex_2alpha_complex_persistence_8cpp-example.html">
+ * Alpha_complex/alpha_complex_persistence.cpp</a>
+ * \li <a href="_alpha_complex_2periodic_alpha_complex_3d_persistence_8cpp-example.html">
+ * Alpha_complex/periodic_alpha_complex_3d_persistence.cpp</a>
* \li <a href="_persistent_cohomology_2custom_persistence_sort_8cpp-example.html">
* Persistent_cohomology/custom_persistence_sort.cpp</a>
*
@@ -320,8 +346,8 @@ make doxygen
* Bottleneck_distance/alpha_rips_persistence_bottleneck_distance.cpp</a>
* \li <a href="_bottleneck_distance_2bottleneck_basic_example_8cpp-example.html">
* Bottleneck_distance/bottleneck_basic_example.cpp</a>
- * \li <a href="_bottleneck_distance_2bottleneck_read_file_example_8cpp-example.html">
- * Bottleneck_distance/bottleneck_read_file_example.cpp</a>
+ * \li <a href="_bottleneck_distance_2bottleneck_read_file_8cpp-example.html">
+ * Bottleneck_distance/bottleneck_distance.cpp</a>
* \li <a href="_spatial_searching_2example_spatial_searching_8cpp-example.html">
* Spatial_searching/example_spatial_searching.cpp</a>
* \li <a href="_subsampling_2example_choose_n_farthest_points_8cpp-example.html">
@@ -342,19 +368,45 @@ make doxygen
* <a target="_blank" href="http://eigen.tuxfamily.org/">Eigen3</a> is a C++ template library for linear algebra:
* matrices, vectors, numerical solvers, and related algorithms.
*
- * The following example requires the <a target="_blank" href="http://eigen.tuxfamily.org/">Eigen3</a> and will not be
+ * The following examples/utilities require the <a target="_blank" href="http://eigen.tuxfamily.org/">Eigen3</a> and will not be
* built if Eigen3 is not installed:
* \li <a href="_alpha_complex_2_alpha_complex_from_off_8cpp-example.html">
* Alpha_complex/Alpha_complex_from_off.cpp</a>
* \li <a href="_alpha_complex_2_alpha_complex_from_points_8cpp-example.html">
* Alpha_complex/Alpha_complex_from_points.cpp</a>
- * \li <a href="_persistent_cohomology_2alpha_complex_persistence_8cpp-example.html">
- * Persistent_cohomology/alpha_complex_persistence.cpp</a>
- * \li <a href="_persistent_cohomology_2periodic_alpha_complex_3d_persistence_8cpp-example.html">
- * Persistent_cohomology/periodic_alpha_complex_3d_persistence.cpp</a>
+ * \li <a href="_alpha_complex_2alpha_complex_persistence_8cpp-example.html">
+ * Alpha_complex/alpha_complex_persistence.cpp</a>
+ * \li <a href="_alpha_complex_2periodic_alpha_complex_3d_persistence_8cpp-example.html">
+ * Alpha_complex/periodic_alpha_complex_3d_persistence.cpp</a>
+ * \li <a href="_bottleneck_distance_2alpha_rips_persistence_bottleneck_distance_8cpp-example.html">
+ * Bottleneck_distance/alpha_rips_persistence_bottleneck_distance.cpp</a>
* \li <a href="_persistent_cohomology_2custom_persistence_sort_8cpp-example.html">
* Persistent_cohomology/custom_persistence_sort.cpp</a>
- *
+ * \li <a href="_spatial_searching_2example_spatial_searching_8cpp-example.html">
+ * Spatial_searching/example_spatial_searching.cpp</a>
+ * \li <a href="_subsampling_2example_choose_n_farthest_points_8cpp-example.html">
+ * Subsampling/example_choose_n_farthest_points.cpp</a>
+ * \li <a href="_subsampling_2example_custom_kernel_8cpp-example.html">
+ * Subsampling/example_custom_kernel.cpp</a>
+ * \li <a href="_subsampling_2example_pick_n_random_points_8cpp-example.html">
+ * Subsampling/example_pick_n_random_points.cpp</a>
+ * \li <a href="_subsampling_2example_sparsify_point_set_8cpp-example.html">
+ * Subsampling/example_sparsify_point_set.cpp</a>
+ * \li <a href="_tangential_complex_2example_basic_8cpp-example.html">
+ * Tangential_complex/example_basic.cpp</a>
+ * \li <a href="_tangential_complex_2example_with_perturb_8cpp-example.html">
+ * Tangential_complex/example_with_perturb.cpp</a>
+ * \li <a href="_witness_complex_2strong_witness_persistence_8cpp-example.html">
+ * Witness_complex/strong_witness_persistence.cpp</a>
+ * \li <a href="_witness_complex_2weak_witness_persistence_8cpp-example.html">
+ * Witness_complex/weak_witness_persistence.cpp</a>
+ * \li <a href="_witness_complex_2example_strong_witness_complex_off_8cpp-example.html">
+ * Witness_complex/example_strong_witness_complex_off.cpp</a>
+ * \li <a href="_witness_complex_2example_witness_complex_off_8cpp-example.html">
+ * Witness_complex/example_witness_complex_off.cpp</a>
+ * \li <a href="_witness_complex_2example_witness_complex_sphere_8cpp-example.html">
+ * Witness_complex/example_witness_complex_sphere.cpp</a>
+ *
* \subsection tbb Threading Building Blocks
* <a target="_blank" href="https://www.threadingbuildingblocks.org/">Intel&reg; TBB</a> lets you easily write parallel
* C++ programs that take full advantage of multicore performance, that are portable and composable, and that have
@@ -362,27 +414,35 @@ make doxygen
*
* Having Intel&reg; TBB installed is recommended to parallelize and accelerate some GUDHI computations.
*
- * The following examples are using Intel&reg; TBB if installed:
+ * The following examples/utilities use Intel&reg; TBB if installed:
* \li <a href="_alpha_complex_2_alpha_complex_from_off_8cpp-example.html">
* Alpha_complex/Alpha_complex_from_off.cpp</a>
* \li <a href="_alpha_complex_2_alpha_complex_from_points_8cpp-example.html">
* Alpha_complex/Alpha_complex_from_points.cpp</a>
+ * \li <a href="_alpha_complex_2alpha_complex_3d_persistence_8cpp-example.html">
+ * Alpha_complex/alpha_complex_3d_persistence.cpp</a>
+ * \li <a href="_alpha_complex_2alpha_complex_persistence_8cpp-example.html">
+ * Alpha_complex/alpha_complex_persistence.cpp</a>
+ * \li <a href="_alpha_complex_2exact_alpha_complex_3d_persistence_8cpp-example.html">
+ * Alpha_complex/exact_alpha_complex_3d_persistence.cpp</a>
+ * \li <a href="_alpha_complex_2periodic_alpha_complex_3d_persistence_8cpp-example.html">
+ * Alpha_complex/periodic_alpha_complex_3d_persistence.cpp</a>
+ * \li <a href="_alpha_complex_2weighted_alpha_complex_3d_persistence_8cpp-example.html">
+ * Alpha_complex/weighted_alpha_complex_3d_persistence.cpp</a>
* \li <a href="_bitmap_cubical_complex_2_bitmap_cubical_complex_8cpp-example.html">
- * Bitmap_cubical_complex/Bitmap_cubical_complex.cpp</a>
+ * Bitmap_cubical_complex/cubical_complex_persistence.cpp</a>
* \li <a href="_bitmap_cubical_complex_2_bitmap_cubical_complex_periodic_boundary_conditions_8cpp-example.html">
- * Bitmap_cubical_complex/Bitmap_cubical_complex_periodic_boundary_conditions.cpp</a>
+ * Bitmap_cubical_complex/periodic_cubical_complex_persistence.cpp</a>
* \li <a href="_bitmap_cubical_complex_2_random_bitmap_cubical_complex_8cpp-example.html">
* Bitmap_cubical_complex/Random_bitmap_cubical_complex.cpp</a>
- * \li <a href="_persistent_cohomology_2alpha_complex_3d_persistence_8cpp-example.html">
- * Persistent_cohomology/alpha_complex_3d_persistence.cpp</a>
- * \li <a href="_persistent_cohomology_2alpha_complex_persistence_8cpp-example.html">
- * Persistent_cohomology/alpha_complex_persistence.cpp</a>
* \li <a href="_simplex_tree_2simple_simplex_tree_8cpp-example.html">
* Simplex_tree/simple_simplex_tree.cpp</a>
* \li <a href="_simplex_tree_2example_alpha_shapes_3_simplex_tree_from_off_file_8cpp-example.html">
* Simplex_tree/example_alpha_shapes_3_simplex_tree_from_off_file.cpp</a>
* \li <a href="_simplex_tree_2simplex_tree_from_cliques_of_graph_8cpp-example.html">
* Simplex_tree/simplex_tree_from_cliques_of_graph.cpp</a>
+ * \li <a href="_simplex_tree_2graph_expansion_with_blocker_8cpp-example.html">
+ * Simplex_tree/graph_expansion_with_blocker.cpp</a>
* \li <a href="_persistent_cohomology_2alpha_complex_3d_persistence_8cpp-example.html">
* Persistent_cohomology/alpha_complex_3d_persistence.cpp</a>
* \li <a href="_persistent_cohomology_2alpha_complex_persistence_8cpp-example.html">
@@ -395,27 +455,31 @@ make doxygen
* Persistent_cohomology/persistence_from_simple_simplex_tree.cpp</a>
* \li <a href="_persistent_cohomology_2plain_homology_8cpp-example.html">
* Persistent_cohomology/plain_homology.cpp</a>
- * \li <a href="_persistent_cohomology_2rips_distance_matrix_persistence_8cpp-example.html">
- * Persistent_cohomology/rips_distance_matrix_persistence.cpp</a>
* \li <a href="_persistent_cohomology_2rips_multifield_persistence_8cpp-example.html">
* Persistent_cohomology/rips_multifield_persistence.cpp</a>
- * \li <a href="_persistent_cohomology_2rips_persistence_8cpp-example.html">
- * Persistent_cohomology/rips_persistence.cpp</a>
* \li <a href="_persistent_cohomology_2rips_persistence_step_by_step_8cpp-example.html">
* Persistent_cohomology/rips_persistence_step_by_step.cpp</a>
* \li <a href="_persistent_cohomology_2exact_alpha_complex_3d_persistence_8cpp-example.html">
* Persistent_cohomology/exact_alpha_complex_3d_persistence.cpp</a>
* \li <a href="_persistent_cohomology_2weighted_alpha_complex_3d_persistence_8cpp-example.html">
* Persistent_cohomology/weighted_alpha_complex_3d_persistence.cpp</a>
- * \li <a href="_persistent_cohomology_2periodic_alpha_complex_3d_persistence_8cpp-example.html">
- * Persistent_cohomology/periodic_alpha_complex_3d_persistence.cpp</a>
* \li <a href="_persistent_cohomology_2custom_persistence_sort_8cpp-example.html">
* Persistent_cohomology/custom_persistence_sort.cpp</a>
* \li <a href="_rips_complex_2example_one_skeleton_rips_from_points_8cpp-example.html">
* Rips_complex/example_one_skeleton_rips_from_points.cpp</a>
* \li <a href="_rips_complex_2example_rips_complex_from_off_file_8cpp-example.html">
* Rips_complex/example_rips_complex_from_off_file.cpp</a>
- *
+ * \li <a href="_rips_complex_2rips_distance_matrix_persistence_8cpp-example.html">
+ * Rips_complex/rips_distance_matrix_persistence.cpp</a>
+ * \li <a href="_rips_complex_2rips_persistence_8cpp-example.html">
+ * Rips_complex/rips_persistence.cpp</a>
+ * \li <a href="_witness_complex_2strong_witness_persistence_8cpp-example.html">
+ * Witness_complex/strong_witness_persistence.cpp</a>
+ * \li <a href="_witness_complex_2weak_witness_persistence_8cpp-example.html">
+ * Witness_complex/weak_witness_persistence.cpp</a>
+ * \li <a href="_witness_complex_2example_nearest_landmark_table_8cpp-example.html">
+ * Witness_complex/example_nearest_landmark_table.cpp</a>
+ *
* \section Contributions Bug reports and contributions
* Please help us improving the quality of the GUDHI library. You may report bugs or suggestions to:
* \verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim
@@ -439,36 +503,37 @@ make doxygen
/*! @file Examples
* @example Alpha_complex/Alpha_complex_from_off.cpp
* @example Alpha_complex/Alpha_complex_from_points.cpp
+ * @example Alpha_complex/alpha_complex_3d_persistence.cpp
+ * @example Alpha_complex/alpha_complex_persistence.cpp
+ * @example Alpha_complex/exact_alpha_complex_3d_persistence.cpp
+ * @example Alpha_complex/periodic_alpha_complex_3d_persistence.cpp
+ * @example Alpha_complex/weighted_alpha_complex_3d_persistence.cpp
* @example Bottleneck_distance/alpha_rips_persistence_bottleneck_distance.cpp
* @example Bottleneck_distance/bottleneck_basic_example.cpp
- * @example Bottleneck_distance/bottleneck_read_file_example.cpp
- * @example Bitmap_cubical_complex/Bitmap_cubical_complex.cpp
- * @example Bitmap_cubical_complex/Bitmap_cubical_complex_periodic_boundary_conditions.cpp
+ * @example Bottleneck_distance/bottleneck_distance.cpp
+ * @example Bitmap_cubical_complex/cubical_complex_persistence.cpp
+ * @example Bitmap_cubical_complex/periodic_cubical_complex_persistence.cpp
* @example Bitmap_cubical_complex/Random_bitmap_cubical_complex.cpp
* @example common/example_CGAL_3D_points_off_reader.cpp
* @example common/example_CGAL_points_off_reader.cpp
* @example Contraction/Garland_heckbert.cpp
* @example Contraction/Rips_contraction.cpp
- * @example Persistent_cohomology/alpha_complex_3d_persistence.cpp
- * @example Persistent_cohomology/alpha_complex_persistence.cpp
* @example Persistent_cohomology/rips_persistence_via_boundary_matrix.cpp
- * @example Persistent_cohomology/exact_alpha_complex_3d_persistence.cpp
- * @example Persistent_cohomology/weighted_alpha_complex_3d_persistence.cpp
- * @example Persistent_cohomology/periodic_alpha_complex_3d_persistence.cpp
* @example Persistent_cohomology/persistence_from_file.cpp
* @example Persistent_cohomology/persistence_from_simple_simplex_tree.cpp
* @example Persistent_cohomology/plain_homology.cpp
* @example Persistent_cohomology/rips_multifield_persistence.cpp
- * @example Persistent_cohomology/rips_distance_matrix_persistence.cpp
- * @example Persistent_cohomology/rips_persistence.cpp
* @example Persistent_cohomology/custom_persistence_sort.cpp
* @example Persistent_cohomology/rips_persistence_step_by_step.cpp
* @example Rips_complex/example_one_skeleton_rips_from_points.cpp
* @example Rips_complex/example_rips_complex_from_off_file.cpp
+ * @example Rips_complex/rips_persistence.cpp
+ * @example Rips_complex/rips_distance_matrix_persistence.cpp
* @example Simplex_tree/mini_simplex_tree.cpp
* @example Simplex_tree/simple_simplex_tree.cpp
* @example Simplex_tree/example_alpha_shapes_3_simplex_tree_from_off_file.cpp
* @example Simplex_tree/simplex_tree_from_cliques_of_graph.cpp
+ * @example Simplex_tree/graph_expansion_with_blocker.cpp
* @example Skeleton_blocker/Skeleton_blocker_from_simplices.cpp
* @example Skeleton_blocker/Skeleton_blocker_iteration.cpp
* @example Skeleton_blocker/Skeleton_blocker_link.cpp
@@ -481,9 +546,9 @@ make doxygen
* @example Tangential_complex/example_with_perturb.cpp
* @example Witness_complex/example_nearest_landmark_table.cpp
* @example Witness_complex/example_strong_witness_complex_off.cpp
- * @example Witness_complex/example_strong_witness_persistence.cpp
* @example Witness_complex/example_witness_complex_off.cpp
- * @example Witness_complex/example_witness_complex_persistence.cpp
* @example Witness_complex/example_witness_complex_sphere.cpp
+ * @example Witness_complex/weak_witness_persistence.cpp
+ * @example Witness_complex/strong_witness_persistence.cpp
*/
\ No newline at end of file
diff --git a/src/common/include/gudhi/graph_simplicial_complex.h b/src/common/include/gudhi/graph_simplicial_complex.h
index 5fe7c826..d84421b2 100644
--- a/src/common/include/gudhi/graph_simplicial_complex.h
+++ b/src/common/include/gudhi/graph_simplicial_complex.h
@@ -28,6 +28,9 @@
#include <utility> // for pair<>
#include <vector>
#include <map>
+#include <tuple> // for std::tie
+
+namespace Gudhi {
/* Edge tag for Boost PropertyGraph. */
struct edge_filtration_t {
@@ -39,4 +42,64 @@ struct vertex_filtration_t {
typedef boost::vertex_property_tag kind;
};
+template <typename SimplicialComplexForProximityGraph>
+using Proximity_graph = typename boost::adjacency_list < boost::vecS, boost::vecS, boost::undirectedS
+, boost::property < vertex_filtration_t, typename SimplicialComplexForProximityGraph::Filtration_value >
+, boost::property < edge_filtration_t, typename SimplicialComplexForProximityGraph::Filtration_value >>;
+
+/** \brief Computes the proximity graph of the points.
+ *
+ * If `points` contains n elements, the proximity graph is the graph with n vertices and an edge [u,v] iff the
+ * distance between points u and v is at most `threshold`.
+ *
+ * \tparam ForwardPointRange must provide `.begin()` and `.end()` methods.
+ *
+ * \tparam Distance must provide `operator()(const Point& p1, const Point& p2)` returning a
+ * `Filtration_value`, where `Point` is a point from the `ForwardPointRange`.
+ */
+template< typename SimplicialComplexForProximityGraph
+ , typename ForwardPointRange
+ , typename Distance >
+Proximity_graph<SimplicialComplexForProximityGraph> compute_proximity_graph(
+ const ForwardPointRange& points,
+ typename SimplicialComplexForProximityGraph::Filtration_value threshold,
+ Distance distance) {
+ using Vertex_handle = typename SimplicialComplexForProximityGraph::Vertex_handle;
+ using Filtration_value = typename SimplicialComplexForProximityGraph::Filtration_value;
+
+ std::vector<std::pair< Vertex_handle, Vertex_handle >> edges;
+ std::vector< Filtration_value > edges_fil;
+ std::map< Vertex_handle, Filtration_value > vertices;
+
+ Vertex_handle idx_u, idx_v;
+ Filtration_value fil;
+ idx_u = 0;
+ for (auto it_u = points.begin(); it_u != points.end(); ++it_u) {
+ idx_v = idx_u + 1;
+ for (auto it_v = it_u + 1; it_v != points.end(); ++it_v, ++idx_v) {
+ fil = distance(*it_u, *it_v);
+ if (fil <= threshold) {
+ edges.emplace_back(idx_u, idx_v);
+ edges_fil.push_back(fil);
+ }
+ }
+ ++idx_u;
+ }
+
+ // Points are labeled from 0 to idx_u-1
+ Proximity_graph<SimplicialComplexForProximityGraph> skel_graph(edges.begin(), edges.end(), edges_fil.begin(), idx_u);
+
+ auto vertex_prop = boost::get(vertex_filtration_t(), skel_graph);
+
+ typename boost::graph_traits<Proximity_graph<SimplicialComplexForProximityGraph>>::vertex_iterator vi, vi_end;
+ for (std::tie(vi, vi_end) = boost::vertices(skel_graph);
+ vi != vi_end; ++vi) {
+ boost::put(vertex_prop, *vi, 0.);
+ }
+
+ return skel_graph;
+}
+
+} // namespace Gudhi
+
#endif // GRAPH_SIMPLICIAL_COMPLEX_H_
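As an illustration of the edge rule documented above (an edge [u,v] is kept iff the distance between points u and v is at most the threshold), here is a minimal pure-Python sketch; the function name is hypothetical and this is not the GUDHI API:

    import math

    def proximity_graph_edges(points, threshold, distance=math.dist):
        """Return (u, v, filtration) triples for every pair of points within threshold."""
        edges = []
        for u in range(len(points)):
            for v in range(u + 1, len(points)):
                fil = distance(points[u], points[v])
                if fil <= threshold:
                    edges.append((u, v, fil))
        return edges

    print(proximity_graph_edges([(0, 0), (1, 0), (3, 0)], threshold=1.5))
    # [(0, 1, 1.0)]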
diff --git a/src/common/include/gudhi/reader_utils.h b/src/common/include/gudhi/reader_utils.h
index bda93f4f..90be4fc7 100644
--- a/src/common/include/gudhi/reader_utils.h
+++ b/src/common/include/gudhi/reader_utils.h
@@ -38,6 +38,8 @@
#include <utility> // for pair
#include <tuple> // for std::make_tuple
+namespace Gudhi {
+
// Keep this file tag for Doxygen to parse the code, otherwise, functions are not documented.
// It is required for global functions and variables.
@@ -362,4 +364,6 @@ inline std::vector<std::pair<double, double>> read_persistence_intervals_in_dime
return ret;
}
+} // namespace Gudhi
+
#endif // READER_UTILS_H_
diff --git a/src/common/test/test_distance_matrix_reader.cpp b/src/common/test/test_distance_matrix_reader.cpp
index 95a73bd9..656e6f2e 100644
--- a/src/common/test/test_distance_matrix_reader.cpp
+++ b/src/common/test/test_distance_matrix_reader.cpp
@@ -36,7 +36,7 @@ BOOST_AUTO_TEST_CASE( lower_triangular_distance_matrix )
{
Distance_matrix from_lower_triangular;
// Read lower_triangular_distance_matrix.csv file where the separator is a ','
- from_lower_triangular = read_lower_triangular_matrix_from_csv_file<double>("lower_triangular_distance_matrix.csv",
+ from_lower_triangular = Gudhi::read_lower_triangular_matrix_from_csv_file<double>("lower_triangular_distance_matrix.csv",
',');
for (auto& i : from_lower_triangular) {
for (auto j : i) {
@@ -69,7 +69,7 @@ BOOST_AUTO_TEST_CASE( full_square_distance_matrix )
{
Distance_matrix from_full_square;
// Read full_square_distance_matrix.csv file where the separator is the default one ';'
- from_full_square = read_lower_triangular_matrix_from_csv_file<double>("full_square_distance_matrix.csv");
+ from_full_square = Gudhi::read_lower_triangular_matrix_from_csv_file<double>("full_square_distance_matrix.csv");
for (auto& i : from_full_square) {
for (auto j : i) {
std::cout << j << " ";
diff --git a/src/common/test/test_persistence_intervals_reader.cpp b/src/common/test/test_persistence_intervals_reader.cpp
index a06fff1e..be299376 100644
--- a/src/common/test/test_persistence_intervals_reader.cpp
+++ b/src/common/test/test_persistence_intervals_reader.cpp
@@ -27,6 +27,7 @@
#include <utility> // for pair
#include <tuple>
#include <limits> // for inf
+#include <map>
#define BOOST_TEST_DYN_LINK
#define BOOST_TEST_MODULE "persistence_intervals_reader"
@@ -44,7 +45,7 @@ BOOST_AUTO_TEST_CASE( persistence_intervals_without_dimension )
expected_intervals_by_dimension[-1].push_back(std::make_pair(3., std::numeric_limits<double>::infinity()));
Persistence_intervals_by_dimension persistence_intervals_by_dimension =
- read_persistence_intervals_grouped_by_dimension("persistence_intervals_without_dimension.pers");
+ Gudhi::read_persistence_intervals_grouped_by_dimension("persistence_intervals_without_dimension.pers");
std::cout << "\nread_persistence_intervals_grouped_by_dimension - expected\n";
for (auto map_iter : expected_intervals_by_dimension) {
@@ -69,7 +70,7 @@ BOOST_AUTO_TEST_CASE( persistence_intervals_without_dimension )
expected_intervals_in_dimension.push_back(std::make_pair(3., std::numeric_limits<double>::infinity()));
Persistence_intervals persistence_intervals_in_dimension =
- read_persistence_intervals_in_dimension("persistence_intervals_without_dimension.pers");
+ Gudhi::read_persistence_intervals_in_dimension("persistence_intervals_without_dimension.pers");
std::cout << "\nread_persistence_intervals_in_dimension - expected\n";
for (auto vec_iter : expected_intervals_in_dimension)
@@ -83,22 +84,22 @@ BOOST_AUTO_TEST_CASE( persistence_intervals_without_dimension )
expected_intervals_in_dimension.clear();
persistence_intervals_in_dimension =
- read_persistence_intervals_in_dimension("persistence_intervals_without_dimension.pers", 0);
+ Gudhi::read_persistence_intervals_in_dimension("persistence_intervals_without_dimension.pers", 0);
BOOST_CHECK(persistence_intervals_in_dimension == expected_intervals_in_dimension);
expected_intervals_in_dimension.clear();
persistence_intervals_in_dimension =
- read_persistence_intervals_in_dimension("persistence_intervals_without_dimension.pers", 1);
+ Gudhi::read_persistence_intervals_in_dimension("persistence_intervals_without_dimension.pers", 1);
BOOST_CHECK(persistence_intervals_in_dimension == expected_intervals_in_dimension);
expected_intervals_in_dimension.clear();
persistence_intervals_in_dimension =
- read_persistence_intervals_in_dimension("persistence_intervals_without_dimension.pers", 2);
+ Gudhi::read_persistence_intervals_in_dimension("persistence_intervals_without_dimension.pers", 2);
BOOST_CHECK(persistence_intervals_in_dimension == expected_intervals_in_dimension);
expected_intervals_in_dimension.clear();
persistence_intervals_in_dimension =
- read_persistence_intervals_in_dimension("persistence_intervals_without_dimension.pers", 3);
+ Gudhi::read_persistence_intervals_in_dimension("persistence_intervals_without_dimension.pers", 3);
BOOST_CHECK(persistence_intervals_in_dimension == expected_intervals_in_dimension);
}
@@ -112,7 +113,7 @@ BOOST_AUTO_TEST_CASE( persistence_intervals_with_dimension )
expected_intervals_by_dimension[1].push_back(std::make_pair(3., std::numeric_limits<double>::infinity()));
Persistence_intervals_by_dimension persistence_intervals_by_dimension =
- read_persistence_intervals_grouped_by_dimension("persistence_intervals_with_dimension.pers");
+ Gudhi::read_persistence_intervals_grouped_by_dimension("persistence_intervals_with_dimension.pers");
std::cout << "\nread_persistence_intervals_grouped_by_dimension - expected\n";
for (auto map_iter : expected_intervals_by_dimension) {
@@ -137,7 +138,7 @@ BOOST_AUTO_TEST_CASE( persistence_intervals_with_dimension )
expected_intervals_in_dimension.push_back(std::make_pair(3., std::numeric_limits<double>::infinity()));
Persistence_intervals persistence_intervals_in_dimension =
- read_persistence_intervals_in_dimension("persistence_intervals_with_dimension.pers");
+ Gudhi::read_persistence_intervals_in_dimension("persistence_intervals_with_dimension.pers");
std::cout << "\nread_persistence_intervals_in_dimension - expected\n";
for (auto vec_iter : expected_intervals_in_dimension)
@@ -152,7 +153,7 @@ BOOST_AUTO_TEST_CASE( persistence_intervals_with_dimension )
expected_intervals_in_dimension.clear();
expected_intervals_in_dimension.push_back(std::make_pair(2.7, 3.7));
persistence_intervals_in_dimension =
- read_persistence_intervals_in_dimension("persistence_intervals_with_dimension.pers", 0);
+ Gudhi::read_persistence_intervals_in_dimension("persistence_intervals_with_dimension.pers", 0);
std::cout << "\nread_persistence_intervals_in_dimension 0 - expected\n";
for (auto vec_iter : expected_intervals_in_dimension)
@@ -168,7 +169,7 @@ BOOST_AUTO_TEST_CASE( persistence_intervals_with_dimension )
expected_intervals_in_dimension.push_back(std::make_pair(9.6, 14.));
expected_intervals_in_dimension.push_back(std::make_pair(3., std::numeric_limits<double>::infinity()));
persistence_intervals_in_dimension =
- read_persistence_intervals_in_dimension("persistence_intervals_with_dimension.pers", 1);
+ Gudhi::read_persistence_intervals_in_dimension("persistence_intervals_with_dimension.pers", 1);
std::cout << "\nread_persistence_intervals_in_dimension 1 - expected\n";
for (auto vec_iter : expected_intervals_in_dimension)
@@ -182,7 +183,7 @@ BOOST_AUTO_TEST_CASE( persistence_intervals_with_dimension )
expected_intervals_in_dimension.clear();
persistence_intervals_in_dimension =
- read_persistence_intervals_in_dimension("persistence_intervals_with_dimension.pers", 2);
+ Gudhi::read_persistence_intervals_in_dimension("persistence_intervals_with_dimension.pers", 2);
std::cout << "\nread_persistence_intervals_in_dimension 2 - expected\n";
for (auto vec_iter : expected_intervals_in_dimension)
@@ -197,7 +198,7 @@ BOOST_AUTO_TEST_CASE( persistence_intervals_with_dimension )
expected_intervals_in_dimension.clear();
expected_intervals_in_dimension.push_back(std::make_pair(34.2, 34.974));
persistence_intervals_in_dimension =
- read_persistence_intervals_in_dimension("persistence_intervals_with_dimension.pers", 3);
+ Gudhi::read_persistence_intervals_in_dimension("persistence_intervals_with_dimension.pers", 3);
std::cout << "\nread_persistence_intervals_in_dimension 3 - expected\n";
for (auto vec_iter : expected_intervals_in_dimension)
@@ -221,7 +222,7 @@ BOOST_AUTO_TEST_CASE( persistence_intervals_with_field )
expected_intervals_by_dimension[1].push_back(std::make_pair(3., std::numeric_limits<double>::infinity()));
Persistence_intervals_by_dimension persistence_intervals_by_dimension =
- read_persistence_intervals_grouped_by_dimension("persistence_intervals_with_field.pers");
+ Gudhi::read_persistence_intervals_grouped_by_dimension("persistence_intervals_with_field.pers");
std::cout << "\nread_persistence_intervals_grouped_by_dimension - expected\n";
for (auto map_iter : expected_intervals_by_dimension) {
@@ -246,7 +247,7 @@ BOOST_AUTO_TEST_CASE( persistence_intervals_with_field )
expected_intervals_in_dimension.push_back(std::make_pair(3., std::numeric_limits<double>::infinity()));
Persistence_intervals persistence_intervals_in_dimension =
- read_persistence_intervals_in_dimension("persistence_intervals_with_field.pers");
+ Gudhi::read_persistence_intervals_in_dimension("persistence_intervals_with_field.pers");
std::cout << "\nread_persistence_intervals_in_dimension - expected\n";
for (auto vec_iter : expected_intervals_in_dimension)
@@ -261,7 +262,7 @@ BOOST_AUTO_TEST_CASE( persistence_intervals_with_field )
expected_intervals_in_dimension.clear();
expected_intervals_in_dimension.push_back(std::make_pair(2.7, 3.7));
persistence_intervals_in_dimension =
- read_persistence_intervals_in_dimension("persistence_intervals_with_field.pers", 0);
+ Gudhi::read_persistence_intervals_in_dimension("persistence_intervals_with_field.pers", 0);
std::cout << "\nread_persistence_intervals_in_dimension 0 - expected\n";
for (auto vec_iter : expected_intervals_in_dimension)
@@ -277,7 +278,7 @@ BOOST_AUTO_TEST_CASE( persistence_intervals_with_field )
expected_intervals_in_dimension.push_back(std::make_pair(9.6, 14.));
expected_intervals_in_dimension.push_back(std::make_pair(3., std::numeric_limits<double>::infinity()));
persistence_intervals_in_dimension =
- read_persistence_intervals_in_dimension("persistence_intervals_with_field.pers", 1);
+ Gudhi::read_persistence_intervals_in_dimension("persistence_intervals_with_field.pers", 1);
std::cout << "\nread_persistence_intervals_in_dimension 1 - expected\n";
for (auto vec_iter : expected_intervals_in_dimension)
@@ -291,7 +292,7 @@ BOOST_AUTO_TEST_CASE( persistence_intervals_with_field )
expected_intervals_in_dimension.clear();
persistence_intervals_in_dimension =
- read_persistence_intervals_in_dimension("persistence_intervals_with_field.pers", 2);
+ Gudhi::read_persistence_intervals_in_dimension("persistence_intervals_with_field.pers", 2);
std::cout << "\nread_persistence_intervals_in_dimension 2 - expected\n";
for (auto vec_iter : expected_intervals_in_dimension)
@@ -306,7 +307,7 @@ BOOST_AUTO_TEST_CASE( persistence_intervals_with_field )
expected_intervals_in_dimension.clear();
expected_intervals_in_dimension.push_back(std::make_pair(34.2, 34.974));
persistence_intervals_in_dimension =
- read_persistence_intervals_in_dimension("persistence_intervals_with_field.pers", 3);
+ Gudhi::read_persistence_intervals_in_dimension("persistence_intervals_with_field.pers", 3);
std::cout << "\nread_persistence_intervals_in_dimension 3 - expected\n";
for (auto vec_iter : expected_intervals_in_dimension)
diff --git a/src/common/utilities/README b/src/common/utilities/README
index dc841521..18fa8cc4 100644
--- a/src/common/utilities/README
+++ b/src/common/utilities/README
@@ -1,19 +1,19 @@
-======================= off_file_from_shape_generator ==================================
+# Pointset generator #
-Example of use :
+## `off_file_from_shape_generator` ##
-*** on|in sphere|cube|curve|torus|klein generator
+Generates a point set and saves it in an OFF file. The command line is:
+`off_file_from_shape_generator on|in sphere|cube|curve|torus|klein <filename> <num_points> <dimension> <parameter1> <parameter2>...`
-./off_file_from_shape_generator on sphere onSphere.off 1000 3 15.2
+Warning: the "on cube" generator is not available!
- => generates a onSphere.off file with 1000 points randomized on a sphere of dimension 3 and radius 15.2
+Examples:
-./off_file_from_shape_generator in sphere inSphere.off 100 2
+* Generate an onSphere.off file with 1000 points randomized on a sphere of dimension 3 and radius 15.2:
+`off_file_from_shape_generator on sphere onSphere.off 1000 3 15.2`
+
+* Generate an inSphere.off file with 100 points randomized in a sphere of dimension 2 (circle) and radius 1.0 (default):
+`off_file_from_shape_generator in sphere inSphere.off 100 2`
- => generates a inSphere.off file with 100 points randomized in a sphere of dimension 2 (circle) and radius 1.0 (default)
-
-./off_file_from_shape_generator in cube inCube.off 10000 3 5.8
-
- => generates a inCube.off file with 10000 points randomized in a cube of dimension 3 and side 5.8
-
-!! Warning: hypegenerator on cube is not available !!
+* Generate an inCube.off file with 10000 points randomized in a cube of dimension 3 and side 5.8:
+`off_file_from_shape_generator in cube inCube.off 10000 3 5.8`
diff --git a/src/common/utilities/off_file_from_shape_generator.cpp b/src/common/utilities/off_file_from_shape_generator.cpp
index 0f310a13..afcd558c 100644
--- a/src/common/utilities/off_file_from_shape_generator.cpp
+++ b/src/common/utilities/off_file_from_shape_generator.cpp
@@ -77,7 +77,7 @@ int main(int argc, char **argv) {
usage(argv[0]);
}
- enum class Data_shape { sphere, cube, curve, torus, klein, undefined } ;
+ enum class Data_shape { sphere, cube, curve, torus, klein, undefined};
Data_shape shape = Data_shape::undefined;
if (memcmp(argv[2], "sphere", sizeof("sphere")) == 0) {
diff --git a/src/cython/CMakeLists.txt b/src/cython/CMakeLists.txt
index 99badffb..afca9d60 100644
--- a/src/cython/CMakeLists.txt
+++ b/src/cython/CMakeLists.txt
@@ -15,8 +15,18 @@ function( add_gudhi_cython_lib THE_LIB )
endif(EXISTS ${THE_LIB})
endfunction( add_gudhi_cython_lib )
+# THE_TEST is the Python test file name (without the .py extension) that contains the test functions
+function( add_gudhi_py_test THE_TEST )
+ # use ${PYTHON_EXECUTABLE} -B, otherwise a __pycache__ directory is created in sources by python
+ # use py.test no cache provider, otherwise a .cache file is created in sources by py.test
+ add_test(NAME ${THE_TEST}_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${PYTHON_EXECUTABLE} -B -m pytest -p no:cacheprovider ${CMAKE_CURRENT_SOURCE_DIR}/test/${THE_TEST}.py)
+endfunction( add_gudhi_py_test )
+
+
if(CYTHON_FOUND)
- message("++ ${PYTHON_EXECUTABLE} v.${PYTHON_VERSION_STRING} - Cython is ${CYTHON_EXECUTABLE} - py.test is ${PYTEST_PATH} - Sphinx is ${SPHINX_PATH}")
+ message("++ ${PYTHON_EXECUTABLE} v.${PYTHON_VERSION_STRING} - Cython is ${CYTHON_EXECUTABLE} - Sphinx is ${SPHINX_PATH}")
set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DBOOST_RESULT_OF_USE_DECLTYPE', ")
set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DBOOST_ALL_NO_LIB', ")
@@ -73,76 +83,20 @@ if(CYTHON_FOUND)
set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_EIGEN3_ENABLED', ")
endif (EIGEN3_FOUND)
- # Copy recursively include, cython, example, doc and test repositories before packages finding
- # Some tests and doc files are removed in case some packages are not found
- file(COPY include DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
- file(COPY cython DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
- file(COPY example DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
- file(COPY test DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
- file(COPY doc DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
- # Developper version for doc images
- file(GLOB GUDHI_DEV_DOC_IMAGES "${CMAKE_SOURCE_DIR}/src/*/doc/*.png")
- file(COPY ${GUDHI_DEV_DOC_IMAGES} DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/doc/img")
- file(GLOB GUDHI_DEV_DOC_IMAGES "${CMAKE_SOURCE_DIR}/src/*/doc/*.svg")
- file(COPY ${GUDHI_DEV_DOC_IMAGES} DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/doc/img")
- # User version for doc images
- file(GLOB GUDHI_USER_DOC_IMAGES "${CMAKE_SOURCE_DIR}/doc/*/*.png")
- file(COPY ${GUDHI_USER_DOC_IMAGES} DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/doc/img")
- file(GLOB GUDHI_USER_DOC_IMAGES "${CMAKE_SOURCE_DIR}/doc/*/*.svg")
- file(COPY ${GUDHI_USER_DOC_IMAGES} DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/doc/img")
- # Biblio
- file(GLOB GUDHI_BIB_FILES "${CMAKE_SOURCE_DIR}/biblio/*.bib")
- file(COPY ${GUDHI_BIB_FILES} DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/doc/")
- # Cubical complex perseus doc example
- file(GLOB GUDHI_CUBICAL_PERSEUS_FILES "${CMAKE_SOURCE_DIR}/data/bitmap/*cubicalcomplexdoc.txt")
- file(COPY ${GUDHI_CUBICAL_PERSEUS_FILES} DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/doc/")
- file(COPY "${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/doc/")
- file(COPY "${CMAKE_SOURCE_DIR}/data/distance_matrix/full_square_distance_matrix.csv" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/doc/")
- # Persistence graphical tools examples
- file(COPY "${CMAKE_SOURCE_DIR}/data/bitmap/3d_torus.txt" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/doc/")
- file(COPY "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/doc/")
-
if (NOT CGAL_VERSION VERSION_LESS 4.8.1)
- # If CGAL_VERSION >= 4.8.1, include subsampling
- set(GUDHI_CYTHON_SUBSAMPLING "include 'cython/subsampling.pyx'")
- set(GUDHI_CYTHON_TANGENTIAL_COMPLEX "include 'cython/tangential_complex.pyx'")
- set(GUDHI_CYTHON_BOTTLENECK_DISTANCE "include 'cython/bottleneck_distance.pyx'")
- else (NOT CGAL_VERSION VERSION_LESS 4.8.1)
- # Remove subsampling unitary tests
- file(REMOVE ${CMAKE_CURRENT_BINARY_DIR}/test/test_subsampling.py)
- file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/subsampling_ref.rst")
- file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/subsampling_sum.rst")
- file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/subsampling_user.rst")
- # Remove tangential complex and bottleneck unitary tests
- file(REMOVE ${CMAKE_CURRENT_BINARY_DIR}/test/test_tangential_complex.py)
- file(REMOVE ${CMAKE_CURRENT_BINARY_DIR}/test/test_bottleneck_distance.py)
- file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/bottleneck_distance_ref.rst")
- file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/bottleneck_distance_sum.rst")
- file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/bottleneck_distance_user.rst")
- file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/tangential_complex_ref.rst")
- file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/tangential_complex_sum.rst")
- file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/tangential_complex_user.rst")
+ set(GUDHI_CYTHON_BOTTLENECK_DISTANCE "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/bottleneck_distance.pyx'")
endif (NOT CGAL_VERSION VERSION_LESS 4.8.1)
- if (NOT CGAL_VERSION VERSION_LESS 4.7.0)
- # If CGAL_VERSION >= 4.7.0, include alpha
- set(GUDHI_CYTHON_ALPHA_COMPLEX "include 'cython/alpha_complex.pyx'")
- else (NOT CGAL_VERSION VERSION_LESS 4.7.0)
- # Remove alpha complex unitary tests
- file(REMOVE ${CMAKE_CURRENT_BINARY_DIR}/test/test_alpha_complex.py)
- file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/alpha_complex_ref.rst")
- file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/alpha_complex_sum.rst")
- file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/alpha_complex_user.rst")
- endif (NOT CGAL_VERSION VERSION_LESS 4.7.0)
- if (NOT CGAL_VERSION VERSION_LESS 4.6.0)
- # If CGAL_VERSION >= 4.6.0, include euclidean versions of witness complex
+ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
+ set(GUDHI_CYTHON_SUBSAMPLING "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/subsampling.pyx'")
+ set(GUDHI_CYTHON_TANGENTIAL_COMPLEX "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/tangential_complex.pyx'")
+ endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
+ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
+ set(GUDHI_CYTHON_ALPHA_COMPLEX "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/alpha_complex.pyx'")
+ endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
+ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0)
set(GUDHI_CYTHON_EUCLIDEAN_WITNESS_COMPLEX
- "include 'cython/euclidean_witness_complex.pyx'\ninclude 'cython/euclidean_strong_witness_complex.pyx'\n")
- else (NOT CGAL_VERSION VERSION_LESS 4.6.0)
- # Remove alpha complex unitary tests
- file(REMOVE ${CMAKE_CURRENT_BINARY_DIR}/test/test_euclidean_witness_complex.py)
- file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/euclidean_witness_complex_ref.rst")
- file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/euclidean_strong_witness_complex_ref.rst")
- endif (NOT CGAL_VERSION VERSION_LESS 4.6.0)
+ "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/euclidean_witness_complex.pyx'\ninclude '${CMAKE_CURRENT_SOURCE_DIR}/cython/euclidean_strong_witness_complex.pyx'\n")
+ endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0)
if(CGAL_FOUND)
# Add CGAL compilation args
@@ -193,10 +147,6 @@ if(CYTHON_FOUND)
set( GUDHI_CYTHON_RUNTIME_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}")
endif(UNIX)
- # set sphinx-build in make files
- configure_file(doc/Makefile.in "${CMAKE_CURRENT_BINARY_DIR}/doc/Makefile" @ONLY)
- configure_file(doc/make.bat.in "${CMAKE_CURRENT_BINARY_DIR}/doc/make.bat" @ONLY)
-
# Generate setup.py file to cythonize Gudhi - This file must be named setup.py by convention
configure_file(setup.py.in "${CMAKE_CURRENT_BINARY_DIR}/setup.py" @ONLY)
# Generate gudhi.pyx - Gudhi cython file
@@ -218,112 +168,147 @@ if(CYTHON_FOUND)
PATTERN "*.pyd")
# Test examples
- if (NOT CGAL_VERSION VERSION_LESS 4.8.1)
+ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
+ # Bottleneck and Alpha
add_test(NAME alpha_rips_persistence_bottleneck_distance_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_BINARY_DIR}/example/alpha_rips_persistence_bottleneck_distance.py"
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/alpha_rips_persistence_bottleneck_distance.py"
-f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -t 0.15 -d 3)
- set_tests_properties(alpha_rips_persistence_bottleneck_distance_py_test PROPERTIES ENVIRONMENT "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}")
-
- add_test(NAME bottleneck_basic_example_py_test
- WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_BINARY_DIR}/example/bottleneck_basic_example.py")
- set_tests_properties(bottleneck_basic_example_py_test PROPERTIES ENVIRONMENT "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}")
+ # Tangential
add_test(NAME tangential_complex_plain_homology_from_off_file_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_BINARY_DIR}/example/tangential_complex_plain_homology_from_off_file_example.py"
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/tangential_complex_plain_homology_from_off_file_example.py"
--no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off)
- set_tests_properties(tangential_complex_plain_homology_from_off_file_example_py_test PROPERTIES ENVIRONMENT "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}")
+ add_gudhi_py_test(test_tangential_complex)
+
+ # Witness complex AND Subsampling
add_test(NAME euclidean_strong_witness_complex_diagram_persistence_from_off_file_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_BINARY_DIR}/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py"
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py"
--no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -a 1.0 -n 20 -d 2)
- set_tests_properties(euclidean_strong_witness_complex_diagram_persistence_from_off_file_example_py_test PROPERTIES ENVIRONMENT "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}")
add_test(NAME euclidean_witness_complex_diagram_persistence_from_off_file_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_BINARY_DIR}/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py"
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py"
--no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -a 1.0 -n 20 -d 2)
- set_tests_properties(euclidean_witness_complex_diagram_persistence_from_off_file_example_py_test PROPERTIES ENVIRONMENT "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}")
+
+ # Subsampling
+ add_gudhi_py_test(test_subsampling)
+
+ endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
+ if (NOT CGAL_VERSION VERSION_LESS 4.8.1)
+ # Bottleneck
+ add_test(NAME bottleneck_basic_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/bottleneck_basic_example.py")
+
+ add_gudhi_py_test(test_bottleneck_distance)
endif (NOT CGAL_VERSION VERSION_LESS 4.8.1)
- if (NOT CGAL_VERSION VERSION_LESS 4.7.0)
+ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
+ # Alpha
add_test(NAME alpha_complex_from_points_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_BINARY_DIR}/example/alpha_complex_from_points_example.py")
- set_tests_properties(alpha_complex_from_points_example_py_test PROPERTIES ENVIRONMENT "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}")
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/alpha_complex_from_points_example.py")
add_test(NAME alpha_complex_diagram_persistence_from_off_file_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_BINARY_DIR}/example/alpha_complex_diagram_persistence_from_off_file_example.py"
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/alpha_complex_diagram_persistence_from_off_file_example.py"
--no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -a 0.6)
- set_tests_properties(alpha_complex_diagram_persistence_from_off_file_example_py_test PROPERTIES ENVIRONMENT "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}")
- endif (NOT CGAL_VERSION VERSION_LESS 4.7.0)
- if (NOT CGAL_VERSION VERSION_LESS 4.6.0)
- endif (NOT CGAL_VERSION VERSION_LESS 4.6.0)
+ add_gudhi_py_test(test_alpha_complex)
+
+ endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
+
+ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0)
+ # Euclidean witness
+ add_gudhi_py_test(test_euclidean_witness_complex)
+ endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0)
+
+ # Cubical
add_test(NAME periodic_cubical_complex_barcode_persistence_from_perseus_file_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_BINARY_DIR}/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py"
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py"
--no-barcode -f ${CMAKE_SOURCE_DIR}/data/bitmap/CubicalTwoSphere.txt)
- set_tests_properties(periodic_cubical_complex_barcode_persistence_from_perseus_file_example_py_test PROPERTIES ENVIRONMENT "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}")
add_test(NAME random_cubical_complex_persistence_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_BINARY_DIR}/example/random_cubical_complex_persistence_example.py"
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/random_cubical_complex_persistence_example.py"
10 10 10)
- set_tests_properties(random_cubical_complex_persistence_example_py_test PROPERTIES ENVIRONMENT "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}")
+ add_gudhi_py_test(test_cubical_complex)
+
+ # Rips
add_test(NAME rips_complex_diagram_persistence_from_distance_matrix_file_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_BINARY_DIR}/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py"
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py"
--no-diagram -f ${CMAKE_SOURCE_DIR}/data/distance_matrix/lower_triangular_distance_matrix.csv -e 12.0 -d 3)
- set_tests_properties(rips_complex_diagram_persistence_from_distance_matrix_file_example_py_test PROPERTIES ENVIRONMENT "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}")
add_test(NAME rips_complex_diagram_persistence_from_off_file_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_BINARY_DIR}/example/rips_complex_diagram_persistence_from_off_file_example.py
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/example/rips_complex_diagram_persistence_from_off_file_example.py
--no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -e 0.25 -d 3)
- set_tests_properties(rips_complex_diagram_persistence_from_off_file_example_py_test PROPERTIES ENVIRONMENT "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}")
add_test(NAME rips_complex_from_points_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_BINARY_DIR}/example/rips_complex_from_points_example.py)
- set_tests_properties(rips_complex_from_points_example_py_test PROPERTIES ENVIRONMENT "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}")
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/example/rips_complex_from_points_example.py)
+
+ add_gudhi_py_test(test_rips_complex)
+ # Simplex tree
add_test(NAME simplex_tree_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_BINARY_DIR}/example/simplex_tree_example.py)
- set_tests_properties(simplex_tree_example_py_test PROPERTIES ENVIRONMENT "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}")
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/example/simplex_tree_example.py)
+ add_gudhi_py_test(test_simplex_tree)
+
+ # Witness
add_test(NAME witness_complex_from_nearest_landmark_table_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_BINARY_DIR}/example/witness_complex_from_nearest_landmark_table.py)
- set_tests_properties(witness_complex_from_nearest_landmark_table_py_test PROPERTIES ENVIRONMENT "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}")
-
- # Unitary tests are available through py.test
- if(PYTEST_PATH)
- add_test(
- NAME gudhi_cython_py_test
- WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${PYTHON_EXECUTABLE} "${PYTEST_PATH}")
- set_tests_properties(gudhi_cython_py_test PROPERTIES ENVIRONMENT "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}")
- endif(PYTEST_PATH)
-
- # Documentation generation is available through sphinx
- if(SPHINX_PATH)
- if (UNIX)
- add_custom_target(sphinx
- WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/doc
- DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/gudhi.so"
- COMMAND make html doctest)
- else (UNIX)
- add_custom_target(sphinx
- WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/doc
- COMMAND make.bat html doctest)
- endif (UNIX)
- endif(SPHINX_PATH)
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/example/witness_complex_from_nearest_landmark_table.py)
+
+ add_gudhi_py_test(test_witness_complex)
+
+ # Reader utils
+ add_gudhi_py_test(test_reader_utils)
+
+ # Documentation generation is available through sphinx - requires all modules
+ if(SPHINX_PATH AND NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
+ set (GUDHI_SPHINX_MESSAGE "Generating API documentation with Sphinx in ${CMAKE_CURRENT_BINARY_DIR}/sphinx/")
+    # User warning - Sphinx is a static site generator, configured to work with the user version
+    # Image and bibliography warnings appear on the developer version because those files are not found there
+    if (GUDHI_CYTHON_PATH STREQUAL "src/cython")
+      set (GUDHI_SPHINX_MESSAGE "${GUDHI_SPHINX_MESSAGE} \n WARNING : Sphinx is configured for the user version, but you are running it on the developer version. Images and bibliography will be missing")
+ endif()
+ # sphinx target requires gudhi.so, because conf.py reads gudhi version from it
+ add_custom_target(sphinx
+ WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/doc
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${SPHINX_PATH} -b html ${CMAKE_CURRENT_SOURCE_DIR}/doc ${CMAKE_CURRENT_BINARY_DIR}/sphinx
+ DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/gudhi.so"
+ COMMENT "${GUDHI_SPHINX_MESSAGE}" VERBATIM)
+
+ add_test(NAME sphinx_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${SPHINX_PATH} -b doctest ${CMAKE_CURRENT_SOURCE_DIR}/doc ${CMAKE_CURRENT_BINARY_DIR}/doctest)
+
+ endif(SPHINX_PATH AND NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
endif(CYTHON_FOUND)
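The new `add_gudhi_py_test` helper registers a py.test run over `test/<name>.py`. A hypothetical sketch of such a module, assuming the documented gudhi Python API, which `add_gudhi_py_test(test_simplex_tree)` would then execute with `pytest -p no:cacheprovider`:

    import gudhi

    def test_insert_and_count():
        st = gudhi.SimplexTree()
        assert st.insert([0, 1], filtration=0.5)  # inserting a new simplex returns True
        assert st.num_vertices() == 2
        assert st.num_simplices() == 3  # two vertices plus one edge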
diff --git a/src/cython/cython/periodic_cubical_complex.pyx b/src/cython/cython/periodic_cubical_complex.pyx
index 581c7b69..3025f125 100644
--- a/src/cython/cython/periodic_cubical_complex.pyx
+++ b/src/cython/cython/periodic_cubical_complex.pyx
@@ -33,7 +33,7 @@ __license__ = "GPL v3"
cdef extern from "Cubical_complex_interface.h" namespace "Gudhi":
cdef cppclass Periodic_cubical_complex_base_interface "Gudhi::Cubical_complex::Cubical_complex_interface<Gudhi::cubical_complex::Bitmap_cubical_complex_periodic_boundary_conditions_base<double>>":
- Periodic_cubical_complex_base_interface(vector[unsigned] dimensions, vector[double] top_dimensional_cells)
+ Periodic_cubical_complex_base_interface(vector[unsigned] dimensions, vector[double] top_dimensional_cells, vector[bool] periodic_dimensions)
Periodic_cubical_complex_base_interface(string perseus_file)
int num_simplices()
int dimension()
@@ -58,7 +58,7 @@ cdef class PeriodicCubicalComplex:
# Fake constructor that does nothing but documenting the constructor
def __init__(self, dimensions=None, top_dimensional_cells=None,
- perseus_file=''):
+ periodic_dimensions=None, perseus_file=''):
"""PeriodicCubicalComplex constructor from dimensions and
top_dimensional_cells or from a Perseus-style file name.
@@ -66,6 +66,8 @@ cdef class PeriodicCubicalComplex:
:type dimensions: list of int
:param top_dimensional_cells: A list of cells filtration values.
:type top_dimensional_cells: list of double
+        :param periodic_dimensions: A list of booleans giving, for each dimension, whether it is periodic.
+ :type periodic_dimensions: list of boolean
Or
@@ -75,10 +77,10 @@ cdef class PeriodicCubicalComplex:
# The real cython constructor
def __cinit__(self, dimensions=None, top_dimensional_cells=None,
- perseus_file=''):
- if (dimensions is not None) and (top_dimensional_cells is not None) and (perseus_file is ''):
- self.thisptr = new Periodic_cubical_complex_base_interface(dimensions, top_dimensional_cells)
- elif (dimensions is None) and (top_dimensional_cells is None) and (perseus_file is not ''):
+ periodic_dimensions=None, perseus_file=''):
+ if (dimensions is not None) and (top_dimensional_cells is not None) and (periodic_dimensions is not None) and (perseus_file is ''):
+ self.thisptr = new Periodic_cubical_complex_base_interface(dimensions, top_dimensional_cells, periodic_dimensions)
+ elif (dimensions is None) and (top_dimensional_cells is None) and (periodic_dimensions is None) and (perseus_file is not ''):
if os.path.isfile(perseus_file):
self.thisptr = new Periodic_cubical_complex_base_interface(str.encode(perseus_file))
else:
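A short sketch of the extended `PeriodicCubicalComplex` constructor documented above, assuming the gudhi Python module built from this tree:

    import gudhi

    # A 2 x 3 grid of top-dimensional cells, periodic in the first dimension only.
    pcc = gudhi.PeriodicCubicalComplex(dimensions=[2, 3],
                                       top_dimensional_cells=[0., 1., 2., 3., 4., 5.],
                                       periodic_dimensions=[True, False])
    print(pcc.persistence())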
diff --git a/src/cython/cython/persistence_graphical_tools.py b/src/cython/cython/persistence_graphical_tools.py
index a984633e..fb837e29 100755
--- a/src/cython/cython/persistence_graphical_tools.py
+++ b/src/cython/cython/persistence_graphical_tools.py
@@ -1,11 +1,12 @@
import matplotlib.pyplot as plt
import numpy as np
+import os
"""This file is part of the Gudhi Library. The Gudhi library
(Geometric Understanding in Higher Dimensions) is a generic C++
library for computational topology.
- Author(s): Vincent Rouvreau
+ Author(s): Vincent Rouvreau, Bertrand Michel
Copyright (C) 2016 INRIA
@@ -23,15 +24,17 @@ import numpy as np
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
-__author__ = "Vincent Rouvreau"
+__author__ = "Vincent Rouvreau, Bertrand Michel"
__copyright__ = "Copyright (C) 2016 INRIA"
__license__ = "GPL v3"
-def __min_birth_max_death(persistence):
+def __min_birth_max_death(persistence, band_boot=0.):
"""This function returns (min_birth, max_death) from the persistence.
:param persistence: The persistence to plot.
:type persistence: list of tuples(dimension, tuple(birth, death)).
+    :param band_boot: bootstrap band (added to the maximum death value).
+    :type band_boot: float.
:returns: (float, float) -- (min_birth, max_death).
"""
# Look for minimum birth date and maximum death date for plot optimisation
@@ -45,6 +48,8 @@ def __min_birth_max_death(persistence):
max_death = float(interval[1][0])
if float(interval[1][0]) < min_birth:
min_birth = float(interval[1][0])
+ if band_boot > 0.:
+ max_death += band_boot
return (min_birth, max_death)
"""
@@ -59,7 +64,7 @@ def show_palette_values(alpha=0.6):
:param alpha: alpha value in [0.0, 1.0] for horizontal bars (default is 0.6).
:type alpha: float.
- :returns: plot -- An horizontal bar plot of dimensions color.
+    :returns: plot -- A bar plot of the dimension palette values.
"""
colors = []
for color in palette:
@@ -70,18 +75,38 @@ def show_palette_values(alpha=0.6):
plt.barh(y_pos, y_pos + 1, align='center', alpha=alpha, color=colors)
plt.ylabel('Dimension')
plt.title('Dimension palette values')
+ return plt
- plt.show()
-
-def plot_persistence_barcode(persistence, alpha=0.6):
+def plot_persistence_barcode(persistence=[], persistence_file='', alpha=0.6, max_barcodes=0):
"""This function plots the persistence bar code.
:param persistence: The persistence to plot.
:type persistence: list of tuples(dimension, tuple(birth, death)).
+    :param persistence_file: The name of a persistence file to read (it overrides persistence if both are set).
+ :type persistence_file: string
:param alpha: alpha value in [0.0, 1.0] for horizontal bars (default is 0.6).
:type alpha: float.
+    :param max_barcodes: maximum number of barcodes to be displayed
+        (persistence is sorted by lifetime when max_barcodes is set)
+ :type max_barcodes: int.
:returns: plot -- An horizontal bar plot of persistence.
"""
+ if persistence_file is not '':
+ if os.path.isfile(persistence_file):
+ # Reset persistence
+ persistence = []
+ diag = read_persistence_intervals_grouped_by_dimension(persistence_file=persistence_file)
+ for key in diag.keys():
+ for persistence_interval in diag[key]:
+ persistence.append((key, persistence_interval))
+ else:
+ print("file " + persistence_file + " not found.")
+ return None
+
+ if max_barcodes > 0 and max_barcodes < len(persistence):
+        # Sort by lifetime, then keep only the max_barcodes longest-lived intervals
+ persistence = sorted(persistence, key=lambda life_time: life_time[1][1]-life_time[1][0], reverse=True)[:max_barcodes]
+
(min_birth, max_death) = __min_birth_max_death(persistence)
ind = 0
delta = ((max_death - min_birth) / 10.0)
@@ -106,18 +131,40 @@ def plot_persistence_barcode(persistence, alpha=0.6):
plt.title('Persistence barcode')
# Ends plot on infinity value and starts a little bit before min_birth
plt.axis([axis_start, infinity, 0, ind])
- plt.show()
+ return plt
-def plot_persistence_diagram(persistence, alpha=0.6):
- """This function plots the persistence diagram.
+def plot_persistence_diagram(persistence=[], persistence_file='', alpha=0.6, band_boot=0., max_plots=0):
+ """This function plots the persistence diagram with an optional confidence band.
:param persistence: The persistence to plot.
:type persistence: list of tuples(dimension, tuple(birth, death)).
+    :param persistence_file: The name of a persistence file to read (it overrides persistence if both are set).
+ :type persistence_file: string
:param alpha: alpha value in [0.0, 1.0] for points and horizontal infinity line (default is 0.6).
:type alpha: float.
- :returns: plot -- An diagram plot of persistence.
+ :param band_boot: bootstrap band (not displayed if :math:`\leq` 0.)
+ :type band_boot: float.
+    :param max_plots: maximum number of intervals to be displayed
+ :type max_plots: int.
+ :returns: plot -- A diagram plot of persistence.
"""
- (min_birth, max_death) = __min_birth_max_death(persistence)
+ if persistence_file is not '':
+ if os.path.isfile(persistence_file):
+ # Reset persistence
+ persistence = []
+ diag = read_persistence_intervals_grouped_by_dimension(persistence_file=persistence_file)
+ for key in diag.keys():
+ for persistence_interval in diag[key]:
+ persistence.append((key, persistence_interval))
+ else:
+ print("file " + persistence_file + " not found.")
+ return None
+
+ if max_plots > 0 and max_plots < len(persistence):
+        # Sort by lifetime, then keep only the max_plots longest-lived intervals
+ persistence = sorted(persistence, key=lambda life_time: life_time[1][1]-life_time[1][0], reverse=True)[:max_plots]
+
+ (min_birth, max_death) = __min_birth_max_death(persistence, band_boot)
ind = 0
delta = ((max_death - min_birth) / 10.0)
# Replace infinity values with max_death + delta for diagram to be more
@@ -131,6 +178,9 @@ def plot_persistence_diagram(persistence, alpha=0.6):
plt.plot(x, x, color='k', linewidth=1.0)
plt.plot(x, [infinity] * len(x), linewidth=1.0, color='k', alpha=alpha)
plt.text(axis_start, infinity, r'$\infty$', color='k', alpha=alpha)
+ # bootstrap band
+ if band_boot > 0.:
+ plt.fill_between(x, x, x+band_boot, alpha=alpha, facecolor='red')
# Draw points in loop
for interval in reversed(persistence):
@@ -149,4 +199,4 @@ def plot_persistence_diagram(persistence, alpha=0.6):
plt.ylabel('Death')
# Ends plot on infinity value and starts a little bit before min_birth
plt.axis([axis_start, infinity, axis_start, infinity + delta])
- plt.show()
+ return plt
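Since both plotting helpers now return the matplotlib.pyplot module instead of calling show() themselves, the caller decides when to render. A minimal usage sketch of the new signatures (the diagram values and the 'diagram.pers' path are illustrative, and the functions are assumed to be re-exported from the top-level gudhi package as before):

    import gudhi

    # A hypothetical diagram: list of (dimension, (birth, death)) tuples.
    diag = [(0, (0.0, float('inf'))), (0, (0.1, 0.4)), (1, (0.2, 0.8))]

    # Barcode restricted to the two longest bars.
    plt = gudhi.plot_persistence_barcode(persistence=diag, max_barcodes=2)
    plt.show()

    # Diagram read from a persistence file, with a bootstrap band of width 0.05.
    plt = gudhi.plot_persistence_diagram(persistence_file='diagram.pers',
                                         band_boot=0.05, max_plots=1000)
    plt.show()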
diff --git a/src/cython/cython/persistence_representations_intervals.pyx b/src/cython/cython/persistence_representations_intervals.pyx
deleted file mode 100644
index c1cc347d..00000000
--- a/src/cython/cython/persistence_representations_intervals.pyx
+++ /dev/null
@@ -1,318 +0,0 @@
-from cython cimport numeric
-from libcpp.vector cimport vector
-from libcpp.utility cimport pair
-from libcpp cimport bool
-import os
-
-"""
-This file is part of the Gudhi Library. The Gudhi library
- (Geometric Understanding in Higher Dimensions) is a generic C++
- library for computational topology.
-
- Author(s): Pawel Dlotko
-
- Copyright (C) 2017 Swansea University
-
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <http://www.gnu.org/licenses/>.
-"""
-
-__author__ = "Pawel Dlotko"
-__copyright__ = "Copyright (C) 2017 Swansea University"
-__license__ = "GPL v3"
-
-
-"""
-This is a promisse that there will be a class in this file with the following
-function signature. Something like C++ predeclaration.
-According to Vincent, most of the tutorials in cython suggest to
-separate pre-declaration below with the definition of the method.
-Hovewer it seems to create problems, that is why we keep them both here.
-"""
-
-
-
-
-cdef extern from "Persistence_intervals_interface.h" namespace "Gudhi::Persistence_representations":
- cdef cppclass Persistence_intervals_interface "Gudhi::Persistence_representations::Persistence_intervals_interface":
- Persistence_intervals_interface(const char*, unsigned)
- Persistence_intervals_interface(const vector[pair[double, double]] intervals)
- pair[double, double] get_x_range() const
- pair[double, double] get_y_range() const
- vector[double] length_of_dominant_intervals(size_t where_to_cut)const
- vector[pair[double, double]] dominant_intervals(size_t where_to_cut) const
- vector[size_t] histogram_of_lengths(size_t number_of_bins) const
- vector[size_t] cumulative_histogram_of_lengths(size_t number_of_bins) const
- vector[double] characteristic_function_of_diagram(double x_min, double x_max, size_t number_of_bins)const
- vector[double] cumulative_characteristic_function_of_diagram(double x_min, double x_max, size_t number_of_bins)const
- vector[pair[double, size_t]] compute_persistent_betti_numbers()const
- double project_to_R(int number_of_function) const
- size_t number_of_projections_to_R() const
- vector[double] vectorize(int number_of_function) const
- size_t number_of_vectorize_functions() const
-
-"""
-make sure that here we call the functions from the intermediate .h file,
-with dummy names, so that later below we can use the same names of the
-functions as in C++ version.
-Over here I need to list all the functions that will be used in the file.
-So there should be a list of constructors, methors, etc.
-to separate the function, use newline. Put there only C++ signature
-"""
-
-
-#convention for python class is PersistenceIntervals instead of
-#Persistence_intervals for methods it is def num_simplices(self).
-cdef class PersistenceIntervals:
- """
- Persistence intrvals is a standard representation of persistent homology. This file provide implementation of a number of operations on persistence diagrams.
- """
- cdef Persistence_intervals_interface * thisptr
-
- #do we need a fake constructor here, as in case of bitmaps??
- #We do need it so that we have a doc for python because the
- #documentation only read from __init__, it do not read from
- #__cinit__, where __ means private memeber
- def __init__(self, vector_of_intervals=None, dimension=None,
- file_with_intervals=''):
- """Persistence interals is a standard representation of
- persistent homology. This file provide implementation of a
- number of operations on persistence diagrams.
-
- :param dimensions: A vector of birth-death pairs.
-
- Or
-
- :param Gudhi style file togethr with a dimension of birth-death
- pairs to consider.
- """
-
- #The real cython constructor
- def __cinit__(self, vector_of_intervals=None, dimension=None,
- file_with_intervals=''):
- """
- This is a constructor of a class Persistence_intervals.
- It either take text file and a positive integer, or a vector
- of pairs. In case of file, each line of the input file is
- supposed to contain two numbers of a type double
- (or convertible to double) representing the birth and the death
- of the persistence interval. If the pairs are not sorted so that
- birth <= death, then the constructor will sort then that way.
- In case of vector of pairs, it simply accept vector of pair of
- doubles.
- :param vector_of_intervals -- vector of pairs of doubles with
- birth-death pairs. None if we construct it from file.
- :type vector of pairs of doubles or None
- :param dimension -- diension of intervals to be extracted from file
- :type nonnegative integer or None
- :param file_with_intervals - a path to Gudhi style file with
- persistence interfals.
- :type string of None.
- """
- if (vector_of_intervals is None) and (file_with_intervals is not ''):
- if (dimension is not None):
- if os.path.isfile(file_with_intervals):
- self.thisptr = new Persistence_intervals_interface(file_with_intervals, dimension)
- else:
- print("file " + file_with_intervals + " not found.")
- else:
- self.thisptr = new Persistence_intervals_interface(file_with_intervals)
- elif (file_with_intervals is '') and (vector_of_intervals is not None):
- self.thisptr = new Persistence_intervals_interface(vector_of_intervals)
- else:
- print("Persistence interals can be constructed from vector of birth-death pairs, vector_of_intervals or a Gudhi-style file.")
-
- def __dealloc__(self):
- """
- destructor
- """
- if self.thisptr != NULL:
- del self.thisptr
-
- #from here on this is my try. Do we need to specify the returned type??
- #no, we do not.
- def get_x_range(self):
- """
- This procedure returns x-range of a given persistence diagram.
- """
- if self.thisptr != NULL:
- return self.thisptr.get_x_range()
-
- def get_y_range(self):
- """
- This procedure returns y-range of a given persistence diagram.
- """
- if self.thisptr != NULL:
- return self.thisptr.get_y_range()
-
- def length_of_dominant_intervals(self, where_to_cut):
- """
- Procedure that compute the vector of lengths of the dominant
- (i.e. the longest) persistence intervals. The list is
- truncated at the parameter of the call where_to_cut
- (set by default to 100).
- :param where_to_cut -- number of domiannt intervals to be returned.
- :type positive integer.
- """
- if (self.thisptr != NULL) and (where_to_cut is not None):
- return self.thisptr.length_of_dominant_intervals(where_to_cut)
- else:
- if (self.thisptr != NULL):
- return self.thisptr.dominant_intervals(100)
-
- def dominant_intervals(self, where_to_cut):
- """
- Procedure that compute the vector of the dominant (i.e. the longest)
- persistence intervals. The parameter of the procedure (set by default
- to 100) is the number of dominant intervals returned by the procedure.
- :param where_to_cut -- number of lengths of domiannt intervals to
- be returned.
- :type positive integer.
- """
- if (self.thisptr != NULL) and (where_to_cut is not None):
- return self.thisptr.dominant_intervals(where_to_cut)
- else:
- if (self.thisptr != NULL):
- return self.thisptr.dominant_intervals(100)
-
- def histogram_of_lengths(self, number_of_bins):
- """
- Procedure to compute a histogram of interval's length.
- A histogram is a block plot. The number of blocks is
- determined by the first parameter of the function
- (set by default to 10).
- For the sake of argument let us assume that the length of the
- longest interval is 1 and the number of bins is
- 10. In this case the i-th block correspond to a range between
- i-1/10 and i10. The vale of a block supported at the interval is
- the number of persistence intervals of a length between x_0
- and x_1.
- :param where_to_cut -- number of bins in the histogram.
- :type positive integer.
- """
- if (self.thisptr != NULL) and (number_of_bins is not None):
- return self.thisptr.histogram_of_lengths(number_of_bins)
- else:
- if (self.thisptr != NULL):
- return self.thisptr.dominant_intervals(100)
-
- def cumulative_histogram_of_lengths(self, number_of_bins):
- """
- Based on a histogram of intervals lengths computed by the
- function histogram_of_lengths H the procedure below
- computes the cumulative histogram. The i-th position
- of the resulting histogram
- is the sum of values of H for the positions from 0 to i.
- :param where_to_cut -- number of bins in the histogram.
- :type positive integer.
- """
- if (self.thisptr != NULL) and (number_of_bins is not None):
- return self.thisptr.cumulative_histogram_of_lengths(number_of_bins)
- else:
- if (self.thisptr != NULL):
- return self.thisptr.cumulative_histogram_of_lengths(10)
-
- def characteristic_function_of_diagram(self, x_min, x_max, number_of_bins):
- """
- In this procedure we assume that each barcode is a characteristic
- function of a hight equal to its length. The persistence diagram
- is a sum of such a functions. The procedure below construct a
- function being a sum of the characteristic functions of
- persistence intervals. The first two parameters are the range in
- which the function is to be computed and the last parameter is
- the number of bins in the discretization of the interval
- [_min,_max]
- :param x_min -- Begin of range of function.
- :type real number
- :param x_max -- End of range of function.
- :type real number
- :param number_of_bins -- Number of bins in characteristic function.
- :type positive integer
- """
- if (self.thisptr != NULL) and (x_min is not None) and (x_max is not None) and (number_of_bins is not None):
- return self.thisptr.characteristic_function_of_diagram(x_min, x_max , number_of_bins)
- else:
- if (self.thisptr != NULL) and (x_min is not None) and (x_max is not None):
- return self.thisptr.characteristic_function_of_diagram(x_min, x_max, 10)
-
- def cumulative_characteristic_function_of_diagram(self, x_min, x_max, number_of_bins):
- """
- Cumulative version of the function characteristic_function_of_diagram.
- :param x_min -- Begin of range of function.
- :type real number
- :param x_max -- End of range of function.
- :type real number
- :param number_of_bins -- Number of bins in characteristic function.
- :type positive integer
- """
- if (self.thisptr != NULL) and (x_min is not None) and (x_max is not None) and (number_of_bins is not None):
- return self.thisptr.cumulative_characteristic_function_of_diagram(x_min, x_max, number_of_bins)
- else:
- if (self.thisptr != NULL) and (x_min is not None) and (x_max is not None):
- return self.thisptr.cumulative_characteristic_function_of_diagram(x_min, x_max, 10)
-
- def compute_persistent_betti_numbers(self):
- """
- Compute the function of persistence Betti numbers. The returned
- value is a vector of pair. First element of each
- pair is a place where persistence Betti numbers change.
- Second element of each pair is the value of Persistence Betti
- numbers at that point.
- """
- if self.thisptr != NULL:
- return self.thisptr.compute_persistent_betti_numbers()
-
- def project_to_R(self, number_of_function):
- """
- This is a simple function projecting the persistence intervals
- to a real number. The function we use here is a sum
- of squared lengths of intervals. It can be naturally interpreted as
- sum of step function, where the step hight it equal to the length
- of the interval. At the moment this function is not tested, since
- it is quite likely to be changed in the future. Given this, when
- using it, keep in mind that it
- will be most likely changed in the next versions.
- :param number of projection
- :type positive integer.
- """
- if (self.thisptr != NULL) and (number_of_function is not None):
- return self.thisptr.project_to_R(number_of_function)
-
- def number_of_projections_to_R(self):
- """
- The function gives the number of possible projections to R.
- This function is required by the
- Real_valued_topological_data concept.
- """
- if self.thisptr != NULL:
- return self.thisptr.number_of_projections_to_R()
-
- def vectorize(self, number_of_function):
- """
- Return a family of vectors obtained from the persistence diagram.
- The i-th vector consist of the length of i
- dominant persistence intervals.
- :param number of function to vectorizes
- :type positive integer.
- """
- if (self.thisptr != NULL) and (number_of_function is not None):
- return self.thisptr.vectorize(number_of_function)
-
- def number_of_vectorize_functions(self):
- """
- This function return the number of functions that allows
- vectorization of a persistence diagram. It is required
- in a concept Vectorized_topological_data
- """
- if (self.thisptr != NULL):
- return self.thisptr.number_of_vectorize_functions()
diff --git a/src/cython/cython/persistence_representations_landscapes.pyx b/src/cython/cython/persistence_representations_landscapes.pyx
deleted file mode 100644
index 39b62439..00000000
--- a/src/cython/cython/persistence_representations_landscapes.pyx
+++ /dev/null
@@ -1,378 +0,0 @@
-from cython cimport numeric
-from libcpp.vector cimport vector
-from libcpp.utility cimport pair
-from libcpp cimport bool
-from cython.operator cimport dereference as deref
-import os
-import sys
-
-"""This file is part of the Gudhi Library. The Gudhi library
- (Geometric Understanding in Higher Dimensions) is a generic C++
- library for computational topology.
-
- Author(s): Pawel Dlotko
-
- Copyright (C) 2017 Swansea University
-
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <http://www.gnu.org/licenses/>.
-"""
-
-__author__ = "Pawel Dlotko"
-__copyright__ = "Copyright (C) 2017 Swansea University"
-__license__ = "GPL v3"
-
-
-
-cdef extern from "Persistence_landscape_interface.h" namespace "Gudhi::Persistence_representations":
- cdef cppclass Persistence_landscape_interface "Gudhi::Persistence_representations::Persistence_landscape_interface":
- Persistence_landscape_interface()
- #Persistence_landscape_interface(vector[pair[double, double]], bool, size_t)
- #Persistence_landscape_interface(const char*, size_t , size_t)
- void load_landscape_from_file(const char*)
- void print_to_file(const char*)const
- double compute_integral_of_landscape()const
- double compute_integral_of_a_level_of_a_landscape(size_t)const
- double compute_integral_of_landscape(double)const
- double compute_value_at_a_given_point(unsigned , double)const
- double compute_maximum()const
- double compute_minimum()const
- double compute_norm_of_landscape(double)
- Persistence_landscape_interface* new_abs_interface()
- size_t size()const
- double find_max(unsigned)const
- double project_to_R(int)const
- size_t number_of_projections_to_R()const
- vector[double] vectorize(int)const
- size_t number_of_vectorize_functions()const
- void compute_average(const vector[Persistence_landscape_interface*]&)
- void new_compute_average(const vector[Persistence_landscape_interface*]&)
- double distance(const Persistence_landscape_interface&, double)
- double compute_scalar_product(const Persistence_landscape_interface&)const
- pair[double, double] get_y_range(size_t)const
- #**************
- #static methods
- @staticmethod
- Persistence_landscape_interface* construct_from_file( const char*, size_t, size_t)
- @staticmethod
- Persistence_landscape_interface* construct_from_vector_of_pairs( const vector[pair[double, double]], size_t)
- #***************
-
-
-
-
-#convention for python class is PersistenceIntervals instead of Persistence_intervals
-#for methods it is def num_simplices(self).
-cdef class PersistenceLandscapes:
-
- cdef Persistence_landscape_interface* thisptr
-
-
-
-#Can we have only one constructor, or can we have more
- def __init__(self, vector_of_intervals=None, dimension=None, file_with_intervals='',number_of_levels=sys.maxsize):
- """
- This is a class implementing persistence landscapes data structures.
- For theoretical description, please consult <i>Statistical topological
- data analysis using persistence landscapes</i>\cite bubenik_landscapes_2015,
- and for details of algorithms, A persistence landscapes toolbox for
- topological statistics</i>\cite bubenik_dlotko_landscapes_2016.
-
- Persistence landscapes allow vectorization, computations of distances,
- computations of projections to Real, computations of averages and
- scalar products. Therefore they implement suitable interfaces. It
- implements the following concepts: Vectorized_topological_data,
- Topological_data_with_distances, Real_valued_topological_data,
- Topological_data_with_averages, Topological_data_with_scalar_product
-
- Note that at the moment, due to rounding errors during the construction
- of persistence landscapes, elements which are different by 0.000005 are
- considered the same. If the scale in your persistence diagrams is
- comparable to this value, please rescale them before use this code.
- """
-
-
-
- def __cinit__(self, vector_of_intervals=None, dimension=None, file_with_intervals='',number_of_levels=sys.maxsize):
- """
- This is a constructor of a class PersistenceLandscapes.
- It either take text file and a positive integer, or a vector
- of pairs. The last optional parameter is the nunmer of levels of
- the landscapes to be generated. If not set, all the levels will
- be generated. In case of file, each line of the input file is,
- supposed to contain two numbers of a type double
- (or convertible to double) representing the birth and the death
- of the persistence interval. If the pairs are not sorted so that
- birth <= death, then the constructor will sort then that way.
- In case of vector of pairs, it simply accept vector of pair of
- doubles.
- :param vector_of_intervals -- vector of pairs of doubles with
- birth-death pairs. None if we construct it from file.
- :type vector of pairs of doubles or None
- :param dimension -- diension of intervals to be extracted from file
- :type nonnegative integer or None
- :param file_with_intervals - a path to Gudhi style file with
- persistence interfals.
- :type string of None.
- :param number_of_levels - number of levels of landscape to be
- generated (if not set, all of the are generated).
- :type positive integer
- """
- if (vector_of_intervals is None) and (file_with_intervals is not ''):
- if (dimension is not None):
- if os.path.isfile(file_with_intervals):
- #self.thisptr = new Persistence_landscape_interface(file_with_intervals, dimension, number_of_levels)
- self.thisptr = Persistence_landscape_interface.construct_from_file(file_with_intervals, dimension, number_of_levels)
- else:
- print("file " + file_with_intervals + " not found.")
- else:
- #self.thisptr = new Persistence_landscape_interface(file_with_intervals, number_of_levels)
- self.thisptr = Persistence_landscape_interface.construct_from_file(file_with_intervals,0, number_of_levels)
- elif (file_with_intervals is '') and (vector_of_intervals is not None):
- #self.thisptr = new Persistence_landscape_interface(vector_of_intervals, true, number_of_levels)
- self.thisptr = Persistence_landscape_interface.construct_from_vector_of_pairs(vector_of_intervals, number_of_levels)
- else:
- print("Persistence interals can be constructed from vector of birth-death pairs, vector_of_intervals or a Gudhi-style file.")
- self.thisptr = new Persistence_landscape_interface()
-
- def __dealloc__(self):
- """
- destructor
- """
- if self.thisptr != NULL:
- del self.thisptr
-
- def load_landscape_from_file(self,filename):
- """
- This procedure loads a landscape from file. It erase all the data
- that was previously stored in this landscape.
- :param Name of the file.
- :type String
- """
- if ( self.thisptr != NULL ) and ( filename is not None ):
- self.thisptr.load_landscape_from_file(filename)
-
- def print_to_file(self,filename) :
- """
- The procedure stores a landscape to a file. The file can be later
- used by a procedure load_landscape_from_file.
- :param Name of the file.
- :type String
- """
- if ( self.thisptr != NULL ) and ( filename is not None ):
- self.thisptr.print_to_file(filename)
-
- def compute_integral_of_landscape(self):
- """
- This function compute integral of the landscape (defined formally as
- sum of integrals on R of all landscape functions)
- """
- if ( self.thisptr != NULL ):
- return self.thisptr.compute_integral_of_landscape()
-
- def compute_integral_of_a_level_of_a_landscape(self,level):
- """
- This function compute integral of the 'level'-level of a landscape.
- :param Level of the landscape, n, so that the integral of lambda_n is
- computed
- :type nonnegative integer.
- """
- if ( self.thisptr != NULL ) and ( level is not None ):
- return self.thisptr.compute_integral_of_landscape(level)
-
- def compute_integral_of_landscape(self,p):
- """
- This function compute integral of the landscape p-th power of a
- landscape (defined formally as sum of integrals on R of p-th powers
- of all landscape functions)
- :param An positive real p such that the integral of p-th power of
- landscape is computed.
- :type Real value
- """
- if ( self.thisptr != NULL ) and ( p is not None ):
- return self.thisptr.compute_integral_of_landscape(p)
-
- def compute_value_at_a_given_point(self, level, x):
- """
- A function that computes the value of a landscape at a given point.
- The parameters of the function are: unsigned
- level and double x.
- The procedure will compute the value of the level-landscape at the
- point x.
- param: level n of lanscape (positive integer) and real number x.
- The value \lambda_n(x) is
- type: nonnegative integer
- :param A real number x. The value \lambda_n(x) is computed.
- :type real
- """
- if ( self.thisptr != NULL ) and ( level is not None ) and ( x is not None ):
- return self.thisptr.compute_value_at_a_given_point(level,x)
-
- def compute_maximum( self ):
- """
- Computations of maximum (y) value of landscape.
- """
- if ( self.thisptr != NULL ):
- return self.thisptr.compute_maximum()
-
- def compute_minimum( self ):
- """
- Computations of minimum (y) value of landscape.
- """
- if ( self.thisptr != NULL ):
- return self.thisptr.compute_minimum()
-
- def compute_norm_of_landscape(self,i):
- """
- Computations of a \f$L^i\f$ norm of landscape, where i is the input parameter.
- :type integer.
- :param i
- """
- if ( self.thisptr != NULL ) and ( i is not None ):
- return self.thisptr.compute_norm_of_landscape(i)
-
- def abs( self ):
- """
- Function to compute absolute value of a PL function. The representation
- of persistence landscapes allow to store
- general PL-function. When computing distance between two landscapes,
- we compute difference between
- them. In this case, a general PL-function with negative value can
- appear as a result. Then in order to compute
- distance, we need to take its absolute value. This is the purpose of
- this procedure.
- """
- if ( self.thisptr != NULL ):
- abs_ = PersistenceLandscapes()
- abs_.thisptr = self.thisptr.new_abs_interface()
- return abs_
-
-
- def size( self ):
- """
- Computes the number of landscape functions.
- """
- if ( self.thisptr != NULL ):
- return self.thisptr.size()
-
- def find_max(self, lambda_):
- """
- Compute maximal value of lambda-level landscape.
- :param level of landscape
- :type nonnegative integer
- """
- if ( self.thisptr != NULL ) and ( lambda_ is not None ):
- return self.thisptr.find_max(lambda_)
-
- def project_to_R(self, number_of_function):
- """
- The number of projections to R is defined to the number of nonzero
- landscape functions. I-th projection is an
- integral of i-th landscape function over whole R.
- This function is required by the Real_valued_topological_data concept.
- At the moment this function is not tested, since it is quite likely
- to be changed in the future. Given this, when
- using it, keep in mind that it
- will be most likely changed in the next versions.
- :param number of function
- :type nonnegative integer
- """
- if ( self.thisptr != NULL ) and ( number_of_function is not None ):
- return self.thisptr.project_to_R(number_of_function)
-
- def number_of_projections_to_R(self):
- """
- The function gives the number of possible projections to R. This
- function is required by the
- Real_valued_topological_data concept
- """
- if ( self.thisptr != NULL ):
- return self.thisptr.number_of_projections_to_R()
-
- def vectorize(self, number_of_function):
- """
- This function produce a vector of doubles based on a landscape. It
- is required in a concept
- Vectorized_topological_data
- :param number of function
- :type nonnegative intege
- """
- if ( self.thisptr != NULL ) and ( number_of_function is not None ):
- return self.thisptr.vectorize(number_of_function)
-
- def number_of_vectorize_functions(self):
- """
- The number of projections to R is defined to the number of nonzero
- landscape functions. I-th projection is an
- integral of i-th landscape function over whole R.
- This function is required by the Real_valued_topological_data concept.
- At the moment this function is not tested, since it is quite likely
- to be changed in the future. Given this, when
- using it, keep in mind that it
- will be most likely changed in the next versions
- """
- if ( self.thisptr != NULL ):
- return self.thisptr.number_of_vectorize_functions()
-
- def compute_average( self,to_average=[] ):
- """
- A function to compute averaged persistence landscape, based on vector
- of persistence landscapes.
- This function is required by Topological_data_with_averages concept.
- :param vector of persistence landscapes to average
- :type vectors of references to persistence landscapes
- """
- #TODO -- add a check if all objects in the to_average are of the same type.
- cdef vector[Persistence_landscape_interface*] cpp_list
- if ( self.thisptr != NULL ) and ( to_average is not None ):
- for elt in to_average:
- cpp_list.push_back((<PersistenceLandscapes>elt).thisptr)
- self.thisptr.new_compute_average( cpp_list )
-
-
-
- def distance(self, PersistenceLandscapes second, power):
- """
- A function to compute distance between persistence landscape.
- The parameter of this function is a Persistence_landscape.
- This function is required in Topological_data_with_distances concept.
- For max norm distance, set power to std::numeric_limits<double>::max()
- :param the landascape to compute distance to
- :type PersistenceLandscape
- """
- if ( self.thisptr != NULL ) and ( second is not None ) and ( power is not None ):
- return self.thisptr.distance( deref(second.thisptr), power)
-
- def compute_scalar_product(self, PersistenceLandscapes second):
- """
- A function to compute scalar product of persistence landscapes.
- The parameter of this function is a Persistence_landscape.
- This function is required in Topological_data_with_scalar_product concept.
- :param the landascape to compute scalar product with
- :type PersistenceLandscape
- """
- if ( self.thisptr != NULL ) and ( second is not None ):
- return self.thisptr.compute_scalar_product( deref(second.thisptr) )
-
- def get_y_range(self, level):
- """
- This procedure returns y-range of a given level persistence landscape.
- If a default value is used, the y-range
- of 0th level landscape is given (and this range contains the ranges
- of all other landscapes).
- :param The level of lrandscape
- :type nonnegative integer
- """
- if ( self.thisptr != NULL ):
- return self.thisptr.get_y_range(level)
-
diff --git a/src/cython/cython/persistence_representations_landscapes_on_grid.pyx b/src/cython/cython/persistence_representations_landscapes_on_grid.pyx
deleted file mode 100644
index f69496cd..00000000
--- a/src/cython/cython/persistence_representations_landscapes_on_grid.pyx
+++ /dev/null
@@ -1,388 +0,0 @@
-from cython cimport numeric
-from libcpp.vector cimport vector
-from libcpp.utility cimport pair
-from libcpp cimport bool
-from cython.operator cimport dereference as deref
-import os
-import sys
-
-"""This file is part of the Gudhi Library. The Gudhi library
- (Geometric Understanding in Higher Dimensions) is a generic C++
- library for computational topology.
-
- Author(s): Pawel Dlotko
-
- Copyright (C) 2017 Swansea University
-
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <http://www.gnu.org/licenses/>.
-"""
-
-__author__ = "Pawel Dlotko"
-__copyright__ = "Copyright (C) 2017 Swansea University"
-__license__ = "GPL v3"
-
-
-
-cdef extern from "Persistence_landscape_on_grid_interface.h" namespace "Gudhi::Persistence_representations":
- cdef cppclass Persistence_landscape_on_grid_interface "Gudhi::Persistence_representations::Persistence_landscape_on_grid_interface":
- Persistence_landscape_on_grid_interface()
- Persistence_landscape_on_grid_interface(vector[pair[double, double]], double grid_min_, double grid_max_, size_t number_of_points_)
- Persistence_landscape_on_grid_interface(vector[pair[double, double]], double grid_min_, double grid_max_, size_t number_of_points_, unsigned number_of_levels_of_landscape)
- Persistence_landscape_on_grid_interface(const char* filename, double grid_min_, double grid_max_, size_t number_of_points_,
- unsigned number_of_levels_of_landscape, unsigned)
- Persistence_landscape_on_grid_interface(const char* filename, double grid_min_, double grid_max_, size_t number_of_points_,
- unsigned dimension_)
- Persistence_landscape_on_grid_interface(const char* filename, size_t number_of_points, unsigned number_of_levels_of_landscape, unsigned dimension)
- Persistence_landscape_on_grid_interface(const char* filename, size_t number_of_points, unsigned dimension)
- void load_landscape_from_file(const char*)
- void print_to_file(const char*)const
- double compute_integral_of_landscape()const
- double compute_integral_of_a_level_of_a_landscape(size_t)const
- double compute_integral_of_landscape(double)const
- double compute_value_at_a_given_point(unsigned , double)const
- double compute_maximum()const
- double compute_minimum()const
- double compute_norm_of_landscape(double)
- Persistence_landscape_on_grid_interface* new_abs_interface()
- size_t size()const
- double find_max(unsigned)const
- double project_to_R(int)const
- size_t number_of_projections_to_R()const
- vector[double] vectorize(int)const
- size_t number_of_vectorize_functions()const
- void compute_average(const vector[Persistence_landscape_on_grid_interface*]&)
- void new_compute_average(const vector[Persistence_landscape_on_grid_interface*]&)
- double distance(Persistence_landscape_on_grid_interface&, double)
- double compute_scalar_product(const Persistence_landscape_on_grid_interface&)const
- pair[double, double] get_y_range(size_t)const
-
-
-
-
-#convention for python class is PersistenceIntervals instead of Persistence_intervals
-#for methods it is def num_simplices(self).
-cdef class PersistenceLandscapesOnGrid:
-
- cdef Persistence_landscape_on_grid_interface* thisptr
-
-#Can we have only one constructor, or can we have more
- def __init__(self, grid_min_, grid_max_,number_of_points_, file_with_intervals='',
- vector_of_intervals=None, dimension=None,number_of_levels=sys.maxsize):
- """
- This is a class implementing persistence landscapes data structures.
- For theoretical description, please consult <i>Statistical topological
- data analysis using persistence landscapes</i>\cite bubenik_landscapes_2015,
- and for details of algorithms, A persistence landscapes toolbox for
- topological statistics</i>\cite bubenik_dlotko_landscapes_2016.
-
- Persistence landscapes allow vectorization, computations of distances,
- computations of projections to Real, computations of averages and
- scalar products. Therefore they implement suitable interfaces. It
- implements the following concepts: Vectorized_topological_data,
- Topological_data_with_distances, Real_valued_topological_data,
- Topological_data_with_averages, Topological_data_with_scalar_product
-
- Note that at the moment, due to rounding errors during the construction
- of persistence landscapes, elements which are different by 0.000005 are
- considered the same. If the scale in your persistence diagrams is
- comparable to this value, please rescale them before use this code.
- """
-
-
-
- def __cinit__(self, grid_min_, grid_max_,number_of_points_, file_with_intervals='',
- vector_of_intervals=None, dimension=None,number_of_levels=sys.maxsize):
- """
- This is a constructor of a class PersistenceLandscapes.
- It either take text file and a positive integer, or a vector
- of pairs. The last optional parameter is the nunmer of levels of
- the landscapes to be generated. If not set, all the levels will
- be generated. In case of file, each line of the input file is,
- supposed to contain two numbers of a type double
- (or convertible to double) representing the birth and the death
- of the persistence interval. If the pairs are not sorted so that
- birth <= death, then the constructor will sort then that way.
- In case of vector of pairs, it simply accept vector of pair of
- doubles.
- :param vector_of_intervals -- vector of pairs of doubles with
- birth-death pairs. None if we construct it from file.
- :type vector of pairs of doubles or None
- :param dimension -- diension of intervals to be extracted from file
- :type nonnegative integer or None
- :param file_with_intervals - a path to Gudhi style file with
- persistence interfals.
- :type string of None.
- :param grid_min_ - minumum of the grid to be cosntructed.
- :type double
- :param grid_max_ - maximum of the grid to be cosntructed.
- :type double
- :param number_of_points_ - number of points in the grid.
- :type positive integer
- :param number_of_levels - number of levels of landscape to be
- generated (if not set, all of the are generated).
- :type positive integer
- """
- if ( (grid_min_ is None) or ( grid_max_ is None ) or ( number_of_points_ is None ) ):
- print "Please provide parameters of the grid in order to construct the persistence landscape on a grid."
- else:
- if (vector_of_intervals is None) and (file_with_intervals is not ''):
- if (dimension is not None):
- if os.path.isfile(file_with_intervals):
- self.thisptr = new Persistence_landscape_on_grid_interface(file_with_intervals, dimension, grid_min_, grid_max_,number_of_points_, number_of_levels)
- else:
- print("file " + file_with_intervals + " not found.")
- else:
- self.thisptr = new Persistence_landscape_on_grid_interface(file_with_intervals,0, grid_min_, grid_max_,number_of_points_, number_of_levels)
- else:
- if (file_with_intervals is '') and (vector_of_intervals is not None):
- self.thisptr = new Persistence_landscape_on_grid_interface(vector_of_intervals, grid_min_, grid_max_, number_of_points_,number_of_levels)
- #Persistence_landscape_on_grid_interface(vector_of_intervals, grid_min_, grid_max_,number_of_points_, number_of_levels)
- else:
- print("Persistence interals can be constructed from vector of birth-death pairs, vector_of_intervals or a Gudhi-style file.")
-
-
-#Persistence_landscape_on_grid_interface(const vector[pair[double, double]], double grid_min_, double grid_max_, size_t number_of_points_, unsigned number_of_levels_of_landscape)
-
-
-
- def __dealloc__(self):
- """
- destructor
- """
- if self.thisptr != NULL:
- del self.thisptr
-
- def load_landscape_from_file(self,filename):
- """
- This procedure loads a landscape from file. It erase all the data
- that was previously stored in this landscape.
- :param Name of the file.
- :type String
- """
- if ( self.thisptr != NULL ) and ( filename is not None ):
- self.thisptr.load_landscape_from_file(filename)
-
- def print_to_file(self,filename) :
- """
- The procedure stores a landscape to a file. The file can be later
- used by a procedure load_landscape_from_file.
- :param Name of the file.
- :type String
- """
- if ( self.thisptr != NULL ) and ( filename is not None ):
- self.thisptr.print_to_file(filename)
-
- def compute_integral_of_landscape(self):
- """
- This function compute integral of the landscape (defined formally as
- sum of integrals on R of all landscape functions)
- """
- if ( self.thisptr != NULL ):
- return self.thisptr.compute_integral_of_landscape()
-
- def compute_integral_of_a_level_of_a_landscape(self,level):
- """
- This function compute integral of the 'level'-level of a landscape.
- :param Level of the landscape, n, so that the integral of lambda_n is
- computed
- :type nonnegative integer.
- """
- if ( self.thisptr != NULL ) and ( level is not None ):
- return self.thisptr.compute_integral_of_landscape(level)
-
- def compute_integral_of_landscape(self,p):
- """
- This function compute integral of the landscape p-th power of a
- landscape (defined formally as sum of integrals on R of p-th powers
- of all landscape functions)
- :param An positive real p such that the integral of p-th power of
- landscape is computed.
- :type Real value
- """
- if ( self.thisptr != NULL ) and ( p is not None ):
- return self.thisptr.compute_integral_of_landscape(p)
-
- def compute_value_at_a_given_point(self, level, x):
- """
- A function that computes the value of a landscape at a given point.
- The parameters of the function are: unsigned
- level and double x.
- The procedure will compute the value of the level-landscape at the
- point x.
- param: level n of lanscape (positive integer) and real number x.
- The value \lambda_n(x) is
- type: nonnegative integer
- :param A real number x. The value \lambda_n(x) is computed.
- :type real
- """
- if ( self.thisptr != NULL ) and ( level is not None ) and ( x is not None ):
- return self.thisptr.compute_value_at_a_given_point(level,x)
-
- def compute_maximum( self ):
- """
- Computations of maximum (y) value of landscape.
- """
- if ( self.thisptr != NULL ):
- return self.thisptr.compute_maximum()
-
- def compute_minimum( self ):
- """
- Computations of minimum (y) value of landscape.
- """
- if ( self.thisptr != NULL ):
- return self.thisptr.compute_minimum()
-
- def compute_norm_of_landscape(self,i):
- """
- Computations of a \f$L^i\f$ norm of landscape, where i is the input parameter.
- :type integer.
- :param i
- """
- if ( self.thisptr != NULL ) and ( i is not None ):
- return self.thisptr.compute_norm_of_landscape(i)
-
- def abs( self ):
- """
- Function to compute absolute value of a PL function. The representation
- of persistence landscapes allow to store
- general PL-function. When computing distance between two landscapes,
- we compute difference between
- them. In this case, a general PL-function with negative value can
- appear as a result. Then in order to compute
- distance, we need to take its absolute value. This is the purpose of
- this procedure.
- """
- if ( self.thisptr != NULL ):
- abs_ = PersistenceLandscapesOnGrid()
- abs_.thisptr = self.thisptr.new_abs_interface()
- return abs_
-
-
- def size( self ):
- """
- Computes the number of landscape functions.
- """
- if ( self.thisptr != NULL ):
- return self.thisptr.size()
-
- def find_max(self, lambda_):
- """
- Compute maximal value of lambda-level landscape.
- :param level of landscape
- :type nonnegative integer
- """
- if ( self.thisptr != NULL ) and ( lambda_ is not None ):
- return self.thisptr.find_max(lambda_)
-
- def project_to_R(self, number_of_function):
- """
- The number of projections to R is defined to the number of nonzero
- landscape functions. I-th projection is an
- integral of i-th landscape function over whole R.
- This function is required by the Real_valued_topological_data concept.
- At the moment this function is not tested, since it is quite likely
- to be changed in the future. Given this, when
- using it, keep in mind that it
- will be most likely changed in the next versions.
- :param number of function
- :type nonnegative integer
- """
- if ( self.thisptr != NULL ) and ( number_of_function is not None ):
- return self.thisptr.project_to_R(number_of_function)
-
- def number_of_projections_to_R(self):
- """
- The function gives the number of possible projections to R. This
- function is required by the
- Real_valued_topological_data concept
- """
- if ( self.thisptr != NULL ):
- return self.thisptr.number_of_projections_to_R()
-
- def vectorize(self, number_of_function):
- """
- This function produce a vector of doubles based on a landscape. It
- is required in a concept
- Vectorized_topological_data
- :param number of function
- :type nonnegative intege
- """
- if ( self.thisptr != NULL ) and ( number_of_function is not None ):
- return self.thisptr.vectorize(number_of_function)
-
- def number_of_vectorize_functions(self):
- """
- The number of projections to R is defined to the number of nonzero
- landscape functions. I-th projection is an
- integral of i-th landscape function over whole R.
- This function is required by the Real_valued_topological_data concept.
- At the moment this function is not tested, since it is quite likely
- to be changed in the future. Given this, when
- using it, keep in mind that it
- will be most likely changed in the next versions
- """
- if ( self.thisptr != NULL ):
- return self.thisptr.number_of_vectorize_functions()
-
- def compute_average( self,to_average=[] ):
- """
- A function to compute averaged persistence landscape, based on vector
- of persistence landscapes.
- This function is required by Topological_data_with_averages concept.
- :param vector of persistence landscapes to average
- :type vectors of references to persistence landscapes
- """
- cdef vector[Persistence_landscape_on_grid_interface*] cpp_list
- if ( self.thisptr != NULL ) and ( to_average is not None ):
- for elt in to_average:
- cpp_list.push_back((<PersistenceLandscapesOnGrid>elt).thisptr)
- self.thisptr.new_compute_average( cpp_list )
-
-
-
- def distance(self, PersistenceLandscapesOnGrid second, power):
- """
- A function to compute distance between persistence landscape.
- The parameter of this function is a Persistence_landscape.
- This function is required in Topological_data_with_distances concept.
- For max norm distance, set power to numeric_limits<double>::max()
- :param the landascape to compute distance to
- :type PersistenceLandscape
- """
- if ( self.thisptr != NULL ) and ( second is not None ) and ( power is not None ):
- return self.thisptr.distance(deref(second.thisptr), power)
-
- def compute_scalar_product(self, PersistenceLandscapesOnGrid second):
- """
- A function to compute scalar product of persistence landscapes.
- The parameter of this function is a Persistence_landscape.
- This function is required in Topological_data_with_scalar_product concept.
- :param the landascape to compute scalar product with
- :type PersistenceLandscape
- """
- if ( self.thisptr != NULL ) and ( second is not None ):
- return self.thisptr.compute_scalar_product( deref(second.thisptr) )
-
- def get_y_range(self, level):
- """
- This procedure returns y-range of a given level persistence landscape.
- If a default value is used, the y-range
- of 0th level landscape is given (and this range contains the ranges
- of all other landscapes).
- :param The level of lrandscape
- :type nonnegative integer
- """
- if ( self.thisptr != NULL ):
- return self.thisptr.get_y_range(level)
-
diff --git a/src/cython/cython/reader_utils.pyx b/src/cython/cython/reader_utils.pyx
new file mode 100644
index 00000000..3a17c5a0
--- /dev/null
+++ b/src/cython/cython/reader_utils.pyx
@@ -0,0 +1,95 @@
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.string cimport string
+from libcpp.map cimport map
+from libcpp.pair cimport pair
+import os
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2017 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2017 INRIA"
+__license__ = "GPL v3"
+
+cdef extern from "Reader_utils_interface.h" namespace "Gudhi":
+ vector[vector[double]] read_matrix_from_csv_file(string off_file, char separator)
+ map[int, vector[pair[double, double]]] read_pers_intervals_grouped_by_dimension(string filename)
+ vector[pair[double, double]] read_pers_intervals_in_dimension(string filename, int only_this_dim)
+
+def read_lower_triangular_matrix_from_csv_file(csv_file='', separator=';'):
+ """Read lower triangular matrix from a CSV style file.
+
+ :param csv_file: A CSV file style name.
+ :type csv_file: string
+ :param separator: The value separator in the CSV file. Default value is ';'
+ :type separator: char
+
+ :returns: The lower triangular matrix.
+ :rtype: vector[vector[double]]
+ """
+ if csv_file != '':
+ if os.path.isfile(csv_file):
+ return read_matrix_from_csv_file(str.encode(csv_file), ord(separator[0]))
+ print("file " + csv_file + " not set or not found.")
+ return []
+
+def read_persistence_intervals_grouped_by_dimension(persistence_file=''):
+ """Reads a file containing persistence intervals.
+ Each line might contain 2, 3 or 4 values: [[field] dimension] birth death
+ The return value is a `map[dim, vector[pair[birth, death]]]`
+ where `dim` is an `int`, `birth` a `double`, and `death` a `double`.
+ Note: the function does not check that birth <= death.
+
+ :param persistence_file: A persistence file style name.
+ :type persistence_file: string
+
+ :returns: The persistence pairs grouped by dimension.
+ :rtype: map[int, vector[pair[double, double]]]
+ """
+ if persistence_file != '':
+ if os.path.isfile(persistence_file):
+ return read_pers_intervals_grouped_by_dimension(str.encode(persistence_file))
+ print("file " + persistence_file + " not set or not found.")
+ return []
+
+def read_persistence_intervals_in_dimension(persistence_file='', only_this_dim=-1):
+ """Reads a file containing persistence intervals.
+ Each line might contain 2, 3 or 4 values: [[field] dimension] birth death
+ If `only_this_dim` = -1, dimension is ignored and all lines are returned.
+ If `only_this_dim` is >= 0, only the lines where dimension = `only_this_dim`
+ (or where dimension is not specified) are returned.
+ The return value is a `vector[pair[birth, death]]`
+ where `birth` a `double`, and `death` a `double`.
+ Note: the function does not check that birth <= death.
+
+ :param persistence_file: A persistence file style name.
+ :type persistence_file: string
+
+ :returns: The persistence intervals in the given dimension.
+ :rtype: vector[pair[double, double]]
+ """
+ if persistence_file != '':
+ if os.path.isfile(persistence_file):
+ return read_pers_intervals_in_dimension(str.encode(persistence_file), only_this_dim)
+ print("file " + persistence_file + " not set or not found.")
+ return []
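A short usage sketch of the three readers added above (the file names are placeholders; the functions are assumed to be re-exported from the top-level gudhi package, as is done for the other cython modules):

    import gudhi

    # Intervals grouped by dimension: {dim: [(birth, death), ...]}
    diag_by_dim = gudhi.read_persistence_intervals_grouped_by_dimension(
        persistence_file='diagram.pers')

    # Only the intervals of dimension 1.
    h1_intervals = gudhi.read_persistence_intervals_in_dimension(
        persistence_file='diagram.pers', only_this_dim=1)

    # Lower triangular distance matrix from a ';'-separated CSV file.
    distance_matrix = gudhi.read_lower_triangular_matrix_from_csv_file(
        csv_file='distances.csv', separator=';')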
diff --git a/src/cython/cython/simplex_tree.pyx b/src/cython/cython/simplex_tree.pyx
index 2acdac3c..8a436619 100644
--- a/src/cython/cython/simplex_tree.pyx
+++ b/src/cython/cython/simplex_tree.pyx
@@ -2,6 +2,7 @@ from cython cimport numeric
from libcpp.vector cimport vector
from libcpp.utility cimport pair
from libcpp cimport bool
+from libcpp.string cimport string
"""This file is part of the Gudhi Library. The Gudhi library
(Geometric Understanding in Higher Dimensions) is a generic C++
@@ -35,14 +36,14 @@ cdef extern from "Simplex_tree_interface.h" namespace "Gudhi":
cdef cppclass Simplex_tree_interface_full_featured "Gudhi::Simplex_tree_interface<Gudhi::Simplex_tree_options_full_featured>":
Simplex_tree()
- double filtration()
double simplex_filtration(vector[int] simplex)
- void set_filtration(double filtration)
+ void assign_simplex_filtration(vector[int] simplex, double filtration)
void initialize_filtration()
int num_vertices()
int num_simplices()
void set_dimension(int dimension)
int dimension()
+ int upper_bound_dimension()
bint find_simplex(vector[int] simplex)
bint insert_simplex_and_subfaces(vector[int] simplex,
double filtration)
@@ -51,8 +52,9 @@ cdef extern from "Simplex_tree_interface.h" namespace "Gudhi":
vector[pair[vector[int], double]] get_star(vector[int] simplex)
vector[pair[vector[int], double]] get_cofaces(vector[int] simplex,
int dimension)
- void remove_maximal_simplex(vector[int] simplex)
void expansion(int max_dim)
+ void remove_maximal_simplex(vector[int] simplex)
+ bool prune_above_filtration(double filtration)
cdef extern from "Persistent_cohomology_interface.h" namespace "Gudhi":
cdef cppclass Simplex_tree_persistence_interface "Gudhi::Persistent_cohomology_interface<Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_full_featured>>":
@@ -61,6 +63,7 @@ cdef extern from "Persistent_cohomology_interface.h" namespace "Gudhi":
vector[int] betti_numbers()
vector[int] persistent_betti_numbers(double from_value, double to_value)
vector[pair[double,double]] intervals_in_dimension(int dimension)
+ void write_output_diagram(string diagram_file_name)
# SimplexTree python interface
cdef class SimplexTree:
@@ -113,13 +116,16 @@ cdef class SimplexTree:
"""
return self.thisptr.simplex_filtration(simplex)
- def set_filtration(self, filtration):
- """This function sets the main simplicial complex filtration value.
+ def assign_filtration(self, simplex, filtration):
+ """This function assigns the simplicial complex filtration value for a
+ given N-simplex.
- :param filtration: The filtration value.
- :type filtration: float.
+ :param simplex: The N-simplex, represented by a list of vertex.
+ :type simplex: list of int.
+ :param filtration: The simplicial complex filtration value.
+ :type filtration: float
"""
- self.thisptr.set_filtration(<double> filtration)
+ self.thisptr.assign_simplex_filtration(simplex, filtration)
def initialize_filtration(self):
"""This function initializes and sorts the simplicial complex
@@ -127,9 +133,14 @@ cdef class SimplexTree:
.. note::
- This function must be launched before persistence, betti_numbers,
- persistent_betti_numbers or get_filtration after inserting or
- removing simplices.
+ This function must be launched before
+ :func:`persistence()<gudhi.SimplexTree.persistence>`,
+ :func:`betti_numbers()<gudhi.SimplexTree.betti_numbers>`,
+ :func:`persistent_betti_numbers()<gudhi.SimplexTree.persistent_betti_numbers>`,
+ or :func:`get_filtration()<gudhi.SimplexTree.get_filtration>`
+ after :func:`inserting<gudhi.SimplexTree.insert>` or
+ :func:`removing<gudhi.SimplexTree.remove_maximal_simplex>`
+ simplices.
"""
self.thisptr.initialize_filtration()
@@ -156,21 +167,42 @@ cdef class SimplexTree:
:returns: the simplicial complex dimension.
:rtype: int
+
+ .. note::
+
+ This function is not constant time because it can recompute
+ dimension if required (can be triggered by
+ :func:`remove_maximal_simplex()<gudhi.SimplexTree.remove_maximal_simplex>`
+ or
+ :func:`prune_above_filtration()<gudhi.SimplexTree.prune_above_filtration>`
+ methods).
"""
return self.thisptr.dimension()
- def set_dimension(self, dimension):
- """This function sets the dimension of the simplicial complex.
+ def upper_bound_dimension(self):
+ """This function returns a valid dimension upper bound of the
+ simplicial complex.
- insert and remove_maximal_simplex functions do not update dimension
- value of the `SimplexTree`.
+ :returns: an upper bound on the dimension of the simplicial complex.
+ :rtype: int
+ """
+ return self.thisptr.upper_bound_dimension()
- `AlphaComplex`, `RipsComplex`, `TangentialComplex` and `WitnessComplex`
- automatically sets the correct dimension in their `create_simplex_tree`
- functions.
+ def set_dimension(self, dimension):
+ """This function sets the dimension of the simplicial complex.
:param dimension: The new dimension value.
:type dimension: int.
+
+ .. note::
+
+ This function must be used with caution because it disables
+ dimension recomputation when required
+ (this recomputation can be triggered by
+ :func:`remove_maximal_simplex()<gudhi.SimplexTree.remove_maximal_simplex>`
+ or
+ :func:`prune_above_filtration()<gudhi.SimplexTree.prune_above_filtration>`
+ ).
"""
self.thisptr.set_dimension(<int>dimension)
@@ -294,9 +326,57 @@ cdef class SimplexTree:
:param simplex: The N-simplex, represented by a list of vertex.
:type simplex: list of int.
+
+ .. note::
+
+ Be aware that removing a simplex shifts data in a flat_map, so
+ :func:`initialize_filtration()<gudhi.SimplexTree.initialize_filtration>` has to be called afterwards.
+
+ .. note::
+
+ The dimension of the simplicial complex may be lower after calling
+ remove_maximal_simplex than it was before. However,
+ :func:`upper_bound_dimension()<gudhi.SimplexTree.upper_bound_dimension>`
+ method will return the old value, which
+ remains a valid upper bound. If you care, you can call
+ :func:`dimension()<gudhi.SimplexTree.dimension>`
+ to recompute the exact dimension.
"""
self.thisptr.remove_maximal_simplex(simplex)
+ def prune_above_filtration(self, filtration):
+ """Prune above filtration value given as parameter.
+
+ :param filtration: Maximum threshold value.
+ :type filtration: float.
+ :returns: The filtration modification information.
+ :rtype: bint
+
+
+ .. note::
+
+ Some simplex tree functions require the filtration to be valid.
+ prune_above_filtration does not call
+ :func:`initialize_filtration()<gudhi.SimplexTree.initialize_filtration>`
+ itself; it only returns whether the filtration was modified.
+ If the complex has changed, please call
+ :func:`initialize_filtration()<gudhi.SimplexTree.initialize_filtration>`
+ to recompute it.
+
+ .. note::
+
+ Note that the dimension of the simplicial complex may be lower
+ after calling
+ :func:`prune_above_filtration()<gudhi.SimplexTree.prune_above_filtration>`
+ than it was before. However,
+ :func:`upper_bound_dimension()<gudhi.SimplexTree.upper_bound_dimension>`
+ will return the old value, which remains a
+ valid upper bound. If you care, you can call
+ :func:`dimension()<gudhi.SimplexTree.dimension>`
+ method to recompute the exact dimension.
+ """
+ return self.thisptr.prune_above_filtration(filtration)
+
def expansion(self, max_dim):
"""Expands the Simplex_tree containing only its one skeleton
until dimension max_dim.
@@ -320,7 +400,7 @@ cdef class SimplexTree:
"""This function returns the persistence of the simplicial complex.
:param homology_coeff_field: The homology coefficient field. Must be a
- prime number
+ prime number. Default value is 11.
:type homology_coeff_field: int.
:param min_persistence: The minimum persistence value to take into
account (strictly greater than min_persistence). Default value is
@@ -344,8 +424,9 @@ cdef class SimplexTree:
:returns: The Betti numbers ([B0, B1, ..., Bn]).
:rtype: list of int
- :note: betti_numbers function requires persistence function to be
- launched first.
+ :note: betti_numbers function requires
+ :func:`persistence()<gudhi.SimplexTree.persistence>`
+ function to be launched first.
"""
cdef vector[int] bn_result
if self.pcohptr != NULL:
@@ -369,7 +450,8 @@ cdef class SimplexTree:
:returns: The persistent Betti numbers ([B0, B1, ..., Bn]).
:rtype: list of int
- :note: persistent_betti_numbers function requires persistence
+ :note: persistent_betti_numbers function requires
+ :func:`persistence()<gudhi.SimplexTree.persistence>`
function to be launched first.
"""
cdef vector[int] pbn_result
@@ -385,12 +467,13 @@ cdef class SimplexTree:
complex in a specific dimension.
:param dimension: The specific dimension.
- :type from_value: int.
+ :type dimension: int.
:returns: The persistence intervals.
:rtype: list of pair of float
- :note: intervals_in_dim function requires persistence function to be
- launched first.
+ :note: intervals_in_dim function requires
+ :func:`persistence()<gudhi.SimplexTree.persistence>`
+ function to be launched first.
"""
cdef vector[pair[double,double]] intervals_result
if self.pcohptr != NULL:
@@ -399,3 +482,23 @@ cdef class SimplexTree:
print("intervals_in_dim function requires persistence function"
" to be launched first.")
return intervals_result
+
+ def write_persistence_diagram(self, persistence_file=''):
+ """This function writes the persistence intervals of the simplicial
+ complex in a user given file name.
+
+ :param persistence_file: The output file name.
+ :type persistence_file: string.
+
+ :note: write_persistence_diagram function requires
+ :func:`persistence()<gudhi.SimplexTree.persistence>`
+ function to be launched first.
+ """
+ if self.pcohptr != NULL:
+ if persistence_file != '':
+ self.pcohptr.write_output_diagram(str.encode(persistence_file))
+ else:
+ print("persistence_file must be specified")
+ else:
+ print("intervals_in_dim function requires persistence function"
+ " to be launched first.")
diff --git a/src/cython/doc/Makefile.in b/src/cython/doc/Makefile.in
deleted file mode 100644
index 526350b3..00000000
--- a/src/cython/doc/Makefile.in
+++ /dev/null
@@ -1,44 +0,0 @@
-# Makefile for Sphinx documentation
-#
-
-# You can set these variables from the command line.
-SPHINXOPTS =
-SPHINXBUILD = @SPHINX_PATH@
-PAPER =
-BUILDDIR = _build
-
-# User-friendly check for sphinx-build
-ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
-$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
-endif
-
-# Internal variables.
-PAPEROPT_a4 = -D latex_paper_size=a4
-PAPEROPT_letter = -D latex_paper_size=letter
-ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
-# the i18n builder cannot share the environment and doctrees with the others
-I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
-
-.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
-
-help:
- @echo "Please use \`make <target>' where <target> is one of"
- @echo " html to make standalone HTML files"
- @echo " doctest to run all doctests embedded in the documentation (if enabled)"
-
-clean:
- rm -f examples.inc
- rm -rf $(BUILDDIR)/*
-
-# GUDHI specific : Examples.inc is generated with generate_examples.py (and deleted on clean)
-
-html:
- ./generate_examples.py
- $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
- @echo
- @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
-
-doctest:
- $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
- @echo "Testing of doctests in the sources finished, look at the " \
- "results in $(BUILDDIR)/doctest/output.txt."
diff --git a/src/cython/doc/_templates/layout.html b/src/cython/doc/_templates/layout.html
index b11c1236..243f33c6 100644
--- a/src/cython/doc/_templates/layout.html
+++ b/src/cython/doc/_templates/layout.html
@@ -65,6 +65,7 @@
{#- old style sidebars: using blocks -- should be deprecated #}
{%- block sidebartoc %}
<h2><a href="index.html">GUDHI</a></h2>
+<h2><a href="fileformats.html">File formats</a></h2>
<h2><a href="installation.html">GUDHI installation</a></h2>
<h2><a href="citation.html">Acknowledging the GUDHI library</a></h2>
<h2><a href="genindex.html">Index</a></h2>
diff --git a/src/cython/doc/alpha_complex_sum.rst b/src/cython/doc/alpha_complex_sum.rst
index a5f6420a..1680a712 100644
--- a/src/cython/doc/alpha_complex_sum.rst
+++ b/src/cython/doc/alpha_complex_sum.rst
@@ -5,7 +5,7 @@
+----------------------------------------------------------------+------------------------------------------------------------------------+
| .. figure:: | Alpha_complex is a simplicial complex constructed from the finite |
-| img/alpha_complex_representation.png | cells of a Delaunay Triangulation. |
+| ../../doc/Alpha_complex/alpha_complex_representation.png | cells of a Delaunay Triangulation. |
| :alt: Alpha complex representation | |
| :figclass: align-center | The filtration value of each simplex is computed as the square of the |
| | circumradius of the simplex if the circumsphere is empty (the simplex |
diff --git a/src/cython/doc/alpha_complex_user.rst b/src/cython/doc/alpha_complex_user.rst
index e8268ef1..db7edd6f 100644
--- a/src/cython/doc/alpha_complex_user.rst
+++ b/src/cython/doc/alpha_complex_user.rst
@@ -75,7 +75,7 @@ In order to build the alpha complex, first, a Simplex tree is built from the cel
(The filtration value is set to NaN, which stands for unknown value):
.. figure::
- img/alpha_complex_doc.png
+ ../../doc/Alpha_complex/alpha_complex_doc.png
:figclass: align-center
:alt: Simplex tree structure construction example
@@ -112,7 +112,7 @@ computes the filtration value of the triangle, and then propagates the filtratio
here:
.. figure::
- img/alpha_complex_doc_420.png
+ ../../doc/Alpha_complex/alpha_complex_doc_420.png
:figclass: align-center
:alt: Filtration value propagation example
@@ -142,7 +142,7 @@ Prune above given filtration value
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The simplex tree is pruned from the given maximum alpha squared value (cf. `Simplex_tree::prune_above_filtration()`
-int he `C++ version <http://gudhi.gforge.inria.fr/doc/latest/index.html>`_).
+in the `C++ version <http://gudhi.gforge.inria.fr/doc/latest/index.html>`_).
In the following example, the value is given by the user as argument of the program.
@@ -158,7 +158,8 @@ Then, it is asked to display information about the alpha complex:
.. testcode::
import gudhi
- alpha_complex = gudhi.AlphaComplex(off_file='alphacomplexdoc.off')
+ alpha_complex = gudhi.AlphaComplex(off_file=gudhi.__root_source_dir__ + \
+ '/data/points/alphacomplexdoc.off')
simplex_tree = alpha_complex.create_simplex_tree(max_alpha_square=59.0)
result_str = 'Alpha complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \
repr(simplex_tree.num_simplices()) + ' simplices - ' + \
@@ -200,6 +201,6 @@ the program output is:
CGAL citations
==============
-.. bibliography:: how_to_cite_cgal.bib
+.. bibliography:: ../../biblio/how_to_cite_cgal.bib
:filter: docnames
:style: unsrt
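The same construction also works directly from an in-memory point list instead of an OFF file; a small sketch (the coordinates are arbitrary):

    import gudhi

    points = [[1.0, 1.0], [7.0, 0.0], [4.0, 6.0], [9.0, 6.0], [0.0, 14.0]]
    alpha_complex = gudhi.AlphaComplex(points=points)
    simplex_tree = alpha_complex.create_simplex_tree(max_alpha_square=59.0)
    print('Alpha complex is of dimension', simplex_tree.dimension(),
          '-', simplex_tree.num_simplices(), 'simplices')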
diff --git a/src/cython/doc/bottleneck_distance_sum.rst b/src/cython/doc/bottleneck_distance_sum.rst
index 5c475d0d..030fad9e 100644
--- a/src/cython/doc/bottleneck_distance_sum.rst
+++ b/src/cython/doc/bottleneck_distance_sum.rst
@@ -5,7 +5,7 @@
+-----------------------------------------------------------------+----------------------------------------------------------------------+
| .. figure:: | Bottleneck distance measures the similarity between two persistence |
-| img/perturb_pd.png | diagrams. It's the shortest distance b for which there exists a |
+| ../../doc/Bottleneck_distance/perturb_pd.png | diagrams. It's the shortest distance b for which there exists a |
| :figclass: align-center | perfect matching between the points of the two diagrams (+ all the |
| | diagonal points) such that any couple of matched points are at |
| Bottleneck distance is the length of | distance at most b. |
diff --git a/src/cython/doc/bottleneck_distance_user.rst b/src/cython/doc/bottleneck_distance_user.rst
index 0066992f..7692dce2 100644
--- a/src/cython/doc/bottleneck_distance_user.rst
+++ b/src/cython/doc/bottleneck_distance_user.rst
@@ -25,7 +25,7 @@ This example computes the bottleneck distance from 2 persistence diagrams:
message = "Bottleneck distance approximation=" + '%.2f' % gudhi.bottleneck_distance(diag1, diag2, 0.1)
print(message)
- message = "Bottleneck distance exact value=" + '%.2f' % gudhi.bottleneck_distance(diag1, diag2, 0)
+ message = "Bottleneck distance value=" + '%.2f' % gudhi.bottleneck_distance(diag1, diag2)
print(message)
The output is:
@@ -33,4 +33,4 @@ The output is:
.. testoutput::
Bottleneck distance approximation=0.81
- Bottleneck distance exact value=0.75
+ Bottleneck distance value=0.75
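For reference, the diagrams passed to bottleneck_distance are plain lists of birth/death pairs, so the whole computation fits in a few lines; a sketch with arbitrary pairs (an infinite death value is expressed with float('inf')):

    import gudhi

    diag1 = [[2.7, 3.7], [9.6, 14.0], [34.2, 34.974], [3.0, float('inf')]]
    diag2 = [[2.8, 4.45], [9.5, 14.1], [3.2, float('inf')]]
    # With an additional tolerance e the result is an e-approximation.
    print("Approximation:", gudhi.bottleneck_distance(diag1, diag2, 0.1))
    print("Value:", gudhi.bottleneck_distance(diag1, diag2))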
diff --git a/src/cython/doc/citation.rst b/src/cython/doc/citation.rst
index 6cdfb7cc..f4fdf83b 100644
--- a/src/cython/doc/citation.rst
+++ b/src/cython/doc/citation.rst
@@ -12,4 +12,4 @@ Manual, as well as for publications directly related to the GUDHI library.
GUDHI bibtex
************
-.. literalinclude:: how_to_cite_gudhi.bib
+.. literalinclude:: ../../biblio/how_to_cite_gudhi.bib
diff --git a/src/cython/doc/conf.py b/src/cython/doc/conf.py
index 42bfd59c..19a880d4 100755
--- a/src/cython/doc/conf.py
+++ b/src/cython/doc/conf.py
@@ -21,7 +21,7 @@ import os
#sys.path.insert(0, os.path.abspath('.'))
# Path to Gudhi.so from source path
-sys.path.insert(0, os.path.abspath('..'))
+sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
@@ -58,18 +58,20 @@ source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
+import gudhi
+
# General information about the project.
-project = u'GUDHI'
-copyright = u'2016, GUDHI Editorial Board'
+project = gudhi.__name__
+copyright = gudhi.__copyright__
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
-version = '2.0'
+version = gudhi.__version__
# The full version, including alpha/beta/rc tags.
-release = '2.0.0'
+#release = '2.0.1-rc1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
@@ -198,81 +200,3 @@ html_static_path = ['_static']
# Output file base name for HTML help builder.
htmlhelp_basename = 'GUDHIdoc'
-
-# -- Options for LaTeX output ---------------------------------------------
-
-latex_elements = {
-# The paper size ('letterpaper' or 'a4paper').
-#'papersize': 'letterpaper',
-
-# The font size ('10pt', '11pt' or '12pt').
-#'pointsize': '10pt',
-
-# Additional stuff for the LaTeX preamble.
-#'preamble': '',
-}
-
-# Grouping the document tree into LaTeX files. List of tuples
-# (source start file, target name, title,
-# author, documentclass [howto, manual, or own class]).
-latex_documents = [
- ('index', 'GUDHI.tex', u'GUDHI Documentation',
- u'Vincent Rouvreau', 'manual'),
-]
-
-# The name of an image file (relative to this directory) to place at the top of
-# the title page.
-#latex_logo = None
-
-# For "manual" documents, if this is true, then toplevel headings are parts,
-# not chapters.
-#latex_use_parts = False
-
-# If true, show page references after internal links.
-#latex_show_pagerefs = False
-
-# If true, show URL addresses after external links.
-#latex_show_urls = False
-
-# Documents to append as an appendix to all manuals.
-#latex_appendices = []
-
-# If false, no module index is generated.
-#latex_domain_indices = True
-
-
-# -- Options for manual page output ---------------------------------------
-
-# One entry per manual page. List of tuples
-# (source start file, name, description, authors, manual section).
-man_pages = [
- ('index', 'gudhi', u'GUDHI Documentation',
- [u'Vincent Rouvreau'], 1)
-]
-
-# If true, show URL addresses after external links.
-#man_show_urls = False
-
-
-# -- Options for Texinfo output -------------------------------------------
-
-# Grouping the document tree into Texinfo files. List of tuples
-# (source start file, target name, title, author,
-# dir menu entry, description, category)
-texinfo_documents = [
- ('index', 'GUDHI', u'GUDHI Documentation',
- u'Vincent Rouvreau', 'GUDHI', 'One line description of project.',
- 'Miscellaneous'),
-]
-
-# Documents to append as an appendix to all manuals.
-#texinfo_appendices = []
-
-# If false, no module index is generated.
-#texinfo_domain_indices = True
-
-# How to display URL addresses: 'footnote', 'no', or 'inline'.
-#texinfo_show_urls = 'footnote'
-
-# If true, do not generate a @detailmenu in the "Top" node's menu.
-#texinfo_no_detailmenu = False
diff --git a/src/cython/doc/cubical_complex_sum.rst b/src/cython/doc/cubical_complex_sum.rst
index 3ddf6375..280ad0e0 100644
--- a/src/cython/doc/cubical_complex_sum.rst
+++ b/src/cython/doc/cubical_complex_sum.rst
@@ -2,14 +2,14 @@
:Author: Pawel Dlotko :Introduced in: GUDHI 2.0.0 :Copyright: GPL v3
================================================================= =================================== ===================================
-+-----------------------------------------------------------------+----------------------------------------------------------------------+
-| .. figure:: | The cubical complex is an example of a structured complex useful in |
-| img/Cubical_complex_representation.png | computational mathematics (specially rigorous numerics) and image |
-| :alt: Cubical complex representation | analysis. |
-| :figclass: align-center | |
-| | |
-| Cubical complex representation | |
-+-----------------------------------------------------------------+----------------------------------------------------------------------+
-| :doc:`cubical_complex_user` | * :doc:`cubical_complex_ref` |
-| | * :doc:`periodic_cubical_complex_ref` |
-+-----------------------------------------------------------------+----------------------------------------------------------------------+
++--------------------------------------------------------------------------+----------------------------------------------------------------------+
+| .. figure:: | The cubical complex is an example of a structured complex useful in |
+| ../../doc/Bitmap_cubical_complex/Cubical_complex_representation.png | computational mathematics (specially rigorous numerics) and image |
+| :alt: Cubical complex representation | analysis. |
+| :figclass: align-center | |
+| | |
+| Cubical complex representation | |
++--------------------------------------------------------------------------+----------------------------------------------------------------------+
+| :doc:`cubical_complex_user` | * :doc:`cubical_complex_ref` |
+| | * :doc:`periodic_cubical_complex_ref` |
++--------------------------------------------------------------------------+----------------------------------------------------------------------+
diff --git a/src/cython/doc/cubical_complex_user.rst b/src/cython/doc/cubical_complex_user.rst
index 344b9554..2bfac62a 100644
--- a/src/cython/doc/cubical_complex_user.rst
+++ b/src/cython/doc/cubical_complex_user.rst
@@ -59,7 +59,7 @@ directions, allows to determine, dimension, neighborhood, boundary and coboundar
:math:`C \in \mathcal{K}`.
.. figure::
- img/Cubical_complex_representation.png
+ ../../doc/Bitmap_cubical_complex/Cubical_complex_representation.png
:alt: Cubical complex.
:figclass: align-center
@@ -87,7 +87,7 @@ in the example below). Next, in lexicographical order, the filtration of top dim
20 4 7 6 5 in the example below).
.. figure::
- img/exampleBitmap.png
+ ../../doc/Bitmap_cubical_complex/exampleBitmap.png
:alt: Example of a input data.
:figclass: align-center
@@ -95,14 +95,15 @@ in the example below). Next, in lexicographical order, the filtration of top dim
The input file for the following complex is:
-.. literalinclude:: cubicalcomplexdoc.txt
+.. literalinclude:: ../../data/bitmap/cubicalcomplexdoc.txt
-.. centered:: cubicalcomplexdoc.txt
+.. centered:: ../../data/bitmap/cubicalcomplexdoc.txt
.. testcode::
import gudhi
- cubical_complex = gudhi.CubicalComplex(perseus_file='cubicalcomplexdoc.txt')
+ cubical_complex = gudhi.CubicalComplex(perseus_file=gudhi.__root_source_dir__ + \
+ '/data/bitmap/cubicalcomplexdoc.txt')
result_str = 'Cubical complex is of dimension ' + repr(cubical_complex.dimension()) + ' - ' + \
repr(cubical_complex.num_simplices()) + ' simplices.'
print(result_str)
@@ -127,16 +128,17 @@ complex with periodic boundary conditions. One can also use Perseus style input
conditions in a given direction, then number of top dimensional cells in this direction have to be multiplied by -1.
For instance:
-.. literalinclude:: periodiccubicalcomplexdoc.txt
+.. literalinclude:: ../../data/bitmap/periodiccubicalcomplexdoc.txt
-.. centered:: periodiccubicalcomplexdoc.txt
+.. centered:: ../../data/bitmap/periodiccubicalcomplexdoc.txt
Indicate that we have imposed periodic boundary conditions in the direction x, but not in the direction y.
.. testcode::
import gudhi
- periodic_cc = gudhi.PeriodicCubicalComplex(perseus_file='periodiccubicalcomplexdoc.txt')
+ periodic_cc = gudhi.PeriodicCubicalComplex(perseus_file=gudhi.__root_source_dir__ + \
+ '/data/bitmap/periodiccubicalcomplexdoc.txt')
result_str = 'Periodic cubical complex is of dimension ' + repr(periodic_cc.dimension()) + ' - ' + \
repr(periodic_cc.num_simplices()) + ' simplices.'
print(result_str)
@@ -155,6 +157,6 @@ End user programs are available in cython/example/ folder.
Bibliography
============
-.. bibliography:: bibliography.bib
+.. bibliography:: ../../bibliography.bib
:filter: docnames
:style: unsrt
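Besides Perseus files, a cubical complex can be fed its top-dimensional cells directly; a sketch assuming the dimensions/top_dimensional_cells constructor arguments, with values mimicking a 3x3 bitmap:

    import gudhi

    cubical_complex = gudhi.CubicalComplex(
        dimensions=[3, 3],
        top_dimensional_cells=[1, 2, 3, 4, 5, 6, 7, 8, 9])
    print('Cubical complex is of dimension', cubical_complex.dimension(),
          '-', cubical_complex.num_simplices(), 'simplices.')
    print(cubical_complex.persistence())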
diff --git a/src/cython/doc/examples.rst b/src/cython/doc/examples.rst
index a89e0596..1e596e18 100644
--- a/src/cython/doc/examples.rst
+++ b/src/cython/doc/examples.rst
@@ -1,4 +1,21 @@
Examples
########
-.. include:: examples.inc
+.. only:: builder_html
+
+ * :download:`rips_complex_from_points_example.py <../example/rips_complex_from_points_example.py>`
+ * :download:`alpha_complex_from_points_example.py <../example/alpha_complex_from_points_example.py>`
+ * :download:`simplex_tree_example.py <../example/simplex_tree_example.py>`
+ * :download:`alpha_rips_persistence_bottleneck_distance.py <../example/alpha_rips_persistence_bottleneck_distance.py>`
+ * :download:`tangential_complex_plain_homology_from_off_file_example.py <../example/tangential_complex_plain_homology_from_off_file_example.py>`
+ * :download:`alpha_complex_diagram_persistence_from_off_file_example.py <../example/alpha_complex_diagram_persistence_from_off_file_example.py>`
+ * :download:`periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py <../example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py>`
+ * :download:`bottleneck_basic_example.py <../example/bottleneck_basic_example.py>`
+ * :download:`gudhi_graphical_tools_example.py <../example/gudhi_graphical_tools_example.py>`
+ * :download:`witness_complex_from_nearest_landmark_table.py <../example/witness_complex_from_nearest_landmark_table.py>`
+ * :download:`euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py>`
+ * :download:`euclidean_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py>`
+ * :download:`rips_complex_diagram_persistence_from_off_file_example.py <../example/rips_complex_diagram_persistence_from_off_file_example.py>`
+ * :download:`rips_complex_diagram_persistence_from_distance_matrix_file_example.py <../example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py>`
+ * :download:`rips_persistence_diagram.py <../example/rips_persistence_diagram.py>`
+ * :download:`random_cubical_complex_persistence_example.py <../example/random_cubical_complex_persistence_example.py>`
diff --git a/src/cython/doc/fileformats.rst b/src/cython/doc/fileformats.rst
new file mode 100644
index 00000000..156ef4e4
--- /dev/null
+++ b/src/cython/doc/fileformats.rst
@@ -0,0 +1,33 @@
+File formats
+############
+
+Persistence Diagram
+*******************
+
+Such a file, whose extension is usually ``.pers``, contains a list of
+persistence intervals.
+
+Lines starting with ``#`` are ignored (comments).
+
+Other lines might contain 2, 3 or 4 values (the number of values on each line
+must be the same for all lines)::
+
+ [[field] dimension] birth death
+
+Here is a simple sample file::
+
+ # Persistence diagram example
+ 2 2.7 3.7
+ 2 9.6 14.
+ # Some comments
+ 3 34.2 34.974
+ 4 3. inf
+
+Other sample files can be found in the data/persistence_diagram folder.
+
+Such files can be generated with
+:meth:`gudhi.SimplexTree.write_persistence_diagram`, read with
+:meth:`gudhi.read_persistence_intervals_grouped_by_dimension`, or
+:meth:`gudhi.read_persistence_intervals_in_dimension` and displayed with
+:meth:`gudhi.plot_persistence_barcode` or
+:meth:`gudhi.plot_persistence_diagram`.
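Because the format is line based, it can also be parsed with a few lines of plain Python when the GUDHI readers are not at hand; a rough sketch, assuming the 3-column ``dimension birth death`` variant shown above and a placeholder file name:

    intervals = []
    with open('diagram.pers') as pers_file:
        for line in pers_file:
            line = line.strip()
            if not line or line.startswith('#'):
                continue  # skip blank lines and comments
            dimension, birth, death = line.split()
            # 'inf' parses to float('inf')
            intervals.append((int(dimension), (float(birth), float(death))))
    print(intervals)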
diff --git a/src/cython/doc/generate_examples.py b/src/cython/doc/generate_examples.py
deleted file mode 100755
index d64d506c..00000000
--- a/src/cython/doc/generate_examples.py
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/env python
-
-from os import listdir
-
-"""This file is part of the Gudhi Library. The Gudhi library
- (Geometric Understanding in Higher Dimensions) is a generic C++
- library for computational topology.
-
- Author(s): Vincent Rouvreau
-
- Copyright (C) 2017 INRIA
-
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <http://www.gnu.org/licenses/>.
-"""
-
-__author__ = "Vincent Rouvreau"
-__copyright__ = "Copyright (C) 2017 INRIA"
-__license__ = "GPL v3"
-
-"""
-generate_examples.py generates examples.inc to be included in examples.rst.
-Refer to Makefile and make.bat to see if it is correctly launched.
-"""
-
-output_file = open('examples.inc','w')
-
-output_file.write('.. only:: builder_html\n\n')
-
-for file in listdir('../example/'):
- output_file.write(" * :download:`" + file + " <../example/" + file + ">`\n")
-
-output_file.close()
diff --git a/src/cython/doc/index.rst b/src/cython/doc/index.rst
index f6d10567..3945d72a 100644
--- a/src/cython/doc/index.rst
+++ b/src/cython/doc/index.rst
@@ -1,8 +1,10 @@
GUDHI Python module documentation
#################################
-.. image:: img/Gudhi_banner.png
- :align: center
+.. figure::
+ ../../doc/common/Gudhi_banner.png
+ :alt: Gudhi banner
+ :figclass: align-center
Introduction
************
@@ -81,6 +83,6 @@ Persistence graphical tools
Bibliography
************
-.. bibliography:: bibliography.bib
+.. bibliography:: ../../biblio/bibliography.bib
:filter: docnames
:style: unsrt
diff --git a/src/cython/doc/installation.rst b/src/cython/doc/installation.rst
index f98a5039..c182f176 100644
--- a/src/cython/doc/installation.rst
+++ b/src/cython/doc/installation.rst
@@ -68,31 +68,32 @@ The :doc:`Alpha complex </alpha_complex_user>`,
C++ library which provides easy access to efficient and reliable geometric
algorithms.
-Having CGAL version 4.6.0 or higher installed is recommended. The procedure to
-install this library according to your operating system is detailed
+Having CGAL, the Computational Geometry Algorithms Library, version 4.7.0 or
+higher installed is recommended. The procedure to install this library
+according to your operating system is detailed
`here <http://doc.cgal.org/latest/Manual/installation.html>`_.
-The following examples require the Computational Geometry Algorithms Library:
-
-.. only:: builder_html
-
- * :download:`euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py>`
- * :download:`euclidean_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py>`
-
-The following example requires CGAL version ≥ 4.7.0:
+The following examples require CGAL version ≥ 4.7.0:
.. only:: builder_html
* :download:`alpha_complex_diagram_persistence_from_off_file_example.py <../example/alpha_complex_diagram_persistence_from_off_file_example.py>`
* :download:`alpha_complex_from_points_example.py <../example/alpha_complex_from_points_example.py>`
-The following example requires CGAL version ≥ 4.8.0:
+The following examples require CGAL version ≥ 4.8.0:
.. only:: builder_html
* :download:`bottleneck_basic_example.py <../example/bottleneck_basic_example.py>`
* :download:`tangential_complex_plain_homology_from_off_file_example.py <../example/tangential_complex_plain_homology_from_off_file_example.py>`
+The following examples require CGAL version ≥ 4.8.1:
+
+.. only:: builder_html
+
+ * :download:`euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py>`
+ * :download:`euclidean_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py>`
+
Eigen3
======
diff --git a/src/cython/doc/make.bat.in b/src/cython/doc/make.bat.in
deleted file mode 100644
index ff1a6d56..00000000
--- a/src/cython/doc/make.bat.in
+++ /dev/null
@@ -1,67 +0,0 @@
-@ECHO OFF
-
-REM Command file for Sphinx documentation
-
-if "%SPHINXBUILD%" == "" (
- set SPHINXBUILD=@SPHINX_PATH@
-)
-set BUILDDIR=_build
-set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
-set I18NSPHINXOPTS=%SPHINXOPTS% .
-if NOT "%PAPER%" == "" (
- set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
- set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
-)
-
-if "%1" == "" goto help
-
-if "%1" == "help" (
- :help
- echo.Please use `make ^<target^>` where ^<target^> is one of
- echo. html to make standalone HTML files
- echo. doctest to run all doctests embedded in the documentation if enabled
- goto end
-)
-
-if "%1" == "clean" (
- del examples.inc
- for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
- del /q /s %BUILDDIR%\*
- goto end
-)
-
-
-%SPHINXBUILD% 2> nul
-if errorlevel 9009 (
- echo.
- echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
- echo.installed, then set the SPHINXBUILD environment variable to point
- echo.to the full path of the 'sphinx-build' executable. Alternatively you
- echo.may add the Sphinx directory to PATH.
- echo.
- echo.If you don't have Sphinx installed, grab it from
- echo.http://sphinx-doc.org/
- exit /b 1
-)
-
-:: GUDHI specific : Examples.inc is generated with generate_examples.py (and deleted on clean)
-
-if "%1" == "html" (
- generate_examples.py
- %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The HTML pages are in %BUILDDIR%/html.
- goto end
-)
-
-if "%1" == "doctest" (
- %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
- if errorlevel 1 exit /b 1
- echo.
- echo.Testing of doctests in the sources finished, look at the ^
-results in %BUILDDIR%/doctest/output.txt.
- goto end
-)
-
-:end
diff --git a/src/cython/doc/persistence_graphical_tools_user.rst b/src/cython/doc/persistence_graphical_tools_user.rst
index cae18323..9033331f 100644
--- a/src/cython/doc/persistence_graphical_tools_user.rst
+++ b/src/cython/doc/persistence_graphical_tools_user.rst
@@ -14,12 +14,14 @@ This function is useful to show the color palette values of dimension:
.. testcode::
import gudhi
- gudhi.show_palette_values(alpha=1.0)
+ plt = gudhi.show_palette_values(alpha=1.0)
+ plt.show()
.. plot::
import gudhi
- gudhi.show_palette_values(alpha=1.0)
+ plt = gudhi.show_palette_values(alpha=1.0)
+ plt.show()
Show persistence as a barcode
-----------------------------
@@ -30,17 +32,22 @@ This function can display the persistence result as a barcode:
import gudhi
- periodic_cc = gudhi.PeriodicCubicalComplex(perseus_file='3d_torus.txt')
+ periodic_cc = gudhi.PeriodicCubicalComplex(perseus_file=gudhi.__root_source_dir__ + \
+ '/data/bitmap/3d_torus.txt')
diag = periodic_cc.persistence()
- gudhi.plot_persistence_barcode(diag)
+ plt = gudhi.plot_persistence_barcode(diag)
+ plt.show()
.. plot::
import gudhi
- periodic_cc = gudhi.PeriodicCubicalComplex(perseus_file='3d_torus.txt')
+ periodic_cc = gudhi.PeriodicCubicalComplex(perseus_file=gudhi.__root_source_dir__ + \
+ '/data/bitmap/3d_torus.txt')
diag = periodic_cc.persistence()
- gudhi.plot_persistence_barcode(diag)
+ print("diag = ", diag)
+ plt = gudhi.plot_persistence_barcode(diag)
+ plt.show()
Show persistence as a diagram
-----------------------------
@@ -51,16 +58,20 @@ This function can display the persistence result as a diagram:
import gudhi
- rips_complex = gudhi.RipsComplex(off_file='tore3D_300.off', max_edge_length=2.0)
+ rips_complex = gudhi.RipsComplex(off_file=gudhi.__root_source_dir__ + \
+ '/data/points/tore3D_1307.off', max_edge_length=0.2)
simplex_tree = rips_complex.create_simplex_tree(max_dimension=3)
diag = simplex_tree.persistence()
- gudhi.plot_persistence_diagram(diag)
+ plt = gudhi.plot_persistence_diagram(diag, band_boot=0.13)
+ plt.show()
.. plot::
import gudhi
- rips_complex = gudhi.RipsComplex(off_file='tore3D_300.off', max_edge_length=2.0)
+ rips_complex = gudhi.RipsComplex(off_file=gudhi.__root_source_dir__ + \
+ '/data/points/tore3D_1307.off', max_edge_length=0.2)
simplex_tree = rips_complex.create_simplex_tree(max_dimension=3)
diag = simplex_tree.persistence()
- gudhi.plot_persistence_diagram(diag)
+ plt = gudhi.plot_persistence_diagram(diag, band_boot=0.13)
+ plt.show()
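The same pattern works for a diagram that was not produced by GUDHI itself: the plotting helpers take a list of (dimension, (birth, death)) pairs and return the pyplot module, so show() must be called explicitly. A small sketch (matplotlib is assumed to be installed, and the intervals are made up):

    import gudhi

    diag = [(0, (0.0, float('inf'))), (0, (0.0, 1.2)), (1, (0.5, 0.9))]
    plt = gudhi.plot_persistence_barcode(diag)
    plt.show()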
diff --git a/src/cython/doc/persistent_cohomology_sum.rst b/src/cython/doc/persistent_cohomology_sum.rst
index d1f79cb4..a26df1dc 100644
--- a/src/cython/doc/persistent_cohomology_sum.rst
+++ b/src/cython/doc/persistent_cohomology_sum.rst
@@ -4,7 +4,7 @@
+-----------------------------------------------------------------+-----------------------------------------------------------------------+
| .. figure:: | The theory of homology consists in attaching to a topological space |
-| img/3DTorus_poch.png | a sequence of (homology) groups, capturing global topological |
+| ../../doc/Persistent_cohomology/3DTorus_poch.png | a sequence of (homology) groups, capturing global topological |
| :figclass: align-center | features like connected components, holes, cavities, etc. Persistent |
| | homology studies the evolution -- birth, life and death -- of these |
| Rips Persistent Cohomology on a 3D | features when the topological space is changing. Consequently, the |
diff --git a/src/cython/doc/persistent_cohomology_user.rst b/src/cython/doc/persistent_cohomology_user.rst
index 72f1a7f7..bf90c163 100644
--- a/src/cython/doc/persistent_cohomology_user.rst
+++ b/src/cython/doc/persistent_cohomology_user.rst
@@ -109,6 +109,6 @@ We provide several example files: run these examples with -h for details on thei
Bibliography
============
-.. bibliography:: bibliography.bib
+.. bibliography:: ../../biblio/bibliography.bib
:filter: docnames
:style: unsrt
diff --git a/src/cython/doc/pyplots/barcode_persistence.py b/src/cython/doc/pyplots/barcode_persistence.py
index c06ac5a7..de33d506 100755
--- a/src/cython/doc/pyplots/barcode_persistence.py
+++ b/src/cython/doc/pyplots/barcode_persistence.py
@@ -1,5 +1,7 @@
import gudhi
-periodic_cc = gudhi.PeriodicCubicalComplex(perseus_file='../3d_torus.txt')
+periodic_cc = gudhi.PeriodicCubicalComplex(perseus_file=gudhi.__root_source_dir__ + \
+ '/data/bitmap/3d_torus.txt')
diag = periodic_cc.persistence()
-gudhi.plot_persistence_barcode(diag)
+plt = gudhi.plot_persistence_barcode(diag)
+plt.show()
diff --git a/src/cython/doc/pyplots/diagram_persistence.py b/src/cython/doc/pyplots/diagram_persistence.py
index b4714fe3..c2fbf801 100755
--- a/src/cython/doc/pyplots/diagram_persistence.py
+++ b/src/cython/doc/pyplots/diagram_persistence.py
@@ -1,5 +1,8 @@
import gudhi
-alpha_complex = gudhi.AlphaComplex(off_file='../tore3D_300.off')
-diag = alpha_complex.persistence()
-gudhi.plot_persistence_diagram(diag)
+rips_complex = gudhi.RipsComplex(off_file=gudhi.__root_source_dir__ + \
+ '/data/points/tore3D_1307.off', max_edge_length=0.2)
+simplex_tree = rips_complex.create_simplex_tree(max_dimension=3)
+diag = simplex_tree.persistence()
+plt = gudhi.plot_persistence_diagram(diag, band_boot=0.13)
+plt.show()
diff --git a/src/cython/doc/pyplots/show_palette_values.py b/src/cython/doc/pyplots/show_palette_values.py
index e72a55fd..fdf9645f 100755
--- a/src/cython/doc/pyplots/show_palette_values.py
+++ b/src/cython/doc/pyplots/show_palette_values.py
@@ -1,2 +1,3 @@
import gudhi
-gudhi.show_palette_values(alpha=1.0)
+plt = gudhi.show_palette_values(alpha=1.0)
+plt.show()
diff --git a/src/cython/doc/python3-sphinx-build b/src/cython/doc/python3-sphinx-build.py
index 44b94169..84d158cf 100755
--- a/src/cython/doc/python3-sphinx-build
+++ b/src/cython/doc/python3-sphinx-build.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python3
+#!/usr/bin/env python3
"""
Emulate sphinx-build for python3
diff --git a/src/cython/doc/reader_utils_ref.rst b/src/cython/doc/reader_utils_ref.rst
new file mode 100644
index 00000000..9c1ea6fc
--- /dev/null
+++ b/src/cython/doc/reader_utils_ref.rst
@@ -0,0 +1,11 @@
+=============================
+Reader utils reference manual
+=============================
+
+.. autofunction:: gudhi.read_off
+
+.. autofunction:: gudhi.read_lower_triangular_matrix_from_csv_file
+
+.. autofunction:: gudhi.read_persistence_intervals_grouped_by_dimension
+
+.. autofunction:: gudhi.read_persistence_intervals_in_dimension
diff --git a/src/cython/doc/rips_complex_sum.rst b/src/cython/doc/rips_complex_sum.rst
index 2b65fc19..5616bfa9 100644
--- a/src/cython/doc/rips_complex_sum.rst
+++ b/src/cython/doc/rips_complex_sum.rst
@@ -4,7 +4,7 @@
+----------------------------------------------------------------+------------------------------------------------------------------------+
| .. figure:: | Rips complex is a simplicial complex constructed from a one skeleton |
-| img/rips_complex_representation.png | graph. |
+| ../../doc/Rips_complex/rips_complex_representation.png | graph. |
| :figclass: align-center | |
| | The filtration value of each edge is computed from a user-given |
| Rips complex representation | distance function and is inserted until a user-given threshold |
diff --git a/src/cython/doc/rips_complex_user.rst b/src/cython/doc/rips_complex_user.rst
index f9760976..96ba9944 100644
--- a/src/cython/doc/rips_complex_user.rst
+++ b/src/cython/doc/rips_complex_user.rst
@@ -26,7 +26,7 @@ structure, and then expands the simplicial complex when required.
Vertex name correspond to the index of the point in the given range (aka. the point cloud).
.. figure::
- img/rips_complex_representation.png
+ ../../doc/Rips_complex/rips_complex_representation.png
:align: center
Rips-complex one skeleton graph representation
@@ -101,7 +101,8 @@ Finally, it is asked to display information about the Rips complex.
.. testcode::
import gudhi
- rips_complex = gudhi.RipsComplex(off_file='alphacomplexdoc.off', max_edge_length=12.0)
+ rips_complex = gudhi.RipsComplex(off_file=gudhi.__root_source_dir__ + \
+ '/data/points/alphacomplexdoc.off', max_edge_length=12.0)
simplex_tree = rips_complex.create_simplex_tree(max_dimension=1)
result_str = 'Rips complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \
repr(simplex_tree.num_simplices()) + ' simplices - ' + \
@@ -205,7 +206,8 @@ Finally, it is asked to display information about the Rips complex.
.. testcode::
import gudhi
- rips_complex = gudhi.RipsComplex(csv_file='full_square_distance_matrix.csv', max_edge_length=12.0)
+ rips_complex = gudhi.RipsComplex(csv_file=gudhi.__root_source_dir__ + \
+ '/data/distance_matrix/full_square_distance_matrix.csv', max_edge_length=12.0)
simplex_tree = rips_complex.create_simplex_tree(max_dimension=1)
result_str = 'Rips complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \
repr(simplex_tree.num_simplices()) + ' simplices - ' + \
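A lower-triangular distance matrix can also be passed in memory instead of a CSV file; a sketch assuming the distance_matrix constructor argument, with arbitrary distances:

    import gudhi

    distance_matrix = [[],
                       [6.08],
                       [5.83, 6.32],
                       [9.43, 6.71, 5.39]]
    rips_complex = gudhi.RipsComplex(distance_matrix=distance_matrix,
                                     max_edge_length=12.0)
    simplex_tree = rips_complex.create_simplex_tree(max_dimension=1)
    print('Rips complex is of dimension', simplex_tree.dimension(),
          '-', simplex_tree.num_simplices(), 'simplices')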
diff --git a/src/cython/doc/simplex_tree_sum.rst b/src/cython/doc/simplex_tree_sum.rst
index 3174fb62..fb0e54c1 100644
--- a/src/cython/doc/simplex_tree_sum.rst
+++ b/src/cython/doc/simplex_tree_sum.rst
@@ -4,7 +4,7 @@
+----------------------------------------------------------------+------------------------------------------------------------------------+
| .. figure:: | The simplex tree is an efficient and flexible data structure for |
-| img/Simplex_tree_representation.png | representing general (filtered) simplicial complexes. |
+| ../../doc/Simplex_tree/Simplex_tree_representation.png | representing general (filtered) simplicial complexes. |
| :alt: Simplex tree representation | |
| :figclass: align-center | The data structure is described in |
| | :cite:`boissonnatmariasimplextreealgorithmica` |
diff --git a/src/cython/doc/tangential_complex_sum.rst b/src/cython/doc/tangential_complex_sum.rst
index 2b05bc10..72b4d7ba 100644
--- a/src/cython/doc/tangential_complex_sum.rst
+++ b/src/cython/doc/tangential_complex_sum.rst
@@ -5,10 +5,10 @@
+----------------------------------------------------------------+------------------------------------------------------------------------+
| .. figure:: | A Tangential Delaunay complex is a simplicial complex designed to |
-| img/tc_examples.png | reconstruct a :math:`k`-dimensional manifold embedded in :math:`d`- |
+| ../../doc/Tangential_complex/tc_examples.png | reconstruct a :math:`k`-dimensional manifold embedded in :math:`d`- |
| :figclass: align-center | dimensional Euclidean space. The input is a point sample coming from |
| | an unknown manifold. The running time depends only linearly on the |
-| **Tangential complex representation** | extrinsic dimension :math:`d` and exponentially on the intrinsic |
+| Tangential complex representation | extrinsic dimension :math:`d` and exponentially on the intrinsic |
| | dimension :math:`k`. |
+----------------------------------------------------------------+------------------------------------------------------------------------+
| :doc:`tangential_complex_user` | :doc:`tangential_complex_ref` |
diff --git a/src/cython/doc/tangential_complex_user.rst b/src/cython/doc/tangential_complex_user.rst
index 03f9fea6..efa6d7ce 100644
--- a/src/cython/doc/tangential_complex_user.rst
+++ b/src/cython/doc/tangential_complex_user.rst
@@ -22,7 +22,7 @@ Let us start with the description of the Tangential complex of a simple
example, with :math:`k = 1` and :math:`d = 2`. The input data is 4 points
:math:`P` located on a curve embedded in 2D.
-.. figure:: img/tc_example_01.png
+.. figure:: ../../doc/Tangential_complex/tc_example_01.png
:alt: The input
:figclass: align-center
@@ -31,7 +31,7 @@ example, with :math:`k = 1` and :math:`d = 2`. The input data is 4 points
For each point :math:`p`, estimate its tangent subspace :math:`T_p` (e.g.
using PCA).
-.. figure:: img/tc_example_02.png
+.. figure:: ../../doc/Tangential_complex/tc_example_02.png
:alt: The estimated normals
:figclass: align-center
@@ -42,7 +42,7 @@ Let us add the Voronoi diagram of the points in orange. For each point
:math:`p`, construct its star in the Delaunay triangulation of :math:`P`
restricted to :math:`T_p`.
-.. figure:: img/tc_example_03.png
+.. figure:: ../../doc/Tangential_complex/tc_example_03.png
:alt: The Voronoi diagram
:figclass: align-center
@@ -62,7 +62,7 @@ simplex is not in the star of all its vertices.
Let us take the same example.
-.. figure:: img/tc_example_07_before.png
+.. figure:: ../../doc/Tangential_complex/tc_example_07_before.png
:alt: Before
:figclass: align-center
@@ -70,7 +70,7 @@ Let us take the same example.
Let us slightly move the tangent subspace :math:`T_q`
-.. figure:: img/tc_example_07_after.png
+.. figure:: ../../doc/Tangential_complex/tc_example_07_after.png
:alt: After
:figclass: align-center
@@ -79,7 +79,7 @@ Let us slightly move the tangent subspace :math:`T_q`
Now, the star of :math:`Q` contains :math:`QP`, but the star of :math:`P` does
not contain :math:`QP`. We have an inconsistency.
-.. figure:: img/tc_example_08.png
+.. figure:: ../../doc/Tangential_complex/tc_example_08.png
:alt: After
:figclass: align-center
@@ -122,7 +122,8 @@ This example builds the Tangential complex of point set read in an OFF file.
.. testcode::
import gudhi
- tc = gudhi.TangentialComplex(off_file='alphacomplexdoc.off')
+ tc = gudhi.TangentialComplex(off_file=gudhi.__root_source_dir__ + \
+ '/data/points/alphacomplexdoc.off')
result_str = 'Tangential contains ' + repr(tc.num_simplices()) + \
' simplices - ' + repr(tc.num_vertices()) + ' vertices.'
print(result_str)
@@ -190,6 +191,6 @@ The output is:
Bibliography
============
-.. bibliography:: bibliography.bib
+.. bibliography:: ../../biblio/bibliography.bib
:filter: docnames
:style: unsrt
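The complex can likewise be built from an in-memory point list; a hedged sketch that assumes a points= constructor argument symmetric to off_file=, with arbitrary coordinates:

    import gudhi

    points = [[1.0, 1.0], [7.0, 0.0], [4.0, 6.0], [9.0, 6.0]]
    tc = gudhi.TangentialComplex(points=points)
    print('Tangential contains', tc.num_simplices(), 'simplices -',
          tc.num_vertices(), 'vertices.')
    st = tc.create_simplex_tree()
    print(st.num_simplices())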
diff --git a/src/cython/doc/witness_complex_sum.rst b/src/cython/doc/witness_complex_sum.rst
index b65522ba..a8a126a0 100644
--- a/src/cython/doc/witness_complex_sum.rst
+++ b/src/cython/doc/witness_complex_sum.rst
@@ -3,15 +3,17 @@
:Euclidean version requires: CGAL :math:`\geq` 4.6.0 Eigen3
================================================================= =================================== ===================================
-+-----------------------------------------------------------------+----------------------------------------------------------------------+
-| .. image:: | Witness complex :math:`Wit(W,L)` is a simplicial complex defined on |
-| img/Witness_complex_representation.png | two sets of points in :math:`\mathbb{R}^D`. |
-| | |
-| | The data structure is described in |
-| | :cite:`boissonnatmariasimplextreealgorithmica`. |
-+-----------------------------------------------------------------+----------------------------------------------------------------------+
-| :doc:`witness_complex_user` | * :doc:`witness_complex_ref` |
-| | * :doc:`strong_witness_complex_ref` |
-| | * :doc:`euclidean_witness_complex_ref` |
-| | * :doc:`euclidean_strong_witness_complex_ref` |
-+-----------------------------------------------------------------+----------------------------------------------------------------------+
++-------------------------------------------------------------------+----------------------------------------------------------------------+
+| .. figure:: | Witness complex :math:`Wit(W,L)` is a simplicial complex defined on |
+| ../../doc/Witness_complex/Witness_complex_representation.png | two sets of points in :math:`\mathbb{R}^D`. |
+| :alt: Witness complex representation | |
+| :figclass: align-center | The data structure is described in |
+| | :cite:`boissonnatmariasimplextreealgorithmica`. |
+| | |
+| Witness complex representation | |
++-------------------------------------------------------------------+----------------------------------------------------------------------+
+| :doc:`witness_complex_user` | * :doc:`witness_complex_ref` |
+| | * :doc:`strong_witness_complex_ref` |
+| | * :doc:`euclidean_witness_complex_ref` |
+| | * :doc:`euclidean_strong_witness_complex_ref` |
++-------------------------------------------------------------------+----------------------------------------------------------------------+
diff --git a/src/cython/doc/witness_complex_user.rst b/src/cython/doc/witness_complex_user.rst
index aa9cbb2c..29413269 100644
--- a/src/cython/doc/witness_complex_user.rst
+++ b/src/cython/doc/witness_complex_user.rst
@@ -33,7 +33,7 @@ Both definitions can be relaxed by a real value :math:`\alpha`:
which leads to definitions of **weak relaxed witness complex** (or just relaxed witness complex for short) and
**strong relaxed witness complex** respectively.
-.. figure:: img/swit.svg
+.. figure:: ../../doc/Witness_complex/swit.svg
:alt: Strongly witnessed simplex
:figclass: align-center
@@ -126,6 +126,6 @@ Here is an example of constructing a strong witness complex filtration and compu
Bibliography
============
-.. bibliography:: bibliography.bib
+.. bibliography:: ../../biblio/bibliography.bib
:filter: docnames
:style: unsrt
diff --git a/src/cython/example/alpha_complex_diagram_persistence_from_off_file_example.py b/src/cython/example/alpha_complex_diagram_persistence_from_off_file_example.py
index adedc7d2..b4487be4 100755
--- a/src/cython/example/alpha_complex_diagram_persistence_from_off_file_example.py
+++ b/src/cython/example/alpha_complex_diagram_persistence_from_off_file_example.py
@@ -38,6 +38,7 @@ parser = argparse.ArgumentParser(description='AlphaComplex creation from '
'points from the given OFF file.')
parser.add_argument("-f", "--file", type=str, required=True)
parser.add_argument("-a", "--max_alpha_square", type=float, default=0.5)
+parser.add_argument("-b", "--band_boot", type=float, default=0.)
parser.add_argument('--no-diagram', default=False, action='store_true' , help='Flag for not to display the diagrams')
args = parser.parse_args()
@@ -63,7 +64,8 @@ with open(args.file, 'r') as f:
print(simplex_tree.betti_numbers())
if args.no_diagram == False:
- gudhi.plot_persistence_diagram(diag)
+ pplot = gudhi.plot_persistence_diagram(diag, band_boot=args.band_boot)
+ pplot.show()
else:
print(args.file, "is not a valid OFF file")
diff --git a/src/cython/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py b/src/cython/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py
index 2371c36c..e3f362dc 100755
--- a/src/cython/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py
+++ b/src/cython/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py
@@ -40,6 +40,7 @@ parser.add_argument("-f", "--file", type=str, required=True)
parser.add_argument("-a", "--max_alpha_square", type=float, required=True)
parser.add_argument("-n", "--number_of_landmarks", type=int, required=True)
parser.add_argument("-d", "--limit_dimension", type=int, required=True)
+parser.add_argument("-b", "--band_boot", type=float, default=0.)
parser.add_argument('--no-diagram', default=False, action='store_true' , help='Flag for not to display the diagrams')
args = parser.parse_args()
@@ -70,8 +71,8 @@ with open(args.file, 'r') as f:
print(simplex_tree.betti_numbers())
if args.no_diagram == False:
- gudhi.plot_persistence_diagram(diag)
-
+ pplot = gudhi.plot_persistence_diagram(diag, band_boot=args.band_boot)
+ pplot.show()
else:
print(args.file, "is not a valid OFF file")
diff --git a/src/cython/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py b/src/cython/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py
index 5748aa8a..c236d992 100755
--- a/src/cython/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py
+++ b/src/cython/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py
@@ -40,6 +40,7 @@ parser.add_argument("-f", "--file", type=str, required=True)
parser.add_argument("-a", "--max_alpha_square", type=float, required=True)
parser.add_argument("-n", "--number_of_landmarks", type=int, required=True)
parser.add_argument("-d", "--limit_dimension", type=int, required=True)
+parser.add_argument("-b", "--band_boot", type=float, default=0.)
parser.add_argument('--no-diagram', default=False, action='store_true' , help='Flag for not to display the diagrams')
args = parser.parse_args()
@@ -70,8 +71,8 @@ with open(args.file, 'r') as f:
print(simplex_tree.betti_numbers())
if args.no_diagram == False:
- gudhi.plot_persistence_diagram(diag)
-
+ pplot = gudhi.plot_persistence_diagram(diag, band_boot=args.band_boot)
+ pplot.show()
else:
print(args.file, "is not a valid OFF file")
diff --git a/src/cython/example/gudhi_graphical_tools_example.py b/src/cython/example/gudhi_graphical_tools_example.py
index bc3b16ec..ed87806b 100755
--- a/src/cython/example/gudhi_graphical_tools_example.py
+++ b/src/cython/example/gudhi_graphical_tools_example.py
@@ -44,4 +44,11 @@ gudhi.plot_persistence_barcode(persistence)
print("#####################################################################")
print("Show diagram persistence example")
-gudhi.plot_persistence_diagram(persistence)
+pplot = gudhi.plot_persistence_diagram(persistence)
+pplot.show()
+
+print("#####################################################################")
+print("Show diagram persistence example with a confidence band")
+
+pplot = gudhi.plot_persistence_diagram(persistence, band_boot=0.2)
+pplot.show()
diff --git a/src/cython/example/persistence_representations_diagrams_example.py b/src/cython/example/persistence_representations_diagrams_example.py
deleted file mode 100755
index bd7452a0..00000000
--- a/src/cython/example/persistence_representations_diagrams_example.py
+++ /dev/null
@@ -1,70 +0,0 @@
-#!/usr/bin/env python
-
-import gudhi
-import argparse
-
-"""This file is part of the Gudhi Library. The Gudhi library
- (Geometric Understanding in Higher Dimensions) is a generic C++
- library for computational topology.
-
- Author(s): Pawel Dlotko
-
- Copyright (C) 2017 Swansea University
-
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <http://www.gnu.org/licenses/>.
-"""
-
-__author__ = "Pawel Dlotko"
-__copyright__ = "Copyright (C) 2017 Swansea University"
-__license__ = "GPL v3"
-
-print("#####################################################################")
-print("Persistence representations diagrams example")
-
-
-parser = argparse.ArgumentParser(description='Statistics of persistence diagrams from file ',
- epilog='Example: '
- 'example/persistence_representations_diagrams_example.py '
- '-f file_with_diagram -d 1')
-parser.add_argument("-f", "--file", type=str, required=True)
-parser.add_argument("-d", "--dimension", type=int, default=0)
-
-args = parser.parse_args()
-
-print "Here are the parameters of the program: ",args.file," , " ,args.dimension
-
-p = gudhi.PersistenceIntervals(None,args.dimension,args.file);
-min_max_ = p.get_x_range();
-print "Birth-death range : ", min_max_
-
-dominant_ten_intervals_length = p.length_of_dominant_intervals(10)
-print "Length of ten dominant intervals : ", dominant_ten_intervals_length
-
-ten_dominant_intervals = p.dominant_intervals(10);
-print "Here are the dominant intervals : " , ten_dominant_intervals
-
-histogram = p.histogram_of_lengths(10);
-print "Here is the histogram of barcode's length : ", histogram
-
-cumulative_histogram = p.cumulative_histogram_of_lengths(10)
-print "Cumulative histogram : " ,cumulative_histogram
-
-char_funct_diag = p.characteristic_function_of_diagram(min_max_[0], min_max_[1],None)
-print "Characteristic function of diagram : ",char_funct_diag
-
-cumul_char_funct_diag = p.cumulative_characteristic_function_of_diagram(min_max_[0], min_max_[1],None)
-print "Cumulative characteristic function of diagram : ",cumul_char_funct_diag
-
-pbns = p.compute_persistent_betti_numbers()
-print "Persistence Betti numbers ", pbns
diff --git a/src/cython/example/persistence_representations_landscapes_example.py b/src/cython/example/persistence_representations_landscapes_example.py
deleted file mode 100755
index 94b68225..00000000
--- a/src/cython/example/persistence_representations_landscapes_example.py
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/usr/bin/env python
-
-import gudhi
-
-
-"""This file is part of the Gudhi Library. The Gudhi library
- (Geometric Understanding in Higher Dimensions) is a generic C++
- library for computational topology.
-
- Author(s): Pawel Dlotko
-
- Copyright (C) 2017 Swansea University
-
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <http://www.gnu.org/licenses/>.
-"""
-
-__author__ = "Pawel Dlotko"
-__copyright__ = "Copyright (C) 2017 Swansea University"
-__license__ = "GPL v3"
-
-print("#####################################################################")
-print("Persistence representations landscapes example")
-
-persistence1 = [(1,2),(6,8),(0,4),(3,8)]
-persistence2 = [(2,9),(1,6),(3,5),(6,10)]
-
-
-#create two persistence landscapes based on persistence1 and persistence2:
-l1 = gudhi.PersistenceLandscapes(vector_of_intervals=persistence1, dimension=3)
-l2 = gudhi.PersistenceLandscapes(vector_of_intervals=persistence2)
-
-#This is how to compute integral of landscapes:
-print "Integral of the first landscape : ", l1.compute_integral_of_landscape()
-print "Integral of the second landscape : ", l2.compute_integral_of_landscape()
-
-#here are the maxima of the functions:
-print "Maximum of l1 : ", l1.compute_maximum()
-print "Maximum of l2 : ", l2.compute_maximum()
-
-#here are the norms of landscapes:
-print "L^1 Norm of l1 : ", l1.compute_norm_of_landscape(1.)
-print "L^1 Norm of l2 : ", l2.compute_norm_of_landscape(1.)
-
-#here is the average of landscapes:
-average = gudhi.PersistenceLandscapes()
-average.compute_average(to_average=[l1, l2])
-
-#here is the distance of landscapes:
-print "Distance : ", l1.distance(average,1)
-
-#here is the scalar product of landscapes:
-print "Scalar product : ", l1.compute_scalar_product(l2)
diff --git a/src/cython/example/persistence_representations_landscapes_on_grid_example.py b/src/cython/example/persistence_representations_landscapes_on_grid_example.py
deleted file mode 100755
index 60b0e873..00000000
--- a/src/cython/example/persistence_representations_landscapes_on_grid_example.py
+++ /dev/null
@@ -1,110 +0,0 @@
-#!/usr/bin/env python
-
-import gudhi
-
-
-"""This file is part of the Gudhi Library. The Gudhi library
- (Geometric Understanding in Higher Dimensions) is a generic C++
- library for computational topology.
-
- Author(s): Pawel Dlotko
-
- Copyright (C) 2017 Swansea University
-
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <http://www.gnu.org/licenses/>.
-"""
-
-__author__ = "Pawel Dlotko"
-__copyright__ = "Copyright (C) 2017 Swansea University"
-__license__ = "GPL v3"
-
-print("#####################################################################")
-print("Persistence representations landscapes on a grid example")
-
-persistence1 = [(1, 2),(6, 8),(0, 4),(3, 8)]
-persistence2 = [(2, 9),(1, 6),(3, 5),(6, 10)]
-
-# Create two persistence landscapes on a grid from persistence1 and persistence2:
-l1 = gudhi.PersistenceLandscapeOnGrid(persistence1, 0, 11, 20)
-l2 = gudhi.PersistenceLandscapeOnGrid(persistence2, 0, 11, 20)
-
-# This is how to compute the integral of a landscape:
-print("Integral of the first landscape : ", l1.compute_integral_of_landscape())
-print("Integral of the second landscape : ", l2.compute_integral_of_landscape())
-
-# Here are the maxima of the landscape functions:
-print("Maximum of l1 : ", l1.compute_maximum())
-print("Maximum of l2 : ", l2.compute_maximum())
-
-# Here are the L^1 norms of the landscapes:
-print("L^1 Norm of l1 : ", l1.compute_norm_of_landscape(1.))
-print("L^1 Norm of l2 : ", l2.compute_norm_of_landscape(1.))
-
-# Here is the average of the landscapes:
-average = gudhi.PersistenceLandscapeOnGrid()
-average.compute_average(to_average=[l1, l2])
-
-# Here is the distance between the landscapes:
-print("Distance : ", l1.distance(l2))
-
-# Here is the scalar product of the landscapes:
-print("Scalar product : ", l1.compute_scalar_product(l2))
diff --git a/src/cython/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py b/src/cython/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py
index 984dbf1b..3baebd17 100755
--- a/src/cython/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py
+++ b/src/cython/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py
@@ -39,6 +39,7 @@ parser = argparse.ArgumentParser(description='RipsComplex creation from '
parser.add_argument("-f", "--file", type=str, required=True)
parser.add_argument("-e", "--max_edge_length", type=float, default=0.5)
parser.add_argument("-d", "--max_dimension", type=int, default=1)
+parser.add_argument("-b", "--band_boot", type=float, default=0.)
parser.add_argument('--no-diagram', default=False, action='store_true' , help='Flag for not to display the diagrams')
args = parser.parse_args()
@@ -61,4 +62,5 @@ print("betti_numbers()=")
print(simplex_tree.betti_numbers())
if args.no_diagram == False:
- gudhi.plot_persistence_diagram(diag)
+ pplot = gudhi.plot_persistence_diagram(diag, band_boot=args.band_boot)
+ pplot.show()
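The hunk above changes the example so that gudhi.plot_persistence_diagram returns the plot object instead of displaying it, and exposes the confidence-band width through --band_boot. A minimal sketch of the resulting call pattern, with a made-up point cloud and band width (only the plotting lines are taken from the change itself):

    import gudhi

    # Any small point cloud will do for illustration.
    points = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]]
    rips = gudhi.RipsComplex(points=points, max_edge_length=2.0)
    simplex_tree = rips.create_simplex_tree(max_dimension=1)
    diag = simplex_tree.persistence(homology_coeff_field=2, min_persistence=0)

    # The plot is no longer shown automatically; the caller decides when.
    pplot = gudhi.plot_persistence_diagram(diag, band_boot=0.1)
    pplot.show()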
diff --git a/src/cython/example/rips_complex_diagram_persistence_from_off_file_example.py b/src/cython/example/rips_complex_diagram_persistence_from_off_file_example.py
index 4c21b98e..5951eedf 100755
--- a/src/cython/example/rips_complex_diagram_persistence_from_off_file_example.py
+++ b/src/cython/example/rips_complex_diagram_persistence_from_off_file_example.py
@@ -39,6 +39,7 @@ parser = argparse.ArgumentParser(description='RipsComplex creation from '
parser.add_argument("-f", "--file", type=str, required=True)
parser.add_argument("-e", "--max_edge_length", type=float, default=0.5)
parser.add_argument("-d", "--max_dimension", type=int, default=1)
+parser.add_argument("-b", "--band_boot", type=float, default=0.)
parser.add_argument('--no-diagram', default=False, action='store_true' , help='Flag for not to display the diagrams')
args = parser.parse_args()
@@ -64,7 +65,8 @@ with open(args.file, 'r') as f:
print(simplex_tree.betti_numbers())
if args.no_diagram == False:
- gudhi.plot_persistence_diagram(diag)
+ pplot = gudhi.plot_persistence_diagram(diag, band_boot=args.band_boot)
+ pplot.show()
else:
print(args.file, "is not a valid OFF file")
diff --git a/src/cython/example/rips_persistence_diagram.py b/src/cython/example/rips_persistence_diagram.py
index 4e5cd2c8..9bfea41c 100755
--- a/src/cython/example/rips_persistence_diagram.py
+++ b/src/cython/example/rips_persistence_diagram.py
@@ -39,4 +39,5 @@ simplex_tree = rips.create_simplex_tree(max_dimension=1)
diag = simplex_tree.persistence(homology_coeff_field=2, min_persistence=0)
print("diag=", diag)
-gudhi.plot_persistence_diagram(diag)
+pplot = gudhi.plot_persistence_diagram(diag)
+pplot.show()
diff --git a/src/cython/example/simplex_tree_example.py b/src/cython/example/simplex_tree_example.py
index 3af20fcf..51a60e73 100755
--- a/src/cython/example/simplex_tree_example.py
+++ b/src/cython/example/simplex_tree_example.py
@@ -48,11 +48,8 @@ if st.insert([0, 1, 2], filtration=4.0):
else:
print("Not inserted...")
-# FIXME: Remove this line
-st.set_dimension(3)
print("dimension=", st.dimension())
-st.set_filtration(4.0)
st.initialize_filtration()
print("filtration=", st.get_filtration())
print("filtration[1, 2]=", st.filtration([1, 2]))
diff --git a/src/cython/example/tangential_complex_plain_homology_from_off_file_example.py b/src/cython/example/tangential_complex_plain_homology_from_off_file_example.py
index 4845eb47..6145e7f2 100755
--- a/src/cython/example/tangential_complex_plain_homology_from_off_file_example.py
+++ b/src/cython/example/tangential_complex_plain_homology_from_off_file_example.py
@@ -37,6 +37,7 @@ parser = argparse.ArgumentParser(description='TangentialComplex creation from '
'- Constructs a tangential complex with the '
'points from the given OFF file')
parser.add_argument("-f", "--file", type=str, required=True)
+parser.add_argument("-b", "--band_boot", type=float, default=0.)
parser.add_argument('--no-diagram', default=False, action='store_true' , help='Flag for not to display the diagrams')
args = parser.parse_args()
@@ -59,7 +60,8 @@ with open(args.file, 'r') as f:
print(st.betti_numbers())
if args.no_diagram == False:
- gudhi.plot_persistence_diagram(diag)
+ pplot = gudhi.plot_persistence_diagram(diag, band_boot=args.band_boot)
+ pplot.show()
else:
print(args.file, "is not a valid OFF file")
diff --git a/src/cython/gudhi.pyx.in b/src/cython/gudhi.pyx.in
index 53f09c4b..a8dd9f80 100644
--- a/src/cython/gudhi.pyx.in
+++ b/src/cython/gudhi.pyx.in
@@ -23,18 +23,19 @@
__author__ = "Vincent Rouvreau"
__copyright__ = "Copyright (C) 2016 INRIA"
__license__ = "GPL v3"
+__version__ = "@GUDHI_VERSION@"
+# This variable is used by doctest to find files
+__root_source_dir__ = "@CMAKE_SOURCE_DIR@"
-include "cython/off_reader.pyx"
-include "cython/simplex_tree.pyx"
-include "cython/rips_complex.pyx"
-include "cython/cubical_complex.pyx"
-include "cython/periodic_cubical_complex.pyx"
-include "cython/persistence_graphical_tools.py"
-include "cython/witness_complex.pyx"
-include "cython/strong_witness_complex.pyx"
-include "cython/persistence_representations_intervals.pyx"
-include "cython/persistence_representations_landscapes.pyx"
-#include "cython/persistence_representations_landscapes_on_grid.pyx"
+include '@CMAKE_CURRENT_SOURCE_DIR@/cython/off_reader.pyx'
+include '@CMAKE_CURRENT_SOURCE_DIR@/cython/simplex_tree.pyx'
+include '@CMAKE_CURRENT_SOURCE_DIR@/cython/rips_complex.pyx'
+include '@CMAKE_CURRENT_SOURCE_DIR@/cython/cubical_complex.pyx'
+include '@CMAKE_CURRENT_SOURCE_DIR@/cython/periodic_cubical_complex.pyx'
+include '@CMAKE_CURRENT_SOURCE_DIR@/cython/persistence_graphical_tools.py'
+include '@CMAKE_CURRENT_SOURCE_DIR@/cython/reader_utils.pyx'
+include '@CMAKE_CURRENT_SOURCE_DIR@/cython/witness_complex.pyx'
+include '@CMAKE_CURRENT_SOURCE_DIR@/cython/strong_witness_complex.pyx'
@GUDHI_CYTHON_ALPHA_COMPLEX@
@GUDHI_CYTHON_EUCLIDEAN_WITNESS_COMPLEX@
@GUDHI_CYTHON_SUBSAMPLING@
diff --git a/src/cython/include/Cubical_complex_interface.h b/src/cython/include/Cubical_complex_interface.h
index 7c0148f1..fad92c2c 100644
--- a/src/cython/include/Cubical_complex_interface.h
+++ b/src/cython/include/Cubical_complex_interface.h
@@ -43,6 +43,12 @@ class Cubical_complex_interface : public Bitmap_cubical_complex<CubicalComplexOp
: Bitmap_cubical_complex<CubicalComplexOptions>(dimensions, top_dimensional_cells) {
}
+ Cubical_complex_interface(const std::vector<unsigned>& dimensions,
+ const std::vector<double>& top_dimensional_cells,
+ const std::vector<bool>& periodic_dimensions)
+ : Bitmap_cubical_complex<CubicalComplexOptions>(dimensions, top_dimensional_cells, periodic_dimensions) {
+ }
+
Cubical_complex_interface(const std::string& perseus_file)
: Bitmap_cubical_complex<CubicalComplexOptions>(perseus_file.c_str()) {
}
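The new constructor forwards per-dimension periodic boundary conditions to Bitmap_cubical_complex. On the Python side this corresponds to the periodic cubical complex wrapper included above; a minimal sketch with arbitrary cell values (the 3x3 grid and the choice of periodic directions are illustrative only):

    import gudhi

    # 3x3 top-dimensional cells, periodic in the first direction only.
    pcc = gudhi.PeriodicCubicalComplex(
        dimensions=[3, 3],
        top_dimensional_cells=[1, 2, 3, 4, 5, 6, 7, 8, 9],
        periodic_dimensions=[True, False])
    print(pcc.persistence())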
diff --git a/src/cython/include/PSSK_interface.h b/src/cython/include/PSSK_interface.h
deleted file mode 100644
index 3b2d336a..00000000
--- a/src/cython/include/PSSK_interface.h
+++ /dev/null
@@ -1,63 +0,0 @@
-/* This file is part of the Gudhi Library. The Gudhi library
- * (Geometric Understanding in Higher Dimensions) is a generic C++
- * library for computational topology.
- *
- * Author(s): Pawel Dlotko
- *
- * Copyright (C) 2017 Swansea University
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#ifndef PSSK_INTERFACE_H_
-#define PSSK_INTERFACE_H_
-
-
-#include <gudhi/PSSK.h>
-
-
-namespace Gudhi {
-namespace Persistence_representations {
-
-/**
-* This is a version of the representation presented in https://arxiv.org/abs/1412.6821.
-* In that paper the authors use the representation only to compute a kernel; here we extend its usability considerably.
-* Note that the version implemented here is not exact, since the kernel is discretized.
-* The only difference with respect to the original class is the way the object is created: we keep the full (square)
-* image, and for every point (p,q) we add a kernel at (p,q) and the negative of that kernel at (q,p).
-**/
-
-class PSSK_interface : public PSSK {
- public:
- PSSK_interface(){}
-
- PSSK_interface(const std::vector<std::pair<double, double> >& interval,
- std::vector<std::vector<double> > filter = create_Gaussian_filter(5, 1), size_t number_of_pixels = 1000,
- double min_ = -1, double max_ = -1)
- :
- PSSK(interval,filter,number_of_pixels,min_,max_){}
-
- PSSK_interface(const char* filename, std::vector<std::vector<double> > filter = create_Gaussian_filter(5, 1),
- size_t number_of_pixels = 1000, double min_ = -1, double max_ = -1,
- unsigned dimension = std::numeric_limits<unsigned>::max())
- :PSSK(filename,filter,number_of_pixels,min_,max_,dimension){}
-
-};
-
-} // namespace Persistence_representations
-} // namespace Gudhi
-
-#endif  // PSSK_INTERFACE_H_
diff --git a/src/cython/include/Persistence_heat_maps_interface.h b/src/cython/include/Persistence_heat_maps_interface.h
deleted file mode 100644
index fe565313..00000000
--- a/src/cython/include/Persistence_heat_maps_interface.h
+++ /dev/null
@@ -1,156 +0,0 @@
-/* This file is part of the Gudhi Library. The Gudhi library
- * (Geometric Understanding in Higher Dimensions) is a generic C++
- * library for computational topology.
- *
- * Author(s): Pawel Dlotko
- *
- * Copyright (C) 2016 INRIA (France)
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#ifndef PERSISTENCE_HEAT_MAPS_INTERFACE_H_
-#define PERSISTENCE_HEAT_MAPS_INTERFACE_H_
-
-// gudhi include
-#include <gudhi/read_persistence_from_file.h>
-#include <gudhi/common_persistence_representations.h>
-
-// standard include
-#include <vector>
-#include <sstream>
-#include <iostream>
-#include <cmath>
-#include <limits>
-#include <algorithm>
-#include <utility>
-#include <string>
-#include <functional>
-
-namespace Gudhi {
-namespace Persistence_representations {
-
-class Persistence_heat_maps_interface : public Persistence_heat_maps {
- public:
-
- Persistence_heat_maps_interface():Persistence_heat_maps(){}
-
-
- Persistence_heat_maps_interface(const std::vector<std::pair<double, double> >& interval,
- std::vector<std::vector<double> > filter = create_Gaussian_filter(5, 1),
- bool erase_below_diagonal = false, size_t number_of_pixels = 1000,
- double min_ = std::numeric_limits<double>::max(),
- double max_ = std::numeric_limits<double>::max()):
- Persistence_heat_maps(interval, filter, erase_below_diagonal, number_of_pixels, min_, max_){}
-
-
- Persistence_heat_maps_interface(const char* filename, std::vector<std::vector<double> > filter = create_Gaussian_filter(5, 1),
- bool erase_below_diagonal = false, size_t number_of_pixels = 1000,
- double min_ = std::numeric_limits<double>::max(),
- double max_ = std::numeric_limits<double>::max(),
- unsigned dimension = std::numeric_limits<unsigned>::max()):
- Persistence_heat_maps(filename,filter,erase_below_diagonal,number_of_pixels,min_,max_,dimension){}
-
- void compute_mean_interface(const std::vector<Persistence_heat_maps*>& maps)
- {
- this->compute_mean(maps);
- }
-
- void compute_median_interface(const std::vector<Persistence_heat_maps*>& maps)
- {
- this->compute_median(maps);
- }
-
- void compute_percentage_of_active_interface(const std::vector<Persistence_heat_maps*>& maps, size_t cutoff = 1)
- {
- this->compute_percentage_of_active(maps,cutoff);
- }
-
- void print_to_file_interface(const char* filename) const
- {
- this->print_to_file(filename);
- }
-
- void load_from_file_interface(const char* filename)
- {
- this->load_from_file( filename );
- }
-
- inline bool check_if_the_same_interface(const Persistence_heat_maps& second) const
- {
- return this->check_if_the_same( second );
- }
-
- inline double get_min_interface() const
- {
- return this->get_min();
- }
-
- inline double get_max_interface() const
- {
- return this->get_max();
- }
-
- std::vector<double> vectorize_interface(int number_of_function) const
- {
- return this->vectorize(number_of_function);
- }
-
- size_t number_of_vectorize_functions_interface() const
- {
- return this->number_of_vectorize_functions();
- }
-
- double project_to_R_interface(int number_of_function) const
- {
- return this->project_to_R( number_of_function );
- }
-
- size_t number_of_projections_to_R_interface() const
- {
- return this->number_of_projections_to_R();
- }
-
- double distance_interface(const Persistence_heat_maps& second_, double power = 1) const
- {
- return this->distance( second_, power );
- }
-
- void compute_average_interface(const std::vector<Persistence_heat_maps*>& to_average)
- {
- this->compute_average( to_average );
- }
-
- double compute_scalar_product_interface(const Persistence_heat_maps& second_) const
- {
- return this->compute_scalar_product( second_ );
- }
-
- std::pair<double, double> get_x_range_interface() const
- {
- return this->get_x_range();
- }
-
- std::pair<double, double> get_y_range_interface() const
- {
- return this->get_y_range();
- }
-
-};
-
-
-} // namespace Persistence_representations
-} // namespace Gudhi
-
-#endif // PERSISTENCE_HEAT_MAPS_INTERFACE_H_
diff --git a/src/cython/include/Persistence_intervals_interface.h b/src/cython/include/Persistence_intervals_interface.h
deleted file mode 100644
index c3a3dde7..00000000
--- a/src/cython/include/Persistence_intervals_interface.h
+++ /dev/null
@@ -1,59 +0,0 @@
-/* This file is part of the Gudhi Library. The Gudhi library
- * (Geometric Understanding in Higher Dimensions) is a generic C++
- * library for computational topology.
- *
- * Author(s): Pawel Dlotko
- *
- * Copyright (C) 2017 Swansea University
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#ifndef INCLUDE_PERSISTENCE_REPRESENTATIONS_INTERVALS_
-#define INCLUDE_PERSISTENCE_REPRESENTATIONS_INTERVALS_
-
-#include <gudhi/Persistence_intervals.h>
-
-#include <iostream>
-#include <vector>
-#include <string>
-
-namespace Gudhi {
-
-// If we want to use the same class names on the C++ and Cython sides we need this interface, because otherwise
-// we would get a name conflict. Keeping the names identical on both sides also helps with clarity and documentation.
-// If the C++ class we inherit from is a template, we inherit from a concrete instantiation, e.g. Persistence_intervals<double>.
-// This class also defines the interface functions used on the Python side, so that the C++ and Python functions share their names.
-
-namespace Persistence_representations {
-
-class Persistence_intervals_interface : public Persistence_intervals
-{
- public:
- Persistence_intervals_interface(const char* filename, unsigned dimension = std::numeric_limits<unsigned>::max())
- : Persistence_intervals(filename, dimension) {
- }
-
- Persistence_intervals_interface(const std::vector<std::pair<double, double> >& intervals)
- : Persistence_intervals(intervals) {
- }
-
-};
-
-} // namespace Persistence_representations
-
-} // namespace Gudhi
-
-#endif  // INCLUDE_PERSISTENCE_REPRESENTATIONS_INTERVALS_
-
diff --git a/src/cython/include/Persistence_intervals_with_distances_interface.h b/src/cython/include/Persistence_intervals_with_distances_interface.h
deleted file mode 100644
index 05caa14a..00000000
--- a/src/cython/include/Persistence_intervals_with_distances_interface.h
+++ /dev/null
@@ -1,43 +0,0 @@
-/* This file is part of the Gudhi Library. The Gudhi library
- * (Geometric Understanding in Higher Dimensions) is a generic C++
- * library for computational topology.
- *
- * Author(s): Pawel Dlotko
- *
- * Copyright (C) 2017 Swansea University
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#ifndef PERSISTENCE_INTERVALS_WITH_DISTANCES_INTERFACE_H_
-#define PERSISTENCE_INTERVALS_WITH_DISTANCES_INTERFACE_H_
-
-#include <gudhi/Persistence_intervals_with_distances.h>
-
-namespace Gudhi {
-namespace Persistence_representations {
-
-class Persistence_intervals_with_distances_interface : public Persistence_intervals_with_distances {
- public:
- double distance_interface(const Persistence_intervals_with_distances& second, double power = std::numeric_limits<double>::max(),
- double tolerance = 0) const
- {
- return this->distance( second, power, tolerance );
- }
-};
-
-} // namespace Persistence_representations
-} // namespace Gudhi
-
-#endif // PERSISTENCE_INTERVALS_WITH_DISTANCES_INTERFACE_H_
diff --git a/src/cython/include/Persistence_landscape_interface.h b/src/cython/include/Persistence_landscape_interface.h
deleted file mode 100644
index 339031d4..00000000
--- a/src/cython/include/Persistence_landscape_interface.h
+++ /dev/null
@@ -1,200 +0,0 @@
-/* This file is part of the Gudhi Library. The Gudhi library
- * (Geometric Understanding in Higher Dimensions) is a generic C++
- * library for computational topology.
- *
- * Author(s): Pawel Dlotko
- *
- * Copyright (C) 2017 Swansea University
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#ifndef PERSISTENCE_LANDSCAPE_INTERFACE_H_
-#define PERSISTENCE_LANDSCAPE_INTERFACE_H_
-
-#include <gudhi/Persistence_landscape.h>
-
-namespace Gudhi {
-namespace Persistence_representations {
-
-
-class Persistence_landscape_interface : public Persistence_landscape
-{
- public:
- Persistence_landscape_interface():Persistence_landscape(){}
-
- Persistence_landscape_interface(const std::vector<std::pair<double, double> >& p, size_t number_of_levels = std::numeric_limits<size_t>::max() ):Persistence_landscape(p,number_of_levels){}
-
- Persistence_landscape_interface(const char* filename, size_t dimension = std::numeric_limits<unsigned>::max() , size_t number_of_levels = std::numeric_limits<size_t>::max() ):Persistence_landscape(filename,dimension,number_of_levels){}
-
- //****************
- static Persistence_landscape_interface* construct_from_file( const char* filename, size_t dimension = std::numeric_limits<unsigned>::max() , size_t number_of_levels = std::numeric_limits<size_t>::max() )
- {
- Persistence_landscape_interface* result = new Persistence_landscape_interface(filename,dimension,number_of_levels);
- return result;
- }
- static Persistence_landscape_interface* construct_from_vector_of_pairs( const std::vector<std::pair<double, double> >& p, size_t number_of_levels = std::numeric_limits<size_t>::max() )
- {
- Persistence_landscape_interface* result = new Persistence_landscape_interface(p,number_of_levels);
- return result;
- }
-
- //****************
-
-
- Persistence_landscape_interface* new_abs_interface()
- {
- return (Persistence_landscape_interface*)this->new_abs();
- }
-
- void new_compute_average(const std::vector<Persistence_landscape_interface*>& to_average)
- {
- std::vector<Persistence_landscape*> to_average_new;
- to_average_new.reserve( to_average.size() );
- for ( size_t i = 0 ; i != to_average.size() ; ++i )
- {
- to_average_new.push_back( (Persistence_landscape*)to_average[i] );
- }
- this->compute_average(to_average_new);
- }
-
-/*
- void load_landscape_from_file_interface(const char* filename)
- {
- this->load_landscape_from_file(filename);
- }
-
-
- void print_to_file_interface(const char* filename) const
- {
- this->print_to_file(filename);
- }
-
-
- double compute_integral_of_landscape_interface() const
- {
- return this->compute_integral_of_landscape();
- }
-
-
- double compute_integral_of_a_level_of_a_landscape_interface(size_t level) const
- {
- return this->compute_integral_of_a_level_of_a_landscape(level);
- }
-
-
- double compute_integral_of_landscape_interface(double p) const
- {
- return this->compute_integral_of_landscape(p);
- }
-
-
- double compute_value_at_a_given_point_interface(unsigned level, double x) const
- {
- return this->compute_value_at_a_given_point(level,x);
- }
-
-
- double compute_maximum_interface() const
- {
- return this->compute_maximum();
- }
-
-
- double compute_minimum_interface() const
- {
- return this->compute_minimum();
- }
-
-
-
- double compute_norm_of_landscape_interface(double i)
- {
- return this->compute_norm_of_landscape(i);
- }
-
-
- Persistence_landscape abs_interface()
- {
- return this->abs();
- }
-
- size_t size_interface() const
- {
- return this->size();
- }
-
- double find_max_interface(unsigned lambda) const
- {
- return this->find_max();
- }
-
- friend double compute_inner_product_interface(const Persistence_landscape& l1, const Persistence_landscape& l2)
- {
- return this->compute_inner_product(l1,l2);
- }
-
- double project_to_R_interface(int number_of_function) const
- {
- return this->project_to_R(number_of_function);
- }
-
-
- size_t number_of_projections_to_R_interface() const
- {
- return this->number_of_projections_to_R();
- }
-
-
- std::vector<double> vectorize_interface(int number_of_function) const
- {
- return this->vectorize( number_of_function );
- }
-
-
- size_t number_of_vectorize_function_interface() const
- {
- return this->number_of_vectorize_function();
- }
-
-
- void compute_average_interface(const std::vector<Persistence_landscape*>& to_average)
- {
- return this->compute_average(to_average);
- }
-
-
- double distance_interface(const Persistence_landscape& second, double power = 1)
- {
- return this->distance( second, power );
- }
-
-
- double compute_scalar_product_interface(const Persistence_landscape& second) const
- {
- return this->compute_scalar_product( second );
- }
-
-
- std::pair<double, double> get_y_range_interface(size_t level = 0) const
- {
- return this->get_y_range( level );
- }
- */
-};
-
-} // namespace Persistence_representations
-} // namespace Gudhi
-
-#endif // PERSISTENCE_LANDSCAPE_INTERFACE_H_
diff --git a/src/cython/include/Persistence_landscape_on_grid_interface.h b/src/cython/include/Persistence_landscape_on_grid_interface.h
deleted file mode 100644
index c8ba9f76..00000000
--- a/src/cython/include/Persistence_landscape_on_grid_interface.h
+++ /dev/null
@@ -1,207 +0,0 @@
-/* This file is part of the Gudhi Library. The Gudhi library
- * (Geometric Understanding in Higher Dimensions) is a generic C++
- * library for computational topology.
- *
- * Author(s): Pawel Dlotko
- *
- * Copyright (C) 2017 Swansea University
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#ifndef PERSISTENCE_LANDSCAPE_ON_GRID_INTERFACE_H_
-#define PERSISTENCE_LANDSCAPE_ON_GRID_INTERFACE_H_
-
-#include <gudhi/Persistence_landscape_on_grid.h>
-
-namespace Gudhi {
-namespace Persistence_representations {
-
-
-class Persistence_landscape_on_grid_interface : public Persistence_landscape_on_grid
-{
- public:
- Persistence_landscape_on_grid_interface():Persistence_landscape_on_grid(){}
-
- Persistence_landscape_on_grid_interface(const std::vector<std::pair<double, double> >& p, double grid_min_, double grid_max_,
- size_t number_of_points_):Persistence_landscape_on_grid(p, grid_min_, grid_max_,number_of_points_){}
-
-
- Persistence_landscape_on_grid_interface(const std::vector<std::pair<double, double> >& p, double grid_min_, double grid_max_,
- size_t number_of_points_, unsigned number_of_levels_of_landscape):
- Persistence_landscape_on_grid(p, grid_min_, grid_max_,number_of_points_, number_of_levels_of_landscape){}
-
-
- Persistence_landscape_on_grid_interface(const char* filename, double grid_min_, double grid_max_, size_t number_of_points_,
- unsigned number_of_levels_of_landscape, uint16_t dimension_ = std::numeric_limits<uint16_t>::max()):
- Persistence_landscape_on_grid(filename, grid_min_, grid_max_, number_of_points_, number_of_levels_of_landscape, dimension_ ){}
-
-
- Persistence_landscape_on_grid_interface(const char* filename, double grid_min_, double grid_max_, size_t number_of_points_,
- uint16_t dimension_ = std::numeric_limits<uint16_t>::max()):Persistence_landscape_on_grid(filename,grid_min_,grid_max_,number_of_points_,dimension_ ){}
-
-
- Persistence_landscape_on_grid_interface(const char* filename, size_t number_of_points, unsigned number_of_levels_of_landscape, uint16_t dimension = std::numeric_limits<uint16_t>::max()):
- Persistence_landscape_on_grid(filename,number_of_points,number_of_levels_of_landscape,dimension){}
-
-
- Persistence_landscape_on_grid_interface(const char* filename, size_t number_of_points, uint16_t dimension = std::numeric_limits<uint16_t>::max()):
- Persistence_landscape_on_grid(filename,number_of_points,dimension){}
-
-
- Persistence_landscape_on_grid_interface* new_abs_interface()
- {
- return (Persistence_landscape_on_grid_interface*)this->new_abs();
- }
-
- void new_compute_average(const std::vector<Persistence_landscape_on_grid_interface*>& to_average)
- {
- std::vector<Persistence_landscape_on_grid*> to_average_new;
- to_average_new.reserve( to_average.size() );
- for ( size_t i = 0 ; i != to_average.size() ; ++i )
- {
- to_average_new.push_back( (Persistence_landscape_on_grid*)to_average[i] );
- }
- this->compute_average(to_average_new);
- }
-
-
-/*
- void load_landscape_from_file_interface(const char* filename)
- {
- this->load_landscape_from_file(filename);
- }
-
-
- void print_to_file_interface(const char* filename) const
- {
- this->print_to_file(filename);
- }
-
-
- double compute_integral_of_landscape_interface() const
- {
- return this->compute_integral_of_landscape();
- }
-
-
- double compute_integral_of_a_level_of_a_landscape_interface(size_t level) const
- {
- return this->compute_integral_of_a_level_of_a_landscape(level);
- }
-
-
- double compute_integral_of_landscape_interface(double p) const
- {
- return this->compute_integral_of_landscape(p);
- }
-
-
- double compute_value_at_a_given_point_interface(unsigned level, double x) const
- {
- return this->compute_value_at_a_given_point(level,x);
- }
-
-
- double compute_maximum_interface() const
- {
- return this->compute_maximum();
- }
-
-
- double compute_minimum_interface() const
- {
- return this->compute_minimum();
- }
-
-
- double compute_norm_of_landscape_interface(double i)
- {
- return this->compute_norm_of_landscape(i);
- }
-
-
- Persistence_landscape abs_interface()
- {
- return this->abs();
- }
-
- size_t size_interface() const
- {
- return this->size();
- }
-
- double find_max_interface(unsigned lambda) const
- {
- return this->find_max();
- }
-
- friend double compute_inner_product_interface(const Persistence_landscape& l1, const Persistence_landscape& l2)
- {
- return this->compute_inner_product(l1,l2);
- }
-
- double project_to_R_interface(int number_of_function) const
- {
- return this->project_to_R(number_of_function);
- }
-
-
- size_t number_of_projections_to_R_interface() const
- {
- return this->number_of_projections_to_R();
- }
-
-
- std::vector<double> vectorize_interface(int number_of_function) const
- {
- return this->vectorize( number_of_function );
- }
-
-
- size_t number_of_vectorize_function_interface() const
- {
- return this->number_of_vectorize_function();
- }
-
-
- void compute_average_interface(const std::vector<Persistence_landscape*>& to_average)
- {
- return this->compute_average(to_average);
- }
-
-
- double distance_interface(const Persistence_landscape& second, double power = 1)
- {
- return this->distance( second, power );
- }
-
-
- double compute_scalar_product_interface(const Persistence_landscape& second) const
- {
- return this->compute_scalar_product( second );
- }
-
-
- std::pair<double, double> get_y_range_interface(size_t level = 0) const
- {
- return this->get_y_range( level );
- }
- */
-};
-
-} // namespace Persistence_representations
-} // namespace Gudhi
-
-#endif // PERSISTENCE_LANDSCAPE_ON_GRID_INTERFACE_H_
diff --git a/src/cython/include/Persistence_vectors_interface.h b/src/cython/include/Persistence_vectors_interface.h
deleted file mode 100644
index 3fc482d7..00000000
--- a/src/cython/include/Persistence_vectors_interface.h
+++ /dev/null
@@ -1,121 +0,0 @@
-/* This file is part of the Gudhi Library. The Gudhi library
- * (Geometric Understanding in Higher Dimensions) is a generic C++
- * library for computational topology.
- *
- * Author(s): Pawel Dlotko
- *
- * Copyright (C) 2016 INRIA (France)
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#ifndef PERSISTENCE_VECTORS_INTERFACE_H_
-#define PERSISTENCE_VECTORS_INTERFACE_H_
-
-// gudhi include
-#include <gudhi/persistence_vectors.h>
-namespace Gudhi {
-namespace Persistence_representations {
-
-
-
-template <typename F>
-class Vector_distances_in_diagram_interface : Vector_distances_in_diagram<Euclidean_distance> {
- public:
- Vector_distances_in_diagram_interface():Vector_distances_in_diagram(){}
-
- Vector_distances_in_diagram_interface(const std::vector<std::pair<double, double> >& intervals, size_t where_to_cut):
- Vector_distances_in_diagram(intervals,where_to_cut){}
-
- Vector_distances_in_diagram_interface(const char* filename, size_t where_to_cut,
- unsigned dimension = std::numeric_limits<unsigned>::max()):
- Vector_distances_in_diagram(filename,where_to_cut,dimension){}
-
- inline double vector_in_position_interface(size_t position) const
- {
- return this->vector_in_position(position);
- }
-
- inline size_t size_interface() const
- {
- return this->size();
- }
-
- void write_to_file_interface(const char* filename) const
- {
- this->write_to_file( filename );
- }
-
- void print_to_file_interface(const char* filename) const
- {
- this->print_to_file(filename);
- }
-
- void load_from_file_interface(const char* filename)
- {
- this->load_from_file(filename);
- }
-
- double project_to_R_interface(int number_of_function) const
- {
- return this->project_to_R(number_of_function);
- }
-
- size_t number_of_projections_to_R_interface() const
- {
- return this->number_of_projections_to_R();
- }
-
- std::vector<double> vectorize_interface(int number_of_function) const
- {
- return this->vectorize(number_of_function);
- }
-
- size_t number_of_vectorize_functions_interface() const
- {
- return this->number_of_vectorize_functions();
- }
-
- void compute_average_interface(const std::vector<Vector_distances_in_diagram*>& to_average)
- {
- this->compute_average(to_average);
- }
-
- double distance_interface(const Vector_distances_in_diagram& second, double power = 1) const
- {
- return this->distance(second,power);
- }
-
- double compute_scalar_product_interface(const Vector_distances_in_diagram& second) const
- {
- return this->compute_scalar_product(second);
- }
-
- std::pair<double, double> get_x_range_interface() const
- {
- return this->get_x_range();
- }
-
- std::pair<double, double> get_y_range_interface() const
- {
- return this->get_y_range();
- }
-
-};
-
-} // namespace Persistence_representations
-} // namespace Gudhi
-
-#endif // PERSISTENCE_VECTORS_INTERFACE_H_
diff --git a/src/cython/include/Reader_utils_interface.h b/src/cython/include/Reader_utils_interface.h
new file mode 100644
index 00000000..8ec34f61
--- /dev/null
+++ b/src/cython/include/Reader_utils_interface.h
@@ -0,0 +1,56 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2017 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef INCLUDE_READER_UTILS_INTERFACE_H_
+#define INCLUDE_READER_UTILS_INTERFACE_H_
+
+#include <gudhi/reader_utils.h>
+
+#include <iostream>
+#include <vector>
+#include <string>
+#include <map>
+#include <utility> // for pair<>
+
+namespace Gudhi {
+
+// Redefine these functions under different names so that the original names can be used on the Python side.
+inline std::vector<std::vector<double>> read_matrix_from_csv_file(const std::string& filename,
+ const char separator = ';') {
+ return read_lower_triangular_matrix_from_csv_file<double>(filename, separator);
+}
+
+inline std::map<int, std::vector<std::pair<double, double>>>
+ read_pers_intervals_grouped_by_dimension(std::string const& filename) {
+ return read_persistence_intervals_grouped_by_dimension(filename);
+}
+
+inline std::vector<std::pair<double, double>>
+ read_pers_intervals_in_dimension(std::string const& filename, int only_this_dim = -1) {
+ return read_persistence_intervals_in_dimension(filename, only_this_dim);
+}
+
+
+} // namespace Gudhi
+
+
+#endif // INCLUDE_READER_UTILS_INTERFACE_H_
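The header above re-exports the C++ readers under interface names so the cython layer can expose them to Python. Assuming the Python-side functions keep the original reader names and parameters (the wrapper names and a diagram.pers input file are assumptions, not shown in this diff), usage would look roughly like:

    import gudhi

    # All intervals of a diagram file, grouped by homological dimension ...
    all_dims = gudhi.read_persistence_intervals_grouped_by_dimension(
        persistence_file='diagram.pers')
    # ... or only the intervals of a single dimension.
    dim0 = gudhi.read_persistence_intervals_in_dimension(
        persistence_file='diagram.pers', only_this_dim=0)
    print(len(dim0), "intervals in dimension 0")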
diff --git a/src/cython/include/Rips_complex_interface.h b/src/cython/include/Rips_complex_interface.h
index 6d813f4a..02985727 100644
--- a/src/cython/include/Rips_complex_interface.h
+++ b/src/cython/include/Rips_complex_interface.h
@@ -66,11 +66,15 @@ class Rips_complex_interface {
} else {
// Rips construction where values is a distance matrix
Distance_matrix distances =
- read_lower_triangular_matrix_from_csv_file<Simplex_tree_interface<>::Filtration_value>(file_name);
+ Gudhi::read_lower_triangular_matrix_from_csv_file<Simplex_tree_interface<>::Filtration_value>(file_name);
rips_complex_ = new Rips_complex<Simplex_tree_interface<>::Filtration_value>(distances, threshold);
}
}
+ ~Rips_complex_interface() {
+ delete rips_complex_;
+ }
+
void create_simplex_tree(Simplex_tree_interface<>* simplex_tree, int dim_max) {
rips_complex_->create_complex(*simplex_tree, dim_max);
simplex_tree->initialize_filtration();
diff --git a/src/cython/include/Simplex_tree_interface.h b/src/cython/include/Simplex_tree_interface.h
index 09e7e992..54a4f824 100644
--- a/src/cython/include/Simplex_tree_interface.h
+++ b/src/cython/include/Simplex_tree_interface.h
@@ -52,6 +52,10 @@ class Simplex_tree_interface : public Simplex_tree<SimplexTreeOptions> {
return (Base::find(vh) != Base::null_simplex());
}
+ void assign_simplex_filtration(const Simplex& vh, Filtration_value filtration) {
+ Base::assign_filtration(Base::find(vh), filtration);
+ }
+
bool insert(const Simplex& simplex, Filtration_value filtration = 0) {
Insertion_result result = Base::insert_simplex_and_subfaces(simplex, filtration);
return (result.second);
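assign_simplex_filtration gives the Python wrapper a way to overwrite the filtration value of a simplex that is already in the tree. A sketch of what that enables, assuming the cython side exposes it as assign_filtration (the wrapper name is not visible in this hunk):

    import gudhi

    st = gudhi.SimplexTree()
    st.insert([0, 1], filtration=1.0)

    # Overwrite the filtration value of an existing simplex.
    st.assign_filtration([0, 1], 2.0)
    st.initialize_filtration()
    print(st.filtration([0, 1]))  # expected: 2.0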
diff --git a/src/cython/include/Tangential_complex_interface.h b/src/cython/include/Tangential_complex_interface.h
index 5e9dc0e4..ecf014b3 100644
--- a/src/cython/include/Tangential_complex_interface.h
+++ b/src/cython/include/Tangential_complex_interface.h
@@ -106,8 +106,6 @@ class Tangential_complex_interface {
void create_simplex_tree(Simplex_tree<>* simplex_tree) {
int max_dim = tangential_complex_->create_complex<Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_full_featured>>(*simplex_tree);
- // FIXME
- simplex_tree->set_dimension(max_dim);
simplex_tree->initialize_filtration();
}
diff --git a/src/cython/setup.py.in b/src/cython/setup.py.in
index c1a1717a..fefa36bb 100644
--- a/src/cython/setup.py.in
+++ b/src/cython/setup.py.in
@@ -29,7 +29,7 @@ __license__ = "GPL v3"
gudhi = Extension(
"gudhi",
- sources = ['gudhi.pyx',],
+ sources = ['@CMAKE_CURRENT_BINARY_DIR@/gudhi.pyx',],
language = 'c++',
extra_compile_args=[@GUDHI_CYTHON_EXTRA_COMPILE_ARGS@],
extra_link_args=[@GUDHI_CYTHON_EXTRA_LINK_ARGS@],
diff --git a/src/cython/test/test_cubical_complex.py b/src/cython/test/test_cubical_complex.py
index 9a365823..0e81554d 100755
--- a/src/cython/test/test_cubical_complex.py
+++ b/src/cython/test/test_cubical_complex.py
@@ -62,17 +62,17 @@ def test_dimension_or_perseus_file_constructor():
assert cub.__is_defined() == False
assert cub.__is_persistence_defined() == False
-def test_dimension_constructor():
+def test_dimension_simple_constructor():
cub = CubicalComplex(dimensions=[3, 3],
top_dimensional_cells = [1,2,3,4,5,6,7,8,9])
assert cub.__is_defined() == True
assert cub.__is_persistence_defined() == False
- assert cub.persistence() == [(1, (0.0, 100.0)), (0, (0.0, float('inf')))]
+ assert cub.persistence() == [(0, (1.0, float('inf')))]
assert cub.__is_persistence_defined() == True
- assert cub.betti_numbers() == [1, 0]
- assert cub.persistent_betti_numbers(0, 1000) == [0, 0]
+ assert cub.betti_numbers() == [1, 0, 0]
+ assert cub.persistent_betti_numbers(0, 1000) == [0, 0, 0]
-def test_dimension_constructor():
+def test_dimension_file_constructor():
# Create test file
test_file = open('CubicalOneSphere.txt', 'w')
test_file.write('2\n3\n3\n0\n0\n0\n0\n100\n0\n0\n0\n0\n')
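The corrected expectations above say that this 3x3 bitmap has a single 0-dimensional class born at the minimum cell value 1.0 and never dying, that Betti numbers are now reported for dimensions 0 through 2, and that no class spans the whole range [0, 1000] because nothing is born before 0. The updated test boils down to:

    import gudhi

    cub = gudhi.CubicalComplex(dimensions=[3, 3],
                               top_dimensional_cells=[1, 2, 3, 4, 5, 6, 7, 8, 9])
    print(cub.persistence())                      # [(0, (1.0, inf))]
    print(cub.betti_numbers())                    # [1, 0, 0]
    print(cub.persistent_betti_numbers(0, 1000))  # [0, 0, 0]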
diff --git a/src/cython/test/test_persistence_representations_intervals.py b/src/cython/test/test_persistence_representations_intervals.py
deleted file mode 100755
index a8f3686e..00000000
--- a/src/cython/test/test_persistence_representations_intervals.py
+++ /dev/null
@@ -1,87 +0,0 @@
-import gudhi
-
-"""This file is part of the Gudhi Library. The Gudhi library
- (Geometric Understanding in Higher Dimensions) is a generic C++
- library for computational topology.
-
- Author(s): Pawel Dlotko
-
- Copyright (C) 2017 Swansea University
-
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <http://www.gnu.org/licenses/>.
-"""
-
-__author__ = "Pawel Dlotko"
-__copyright__ = "Copyright (C) 2017 Swansea University"
-__license__ = "GPL v3"
-
-epsilon = 0.0000005;
-
-def test_check_min_max_function():
- p = gudhi.PersistenceIntervals(None,None,"data/file_with_diagram")
- min_max_ = p.get_x_range()
- assert fabs(min_max_[0] - 0.0290362) <= epsilon
- assert fabs(min_max_[1] - 0.994537) <= epsilon
-
-def test_check_length_of_dominant_intervals():
- p = gudhi.PersistenceIntervals(None,None,"data/file_with_diagram")
- dominant_ten_intervals_length = p.length_of_dominant_intervals(10)
- dominant_intervals_length_ = [0.862625,0.800893,0.762061,0.756501,0.729367,0.718177,0.708395,0.702844,0.700468,0.622177]
- assert dominant_ten_intervals_length == dominant_intervals_length_
-
-
-def test_check_dominant_intervals():
- p = gudhi.PersistenceIntervals(None,None,"data/file_with_diagram");
- ten_dominant_intervals = p.dominant_intervals(10);
- templ = [ (0.114718, 0.977343) , (0.133638, 0.93453) , (0.104599, 0.866659) , (0.149798, 0.906299), (0.247352, 0.976719) , (0.192675, 0.910852) , (0.191836, 0.900231) , (0.284998, 0.987842) , (0.294069, 0.994537), (0.267421, 0.889597)]
- assert fabs(ten_dominant_intervals - templ) <= epsilon
-
-
-def test_check_histogram_of_lengths():
- p = gudhi.PersistenceIntervals(None,None,"data/file_with_diagram")
- histogram = p.histogram_of_lengths(10);
- template_histogram = [10,5,3,4,4,3,6,1,7,1,1]
- assert fabs(histogram - template_histogram) <= epsilon
-
-
-def test_check_cumulative_histograms_of_lengths():
- p = gudhi.PersistenceIntervals(None,None,"data/file_with_diagram")
- cumulative_histogram = p.cumulative_histogram_of_lengths(10)
- template_cumulative_histogram = [10,15,18,22,26,29,35,36,43,44,45]
- assert fabs(cumulative_histogram - template_cumulative_histogram) <= epsilon
-
-
-def test_check_characteristic_function_of_diagram():
- p = gudhi.PersistenceIntervals(None,None,"data/file_with_diagram")
- min_max_ = p.get_x_range();
- char_funct_diag = p.characteristic_function_of_diagram(min_max_[0], min_max_[1]);
- template_char_funct_diag = [0.370665,0.84058,1.24649,1.3664,1.34032,1.31904,1.14076,0.991259,0.800714,0.0676303]
- assert fabs(char_funct_diag - template_char_funct_diag) <= 0.0001
-
-
-def test_check_cumulative_characteristic_function_of_diagram():
- p = gudhi.PersistenceIntervals(None,None,"data/file_with_diagram")
- min_max_ = p.get_x_range()
- cumul_char_funct_diag = p.cumulative_characteristic_function_of_diagram(min_max_[0], min_max_[1], None)
- template_char_funct_diag_cumul = [0.370665,1.21125,2.45774,3.82414,5.16446,6.4835,7.62426,8.61552,9.41623,9.48386]
- assert fabs(cumul_char_funct_diag - template_char_funct_diag_cumul) <= 0.0001
-
-
-def test_check_compute_persistent_betti_numbers():
- p = gudhi.PersistenceIntervals(None,None,"data/file_with_diagram")
- pbns = [(0.0290362, 1),(0.0307676, 2),(0.0366312, 3),(0.0544614, 4),(0.0920033, 5),(0.104599, 6),(0.114718, 7),(0.117379, 8),(0.123493, 9),(0.133638, 10)(0.137798, 9),(0.149798, 10),(0.155421, 11),(0.158443, 12)(0.176956, 13),(0.183234, 12),(0.191069, 13),(0.191333, 14),(0.191836, 15),(0.192675, 16),(0.208564, 17),(0.218425, 18),(0.219902, 17),(0.23233, 16),(0.234558, 17),(0.237166, 16),(0.247352, 17),(0.267421, 18),(0.268093, 19),(0.278734, 18),(0.284722, 19),(0.284998, 20),(0.294069, 21),(0.306293, 22),(0.322361, 21),(0.323152, 22),(0.371021, 23),(0.372395, 24),(0.387744, 25),(0.435537, 26),(0.462911, 25),(0.483569, 26),(0.489209, 25),(0.517115, 24),(0.522197, 23),(0.532665, 22),(0.545262, 23),(0.587227, 22),(0.593036, 23),(0.602647, 24),(0.605044, 25),(0.621962, 24),(0.629449, 23),(0.636719, 22),(0.64957, 21),(0.650781, 22),(0.654951, 23),(0.683489, 24),(0.687172, 23),(0.69703, 22),(0.701174, 21),(0.717623, 22),(0.722023, 21),(0.722298, 20),(0.725347, 19),(0.73071, 18),(0.758355, 17),(0.770913, 18),(0.790833, 17),(0.821211, 16),(0.849305, 17),(0.853669, 16),(0.866659, 15),(0.872896, 16),(0.889597, 15),(0.900231, 14),(0.903847, 13),(0.906299, 12),(0.910852, 11),(0.93453, 10),(0.944757, 9),(0.947812, 8),(0.959154, 7),(0.975654, 6),(0.976719, 5),(0.977343, 4),(0.980129, 3),(0.987842, 2),(0.990127, 1),(0.994537, 0)]
- pbns_new = p.compute_persistent_betti_numbers();
- assert fabs(pbns - pbns_new) <= epsilon
-
-
diff --git a/src/cython/test/test_persistence_representations_landscapes.py b/src/cython/test/test_persistence_representations_landscapes.py
deleted file mode 100755
index f5e6f351..00000000
--- a/src/cython/test/test_persistence_representations_landscapes.py
+++ /dev/null
@@ -1,139 +0,0 @@
-import gudhi
-
-"""
- This file is part of the Gudhi Library. The Gudhi library
- (Geometric Understanding in Higher Dimensions) is a generic C++
- library for computational topology.
-
- Author(s): Pawel Dlotko
-
- Copyright (C) 2017 Swansea University
-
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <http://www.gnu.org/licenses/>.
-"""
-
-__author__ = "Pawel Dlotko"
-__copyright__ = "Copyright (C) 2017 Swansea University"
-__license__ = "GPL v3"
-
-epsilon = 0.0000005;
-
-def test_check_construction_of_landscape():
- p = gudhi.Persistence_landscape("data/file_with_diagram",0)
- q = gudhi.Persistence_landscape()
- q.load_landscape_from_file("data/file_with_landscape_from_file_with_diagram")
- assert p == q
-
-
-def test_check_construction_of_landscape_form_gudhi_style_file():
- p = gudhi.Persistence_landscape("data/persistence_file_with_four_entries_per_line", 1)
- q = gudhi.Persistence_landscape()
- q.load_landscape_from_file("data/persistence_file_with_four_entries_per_line_landscape");
- assert p == q;
-
-def test_check_computations_of_integrals():
- p = gudhi.Persistence_landscape("data/file_with_diagram",0)
- integral = p.compute_integral_of_landscape()
- assert fabs(integral - 2.34992) <= 0.00001
-
-
-def test_check_computations_of_integrals_for_each_level_separatelly():
- diag = read_persistence_intervals_in_one_dimension_from_file("data/file_with_diagram");
- p = gudhi.Persistence_landscape(diag)
- integrals_for_different_levels = [0.216432,0.204763,0.188793,0.178856,0.163142,0.155015,0.143046,0.133765,0.123531,0.117393,0.111269,0.104283,0.0941308,0.0811208,0.0679001,0.0580801,0.0489647,0.0407936,0.0342599,0.02896,0.0239881,0.0171792,0.0071511,0.00462067,0.00229033,0.000195296]
- for lv in range(0, len(integrals_for_different_levels)):
- integral = p.compute_integral_of_a_level_of_a_landscape(lv);
- assert fabs(integral - integrals_for_different_levels[lv]) <= 0.00001
-
-def test_check_computations_of_integrals_of_powers_of_landscape():
- diag = read_persistence_intervals_in_one_dimension_from_file("data/file_with_diagram")
- p = gudhi.Persistence_landscape(diag)
- integrals_fir_different_powers = [17.1692,2.34992,0.49857,0.126405,0.0355235]
- for power in range(0,5):
- integral = p.compute_integral_of_landscape(power)
- assert fabs(integral - integrals_fir_different_powers[power]) <= 0.00005
-
-
-def test_check_computations_of_values_on_different_points():
- diag = read_persistence_intervals_in_one_dimension_from_file("data/file_with_diagram")
- p = gudhi.Persistence_landscape(diag);
- assert fabs(p.compute_value_at_a_given_point(1, 0.0)) <= 0.00001
- assert fabs(p.compute_value_at_a_given_point(1, 0.1) - 0.0692324) <= 0.00001
- assert fabs(p.compute_value_at_a_given_point(1, 0.2) - 0.163369) <= 0.00001
- assert fabs(p.compute_value_at_a_given_point(1, 0.3) - 0.217115) <= 0.00001
- assert fabs(p.compute_value_at_a_given_point(2, 0.0)) <= 0.00001
- assert fabs(p.compute_value_at_a_given_point(2, 0.1) - 0.0633688) <= 0.00001
- assert fabs(p.compute_value_at_a_given_point(2, 0.2) - 0.122361) <= 0.00001
- assert fabs(p.compute_value_at_a_given_point(2, 0.3) - 0.195401) <= 0.00001
- assert fabs(p.compute_value_at_a_given_point(3, 0.0)) <= 0.00001
- assert fabs(p.compute_value_at_a_given_point(3, 0.1) - 0.0455386) <= 0.00001
- assert fabs(p.compute_value_at_a_given_point(3, 0.2) - 0.0954012) <= 0.00001
- assert fabs(p.compute_value_at_a_given_point(3, 0.3) - 0.185282) <= 0.00001
-
-
-def test_check_computations_of_maxima_and_norms():
- diag = read_persistence_intervals_in_one_dimension_from_file("data/file_with_diagram")
- p = gudhi.Persistence_landscape(diag)
- second = gudhi.Persistence_landscape()
- second.load_landscape_from_file("data/file_with_landscape_from_file_with_diagram_1")
- sum_ = gudhi.Persistence_landscape()
- sum_ = p + second;
- assert fabs(p.compute_maximum() - 0.431313) <= 0.00001
- assert fabs(p.compute_norm_of_landscape(1) - 2.34992) <= 0.00001
- assert fabs(p.compute_norm_of_landscape(2) - 0.706095) <= 0.00001
- assert fabs(p.compute_norm_of_landscape(3) - 0.501867) <= 0.00001
- assert fabs(compute_distance_of_landscapes(p, sum_, 1) - 27.9323) <= 0.00005
- assert fabs(compute_distance_of_landscapes(p, sum_, 2) - 2.35199) <= 0.00001
-
-
-
-def test_check_default_parameters_of_distances():
- diag = read_persistence_intervals_in_one_dimension_from_file("data/file_with_diagram")
- p = gudhi.Persistence_landscape(diag)
- diag1 = read_persistence_intervals_in_one_dimension_from_file("data/file_with_diagram_1")
- q = gudhi.Persistence_landscape(diag1)
- dist_numeric_limit_max = p.distance(q, sys.float_info.max);
- dist_infinity = p.distance(q, sys.float_info.max);
- assert dist_numeric_limit_max == dist_infinity
-
-
-def test_check_computations_of_averages():
- diag = read_persistence_intervals_in_one_dimension_from_file("data/file_with_diagram")
- p = gudhi.Persistence_landscape(diag)
- diag2 = read_persistence_intervals_in_one_dimension_from_file("data/file_with_diagram_1")
- q = gudhi.Persistence_landscape(diag2)
- av = gudhi.Persistence_landscape()
- av.compute_average({p, q})
- template_average = gudhi.Persistence_landscape()
- template_average.load_landscape_from_file("data/average")
- assert template_average == av
-
-
-def test_check_computations_of_distances():
- diag = read_persistence_intervals_in_one_dimension_from_file("data/file_with_diagram")
- p = gudhi.Persistence_landscape(diag)
- diag2 = read_persistence_intervals_in_one_dimension_from_file("data/file_with_diagram_1")
- q = gudhi.Persistence_landscape(diag2)
- assert fabs(p.distance(q) - 25.5824) <= 0.00005
- assert fabs(p.distance(q, 2) - 2.12636) <= 0.00001
- assert fabs(p.distance(q, sys.float_info.max) - 0.359068) <= 0.00001
-
-
-def test_check_computations_of_scalar_product():
- diag = read_persistence_intervals_in_one_dimension_from_file("data/file_with_diagram")
- p = gudhi.Persistence_landscape(diag)
- diag2 = read_persistence_intervals_in_one_dimension_from_file("data/file_with_diagram_1")
- q = gudhi.Persistence_landscape(diag2)
- assert fabs(p.compute_scalar_product(q) - 0.754498) <= 0.00001
-
diff --git a/src/cython/test/test_persistence_representations_landscapes_on_grid.py b/src/cython/test/test_persistence_representations_landscapes_on_grid.py
deleted file mode 100755
index 7a1f2b49..00000000
--- a/src/cython/test/test_persistence_representations_landscapes_on_grid.py
+++ /dev/null
@@ -1,120 +0,0 @@
-import gudhi
-
-"""
- This file is part of the Gudhi Library. The Gudhi library
- (Geometric Understanding in Higher Dimensions) is a generic C++
- library for computational topology.
-
- Author(s): Pawel Dlotko
-
- Copyright (C) 2017 Swansea University
-
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <http://www.gnu.org/licenses/>.
-"""
-
-__author__ = "Pawel Dlotko"
-__copyright__ = "Copyright (C) 2017 Swansea University"
-__license__ = "GPL v3"
-
-epsilon = 0.0000005;
-
-
-
-def test_check_construction_of_landscape():
- l = gudhi.PersistenceLandscapeOnGrid("data/file_with_diagram_1", 100,sys.maxsize)
- l.print_to_file("landscape_from_file_with_diagram_1")
- g = gudhi.PersistenceLandscapeOnGrid()
- g.load_landscape_from_file("landscape_from_file_with_diagram_1")
- assert l == g
-
-
-def test_check_construction_of_landscape_using_only_ten_levels():
- number = 10
- l = gudhi.PersistenceLandscapeOnGrid("data/file_with_diagram_1", 100, number)
- g = gudhi.PersistenceLandscapeOnGrid("data/file_with_diagram_1", 100, sys.maxsize)
- for level in range(0,number):
- v1 = l.vectorize(level)
- v2 = g.vectorize(level)
- assert v1 == v2
-
-def test_check_computations_of_integrals():
- p = gudhi.PersistenceLandscapeOnGrid("data/file_with_diagram_1", 100, sys.maxsize)
- integral = p.compute_integral_of_landscape()
- assert fabs(integral - 27.343) <= 0.00005
-
-def test_check_computations_of_integrals_for_each_level_separately():
- p = gudhi.PersistenceLandscapeOnGrid("data/file_with_diagram_1", 100, sys.maxsize)
- integrals_for_different_levels = [0.241168,0.239276,0.237882,0.235193,0.230115,0.227626,0.226132,0.223643,0.221651,0.220556,0.21727,0.215976,0.213685,0.211993,0.2102,0.208707,0.207014,0.205122,0.204226,0.202633]
- for level in range(0, len(integrals_for_different_levels)):
- integral = p.compute_integral_of_landscape(level)
- assert fabs(integral - integrals_for_different_levels[level]) <= 0.00005
-
-def test_check_computations_of_integrals_of_powers_of_landscape():
- p = gudhi.PersistenceLandscapeOnGrid("data/file_with_diagram_1", 100, sys.maxsize)
- integrals_for_different_powers = [0.241168,0.239276,0.237882,0.235193,0.23011]
- for power in range(0, 5):
- integral = p.compute_integral_of_landscape(power)
- assert fabs(integral - integrals_for_different_powers[power]) <= 0.00001
-
-def test_check_computations_of_values_on_different_points():
- p = gudhi.PersistenceLandscapeOnGrid("data/file_with_diagram_1", 100, sys.maxsize)
- results_level_0 = [0.00997867,0.0521921,0.104312,0.156432,0.208552,0.260672,0.312792,0.364912,0.417032,0.429237]
- results_level_10 = [7.21433e-05,0.0422135,0.0943335,0.146453,0.198573,0.240715,0.272877,0.324997,0.359232,0.379344]
- x = 0.0012321
- dx = 0.05212
- for i in range(0, 10):
- assert fabs(p.compute_value_at_a_given_point(0, x) - results_level_0[i]) <= 0.00001
- assert fabs(p.compute_value_at_a_given_point(10, x) - results_level_10[i]) <= 0.00001
- x += dx
-
-def test_check_computations_of_maxima_and_norms():
- p = gudhi.PersistenceLandscapeOnGrid("data/file_with_diagram_1", 0., 1., 100)
- second = gudhi.PersistenceLandscapeOnGrid("data/file_with_diagram_2", 0., 1., 100)
- assert fabs(p.compute_maximum() - 0.46) <= 0.00001
- assert fabs(p.compute_norm_of_landscape(1) - 27.3373) <= 0.00001
- assert fabs(p.compute_norm_of_landscape(2) - 1.84143) <= 0.00001
- assert fabs(p.compute_norm_of_landscape(3) - 0.927067) <= 0.00001
-
-def test_check_default_parameters_of_distances():
- diag = gudhi.read_persistence_intervals_in_dimension("data/file_with_diagram")
- p = gudhi.PersistenceLandscapeOnGrid(diag, 0., 1., 100)
- diag1 = gudhi.read_persistence_intervals_in_dimension("data/file_with_diagram_1")
- q = gudhi.PersistenceLandscapeOnGrid(diag1, 0., 1., 100)
- dist_numeric_limit_max = p.distance(q, sys.maxsize)
- dist_infinity = p.distance(q, sys.maxsize)
- assert dist_numeric_limit_max == dist_infinity
-
-def test_check_computations_of_averages():
- p = gudhi.PersistenceLandscapeOnGrid("data/file_with_diagram", 0., 1., 100)
- q = gudhi.PersistenceLandscapeOnGrid("data/file_with_diagram_1", 0., 1., 100)
- av = gudhi.PersistenceLandscapeOnGrid()
- av.compute_average([p, q])
-
- template_average = gudhi.PersistenceLandscapeOnGrid()
- template_average.load_landscape_from_file("data/average_on_a_grid")
- assert template_average == av
-
-
-def test_check_computations_of_distances():
- p = gudhi.PersistenceLandscapeOnGrid("data/file_with_diagram", 0., 1., 10000)
- q = gudhi.PersistenceLandscapeOnGrid("data/file_with_diagram_1", 0., 1., 10000)
- assert fabs(p.distance(q) - 25.5779) <= 0.00005
- assert fabs(p.distance(q, 2) - 2.04891) <= 0.00001
- assert fabs(p.distance(q, sys.maxsize) - 0.359) <= 0.00001
-
-
-def test_check_computations_of_scalar_product():
- p = gudhi.PersistenceLandscapeOnGrid("data/file_with_diagram", 0., 1., 10000)
- q = gudhi.PersistenceLandscapeOnGrid("data/file_with_diagram_1", 0., 1., 10000)
- assert fabs(p.compute_scalar_product(q) - 0.754367) <= 0.00001
diff --git a/src/cython/test/test_reader_utils.py b/src/cython/test/test_reader_utils.py
new file mode 100755
index 00000000..25591fb3
--- /dev/null
+++ b/src/cython/test/test_reader_utils.py
@@ -0,0 +1,88 @@
+import gudhi
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2017 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2017 INRIA"
+__license__ = "GPL v3"
+
+
+def test_non_existing_csv_file():
+ # Try to open a non existing file
+ matrix = gudhi.read_lower_triangular_matrix_from_csv_file(csv_file='pouetpouettralala.toubiloubabdou')
+ assert matrix == []
+
+def test_full_square_distance_matrix_csv_file():
+ # Create test file
+ test_file = open('full_square_distance_matrix.csv', 'w')
+ test_file.write('0;1;2;3;\n1;0;4;5;\n2;4;0;6;\n3;5;6;0;')
+ test_file.close()
+ matrix = gudhi.read_lower_triangular_matrix_from_csv_file(csv_file="full_square_distance_matrix.csv")
+ assert matrix == [[], [1.0], [2.0, 4.0], [3.0, 5.0, 6.0]]
+
+def test_lower_triangular_distance_matrix_csv_file():
+ # Create test file
+ test_file = open('lower_triangular_distance_matrix.csv', 'w')
+ test_file.write('\n1,\n2,3,\n4,5,6,\n7,8,9,10,')
+ test_file.close()
+ matrix = gudhi.read_lower_triangular_matrix_from_csv_file(csv_file="lower_triangular_distance_matrix.csv", separator=",")
+ assert matrix == [[], [1.0], [2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9.0, 10.0]]
+
+def test_non_existing_persistence_file():
+ # Try to open a non existing file
+ persistence = gudhi.read_persistence_intervals_grouped_by_dimension(persistence_file='pouetpouettralala.toubiloubabdou')
+ assert persistence == []
+ persistence = gudhi.read_persistence_intervals_in_dimension(persistence_file='pouetpouettralala.toubiloubabdou', only_this_dim=1)
+ assert persistence == []
+
+def test_read_persistence_intervals_without_dimension():
+ # Create test file
+ test_file = open('persistence_intervals_without_dimension.pers', 'w')
+ test_file.write('# Simple persistence diagram without dimension\n2.7 3.7\n9.6 14.\n34.2 34.974\n3. inf')
+ test_file.close()
+ persistence = gudhi.read_persistence_intervals_in_dimension(persistence_file='persistence_intervals_without_dimension.pers')
+ assert persistence == [(2.7, 3.7), (9.6, 14.), (34.2, 34.974), (3., float('Inf'))]
+ persistence = gudhi.read_persistence_intervals_in_dimension(persistence_file='persistence_intervals_without_dimension.pers', only_this_dim=0)
+ assert persistence == []
+ persistence = gudhi.read_persistence_intervals_in_dimension(persistence_file='persistence_intervals_without_dimension.pers', only_this_dim=1)
+ assert persistence == []
+ persistence = gudhi.read_persistence_intervals_grouped_by_dimension(persistence_file='persistence_intervals_without_dimension.pers')
+ assert persistence == {-1: [(2.7, 3.7), (9.6, 14.0), (34.2, 34.974), (3.0, float('Inf'))]}
+
+def test_read_persistence_intervals_with_dimension():
+ # Create test file
+ test_file = open('persistence_intervals_with_dimension.pers', 'w')
+ test_file.write('# Simple persistence diagram with dimension\n0 2.7 3.7\n1 9.6 14.\n3 34.2 34.974\n1 3. inf')
+ test_file.close()
+ persistence = gudhi.read_persistence_intervals_in_dimension(persistence_file='persistence_intervals_with_dimension.pers')
+ assert persistence == [(2.7, 3.7), (9.6, 14.), (34.2, 34.974), (3., float('Inf'))]
+ persistence = gudhi.read_persistence_intervals_in_dimension(persistence_file='persistence_intervals_with_dimension.pers', only_this_dim=0)
+ assert persistence == [(2.7, 3.7)]
+ persistence = gudhi.read_persistence_intervals_in_dimension(persistence_file='persistence_intervals_with_dimension.pers', only_this_dim=1)
+ assert persistence == [(9.6, 14.), (3., float('Inf'))]
+ persistence = gudhi.read_persistence_intervals_in_dimension(persistence_file='persistence_intervals_with_dimension.pers', only_this_dim=2)
+ assert persistence == []
+ persistence = gudhi.read_persistence_intervals_in_dimension(persistence_file='persistence_intervals_with_dimension.pers', only_this_dim=3)
+ assert persistence == [(34.2, 34.974)]
+ persistence = gudhi.read_persistence_intervals_grouped_by_dimension(persistence_file='persistence_intervals_with_dimension.pers')
+ assert persistence == {0: [(2.7, 3.7)], 1: [(9.6, 14.0), (3.0, float('Inf'))], 3: [(34.2, 34.974)]}
diff --git a/src/cython/test/test_simplex_tree.py b/src/cython/test/test_simplex_tree.py
index 3ae537e3..6dec5d94 100755
--- a/src/cython/test/test_simplex_tree.py
+++ b/src/cython/test/test_simplex_tree.py
@@ -34,9 +34,13 @@ def test_insertion():
# insert test
assert st.insert([0, 1]) == True
+
+ assert st.dimension() == 1
+
assert st.insert([0, 1, 2], filtration=4.0) == True
- # FIXME: Remove this line
- st.set_dimension(2)
+
+ assert st.dimension() == 2
+
assert st.num_simplices() == 7
assert st.num_vertices() == 3
@@ -53,7 +57,6 @@ def test_insertion():
assert st.find([2, 3]) == False
# filtration test
- st.set_filtration(5.0)
st.initialize_filtration()
assert st.filtration([0, 1, 2]) == 4.0
assert st.filtration([0, 2]) == 4.0
@@ -87,8 +90,9 @@ def test_insertion():
assert st.find([2]) == True
st.initialize_filtration()
- assert st.persistence() == [(1, (4.0, float('inf'))), (0, (0.0, float('inf')))]
+ assert st.persistence(persistence_dim_max=True) == [(1, (4.0, float('inf'))), (0, (0.0, float('inf')))]
assert st.__is_persistence_defined() == True
+
assert st.betti_numbers() == [1, 1]
assert st.persistent_betti_numbers(-0.1, 10000.0) == [0, 0]
assert st.persistent_betti_numbers(0.0, 10000.0) == [1, 0]
@@ -130,3 +134,30 @@ def test_expansion():
([1, 2], 0.5), ([0, 1, 2], 0.5), ([1, 2, 3], 0.5), ([5], 0.6), ([6], 0.6),
([5, 6], 0.6), ([4], 0.7), ([2, 4], 0.7), ([0, 3], 0.8), ([0, 1, 3], 0.8),
([0, 2, 3], 0.8), ([0, 1, 2, 3], 0.8), ([4, 6], 0.9), ([3, 6], 1.0)]
+
+def test_automatic_dimension():
+ st = SimplexTree()
+ assert st.__is_defined() == True
+ assert st.__is_persistence_defined() == False
+
+ # insert test
+ assert st.insert([0,1,3], filtration=0.5) == True
+ assert st.insert([0,1,2], filtration=1.) == True
+
+ assert st.num_vertices() == 4
+ assert st.num_simplices() == 11
+
+ assert st.dimension() == 2
+ assert st.upper_bound_dimension() == 2
+
+ assert st.prune_above_filtration(0.6) == True
+ assert st.dimension() == 2
+ assert st.upper_bound_dimension() == 2
+
+ st.assign_filtration([0, 1, 3], 0.7)
+ assert st.filtration([0, 1, 3]) == 0.7
+
+ st.remove_maximal_simplex([0, 1, 3])
+ assert st.upper_bound_dimension() == 2
+ assert st.dimension() == 1
+ assert st.upper_bound_dimension() == 1