-rw-r--r--  CMakeGUDHIVersion.txt | 2
-rw-r--r--  CMakeLists.txt | 32
-rw-r--r--  data/persistence_diagram/rips_on_tore3D_1307.pers | 2044
-rw-r--r--  data/points/human.COPYRIGHT | 77
-rwxr-xr-x  scripts/metagen.sh | 15
-rw-r--r--  src/Alpha_complex/doc/Intro_alpha_complex.h | 88
-rw-r--r--  src/Alpha_complex/example/CMakeLists.txt | 1
-rw-r--r--  src/Alpha_complex/test/CMakeLists.txt | 1
-rw-r--r--  src/Alpha_complex/utilities/CMakeLists.txt | 1
-rw-r--r--  src/Alpha_complex/utilities/alpha_complex_3d_persistence.cpp | 4
-rw-r--r--  src/Alpha_complex/utilities/alpha_complex_persistence.cpp | 2
-rw-r--r--  src/Alpha_complex/utilities/exact_alpha_complex_3d_persistence.cpp | 4
-rw-r--r--  src/Alpha_complex/utilities/periodic_alpha_complex_3d_persistence.cpp | 4
-rw-r--r--  src/Alpha_complex/utilities/weighted_alpha_complex_3d_persistence.cpp | 4
-rw-r--r--  src/Alpha_complex/utilities/weighted_periodic_alpha_complex_3d_persistence.cpp | 2
-rw-r--r--  src/Bitmap_cubical_complex/example/CMakeLists.txt | 1
-rw-r--r--  src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h | 4
-rw-r--r--  src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h | 2
-rw-r--r--  src/Bitmap_cubical_complex/test/CMakeLists.txt | 1
-rw-r--r--  src/Bitmap_cubical_complex/utilities/CMakeLists.txt | 1
-rw-r--r--  src/Bottleneck_distance/benchmark/CMakeLists.txt | 1
-rw-r--r--  src/Bottleneck_distance/example/CMakeLists.txt | 1
-rw-r--r--  src/Bottleneck_distance/example/alpha_rips_persistence_bottleneck_distance.cpp | 2
-rw-r--r--  src/Bottleneck_distance/include/gudhi/Bottleneck.h | 12
-rw-r--r--  src/Bottleneck_distance/test/CMakeLists.txt | 1
-rw-r--r--  src/Bottleneck_distance/utilities/CMakeLists.txt | 1
-rw-r--r--  src/CMakeLists.txt | 42
-rw-r--r--  src/Cech_complex/benchmark/CMakeLists.txt | 12
-rw-r--r--  src/Cech_complex/benchmark/cech_complex_benchmark.cpp | 144
-rw-r--r--  src/Cech_complex/concept/SimplicialComplexForCech.h | 66
-rw-r--r--  src/Cech_complex/doc/COPYRIGHT | 19
-rw-r--r--  src/Cech_complex/doc/Intro_cech_complex.h | 114
-rw-r--r--  src/Cech_complex/doc/cech_complex_representation.ipe | 330
-rw-r--r--  src/Cech_complex/doc/cech_complex_representation.png | bin 0 -> 39938 bytes
-rw-r--r--  src/Cech_complex/doc/cech_one_skeleton.ipe | 314
-rw-r--r--  src/Cech_complex/doc/cech_one_skeleton.png | bin 0 -> 24662 bytes
-rw-r--r--  src/Cech_complex/example/CMakeLists.txt | 16
-rw-r--r--  src/Cech_complex/example/cech_complex_example_from_points.cpp | 54
-rw-r--r--  src/Cech_complex/example/cech_complex_example_from_points_for_doc.txt | 31
-rw-r--r--  src/Cech_complex/example/cech_complex_step_by_step.cpp | 166
-rw-r--r--  src/Cech_complex/include/gudhi/Cech_complex.h | 130
-rw-r--r--  src/Cech_complex/include/gudhi/Cech_complex_blocker.h | 91
-rw-r--r--  src/Cech_complex/include/gudhi/Miniball.COPYRIGHT | 4
-rw-r--r--  src/Cech_complex/include/gudhi/Miniball.README | 26
-rw-r--r--  src/Cech_complex/include/gudhi/Miniball.hpp | 523
-rw-r--r--  src/Cech_complex/test/CMakeLists.txt | 15
-rw-r--r--  src/Cech_complex/test/README | 12
-rw-r--r--  src/Cech_complex/test/test_cech_complex.cpp | 264
-rw-r--r--  src/Cech_complex/utilities/CMakeLists.txt | 14
-rw-r--r--  src/Cech_complex/utilities/cech_persistence.cpp | 136
-rw-r--r--  src/Cech_complex/utilities/cechcomplex.md | 38
-rw-r--r--  src/Contraction/example/CMakeLists.txt | 1
-rw-r--r--  src/Doxyfile.in (renamed from src/Doxyfile) | 36
-rw-r--r--  src/GUDHIConfig.cmake.in | 7
-rw-r--r--  src/GudhUI/CMakeLists.txt | 3
-rw-r--r--  src/Nerve_GIC/example/CMakeLists.txt | 7
-rw-r--r--  src/Nerve_GIC/example/CoordGIC.cpp | 4
-rw-r--r--  src/Nerve_GIC/include/gudhi/GIC.h | 84
-rw-r--r--  src/Nerve_GIC/test/CMakeLists.txt | 1
-rw-r--r--  src/Nerve_GIC/utilities/CMakeLists.txt | 1
-rw-r--r--  src/Persistence_representations/example/CMakeLists.txt | 1
-rw-r--r--  src/Persistence_representations/include/gudhi/Persistence_landscape.h | 59
-rw-r--r--  src/Persistence_representations/include/gudhi/read_persistence_from_file.h | 6
-rw-r--r--  src/Persistence_representations/test/CMakeLists.txt | 1
-rw-r--r--  src/Persistence_representations/utilities/persistence_heat_maps/CMakeLists.txt | 1
-rw-r--r--  src/Persistence_representations/utilities/persistence_intervals/CMakeLists.txt | 1
-rw-r--r--  src/Persistence_representations/utilities/persistence_landscapes/CMakeLists.txt | 1
-rw-r--r--  src/Persistence_representations/utilities/persistence_landscapes_on_grid/CMakeLists.txt | 1
-rw-r--r--  src/Persistence_representations/utilities/persistence_vectors/CMakeLists.txt | 1
-rw-r--r--  src/Persistent_cohomology/benchmark/CMakeLists.txt | 2
-rw-r--r--  src/Persistent_cohomology/concept/FilteredComplex.h | 34
-rw-r--r--  src/Persistent_cohomology/example/CMakeLists.txt | 1
-rw-r--r--  src/Persistent_cohomology/example/persistence_from_file.cpp | 2
-rw-r--r--  src/Persistent_cohomology/example/rips_multifield_persistence.cpp | 2
-rw-r--r--  src/Persistent_cohomology/example/rips_persistence_step_by_step.cpp | 2
-rw-r--r--  src/Persistent_cohomology/example/rips_persistence_via_boundary_matrix.cpp | 2
-rw-r--r--  src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h | 5
-rw-r--r--  src/Persistent_cohomology/test/CMakeLists.txt | 1
-rw-r--r--  src/Rips_complex/example/CMakeLists.txt | 1
-rw-r--r--  src/Rips_complex/example/example_one_skeleton_rips_from_correlation_matrix.cpp | 10
-rw-r--r--  src/Rips_complex/include/gudhi/Sparse_rips_complex.h | 4
-rw-r--r--  src/Rips_complex/test/CMakeLists.txt | 1
-rw-r--r--  src/Rips_complex/utilities/CMakeLists.txt | 1
-rw-r--r--  src/Rips_complex/utilities/rips_correlation_matrix_persistence.cpp | 3
-rw-r--r--  src/Rips_complex/utilities/rips_distance_matrix_persistence.cpp | 2
-rw-r--r--  src/Rips_complex/utilities/rips_persistence.cpp | 2
-rw-r--r--  src/Rips_complex/utilities/sparse_rips_persistence.cpp | 2
-rw-r--r--  src/Simplex_tree/example/CMakeLists.txt | 1
-rw-r--r--  src/Simplex_tree/example/cech_complex_cgal_mini_sphere_3d.cpp | 2
-rw-r--r--  src/Simplex_tree/example/graph_expansion_with_blocker.cpp | 44
-rw-r--r--  src/Simplex_tree/include/gudhi/Simplex_tree.h | 11
-rw-r--r--  src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_iterators.h | 10
-rw-r--r--  src/Simplex_tree/test/CMakeLists.txt | 1
-rw-r--r--  src/Skeleton_blocker/example/CMakeLists.txt | 1
-rw-r--r--  src/Skeleton_blocker/test/CMakeLists.txt | 1
-rw-r--r--  src/Spatial_searching/example/CMakeLists.txt | 1
-rw-r--r--  src/Spatial_searching/test/CMakeLists.txt | 1
-rw-r--r--  src/Subsampling/example/CMakeLists.txt | 1
-rw-r--r--  src/Subsampling/test/CMakeLists.txt | 1
-rw-r--r--  src/Tangential_complex/benchmark/CMakeLists.txt | 1
-rw-r--r--  src/Tangential_complex/example/CMakeLists.txt | 1
-rw-r--r--  src/Tangential_complex/include/gudhi/Tangential_complex.h | 1095
-rw-r--r--  src/Tangential_complex/test/CMakeLists.txt | 1
-rw-r--r--  src/Witness_complex/example/CMakeLists.txt | 1
-rw-r--r--  src/Witness_complex/example/generators.h | 6
-rw-r--r--  src/Witness_complex/test/CMakeLists.txt | 1
-rw-r--r--  src/Witness_complex/utilities/CMakeLists.txt | 1
-rw-r--r--  src/Witness_complex/utilities/strong_witness_persistence.cpp | 2
-rw-r--r--  src/Witness_complex/utilities/weak_witness_persistence.cpp | 2
-rw-r--r--  src/cmake/modules/FindCython.cmake | 44
-rw-r--r--  src/cmake/modules/GUDHI_compilation_flags.cmake | 72
-rw-r--r--  src/cmake/modules/GUDHI_doxygen_target.cmake | 7
-rw-r--r--  src/cmake/modules/GUDHI_third_party_libraries.cmake | 36
-rw-r--r--  src/cmake/modules/GUDHI_user_version_target.cmake | 171
-rw-r--r--  src/common/doc/header.html | 111
-rw-r--r--  src/common/doc/installation.h | 29
-rw-r--r--  src/common/doc/main_page.h | 20
-rw-r--r--  src/common/example/CMakeLists.txt | 1
-rw-r--r--  src/common/include/gudhi/Off_reader.h | 2
-rw-r--r--  src/common/include/gudhi/distance_functions.h | 50
-rw-r--r--  src/common/include/gudhi/graph_simplicial_complex.h | 6
-rw-r--r--  src/common/include/gudhi/random_point_generators.h | 3
-rw-r--r--  src/common/include/gudhi/writing_persistence_to_file.h | 8
-rw-r--r--  src/common/test/CMakeLists.txt | 1
-rw-r--r--  src/common/utilities/CMakeLists.txt | 1
-rw-r--r--  src/cython/CMakeLists.txt | 232
-rw-r--r--  src/cython/cython/cubical_complex.pyx | 25
-rw-r--r--  src/cython/cython/nerve_gic.pyx | 410
-rw-r--r--  src/cython/cython/periodic_cubical_complex.pyx | 25
-rwxr-xr-x  src/cython/cython/persistence_graphical_tools.py | 364
-rw-r--r--  src/cython/cython/rips_complex.pyx | 2
-rw-r--r--  src/cython/cython/simplex_tree.pyx | 41
-rw-r--r--  src/cython/cython/subsampling.pyx | 8
-rw-r--r--  src/cython/cython/tangential_complex.pyx | 15
-rw-r--r--  src/cython/doc/_templates/layout.html | 124
-rw-r--r--  src/cython/doc/alpha_complex_ref.rst | 4
-rw-r--r--  src/cython/doc/alpha_complex_sum.inc (renamed from src/cython/doc/alpha_complex_sum.rst) | 0
-rw-r--r--  src/cython/doc/alpha_complex_user.rst | 12
-rw-r--r--  src/cython/doc/bottleneck_distance_sum.inc (renamed from src/cython/doc/bottleneck_distance_sum.rst) | 0
-rw-r--r--  src/cython/doc/bottleneck_distance_user.rst | 6
-rw-r--r--  src/cython/doc/citation.rst | 4
-rwxr-xr-x  src/cython/doc/conf.py | 3
-rw-r--r--  src/cython/doc/cubical_complex_ref.rst | 4
-rw-r--r--  src/cython/doc/cubical_complex_sum.inc (renamed from src/cython/doc/cubical_complex_sum.rst) | 0
-rw-r--r--  src/cython/doc/cubical_complex_user.rst | 5
-rw-r--r--  src/cython/doc/euclidean_strong_witness_complex_ref.rst | 4
-rw-r--r--  src/cython/doc/euclidean_witness_complex_ref.rst | 4
-rw-r--r--  src/cython/doc/examples.rst | 8
-rw-r--r--  src/cython/doc/fileformats.rst | 4
-rw-r--r--  src/cython/doc/index.rst | 23
-rw-r--r--  src/cython/doc/installation.rst | 67
-rw-r--r--  src/cython/doc/nerve_gic_complex_ref.rst | 10
-rw-r--r--  src/cython/doc/nerve_gic_complex_sum.rst | 15
-rw-r--r--  src/cython/doc/nerve_gic_complex_user.rst | 312
-rw-r--r--  src/cython/doc/periodic_cubical_complex_ref.rst | 4
-rw-r--r--  src/cython/doc/persistence_graphical_tools_ref.rst | 5
-rw-r--r--  src/cython/doc/persistence_graphical_tools_sum.inc (renamed from src/cython/doc/persistence_graphical_tools_sum.rst) | 0
-rw-r--r--  src/cython/doc/persistence_graphical_tools_user.rst | 71
-rw-r--r--  src/cython/doc/persistent_cohomology_sum.inc (renamed from src/cython/doc/persistent_cohomology_sum.rst) | 0
-rw-r--r--  src/cython/doc/persistent_cohomology_user.rst | 4
-rwxr-xr-x  src/cython/doc/pyplots/barcode_persistence.py | 7
-rwxr-xr-x  src/cython/doc/pyplots/diagram_persistence.py | 9
-rwxr-xr-x  src/cython/doc/pyplots/show_palette_values.py | 3
-rw-r--r--  src/cython/doc/reader_utils_ref.rst | 4
-rw-r--r--  src/cython/doc/rips_complex_ref.rst | 4
-rw-r--r--  src/cython/doc/rips_complex_sum.inc (renamed from src/cython/doc/rips_complex_sum.rst) | 0
-rw-r--r--  src/cython/doc/rips_complex_user.rst | 6
-rw-r--r--  src/cython/doc/simplex_tree_ref.rst | 4
-rw-r--r--  src/cython/doc/simplex_tree_sum.inc (renamed from src/cython/doc/simplex_tree_sum.rst) | 0
-rw-r--r--  src/cython/doc/simplex_tree_user.rst | 6
-rw-r--r--  src/cython/doc/strong_witness_complex_ref.rst | 4
-rw-r--r--  src/cython/doc/tangential_complex_ref.rst | 4
-rw-r--r--  src/cython/doc/tangential_complex_sum.inc (renamed from src/cython/doc/tangential_complex_sum.rst) | 0
-rw-r--r--  src/cython/doc/tangential_complex_user.rst | 13
-rw-r--r--  src/cython/doc/todos.rst | 4
-rw-r--r--  src/cython/doc/witness_complex_ref.rst | 4
-rw-r--r--  src/cython/doc/witness_complex_sum.inc (renamed from src/cython/doc/witness_complex_sum.rst) | 0
-rw-r--r--  src/cython/doc/witness_complex_user.rst | 6
-rwxr-xr-x  src/cython/example/alpha_complex_diagram_persistence_from_off_file_example.py | 4
-rwxr-xr-x  src/cython/example/coordinate_graph_induced_complex.py | 68
-rwxr-xr-x  src/cython/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py | 4
-rwxr-xr-x  src/cython/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py | 4
-rwxr-xr-x  src/cython/example/functional_graph_induced_complex.py | 69
-rwxr-xr-x  src/cython/example/gudhi_graphical_tools_example.py | 7
-rwxr-xr-x  src/cython/example/nerve_of_a_covering.py | 70
-rwxr-xr-x  src/cython/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py | 4
-rwxr-xr-x  src/cython/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py | 4
-rwxr-xr-x  src/cython/example/rips_complex_diagram_persistence_from_off_file_example.py | 4
-rwxr-xr-x  src/cython/example/tangential_complex_plain_homology_from_off_file_example.py | 9
-rwxr-xr-x  src/cython/example/voronoi_graph_induced_complex.py | 65
-rw-r--r--  src/cython/gudhi.pyx.in | 2
-rw-r--r--  src/cython/include/Nerve_gic_interface.h | 61
-rw-r--r--  src/cython/include/Persistent_cohomology_interface.h | 26
-rw-r--r--  src/cython/include/Tangential_complex_interface.h | 13
-rw-r--r--  src/cython/setup.py.in | 1
-rwxr-xr-x  src/cython/test/test_cover_complex.py | 92
-rwxr-xr-x  src/cython/test/test_cubical_complex.py | 11
-rwxr-xr-x  src/cython/test/test_simplex_tree.py | 44
-rwxr-xr-x  src/cython/test/test_tangential_complex.py | 2
199 files changed, 7690 insertions, 1677 deletions
diff --git a/CMakeGUDHIVersion.txt b/CMakeGUDHIVersion.txt
index 5e71f7eb..6811d7e1 100644
--- a/CMakeGUDHIVersion.txt
+++ b/CMakeGUDHIVersion.txt
@@ -1,5 +1,5 @@
set (GUDHI_MAJOR_VERSION 2)
-set (GUDHI_MINOR_VERSION 1)
+set (GUDHI_MINOR_VERSION 2)
set (GUDHI_PATCH_VERSION 0)
set(GUDHI_VERSION ${GUDHI_MAJOR_VERSION}.${GUDHI_MINOR_VERSION}.${GUDHI_PATCH_VERSION})
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 10373f75..afacede9 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -1,35 +1,20 @@
-cmake_minimum_required(VERSION 2.6)
+cmake_minimum_required(VERSION 3.1)
+
project(GUDHIdev)
include(CMakeGUDHIVersion.txt)
list(APPEND CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/src/cmake/modules/")
-enable_testing()
-
-# This variable is used by Cython CMakeLists.txt to know its path
+# This variable is used by Cython CMakeLists.txt and by GUDHI_third_party_libraries to know its path
set(GUDHI_CYTHON_PATH "src/cython")
-# For third parties libraries management - To be done last as CGAL updates CMAKE_MODULE_PATH
-include(GUDHI_third_party_libraries)
-
-if(MSVC)
- # Turn off some VC++ warnings
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4267 /wd4668 /wd4311 /wd4800 /wd4820 /wd4503 /wd4244 /wd4345 /wd4996 /wd4396 /wd4018")
-else()
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -Wall -pedantic")
-endif()
-if(CMAKE_BUILD_TYPE MATCHES Debug)
- message("++ Debug compilation flags are: ${CMAKE_CXX_FLAGS} ${CMAKE_CXX_FLAGS_DEBUG}")
-else()
- message("++ Release compilation flags are: ${CMAKE_CXX_FLAGS} ${CMAKE_CXX_FLAGS_RELEASE}")
-endif()
+# For third parties libraries management - To be done last as CGAL updates CMAKE_MODULE_PATH
+include(GUDHI_third_party_libraries NO_POLICY_SCOPE)
-if (DEBUG_TRACES)
- # For programs to be more verbose
- message(STATUS "DEBUG_TRACES are activated")
- add_definitions(-DDEBUG_TRACES)
-endif()
+include(GUDHI_compilation_flags)
+# Only for dev version
+add_cxx_compiler_flag("-pedantic")
# Add your new module in the list, order is not important
include(GUDHI_modules)
@@ -39,6 +24,7 @@ add_gudhi_module(Alpha_complex)
add_gudhi_module(Bitmap_cubical_complex)
add_gudhi_module(Bottleneck_distance)
add_gudhi_module(Contraction)
+add_gudhi_module(Cech_complex)
add_gudhi_module(Hasse_complex)
add_gudhi_module(Persistence_representations)
add_gudhi_module(Persistent_cohomology)
diff --git a/data/persistence_diagram/rips_on_tore3D_1307.pers b/data/persistence_diagram/rips_on_tore3D_1307.pers
new file mode 100644
index 00000000..f1bd600b
--- /dev/null
+++ b/data/persistence_diagram/rips_on_tore3D_1307.pers
@@ -0,0 +1,2044 @@
+2 0.138335 inf
+1 0.104347 inf
+1 0.0983494 inf
+0 0 inf
+0 0 0.122545
+0 0 0.121171
+0 0 0.120964
+0 0 0.12057
+0 0 0.12047
+0 0 0.120414
+0 0 0.119758
+0 0 0.119091
+0 0 0.118893
+0 0 0.118874
+0 0 0.118398
+0 0 0.118204
+0 0 0.118102
+0 0 0.118095
+0 0 0.117908
+0 0 0.117806
+0 0 0.117768
+0 0 0.117751
+0 0 0.117407
+0 0 0.117253
+0 0 0.116946
+0 0 0.116892
+0 0 0.116706
+0 0 0.116528
+0 0 0.116232
+0 0 0.115975
+0 0 0.115683
+0 0 0.115646
+0 0 0.115363
+0 0 0.115251
+0 0 0.115146
+0 0 0.115117
+0 0 0.115023
+0 0 0.114815
+0 0 0.114723
+0 0 0.1147
+0 0 0.114614
+0 0 0.114402
+0 0 0.114275
+0 0 0.114223
+0 0 0.113893
+0 0 0.113764
+0 0 0.113647
+0 0 0.113442
+0 0 0.113398
+0 0 0.113164
+0 0 0.113063
+0 0 0.113049
+0 0 0.112954
+0 0 0.112901
+0 0 0.112891
+0 0 0.112885
+0 0 0.112865
+0 0 0.112826
+0 0 0.112796
+0 0 0.112766
+0 0 0.112608
+0 0 0.112307
+0 0 0.112264
+0 0 0.112184
+0 0 0.112147
+0 0 0.112082
+0 0 0.111801
+0 0 0.111735
+0 0 0.111625
+0 0 0.111604
+0 0 0.111575
+0 0 0.111377
+0 0 0.111343
+0 0 0.111156
+0 0 0.111117
+0 0 0.11106
+0 0 0.111002
+0 0 0.110975
+0 0 0.110881
+0 0 0.11069
+0 0 0.110493
+0 0 0.110475
+0 0 0.110435
+0 0 0.110311
+0 0 0.110289
+0 0 0.110272
+0 0 0.110233
+0 0 0.110208
+0 0 0.110205
+0 0 0.110137
+0 0 0.110108
+0 0 0.109917
+0 0 0.109875
+0 0 0.109867
+0 0 0.109799
+0 0 0.109791
+0 0 0.109754
+0 0 0.109736
+0 0 0.109555
+0 0 0.109497
+0 0 0.109492
+0 0 0.109298
+0 0 0.109242
+0 0 0.109079
+0 0 0.10898
+0 0 0.108948
+0 0 0.108943
+0 0 0.108932
+0 0 0.10893
+0 0 0.108922
+0 0 0.108825
+0 0 0.108627
+0 0 0.10856
+0 0 0.108534
+0 0 0.108511
+0 0 0.108472
+0 0 0.108444
+0 0 0.10836
+0 0 0.108346
+0 0 0.108339
+0 0 0.108289
+0 0 0.108224
+0 0 0.108176
+0 0 0.108162
+0 0 0.108154
+0 0 0.108103
+0 0 0.10808
+0 0 0.108051
+0 0 0.10802
+0 0 0.107985
+0 0 0.107938
+0 0 0.107932
+0 0 0.107859
+0 0 0.107802
+0 0 0.107766
+0 0 0.107761
+0 0 0.107733
+0 0 0.107731
+0 0 0.107725
+0 0 0.107557
+0 0 0.107536
+0 0 0.107535
+0 0 0.107516
+0 0 0.107477
+0 0 0.107471
+0 0 0.107469
+0 0 0.107445
+0 0 0.10744
+0 0 0.107419
+0 0 0.107336
+0 0 0.107313
+0 0 0.107297
+0 0 0.107267
+0 0 0.10717
+0 0 0.107131
+0 0 0.107067
+0 0 0.107039
+0 0 0.10689
+0 0 0.106828
+0 0 0.106744
+0 0 0.10666
+0 0 0.106639
+0 0 0.106637
+0 0 0.106593
+0 0 0.106535
+0 0 0.106455
+0 0 0.106438
+0 0 0.106428
+0 0 0.106402
+0 0 0.106382
+0 0 0.106273
+0 0 0.106187
+0 0 0.106182
+0 0 0.105991
+0 0 0.105911
+0 0 0.105803
+0 0 0.105792
+0 0 0.105763
+0 0 0.105749
+0 0 0.105725
+0 0 0.105672
+0 0 0.105622
+0 0 0.105605
+0 0 0.105558
+0 0 0.105484
+0 0 0.105439
+0 0 0.105415
+0 0 0.105389
+0 0 0.105369
+0 0 0.105346
+0 0 0.105339
+0 0 0.1053
+0 0 0.105274
+0 0 0.105206
+0 0 0.105187
+0 0 0.105133
+0 0 0.105093
+0 0 0.105088
+0 0 0.105083
+0 0 0.10505
+0 0 0.105021
+0 0 0.105015
+0 0 0.104978
+0 0 0.10494
+0 0 0.104898
+0 0 0.104883
+0 0 0.104836
+0 0 0.104832
+0 0 0.104746
+0 0 0.104722
+0 0 0.104718
+0 0 0.104692
+0 0 0.10468
+0 0 0.104641
+0 0 0.104638
+0 0 0.104527
+0 0 0.104507
+0 0 0.104477
+0 0 0.10447
+0 0 0.104422
+0 0 0.104414
+0 0 0.104411
+0 0 0.10434
+0 0 0.104314
+0 0 0.104286
+0 0 0.104274
+0 0 0.10427
+0 0 0.104222
+0 0 0.104206
+0 0 0.104201
+0 0 0.104185
+0 0 0.10416
+0 0 0.10415
+0 0 0.104114
+0 0 0.104113
+0 0 0.104099
+0 0 0.103988
+0 0 0.103974
+0 0 0.103946
+0 0 0.103925
+0 0 0.103897
+0 0 0.103852
+0 0 0.103724
+0 0 0.103667
+0 0 0.103651
+0 0 0.103641
+0 0 0.103637
+0 0 0.103574
+0 0 0.103557
+0 0 0.103536
+0 0 0.103516
+0 0 0.103469
+0 0 0.103456
+0 0 0.10344
+0 0 0.103427
+0 0 0.103421
+0 0 0.103376
+0 0 0.103358
+0 0 0.103266
+0 0 0.103263
+0 0 0.103249
+0 0 0.103192
+0 0 0.103156
+0 0 0.103092
+0 0 0.103081
+0 0 0.103058
+0 0 0.102872
+0 0 0.102835
+0 0 0.102805
+0 0 0.102768
+0 0 0.102758
+0 0 0.102752
+0 0 0.102741
+0 0 0.102739
+0 0 0.102715
+0 0 0.102696
+0 0 0.10265
+0 0 0.102553
+0 0 0.102486
+0 0 0.102486
+0 0 0.102467
+0 0 0.102441
+0 0 0.102363
+0 0 0.102346
+0 0 0.102298
+0 0 0.102266
+0 0 0.102253
+0 0 0.102217
+0 0 0.102193
+0 0 0.102191
+0 0 0.10216
+0 0 0.102117
+0 0 0.102031
+0 0 0.102007
+0 0 0.101945
+0 0 0.101907
+0 0 0.101896
+0 0 0.101855
+0 0 0.101847
+0 0 0.101843
+0 0 0.101833
+0 0 0.101822
+0 0 0.101821
+0 0 0.101809
+0 0 0.101699
+0 0 0.10155
+0 0 0.101538
+0 0 0.101493
+0 0 0.101468
+0 0 0.101447
+0 0 0.101403
+0 0 0.101392
+0 0 0.101377
+0 0 0.101373
+0 0 0.101355
+0 0 0.101341
+0 0 0.101286
+0 0 0.10122
+0 0 0.101184
+0 0 0.101126
+0 0 0.101084
+0 0 0.101084
+0 0 0.101072
+0 0 0.100981
+0 0 0.100929
+0 0 0.100921
+0 0 0.100844
+0 0 0.100805
+0 0 0.100797
+0 0 0.100772
+0 0 0.100749
+0 0 0.100749
+0 0 0.100679
+0 0 0.10064
+0 0 0.100635
+0 0 0.100613
+0 0 0.100599
+0 0 0.10052
+0 0 0.100512
+0 0 0.100462
+0 0 0.100399
+0 0 0.100346
+0 0 0.100324
+0 0 0.100276
+0 0 0.100269
+0 0 0.10026
+0 0 0.100249
+0 0 0.100173
+0 0 0.100143
+0 0 0.100132
+0 0 0.100016
+0 0 0.100004
+0 0 0.0999805
+0 0 0.0999609
+0 0 0.0999313
+0 0 0.0999304
+0 0 0.0998899
+0 0 0.099884
+0 0 0.0998646
+0 0 0.0998279
+0 0 0.0997317
+0 0 0.0997064
+0 0 0.0996974
+0 0 0.0996745
+0 0 0.0996681
+0 0 0.0996236
+0 0 0.0995811
+0 0 0.099502
+0 0 0.0994573
+0 0 0.0993738
+0 0 0.0993579
+0 0 0.0993118
+0 0 0.0993115
+0 0 0.099307
+0 0 0.099301
+0 0 0.0992841
+0 0 0.0992493
+0 0 0.0992196
+0 0 0.0992118
+0 0 0.0992002
+0 0 0.0991901
+0 0 0.0991748
+0 0 0.0991699
+0 0 0.0991355
+0 0 0.099127
+0 0 0.0991122
+0 0 0.0991061
+0 0 0.0991026
+0 0 0.0990777
+0 0 0.0990577
+0 0 0.0990106
+0 0 0.0989608
+0 0 0.0989511
+0 0 0.0989433
+0 0 0.0989068
+0 0 0.0989061
+0 0 0.0989017
+0 0 0.0988136
+0 0 0.0987844
+0 0 0.0987155
+0 0 0.0987137
+0 0 0.0987027
+0 0 0.0986656
+0 0 0.0986589
+0 0 0.0985188
+0 0 0.0985127
+0 0 0.0984978
+0 0 0.0984845
+0 0 0.0984384
+0 0 0.0984361
+0 0 0.0984105
+0 0 0.0983949
+0 0 0.0983794
+0 0 0.0983099
+0 0 0.0983022
+0 0 0.0982802
+0 0 0.098261
+0 0 0.0982605
+0 0 0.0982498
+0 0 0.0982427
+0 0 0.0982118
+0 0 0.0981918
+0 0 0.0981537
+0 0 0.0981249
+0 0 0.0981003
+0 0 0.0980403
+0 0 0.0980052
+0 0 0.0979611
+0 0 0.0979211
+0 0 0.0979103
+0 0 0.0978291
+0 0 0.0977825
+0 0 0.0977791
+0 0 0.0977691
+0 0 0.0977678
+0 0 0.0977566
+0 0 0.0977458
+0 0 0.0977308
+0 0 0.0976979
+0 0 0.0976947
+0 0 0.0976571
+0 0 0.0976316
+0 0 0.097628
+0 0 0.0976173
+0 0 0.097617
+0 0 0.0976144
+0 0 0.097594
+0 0 0.0975906
+0 0 0.0975423
+0 0 0.0975044
+0 0 0.0974811
+0 0 0.0974776
+0 0 0.09746
+0 0 0.0974517
+0 0 0.0974161
+0 0 0.097411
+0 0 0.0974102
+0 0 0.0973811
+0 0 0.0973704
+0 0 0.097361
+0 0 0.0973594
+0 0 0.0973475
+0 0 0.097347
+0 0 0.0973354
+0 0 0.0972601
+0 0 0.0972476
+0 0 0.0972407
+0 0 0.097222
+0 0 0.0971562
+0 0 0.097079
+0 0 0.0970225
+0 0 0.0970099
+0 0 0.0969931
+0 0 0.0969318
+0 0 0.0968345
+0 0 0.096738
+0 0 0.0967069
+0 0 0.0966177
+0 0 0.0966121
+0 0 0.0966075
+0 0 0.0965669
+0 0 0.0965524
+0 0 0.0965467
+0 0 0.0965306
+0 0 0.0964752
+0 0 0.0964273
+0 0 0.0964214
+0 0 0.0964044
+0 0 0.0963393
+0 0 0.0962953
+0 0 0.0962618
+0 0 0.0962443
+0 0 0.0962396
+0 0 0.0962072
+0 0 0.0961728
+0 0 0.0961412
+0 0 0.0960799
+0 0 0.0960719
+0 0 0.0960553
+0 0 0.0959975
+0 0 0.0959885
+0 0 0.0959874
+0 0 0.0959671
+0 0 0.0959571
+0 0 0.0959249
+0 0 0.0958821
+0 0 0.0958582
+0 0 0.0958079
+0 0 0.095807
+0 0 0.0957505
+0 0 0.0957349
+0 0 0.095686
+0 0 0.0956663
+0 0 0.0956426
+0 0 0.0955825
+0 0 0.095551
+0 0 0.095509
+0 0 0.0954092
+0 0 0.0954003
+0 0 0.0953929
+0 0 0.0953089
+0 0 0.0953083
+0 0 0.0952959
+0 0 0.0952952
+0 0 0.0952427
+0 0 0.0951664
+0 0 0.0951311
+0 0 0.0951128
+0 0 0.0950144
+0 0 0.0949971
+0 0 0.0949351
+0 0 0.0948798
+0 0 0.0948727
+0 0 0.0947763
+0 0 0.0947681
+0 0 0.0947623
+0 0 0.0947217
+0 0 0.0946984
+0 0 0.0946978
+0 0 0.0946832
+0 0 0.0946395
+0 0 0.0946177
+0 0 0.0945759
+0 0 0.0945205
+0 0 0.0944995
+0 0 0.0944882
+0 0 0.0944571
+0 0 0.0944524
+0 0 0.0944435
+0 0 0.0944357
+0 0 0.0943982
+0 0 0.0943872
+0 0 0.094384
+0 0 0.0943324
+0 0 0.0942979
+0 0 0.094255
+0 0 0.0942476
+0 0 0.0942305
+0 0 0.0941642
+0 0 0.094092
+0 0 0.0940547
+0 0 0.0940288
+0 0 0.0939997
+0 0 0.09392
+0 0 0.0938552
+0 0 0.0937687
+0 0 0.0936632
+0 0 0.0936529
+0 0 0.0936278
+0 0 0.0936091
+0 0 0.093571
+0 0 0.0935084
+0 0 0.0935022
+0 0 0.0934953
+0 0 0.0934922
+0 0 0.0934884
+0 0 0.0933871
+0 0 0.093311
+0 0 0.0932948
+0 0 0.0932911
+0 0 0.0932711
+0 0 0.0932653
+0 0 0.0932573
+0 0 0.0932562
+0 0 0.0932506
+0 0 0.0932066
+0 0 0.0932042
+0 0 0.0931597
+0 0 0.0931377
+0 0 0.0931191
+0 0 0.0930766
+0 0 0.0930576
+0 0 0.0930453
+0 0 0.0930159
+0 0 0.0928905
+0 0 0.0928695
+0 0 0.0928598
+0 0 0.0928533
+0 0 0.0928206
+0 0 0.0926988
+0 0 0.0926135
+0 0 0.0925997
+0 0 0.0925371
+0 0 0.0924703
+0 0 0.0924285
+0 0 0.0924272
+0 0 0.0924161
+0 0 0.0924104
+0 0 0.0922615
+0 0 0.0922589
+0 0 0.0922246
+0 0 0.0922069
+0 0 0.0921781
+0 0 0.0921505
+0 0 0.0921288
+0 0 0.0921087
+0 0 0.092054
+0 0 0.0920027
+0 0 0.091992
+0 0 0.0919474
+0 0 0.0919331
+0 0 0.0918839
+0 0 0.0918463
+0 0 0.091819
+0 0 0.0918129
+0 0 0.0917888
+0 0 0.0917854
+0 0 0.0917808
+0 0 0.0917657
+0 0 0.0917037
+0 0 0.0916728
+0 0 0.09166
+0 0 0.0916551
+0 0 0.0916386
+0 0 0.0915873
+0 0 0.091561
+0 0 0.0915438
+0 0 0.0915334
+0 0 0.0914912
+0 0 0.09148
+0 0 0.0914736
+0 0 0.0914634
+0 0 0.0913736
+0 0 0.0913735
+0 0 0.0913423
+0 0 0.0912014
+0 0 0.0912006
+0 0 0.0911837
+0 0 0.0911558
+0 0 0.0911005
+0 0 0.0910993
+0 0 0.0910305
+0 0 0.0910083
+0 0 0.0910021
+0 0 0.0909993
+0 0 0.0909587
+0 0 0.0909123
+0 0 0.0909078
+0 0 0.0908981
+0 0 0.0908775
+0 0 0.0908673
+0 0 0.0907541
+0 0 0.0907067
+0 0 0.0906992
+0 0 0.090662
+0 0 0.0905038
+0 0 0.0904268
+0 0 0.0904089
+0 0 0.0903719
+0 0 0.0903532
+0 0 0.0902778
+0 0 0.090237
+0 0 0.0902248
+0 0 0.0901963
+0 0 0.090193
+0 0 0.090185
+0 0 0.0901586
+0 0 0.0901274
+0 0 0.0900876
+0 0 0.0900408
+0 0 0.0900098
+0 0 0.0900073
+0 0 0.0899733
+0 0 0.0899145
+0 0 0.0898825
+0 0 0.0898324
+0 0 0.0898248
+0 0 0.089792
+0 0 0.0897916
+0 0 0.089778
+0 0 0.0897022
+0 0 0.0896718
+0 0 0.0896671
+0 0 0.0895692
+0 0 0.0895243
+0 0 0.0895045
+0 0 0.0894882
+0 0 0.0894629
+0 0 0.0893629
+0 0 0.0893336
+0 0 0.0893309
+0 0 0.0893193
+0 0 0.0892099
+0 0 0.089175
+0 0 0.0891563
+0 0 0.0891518
+0 0 0.0890728
+0 0 0.0890717
+0 0 0.0890423
+0 0 0.0890158
+0 0 0.0890093
+0 0 0.0890021
+0 0 0.0889876
+0 0 0.0889287
+0 0 0.0889174
+0 0 0.0888924
+0 0 0.0888761
+0 0 0.0888682
+0 0 0.0887979
+0 0 0.0887824
+0 0 0.0887418
+0 0 0.0887001
+0 0 0.0886274
+0 0 0.0886194
+0 0 0.0886061
+0 0 0.0885183
+0 0 0.0885029
+0 0 0.0884997
+0 0 0.0884676
+0 0 0.0884379
+0 0 0.0884268
+0 0 0.0884101
+0 0 0.0883594
+0 0 0.0883318
+0 0 0.0883027
+0 0 0.0882989
+0 0 0.0882955
+0 0 0.088251
+0 0 0.0882503
+0 0 0.0881693
+0 0 0.0881399
+0 0 0.0881344
+0 0 0.088097
+0 0 0.0880887
+0 0 0.0880663
+0 0 0.0880612
+0 0 0.0880102
+0 0 0.0879131
+0 0 0.0878652
+0 0 0.08786
+0 0 0.087834
+0 0 0.0878146
+0 0 0.087757
+0 0 0.0877406
+0 0 0.0876805
+0 0 0.0876786
+0 0 0.0876403
+0 0 0.087596
+0 0 0.0875844
+0 0 0.0875718
+0 0 0.0875645
+0 0 0.087518
+0 0 0.0874942
+0 0 0.0874353
+0 0 0.0874336
+0 0 0.0873725
+0 0 0.087351
+0 0 0.087348
+0 0 0.0872871
+0 0 0.0872623
+0 0 0.0872584
+0 0 0.0872349
+0 0 0.0871851
+0 0 0.0871834
+0 0 0.0870855
+0 0 0.0870338
+0 0 0.0870283
+0 0 0.0869914
+0 0 0.0869755
+0 0 0.0869607
+0 0 0.0869219
+0 0 0.0869096
+0 0 0.0868956
+0 0 0.0868514
+0 0 0.0868374
+0 0 0.0868301
+0 0 0.0867937
+0 0 0.0867655
+0 0 0.0867603
+0 0 0.0867516
+0 0 0.0867123
+0 0 0.0866914
+0 0 0.0866306
+0 0 0.0866296
+0 0 0.0866279
+0 0 0.0866149
+0 0 0.0866046
+0 0 0.086589
+0 0 0.0865754
+0 0 0.0865427
+0 0 0.0865283
+0 0 0.0865053
+0 0 0.0864874
+0 0 0.0864538
+0 0 0.0864228
+0 0 0.0864194
+0 0 0.0863696
+0 0 0.0862914
+0 0 0.0862708
+0 0 0.0862707
+0 0 0.0862618
+0 0 0.0862079
+0 0 0.0861181
+0 0 0.0861139
+0 0 0.0860775
+0 0 0.0859588
+0 0 0.0858682
+0 0 0.0858302
+0 0 0.0858208
+0 0 0.085786
+0 0 0.0857713
+0 0 0.0857462
+0 0 0.0857051
+0 0 0.0855555
+0 0 0.0855326
+0 0 0.0854601
+0 0 0.0854086
+0 0 0.0853601
+0 0 0.0853515
+0 0 0.0853294
+0 0 0.0852582
+0 0 0.085249
+0 0 0.0851983
+0 0 0.0851962
+0 0 0.0851742
+0 0 0.0851614
+0 0 0.0851573
+0 0 0.0851292
+0 0 0.0850985
+0 0 0.0850658
+0 0 0.0850454
+0 0 0.0849863
+0 0 0.0849704
+0 0 0.0849174
+0 0 0.0849149
+0 0 0.084807
+0 0 0.0847976
+0 0 0.0847651
+0 0 0.0847603
+0 0 0.0847507
+0 0 0.084721
+0 0 0.0847015
+0 0 0.0846691
+0 0 0.0846272
+0 0 0.0846259
+0 0 0.084625
+0 0 0.0845387
+0 0 0.0845105
+0 0 0.0844777
+0 0 0.0844517
+0 0 0.0844504
+0 0 0.0844359
+0 0 0.0844267
+0 0 0.0843912
+0 0 0.0843866
+0 0 0.0843828
+0 0 0.0843443
+0 0 0.0843409
+0 0 0.0843398
+0 0 0.0843247
+0 0 0.0842665
+0 0 0.0842532
+0 0 0.0842054
+0 0 0.0841559
+0 0 0.0841412
+0 0 0.08408
+0 0 0.0840751
+0 0 0.0839919
+0 0 0.0839865
+0 0 0.0839348
+0 0 0.083926
+0 0 0.0839201
+0 0 0.0838728
+0 0 0.0838659
+0 0 0.083832
+0 0 0.0838253
+0 0 0.0838225
+0 0 0.08381
+0 0 0.0837678
+0 0 0.0837502
+0 0 0.0836721
+0 0 0.0834546
+0 0 0.083452
+0 0 0.0834329
+0 0 0.0834192
+0 0 0.0834107
+0 0 0.0833158
+0 0 0.0832953
+0 0 0.0832912
+0 0 0.0832852
+0 0 0.0832816
+0 0 0.0831875
+0 0 0.0831751
+0 0 0.0831381
+0 0 0.08313
+0 0 0.0830867
+0 0 0.0830303
+0 0 0.0830191
+0 0 0.0829953
+0 0 0.0829902
+0 0 0.082963
+0 0 0.0829545
+0 0 0.0829356
+0 0 0.0829183
+0 0 0.0829129
+0 0 0.082855
+0 0 0.0828445
+0 0 0.0828289
+0 0 0.0827917
+0 0 0.0827618
+0 0 0.0827603
+0 0 0.0826968
+0 0 0.0826829
+0 0 0.0826788
+0 0 0.0825669
+0 0 0.0825437
+0 0 0.0825384
+0 0 0.0825341
+0 0 0.0824971
+0 0 0.0824907
+0 0 0.0824755
+0 0 0.0824587
+0 0 0.0824499
+0 0 0.0823912
+0 0 0.0823653
+0 0 0.082329
+0 0 0.0822893
+0 0 0.0822666
+0 0 0.0822373
+0 0 0.0822316
+0 0 0.0822187
+0 0 0.0820697
+0 0 0.082068
+0 0 0.0820532
+0 0 0.0820261
+0 0 0.0820089
+0 0 0.0819164
+0 0 0.0818598
+0 0 0.0818388
+0 0 0.0818334
+0 0 0.0817903
+0 0 0.0817481
+0 0 0.0816978
+0 0 0.0816822
+0 0 0.0816791
+0 0 0.0816385
+0 0 0.0816314
+0 0 0.0816145
+0 0 0.0816071
+0 0 0.0816029
+0 0 0.0815479
+0 0 0.0815381
+0 0 0.0815247
+0 0 0.0815176
+0 0 0.0815082
+0 0 0.0814955
+0 0 0.081407
+0 0 0.0813536
+0 0 0.0812334
+0 0 0.0811714
+0 0 0.0811689
+0 0 0.0811418
+0 0 0.0811346
+0 0 0.0811328
+0 0 0.0811056
+0 0 0.0811001
+0 0 0.0810472
+0 0 0.0810287
+0 0 0.0810215
+0 0 0.0809856
+0 0 0.0809801
+0 0 0.0809611
+0 0 0.0808721
+0 0 0.0808622
+0 0 0.0808618
+0 0 0.0808543
+0 0 0.0807894
+0 0 0.0807893
+0 0 0.0807705
+0 0 0.080715
+0 0 0.0806896
+0 0 0.0805033
+0 0 0.0804637
+0 0 0.0803715
+0 0 0.0803676
+0 0 0.0803405
+0 0 0.08034
+0 0 0.0803392
+0 0 0.0803318
+0 0 0.0803184
+0 0 0.080315
+0 0 0.0803084
+0 0 0.0802989
+0 0 0.0802988
+0 0 0.080297
+0 0 0.0802781
+0 0 0.0802677
+0 0 0.0802418
+0 0 0.0802358
+0 0 0.0802204
+0 0 0.080121
+0 0 0.0800679
+0 0 0.0800673
+0 0 0.0800411
+0 0 0.0800223
+0 0 0.0799564
+0 0 0.0799555
+0 0 0.0799524
+0 0 0.0799261
+0 0 0.0799149
+0 0 0.0798384
+0 0 0.0798239
+0 0 0.0798187
+0 0 0.079787
+0 0 0.0796839
+0 0 0.0796206
+0 0 0.0795918
+0 0 0.0795738
+0 0 0.0795622
+0 0 0.0795417
+0 0 0.0795402
+0 0 0.07953
+0 0 0.07946
+0 0 0.0794184
+0 0 0.0793805
+0 0 0.0793321
+0 0 0.0791912
+0 0 0.0791848
+0 0 0.0791774
+0 0 0.0790722
+0 0 0.0790089
+0 0 0.0789809
+0 0 0.0789498
+0 0 0.0789257
+0 0 0.0788734
+0 0 0.0788599
+0 0 0.0787641
+0 0 0.0787246
+0 0 0.0785622
+0 0 0.0785047
+0 0 0.078495
+0 0 0.0784875
+0 0 0.0784244
+0 0 0.0784187
+0 0 0.0783668
+0 0 0.0783633
+0 0 0.0782278
+0 0 0.0781832
+0 0 0.0781777
+0 0 0.0781513
+0 0 0.0781258
+0 0 0.0780305
+0 0 0.0780201
+0 0 0.0780181
+0 0 0.0780073
+0 0 0.077985
+0 0 0.0779695
+0 0 0.077938
+0 0 0.0778787
+0 0 0.0778179
+0 0 0.0777929
+0 0 0.0777845
+0 0 0.0777642
+0 0 0.0776513
+0 0 0.0776244
+0 0 0.0776135
+0 0 0.0775219
+0 0 0.0774934
+0 0 0.0774764
+0 0 0.0774541
+0 0 0.0774391
+0 0 0.0774203
+0 0 0.0773458
+0 0 0.0773391
+0 0 0.0773344
+0 0 0.0773236
+0 0 0.0773215
+0 0 0.0771755
+0 0 0.0771752
+0 0 0.0771671
+0 0 0.0770662
+0 0 0.0770659
+0 0 0.0769936
+0 0 0.076986
+0 0 0.0768987
+0 0 0.0768589
+0 0 0.0768001
+0 0 0.0767999
+0 0 0.0767277
+0 0 0.07672
+0 0 0.0767033
+0 0 0.0766285
+0 0 0.0766201
+0 0 0.0765619
+0 0 0.07653
+0 0 0.0764597
+0 0 0.0764593
+0 0 0.0762673
+0 0 0.0761308
+0 0 0.0760972
+0 0 0.0760373
+0 0 0.0760352
+0 0 0.0760226
+0 0 0.0759521
+0 0 0.0758866
+0 0 0.0758453
+0 0 0.075843
+0 0 0.0757932
+0 0 0.0757831
+0 0 0.0755611
+0 0 0.0755108
+0 0 0.075429
+0 0 0.0754205
+0 0 0.0753804
+0 0 0.0753703
+0 0 0.0753166
+0 0 0.075056
+0 0 0.0750491
+0 0 0.075046
+0 0 0.0749483
+0 0 0.0749391
+0 0 0.074843
+0 0 0.0748208
+0 0 0.0748122
+0 0 0.0747848
+0 0 0.0747133
+0 0 0.0746998
+0 0 0.074675
+0 0 0.0746619
+0 0 0.0746273
+0 0 0.0746011
+0 0 0.0745912
+0 0 0.0744812
+0 0 0.0744204
+0 0 0.0743043
+0 0 0.0742461
+0 0 0.074126
+0 0 0.0740759
+0 0 0.0740404
+0 0 0.0740253
+0 0 0.0739941
+0 0 0.0739699
+0 0 0.0738437
+0 0 0.0737842
+0 0 0.0737689
+0 0 0.073745
+0 0 0.0737311
+0 0 0.0735169
+0 0 0.0734621
+0 0 0.0733397
+0 0 0.073219
+0 0 0.0732121
+0 0 0.0731369
+0 0 0.0730708
+0 0 0.072856
+0 0 0.0728459
+0 0 0.0728339
+0 0 0.0727474
+0 0 0.0727014
+0 0 0.0725563
+0 0 0.0725443
+0 0 0.0724734
+0 0 0.0724154
+0 0 0.0721943
+0 0 0.0720927
+0 0 0.071931
+0 0 0.0719287
+0 0 0.0717993
+0 0 0.0717989
+0 0 0.0716285
+0 0 0.0715618
+0 0 0.0714544
+0 0 0.0713816
+0 0 0.0713583
+0 0 0.0713392
+0 0 0.0712466
+0 0 0.0712151
+0 0 0.071201
+0 0 0.0710418
+0 0 0.0709326
+0 0 0.0708223
+0 0 0.0707306
+0 0 0.0706176
+0 0 0.0703058
+0 0 0.0702881
+0 0 0.0702202
+0 0 0.0701399
+0 0 0.0695755
+0 0 0.0695132
+0 0 0.0694435
+0 0 0.069298
+0 0 0.0692617
+0 0 0.069201
+0 0 0.0691416
+0 0 0.0691246
+0 0 0.0691224
+0 0 0.0689761
+0 0 0.0687513
+0 0 0.068751
+0 0 0.0684475
+0 0 0.0681675
+0 0 0.0680444
+0 0 0.067827
+0 0 0.0677432
+0 0 0.0676582
+0 0 0.0676575
+0 0 0.0674522
+0 0 0.0674039
+0 0 0.0673475
+0 0 0.067117
+0 0 0.0668301
+0 0 0.0667495
+0 0 0.0666906
+0 0 0.066585
+0 0 0.0665527
+0 0 0.0665475
+0 0 0.0663859
+0 0 0.0663762
+0 0 0.0659211
+0 0 0.065897
+0 0 0.0658765
+0 0 0.0658305
+0 0 0.065485
+0 0 0.0654654
+0 0 0.0654504
+0 0 0.0653854
+0 0 0.06519
+0 0 0.0651654
+0 0 0.0651053
+0 0 0.0651005
+0 0 0.0649342
+0 0 0.0647983
+0 0 0.0645611
+0 0 0.064503
+0 0 0.0643878
+0 0 0.0641541
+0 0 0.0640333
+0 0 0.0639737
+0 0 0.0637212
+0 0 0.0635186
+0 0 0.063326
+0 0 0.0632864
+0 0 0.0632523
+0 0 0.0631945
+0 0 0.0630431
+0 0 0.0630282
+0 0 0.0628496
+0 0 0.0628005
+0 0 0.0627994
+0 0 0.0624745
+0 0 0.062401
+0 0 0.0623043
+0 0 0.0622939
+0 0 0.0620624
+0 0 0.0619763
+0 0 0.0619092
+0 0 0.0617949
+0 0 0.0616653
+0 0 0.0613634
+0 0 0.0611641
+0 0 0.0611428
+0 0 0.0610917
+0 0 0.0609721
+0 0 0.0609721
+0 0 0.060968
+0 0 0.0608642
+0 0 0.0608431
+0 0 0.0607373
+0 0 0.0603037
+0 0 0.0602398
+0 0 0.060237
+0 0 0.0601804
+0 0 0.0596339
+0 0 0.0595326
+0 0 0.0590727
+0 0 0.0589673
+0 0 0.0588864
+0 0 0.058629
+0 0 0.0586267
+0 0 0.0586052
+0 0 0.058033
+0 0 0.0578023
+0 0 0.0577624
+0 0 0.0576869
+0 0 0.0576773
+0 0 0.0576394
+0 0 0.057415
+0 0 0.057225
+0 0 0.0567939
+0 0 0.0567896
+0 0 0.0564245
+0 0 0.0563586
+0 0 0.0561217
+0 0 0.0560577
+0 0 0.0559755
+0 0 0.0557566
+0 0 0.0556232
+0 0 0.055521
+0 0 0.0550926
+0 0 0.0543985
+1 0.0997444 0.137724
+1 0.0963675 0.134298
+1 0.0997164 0.136077
+1 0.0993225 0.134439
+1 0.100243 0.135175
+1 0.102608 0.137141
+1 0.0988401 0.132691
+1 0.103142 0.136828
+1 0.100554 0.134097
+1 0.099464 0.13294
+1 0.0868897 0.119904
+1 0.0986767 0.131216
+1 0.102063 0.134272
+1 0.101951 0.133649
+1 0.104507 0.136174
+1 0.104789 0.136318
+1 0.100679 0.131807
+1 0.0979544 0.128526
+1 0.0992865 0.12979
+1 0.10505 0.135444
+1 0.106493 0.13644
+1 0.104331 0.13427
+1 0.0887233 0.116997
+1 0.100333 0.128453
+1 0.103249 0.130847
+1 0.0928315 0.12026
+1 0.103681 0.131101
+1 0.102419 0.129772
+1 0.107467 0.134504
+1 0.106298 0.13332
+1 0.0863482 0.113056
+1 0.0876685 0.114328
+1 0.102973 0.129426
+1 0.105968 0.132336
+1 0.101557 0.127648
+1 0.103991 0.129943
+1 0.0896578 0.115598
+1 0.102713 0.128567
+1 0.0943757 0.119823
+1 0.107541 0.132986
+1 0.0940415 0.119416
+1 0.106706 0.131892
+1 0.102134 0.127276
+1 0.08196 0.107085
+1 0.0971671 0.122111
+1 0.105478 0.130109
+1 0.0868236 0.111189
+1 0.0946673 0.118694
+1 0.101082 0.125066
+1 0.104529 0.128397
+1 0.113264 0.137008
+1 0.093598 0.117279
+1 0.102309 0.125859
+1 0.11137 0.134915
+1 0.109647 0.133169
+1 0.0961717 0.119526
+1 0.0914649 0.11455
+1 0.104092 0.126992
+1 0.110014 0.13285
+1 0.0969412 0.119714
+1 0.096505 0.119274
+1 0.107733 0.130472
+1 0.105091 0.127774
+1 0.107621 0.130186
+1 0.101354 0.12387
+1 0.0972194 0.119734
+1 0.111868 0.134382
+1 0.103895 0.126399
+1 0.105818 0.128237
+1 0.093991 0.11639
+1 0.106408 0.128753
+1 0.103344 0.125641
+1 0.0985647 0.120776
+1 0.107818 0.130023
+1 0.105326 0.127497
+1 0.108424 0.130451
+1 0.104609 0.126592
+1 0.105401 0.127105
+1 0.108989 0.130636
+1 0.109277 0.130788
+1 0.109884 0.131386
+1 0.096265 0.117692
+1 0.0973152 0.118724
+1 0.0978033 0.11914
+1 0.105971 0.127172
+1 0.105206 0.126174
+1 0.103094 0.123862
+1 0.0930364 0.1137
+1 0.0912215 0.111759
+1 0.105776 0.126283
+1 0.109117 0.129577
+1 0.11185 0.132307
+1 0.103348 0.123757
+1 0.112671 0.132855
+1 0.0873389 0.10751
+1 0.113148 0.133301
+1 0.107831 0.127932
+1 0.0942185 0.114302
+1 0.105423 0.125481
+1 0.100447 0.120461
+1 0.11413 0.134098
+1 0.103867 0.123829
+1 0.109305 0.129215
+1 0.111781 0.131688
+1 0.115776 0.135588
+1 0.0866676 0.106473
+1 0.0820686 0.101833
+1 0.109769 0.129516
+1 0.109777 0.129457
+1 0.0969575 0.116617
+1 0.111919 0.131476
+1 0.0974989 0.117008
+1 0.112238 0.131725
+1 0.0996257 0.119081
+1 0.101161 0.120602
+1 0.0894888 0.108793
+1 0.105021 0.124292
+1 0.110994 0.13024
+1 0.11702 0.136226
+1 0.108008 0.127172
+1 0.109482 0.128451
+1 0.108245 0.127174
+1 0.0942135 0.11312
+1 0.0992471 0.118131
+1 0.106623 0.125504
+1 0.0871873 0.106063
+1 0.108132 0.126863
+1 0.0987099 0.117422
+1 0.105374 0.124068
+1 0.114406 0.133036
+1 0.0914967 0.109997
+1 0.106521 0.1249
+1 0.0955759 0.113921
+1 0.111101 0.129415
+1 0.105122 0.122988
+1 0.091267 0.10908
+1 0.110432 0.127976
+1 0.088384 0.105787
+1 0.112786 0.130153
+1 0.106171 0.123531
+1 0.11442 0.13177
+1 0.109894 0.127229
+1 0.0937333 0.111036
+1 0.106992 0.124248
+1 0.0969028 0.114151
+1 0.116183 0.133233
+1 0.111736 0.128771
+1 0.0993216 0.116354
+1 0.0918067 0.108784
+1 0.104158 0.121134
+1 0.121028 0.137945
+1 0.119786 0.13669
+1 0.101491 0.118338
+1 0.0900619 0.106861
+1 0.105091 0.121844
+1 0.110906 0.12756
+1 0.0942669 0.110777
+1 0.0936016 0.110107
+1 0.103974 0.120299
+1 0.0903266 0.106619
+1 0.109739 0.126022
+1 0.101771 0.117974
+1 0.113957 0.130135
+1 0.104949 0.121106
+1 0.113859 0.129981
+1 0.107266 0.123375
+1 0.106434 0.122441
+1 0.0825495 0.0985038
+1 0.0928646 0.108775
+1 0.10811 0.124013
+1 0.115498 0.131352
+1 0.0857711 0.101566
+1 0.109411 0.12513
+1 0.0992369 0.114868
+1 0.104979 0.120588
+1 0.0928687 0.108436
+1 0.115035 0.130513
+1 0.10634 0.121795
+1 0.0972824 0.112736
+1 0.10508 0.120494
+1 0.0983725 0.11376
+1 0.101863 0.117236
+1 0.108938 0.124301
+1 0.107762 0.123121
+1 0.107407 0.122532
+1 0.102095 0.11715
+1 0.0926056 0.107647
+1 0.102122 0.117157
+1 0.111668 0.126692
+1 0.118187 0.133023
+1 0.091849 0.106675
+1 0.0955763 0.110398
+1 0.0897714 0.104589
+1 0.10026 0.115026
+1 0.117548 0.132301
+1 0.102181 0.116898
+1 0.1039 0.118592
+1 0.111494 0.126118
+1 0.117851 0.132466
+1 0.102311 0.116874
+1 0.104558 0.11907
+1 0.0826206 0.0971206
+1 0.0983518 0.112851
+1 0.108266 0.122713
+1 0.117257 0.13169
+1 0.112625 0.127054
+1 0.102561 0.116956
+1 0.105435 0.119828
+1 0.105814 0.120097
+1 0.110893 0.125174
+1 0.106181 0.120423
+1 0.10824 0.122466
+1 0.0998914 0.114008
+1 0.111463 0.125547
+1 0.106653 0.120712
+1 0.114143 0.128187
+1 0.0987227 0.112686
+1 0.10959 0.123535
+1 0.0866352 0.100557
+1 0.112146 0.126068
+1 0.088279 0.102
+1 0.110376 0.124047
+1 0.101789 0.115338
+1 0.106953 0.120376
+1 0.102707 0.116075
+1 0.0993793 0.112687
+1 0.108878 0.122123
+1 0.120961 0.134152
+1 0.10885 0.122037
+1 0.10738 0.12056
+1 0.101432 0.114597
+1 0.112405 0.125556
+1 0.106813 0.119921
+1 0.112285 0.12529
+1 0.11012 0.123063
+1 0.107717 0.120611
+1 0.0897068 0.102559
+1 0.0987529 0.111589
+1 0.0884634 0.101236
+1 0.0911157 0.103871
+1 0.11179 0.124441
+1 0.119124 0.131689
+1 0.114039 0.126565
+1 0.099306 0.111821
+1 0.109672 0.122139
+1 0.0967874 0.109252
+1 0.110451 0.122915
+1 0.10042 0.112872
+1 0.109867 0.122175
+1 0.11559 0.127875
+1 0.0924112 0.104671
+1 0.109943 0.122122
+1 0.0834784 0.0956025
+1 0.094677 0.106789
+1 0.0944022 0.106494
+1 0.104009 0.116052
+1 0.115406 0.127413
+1 0.0997344 0.111634
+1 0.0834597 0.0953123
+1 0.100265 0.112109
+1 0.108783 0.120624
+1 0.102596 0.114393
+1 0.105933 0.117699
+1 0.0955911 0.10734
+1 0.0915704 0.103284
+1 0.106757 0.11847
+1 0.108034 0.119659
+1 0.107063 0.118624
+1 0.102709 0.114217
+1 0.114347 0.125851
+1 0.101592 0.113064
+1 0.10639 0.117794
+1 0.109961 0.12135
+1 0.106373 0.117724
+1 0.111847 0.123153
+1 0.109539 0.120825
+1 0.0937078 0.104977
+1 0.106527 0.117767
+1 0.115889 0.127068
+1 0.108483 0.119661
+1 0.113189 0.124325
+1 0.106832 0.11787
+1 0.0875251 0.0984842
+1 0.115665 0.126551
+1 0.0929162 0.103716
+1 0.103776 0.114372
+1 0.0975538 0.108061
+1 0.107443 0.117928
+1 0.108814 0.119261
+1 0.103151 0.11358
+1 0.0967477 0.107154
+1 0.104979 0.115384
+1 0.101284 0.111574
+1 0.104981 0.115252
+1 0.0866825 0.0968263
+1 0.112361 0.122466
+1 0.108466 0.118492
+1 0.0975612 0.107561
+1 0.104563 0.114561
+1 0.0854395 0.095418
+1 0.0931692 0.10309
+1 0.107751 0.117652
+1 0.109986 0.119829
+1 0.107501 0.117327
+1 0.104998 0.114782
+1 0.107751 0.117509
+1 0.10754 0.117241
+1 0.111665 0.121357
+1 0.111614 0.121288
+1 0.0872065 0.0968618
+1 0.115761 0.125399
+1 0.10581 0.115426
+1 0.0991297 0.108708
+1 0.104736 0.114301
+1 0.103519 0.112982
+1 0.118659 0.12812
+1 0.107261 0.116706
+1 0.110161 0.119539
+1 0.108064 0.11741
+1 0.107143 0.116483
+1 0.108131 0.117452
+1 0.116427 0.125679
+1 0.0961462 0.105394
+1 0.112569 0.121769
+1 0.0927918 0.101983
+1 0.109636 0.118819
+1 0.0955058 0.104585
+1 0.114088 0.12316
+1 0.111901 0.120969
+1 0.121982 0.131005
+1 0.110988 0.119947
+1 0.0783357 0.0872192
+1 0.120473 0.129355
+1 0.0921671 0.101035
+1 0.119236 0.128003
+1 0.099794 0.108554
+1 0.111357 0.120079
+1 0.11281 0.121516
+1 0.109016 0.117706
+1 0.0931245 0.101783
+1 0.0878912 0.0964983
+1 0.102065 0.110672
+1 0.114585 0.123166
+1 0.110329 0.118883
+1 0.113325 0.121811
+1 0.101143 0.109586
+1 0.119504 0.127904
+1 0.0955487 0.103878
+1 0.0911453 0.0994428
+1 0.120339 0.128623
+1 0.12142 0.129703
+1 0.086379 0.0945977
+1 0.121228 0.129444
+1 0.103027 0.111225
+1 0.113614 0.1218
+1 0.108909 0.116988
+1 0.110248 0.118274
+1 0.0849678 0.0929893
+1 0.113829 0.121818
+1 0.0936312 0.101612
+1 0.102472 0.110429
+1 0.0868612 0.0948107
+1 0.114083 0.121935
+1 0.107052 0.114866
+1 0.105241 0.113053
+1 0.107277 0.114788
+1 0.115088 0.122587
+1 0.0829082 0.0903731
+1 0.10558 0.113044
+1 0.11145 0.118882
+1 0.110181 0.117593
+1 0.114964 0.122358
+1 0.0853302 0.0927109
+1 0.108171 0.115549
+1 0.106645 0.114018
+1 0.103232 0.110597
+1 0.106894 0.114255
+1 0.10519 0.11254
+1 0.127885 0.135225
+1 0.103913 0.111224
+1 0.10035 0.107651
+1 0.110201 0.1175
+1 0.0962617 0.103558
+1 0.108197 0.115486
+1 0.111458 0.11871
+1 0.098521 0.105772
+1 0.122219 0.129425
+1 0.0893661 0.0965408
+1 0.107871 0.115033
+1 0.104818 0.11198
+1 0.114927 0.122087
+1 0.120229 0.127389
+1 0.123751 0.130863
+1 0.0867811 0.0938895
+1 0.108725 0.115806
+1 0.111298 0.118332
+1 0.116132 0.123053
+1 0.102178 0.109074
+1 0.114549 0.121432
+1 0.110523 0.117281
+1 0.117735 0.124481
+1 0.0884368 0.0950995
+1 0.107416 0.114045
+1 0.106531 0.113154
+1 0.103014 0.109636
+1 0.10813 0.114736
+1 0.11639 0.122984
+1 0.109105 0.115698
+1 0.093397 0.0999793
+1 0.115742 0.122324
+1 0.118376 0.124898
+1 0.101104 0.107606
+1 0.115044 0.121535
+1 0.113476 0.119883
+1 0.105748 0.112141
+1 0.109104 0.115492
+1 0.125917 0.132265
+1 0.111914 0.118232
+1 0.125883 0.132169
+1 0.121077 0.127362
+1 0.115773 0.122013
+1 0.104313 0.110543
+1 0.118365 0.124566
+1 0.102092 0.108248
+1 0.120337 0.126478
+1 0.114541 0.120675
+1 0.0992553 0.105374
+1 0.105628 0.111687
+1 0.0850423 0.0910966
+1 0.103692 0.109734
+1 0.120771 0.126797
+1 0.119856 0.125882
+1 0.102024 0.108023
+1 0.119626 0.125622
+1 0.104521 0.110516
+1 0.0915391 0.0974011
+1 0.114983 0.12084
+1 0.110716 0.11656
+1 0.106244 0.112004
+1 0.107854 0.1136
+1 0.0903226 0.0960447
+1 0.120372 0.126087
+1 0.10324 0.108948
+1 0.122694 0.128337
+1 0.1117 0.117306
+1 0.119211 0.124804
+1 0.110743 0.116312
+1 0.117496 0.123039
+1 0.113227 0.118763
+1 0.108715 0.114232
+1 0.107506 0.113022
+1 0.116576 0.122077
+1 0.112014 0.11751
+1 0.121067 0.126503
+1 0.10271 0.10807
+1 0.117238 0.122588
+1 0.112664 0.117961
+1 0.110963 0.116251
+1 0.121302 0.126577
+1 0.112077 0.117336
+1 0.095596 0.100812
+1 0.111411 0.116605
+1 0.0902511 0.0954238
+1 0.114381 0.119466
+1 0.0998193 0.104886
+1 0.0918736 0.0969192
+1 0.0884121 0.0934282
+1 0.110972 0.115973
+1 0.108334 0.113302
+1 0.102994 0.107957
+1 0.122498 0.127409
+1 0.113511 0.118395
+1 0.0906824 0.0955612
+1 0.0908176 0.0956598
+1 0.104894 0.109707
+1 0.116777 0.12158
+1 0.108034 0.112829
+1 0.119415 0.12418
+1 0.119605 0.124356
+1 0.11511 0.119826
+1 0.10408 0.108782
+1 0.0893248 0.0940033
+1 0.109139 0.113811
+1 0.0869492 0.0916174
+1 0.11488 0.119522
+1 0.108902 0.113507
+1 0.102023 0.106573
+1 0.106527 0.111071
+1 0.1117 0.116244
+1 0.108254 0.11277
+1 0.111407 0.115914
+1 0.0906087 0.0950758
+1 0.106504 0.11095
+1 0.123911 0.128263
+1 0.10015 0.10448
+1 0.100373 0.104665
+1 0.104751 0.109037
+1 0.114767 0.119022
+1 0.11427 0.118524
+1 0.0873467 0.091541
+1 0.120737 0.124923
+1 0.101735 0.105869
+1 0.117308 0.121437
+1 0.0949366 0.0990612
+1 0.110445 0.114566
+1 0.111496 0.115581
+1 0.107981 0.112018
+1 0.0836944 0.0877279
+1 0.0869282 0.0909341
+1 0.126548 0.13054
+1 0.119075 0.123055
+1 0.116128 0.120086
+1 0.101842 0.10579
+1 0.113183 0.117117
+1 0.118439 0.122358
+1 0.103742 0.107651
+1 0.115295 0.119183
+1 0.103151 0.107026
+1 0.102092 0.105948
+1 0.108527 0.112373
+1 0.0988934 0.102714
+1 0.105175 0.108975
+1 0.0900176 0.0937998
+1 0.124104 0.127877
+1 0.108098 0.111867
+1 0.0927389 0.0964599
+1 0.0950363 0.09874
+1 0.103675 0.107361
+1 0.106655 0.110301
+1 0.109679 0.113325
+1 0.115272 0.118888
+1 0.115989 0.119557
+1 0.121905 0.125397
+1 0.10312 0.106565
+1 0.108113 0.111554
+1 0.121869 0.125284
+1 0.102783 0.106188
+1 0.103888 0.107291
+1 0.115296 0.118678
+1 0.10563 0.108996
+1 0.106472 0.109814
+1 0.11275 0.116076
+1 0.0845606 0.0878851
+1 0.119448 0.122766
+1 0.104633 0.107941
+1 0.109935 0.113234
+1 0.112474 0.115747
+1 0.091723 0.0949822
+1 0.0844931 0.0877142
+1 0.103435 0.106632
+1 0.0991778 0.102339
+1 0.0920068 0.0951646
+1 0.112581 0.115727
+1 0.100722 0.10379
+1 0.0926894 0.0957161
+1 0.0988585 0.101883
+1 0.112133 0.115093
+1 0.100392 0.103342
+1 0.121192 0.124101
+1 0.127297 0.130205
+1 0.124283 0.127125
+1 0.118629 0.121461
+1 0.120033 0.122863
+1 0.106227 0.109042
+1 0.0877192 0.0905291
+1 0.121117 0.123921
+1 0.1073 0.110085
+1 0.102226 0.104966
+1 0.0913487 0.094036
+1 0.121988 0.124653
+1 0.111379 0.114023
+1 0.118327 0.120956
+1 0.127402 0.129993
+1 0.0865545 0.0891376
+1 0.109444 0.112021
+1 0.116611 0.119173
+1 0.0898924 0.092445
+1 0.104545 0.107074
+1 0.106897 0.109384
+1 0.121135 0.123621
+1 0.108985 0.111453
+1 0.118284 0.120727
+1 0.106249 0.108667
+1 0.12219 0.1246
+1 0.122067 0.124462
+1 0.124465 0.126835
+1 0.112932 0.1153
+1 0.11878 0.121147
+1 0.118673 0.121016
+1 0.125604 0.127906
+1 0.102935 0.105231
+1 0.095249 0.0975133
+1 0.120558 0.122822
+1 0.0989733 0.101209
+1 0.107301 0.10953
+1 0.0928034 0.0950099
+1 0.114216 0.11637
+1 0.118923 0.121075
+1 0.0865676 0.0887081
+1 0.111848 0.113959
+1 0.104268 0.106377
+1 0.120826 0.1229
+1 0.105936 0.108007
+1 0.118976 0.121027
+1 0.110054 0.112099
+1 0.123635 0.125677
+1 0.112406 0.114439
+1 0.11012 0.112134
+1 0.0884612 0.0904603
+1 0.111737 0.113728
+1 0.113357 0.115347
+1 0.108096 0.110081
+1 0.108259 0.110219
+1 0.128584 0.130535
+1 0.10709 0.109027
+1 0.100268 0.102179
+1 0.122237 0.124142
+1 0.100079 0.101982
+1 0.0967714 0.0986662
+1 0.113603 0.115487
+1 0.0912266 0.0930879
+1 0.117149 0.119006
+1 0.110169 0.112011
+1 0.109301 0.111132
+1 0.118604 0.120405
+1 0.117961 0.11976
+1 0.104397 0.106158
+1 0.118609 0.120336
+1 0.0832201 0.0849465
+1 0.0797153 0.0814197
+1 0.0990505 0.100753
+1 0.10596 0.107658
+1 0.0998188 0.101476
+1 0.105913 0.10757
+1 0.113531 0.115182
+1 0.0967926 0.098443
+1 0.0940843 0.0957039
+1 0.114661 0.116274
+1 0.087046 0.0886275
+1 0.0916777 0.0932535
+1 0.123024 0.124591
+1 0.0875995 0.089149
+1 0.110778 0.112327
+1 0.116115 0.117637
+1 0.128015 0.129522
+1 0.11174 0.113234
+1 0.0897331 0.0912275
+1 0.0941751 0.0956475
+1 0.108018 0.109483
+1 0.0974017 0.0988631
+1 0.0933664 0.0947723
+1 0.111466 0.11286
+1 0.11841 0.11978
+1 0.114989 0.116311
+1 0.120067 0.121385
+1 0.113528 0.114839
+1 0.104843 0.106117
+1 0.111298 0.112507
+1 0.0987946 0.100003
+1 0.116609 0.117793
+1 0.0907966 0.0919788
+1 0.116691 0.117873
+1 0.105971 0.107138
+1 0.108219 0.109383
+1 0.107818 0.108969
+1 0.110756 0.111896
+1 0.107855 0.108977
+1 0.111159 0.112271
+1 0.106634 0.107745
+1 0.104271 0.105379
+1 0.114413 0.115445
+1 0.114315 0.115337
+1 0.0967034 0.0977021
+1 0.113498 0.114496
+1 0.105644 0.106638
+1 0.123591 0.124579
+1 0.108805 0.109775
+1 0.108044 0.109001
+1 0.123861 0.124794
+1 0.110087 0.111007
+1 0.0955589 0.0964736
+1 0.102733 0.103612
+1 0.104046 0.104922
+1 0.0924699 0.0933413
+1 0.115285 0.116112
+1 0.0954201 0.09623
+1 0.0855219 0.0863159
+1 0.0967526 0.0975336
+1 0.105675 0.106446
+1 0.0900959 0.0908643
+1 0.105807 0.10657
+1 0.118205 0.118962
+1 0.117617 0.118373
+1 0.114137 0.114887
+1 0.114842 0.115581
+1 0.10114 0.101879
+1 0.103445 0.104161
+1 0.0991235 0.0998339
+1 0.0972796 0.0979348
+1 0.102682 0.103332
+1 0.0938035 0.0944286
+1 0.111262 0.11188
+1 0.111618 0.112153
+1 0.0997941 0.100297
+1 0.120843 0.121314
+1 0.114265 0.114734
+1 0.10137 0.101832
+1 0.108491 0.108953
+1 0.0991029 0.099559
+1 0.102937 0.10339
+1 0.118778 0.119221
+1 0.107189 0.107626
+1 0.101957 0.102357
+1 0.0924087 0.0928051
+1 0.101168 0.101555
+1 0.109878 0.110253
+1 0.101449 0.101791
+1 0.0965111 0.0968328
+1 0.0997832 0.100102
+1 0.116346 0.11663
+1 0.125517 0.125757
+1 0.125432 0.125671
+1 0.111594 0.111803
+1 0.10228 0.102465
+1 0.110192 0.110371
+1 0.0993469 0.0995118
+1 0.104171 0.104324
+1 0.0807541 0.0808958
+1 0.10346 0.103585
+1 0.127331 0.127396
+1 0.0945151 0.0945558
+1 0.104771 0.104801
+1 0.103878 0.103908
+1 0.11169 0.111695
diff --git a/data/points/human.COPYRIGHT b/data/points/human.COPYRIGHT
new file mode 100644
index 00000000..cb9bdb59
--- /dev/null
+++ b/data/points/human.COPYRIGHT
@@ -0,0 +1,77 @@
+The human.off point cloud is available at this webpage :
+http://segeval.cs.princeton.edu/
+
+LICENSE
+=======
+
+AIM@SHAPE General License for Shapes
+Applicable terms
+----------------
+
+This is the general AIM@SHAPE license applicable to models in the
+Shape Repository. It should be noted that each model is a
+representation of, and is distinct from, a shape, whether physical or
+imaginary. While the shape may be subject to its own terms, the terms
+governing the model you are about to download are described herein.
+
+For some models, the owners have defined specific licenses. The terms
+and conditions laid down in these licenses are in addition to the
+terms prescribed here, and are to be adhered to strictly when using
+such models.
+
+Acknowledgements
+----------------
+
+When including models from the Shape Repository in your website or
+research work, or when using them for other purposes allowed by the
+terms described herein, the AIM@SHAPE project and the model owner must
+be acknowledged as the sources of the models, for example with the
+phrase, "... model is provided courtesy of <model_owner> by the
+AIM@SHAPE Shape Repository."
+
+Information on <model_owner> is present in the accompanying metadata
+files and, where present, owner licenses.
+
+Metadata
+--------
+
+Each model is accompanied by its metadata file. Please keep this file
+with the model as it contains important information about the
+model. Please let us know if you find any errors in the metadata.
+
+(Im)proper use
+--------------
+
+Some models in the Shape Repository represent artifacts of religious,
+cultural and/or historical significance, e.g. the Max Planck
+model. Such models have been entrusted to the Shape Repository under
+the hope that they will be used respectfully and
+conscientiously. Please refrain from conducting experiments on them
+that may be rash or insensitive to people's feelings. Such experiments
+include, but are not limited to, morphing, animation, boolean
+operations, simulations of burning, breaking, exploding and melting.
+
+Models in the Shape Repository are made freely available for research
+and non-commercial purposes only. Use of these models for commercial
+purposes is permitted only after the express approval of the Shape
+Repository and the owner has been obtained. Please contact us using
+the webform on our site in this regard.
+
+
+CITATION
+========
+
+If you use any part of this benchmark, please cite:
+Xiaobai Chen, Aleksey Golovinskiy, and Thomas Funkhouser,
+A Benchmark for 3D Mesh Segmentation
+ACM Transactions on Graphics (Proc. SIGGRAPH), 28(3), 2009.
+
+@article{Chen:2009:ABF,
+ author = "Xiaobai Chen and Aleksey Golovinskiy and Thomas Funkhouser",
+ title = "A Benchmark for {3D} Mesh Segmentation",
+ journal = "ACM Transactions on Graphics (Proc. SIGGRAPH)",
+ year = "2009",
+ month = aug,
+ volume = "28",
+ number = "3"
+}
\ No newline at end of file
diff --git a/scripts/metagen.sh b/scripts/metagen.sh
new file mode 100755
index 00000000..4483d24e
--- /dev/null
+++ b/scripts/metagen.sh
@@ -0,0 +1,15 @@
+#!/bin/bash
+sep="_"
+for geom in "sphere" "klein" "torus"
+do
+ for number in 10 100 1000
+ do
+ for dim in {3..5}
+ do
+ echo "./off_file_from_shape_generator on $geom $geom$sep$number$sep$dim.off $number $dim"
+ ./off_file_from_shape_generator on $geom $geom$sep$number$sep$dim.off $number $dim
+ done
+ done
+done
+
+#./off_file_from_shape_generator in|on sphere|cube off_file_name points_number[integer > 0] dimension[integer > 1] radius[double > 0.0 | default = 1.0]
diff --git a/src/Alpha_complex/doc/Intro_alpha_complex.h b/src/Alpha_complex/doc/Intro_alpha_complex.h
index db298ea6..7a375c9f 100644
--- a/src/Alpha_complex/doc/Intro_alpha_complex.h
+++ b/src/Alpha_complex/doc/Intro_alpha_complex.h
@@ -57,9 +57,13 @@ namespace alpha_complex {
* href="http://doc.cgal.org/latest/Kernel_d/index.html#Chapter_dD_Geometry_Kernel">dD Geometry Kernel</a>
* \cite cgal:s-gkd-15b from CGAL as template parameter.
*
- * \remark When the simplicial complex is constructed with an infinite value of alpha, the complex is a Delaunay
+ * \remark
+ * - When the simplicial complex is constructed with an infinite value of alpha, the complex is a Delaunay
* complex.
- *
+ * - For people only interested in the topology of the \ref alpha_complex (for instance persistence),
+ * \ref alpha_complex is equivalent to the \ref cech_complex and much smaller if you do not bound the radii.
+ * \ref cech_complex can still make sense in higher dimension precisely because you can bound the radii.
+ *
* \section pointsexample Example from points
*
* This example builds the Delaunay triangulation from the given points in a 2D static kernel, and creates a
@@ -89,63 +93,29 @@ namespace alpha_complex {
* \image html "alpha_complex_doc.png" "Simplicial complex structure construction example"
*
* \subsection filtrationcomputation Filtration value computation algorithm
- *
- *
- *
- * <ul>
- * <li style="list-style-type: none;">\f$ \textbf{for } i : dimension \rightarrow 0 \textbf{ do} \f$
- * <ul>
- * <li style="list-style-type: none;">\f$\textbf{for all } \sigma of dimension i \f$
- * <ul>
- * <li style="list-style-type: none;">\f$\textbf{if } filtration( \sigma ) is NaN \textbf{ then} \f$
- * <ul>
- * <li style="list-style-type: none;">\f$ filtration( \sigma ) = \alpha^2( \sigma ) \f$
- * </li>
- * </ul>
- * </li>
- * <li style="list-style-type: none;">\f$\textbf{end if}\f$
- * </li>
- * <li style="list-style-type: none;">\f$\textbf{for all } \tau face of \sigma \textbf{ do} \f$
- * &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;// propagate alpha filtration value
- * <ul>
- * <li style="list-style-type: none;">\f$\textbf{if } filtration( \tau ) is not NaN \textbf{ then} \f$
- * <ul>
- * <li style="list-style-type: none;">\f$ filtration( \tau ) = min ( filtration( \tau ), filtration( \sigma ) ) \f$
- * </li>
- * </ul>
- * </li>
- * <li style="list-style-type: none;">\f$\textbf{else}\f$
- * <ul>
- * <li style="list-style-type: none;">\f$\textbf{if } \tau is not Gabriel for \sigma \textbf{ then} \f$
- * <ul>
- * <li style="list-style-type: none;">\f$ filtration( \tau ) = filtration( \sigma ) \f$
- * </li>
- * </ul>
- * </li>
- * <li style="list-style-type: none;">\f$\textbf{end if}\f$
- * </li>
- * </ul>
- * </li>
- * <li style="list-style-type: none;">\f$\textbf{end if}\f$
- * </li>
- * </ul>
- * </li>
- * <li style="list-style-type: none;">\f$\textbf{end for}\f$
- * </li>
- * </ul>
- * </li>
- * <li style="list-style-type: none;">\f$\textbf{end for}\f$
- * </li>
- * </ul>
- * </li>
- * <li style="list-style-type: none;">\f$\textbf{end for}\f$
- * </li>
- * <li style="list-style-type: none;">\f$make\_filtration\_non\_decreasing()\f$
- * </li>
- * <li style="list-style-type: none;">\f$prune\_above\_filtration()\f$
- * </li>
- * </ul>
- *
+ * <br>
+ * \f$
+ * \textbf{for } \text{i : dimension } \rightarrow 0 \textbf{ do}\\
+ * \quad \textbf{for all } \sigma \text{ of dimension i}\\
+ * \quad\quad \textbf{if } \text{filtration(} \sigma ) \text{ is NaN} \textbf{ then}\\
+ * \quad\quad\quad \text{filtration(} \sigma ) = \alpha^2( \sigma )\\
+ * \quad\quad \textbf{end if}\\
+ * \quad\quad \textbf{for all } \tau \text{ face of } \sigma \textbf{ do}\quad\quad
+ * \textit{// propagate alpha filtration value}\\
+ * \quad\quad\quad \textbf{if } \text{filtration(} \tau ) \text{ is not NaN} \textbf{ then}\\
+ * \quad\quad\quad\quad \text{filtration(} \tau \text{) = min( filtration(} \tau \text{), filtration(} \sigma
+ * \text{) )}\\
+ * \quad\quad\quad \textbf{else}\\
+ * \quad\quad\quad\quad \textbf{if } \tau \text{ is not Gabriel for } \sigma \textbf{ then}\\
+ * \quad\quad\quad\quad\quad \text{filtration(} \tau \text{) = filtration(} \sigma \text{)}\\
+ * \quad\quad\quad\quad \textbf{end if}\\
+ * \quad\quad\quad \textbf{end if}\\
+ * \quad\quad \textbf{end for}\\
+ * \quad \textbf{end for}\\
+ * \textbf{end for}\\
+ * \text{make_filtration_non_decreasing()}\\
+ * \text{prune_above_filtration()}\\
+ * \f$
*
* \subsubsection dimension2 Dimension 2
*
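A minimal sketch of the propagation loop documented in the hunk above, assuming a Gudhi::Simplex_tree-like interface (skeleton_simplex_range, dimension, filtration, assign_filtration, boundary_simplex_range) and hypothetical alpha_squared / is_gabriel callables supplied by the caller. It only illustrates the documented algorithm; it is not the library's Alpha_complex implementation.

// Illustrative sketch only: propagate alpha^2 filtration values from
// top-dimensional simplices down to their faces, NaN marking "not yet computed".
#include <cmath>    // std::isnan, std::fmin
#include <limits>   // std::numeric_limits

template <typename SimplicialComplex, typename AlphaSquared, typename IsGabriel>
void propagate_alpha_filtration(SimplicialComplex& cpx, int dimension,
                                AlphaSquared alpha_squared, IsGabriel is_gabriel) {
  using Filtration_value = typename SimplicialComplex::Filtration_value;
  for (int dim = dimension; dim >= 0; --dim) {                // for i : dimension -> 0
    for (auto sigma : cpx.skeleton_simplex_range(dim)) {
      if (cpx.dimension(sigma) != dim) continue;              // keep only simplices of dimension i
      if (std::isnan(cpx.filtration(sigma)))
        cpx.assign_filtration(sigma, alpha_squared(sigma));   // filtration(sigma) = alpha^2(sigma)
      for (auto tau : cpx.boundary_simplex_range(sigma)) {    // propagate alpha filtration value to faces
        if (!std::isnan(cpx.filtration(tau)))
          cpx.assign_filtration(tau, std::fmin(cpx.filtration(tau), cpx.filtration(sigma)));
        else if (!is_gabriel(tau, sigma))                     // tau is not Gabriel for sigma
          cpx.assign_filtration(tau, cpx.filtration(sigma));
      }
    }
  }
  cpx.make_filtration_non_decreasing();
  cpx.prune_above_filtration(std::numeric_limits<Filtration_value>::infinity());
}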
diff --git a/src/Alpha_complex/example/CMakeLists.txt b/src/Alpha_complex/example/CMakeLists.txt
index 5bf553e9..2fc62452 100644
--- a/src/Alpha_complex/example/CMakeLists.txt
+++ b/src/Alpha_complex/example/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Alpha_complex_examples)
# need CGAL 4.7
diff --git a/src/Alpha_complex/test/CMakeLists.txt b/src/Alpha_complex/test/CMakeLists.txt
index 9e0b3b3c..9255d3db 100644
--- a/src/Alpha_complex/test/CMakeLists.txt
+++ b/src/Alpha_complex/test/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Alpha_complex_tests)
if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
diff --git a/src/Alpha_complex/utilities/CMakeLists.txt b/src/Alpha_complex/utilities/CMakeLists.txt
index a2dfac20..7ace6064 100644
--- a/src/Alpha_complex/utilities/CMakeLists.txt
+++ b/src/Alpha_complex/utilities/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Alpha_complex_utilities)
if(CGAL_FOUND)
diff --git a/src/Alpha_complex/utilities/alpha_complex_3d_persistence.cpp b/src/Alpha_complex/utilities/alpha_complex_3d_persistence.cpp
index efa20db9..6e603155 100644
--- a/src/Alpha_complex/utilities/alpha_complex_3d_persistence.cpp
+++ b/src/Alpha_complex/utilities/alpha_complex_3d_persistence.cpp
@@ -35,6 +35,8 @@
#include <CGAL/Exact_predicates_inexact_constructions_kernel.h>
#include <CGAL/Delaunay_triangulation_3.h>
#include <CGAL/Alpha_shape_3.h>
+#include <CGAL/Alpha_shape_cell_base_3.h>
+#include <CGAL/Alpha_shape_vertex_base_3.h>
#include <CGAL/iterator.h>
#include <fstream>
@@ -264,6 +266,6 @@ void program_options(int argc, char *argv[], std::string &off_file_points, std::
std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
std::cout << visible << std::endl;
- std::abort();
+ exit(-1);
}
}
diff --git a/src/Alpha_complex/utilities/alpha_complex_persistence.cpp b/src/Alpha_complex/utilities/alpha_complex_persistence.cpp
index 42390b0e..8e6c40b7 100644
--- a/src/Alpha_complex/utilities/alpha_complex_persistence.cpp
+++ b/src/Alpha_complex/utilities/alpha_complex_persistence.cpp
@@ -133,6 +133,6 @@ void program_options(int argc, char *argv[], std::string &off_file_points, std::
std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
std::cout << visible << std::endl;
- std::abort();
+ exit(-1);
}
}
diff --git a/src/Alpha_complex/utilities/exact_alpha_complex_3d_persistence.cpp b/src/Alpha_complex/utilities/exact_alpha_complex_3d_persistence.cpp
index 54c074c4..61f49bb1 100644
--- a/src/Alpha_complex/utilities/exact_alpha_complex_3d_persistence.cpp
+++ b/src/Alpha_complex/utilities/exact_alpha_complex_3d_persistence.cpp
@@ -30,6 +30,8 @@
#include <CGAL/Exact_predicates_inexact_constructions_kernel.h>
#include <CGAL/Delaunay_triangulation_3.h>
#include <CGAL/Alpha_shape_3.h>
+#include <CGAL/Alpha_shape_cell_base_3.h>
+#include <CGAL/Alpha_shape_vertex_base_3.h>
#include <CGAL/iterator.h>
#include <fstream>
@@ -258,6 +260,6 @@ void program_options(int argc, char *argv[], std::string &off_file_points, std::
std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
std::cout << visible << std::endl;
- std::abort();
+ exit(-1);
}
}
diff --git a/src/Alpha_complex/utilities/periodic_alpha_complex_3d_persistence.cpp b/src/Alpha_complex/utilities/periodic_alpha_complex_3d_persistence.cpp
index 7c6e1583..a261c5a3 100644
--- a/src/Alpha_complex/utilities/periodic_alpha_complex_3d_persistence.cpp
+++ b/src/Alpha_complex/utilities/periodic_alpha_complex_3d_persistence.cpp
@@ -32,6 +32,8 @@
#include <CGAL/Periodic_3_Delaunay_triangulation_traits_3.h>
#include <CGAL/Periodic_3_Delaunay_triangulation_3.h>
#include <CGAL/Alpha_shape_3.h>
+#include <CGAL/Alpha_shape_cell_base_3.h>
+#include <CGAL/Alpha_shape_vertex_base_3.h>
#include <CGAL/iterator.h>
#include <fstream>
@@ -295,6 +297,6 @@ void program_options(int argc, char *argv[], std::string &off_file_points, std::
std::cout << "Usage: " << argv[0] << " [options] input-file cuboid-file" << std::endl << std::endl;
std::cout << visible << std::endl;
- std::abort();
+ exit(-1);
}
}
diff --git a/src/Alpha_complex/utilities/weighted_alpha_complex_3d_persistence.cpp b/src/Alpha_complex/utilities/weighted_alpha_complex_3d_persistence.cpp
index 54483819..aa7ddee2 100644
--- a/src/Alpha_complex/utilities/weighted_alpha_complex_3d_persistence.cpp
+++ b/src/Alpha_complex/utilities/weighted_alpha_complex_3d_persistence.cpp
@@ -31,6 +31,8 @@
#include <CGAL/Exact_predicates_inexact_constructions_kernel.h>
#include <CGAL/Regular_triangulation_3.h>
#include <CGAL/Alpha_shape_3.h>
+#include <CGAL/Alpha_shape_cell_base_3.h>
+#include <CGAL/Alpha_shape_vertex_base_3.h>
#include <CGAL/iterator.h>
// For CGAL < 4.11
@@ -309,6 +311,6 @@ void program_options(int argc, char *argv[], std::string &off_file_points, std::
std::cout << "Usage: " << argv[0] << " [options] input-file weight-file" << std::endl << std::endl;
std::cout << visible << std::endl;
- std::abort();
+ exit(-1);
}
}
diff --git a/src/Alpha_complex/utilities/weighted_periodic_alpha_complex_3d_persistence.cpp b/src/Alpha_complex/utilities/weighted_periodic_alpha_complex_3d_persistence.cpp
index f03f29a7..d030c88c 100644
--- a/src/Alpha_complex/utilities/weighted_periodic_alpha_complex_3d_persistence.cpp
+++ b/src/Alpha_complex/utilities/weighted_periodic_alpha_complex_3d_persistence.cpp
@@ -31,6 +31,8 @@
#include <CGAL/Periodic_3_regular_triangulation_traits_3.h>
#include <CGAL/Periodic_3_regular_triangulation_3.h>
#include <CGAL/Alpha_shape_3.h>
+#include <CGAL/Alpha_shape_cell_base_3.h>
+#include <CGAL/Alpha_shape_vertex_base_3.h>
#include <CGAL/iterator.h>
#include <fstream>
diff --git a/src/Bitmap_cubical_complex/example/CMakeLists.txt b/src/Bitmap_cubical_complex/example/CMakeLists.txt
index 99304aa4..dc659f2d 100644
--- a/src/Bitmap_cubical_complex/example/CMakeLists.txt
+++ b/src/Bitmap_cubical_complex/example/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Bitmap_cubical_complex_examples)
add_executable ( Random_bitmap_cubical_complex Random_bitmap_cubical_complex.cpp )
diff --git a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h
index 47e71f8a..9b74e267 100644
--- a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h
+++ b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h
@@ -492,7 +492,7 @@ class Bitmap_cubical_complex_base {
this->multipliers.push_back(multiplier);
multiplier *= 2 * sizes[i] + 1;
}
- this->data = std::vector<T>(multiplier, std::numeric_limits<T>::max());
+ this->data = std::vector<T>(multiplier, std::numeric_limits<T>::infinity());
this->total_number_of_cells = multiplier;
}
@@ -562,7 +562,7 @@ void Bitmap_cubical_complex_base<T>::put_data_to_bins(T diameter_of_bin) {
template <typename T>
std::pair<T, T> Bitmap_cubical_complex_base<T>::min_max_filtration() {
- std::pair<T, T> min_max(std::numeric_limits<T>::max(), std::numeric_limits<T>::min());
+ std::pair<T, T> min_max(std::numeric_limits<T>::infinity(), -std::numeric_limits<T>::infinity());
for (std::size_t i = 0; i != this->data.size(); ++i) {
if (this->data[i] < min_max.first) min_max.first = this->data[i];
if (this->data[i] > min_max.second) min_max.second = this->data[i];
diff --git a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h
index 97070cda..8c35f590 100644
--- a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h
+++ b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h
@@ -177,7 +177,7 @@ class Bitmap_cubical_complex_periodic_boundary_conditions_base : public Bitmap_c
}
}
// std::reverse( this->sizes.begin() , this->sizes.end() );
- this->data = std::vector<T>(multiplier, std::numeric_limits<T>::max());
+ this->data = std::vector<T>(multiplier, std::numeric_limits<T>::infinity());
this->total_number_of_cells = multiplier;
}
Bitmap_cubical_complex_periodic_boundary_conditions_base(const std::vector<unsigned>& sizes);
diff --git a/src/Bitmap_cubical_complex/test/CMakeLists.txt b/src/Bitmap_cubical_complex/test/CMakeLists.txt
index 02b026f2..8b43632a 100644
--- a/src/Bitmap_cubical_complex/test/CMakeLists.txt
+++ b/src/Bitmap_cubical_complex/test/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Bitmap_cubical_complex_tests)
include(GUDHI_test_coverage)
diff --git a/src/Bitmap_cubical_complex/utilities/CMakeLists.txt b/src/Bitmap_cubical_complex/utilities/CMakeLists.txt
index 676a730a..416db67f 100644
--- a/src/Bitmap_cubical_complex/utilities/CMakeLists.txt
+++ b/src/Bitmap_cubical_complex/utilities/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Bitmap_cubical_complex_utilities)
add_executable ( cubical_complex_persistence cubical_complex_persistence.cpp )
diff --git a/src/Bottleneck_distance/benchmark/CMakeLists.txt b/src/Bottleneck_distance/benchmark/CMakeLists.txt
index 20a4e47b..3105a1d5 100644
--- a/src/Bottleneck_distance/benchmark/CMakeLists.txt
+++ b/src/Bottleneck_distance/benchmark/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Bottleneck_distance_benchmark)
if (NOT CGAL_VERSION VERSION_LESS 4.8.1)
diff --git a/src/Bottleneck_distance/example/CMakeLists.txt b/src/Bottleneck_distance/example/CMakeLists.txt
index 6095d6eb..c6f10127 100644
--- a/src/Bottleneck_distance/example/CMakeLists.txt
+++ b/src/Bottleneck_distance/example/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Bottleneck_distance_examples)
if (NOT CGAL_VERSION VERSION_LESS 4.8.1)
diff --git a/src/Bottleneck_distance/example/alpha_rips_persistence_bottleneck_distance.cpp b/src/Bottleneck_distance/example/alpha_rips_persistence_bottleneck_distance.cpp
index 1e27887c..2db1ef80 100644
--- a/src/Bottleneck_distance/example/alpha_rips_persistence_bottleneck_distance.cpp
+++ b/src/Bottleneck_distance/example/alpha_rips_persistence_bottleneck_distance.cpp
@@ -185,6 +185,6 @@ void program_options(int argc, char * argv[]
std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
std::cout << visible << std::endl;
- std::abort();
+ exit(-1);
}
}
diff --git a/src/Bottleneck_distance/include/gudhi/Bottleneck.h b/src/Bottleneck_distance/include/gudhi/Bottleneck.h
index 41f8b16a..7a553006 100644
--- a/src/Bottleneck_distance/include/gudhi/Bottleneck.h
+++ b/src/Bottleneck_distance/include/gudhi/Bottleneck.h
@@ -30,12 +30,13 @@
#include <limits> // for numeric_limits
#include <cmath>
+#include <cfloat> // FLT_EVAL_METHOD
namespace Gudhi {
namespace persistence_diagram {
-double bottleneck_distance_approx(Persistence_graph& g, double e) {
+inline double bottleneck_distance_approx(Persistence_graph& g, double e) {
double b_lower_bound = 0.;
double b_upper_bound = g.diameter_bound();
const double alpha = std::pow(g.size(), 1. / 5.);
@@ -43,6 +44,13 @@ double bottleneck_distance_approx(Persistence_graph& g, double e) {
Graph_matching biggest_unperfect(g);
while (b_upper_bound - b_lower_bound > 2 * e) {
double step = b_lower_bound + (b_upper_bound - b_lower_bound) / alpha;
+#if !defined FLT_EVAL_METHOD || FLT_EVAL_METHOD < 0 || FLT_EVAL_METHOD > 1
+ // On platforms where double computation is done with excess precision,
+ // we force it to its true precision so the following test is reliable.
+ volatile double drop_excess_precision = step;
+ step = drop_excess_precision;
+ // Alternative: step = CGAL::IA_force_to_double(step);
+#endif
if (step <= b_lower_bound || step >= b_upper_bound) // Avoid precision problem
break;
m.set_r(step);
@@ -58,7 +66,7 @@ double bottleneck_distance_approx(Persistence_graph& g, double e) {
return (b_lower_bound + b_upper_bound) / 2.;
}
-double bottleneck_distance_exact(Persistence_graph& g) {
+inline double bottleneck_distance_exact(Persistence_graph& g) {
std::vector<double> sd = g.sorted_distances();
long lower_bound_i = 0;
long upper_bound_i = sd.size() - 1;
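A self-contained sketch of the excess-precision issue the volatile round-trip above works around: on platforms where FLT_EVAL_METHOD reports extended evaluation (e.g. the x87 FPU), an intermediate such as step can be held with more precision than a 64-bit double, so the "step <= b_lower_bound || step >= b_upper_bound" guard may not fire even though the stored value would equal a bound. The values below are made up for the illustration; only the rounding trick comes from the patch.

#include <cmath>    // std::nextafter
#include <cfloat>   // FLT_EVAL_METHOD
#include <iostream>

int main() {
  double lo = 1.0;
  double hi = std::nextafter(lo, 2.0);  // smallest double strictly above lo
  double step = lo + (hi - lo) / 3.0;   // mathematically strictly between lo and hi
#if !defined FLT_EVAL_METHOD || FLT_EVAL_METHOD < 0 || FLT_EVAL_METHOD > 1
  volatile double rounded = step;       // force 64-bit rounding, as in the patch above
  step = rounded;
#endif
  // Once step is a genuine 64-bit double it collapses onto lo, so the guard below is
  // true and the dichotomic search can terminate; kept in an extended-precision
  // register, the same expression may evaluate to false and the loop would not stop.
  std::cout << std::boolalpha << (step <= lo || step >= hi) << "\n";
  return 0;
}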
diff --git a/src/Bottleneck_distance/test/CMakeLists.txt b/src/Bottleneck_distance/test/CMakeLists.txt
index 2676b82c..bb739280 100644
--- a/src/Bottleneck_distance/test/CMakeLists.txt
+++ b/src/Bottleneck_distance/test/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Bottleneck_distance_tests)
if (NOT CGAL_VERSION VERSION_LESS 4.8.1)
diff --git a/src/Bottleneck_distance/utilities/CMakeLists.txt b/src/Bottleneck_distance/utilities/CMakeLists.txt
index d19e3b1c..2f35885c 100644
--- a/src/Bottleneck_distance/utilities/CMakeLists.txt
+++ b/src/Bottleneck_distance/utilities/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Bottleneck_distance_utilities)
if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index 94587044..6c446104 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -1,12 +1,19 @@
-cmake_minimum_required(VERSION 2.6)
-project(GUDHI)
+cmake_minimum_required(VERSION 3.1)
-include("CMakeGUDHIVersion.txt")
+project(GUDHI)
-enable_testing()
+include(CMakeGUDHIVersion.txt)
list(APPEND CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake/modules/")
+# This variable is used by Cython CMakeLists.txt and by GUDHI_third_party_libraries to know its path
+set(GUDHI_CYTHON_PATH "cython")
+
+# For third parties libraries management - To be done last as CGAL updates CMAKE_MODULE_PATH
+include(GUDHI_third_party_libraries NO_POLICY_SCOPE)
+
+include(GUDHI_compilation_flags)
+
# Add your new module in the list, order is not important
include(GUDHI_modules)
@@ -14,6 +21,7 @@ add_gudhi_module(common)
add_gudhi_module(Alpha_complex)
add_gudhi_module(Bitmap_cubical_complex)
add_gudhi_module(Bottleneck_distance)
+add_gudhi_module(Cech_complex)
add_gudhi_module(Contraction)
add_gudhi_module(Hasse_complex)
add_gudhi_module(Persistence_representations)
@@ -33,30 +41,6 @@ message("++ GUDHI_MODULES list is:\"${GUDHI_MODULES}\"")
set(GUDHI_USER_VERSION_DIR ${CMAKE_SOURCE_DIR})
include(GUDHI_doxygen_target)
-# This variable is used by Cython CMakeLists.txt to know its path
-set(GUDHI_CYTHON_PATH "cython")
-# For third parties libraries management - To be done last as CGAL updates CMAKE_MODULE_PATH
-include(GUDHI_third_party_libraries)
-
-if(MSVC)
- # Turn off some VC++ warnings
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4267 /wd4668 /wd4311 /wd4800 /wd4820 /wd4503 /wd4244 /wd4345 /wd4996 /wd4396 /wd4018")
-else()
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -Wall -pedantic")
-endif()
-
-if(CMAKE_BUILD_TYPE MATCHES Debug)
- message("++ Debug compilation flags are: ${CMAKE_CXX_FLAGS} ${CMAKE_CXX_FLAGS_DEBUG}")
-else()
- message("++ Release compilation flags are: ${CMAKE_CXX_FLAGS} ${CMAKE_CXX_FLAGS_RELEASE}")
-endif()
-
-if (DEBUG_TRACES)
- message(STATUS "DEBUG_TRACES are activated")
- # For programs to be more verbose
- add_definitions(-DDEBUG_TRACES)
-endif()
-
#---------------------------------------------------------------------------------------
# Gudhi compilation part
include_directories(include)
@@ -87,7 +71,7 @@ export(PACKAGE GUDHI)
message("++ make install will install ${PROJECT_NAME} in the following directory : ${CMAKE_INSTALL_PREFIX}")
# Create the GUDHIConfig.cmake and GUDHIConfigVersion files
-set(CONF_INCLUDE_DIRS "${CMAKE_INSTALL_PREFIX}/include")
+set(CONF_INCLUDE_DIRS "${CMAKE_SOURCE_DIR}/include;${CMAKE_INSTALL_PREFIX}/include")
configure_file(GUDHIConfig.cmake.in "${PROJECT_BINARY_DIR}/GUDHIConfig.cmake" @ONLY)
configure_file(GUDHIConfigVersion.cmake.in "${PROJECT_BINARY_DIR}/GUDHIConfigVersion.cmake" @ONLY)
diff --git a/src/Cech_complex/benchmark/CMakeLists.txt b/src/Cech_complex/benchmark/CMakeLists.txt
new file mode 100644
index 00000000..b7697764
--- /dev/null
+++ b/src/Cech_complex/benchmark/CMakeLists.txt
@@ -0,0 +1,12 @@
+cmake_minimum_required(VERSION 2.6)
+project(Cech_complex_benchmark)
+
+# Do not forget to copy test files in current binary dir
+file(COPY "${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+
+add_executable(cech_complex_benchmark cech_complex_benchmark.cpp)
+target_link_libraries(cech_complex_benchmark ${Boost_SYSTEM_LIBRARY} ${Boost_FILESYSTEM_LIBRARY})
+
+if (TBB_FOUND)
+ target_link_libraries(cech_complex_benchmark ${TBB_LIBRARIES})
+endif()
diff --git a/src/Cech_complex/benchmark/cech_complex_benchmark.cpp b/src/Cech_complex/benchmark/cech_complex_benchmark.cpp
new file mode 100644
index 00000000..86314930
--- /dev/null
+++ b/src/Cech_complex/benchmark/cech_complex_benchmark.cpp
@@ -0,0 +1,144 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2018 Inria
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <gudhi/Points_off_io.h>
+#include <gudhi/distance_functions.h>
+#include <gudhi/graph_simplicial_complex.h>
+#include <gudhi/Clock.h>
+#include <gudhi/Rips_complex.h>
+#include <gudhi/Cech_complex.h>
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Miniball.hpp>
+
+#include "boost/filesystem.hpp" // includes all needed Boost.Filesystem declarations
+
+#include <string>
+#include <vector>
+
+// Types definition
+using Simplex_tree = Gudhi::Simplex_tree<>;
+using Filtration_value = Simplex_tree::Filtration_value;
+using Point = std::vector<Filtration_value>;
+using Point_cloud = std::vector<Point>;
+using Points_off_reader = Gudhi::Points_off_reader<Point>;
+using Proximity_graph = Gudhi::Proximity_graph<Simplex_tree>;
+using Rips_complex = Gudhi::rips_complex::Rips_complex<Filtration_value>;
+using Cech_complex = Gudhi::cech_complex::Cech_complex<Simplex_tree, Point_cloud>;
+
+class Minimal_enclosing_ball_radius {
+ public:
+ // boost::range_value is not SFINAE-friendly so we cannot use it in the return type
+ template <typename Point>
+ typename std::iterator_traits<typename boost::range_iterator<Point>::type>::value_type operator()(
+ const Point& p1, const Point& p2) const {
+ // Type def
+ using Point_cloud = std::vector<Point>;
+ using Point_iterator = typename Point_cloud::const_iterator;
+ using Coordinate_iterator = typename Point::const_iterator;
+ using Min_sphere =
+ typename Gudhi::Miniball::Miniball<Gudhi::Miniball::CoordAccessor<Point_iterator, Coordinate_iterator>>;
+
+ Point_cloud point_cloud;
+ point_cloud.push_back(p1);
+ point_cloud.push_back(p2);
+
+ GUDHI_CHECK((p1.end() - p1.begin()) == (p2.end() - p2.begin()), "inconsistent point dimensions");
+ Min_sphere min_sphere(p1.end() - p1.begin(), point_cloud.begin(), point_cloud.end());
+
+ return std::sqrt(min_sphere.squared_radius());
+ }
+};
+
+int main(int argc, char* argv[]) {
+ std::string off_file_points = "tore3D_1307.off";
+ Filtration_value threshold = 1e20;
+
+ // Extract the points from the file filepoints
+ Points_off_reader off_reader(off_file_points);
+
+ Gudhi::Clock euclidean_clock("Gudhi::Euclidean_distance");
+ // Compute the proximity graph of the points
+ Proximity_graph euclidean_prox_graph = Gudhi::compute_proximity_graph<Simplex_tree>(
+ off_reader.get_point_cloud(), threshold, Gudhi::Euclidean_distance());
+
+ std::cout << euclidean_clock << std::endl;
+
+ Gudhi::Clock miniball_clock("Minimal_enclosing_ball_radius");
+ // Compute the proximity graph of the points
+ Proximity_graph miniball_prox_graph = Gudhi::compute_proximity_graph<Simplex_tree>(
+ off_reader.get_point_cloud(), threshold, Minimal_enclosing_ball_radius());
+ std::cout << miniball_clock << std::endl;
+
+ Gudhi::Clock common_miniball_clock("Gudhi::Minimal_enclosing_ball_radius()");
+ // Compute the proximity graph of the points
+ Proximity_graph common_miniball_prox_graph = Gudhi::compute_proximity_graph<Simplex_tree>(
+ off_reader.get_point_cloud(), threshold, Gudhi::Minimal_enclosing_ball_radius());
+ std::cout << common_miniball_clock << std::endl;
+
+ boost::filesystem::path full_path(boost::filesystem::current_path());
+ std::cout << "Current path is : " << full_path << std::endl;
+
+ std::cout << "File name;Radius;Rips time;Cech time;Ratio Cech time/Rips time;Rips nb simplices;Cech nb simplices;"
+ << std::endl;
+ boost::filesystem::directory_iterator end_itr; // default construction yields past-the-end
+ for (boost::filesystem::directory_iterator itr(boost::filesystem::current_path()); itr != end_itr; ++itr) {
+ if (!boost::filesystem::is_directory(itr->status())) {
+ if (itr->path().extension() == ".off")  // only process OFF point files
+ {
+ Points_off_reader off_reader(itr->path().string());
+ Point p0 = off_reader.get_point_cloud()[0];
+
+ for (Filtration_value radius = 0.1; radius < 0.4; radius += 0.1) {
+ std::cout << itr->path().stem() << ";";
+ std::cout << radius << ";";
+ Gudhi::Clock rips_clock("Rips computation");
+ Rips_complex rips_complex_from_points(off_reader.get_point_cloud(), radius,
+ Gudhi::Minimal_enclosing_ball_radius());
+ Simplex_tree rips_stree;
+ rips_complex_from_points.create_complex(rips_stree, p0.size() - 1);
+ // ------------------------------------------
+ // Display information about the Rips complex
+ // ------------------------------------------
+ double rips_sec = rips_clock.num_seconds();
+ std::cout << rips_sec << ";";
+
+ Gudhi::Clock cech_clock("Cech computation");
+ Cech_complex cech_complex_from_points(off_reader.get_point_cloud(), radius);
+ Simplex_tree cech_stree;
+ cech_complex_from_points.create_complex(cech_stree, p0.size() - 1);
+ // ------------------------------------------
+ // Display information about the Cech complex
+ // ------------------------------------------
+ double cech_sec = cech_clock.num_seconds();
+ std::cout << cech_sec << ";";
+ std::cout << cech_sec / rips_sec << ";";
+
+ assert(rips_stree.num_simplices() >= cech_stree.num_simplices());
+ std::cout << rips_stree.num_simplices() << ";";
+ std::cout << cech_stree.num_simplices() << ";" << std::endl;
+ }
+ }
+ }
+ }
+
+ return 0;
+}
diff --git a/src/Cech_complex/concept/SimplicialComplexForCech.h b/src/Cech_complex/concept/SimplicialComplexForCech.h
new file mode 100644
index 00000000..89231eec
--- /dev/null
+++ b/src/Cech_complex/concept/SimplicialComplexForCech.h
@@ -0,0 +1,66 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2018 Inria
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef CONCEPT_CECH_COMPLEX_SIMPLICIAL_COMPLEX_FOR_CECH_H_
+#define CONCEPT_CECH_COMPLEX_SIMPLICIAL_COMPLEX_FOR_CECH_H_
+
+namespace Gudhi {
+
+namespace cech_complex {
+
+/** \brief The concept SimplicialComplexForCech describes the requirements for a type to implement a simplicial
+ * complex that can be created from a `Cech_complex`.
+ */
+struct SimplicialComplexForCech {
+ /** Handle to specify a simplex. */
+ typedef unspecified Simplex_handle;
+ /** Handle to specify a vertex. Must be a non-negative integer. */
+ typedef unspecified Vertex_handle;
+ /** Handle to specify the simplex filtration value. */
+ typedef unspecified Filtration_value;
+
+ /** Assigns the given 'filtration' value to the 'simplex'. */
+ int assign_filtration(Simplex_handle simplex, Filtration_value filtration);
+
+ /** \brief Returns a range over vertices of a given
+ * simplex. */
+ Simplex_vertex_range simplex_vertex_range(Simplex_handle const & simplex);
+
+ /** \brief Inserts a given `Gudhi::ProximityGraph` in the simplicial complex. */
+ template<class ProximityGraph>
+ void insert_graph(const ProximityGraph& proximity_graph);
+
+ /** \brief Expands the simplicial complex containing only its one skeleton until a given maximal dimension.
+ * The expansion can be blocked by the blocker oracle. */
+ template< typename Blocker >
+ void expansion_with_blockers(int max_dim, Blocker block_simplex);
+
+ /** Returns the number of vertices in the simplicial complex. */
+ std::size_t num_vertices();
+
+};
+
+} // namespace cech_complex
+
+} // namespace Gudhi
+
+#endif // CONCEPT_CECH_COMPLEX_SIMPLICIAL_COMPLEX_FOR_CECH_H_
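A hedged sketch of how a model of this concept is typically driven when building a Čech complex (Gudhi::Simplex_tree is such a model): the one-skeleton is inserted via insert_graph, then expansion_with_blockers grows the complex under the control of a blocker. The trivial lambda blocker below is illustrative only; the real Čech blocker computes the minimal enclosing ball radius of each candidate simplex and records it via assign_filtration. The point coordinates are made up for the example.

#include <gudhi/Simplex_tree.h>
#include <gudhi/graph_simplicial_complex.h>
#include <gudhi/distance_functions.h>

#include <iostream>
#include <vector>

int main() {
  using Simplex_tree = Gudhi::Simplex_tree<>;  // a model of SimplicialComplexForCech
  using Filtration_value = Simplex_tree::Filtration_value;
  using Point = std::vector<double>;

  std::vector<Point> points = {{1., 0.}, {0., 1.}, {2., 1.}, {3., 2.}};
  Filtration_value max_radius = 1.;

  // 1. Build the proximity graph, then hand it to the complex (insert_graph).
  auto graph = Gudhi::compute_proximity_graph<Simplex_tree>(points, max_radius,
                                                            Gudhi::Minimal_enclosing_ball_radius());
  Simplex_tree stree;
  stree.insert_graph(graph);

  // 2. Expand, letting a blocker decide whether each candidate simplex enters the
  //    complex (expansion_with_blockers). Here nothing is blocked.
  stree.expansion_with_blockers(2, [](Simplex_tree::Simplex_handle) { return false; });

  std::cout << stree.num_vertices() << " vertices, " << stree.num_simplices() << " simplices\n";
  return 0;
}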
diff --git a/src/Cech_complex/doc/COPYRIGHT b/src/Cech_complex/doc/COPYRIGHT
new file mode 100644
index 00000000..5f1d97cc
--- /dev/null
+++ b/src/Cech_complex/doc/COPYRIGHT
@@ -0,0 +1,19 @@
+The files of this directory are part of the Gudhi Library. The Gudhi library
+(Geometric Understanding in Higher Dimensions) is a generic C++ library for
+computational topology.
+
+Author(s): Vincent Rouvreau
+
+Copyright (C) 2015 Inria
+
+This program is free software: you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free Software
+Foundation, either version 3 of the License, or (at your option) any later
+version.
+
+This program is distributed in the hope that it will be useful, but WITHOUT
+ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License along with
+this program. If not, see <http://www.gnu.org/licenses/>.
diff --git a/src/Cech_complex/doc/Intro_cech_complex.h b/src/Cech_complex/doc/Intro_cech_complex.h
new file mode 100644
index 00000000..4483bcb9
--- /dev/null
+++ b/src/Cech_complex/doc/Intro_cech_complex.h
@@ -0,0 +1,114 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2018 Inria
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef DOC_CECH_COMPLEX_INTRO_CECH_COMPLEX_H_
+#define DOC_CECH_COMPLEX_INTRO_CECH_COMPLEX_H_
+
+namespace Gudhi {
+
+namespace cech_complex {
+
+/** \defgroup cech_complex Čech complex
+ *
+ * \author Vincent Rouvreau
+ *
+ * @{
+ *
+ * \section cechdefinition Čech complex definition
+ *
+ * The Čech complex
+ * <a target="_blank" href="https://en.wikipedia.org/wiki/%C4%8Cech_cohomology">(Wikipedia)</a> is a
+ * <a target="_blank" href="https://en.wikipedia.org/wiki/Simplicial_complex">simplicial complex</a> constructed
+ * from a proximity graph. The set of all simplices is filtered by the radius of their minimal enclosing ball.
+ *
+ * The input shall be a point cloud in a Euclidean space.
+ *
+ * \remark For people only interested in the topology of the \ref cech_complex (for instance persistence),
+ * \ref alpha_complex is equivalent to the \ref cech_complex and much smaller if you do not bound the radii.
+ * \ref cech_complex can still make sense in higher dimension precisely because you can bound the radii.
+ *
+ * \subsection cechalgorithm Algorithm
+ *
+ * Cech_complex first builds a proximity graph from a point cloud.
+ * The filtration value of each edge of the `Gudhi::Proximity_graph` is computed by
+ * the `Gudhi::Minimal_enclosing_ball_radius` function.
+ *
+ * All edges whose filtration value is strictly greater than a user-given maximal radius value, \f$max\_radius\f$,
+ * are not inserted into the complex.
+ *
+ * Vertex names correspond to the indices of the points in the given range (aka. the point cloud).
+ *
+ * \image html "cech_one_skeleton.png" "Čech complex proximity graph representation"
+ *
+ * When creating a simplicial complex from this proximity graph, Cech_complex inserts the proximity graph into the
+ * simplicial complex data structure, and then expands the simplicial complex when required.
+ *
+ * In this example, as the edges \f$(x,y)\f$, \f$(y,z)\f$ and \f$(x,z)\f$ are in the complex, the radius of the
+ * minimal ball containing the points \f$(x,y,z)\f$ is computed.
+ *
+ * \f$(x,y,z)\f$ is inserted into the simplicial complex with its filtration value set to
+ * \f$mini\_ball\_radius(x,y,z)\f$ iff \f$mini\_ball\_radius(x,y,z) \leq max\_radius\f$.
+ *
+ * And so on for higher dimensions.
+ *
+ * \image html "cech_complex_representation.png" "Čech complex expansion"
+ *
+ * The minimal ball radius computation is performed by
+ * <a target="_blank" href="https://people.inf.ethz.ch/gaertner/subdir/software/miniball.html">
+ * the miniball software (V3.0)</a> - Smallest Enclosing Balls of Points - which is distributed with GUDHI.
+ * Please refer to
+ * <a target="_blank" href="https://people.inf.ethz.ch/gaertner/subdir/texts/own_work/esa99_final.pdf">
+ * the miniball software design description</a> for more information about this computation.
+ *
+ * This radius computation is the reason why the Cech_complex takes much more time to compute than the
+ * \ref rips_complex, but it offers more topological guarantees.
+ *
+ * If the Cech_complex interfaces are not detailed enough for your needs, please refer to the
+ * <a href="_cech_complex_2cech_complex_step_by_step_8cpp-example.html">
+ * cech_complex_step_by_step.cpp</a> example, where the graph construction over the Simplex_tree is more detailed.
+ *
+ * \subsection cechpointscloudexample Example from a point cloud
+ *
+ * This example builds the proximity graph from the given points and a maximal radius value.
+ * Then it creates a `Simplex_tree` from it.
+ *
+ * Then, it is asked to display information about the simplicial complex.
+ *
+ * \include Cech_complex/cech_complex_example_from_points.cpp
+ *
+ * When launching (the maximal enclosing ball radius is 1., and the complex is expanded up to dimension 2):
+ *
+ * \code $> ./Cech_complex_example_from_points
+ * \endcode
+ *
+ * the program output is:
+ *
+ * \include Cech_complex/cech_complex_example_from_points_for_doc.txt
+ *
+ */
+/** @} */ // end defgroup cech_complex
+
+} // namespace cech_complex
+
+} // namespace Gudhi
+
+#endif // DOC_CECH_COMPLEX_INTRO_CECH_COMPLEX_H_
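The radius computation described above can also be reproduced directly with the bundled gudhi/Miniball.hpp header; the following standalone sketch (the same pattern as the Minimal_enclosing_ball_radius functor in the benchmark earlier in this patch) computes the radius that would become the filtration value of a 2-simplex (x, y, z). The point coordinates are made up for the example.

#include <gudhi/Miniball.hpp>

#include <cmath>     // std::sqrt
#include <iostream>
#include <vector>

int main() {
  using Point = std::vector<double>;
  using Point_cloud = std::vector<Point>;
  using Coord_it = Point::const_iterator;
  using Point_it = Point_cloud::const_iterator;
  using Min_sphere = Gudhi::Miniball::Miniball<Gudhi::Miniball::CoordAccessor<Point_it, Coord_it>>;

  // Three points forming a candidate 2-simplex (x, y, z).
  Point_cloud triangle = {{1., 0.}, {0., 1.}, {2., 1.}};

  // Miniball takes the ambient dimension and an iterator range over the points.
  Min_sphere sphere(triangle[0].size(), triangle.begin(), triangle.end());
  double radius = std::sqrt(sphere.squared_radius());

  // The 2-simplex is kept iff radius <= max_radius; radius is then its filtration value.
  std::cout << "minimal enclosing ball radius = " << radius << "\n";
  return 0;
}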
diff --git a/src/Cech_complex/doc/cech_complex_representation.ipe b/src/Cech_complex/doc/cech_complex_representation.ipe
new file mode 100644
index 00000000..377745a3
--- /dev/null
+++ b/src/Cech_complex/doc/cech_complex_representation.ipe
@@ -0,0 +1,330 @@
+<?xml version="1.0"?>
+<!DOCTYPE ipe SYSTEM "ipe.dtd">
+<ipe version="70107" creator="Ipe 7.1.10">
+<info created="D:20150603143945" modified="D:20180305162524"/>
+<ipestyle name="basic">
+<symbol name="arrow/arc(spx)">
+<path stroke="sym-stroke" fill="sym-stroke" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/farc(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="mark/circle(sx)" transformations="translations">
+<path fill="sym-stroke">
+0.6 0 0 0.6 0 0 e
+0.4 0 0 0.4 0 0 e
+</path>
+</symbol>
+<symbol name="mark/disk(sx)" transformations="translations">
+<path fill="sym-stroke">
+0.6 0 0 0.6 0 0 e
+</path>
+</symbol>
+<symbol name="mark/fdisk(sfx)" transformations="translations">
+<group>
+<path fill="sym-fill">
+0.5 0 0 0.5 0 0 e
+</path>
+<path fill="sym-stroke" fillrule="eofill">
+0.6 0 0 0.6 0 0 e
+0.4 0 0 0.4 0 0 e
+</path>
+</group>
+</symbol>
+<symbol name="mark/box(sx)" transformations="translations">
+<path fill="sym-stroke" fillrule="eofill">
+-0.6 -0.6 m
+0.6 -0.6 l
+0.6 0.6 l
+-0.6 0.6 l
+h
+-0.4 -0.4 m
+0.4 -0.4 l
+0.4 0.4 l
+-0.4 0.4 l
+h
+</path>
+</symbol>
+<symbol name="mark/square(sx)" transformations="translations">
+<path fill="sym-stroke">
+-0.6 -0.6 m
+0.6 -0.6 l
+0.6 0.6 l
+-0.6 0.6 l
+h
+</path>
+</symbol>
+<symbol name="mark/fsquare(sfx)" transformations="translations">
+<group>
+<path fill="sym-fill">
+-0.5 -0.5 m
+0.5 -0.5 l
+0.5 0.5 l
+-0.5 0.5 l
+h
+</path>
+<path fill="sym-stroke" fillrule="eofill">
+-0.6 -0.6 m
+0.6 -0.6 l
+0.6 0.6 l
+-0.6 0.6 l
+h
+-0.4 -0.4 m
+0.4 -0.4 l
+0.4 0.4 l
+-0.4 0.4 l
+h
+</path>
+</group>
+</symbol>
+<symbol name="mark/cross(sx)" transformations="translations">
+<group>
+<path fill="sym-stroke">
+-0.43 -0.57 m
+0.57 0.43 l
+0.43 0.57 l
+-0.57 -0.43 l
+h
+</path>
+<path fill="sym-stroke">
+-0.43 0.57 m
+0.57 -0.43 l
+0.43 -0.57 l
+-0.57 0.43 l
+h
+</path>
+</group>
+</symbol>
+<symbol name="arrow/fnormal(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/pointed(spx)">
+<path stroke="sym-stroke" fill="sym-stroke" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-0.8 0 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/fpointed(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-0.8 0 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/linear(spx)">
+<path stroke="sym-stroke" pen="sym-pen">
+-1 0.333 m
+0 0 l
+-1 -0.333 l
+</path>
+</symbol>
+<symbol name="arrow/fdouble(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+-1 0 m
+-2 0.333 l
+-2 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/double(spx)">
+<path stroke="sym-stroke" fill="sym-stroke" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+-1 0 m
+-2 0.333 l
+-2 -0.333 l
+h
+</path>
+</symbol>
+<pen name="heavier" value="0.8"/>
+<pen name="fat" value="1.2"/>
+<pen name="ultrafat" value="2"/>
+<symbolsize name="large" value="5"/>
+<symbolsize name="small" value="2"/>
+<symbolsize name="tiny" value="1.1"/>
+<arrowsize name="large" value="10"/>
+<arrowsize name="small" value="5"/>
+<arrowsize name="tiny" value="3"/>
+<color name="red" value="1 0 0"/>
+<color name="green" value="0 1 0"/>
+<color name="blue" value="0 0 1"/>
+<color name="yellow" value="1 1 0"/>
+<color name="orange" value="1 0.647 0"/>
+<color name="gold" value="1 0.843 0"/>
+<color name="purple" value="0.627 0.125 0.941"/>
+<color name="gray" value="0.745"/>
+<color name="brown" value="0.647 0.165 0.165"/>
+<color name="navy" value="0 0 0.502"/>
+<color name="pink" value="1 0.753 0.796"/>
+<color name="seagreen" value="0.18 0.545 0.341"/>
+<color name="turquoise" value="0.251 0.878 0.816"/>
+<color name="violet" value="0.933 0.51 0.933"/>
+<color name="darkblue" value="0 0 0.545"/>
+<color name="darkcyan" value="0 0.545 0.545"/>
+<color name="darkgray" value="0.663"/>
+<color name="darkgreen" value="0 0.392 0"/>
+<color name="darkmagenta" value="0.545 0 0.545"/>
+<color name="darkorange" value="1 0.549 0"/>
+<color name="darkred" value="0.545 0 0"/>
+<color name="lightblue" value="0.678 0.847 0.902"/>
+<color name="lightcyan" value="0.878 1 1"/>
+<color name="lightgray" value="0.827"/>
+<color name="lightgreen" value="0.565 0.933 0.565"/>
+<color name="lightyellow" value="1 1 0.878"/>
+<dashstyle name="dashed" value="[4] 0"/>
+<dashstyle name="dotted" value="[1 3] 0"/>
+<dashstyle name="dash dotted" value="[4 2 1 2] 0"/>
+<dashstyle name="dash dot dotted" value="[4 2 1 2 1 2] 0"/>
+<textsize name="large" value="\large"/>
+<textsize name="small" value="\small"/>
+<textsize name="tiny" value="\tiny"/>
+<textsize name="Large" value="\Large"/>
+<textsize name="LARGE" value="\LARGE"/>
+<textsize name="huge" value="\huge"/>
+<textsize name="Huge" value="\Huge"/>
+<textsize name="footnote" value="\footnotesize"/>
+<textstyle name="center" begin="\begin{center}" end="\end{center}"/>
+<textstyle name="itemize" begin="\begin{itemize}" end="\end{itemize}"/>
+<textstyle name="item" begin="\begin{itemize}\item{}" end="\end{itemize}"/>
+<gridsize name="4 pts" value="4"/>
+<gridsize name="8 pts (~3 mm)" value="8"/>
+<gridsize name="16 pts (~6 mm)" value="16"/>
+<gridsize name="32 pts (~12 mm)" value="32"/>
+<gridsize name="10 pts (~3.5 mm)" value="10"/>
+<gridsize name="20 pts (~7 mm)" value="20"/>
+<gridsize name="14 pts (~5 mm)" value="14"/>
+<gridsize name="28 pts (~10 mm)" value="28"/>
+<gridsize name="56 pts (~20 mm)" value="56"/>
+<anglesize name="90 deg" value="90"/>
+<anglesize name="60 deg" value="60"/>
+<anglesize name="45 deg" value="45"/>
+<anglesize name="30 deg" value="30"/>
+<anglesize name="22.5 deg" value="22.5"/>
+<tiling name="falling" angle="-60" step="4" width="1"/>
+<tiling name="rising" angle="30" step="4" width="1"/>
+</ipestyle>
+<page>
+<layer name="alpha"/>
+<view layers="alpha" active="alpha"/>
+<path layer="alpha" stroke="black" fill="darkcyan">
+48 640 m
+80 672 l
+48 672 l
+h
+</path>
+<text matrix="1 0 0 1 -222.178 174.178" transformations="translations" pos="380 530" stroke="seagreen" type="label" width="70.886" height="8.307" depth="2.32" valign="baseline" size="large">Cech complex</text>
+<text matrix="1 0 0 1 -212.333 10.6762" transformations="translations" pos="282.952 524.893" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">0</text>
+<text matrix="1 0 0 1 -314.178 58.1775" transformations="translations" pos="352.708 510.349" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">1</text>
+<text matrix="1 0 0 1 -194.178 -13.8225" transformations="translations" pos="310.693 578.759" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">2</text>
+<text matrix="1 0 0 1 -226.178 18.1775" transformations="translations" pos="375.332 578.49" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">3</text>
+<text matrix="1 0 0 1 -218.178 -21.8225" transformations="translations" pos="272.179 660.635" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">4</text>
+<text matrix="1 0 0 1 -89.478 -87.9762" transformations="translations" pos="296.419 724.197" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">5</text>
+<text matrix="1 0 0 1 -302.178 -13.8225" transformations="translations" pos="375.332 689.453" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
+<use name="mark/circle(sx)" pos="80 544" size="normal" stroke="black"/>
+<use name="mark/circle(sx)" pos="48 576" size="normal" stroke="black"/>
+<use name="mark/circle(sx)" pos="112 576" size="normal" stroke="black"/>
+<use name="mark/fdisk(sfx)" pos="48 672" size="normal" stroke="black" fill="white"/>
+<use name="mark/circle(sx)" pos="48 640" size="normal" stroke="black"/>
+<use name="mark/circle(sx)" pos="48 672" size="normal" stroke="black"/>
+<use name="mark/circle(sx)" pos="80 672" size="normal" stroke="black"/>
+<use name="mark/circle(sx)" pos="144 672" size="normal" stroke="black"/>
+<use name="mark/circle(sx)" pos="144 608" size="normal" stroke="black"/>
+<use name="mark/circle(sx)" pos="200 640" size="normal" stroke="black"/>
+<use matrix="1 0 0 1 -100 -96" name="mark/circle(sx)" pos="304 672" size="normal" stroke="darkgray"/>
+<use matrix="1 0 0 1 -100 -96" name="mark/circle(sx)" pos="336 672" size="normal" stroke="darkgray"/>
+<path matrix="1 0 0 1 -100 -96" stroke="darkgray">
+32 0 0 32 304 672 e
+</path>
+<path matrix="1 0 0 1 -100 -96" stroke="darkgray" pen="fat">
+304 672 m
+336 672 l
+</path>
+<text matrix="1 0 0 1 -214.178 50.178" transformations="translations" pos="380 530" stroke="darkgray" type="label" width="80.052" height="8.302" depth="0" valign="baseline" size="large">Maximal radius</text>
+<text matrix="1 0 0 1 -226.178 -13.8225" transformations="translations" pos="375.332 689.453" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">7</text>
+<text matrix="1 0 0 1 -258.178 30.1775" transformations="translations" pos="375.332 689.453" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">8</text>
+<text matrix="1 0 0 1 -334.178 -13.8225" transformations="translations" pos="375.332 689.453" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">9</text>
+<path stroke="black">
+112 576 m
+144 608 l
+</path>
+<path stroke="black">
+144 672 m
+144 608 l
+200 640 l
+h
+</path>
+<path stroke="black" fill="darkcyan">
+48 576 m
+112 576 l
+80 544 l
+h
+</path>
+<use name="mark/fdisk(sfx)" pos="112 728" size="normal" stroke="black"/>
+<path stroke="black">
+80 672 m
+144 672 l
+112 728 l
+h
+</path>
+<use name="mark/fdisk(sfx)" pos="112 728" size="normal" stroke="black" fill="white"/>
+<use name="mark/fdisk(sfx)" pos="80 672" size="normal" stroke="black" fill="white"/>
+<use name="mark/fdisk(sfx)" pos="144 672" size="normal" stroke="black" fill="white"/>
+<path stroke="black" fill="darkcyan">
+48 576 m
+48 640 l
+32 608 l
+h
+</path>
+<use name="mark/fdisk(sfx)" pos="200 640" size="normal" stroke="black" fill="white"/>
+<use name="mark/fdisk(sfx)" pos="144 608" size="normal" stroke="black" fill="white"/>
+<use name="mark/fdisk(sfx)" pos="112 576" size="normal" stroke="black" fill="white"/>
+<use name="mark/fdisk(sfx)" pos="80 544" size="normal" stroke="black" fill="white"/>
+<use name="mark/fdisk(sfx)" pos="48 576" size="normal" stroke="black" fill="white"/>
+<use name="mark/fdisk(sfx)" pos="48 640" size="normal" stroke="black" fill="white"/>
+<path stroke="darkcyan">
+32 0 0 32 80 576 e
+</path>
+<path stroke="darkcyan">
+22.6274 0 0 22.6274 64 656 e
+</path>
+<path stroke="darkorange">
+37.1429 0 0 37.1429 112 690.857 e
+</path>
+<path stroke="darkorange">
+37.1429 0 0 37.1429 162.857 640 e
+</path>
+<use name="mark/fdisk(sfx)" pos="32 608" size="normal" stroke="black"/>
+<text matrix="1 0 0 1 -334.178 94.1775" transformations="translations" pos="352.708 510.349" stroke="black" type="label" width="9.963" height="6.42" depth="0" valign="baseline">10</text>
+<path stroke="darkcyan">
+32 0 0 32 48 608 e
+</path>
+<use name="mark/fdisk(sfx)" pos="204 576" size="normal" stroke="darkgray" fill="white"/>
+<use name="mark/fdisk(sfx)" pos="236 576" size="normal" stroke="darkgray" fill="white"/>
+</page>
+</ipe>
diff --git a/src/Cech_complex/doc/cech_complex_representation.png b/src/Cech_complex/doc/cech_complex_representation.png
new file mode 100644
index 00000000..d0eb85a5
--- /dev/null
+++ b/src/Cech_complex/doc/cech_complex_representation.png
Binary files differ
diff --git a/src/Cech_complex/doc/cech_one_skeleton.ipe b/src/Cech_complex/doc/cech_one_skeleton.ipe
new file mode 100644
index 00000000..ed66e132
--- /dev/null
+++ b/src/Cech_complex/doc/cech_one_skeleton.ipe
@@ -0,0 +1,314 @@
+<?xml version="1.0"?>
+<!DOCTYPE ipe SYSTEM "ipe.dtd">
+<ipe version="70107" creator="Ipe 7.1.10">
+<info created="D:20150603143945" modified="D:20180305162558"/>
+<ipestyle name="basic">
+<symbol name="arrow/arc(spx)">
+<path stroke="sym-stroke" fill="sym-stroke" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/farc(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="mark/circle(sx)" transformations="translations">
+<path fill="sym-stroke">
+0.6 0 0 0.6 0 0 e
+0.4 0 0 0.4 0 0 e
+</path>
+</symbol>
+<symbol name="mark/disk(sx)" transformations="translations">
+<path fill="sym-stroke">
+0.6 0 0 0.6 0 0 e
+</path>
+</symbol>
+<symbol name="mark/fdisk(sfx)" transformations="translations">
+<group>
+<path fill="sym-fill">
+0.5 0 0 0.5 0 0 e
+</path>
+<path fill="sym-stroke" fillrule="eofill">
+0.6 0 0 0.6 0 0 e
+0.4 0 0 0.4 0 0 e
+</path>
+</group>
+</symbol>
+<symbol name="mark/box(sx)" transformations="translations">
+<path fill="sym-stroke" fillrule="eofill">
+-0.6 -0.6 m
+0.6 -0.6 l
+0.6 0.6 l
+-0.6 0.6 l
+h
+-0.4 -0.4 m
+0.4 -0.4 l
+0.4 0.4 l
+-0.4 0.4 l
+h
+</path>
+</symbol>
+<symbol name="mark/square(sx)" transformations="translations">
+<path fill="sym-stroke">
+-0.6 -0.6 m
+0.6 -0.6 l
+0.6 0.6 l
+-0.6 0.6 l
+h
+</path>
+</symbol>
+<symbol name="mark/fsquare(sfx)" transformations="translations">
+<group>
+<path fill="sym-fill">
+-0.5 -0.5 m
+0.5 -0.5 l
+0.5 0.5 l
+-0.5 0.5 l
+h
+</path>
+<path fill="sym-stroke" fillrule="eofill">
+-0.6 -0.6 m
+0.6 -0.6 l
+0.6 0.6 l
+-0.6 0.6 l
+h
+-0.4 -0.4 m
+0.4 -0.4 l
+0.4 0.4 l
+-0.4 0.4 l
+h
+</path>
+</group>
+</symbol>
+<symbol name="mark/cross(sx)" transformations="translations">
+<group>
+<path fill="sym-stroke">
+-0.43 -0.57 m
+0.57 0.43 l
+0.43 0.57 l
+-0.57 -0.43 l
+h
+</path>
+<path fill="sym-stroke">
+-0.43 0.57 m
+0.57 -0.43 l
+0.43 -0.57 l
+-0.57 0.43 l
+h
+</path>
+</group>
+</symbol>
+<symbol name="arrow/fnormal(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/pointed(spx)">
+<path stroke="sym-stroke" fill="sym-stroke" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-0.8 0 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/fpointed(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-0.8 0 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/linear(spx)">
+<path stroke="sym-stroke" pen="sym-pen">
+-1 0.333 m
+0 0 l
+-1 -0.333 l
+</path>
+</symbol>
+<symbol name="arrow/fdouble(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+-1 0 m
+-2 0.333 l
+-2 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/double(spx)">
+<path stroke="sym-stroke" fill="sym-stroke" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+-1 0 m
+-2 0.333 l
+-2 -0.333 l
+h
+</path>
+</symbol>
+<pen name="heavier" value="0.8"/>
+<pen name="fat" value="1.2"/>
+<pen name="ultrafat" value="2"/>
+<symbolsize name="large" value="5"/>
+<symbolsize name="small" value="2"/>
+<symbolsize name="tiny" value="1.1"/>
+<arrowsize name="large" value="10"/>
+<arrowsize name="small" value="5"/>
+<arrowsize name="tiny" value="3"/>
+<color name="red" value="1 0 0"/>
+<color name="green" value="0 1 0"/>
+<color name="blue" value="0 0 1"/>
+<color name="yellow" value="1 1 0"/>
+<color name="orange" value="1 0.647 0"/>
+<color name="gold" value="1 0.843 0"/>
+<color name="purple" value="0.627 0.125 0.941"/>
+<color name="gray" value="0.745"/>
+<color name="brown" value="0.647 0.165 0.165"/>
+<color name="navy" value="0 0 0.502"/>
+<color name="pink" value="1 0.753 0.796"/>
+<color name="seagreen" value="0.18 0.545 0.341"/>
+<color name="turquoise" value="0.251 0.878 0.816"/>
+<color name="violet" value="0.933 0.51 0.933"/>
+<color name="darkblue" value="0 0 0.545"/>
+<color name="darkcyan" value="0 0.545 0.545"/>
+<color name="darkgray" value="0.663"/>
+<color name="darkgreen" value="0 0.392 0"/>
+<color name="darkmagenta" value="0.545 0 0.545"/>
+<color name="darkorange" value="1 0.549 0"/>
+<color name="darkred" value="0.545 0 0"/>
+<color name="lightblue" value="0.678 0.847 0.902"/>
+<color name="lightcyan" value="0.878 1 1"/>
+<color name="lightgray" value="0.827"/>
+<color name="lightgreen" value="0.565 0.933 0.565"/>
+<color name="lightyellow" value="1 1 0.878"/>
+<dashstyle name="dashed" value="[4] 0"/>
+<dashstyle name="dotted" value="[1 3] 0"/>
+<dashstyle name="dash dotted" value="[4 2 1 2] 0"/>
+<dashstyle name="dash dot dotted" value="[4 2 1 2 1 2] 0"/>
+<textsize name="large" value="\large"/>
+<textsize name="small" value="\small"/>
+<textsize name="tiny" value="\tiny"/>
+<textsize name="Large" value="\Large"/>
+<textsize name="LARGE" value="\LARGE"/>
+<textsize name="huge" value="\huge"/>
+<textsize name="Huge" value="\Huge"/>
+<textsize name="footnote" value="\footnotesize"/>
+<textstyle name="center" begin="\begin{center}" end="\end{center}"/>
+<textstyle name="itemize" begin="\begin{itemize}" end="\end{itemize}"/>
+<textstyle name="item" begin="\begin{itemize}\item{}" end="\end{itemize}"/>
+<gridsize name="4 pts" value="4"/>
+<gridsize name="8 pts (~3 mm)" value="8"/>
+<gridsize name="16 pts (~6 mm)" value="16"/>
+<gridsize name="32 pts (~12 mm)" value="32"/>
+<gridsize name="10 pts (~3.5 mm)" value="10"/>
+<gridsize name="20 pts (~7 mm)" value="20"/>
+<gridsize name="14 pts (~5 mm)" value="14"/>
+<gridsize name="28 pts (~10 mm)" value="28"/>
+<gridsize name="56 pts (~20 mm)" value="56"/>
+<anglesize name="90 deg" value="90"/>
+<anglesize name="60 deg" value="60"/>
+<anglesize name="45 deg" value="45"/>
+<anglesize name="30 deg" value="30"/>
+<anglesize name="22.5 deg" value="22.5"/>
+<tiling name="falling" angle="-60" step="4" width="1"/>
+<tiling name="rising" angle="30" step="4" width="1"/>
+</ipestyle>
+<page>
+<layer name="alpha"/>
+<view layers="alpha" active="alpha"/>
+<text layer="alpha" matrix="1 0 0 1 -222.178 174.178" transformations="translations" pos="380 530" stroke="seagreen" type="label" width="84.053" height="8.307" depth="2.32" valign="baseline" size="large">Proximity graph</text>
+<text matrix="1 0 0 1 -212.333 10.6762" transformations="translations" pos="282.952 524.893" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">0</text>
+<text matrix="1 0 0 1 -314.178 58.1775" transformations="translations" pos="352.708 510.349" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">1</text>
+<path matrix="1 0 0 1 -100 -96" stroke="darkgray" pen="fat">
+304 672 m
+336 672 l
+</path>
+<text matrix="1 0 0 1 -194.178 -13.8225" transformations="translations" pos="310.693 578.759" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">2</text>
+<text matrix="1 0 0 1 -226.178 18.1775" transformations="translations" pos="375.332 578.49" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">3</text>
+<text matrix="1 0 0 1 -218.178 -21.8225" transformations="translations" pos="272.179 660.635" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">4</text>
+<text matrix="1 0 0 1 -89.478 -87.9762" transformations="translations" pos="296.419 724.197" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">5</text>
+<text matrix="1 0 0 1 -302.178 -13.8225" transformations="translations" pos="375.332 689.453" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
+<use name="mark/circle(sx)" pos="80 544" size="normal" stroke="black"/>
+<use name="mark/circle(sx)" pos="48 576" size="normal" stroke="black"/>
+<use name="mark/circle(sx)" pos="112 576" size="normal" stroke="black"/>
+<use name="mark/circle(sx)" pos="48 640" size="normal" stroke="black"/>
+<use name="mark/circle(sx)" pos="48 672" size="normal" stroke="black"/>
+<use name="mark/circle(sx)" pos="80 672" size="normal" stroke="black"/>
+<use name="mark/circle(sx)" pos="144 672" size="normal" stroke="black"/>
+<use name="mark/circle(sx)" pos="144 608" size="normal" stroke="black"/>
+<use name="mark/circle(sx)" pos="200 640" size="normal" stroke="black"/>
+<use matrix="1 0 0 1 -100 -96" name="mark/circle(sx)" pos="336 672" size="normal" stroke="darkgray"/>
+<path matrix="1 0 0 1 -100 -96" stroke="darkgray">
+32 0 0 32 304 672 e
+</path>
+<text matrix="1 0 0 1 -214.178 50.178" transformations="translations" pos="380 530" stroke="darkgray" type="label" width="80.052" height="8.302" depth="0" valign="baseline" size="large">Maximal radius</text>
+<text matrix="1 0 0 1 -226.178 -13.8225" transformations="translations" pos="375.332 689.453" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">7</text>
+<text matrix="1 0 0 1 -258.178 30.1775" transformations="translations" pos="375.332 689.453" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">8</text>
+<text matrix="1 0 0 1 -334.178 -13.8225" transformations="translations" pos="375.332 689.453" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">9</text>
+<path stroke="black">
+112 576 m
+144 608 l
+</path>
+<path stroke="black">
+144 672 m
+144 608 l
+200 640 l
+h
+</path>
+<path stroke="black">
+48 640 m
+80 672 l
+48 672 l
+h
+</path>
+<path stroke="black">
+48 576 m
+112 576 l
+80 544 l
+h
+</path>
+<use name="mark/fdisk(sfx)" pos="112 728" size="normal" stroke="black"/>
+<path stroke="black">
+80 672 m
+144 672 l
+112 728 l
+h
+</path>
+<use name="mark/fdisk(sfx)" pos="112 728" size="normal" stroke="black" fill="white"/>
+<path stroke="black">
+48 576 m
+48 640 l
+32 608 l
+h
+</path>
+<use name="mark/fdisk(sfx)" pos="80 672" size="normal" stroke="black" fill="white"/>
+<use name="mark/fdisk(sfx)" pos="144 672" size="normal" stroke="black" fill="white"/>
+<use name="mark/fdisk(sfx)" pos="200 640" size="normal" stroke="black" fill="white"/>
+<use name="mark/fdisk(sfx)" pos="144 608" size="normal" stroke="black" fill="white"/>
+<use name="mark/fdisk(sfx)" pos="112 576" size="normal" stroke="black" fill="white"/>
+<use name="mark/fdisk(sfx)" pos="80 544" size="normal" stroke="black" fill="white"/>
+<use name="mark/fdisk(sfx)" pos="48 576" size="normal" stroke="black" fill="white"/>
+<use name="mark/fdisk(sfx)" pos="48 640" size="normal" stroke="black" fill="white"/>
+<use name="mark/fdisk(sfx)" pos="48 672" size="normal" stroke="black" fill="white"/>
+<use name="mark/fdisk(sfx)" pos="32 608" size="normal" stroke="black" fill="white"/>
+<text matrix="1 0 0 1 -334.178 94.1775" transformations="translations" pos="352.708 510.349" stroke="black" type="label" width="9.963" height="6.42" depth="0" valign="baseline">10</text>
+<use name="mark/fdisk(sfx)" pos="204 576" size="normal" stroke="darkgray" fill="white"/>
+<use name="mark/fdisk(sfx)" pos="236 576" size="normal" stroke="darkgray" fill="white"/>
+</page>
+</ipe>
diff --git a/src/Cech_complex/doc/cech_one_skeleton.png b/src/Cech_complex/doc/cech_one_skeleton.png
new file mode 100644
index 00000000..cc636616
--- /dev/null
+++ b/src/Cech_complex/doc/cech_one_skeleton.png
Binary files differ
diff --git a/src/Cech_complex/example/CMakeLists.txt b/src/Cech_complex/example/CMakeLists.txt
new file mode 100644
index 00000000..ab391215
--- /dev/null
+++ b/src/Cech_complex/example/CMakeLists.txt
@@ -0,0 +1,16 @@
+cmake_minimum_required(VERSION 2.6)
+project(Cech_complex_examples)
+
+add_executable ( Cech_complex_example_step_by_step cech_complex_step_by_step.cpp )
+target_link_libraries(Cech_complex_example_step_by_step ${Boost_PROGRAM_OPTIONS_LIBRARY})
+if (TBB_FOUND)
+ target_link_libraries(Cech_complex_example_step_by_step ${TBB_LIBRARIES})
+endif()
+add_test(NAME Cech_complex_example_step_by_step_on_tore_3D COMMAND $<TARGET_FILE:Cech_complex_example_step_by_step>
+ "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off" "-r" "0.25" "-d" "3")
+
+add_executable ( Cech_complex_example_from_points cech_complex_example_from_points.cpp)
+if (TBB_FOUND)
+ target_link_libraries(Cech_complex_example_from_points ${TBB_LIBRARIES})
+endif()
+add_test(NAME Cech_complex_example_from_points COMMAND $<TARGET_FILE:Cech_complex_example_from_points>)
diff --git a/src/Cech_complex/example/cech_complex_example_from_points.cpp b/src/Cech_complex/example/cech_complex_example_from_points.cpp
new file mode 100644
index 00000000..3cc5a4df
--- /dev/null
+++ b/src/Cech_complex/example/cech_complex_example_from_points.cpp
@@ -0,0 +1,54 @@
+#include <gudhi/Cech_complex.h>
+#include <gudhi/Simplex_tree.h>
+
+#include <iostream>
+#include <string>
+#include <vector>
+#include <array>
+#include <cmath>  // for std::sqrt
+
+int main() {
+ // Type definitions
+ using Point_cloud = std::vector<std::array<double, 2>>;
+ using Simplex_tree = Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_fast_persistence>;
+ using Filtration_value = Simplex_tree::Filtration_value;
+ using Cech_complex = Gudhi::cech_complex::Cech_complex<Simplex_tree, Point_cloud>;
+
+ Point_cloud points;
+ points.push_back({1., 0.}); // 0
+ points.push_back({0., 1.}); // 1
+ points.push_back({2., 1.}); // 2
+ points.push_back({3., 2.}); // 3
+ points.push_back({0., 3.}); // 4
+ points.push_back({3. + std::sqrt(3.), 3.}); // 5
+ points.push_back({1., 4.}); // 6
+ points.push_back({3., 4.}); // 7
+ points.push_back({2., 4. + std::sqrt(3.)}); // 8
+ points.push_back({0., 4.}); // 9
+ points.push_back({-0.5, 2.}); // 10
+
+ // ----------------------------------------------------------------------------
+ // Init of a Cech complex from points
+ // ----------------------------------------------------------------------------
+ Filtration_value max_radius = 1.;
+ Cech_complex cech_complex_from_points(points, max_radius);
+
+ Simplex_tree stree;
+ cech_complex_from_points.create_complex(stree, 2);
+ // ----------------------------------------------------------------------------
+  // Display information about the Cech complex
+ // ----------------------------------------------------------------------------
+ std::cout << "Cech complex is of dimension " << stree.dimension() << " - " << stree.num_simplices() << " simplices - "
+ << stree.num_vertices() << " vertices." << std::endl;
+
+ std::cout << "Iterator on Cech complex simplices in the filtration order, with [filtration value]:" << std::endl;
+ for (auto f_simplex : stree.filtration_simplex_range()) {
+ std::cout << " ( ";
+ for (auto vertex : stree.simplex_vertex_range(f_simplex)) {
+ std::cout << vertex << " ";
+ }
+ std::cout << ") -> "
+ << "[" << stree.filtration(f_simplex) << "] ";
+ std::cout << std::endl;
+ }
+ return 0;
+}
diff --git a/src/Cech_complex/example/cech_complex_example_from_points_for_doc.txt b/src/Cech_complex/example/cech_complex_example_from_points_for_doc.txt
new file mode 100644
index 00000000..be0afc76
--- /dev/null
+++ b/src/Cech_complex/example/cech_complex_example_from_points_for_doc.txt
@@ -0,0 +1,31 @@
+Iterator on Cech complex simplices in the filtration order, with [filtration value]:
+ ( 0 ) -> [0]
+ ( 1 ) -> [0]
+ ( 2 ) -> [0]
+ ( 3 ) -> [0]
+ ( 4 ) -> [0]
+ ( 5 ) -> [0]
+ ( 6 ) -> [0]
+ ( 7 ) -> [0]
+ ( 8 ) -> [0]
+ ( 9 ) -> [0]
+ ( 10 ) -> [0]
+ ( 9 4 ) -> [0.5]
+ ( 9 6 ) -> [0.5]
+ ( 10 1 ) -> [0.559017]
+ ( 10 4 ) -> [0.559017]
+ ( 1 0 ) -> [0.707107]
+ ( 2 0 ) -> [0.707107]
+ ( 3 2 ) -> [0.707107]
+ ( 6 4 ) -> [0.707107]
+ ( 9 6 4 ) -> [0.707107]
+ ( 2 1 ) -> [1]
+ ( 2 1 0 ) -> [1]
+ ( 4 1 ) -> [1]
+ ( 5 3 ) -> [1]
+ ( 7 3 ) -> [1]
+ ( 7 5 ) -> [1]
+ ( 7 6 ) -> [1]
+ ( 8 6 ) -> [1]
+ ( 8 7 ) -> [1]
+ ( 10 4 1 ) -> [1]
diff --git a/src/Cech_complex/example/cech_complex_step_by_step.cpp b/src/Cech_complex/example/cech_complex_step_by_step.cpp
new file mode 100644
index 00000000..6fbbde5b
--- /dev/null
+++ b/src/Cech_complex/example/cech_complex_step_by_step.cpp
@@ -0,0 +1,166 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2018 Inria
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <gudhi/graph_simplicial_complex.h>
+#include <gudhi/distance_functions.h>
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Points_off_io.h>
+
+#include <gudhi/Miniball.hpp>
+
+#include <boost/program_options.hpp>
+
+#include <string>
+#include <vector>
+#include <limits> // infinity
+#include <utility> // for pair
+#include <map>
+
+// ----------------------------------------------------------------------------
+// cech_complex_step_by_step is an example of each step that is required to
+// build a Cech complex over a Simplex_tree. Please refer to cech_persistence
+// to see how to do the same thing with the Cech_complex wrapper for less
+// detailed steps.
+// ----------------------------------------------------------------------------
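+// For comparison, the Cech_complex wrapper condenses these steps into two calls
+// (illustrative sketch only; see cech_persistence.cpp in the utilities for a complete program):
+//   Gudhi::cech_complex::Cech_complex<Simplex_tree, std::vector<Point>> cc(points, max_radius);
+//   cc.create_complex(st, dim_max);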
+
+// Types definition
+using Simplex_tree = Gudhi::Simplex_tree<>;
+using Simplex_handle = Simplex_tree::Simplex_handle;
+using Filtration_value = Simplex_tree::Filtration_value;
+using Point = std::vector<double>;
+using Points_off_reader = Gudhi::Points_off_reader<Point>;
+using Proximity_graph = Gudhi::Proximity_graph<Simplex_tree>;
+
+class Cech_blocker {
+ private:
+ using Point_cloud = std::vector<Point>;
+ using Point_iterator = Point_cloud::const_iterator;
+ using Coordinate_iterator = Point::const_iterator;
+ using Min_sphere = Gudhi::Miniball::Miniball<Gudhi::Miniball::CoordAccessor<Point_iterator, Coordinate_iterator>>;
+
+ public:
+ bool operator()(Simplex_handle sh) {
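+    // Gather the coordinates of the simplex vertices, compute the radius of their
+    // minimal enclosing ball, store it as the filtration value of the simplex and
+    // block the expansion whenever this radius exceeds max_radius_.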
+ std::vector<Point> points;
+ for (auto vertex : simplex_tree_.simplex_vertex_range(sh)) {
+ points.push_back(point_cloud_[vertex]);
+#ifdef DEBUG_TRACES
+ std::cout << "#(" << vertex << ")#";
+#endif // DEBUG_TRACES
+ }
+ Filtration_value radius = Gudhi::Minimal_enclosing_ball_radius()(points);
+#ifdef DEBUG_TRACES
+ std::cout << "radius = " << radius << " - " << (radius > max_radius_) << std::endl;
+#endif // DEBUG_TRACES
+ simplex_tree_.assign_filtration(sh, radius);
+ return (radius > max_radius_);
+ }
+ Cech_blocker(Simplex_tree& simplex_tree, Filtration_value max_radius, const std::vector<Point>& point_cloud)
+ : simplex_tree_(simplex_tree), max_radius_(max_radius), point_cloud_(point_cloud) {
+ dimension_ = point_cloud_[0].size();
+ }
+
+ private:
+  Simplex_tree& simplex_tree_;  // reference: filtration values must be assigned in the caller's tree
+ Filtration_value max_radius_;
+ std::vector<Point> point_cloud_;
+ int dimension_;
+};
+
+void program_options(int argc, char* argv[], std::string& off_file_points, Filtration_value& max_radius, int& dim_max);
+
+int main(int argc, char* argv[]) {
+ std::string off_file_points;
+ Filtration_value max_radius;
+ int dim_max;
+
+ program_options(argc, argv, off_file_points, max_radius, dim_max);
+
+  // Extract the points from the OFF file
+ Points_off_reader off_reader(off_file_points);
+
+ // Compute the proximity graph of the points
+ Proximity_graph prox_graph = Gudhi::compute_proximity_graph<Simplex_tree>(off_reader.get_point_cloud(), max_radius,
+ Gudhi::Minimal_enclosing_ball_radius());
+
+  // Construct the Cech complex in a Simplex Tree
+ Simplex_tree st;
+ // insert the proximity graph in the simplex tree
+ st.insert_graph(prox_graph);
+ // expand the graph until dimension dim_max
+ st.expansion_with_blockers(dim_max, Cech_blocker(st, max_radius, off_reader.get_point_cloud()));
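+  // (expansion_with_blockers calls the blocker on every candidate simplex of dimension >= 2;
+  //  returning true discards the candidate, returning false keeps it with the filtration
+  //  value the blocker has just assigned)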
+
+ std::cout << "The complex contains " << st.num_simplices() << " simplices \n";
+ std::cout << " and has dimension " << st.dimension() << " \n";
+
+ // Sort the simplices in the order of the filtration
+ st.initialize_filtration();
+
+#ifdef DEBUG_TRACES
+ std::cout << "********************************************************************\n";
+ std::cout << "* The complex contains " << st.num_simplices() << " simplices - dimension=" << st.dimension() << "\n";
+ std::cout << "* Iterator on Simplices in the filtration, with [filtration value]:\n";
+ for (auto f_simplex : st.filtration_simplex_range()) {
+ std::cout << " "
+ << "[" << st.filtration(f_simplex) << "] ";
+ for (auto vertex : st.simplex_vertex_range(f_simplex)) {
+ std::cout << static_cast<int>(vertex) << " ";
+ }
+ std::cout << std::endl;
+ }
+#endif // DEBUG_TRACES
+
+ return 0;
+}
+
+void program_options(int argc, char* argv[], std::string& off_file_points, Filtration_value& max_radius, int& dim_max) {
+ namespace po = boost::program_options;
+ po::options_description hidden("Hidden options");
+ hidden.add_options()("input-file", po::value<std::string>(&off_file_points),
+ "Name of an OFF file containing a point set.\n");
+
+ po::options_description visible("Allowed options", 100);
+ visible.add_options()("help,h", "produce help message")(
+ "max-radius,r",
+ po::value<Filtration_value>(&max_radius)->default_value(std::numeric_limits<Filtration_value>::infinity()),
+ "Maximal length of an edge for the Rips complex construction.")(
+ "cpx-dimension,d", po::value<int>(&dim_max)->default_value(1),
+ "Maximal dimension of the Rips complex we want to compute.");
+
+ po::positional_options_description pos;
+ pos.add("input-file", 1);
+
+ po::options_description all;
+ all.add(visible).add(hidden);
+
+ po::variables_map vm;
+ po::store(po::command_line_parser(argc, argv).options(all).positional(pos).run(), vm);
+ po::notify(vm);
+
+ if (vm.count("help") || !vm.count("input-file")) {
+ std::cout << std::endl;
+ std::cout << "Construct a Cech complex defined on a set of input points.\n \n";
+
+ std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
+ std::cout << visible << std::endl;
+ exit(-1);
+ }
+}
diff --git a/src/Cech_complex/include/gudhi/Cech_complex.h b/src/Cech_complex/include/gudhi/Cech_complex.h
new file mode 100644
index 00000000..f9b8a269
--- /dev/null
+++ b/src/Cech_complex/include/gudhi/Cech_complex.h
@@ -0,0 +1,130 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2018 Inria
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef CECH_COMPLEX_H_
+#define CECH_COMPLEX_H_
+
+#include <gudhi/distance_functions.h> // for Gudhi::Minimal_enclosing_ball_radius
+#include <gudhi/graph_simplicial_complex.h> // for Gudhi::Proximity_graph
+#include <gudhi/Debug_utils.h> // for GUDHI_CHECK
+#include <gudhi/Cech_complex_blocker.h> // for Gudhi::cech_complex::Cech_blocker
+
+#include <iostream>
+#include <stdexcept> // for exception management
+#include <vector>
+
+namespace Gudhi {
+
+namespace cech_complex {
+
+/**
+ * \class Cech_complex
+ * \brief Cech complex data structure.
+ *
+ * \ingroup cech_complex
+ *
+ * \details
+ * The data structure is a proximity graph, containing an edge whenever its length is less than or equal
+ * to a given max_radius. Edge lengths are computed with the `Gudhi::Minimal_enclosing_ball_radius` distance function.
+ *
+ * \tparam SimplicialComplexForProximityGraph furnishes `Vertex_handle` and `Filtration_value` type definition required
+ * by `Gudhi::Proximity_graph`.
+ *
+ * \tparam ForwardPointRange must be a range for which `std::begin()` and `std::end()` methods return input
+ * iterators on a point. `std::begin()` and `std::end()` methods are also required for a point.
+ */
+template <typename SimplicialComplexForProximityGraph, typename ForwardPointRange>
+class Cech_complex {
+ private:
+ // Required by compute_proximity_graph
+ using Vertex_handle = typename SimplicialComplexForProximityGraph::Vertex_handle;
+ using Filtration_value = typename SimplicialComplexForProximityGraph::Filtration_value;
+ using Proximity_graph = Gudhi::Proximity_graph<SimplicialComplexForProximityGraph>;
+
+ // Retrieve Coordinate type from ForwardPointRange
+ using Point_from_range_iterator = typename boost::range_const_iterator<ForwardPointRange>::type;
+ using Point_from_range = typename std::iterator_traits<Point_from_range_iterator>::value_type;
+ using Coordinate_iterator = typename boost::range_const_iterator<Point_from_range>::type;
+ using Coordinate = typename std::iterator_traits<Coordinate_iterator>::value_type;
+
+ public:
+ // Point and Point_cloud type definition
+ using Point = std::vector<Coordinate>;
+ using Point_cloud = std::vector<Point>;
+
+ public:
+ /** \brief Cech_complex constructor from a list of points.
+ *
+ * @param[in] points Range of points.
+ * @param[in] max_radius Maximal radius value.
+ *
+ * \tparam ForwardPointRange must be a range of Point. Point must be a range of <b>copyable</b> Cartesian coordinates.
+ *
+ */
+ Cech_complex(const ForwardPointRange& points, Filtration_value max_radius) : max_radius_(max_radius) {
+ // Point cloud deep copy
+ point_cloud_.reserve(boost::size(points));
+ for (auto&& point : points) point_cloud_.emplace_back(std::begin(point), std::end(point));
+
+ cech_skeleton_graph_ = Gudhi::compute_proximity_graph<SimplicialComplexForProximityGraph>(
+ point_cloud_, max_radius_, Gudhi::Minimal_enclosing_ball_radius());
+ }
+
+ /** \brief Initializes the simplicial complex from the proximity graph and expands it until a given maximal
+ * dimension, using the Cech blocker oracle.
+ *
+ * @param[in] complex SimplicialComplexForCech to be created.
+   * @param[in] dim_max Maximal dimension for the graph expansion.
+ * @exception std::invalid_argument In debug mode, if `complex.num_vertices()` does not return 0.
+ *
+ */
+ template <typename SimplicialComplexForCechComplex>
+ void create_complex(SimplicialComplexForCechComplex& complex, int dim_max) {
+ GUDHI_CHECK(complex.num_vertices() == 0,
+ std::invalid_argument("Cech_complex::create_complex - simplicial complex is not empty"));
+
+ // insert the proximity graph in the simplicial complex
+ complex.insert_graph(cech_skeleton_graph_);
+ // expand the graph until dimension dim_max
+ complex.expansion_with_blockers(dim_max,
+ Cech_blocker<SimplicialComplexForCechComplex, Cech_complex>(&complex, this));
+ }
+
+ /** @return max_radius value given at construction. */
+ Filtration_value max_radius() const { return max_radius_; }
+
+ /** @param[in] vertex Point position in the range.
+ * @return The point.
+ */
+ const Point& get_point(Vertex_handle vertex) const { return point_cloud_[vertex]; }
+
+ private:
+ Proximity_graph cech_skeleton_graph_;
+ Filtration_value max_radius_;
+ Point_cloud point_cloud_;
+};
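+
+// Minimal usage sketch (illustrative only; Gudhi::Simplex_tree<> is one model of the simplicial
+// complex template parameters, and any range of points may be substituted):
+//
+//   std::vector<std::vector<double>> cloud = {{0., 0.}, {1., 0.}, {0., 1.}};
+//   Cech_complex<Gudhi::Simplex_tree<>, std::vector<std::vector<double>>> cc(cloud, 1.);
+//   Gudhi::Simplex_tree<> stree;
+//   cc.create_complex(stree, 2);  // inserts the proximity graph, then expands with the Cech blocker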
+
+} // namespace cech_complex
+
+} // namespace Gudhi
+
+#endif // CECH_COMPLEX_H_
diff --git a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h
new file mode 100644
index 00000000..b0d347b1
--- /dev/null
+++ b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h
@@ -0,0 +1,91 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2018 Inria
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef CECH_COMPLEX_BLOCKER_H_
+#define CECH_COMPLEX_BLOCKER_H_
+
+#include <gudhi/distance_functions.h> // for Gudhi::Minimal_enclosing_ball_radius
+
+#include <iostream>
+#include <vector>
+#include <cmath> // for std::sqrt
+
+namespace Gudhi {
+
+namespace cech_complex {
+
+/** \internal
+ * \class Cech_blocker
+ * \brief Čech complex blocker.
+ *
+ * \ingroup cech_complex
+ *
+ * \details
+ * Čech blocker is an oracle constructed from a Cech_complex and a simplicial complex.
+ *
+ * \tparam SimplicialComplexForCech furnishes `Simplex_handle` and `Filtration_value` type definitions, and the
+ * `simplex_vertex_range(Simplex_handle sh)` and `assign_filtration(Simplex_handle sh, Filtration_value filt)` methods.
+ *
+ * \tparam Cech_complex is required by the blocker.
+ */
+template <typename SimplicialComplexForCech, typename Cech_complex>
+class Cech_blocker {
+ private:
+ using Point_cloud = typename Cech_complex::Point_cloud;
+
+ using Simplex_handle = typename SimplicialComplexForCech::Simplex_handle;
+ using Filtration_value = typename SimplicialComplexForCech::Filtration_value;
+
+ public:
+  /** \internal \brief Čech complex blocker operator() - the oracle - assigns the filtration value from the simplex
+   * radius and returns whether the simplex expansion must be blocked.
+   * \param[in] sh The Simplex_handle.
+   * \return true if the simplex radius is greater than the Cech_complex max_radius */
+ bool operator()(Simplex_handle sh) {
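+    // A simplex is kept iff the minimal enclosing ball of its vertices has a radius at most
+    // max_radius(); that radius is also the filtration value assigned to the simplex.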
+ Point_cloud points;
+ for (auto vertex : sc_ptr_->simplex_vertex_range(sh)) {
+ points.push_back(cc_ptr_->get_point(vertex));
+#ifdef DEBUG_TRACES
+ std::cout << "#(" << vertex << ")#";
+#endif // DEBUG_TRACES
+ }
+ Filtration_value radius = Gudhi::Minimal_enclosing_ball_radius()(points);
+#ifdef DEBUG_TRACES
+ if (radius > cc_ptr_->max_radius()) std::cout << "radius > max_radius => expansion is blocked\n";
+#endif // DEBUG_TRACES
+ sc_ptr_->assign_filtration(sh, radius);
+ return (radius > cc_ptr_->max_radius());
+ }
+
+  /** \internal \brief Čech complex blocker constructor. */
+ Cech_blocker(SimplicialComplexForCech* sc_ptr, Cech_complex* cc_ptr) : sc_ptr_(sc_ptr), cc_ptr_(cc_ptr) {}
+
+ private:
+ SimplicialComplexForCech* sc_ptr_;
+ Cech_complex* cc_ptr_;
+};
+
+} // namespace cech_complex
+
+} // namespace Gudhi
+
+#endif // CECH_COMPLEX_BLOCKER_H_
diff --git a/src/Cech_complex/include/gudhi/Miniball.COPYRIGHT b/src/Cech_complex/include/gudhi/Miniball.COPYRIGHT
new file mode 100644
index 00000000..dbe4c553
--- /dev/null
+++ b/src/Cech_complex/include/gudhi/Miniball.COPYRIGHT
@@ -0,0 +1,4 @@
+The miniball software is available under the GNU General Public License (GPLv3 - https://www.gnu.org/copyleft/gpl.html).
+If your intended use is not compliant with this license, please buy a commercial license (EUR 500 - https://people.inf.ethz.ch/gaertner/subdir/software/miniball/license.html).
+You need a license if the software that you develop using Miniball V3.0 is not open source.
+
diff --git a/src/Cech_complex/include/gudhi/Miniball.README b/src/Cech_complex/include/gudhi/Miniball.README
new file mode 100644
index 00000000..033d8953
--- /dev/null
+++ b/src/Cech_complex/include/gudhi/Miniball.README
@@ -0,0 +1,26 @@
+https://people.inf.ethz.ch/gaertner/subdir/software/miniball.html
+
+Smallest Enclosing Balls of Points - Fast and Robust in C++.
+(high-quality software for smallest enclosing balls of balls is available in the computational geometry algorithms library CGAL)
+
+
+This is the miniball software (V3.0) for computing smallest enclosing balls of points in arbitrary dimensions. It consists of a C++ header file Miniball.hpp (around 500 lines of code) and two example programs miniball_example.cpp and miniball_example_containers.cpp that demonstrate the usage. The first example stores the coordinates of the input points in a two-dimensional array, the second example uses a list of vectors to show how generic containers can be used.
+
+Credits: Aditya Gupta and Alexandros Konstantinakis-Karmis have significantly contributed to this version of the software.
+
+Changes - https://people.inf.ethz.ch/gaertner/subdir/software/miniball/changes.txt - from previous versions.
+
+The theory - https://people.inf.ethz.ch/gaertner/subdir/texts/own_work/esa99_final.pdf - behind the miniball software (Proc. 7th Annual European Symposium on Algorithms (ESA), Lecture Notes in Computer Science 1643, Springer-Verlag, pp.325-338, 1999).
+
+Main Features:
+
+ Very fast in low dimensions. 1 million points in 5-space are processed within 0.05 seconds on any recent machine.
+
+ High numerical stability. Almost all input degeneracies (cospherical points, multiple points, points very close together) are routinely handled.
+
+ Easily integrates into your code. You can freely choose the coordinate type of your points and the container to store the points. If you still need to adapt the code, the header is small and readable and contains documentation for all major methods.
+
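+Minimal usage sketch as exercised in the GUDHI test suite (illustrative; any point container
+providing begin()/end() can be plugged in through a suitable CoordAccessor):
+
+    typedef std::vector<std::vector<double> >::const_iterator Point_iterator;
+    typedef std::vector<double>::const_iterator Coordinate_iterator;
+    typedef Gudhi::Miniball::Miniball<Gudhi::Miniball::CoordAccessor<Point_iterator, Coordinate_iterator> > Min_sphere;
+    Min_sphere ms(dimension, points.begin(), points.end());  // dimension = number of coordinates per point
+    double radius = std::sqrt(ms.squared_radius());
+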
+
+Changes done for the GUDHI version of MiniBall:
+ - Add include guard
+ - Move Miniball namespace inside a new Gudhi namespace
diff --git a/src/Cech_complex/include/gudhi/Miniball.hpp b/src/Cech_complex/include/gudhi/Miniball.hpp
new file mode 100644
index 00000000..ce6cbb5b
--- /dev/null
+++ b/src/Cech_complex/include/gudhi/Miniball.hpp
@@ -0,0 +1,523 @@
+// Copyright (C) 1999-2013, Bernd Gaertner
+// $Rev: 3581 $
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU General Public License for more details.
+
+// You should have received a copy of the GNU General Public License
+// along with this program. If not, see <http://www.gnu.org/licenses/>.
+//
+// Contact:
+// --------
+// Bernd Gaertner
+// Institute of Theoretical Computer Science
+// ETH Zuerich
+// CAB G31.1
+// CH-8092 Zuerich, Switzerland
+// http://www.inf.ethz.ch/personal/gaertner
+
+#ifndef MINIBALL_HPP_
+#define MINIBALL_HPP_
+
+#include <cassert>
+#include <algorithm>
+#include <list>
+#include <ctime>
+#include <limits>
+
+namespace Gudhi {
+
+namespace Miniball {
+
+ // Global Functions
+ // ================
+ template <typename NT>
+ inline NT mb_sqr (NT r) {return r*r;}
+
+ // Functors
+ // ========
+
+ // functor to map a point iterator to the corresponding coordinate iterator;
+ // generic version for points whose coordinate containers have begin()
+ template < typename Pit_, typename Cit_ >
+ struct CoordAccessor {
+ typedef Pit_ Pit;
+ typedef Cit_ Cit;
+ inline Cit operator() (Pit it) const { return (*it).begin(); }
+ };
+
+ // partial specialization for points whose coordinate containers are arrays
+ template < typename Pit_, typename Cit_ >
+ struct CoordAccessor<Pit_, Cit_*> {
+ typedef Pit_ Pit;
+ typedef Cit_* Cit;
+ inline Cit operator() (Pit it) const { return *it; }
+ };
+
+ // Class Declaration
+ // =================
+
+ template <typename CoordAccessor>
+ class Miniball {
+ private:
+ // types
+ // The iterator type to go through the input points
+ typedef typename CoordAccessor::Pit Pit;
+ // The iterator type to go through the coordinates of a single point.
+ typedef typename CoordAccessor::Cit Cit;
+ // The coordinate type
+ typedef typename std::iterator_traits<Cit>::value_type NT;
+ // The iterator to go through the support points
+ typedef typename std::list<Pit>::iterator Sit;
+
+ // data members...
+ const int d; // dimension
+ Pit points_begin;
+ Pit points_end;
+ CoordAccessor coord_accessor;
+ double time;
+ const NT nt0; // NT(0)
+
+ //...for the algorithms
+ std::list<Pit> L;
+ Sit support_end;
+ int fsize; // number of forced points
+ int ssize; // number of support points
+
+ // ...for the ball updates
+ NT* current_c;
+ NT current_sqr_r;
+ NT** c;
+ NT* sqr_r;
+
+ // helper arrays
+ NT* q0;
+ NT* z;
+ NT* f;
+ NT** v;
+ NT** a;
+
+ public:
+ // The iterator type to go through the support points
+ typedef typename std::list<Pit>::const_iterator SupportPointIterator;
+
+ // PRE: [begin, end) is a nonempty range
+ // POST: computes the smallest enclosing ball of the points in the range
+ // [begin, end); the functor a maps a point iterator to an iterator
+ // through the d coordinates of the point
+ Miniball (int d_, Pit begin, Pit end, CoordAccessor ca = CoordAccessor());
+
+ // POST: returns a pointer to the first element of an array that holds
+ // the d coordinates of the center of the computed ball
+ const NT* center () const;
+
+ // POST: returns the squared radius of the computed ball
+ NT squared_radius () const;
+
+ // POST: returns the number of support points of the computed ball;
+ // the support points form a minimal set with the same smallest
+ // enclosing ball as the input set; in particular, the support
+ // points are on the boundary of the computed ball, and their
+ // number is at most d+1
+ int nr_support_points () const;
+
+ // POST: returns an iterator to the first support point
+ SupportPointIterator support_points_begin () const;
+
+ // POST: returns a past-the-end iterator for the range of support points
+ SupportPointIterator support_points_end () const;
+
+ // POST: returns the maximum excess of any input point w.r.t. the computed
+ // ball, divided by the squared radius of the computed ball. The
+ // excess of a point is the difference between its squared distance
+ // from the center and the squared radius; Ideally, the return value
+ // is 0. subopt is set to the absolute value of the most negative
+ // coefficient in the affine combination of the support points that
+ // yields the center. Ideally, this is a convex combination, and there
+ // is no negative coefficient in which case subopt is set to 0.
+ NT relative_error (NT& subopt) const;
+
+ // POST: return true if the relative error is at most tol, and the
+ // suboptimality is 0; the default tolerance is 10 times the
+ // coordinate type's machine epsilon
+ bool is_valid (NT tol = NT(10) * std::numeric_limits<NT>::epsilon()) const;
+
+ // POST: returns the time in seconds taken by the constructor call for
+ // computing the smallest enclosing ball
+ double get_time() const;
+
+ // POST: deletes dynamically allocated arrays
+ ~Miniball();
+
+ private:
+ void mtf_mb (Sit n);
+ void mtf_move_to_front (Sit j);
+ void pivot_mb (Pit n);
+ void pivot_move_to_front (Pit j);
+ NT excess (Pit pit) const;
+ void pop ();
+ bool push (Pit pit);
+ NT suboptimality () const;
+ void create_arrays();
+ void delete_arrays();
+ };
+
+ // Class Definition
+ // ================
+ template <typename CoordAccessor>
+ Miniball<CoordAccessor>::Miniball (int d_, Pit begin, Pit end,
+ CoordAccessor ca)
+ : d (d_),
+ points_begin (begin),
+ points_end (end),
+ coord_accessor (ca),
+ time (clock()),
+ nt0 (NT(0)),
+ L(),
+ support_end (L.begin()),
+ fsize(0),
+ ssize(0),
+ current_c (NULL),
+ current_sqr_r (NT(-1)),
+ c (NULL),
+ sqr_r (NULL),
+ q0 (NULL),
+ z (NULL),
+ f (NULL),
+ v (NULL),
+ a (NULL)
+ {
+ assert (points_begin != points_end);
+ create_arrays();
+
+ // set initial center
+ for (int j=0; j<d; ++j) c[0][j] = nt0;
+ current_c = c[0];
+
+ // compute miniball
+ pivot_mb (points_end);
+
+ // update time
+ time = (clock() - time) / CLOCKS_PER_SEC;
+ }
+
+ template <typename CoordAccessor>
+ Miniball<CoordAccessor>::~Miniball()
+ {
+ delete_arrays();
+ }
+
+ template <typename CoordAccessor>
+ void Miniball<CoordAccessor>::create_arrays()
+ {
+ c = new NT*[d+1];
+ v = new NT*[d+1];
+ a = new NT*[d+1];
+ for (int i=0; i<d+1; ++i) {
+ c[i] = new NT[d];
+ v[i] = new NT[d];
+ a[i] = new NT[d];
+ }
+ sqr_r = new NT[d+1];
+ q0 = new NT[d];
+ z = new NT[d+1];
+ f = new NT[d+1];
+ }
+
+ template <typename CoordAccessor>
+ void Miniball<CoordAccessor>::delete_arrays()
+ {
+ delete[] f;
+ delete[] z;
+ delete[] q0;
+ delete[] sqr_r;
+ for (int i=0; i<d+1; ++i) {
+ delete[] a[i];
+ delete[] v[i];
+ delete[] c[i];
+ }
+ delete[] a;
+ delete[] v;
+ delete[] c;
+ }
+
+ template <typename CoordAccessor>
+ const typename Miniball<CoordAccessor>::NT*
+ Miniball<CoordAccessor>::center () const
+ {
+ return current_c;
+ }
+
+ template <typename CoordAccessor>
+ typename Miniball<CoordAccessor>::NT
+ Miniball<CoordAccessor>::squared_radius () const
+ {
+ return current_sqr_r;
+ }
+
+ template <typename CoordAccessor>
+ int Miniball<CoordAccessor>::nr_support_points () const
+ {
+ assert (ssize < d+2);
+ return ssize;
+ }
+
+ template <typename CoordAccessor>
+ typename Miniball<CoordAccessor>::SupportPointIterator
+ Miniball<CoordAccessor>::support_points_begin () const
+ {
+ return L.begin();
+ }
+
+ template <typename CoordAccessor>
+ typename Miniball<CoordAccessor>::SupportPointIterator
+ Miniball<CoordAccessor>::support_points_end () const
+ {
+ return support_end;
+ }
+
+ template <typename CoordAccessor>
+ typename Miniball<CoordAccessor>::NT
+ Miniball<CoordAccessor>::relative_error (NT& subopt) const
+ {
+ NT e, max_e = nt0;
+ // compute maximum absolute excess of support points
+ for (SupportPointIterator it = support_points_begin();
+ it != support_points_end(); ++it) {
+ e = excess (*it);
+ if (e < nt0) e = -e;
+ if (e > max_e) {
+ max_e = e;
+ }
+ }
+ // compute maximum excess of any point
+ for (Pit i = points_begin; i != points_end; ++i)
+ if ((e = excess (i)) > max_e)
+ max_e = e;
+
+ subopt = suboptimality();
+ assert (current_sqr_r > nt0 || max_e == nt0);
+ return (current_sqr_r == nt0 ? nt0 : max_e / current_sqr_r);
+ }
+
+ template <typename CoordAccessor>
+ bool Miniball<CoordAccessor>::is_valid (NT tol) const
+ {
+ NT suboptimality;
+ return ( (relative_error (suboptimality) <= tol) && (suboptimality == 0) );
+ }
+
+ template <typename CoordAccessor>
+ double Miniball<CoordAccessor>::get_time() const
+ {
+ return time;
+ }
+
+ template <typename CoordAccessor>
+ void Miniball<CoordAccessor>::mtf_mb (Sit n)
+ {
+ // Algorithm 1: mtf_mb (L_{n-1}, B), where L_{n-1} = [L.begin, n)
+ // B: the set of forced points, defining the current ball
+ // S: the superset of support points computed by the algorithm
+ // --------------------------------------------------------------
+ // from B. Gaertner, Fast and Robust Smallest Enclosing Balls, ESA 1999,
+ // http://www.inf.ethz.ch/personal/gaertner/texts/own_work/esa99_final.pdf
+
+ // PRE: B = S
+ assert (fsize == ssize);
+
+ support_end = L.begin();
+ if ((fsize) == d+1) return;
+
+ // incremental construction
+ for (Sit i = L.begin(); i != n;)
+ {
+ // INV: (support_end - L.begin() == |S|-|B|)
+ assert (std::distance (L.begin(), support_end) == ssize - fsize);
+
+ Sit j = i++;
+ if (excess(*j) > nt0)
+ if (push(*j)) { // B := B + p_i
+ mtf_mb (j); // mtf_mb (L_{i-1}, B + p_i)
+ pop(); // B := B - p_i
+ mtf_move_to_front(j);
+ }
+ }
+ // POST: the range [L.begin(), support_end) stores the set S\B
+ }
+
+ template <typename CoordAccessor>
+ void Miniball<CoordAccessor>::mtf_move_to_front (Sit j)
+ {
+ if (support_end == j)
+ support_end++;
+ L.splice (L.begin(), L, j);
+ }
+
+ template <typename CoordAccessor>
+ void Miniball<CoordAccessor>::pivot_mb (Pit n)
+ {
+ // Algorithm 2: pivot_mb (L_{n-1}), where L_{n-1} = [L.begin, n)
+ // --------------------------------------------------------------
+ // from B. Gaertner, Fast and Robust Smallest Enclosing Balls, ESA 1999,
+ // http://www.inf.ethz.ch/personal/gaertner/texts/own_work/esa99_final.pdf
+ NT old_sqr_r;
+ const NT* c;
+ Pit pivot, k;
+ NT e, max_e, sqr_r;
+ Cit p;
+ do {
+ old_sqr_r = current_sqr_r;
+ sqr_r = current_sqr_r;
+
+ pivot = points_begin;
+ max_e = nt0;
+ for (k = points_begin; k != n; ++k) {
+ p = coord_accessor(k);
+ e = -sqr_r;
+ c = current_c;
+ for (int j=0; j<d; ++j)
+ e += mb_sqr<NT>(*p++-*c++);
+ if (e > max_e) {
+ max_e = e;
+ pivot = k;
+ }
+ }
+
+ if (max_e > nt0) {
+ // check if the pivot is already contained in the support set
+ if (std::find(L.begin(), support_end, pivot) == support_end) {
+ assert (fsize == 0);
+ if (push (pivot)) {
+ mtf_mb(support_end);
+ pop();
+ pivot_move_to_front(pivot);
+ }
+ }
+ }
+ } while (old_sqr_r < current_sqr_r);
+ }
+
+ template <typename CoordAccessor>
+ void Miniball<CoordAccessor>::pivot_move_to_front (Pit j)
+ {
+ L.push_front(j);
+ if (std::distance(L.begin(), support_end) == d+2)
+ support_end--;
+ }
+
+ template <typename CoordAccessor>
+ inline typename Miniball<CoordAccessor>::NT
+ Miniball<CoordAccessor>::excess (Pit pit) const
+ {
+ Cit p = coord_accessor(pit);
+ NT e = -current_sqr_r;
+ NT* c = current_c;
+ for (int k=0; k<d; ++k){
+ e += mb_sqr<NT>(*p++-*c++);
+ }
+ return e;
+ }
+
+ template <typename CoordAccessor>
+ void Miniball<CoordAccessor>::pop ()
+ {
+ --fsize;
+ }
+
+ template <typename CoordAccessor>
+ bool Miniball<CoordAccessor>::push (Pit pit)
+ {
+ int i, j;
+ NT eps = mb_sqr<NT>(std::numeric_limits<NT>::epsilon());
+
+ Cit cit = coord_accessor(pit);
+ Cit p = cit;
+
+ if (fsize==0) {
+ for (i=0; i<d; ++i)
+ q0[i] = *p++;
+ for (i=0; i<d; ++i)
+ c[0][i] = q0[i];
+ sqr_r[0] = nt0;
+ }
+ else {
+ // set v_fsize to Q_fsize
+ for (i=0; i<d; ++i)
+ //v[fsize][i] = p[i]-q0[i];
+ v[fsize][i] = *p++-q0[i];
+
+ // compute the a_{fsize,i}, i< fsize
+ for (i=1; i<fsize; ++i) {
+ a[fsize][i] = nt0;
+ for (j=0; j<d; ++j)
+ a[fsize][i] += v[i][j] * v[fsize][j];
+ a[fsize][i]*=(2/z[i]);
+ }
+
+ // update v_fsize to Q_fsize-\bar{Q}_fsize
+ for (i=1; i<fsize; ++i) {
+ for (j=0; j<d; ++j)
+ v[fsize][j] -= a[fsize][i]*v[i][j];
+ }
+
+ // compute z_fsize
+ z[fsize]=nt0;
+ for (j=0; j<d; ++j)
+ z[fsize] += mb_sqr<NT>(v[fsize][j]);
+ z[fsize]*=2;
+
+ // reject push if z_fsize too small
+ if (z[fsize]<eps*current_sqr_r) {
+ return false;
+ }
+
+ // update c, sqr_r
+ p=cit;
+ NT e = -sqr_r[fsize-1];
+ for (i=0; i<d; ++i)
+ e += mb_sqr<NT>(*p++-c[fsize-1][i]);
+ f[fsize]=e/z[fsize];
+
+ for (i=0; i<d; ++i)
+ c[fsize][i] = c[fsize-1][i]+f[fsize]*v[fsize][i];
+ sqr_r[fsize] = sqr_r[fsize-1] + e*f[fsize]/2;
+ }
+ current_c = c[fsize];
+ current_sqr_r = sqr_r[fsize];
+ ssize = ++fsize;
+ return true;
+ }
+
+ template <typename CoordAccessor>
+ typename Miniball<CoordAccessor>::NT
+ Miniball<CoordAccessor>::suboptimality () const
+ {
+ NT* l = new NT[d+1];
+ NT min_l = nt0;
+ l[0] = NT(1);
+ for (int i=ssize-1; i>0; --i) {
+ l[i] = f[i];
+ for (int k=ssize-1; k>i; --k)
+ l[i]-=a[k][i]*l[k];
+ if (l[i] < min_l) min_l = l[i];
+ l[0] -= l[i];
+ }
+ if (l[0] < min_l) min_l = l[0];
+ delete[] l;
+ if (min_l < nt0)
+ return -min_l;
+ return nt0;
+ }
+} // namespace Miniball
+
+} // namespace Gudhi
+
+#endif // MINIBALL_HPP_
diff --git a/src/Cech_complex/test/CMakeLists.txt b/src/Cech_complex/test/CMakeLists.txt
new file mode 100644
index 00000000..8db51173
--- /dev/null
+++ b/src/Cech_complex/test/CMakeLists.txt
@@ -0,0 +1,15 @@
+cmake_minimum_required(VERSION 2.6)
+project(Cech_complex_tests)
+
+include(GUDHI_test_coverage)
+
+add_executable ( Cech_complex_test_unit test_cech_complex.cpp )
+target_link_libraries(Cech_complex_test_unit ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
+if (TBB_FOUND)
+ target_link_libraries(Cech_complex_test_unit ${TBB_LIBRARIES})
+endif()
+
+# Do not forget to copy test files in current binary dir
+file(COPY "${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+
+gudhi_add_coverage_test(Cech_complex_test_unit)
diff --git a/src/Cech_complex/test/README b/src/Cech_complex/test/README
new file mode 100644
index 00000000..adf704f7
--- /dev/null
+++ b/src/Cech_complex/test/README
@@ -0,0 +1,12 @@
+To compile:
+***********
+
+cmake .
+make
+
+To launch with details:
+***********************
+
+./Cech_complex_test_unit --report_level=detailed --log_level=all
+
+ ==> echo $? returns 0 in case of success (non-zero otherwise)
diff --git a/src/Cech_complex/test/test_cech_complex.cpp b/src/Cech_complex/test/test_cech_complex.cpp
new file mode 100644
index 00000000..9039169c
--- /dev/null
+++ b/src/Cech_complex/test/test_cech_complex.cpp
@@ -0,0 +1,264 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2018 Inria
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#define BOOST_TEST_DYN_LINK
+#define BOOST_TEST_MODULE "cech_complex"
+#include <boost/test/unit_test.hpp>
+
+#include <cmath> // float comparison
+#include <limits>
+#include <string>
+#include <vector>
+#include <algorithm> // std::max
+
+#include <gudhi/Cech_complex.h>
+// to construct Cech_complex from a OFF file of points
+#include <gudhi/Points_off_io.h>
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/distance_functions.h>
+#include <gudhi/Unitary_tests_utils.h>
+#include <gudhi/Miniball.hpp>
+
+// Type definitions
+using Simplex_tree = Gudhi::Simplex_tree<>;
+using Filtration_value = Simplex_tree::Filtration_value;
+using Point = std::vector<Filtration_value>;
+using Point_cloud = std::vector<Point>;
+using Points_off_reader = Gudhi::Points_off_reader<Point>;
+using Cech_complex = Gudhi::cech_complex::Cech_complex<Simplex_tree, Point_cloud>;
+
+using Point_iterator = Point_cloud::const_iterator;
+using Coordinate_iterator = Point::const_iterator;
+using Min_sphere = Gudhi::Miniball::Miniball<Gudhi::Miniball::CoordAccessor<Point_iterator, Coordinate_iterator>>;
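+// Note: Miniball returns the squared radius of the minimal enclosing ball; the Cech filtration
+// values computed by the blocker are compared below against its square root.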
+
+BOOST_AUTO_TEST_CASE(Cech_complex_for_documentation) {
+ // ----------------------------------------------------------------------------
+ //
+ // Init of a Cech complex from a point cloud
+ //
+ // ----------------------------------------------------------------------------
+ Point_cloud points;
+ points.push_back({1., 0.}); // 0
+ points.push_back({0., 1.}); // 1
+ points.push_back({2., 1.}); // 2
+ points.push_back({3., 2.}); // 3
+ points.push_back({0., 3.}); // 4
+ points.push_back({3. + std::sqrt(3.), 3.}); // 5
+ points.push_back({1., 4.}); // 6
+ points.push_back({3., 4.}); // 7
+ points.push_back({2., 4. + std::sqrt(3.)}); // 8
+ points.push_back({0., 4.}); // 9
+ points.push_back({-0.5, 2.}); // 10
+
+ Filtration_value max_radius = 1.0;
+ std::cout << "========== NUMBER OF POINTS = " << points.size() << " - Cech max_radius = " << max_radius
+ << "==========" << std::endl;
+
+ Cech_complex cech_complex_for_doc(points, max_radius);
+
+ GUDHI_TEST_FLOAT_EQUALITY_CHECK(cech_complex_for_doc.max_radius(), max_radius);
+ std::size_t i = 0;
+ for (; i < points.size(); i++) {
+ BOOST_CHECK(points[i] == cech_complex_for_doc.get_point(i));
+ }
+
+ const int DIMENSION_1 = 1;
+ Simplex_tree st;
+ cech_complex_for_doc.create_complex(st, DIMENSION_1);
+ std::cout << "st.dimension()=" << st.dimension() << std::endl;
+ BOOST_CHECK(st.dimension() == DIMENSION_1);
+
+ const int NUMBER_OF_VERTICES = 11;
+ std::cout << "st.num_vertices()=" << st.num_vertices() << std::endl;
+ BOOST_CHECK(st.num_vertices() == NUMBER_OF_VERTICES);
+
+ std::cout << "st.num_simplices()=" << st.num_simplices() << std::endl;
+ BOOST_CHECK(st.num_simplices() == 27);
+
+  // Check that filtration values of vertices are 0.0
+ for (auto f_simplex : st.skeleton_simplex_range(0)) {
+ BOOST_CHECK(st.filtration(f_simplex) == 0.0);
+ }
+
+ // Check filtration values of edges
+ for (auto f_simplex : st.skeleton_simplex_range(DIMENSION_1)) {
+ if (DIMENSION_1 == st.dimension(f_simplex)) {
+ std::vector<Point> vp;
+ std::cout << "vertex = (";
+ for (auto vertex : st.simplex_vertex_range(f_simplex)) {
+ std::cout << vertex << ",";
+ vp.push_back(points.at(vertex));
+ }
+ std::cout << ") - distance =" << Gudhi::Minimal_enclosing_ball_radius()(vp.at(0), vp.at(1))
+ << " - filtration =" << st.filtration(f_simplex) << std::endl;
+ BOOST_CHECK(vp.size() == 2);
+ GUDHI_TEST_FLOAT_EQUALITY_CHECK(st.filtration(f_simplex),
+ Gudhi::Minimal_enclosing_ball_radius()(vp.at(0), vp.at(1)));
+ }
+ }
+
+ const int DIMENSION_2 = 2;
+
+#ifdef GUDHI_DEBUG
+ BOOST_CHECK_THROW(cech_complex_for_doc.create_complex(st, DIMENSION_2), std::invalid_argument);
+#endif
+
+ Simplex_tree st2;
+ cech_complex_for_doc.create_complex(st2, DIMENSION_2);
+ std::cout << "st2.dimension()=" << st2.dimension() << std::endl;
+ BOOST_CHECK(st2.dimension() == DIMENSION_2);
+
+ std::cout << "st2.num_vertices()=" << st2.num_vertices() << std::endl;
+ BOOST_CHECK(st2.num_vertices() == NUMBER_OF_VERTICES);
+
+ std::cout << "st2.num_simplices()=" << st2.num_simplices() << std::endl;
+ BOOST_CHECK(st2.num_simplices() == 30);
+
+ Point_cloud points012;
+ for (std::size_t vertex = 0; vertex <= 2; vertex++) {
+ points012.push_back(cech_complex_for_doc.get_point(vertex));
+ }
+ std::size_t dimension = points[0].end() - points[0].begin();
+ Min_sphere ms012(dimension, points012.begin(), points012.end());
+
+ Simplex_tree::Filtration_value f012 = st2.filtration(st2.find({0, 1, 2}));
+ std::cout << "f012= " << f012 << " | ms012_radius= " << std::sqrt(ms012.squared_radius()) << std::endl;
+
+ GUDHI_TEST_FLOAT_EQUALITY_CHECK(f012, std::sqrt(ms012.squared_radius()));
+
+ Point_cloud points1410;
+ points1410.push_back(cech_complex_for_doc.get_point(1));
+ points1410.push_back(cech_complex_for_doc.get_point(4));
+ points1410.push_back(cech_complex_for_doc.get_point(10));
+ Min_sphere ms1410(dimension, points1410.begin(), points1410.end());
+
+ Simplex_tree::Filtration_value f1410 = st2.filtration(st2.find({1, 4, 10}));
+ std::cout << "f1410= " << f1410 << " | ms1410_radius= " << std::sqrt(ms1410.squared_radius()) << std::endl;
+
+ GUDHI_TEST_FLOAT_EQUALITY_CHECK(f1410, std::sqrt(ms1410.squared_radius()));
+
+ Point_cloud points469;
+ points469.push_back(cech_complex_for_doc.get_point(4));
+ points469.push_back(cech_complex_for_doc.get_point(6));
+ points469.push_back(cech_complex_for_doc.get_point(9));
+ Min_sphere ms469(dimension, points469.begin(), points469.end());
+
+ Simplex_tree::Filtration_value f469 = st2.filtration(st2.find({4, 6, 9}));
+ std::cout << "f469= " << f469 << " | ms469_radius= " << std::sqrt(ms469.squared_radius()) << std::endl;
+
+ GUDHI_TEST_FLOAT_EQUALITY_CHECK(f469, std::sqrt(ms469.squared_radius()));
+
+ BOOST_CHECK((st2.find({6, 7, 8}) == st2.null_simplex()));
+ BOOST_CHECK((st2.find({3, 5, 7}) == st2.null_simplex()));
+}
+
+BOOST_AUTO_TEST_CASE(Cech_complex_from_points) {
+ // ----------------------------------------------------------------------------
+ // Init of a list of points
+ // ----------------------------------------------------------------------------
+ Point_cloud points;
+ std::vector<double> coords = {0.0, 0.0, 0.0, 1.0};
+ points.push_back(Point(coords.begin(), coords.end()));
+ coords = {0.0, 0.0, 1.0, 0.0};
+ points.push_back(Point(coords.begin(), coords.end()));
+ coords = {0.0, 1.0, 0.0, 0.0};
+ points.push_back(Point(coords.begin(), coords.end()));
+ coords = {1.0, 0.0, 0.0, 0.0};
+ points.push_back(Point(coords.begin(), coords.end()));
+
+ // ----------------------------------------------------------------------------
+ // Init of a Cech complex from the list of points
+ // ----------------------------------------------------------------------------
+ Cech_complex cech_complex_from_points(points, 2.0);
+
+ std::cout << "========== cech_complex_from_points ==========" << std::endl;
+ Simplex_tree st;
+ const int DIMENSION = 3;
+ cech_complex_from_points.create_complex(st, DIMENSION);
+
+ // Another way to check num_simplices
+ std::cout << "Iterator on Cech complex simplices in the filtration order, with [filtration value]:" << std::endl;
+ int num_simplices = 0;
+ for (auto f_simplex : st.filtration_simplex_range()) {
+ num_simplices++;
+ std::cout << " ( ";
+ for (auto vertex : st.simplex_vertex_range(f_simplex)) {
+ std::cout << vertex << " ";
+ }
+ std::cout << ") -> "
+ << "[" << st.filtration(f_simplex) << "] ";
+ std::cout << std::endl;
+ }
+ BOOST_CHECK(num_simplices == 15);
+ std::cout << "st.num_simplices()=" << st.num_simplices() << std::endl;
+ BOOST_CHECK(st.num_simplices() == 15);
+
+ std::cout << "st.dimension()=" << st.dimension() << std::endl;
+ BOOST_CHECK(st.dimension() == DIMENSION);
+ std::cout << "st.num_vertices()=" << st.num_vertices() << std::endl;
+ BOOST_CHECK(st.num_vertices() == 4);
+
+ for (auto f_simplex : st.filtration_simplex_range()) {
+ std::cout << "dimension(" << st.dimension(f_simplex) << ") - f = " << st.filtration(f_simplex) << std::endl;
+ switch (st.dimension(f_simplex)) {
+ case 0:
+ GUDHI_TEST_FLOAT_EQUALITY_CHECK(st.filtration(f_simplex), 0.0);
+ break;
+ case 1:
+ GUDHI_TEST_FLOAT_EQUALITY_CHECK(st.filtration(f_simplex), 0.707107, .00001);
+ break;
+ case 2:
+ GUDHI_TEST_FLOAT_EQUALITY_CHECK(st.filtration(f_simplex), 0.816497, .00001);
+ break;
+ case 3:
+ GUDHI_TEST_FLOAT_EQUALITY_CHECK(st.filtration(f_simplex), 0.866025, .00001);
+ break;
+ default:
+ BOOST_CHECK(false); // Shall not happen
+ break;
+ }
+ }
+}
+
+#ifdef GUDHI_DEBUG
+BOOST_AUTO_TEST_CASE(Cech_create_complex_throw) {
+ // ----------------------------------------------------------------------------
+ //
+ // Init of a Cech complex from a OFF file
+ //
+ // ----------------------------------------------------------------------------
+ std::string off_file_name("alphacomplexdoc.off");
+ double max_radius = 12.0;
+ std::cout << "========== OFF FILE NAME = " << off_file_name << " - Cech max_radius=" << max_radius
+ << "==========" << std::endl;
+
+ Gudhi::Points_off_reader<Point> off_reader(off_file_name);
+ Cech_complex cech_complex_from_file(off_reader.get_point_cloud(), max_radius);
+
+ Simplex_tree stree;
+ std::vector<int> simplex = {0, 1, 2};
+ stree.insert_simplex_and_subfaces(simplex);
+ std::cout << "Check exception throw in debug mode" << std::endl;
+  // throws an exception because stree is not empty
+ BOOST_CHECK_THROW(cech_complex_from_file.create_complex(stree, 1), std::invalid_argument);
+}
+#endif
diff --git a/src/Cech_complex/utilities/CMakeLists.txt b/src/Cech_complex/utilities/CMakeLists.txt
new file mode 100644
index 00000000..30b99729
--- /dev/null
+++ b/src/Cech_complex/utilities/CMakeLists.txt
@@ -0,0 +1,14 @@
+cmake_minimum_required(VERSION 2.6)
+project(Cech_complex_utilities)
+
+add_executable(cech_persistence cech_persistence.cpp)
+target_link_libraries(cech_persistence ${Boost_PROGRAM_OPTIONS_LIBRARY})
+
+if (TBB_FOUND)
+ target_link_libraries(cech_persistence ${TBB_LIBRARIES})
+endif()
+
+add_test(NAME Cech_complex_utility_from_rips_on_tore_3D COMMAND $<TARGET_FILE:cech_persistence>
+ "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off" "-r" "0.25" "-m" "0.5" "-d" "3" "-p" "3")
+
+install(TARGETS cech_persistence DESTINATION bin)
diff --git a/src/Cech_complex/utilities/cech_persistence.cpp b/src/Cech_complex/utilities/cech_persistence.cpp
new file mode 100644
index 00000000..93e92695
--- /dev/null
+++ b/src/Cech_complex/utilities/cech_persistence.cpp
@@ -0,0 +1,136 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2018 Inria
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <gudhi/Cech_complex.h>
+#include <gudhi/distance_functions.h>
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Persistent_cohomology.h>
+#include <gudhi/Points_off_io.h>
+
+#include <boost/program_options.hpp>
+
+#include <string>
+#include <vector>
+#include <limits> // infinity
+
+// Types definition
+using Simplex_tree = Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_fast_persistence>;
+using Filtration_value = Simplex_tree::Filtration_value;
+using Point = std::vector<double>;
+using Point_cloud = std::vector<Point>;
+using Points_off_reader = Gudhi::Points_off_reader<Point>;
+using Cech_complex = Gudhi::cech_complex::Cech_complex<Simplex_tree, Point_cloud>;
+using Field_Zp = Gudhi::persistent_cohomology::Field_Zp;
+using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology<Simplex_tree, Field_Zp>;
+
+void program_options(int argc, char* argv[], std::string& off_file_points, std::string& filediag,
+ Filtration_value& max_radius, int& dim_max, int& p, Filtration_value& min_persistence);
+
+int main(int argc, char* argv[]) {
+ std::string off_file_points;
+ std::string filediag;
+ Filtration_value max_radius;
+ int dim_max;
+ int p;
+ Filtration_value min_persistence;
+
+ program_options(argc, argv, off_file_points, filediag, max_radius, dim_max, p, min_persistence);
+
+ Points_off_reader off_reader(off_file_points);
+ Cech_complex cech_complex_from_file(off_reader.get_point_cloud(), max_radius);
+
+ // Construct the Cech complex in a Simplex Tree
+ Simplex_tree simplex_tree;
+
+ cech_complex_from_file.create_complex(simplex_tree, dim_max);
+ std::cout << "The complex contains " << simplex_tree.num_simplices() << " simplices \n";
+ std::cout << " and has dimension " << simplex_tree.dimension() << " \n";
+
+ // Sort the simplices in the order of the filtration
+ simplex_tree.initialize_filtration();
+
+ // Compute the persistence diagram of the complex
+ Persistent_cohomology pcoh(simplex_tree);
+ // initializes the coefficient field for homology
+ pcoh.init_coefficients(p);
+
+ pcoh.compute_persistent_cohomology(min_persistence);
+
+ // Output the diagram in filediag
+ if (filediag.empty()) {
+ pcoh.output_diagram();
+ } else {
+ std::ofstream out(filediag);
+ pcoh.output_diagram(out);
+ out.close();
+ }
+
+ return 0;
+}
+
+void program_options(int argc, char* argv[], std::string& off_file_points, std::string& filediag,
+ Filtration_value& max_radius, int& dim_max, int& p, Filtration_value& min_persistence) {
+ namespace po = boost::program_options;
+ po::options_description hidden("Hidden options");
+ hidden.add_options()("input-file", po::value<std::string>(&off_file_points),
+ "Name of an OFF file containing a point set.\n");
+
+ po::options_description visible("Allowed options", 100);
+ visible.add_options()("help,h", "produce help message")(
+ "output-file,o", po::value<std::string>(&filediag)->default_value(std::string()),
+ "Name of file in which the persistence diagram is written. Default print in std::cout")(
+ "max-radius,r",
+ po::value<Filtration_value>(&max_radius)->default_value(std::numeric_limits<Filtration_value>::infinity()),
+ "Maximal length of an edge for the Cech complex construction.")(
+ "cpx-dimension,d", po::value<int>(&dim_max)->default_value(1),
+ "Maximal dimension of the Cech complex we want to compute.")(
+ "field-charac,p", po::value<int>(&p)->default_value(11),
+ "Characteristic p of the coefficient field Z/pZ for computing homology.")(
+ "min-persistence,m", po::value<Filtration_value>(&min_persistence),
+ "Minimal lifetime of homology feature to be recorded. Default is 0. Enter a negative value to see zero length "
+ "intervals");
+
+ po::positional_options_description pos;
+ pos.add("input-file", 1);
+
+ po::options_description all;
+ all.add(visible).add(hidden);
+
+ po::variables_map vm;
+ po::store(po::command_line_parser(argc, argv).options(all).positional(pos).run(), vm);
+ po::notify(vm);
+
+ if (vm.count("help") || !vm.count("input-file")) {
+ std::cout << std::endl;
+ std::cout << "Compute the persistent homology with coefficient field Z/pZ \n";
+ std::cout << "of a Cech complex defined on a set of input points.\n \n";
+ std::cout << "The output diagram contains one bar per line, written with the convention: \n";
+ std::cout << " p dim b d \n";
+ std::cout << "where dim is the dimension of the homological feature,\n";
+ std::cout << "b and d are respectively the birth and death of the feature and \n";
+ std::cout << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
+
+ std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
+ std::cout << visible << std::endl;
+ exit(-1);
+ }
+}
diff --git a/src/Cech_complex/utilities/cechcomplex.md b/src/Cech_complex/utilities/cechcomplex.md
new file mode 100644
index 00000000..f7817dbb
--- /dev/null
+++ b/src/Cech_complex/utilities/cechcomplex.md
@@ -0,0 +1,38 @@
+
+
+# Čech complex #
+
+## cech_persistence ##
+This program computes the persistent homology with coefficient field *Z/pZ* of
+a Čech complex defined on a set of input points, using Euclidean distance. The
+output diagram contains one bar per line, written with the convention:
+
+`p dim birth death`
+
+where `dim` is the dimension of the homological feature, `birth` and `death`
+are respectively the birth and death of the feature, and `p` is the
+characteristic of the field *Z/pZ* used for homology coefficients (`p` must be
+a prime number).
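+
+For instance, with `-p 3` a line `3 1 0.25 0.68` would denote a 1-dimensional feature (a cycle)
+born at filtration value 0.25 and dying at 0.68, computed with *Z/3Z* coefficients (these
+numbers are purely illustrative).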
+
+**Usage**
+
+`cech_persistence [options] <OFF input file>`
+
+**Allowed options**
+
+* `-h [ --help ]` Produce help message
+* `-o [ --output-file ]` Name of file in which the persistence diagram is written. Default print in standard output.
+* `-r [ --max-radius ]` (default = inf) Maximal radius for the Čech complex construction.
+* `-d [ --cpx-dimension ]` (default = 1) Maximal dimension of the Čech complex we want to compute.
+* `-p [ --field-charac ]` (default = 11) Characteristic p of the coefficient field Z/pZ for computing homology.
+* `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals.
+
+Beware: this program may use a lot of RAM and take a lot of time if `max-radius` is set to a large value.
+
+**Example 1 with Z/2Z coefficients**
+
+`cech_persistence ../../data/points/tore3D_1307.off -r 0.25 -m 0.5 -d 3 -p 2`
+
+**Example 2 with Z/3Z coefficients**
+
+`cech_persistence ../../data/points/tore3D_1307.off -r 0.25 -m 0.5 -d 3 -p 3`
diff --git a/src/Contraction/example/CMakeLists.txt b/src/Contraction/example/CMakeLists.txt
index a92d1685..582b7ab8 100644
--- a/src/Contraction/example/CMakeLists.txt
+++ b/src/Contraction/example/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Contraction_examples)
add_executable(RipsContraction Rips_contraction.cpp)
diff --git a/src/Doxyfile b/src/Doxyfile.in
index f1981e2e..858a9299 100644
--- a/src/Doxyfile
+++ b/src/Doxyfile.in
@@ -38,7 +38,7 @@ PROJECT_NAME = "GUDHI"
# could be handy for archiving the generated documentation or if some version
# control system is used.
-PROJECT_NUMBER = "2.1.0"
+PROJECT_NUMBER = "@GUDHI_VERSION@"
# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a
@@ -780,12 +780,12 @@ RECURSIVE = YES
# Note that relative paths are relative to the directory from which doxygen is
# run.
-EXCLUDE = data/ \
- example/ \
- GudhUI/ \
- cmake/ \
- src/cython/ \
- include/gudhi_patches/
+EXCLUDE = data/ \
+ example/ \
+ GudhUI/ \
+ cmake/ \
+ src/cython/ \
+ include/gudhi_patches/
# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or
# directories that are symbolic links (a Unix file system feature) are excluded
@@ -818,9 +818,9 @@ EXCLUDE_SYMBOLS =
# that contain example code fragments that are included (see the \include
# command).
-EXAMPLE_PATH = biblio/ \
- example/ \
- utilities/
+EXAMPLE_PATH = biblio/ \
+ example/ \
+ utilities/
# If the value of the EXAMPLE_PATH tag contains directories, you can use the
# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and
@@ -840,21 +840,7 @@ EXAMPLE_RECURSIVE = NO
# that contain images that are to be included in the documentation (see the
# \image command).
-IMAGE_PATH = doc/Skeleton_blocker/ \
- doc/Alpha_complex/ \
- doc/common/ \
- doc/Contraction/ \
- doc/Simplex_tree/ \
- doc/Persistent_cohomology/ \
- doc/Witness_complex/ \
- doc/Bitmap_cubical_complex/ \
- doc/Rips_complex/ \
- doc/Subsampling/ \
- doc/Spatial_searching/ \
- doc/Tangential_complex/ \
- doc/Bottleneck_distance/ \
- doc/Nerve_GIC/ \
- doc/Persistence_representations/
+IMAGE_PATH = @GUDHI_DOXYGEN_IMAGE_PATH@
# The INPUT_FILTER tag can be used to specify a program that doxygen should
# invoke to filter for each input file. Doxygen will invoke the filter program
diff --git a/src/GUDHIConfig.cmake.in b/src/GUDHIConfig.cmake.in
index 02b540dc..8d82f235 100644
--- a/src/GUDHIConfig.cmake.in
+++ b/src/GUDHIConfig.cmake.in
@@ -1,7 +1,12 @@
# - Config file for the GUDHI package
# It defines the following variables
# GUDHI_INCLUDE_DIRS - include directories for GUDHI
+#
+# The order is:
+# 1. user-defined GUDHI_INCLUDE_DIRS
+# 2. ${CMAKE_SOURCE_DIR}/include => where 'cmake' was run
+# 3. ${CMAKE_INSTALL_PREFIX}/include => where 'make install' was run
# Compute paths
-set(GUDHI_INCLUDE_DIRS "@CONF_INCLUDE_DIRS@")
+set(GUDHI_INCLUDE_DIRS "${GUDHI_INCLUDE_DIRS};@CONF_INCLUDE_DIRS@")
diff --git a/src/GudhUI/CMakeLists.txt b/src/GudhUI/CMakeLists.txt
index 2503a03e..b357b8f7 100644
--- a/src/GudhUI/CMakeLists.txt
+++ b/src/GudhUI/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.8)
project(GudhUI)
# Need to find OpenGL first as find_package(Qt5) tries to #include"GL/gl.h" on some platforms
@@ -38,4 +37,4 @@ if (OPENGL_FOUND)
install(TARGETS GudhUI DESTINATION bin)
endif()
-endif(OPENGL_FOUND) \ No newline at end of file
+endif(OPENGL_FOUND)
diff --git a/src/Nerve_GIC/example/CMakeLists.txt b/src/Nerve_GIC/example/CMakeLists.txt
index 542c6af4..fdecf86e 100644
--- a/src/Nerve_GIC/example/CMakeLists.txt
+++ b/src/Nerve_GIC/example/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Nerve_GIC_examples)
if (NOT CGAL_VERSION VERSION_LESS 4.8.1)
@@ -17,11 +16,11 @@ if (NOT CGAL_VERSION VERSION_LESS 4.8.1)
file(COPY "${CMAKE_SOURCE_DIR}/data/points/COIL_database/lucky_cat_PCA1" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
add_test(NAME Nerve_GIC_example_CoordGIC COMMAND $<TARGET_FILE:CoordGIC>
- "tore3D_1307.off" "0")
+ "${CMAKE_CURRENT_BINARY_DIR}/tore3D_1307.off" "0")
add_test(NAME Nerve_GIC_example_FuncGIC COMMAND $<TARGET_FILE:FuncGIC>
- "lucky_cat.off"
- "lucky_cat_PCA1")
+ "${CMAKE_CURRENT_BINARY_DIR}/lucky_cat.off"
+ "${CMAKE_CURRENT_BINARY_DIR}/lucky_cat_PCA1")
install(TARGETS CoordGIC DESTINATION bin)
install(TARGETS FuncGIC DESTINATION bin)
diff --git a/src/Nerve_GIC/example/CoordGIC.cpp b/src/Nerve_GIC/example/CoordGIC.cpp
index 73edae18..9889b198 100644
--- a/src/Nerve_GIC/example/CoordGIC.cpp
+++ b/src/Nerve_GIC/example/CoordGIC.cpp
@@ -79,10 +79,10 @@ int main(int argc, char **argv) {
// --------------------------------------------
if (verb) {
- std::cout << "Functional GIC is of dimension " << stree.dimension() << " - " << stree.num_simplices()
+ std::cout << "Coordinate GIC is of dimension " << stree.dimension() << " - " << stree.num_simplices()
<< " simplices - " << stree.num_vertices() << " vertices." << std::endl;
- std::cout << "Iterator on functional GIC simplices" << std::endl;
+ std::cout << "Iterator on coordinate GIC simplices" << std::endl;
for (auto f_simplex : stree.filtration_simplex_range()) {
for (auto vertex : stree.simplex_vertex_range(f_simplex)) {
std::cout << vertex << " ";
diff --git a/src/Nerve_GIC/include/gudhi/GIC.h b/src/Nerve_GIC/include/gudhi/GIC.h
index 8cd7bdbf..2c37dfae 100644
--- a/src/Nerve_GIC/include/gudhi/GIC.h
+++ b/src/Nerve_GIC/include/gudhi/GIC.h
@@ -25,7 +25,6 @@
#ifdef GUDHI_USE_TBB
#include <tbb/parallel_for.h>
-#include <tbb/task_scheduler_init.h>
#include <tbb/mutex.h>
#endif
@@ -63,7 +62,6 @@ namespace Gudhi {
namespace cover_complex {
using Simplex_tree = Gudhi::Simplex_tree<>;
-using Simplex_handle = Simplex_tree::Simplex_handle;
using Filtration_value = Simplex_tree::Filtration_value;
using Rips_complex = Gudhi::rips_complex::Rips_complex<Filtration_value>;
using Persistence_diagram = std::vector<std::pair<double, double> >;
@@ -150,10 +148,20 @@ class Cover_complex {
for (boost::tie(ei, ei_end) = boost::edges(G); ei != ei_end; ++ei) boost::remove_edge(*ei, G);
}
+  // thread_local is not available on XCode versions older than 8.
+  // If it is not available, the random engine is a class member.
+#ifndef GUDHI_CAN_USE_CXX11_THREAD_LOCAL
+ std::default_random_engine re;
+#endif // GUDHI_CAN_USE_CXX11_THREAD_LOCAL
+
// Find random number in [0,1].
double GetUniform() {
+    // thread_local is not available on XCode versions older than 8.
+    // If it is available, the random engine is defined once per thread.
+#ifdef GUDHI_CAN_USE_CXX11_THREAD_LOCAL
thread_local std::default_random_engine re;
- thread_local std::uniform_real_distribution<double> Dist(0, 1);
+#endif // GUDHI_CAN_USE_CXX11_THREAD_LOCAL
+ std::uniform_real_distribution<double> Dist(0, 1);
return Dist(re);
}
@@ -218,7 +226,25 @@ class Cover_complex {
void set_mask(int nodemask) { mask = nodemask; }
public:
- /** \brief Reads and stores the input point cloud.
+
+
+ /** \brief Reads and stores the input point cloud from vector stored in memory.
+ *
+   * @param[in] cloud input range representing the point cloud. Each element is a point, given as a range of coordinates.
+ *
+ */
+ template <class InputRange>
+ void set_point_cloud_from_range(InputRange const & cloud) {
+ n = cloud.size(); data_dimension = cloud[0].size(); point_cloud_name = "matrix";
+ for(int i = 0; i < n; i++){
+ point_cloud.emplace_back(cloud[i].begin(), cloud[i].begin() + data_dimension);
+ boost::add_vertex(one_skeleton_OFF);
+ vertices.push_back(boost::add_vertex(one_skeleton));
+ cover.emplace_back();
+ }
+ }
+
+ /** \brief Reads and stores the input point cloud from .(n)OFF file.
*
* @param[in] off_file_name name of the input .OFF or .nOFF file.
*
@@ -269,7 +295,7 @@ class Cover_complex {
point.assign(std::istream_iterator<double>(iss), std::istream_iterator<double>());
point_cloud.emplace_back(point.begin(), point.begin() + data_dimension);
boost::add_vertex(one_skeleton_OFF);
- vertices.push_back(boost::add_vertex(one_skeleton)); cover.emplace_back();
+ vertices.push_back(boost::add_vertex(one_skeleton));
cover.emplace_back();
i++;
}
@@ -416,17 +442,20 @@ class Cover_complex {
template <typename Distance>
double set_graph_from_automatic_rips(Distance distance, int N = 100) {
int m = floor(n / std::exp((1 + rate_power) * std::log(std::log(n) / std::log(rate_constant))));
- m = std::min(m, n - 1); double delta = 0;
+ m = std::min(m, n - 1);
+ double delta = 0;
if (verbose) std::cout << n << " points in R^" << data_dimension << std::endl;
if (verbose) std::cout << "Subsampling " << m << " points" << std::endl;
if (distances.size() == 0) compute_pairwise_distances(distance);
- #ifdef GUDHI_USE_TBB
- tbb::mutex deltamutex;
- tbb::parallel_for(0, N, [&](int i){
- std::vector<int> samples(m);
+  // This cannot be parallelized if thread_local is not available.
+  // thread_local is not available on XCode versions older than 8.
+ #if defined(GUDHI_USE_TBB) && defined(GUDHI_CAN_USE_CXX11_THREAD_LOCAL)
+ tbb::mutex deltamutex;
+ tbb::parallel_for(0, N, [&](int i){
+ std::vector<int> samples(m);
SampleWithoutReplacement(n, m, samples);
double hausdorff_dist = 0;
for (int j = 0; j < n; j++) {
@@ -434,13 +463,13 @@ class Cover_complex {
for (int k = 1; k < m; k++) mj = std::min(mj, distances[j][samples[k]]);
hausdorff_dist = std::max(hausdorff_dist, mj);
}
- deltamutex.lock();
+ deltamutex.lock();
delta += hausdorff_dist / N;
- deltamutex.unlock();
+ deltamutex.unlock();
});
#else
for (int i = 0; i < N; i++) {
- std::vector<int> samples(m);
+ std::vector<int> samples(m);
SampleWithoutReplacement(n, m, samples);
double hausdorff_dist = 0;
for (int j = 0; j < n; j++) {
@@ -490,8 +519,6 @@ class Cover_complex {
*/
void set_function_from_coordinate(int k) {
for (int i = 0; i < n; i++) func.push_back(point_cloud[i][k]);
- char coordinate[100];
- sprintf(coordinate, "coordinate %d", k);
functional_cover = true;
cover_name = "coordinate " + std::to_string(k);
}
@@ -723,11 +750,13 @@ class Cover_complex {
}
#ifdef GUDHI_USE_TBB
- if (verbose) std::cout << "Computing connected components (parallelized)..." << std::endl; tbb::mutex covermutex, idmutex;
+ if (verbose) std::cout << "Computing connected components (parallelized)..." << std::endl;
+ tbb::mutex covermutex, idmutex;
tbb::parallel_for(0, res, [&](int i){
// Compute connected components
Graph G = one_skeleton.create_subgraph();
- int num = preimages[i].size(); std::vector<int> component(num);
+ int num = preimages[i].size();
+ std::vector<int> component(num);
for (int j = 0; j < num; j++) boost::add_vertex(index[vertices[preimages[i][j]]], G);
boost::connected_components(G, &component[0]);
int max = 0;
@@ -741,20 +770,20 @@ class Cover_complex {
int identifier = ((i + component[j])*(i + component[j]) + 3 * i + component[j]) / 2;
// Update covers
- covermutex.lock();
+ covermutex.lock();
cover[preimages[i][j]].push_back(identifier);
cover_back[identifier].push_back(preimages[i][j]);
cover_fct[identifier] = i;
cover_std[identifier] = funcstd[i];
cover_color[identifier].second += func_color[preimages[i][j]];
cover_color[identifier].first += 1;
- covermutex.unlock();
+ covermutex.unlock();
}
// Maximal dimension is total number of connected components
- idmutex.lock();
+ idmutex.lock();
id += max + 1;
- idmutex.unlock();
+ idmutex.unlock();
});
#else
if (verbose) std::cout << "Computing connected components..." << std::endl;
@@ -849,7 +878,6 @@ class Cover_complex {
std::vector<double> mindist(n);
for (int j = 0; j < n; j++) mindist[j] = std::numeric_limits<double>::max();
-
// Compute the geodesic distances to subsamples with Dijkstra
#ifdef GUDHI_USE_TBB
if (verbose) std::cout << "Computing geodesic distances (parallelized)..." << std::endl;
@@ -954,7 +982,7 @@ class Cover_complex {
* @param[in] color input vector of values.
*
*/
- void set_color_from_vector(std::vector<double> color) {
+ void set_color_from_range(std::vector<double> color) {
for (unsigned int i = 0; i < color.size(); i++) func_color.push_back(color[i]);
}
@@ -1098,7 +1126,7 @@ class Cover_complex {
/** \brief Computes the extended persistence diagram of the complex.
*
*/
- void compute_PD() {
+ Persistence_diagram compute_PD() {
Simplex_tree st;
// Compute max and min
@@ -1118,9 +1146,8 @@ class Cover_complex {
for (std::map<int, double>::iterator it = cover_std.begin(); it != cover_std.end(); it++) {
int vertex = it->first; float val = it->second;
int vert[] = {vertex}; int edge[] = {vertex, -2};
- Simplex_handle shv = st.find(vert); Simplex_handle she = st.find(edge);
- if(shv != st.null_simplex()) st.assign_filtration(shv, -2 + (val - minf)/(maxf - minf));
- if(she != st.null_simplex()) st.assign_filtration(she, 2 - (val - minf)/(maxf - minf));
+ st.assign_filtration(st.find(vert), -2 + (val - minf)/(maxf - minf));
+ st.assign_filtration(st.find(edge), 2 - (val - minf)/(maxf - minf));
}
st.make_filtration_non_decreasing();
@@ -1150,6 +1177,7 @@ class Cover_complex {
if (verbose) std::cout << " [" << birth << ", " << death << "]" << std::endl;
}
}
+ return PD;
}
public:
@@ -1175,7 +1203,7 @@ class Cover_complex {
Cboot.point_cloud.push_back(this->point_cloud[id]); Cboot.cover.emplace_back(); Cboot.func.push_back(this->func[id]);
boost::add_vertex(Cboot.one_skeleton_OFF); Cboot.vertices.push_back(boost::add_vertex(Cboot.one_skeleton));
}
- Cboot.set_color_from_vector(Cboot.func);
+ Cboot.set_color_from_range(Cboot.func);
for (int j = 0; j < n; j++) {
std::vector<double> dist(n);
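The GIC.h changes above add `set_point_cloud_from_range()`, rename `set_color_from_vector()` to `set_color_from_range()`, and make `compute_PD()` return the diagram instead of only printing it. A minimal usage sketch, based solely on the signatures visible in this patch (the cover and simplicial-complex construction steps are deliberately elided), could look like this:

    // Hedged sketch, not a complete Nerve/GIC pipeline: it only exercises the
    // members touched by this patch.
    #include <gudhi/GIC.h>

    #include <utility>
    #include <vector>

    int main() {
      using Point = std::vector<double>;
      std::vector<Point> cloud = {{0., 0.}, {1., 0.}, {0., 1.}, {1., 1.}};

      Gudhi::cover_complex::Cover_complex<Point> cc;
      cc.set_point_cloud_from_range(cloud);  // new: feed points from memory instead of an .OFF file
      cc.set_function_from_coordinate(0);    // filter = first coordinate
      cc.set_color_from_range(std::vector<double>{0., 1., 0., 1.});  // renamed from set_color_from_vector

      // ... cover and simplicial complex construction omitted ...

      // compute_PD() now returns the extended persistence diagram
      // (a std::vector<std::pair<double, double> >) instead of only printing it:
      // std::vector<std::pair<double, double> > diagram = cc.compute_PD();
      return 0;
    }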
diff --git a/src/Nerve_GIC/test/CMakeLists.txt b/src/Nerve_GIC/test/CMakeLists.txt
index c35cdff7..99263ea0 100644
--- a/src/Nerve_GIC/test/CMakeLists.txt
+++ b/src/Nerve_GIC/test/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Graph_induced_complex_tests)
if (NOT CGAL_VERSION VERSION_LESS 4.8.1)
diff --git a/src/Nerve_GIC/utilities/CMakeLists.txt b/src/Nerve_GIC/utilities/CMakeLists.txt
index 7a838a8c..215f9dfd 100644
--- a/src/Nerve_GIC/utilities/CMakeLists.txt
+++ b/src/Nerve_GIC/utilities/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Nerve_GIC_examples)
if (NOT CGAL_VERSION VERSION_LESS 4.8.1)
diff --git a/src/Persistence_representations/example/CMakeLists.txt b/src/Persistence_representations/example/CMakeLists.txt
index 54d719ac..33558df3 100644
--- a/src/Persistence_representations/example/CMakeLists.txt
+++ b/src/Persistence_representations/example/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Persistence_representations_example)
add_executable ( Persistence_representations_example_landscape_on_grid persistence_landscape_on_grid.cpp )
diff --git a/src/Persistence_representations/include/gudhi/Persistence_landscape.h b/src/Persistence_representations/include/gudhi/Persistence_landscape.h
index 4381a55b..9cab0166 100644
--- a/src/Persistence_representations/include/gudhi/Persistence_landscape.h
+++ b/src/Persistence_representations/include/gudhi/Persistence_landscape.h
@@ -734,7 +734,7 @@ double Persistence_landscape::compute_integral_of_landscape(double p) const {
double Persistence_landscape::compute_value_at_a_given_point(unsigned level, double x) const {
bool compute_value_at_a_given_pointDbg = false;
// in such a case lambda_level = 0.
- if (level > this->land.size()) return 0;
+ if (level >= this->land.size()) return 0;
// we know that the points in this->land[level] are ordered according to x coordinate. Therefore, we can find the
// point by using bisection:
@@ -1235,40 +1235,43 @@ double compute_inner_product(const Persistence_landscape& l1, const Persistence_
std::cerr << "Computing inner product for a level : " << level << std::endl;
getchar();
}
- if (l1.land[level].size() * l2.land[level].size() == 0) continue;
+ auto&& l1_land_level = l1.land[level];
+ auto&& l2_land_level = l2.land[level];
+
+ if (l1_land_level.size() * l2_land_level.size() == 0) continue;
// endpoints of the interval on which we will compute the inner product of two locally linear functions:
double x1 = -std::numeric_limits<int>::max();
double x2;
- if (l1.land[level][1].first < l2.land[level][1].first) {
- x2 = l1.land[level][1].first;
+ if (l1_land_level[1].first < l2_land_level[1].first) {
+ x2 = l1_land_level[1].first;
} else {
- x2 = l2.land[level][1].first;
+ x2 = l2_land_level[1].first;
}
// iterators for the landscapes l1 and l2
size_t l1It = 0;
size_t l2It = 0;
- while ((l1It < l1.land[level].size() - 1) && (l2It < l2.land[level].size() - 1)) {
+ while ((l1It < l1_land_level.size() - 1) && (l2It < l2_land_level.size() - 1)) {
     // compute the value of an inner product on an interval [x1,x2]
double a, b, c, d;
- if (l1.land[level][l1It + 1].first != l1.land[level][l1It].first) {
- a = (l1.land[level][l1It + 1].second - l1.land[level][l1It].second) /
- (l1.land[level][l1It + 1].first - l1.land[level][l1It].first);
+ if (l1_land_level[l1It + 1].first != l1_land_level[l1It].first) {
+ a = (l1_land_level[l1It + 1].second - l1_land_level[l1It].second) /
+ (l1_land_level[l1It + 1].first - l1_land_level[l1It].first);
} else {
a = 0;
}
- b = l1.land[level][l1It].second - a * l1.land[level][l1It].first;
- if (l2.land[level][l2It + 1].first != l2.land[level][l2It].first) {
- c = (l2.land[level][l2It + 1].second - l2.land[level][l2It].second) /
- (l2.land[level][l2It + 1].first - l2.land[level][l2It].first);
+ b = l1_land_level[l1It].second - a * l1_land_level[l1It].first;
+ if (l2_land_level[l2It + 1].first != l2_land_level[l2It].first) {
+ c = (l2_land_level[l2It + 1].second - l2_land_level[l2It].second) /
+ (l2_land_level[l2It + 1].first - l2_land_level[l2It].first);
} else {
c = 0;
}
- d = l2.land[level][l2It].second - c * l2.land[level][l2It].first;
+ d = l2_land_level[l2It].second - c * l2_land_level[l2It].first;
double contributionFromThisPart = (a * c * x2 * x2 * x2 / 3 + (a * d + b * c) * x2 * x2 / 2 + b * d * x2) -
(a * c * x1 * x1 * x1 / 3 + (a * d + b * c) * x1 * x1 / 2 + b * d * x1);
@@ -1276,10 +1279,10 @@ double compute_inner_product(const Persistence_landscape& l1, const Persistence_
result += contributionFromThisPart;
if (dbg) {
- std::cerr << "[l1.land[level][l1It].first,l1.land[level][l1It+1].first] : " << l1.land[level][l1It].first
- << " , " << l1.land[level][l1It + 1].first << std::endl;
- std::cerr << "[l2.land[level][l2It].first,l2.land[level][l2It+1].first] : " << l2.land[level][l2It].first
- << " , " << l2.land[level][l2It + 1].first << std::endl;
+ std::cerr << "[l1_land_level[l1It].first,l1_land_level[l1It+1].first] : " << l1_land_level[l1It].first
+ << " , " << l1_land_level[l1It + 1].first << std::endl;
+ std::cerr << "[l2_land_level[l2It].first,l2_land_level[l2It+1].first] : " << l2_land_level[l2It].first
+ << " , " << l2_land_level[l2It + 1].first << std::endl;
std::cerr << "a : " << a << ", b : " << b << " , c: " << c << ", d : " << d << std::endl;
std::cerr << "x1 : " << x1 << " , x2 : " << x2 << std::endl;
std::cerr << "contributionFromThisPart : " << contributionFromThisPart << std::endl;
@@ -1288,14 +1291,14 @@ double compute_inner_product(const Persistence_landscape& l1, const Persistence_
}
// we have two intervals in which functions are constant:
- // [l1.land[level][l1It].first , l1.land[level][l1It+1].first]
+ // [l1_land_level[l1It].first , l1_land_level[l1It+1].first]
// and
- // [l2.land[level][l2It].first , l2.land[level][l2It+1].first]
+ // [l2_land_level[l2It].first , l2_land_level[l2It+1].first]
// We also have an interval [x1,x2]. Since the intervals in the landscapes cover the whole R, then it is clear
// that x2
- // is either l1.land[level][l1It+1].first of l2.land[level][l2It+1].first or both. Lets test it.
- if (x2 == l1.land[level][l1It + 1].first) {
- if (x2 == l2.land[level][l2It + 1].first) {
+    // is either l1_land_level[l1It+1].first or l2_land_level[l2It+1].first or both. Let's test it.
+ if (x2 == l1_land_level[l1It + 1].first) {
+ if (x2 == l2_land_level[l2It + 1].first) {
// in this case, we increment both:
++l2It;
if (dbg) {
@@ -1314,12 +1317,16 @@ double compute_inner_product(const Persistence_landscape& l1, const Persistence_
std::cerr << "Incrementing second \n";
}
}
+
+ if ( l1It + 1 >= l1_land_level.size() )break;
+ if ( l2It + 1 >= l2_land_level.size() )break;
+
// Now, we shift x1 and x2:
x1 = x2;
- if (l1.land[level][l1It + 1].first < l2.land[level][l2It + 1].first) {
- x2 = l1.land[level][l1It + 1].first;
+ if (l1_land_level[l1It + 1].first < l2_land_level[l2It + 1].first) {
+ x2 = l1_land_level[l1It + 1].first;
} else {
- x2 = l2.land[level][l2It + 1].first;
+ x2 = l2_land_level[l2It + 1].first;
}
}
}
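For reference, the closed form behind `contributionFromThisPart` in the hunk above: on each interval [x1, x2] both landscapes are affine, l1(x) = a*x + b and l2(x) = c*x + d, so

    \int_{x_1}^{x_2} (a x + b)(c x + d)\, dx
      = \left[ \frac{a c}{3} x^3 + \frac{a d + b c}{2} x^2 + b d\, x \right]_{x_1}^{x_2}

which is exactly the bracketed expression evaluated at x2 minus the same expression evaluated at x1 in the code.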
diff --git a/src/Persistence_representations/include/gudhi/read_persistence_from_file.h b/src/Persistence_representations/include/gudhi/read_persistence_from_file.h
index e0fc7107..4a2b9d68 100644
--- a/src/Persistence_representations/include/gudhi/read_persistence_from_file.h
+++ b/src/Persistence_representations/include/gudhi/read_persistence_from_file.h
@@ -57,7 +57,7 @@ std::vector<std::pair<double, double> > read_persistence_intervals_in_one_dimens
std::string line;
std::vector<std::pair<double, double> > barcode_initial =
- read_persistence_intervals_in_dimension(filename, (int)dimension);
+ read_persistence_intervals_in_dimension(filename, static_cast<int>(dimension));
std::vector<std::pair<double, double> > final_barcode;
final_barcode.reserve(barcode_initial.size());
@@ -92,8 +92,8 @@ std::vector<std::pair<double, double> > read_persistence_intervals_in_one_dimens
if ((barcode_initial[i].second == std::numeric_limits<double>::infinity()) &&
(what_to_substitute_for_infinite_bar != -1)) {
- if (barcode_initial[i].first < what_to_substitute_for_infinite_bar) // if only birth < death.
- {
+ if (barcode_initial[i].first < what_to_substitute_for_infinite_bar) {
+ // if only birth < death.
final_barcode.push_back(
std::pair<double, double>(barcode_initial[i].first, what_to_substitute_for_infinite_bar));
}
diff --git a/src/Persistence_representations/test/CMakeLists.txt b/src/Persistence_representations/test/CMakeLists.txt
index 335a71ef..5e2b6910 100644
--- a/src/Persistence_representations/test/CMakeLists.txt
+++ b/src/Persistence_representations/test/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Persistence_representations_test)
include(GUDHI_test_coverage)
diff --git a/src/Persistence_representations/utilities/persistence_heat_maps/CMakeLists.txt b/src/Persistence_representations/utilities/persistence_heat_maps/CMakeLists.txt
index 386e9fa5..89ef232f 100644
--- a/src/Persistence_representations/utilities/persistence_heat_maps/CMakeLists.txt
+++ b/src/Persistence_representations/utilities/persistence_heat_maps/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Persistence_representations_heat_maps_utilities)
add_persistence_representation_creation_utility(create_pssk "10" "-1" "-1" "4" "-1")
diff --git a/src/Persistence_representations/utilities/persistence_intervals/CMakeLists.txt b/src/Persistence_representations/utilities/persistence_intervals/CMakeLists.txt
index 875ff45e..649b72cb 100644
--- a/src/Persistence_representations/utilities/persistence_intervals/CMakeLists.txt
+++ b/src/Persistence_representations/utilities/persistence_intervals/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Persistence_representations_intervals_utilities)
diff --git a/src/Persistence_representations/utilities/persistence_landscapes/CMakeLists.txt b/src/Persistence_representations/utilities/persistence_landscapes/CMakeLists.txt
index d7087ed8..6b24d032 100644
--- a/src/Persistence_representations/utilities/persistence_landscapes/CMakeLists.txt
+++ b/src/Persistence_representations/utilities/persistence_landscapes/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Persistence_representations_landscapes_utilities)
add_persistence_representation_creation_utility(create_landscapes "-1")
diff --git a/src/Persistence_representations/utilities/persistence_landscapes_on_grid/CMakeLists.txt b/src/Persistence_representations/utilities/persistence_landscapes_on_grid/CMakeLists.txt
index c5ea4bbf..36f3196b 100644
--- a/src/Persistence_representations/utilities/persistence_landscapes_on_grid/CMakeLists.txt
+++ b/src/Persistence_representations/utilities/persistence_landscapes_on_grid/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Persistence_representations_lanscapes_on_grid_utilities)
# Need to set grid min and max for further average, distance and scalar_product
diff --git a/src/Persistence_representations/utilities/persistence_vectors/CMakeLists.txt b/src/Persistence_representations/utilities/persistence_vectors/CMakeLists.txt
index a401c955..bc982094 100644
--- a/src/Persistence_representations/utilities/persistence_vectors/CMakeLists.txt
+++ b/src/Persistence_representations/utilities/persistence_vectors/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Persistence_vectors_utilities)
add_persistence_representation_creation_utility(create_persistence_vectors "-1")
diff --git a/src/Persistent_cohomology/benchmark/CMakeLists.txt b/src/Persistent_cohomology/benchmark/CMakeLists.txt
index 8b135ba1..2bb3b0c7 100644
--- a/src/Persistent_cohomology/benchmark/CMakeLists.txt
+++ b/src/Persistent_cohomology/benchmark/CMakeLists.txt
@@ -1,6 +1,4 @@
-cmake_minimum_required(VERSION 2.6)
project(Persistent_cohomology_benchmark)
-
if(GMP_FOUND)
if(GMPXX_FOUND)
diff --git a/src/Persistent_cohomology/concept/FilteredComplex.h b/src/Persistent_cohomology/concept/FilteredComplex.h
index 178503c9..62b9002f 100644
--- a/src/Persistent_cohomology/concept/FilteredComplex.h
+++ b/src/Persistent_cohomology/concept/FilteredComplex.h
@@ -29,10 +29,6 @@ struct FilteredComplex
{
/** Handle to specify a simplex. */
typedef unspecified Simplex_handle;
-/** \brief Key associated to each simplex.
- *
- * Must be an integer type. */
- typedef unspecified Simplex_key;
/** \brief Type for the value of the filtration function.
*
* Must be comparable with <. */
@@ -58,20 +54,10 @@ struct FilteredComplex
* filtration function on the complex. */
Filtration_value filtration(Simplex_handle sh);
-/** \brief Returns a key that is different from the keys associated
- * to the simplices. */
- Simplex_key null_key ();
-/** \brief Returns the key associated to a simplex.
- *
- * This is never called on null_simplex(). */
- Simplex_key key ( Simplex_handle sh );
/** \brief Returns the simplex that has index idx in the filtration.
*
* This is only called on valid indices. */
Simplex_handle simplex ( size_t idx );
-/** \brief Assign a key to a simplex. */
- void assign_key(Simplex_handle sh, Simplex_key key);
-
/** \brief Iterator on the simplices belonging to the
* boundary of a simplex.
*
@@ -115,6 +101,26 @@ typedef unspecified Filtration_simplex_range;
* .begin() and .end() return type Filtration_simplex_iterator.*/
Filtration_simplex_range filtration_simplex_range();
+/** \name Map interface
+ * Conceptually a `std::unordered_map<Simplex_handle,std::size_t>`.
+ * @{ */
+/** \brief Data stored for each simplex.
+ *
+ * Must be an integer type. */
+ typedef unspecified Simplex_key;
+/** \brief Returns a constant dummy number that is either negative,
+ * or at least as large as `num_simplices()`. Suggested value: -1. */
+ Simplex_key null_key ();
+/** \brief Returns the number stored for a simplex by `assign_key`.
+ *
+ * This is never called on null_simplex(). */
+ Simplex_key key ( Simplex_handle sh );
+/** \brief Store a number for a simplex, which can later be retrieved with `key(sh)`.
+ *
+ * This is never called on null_simplex(). */
+ void assign_key(Simplex_handle sh, Simplex_key n);
+/** @} */
+
/* \brief Iterator over the simplices of the complex,
* in an arbitrary order.
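The new "Map interface" block in the FilteredComplex concept above is, conceptually, an `std::unordered_map<Simplex_handle, std::size_t>` exposed through `null_key()`, `key()` and `assign_key()`. A toy model (not GUDHI code; the int handles are purely illustrative) that satisfies it:

    #include <cstddef>
    #include <unordered_map>

    struct ToyFilteredComplex {
      using Simplex_handle = int;          // illustrative handle type
      using Simplex_key = std::ptrdiff_t;  // "must be an integer type"

      // "a constant dummy number that is either negative, or at least as large
      //  as num_simplices(). Suggested value: -1."
      Simplex_key null_key() const { return -1; }

      // Returns the number stored for a simplex by assign_key().
      Simplex_key key(Simplex_handle sh) const {
        auto it = keys_.find(sh);
        return it == keys_.end() ? null_key() : it->second;
      }

      // Store a number for a simplex, retrievable later with key(sh).
      void assign_key(Simplex_handle sh, Simplex_key n) { keys_[sh] = n; }

     private:
      std::unordered_map<Simplex_handle, Simplex_key> keys_;
    };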
diff --git a/src/Persistent_cohomology/example/CMakeLists.txt b/src/Persistent_cohomology/example/CMakeLists.txt
index 18e2913b..0f731519 100644
--- a/src/Persistent_cohomology/example/CMakeLists.txt
+++ b/src/Persistent_cohomology/example/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Persistent_cohomology_examples)
add_executable(plain_homology plain_homology.cpp)
diff --git a/src/Persistent_cohomology/example/persistence_from_file.cpp b/src/Persistent_cohomology/example/persistence_from_file.cpp
index c40434a4..53456919 100644
--- a/src/Persistent_cohomology/example/persistence_from_file.cpp
+++ b/src/Persistent_cohomology/example/persistence_from_file.cpp
@@ -138,6 +138,6 @@ void program_options(int argc, char * argv[]
std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
std::cout << visible << std::endl;
- std::abort();
+ exit(-1);
}
}
diff --git a/src/Persistent_cohomology/example/rips_multifield_persistence.cpp b/src/Persistent_cohomology/example/rips_multifield_persistence.cpp
index 626ec2ef..d6a5bdad 100644
--- a/src/Persistent_cohomology/example/rips_multifield_persistence.cpp
+++ b/src/Persistent_cohomology/example/rips_multifield_persistence.cpp
@@ -149,6 +149,6 @@ void program_options(int argc, char * argv[]
std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
std::cout << visible << std::endl;
- std::abort();
+ exit(-1);
}
}
diff --git a/src/Persistent_cohomology/example/rips_persistence_step_by_step.cpp b/src/Persistent_cohomology/example/rips_persistence_step_by_step.cpp
index 7c81fcfb..796cfa3a 100644
--- a/src/Persistent_cohomology/example/rips_persistence_step_by_step.cpp
+++ b/src/Persistent_cohomology/example/rips_persistence_step_by_step.cpp
@@ -161,6 +161,6 @@ void program_options(int argc, char * argv[]
std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
std::cout << visible << std::endl;
- std::abort();
+ exit(-1);
}
}
diff --git a/src/Persistent_cohomology/example/rips_persistence_via_boundary_matrix.cpp b/src/Persistent_cohomology/example/rips_persistence_via_boundary_matrix.cpp
index c7607dce..71fc0802 100644
--- a/src/Persistent_cohomology/example/rips_persistence_via_boundary_matrix.cpp
+++ b/src/Persistent_cohomology/example/rips_persistence_via_boundary_matrix.cpp
@@ -167,6 +167,6 @@ void program_options(int argc, char * argv[]
std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
std::cout << visible << std::endl;
- std::abort();
+ exit(-1);
}
}
diff --git a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h
index c68b5c0b..c51e47a5 100644
--- a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h
+++ b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h
@@ -300,7 +300,10 @@ class Persistent_cohomology {
// with multiplicity. We used to sum the coefficients directly in
// annotations_in_boundary by using a map, we now do it later.
typedef std::pair<Column *, int> annotation_t;
- thread_local std::vector<annotation_t> annotations_in_boundary;
+#ifdef GUDHI_CAN_USE_CXX11_THREAD_LOCAL
+ thread_local
+#endif // GUDHI_CAN_USE_CXX11_THREAD_LOCAL
+ std::vector<annotation_t> annotations_in_boundary;
annotations_in_boundary.clear();
int sign = 1 - 2 * (dim_sigma % 2); // \in {-1,1} provides the sign in the
// alternate sum in the boundary.
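The guard above makes `annotations_in_boundary` `thread_local` only when the compiler supports it (older XCode releases do not); otherwise it degrades gracefully to an ordinary local vector that is re-created on every call. A generic sketch of this "optionally thread_local scratch buffer" pattern, reusing the same macro but with otherwise illustrative code:

    #include <iostream>
    #include <vector>

    // With thread_local, each thread keeps one buffer alive between calls, so
    // clear() preserves capacity and avoids repeated allocations. Without it,
    // the buffer is a plain local and clear() on a fresh vector is a no-op.
    long sum_of_squares(const std::vector<int>& input) {
    #ifdef GUDHI_CAN_USE_CXX11_THREAD_LOCAL
      thread_local
    #endif
      std::vector<long> scratch;
      scratch.clear();

      for (int v : input) scratch.push_back(static_cast<long>(v) * v);
      long total = 0;
      for (long s : scratch) total += s;
      return total;
    }

    int main() {
      std::cout << sum_of_squares({1, 2, 3}) << "\n";  // prints 14
      return 0;
    }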
diff --git a/src/Persistent_cohomology/test/CMakeLists.txt b/src/Persistent_cohomology/test/CMakeLists.txt
index 45f53eb9..f8baf861 100644
--- a/src/Persistent_cohomology/test/CMakeLists.txt
+++ b/src/Persistent_cohomology/test/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Persistent_cohomology_tests)
include(GUDHI_test_coverage)
diff --git a/src/Rips_complex/example/CMakeLists.txt b/src/Rips_complex/example/CMakeLists.txt
index af86636b..e7772bdb 100644
--- a/src/Rips_complex/example/CMakeLists.txt
+++ b/src/Rips_complex/example/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Rips_complex_examples)
# Point cloud
diff --git a/src/Rips_complex/example/example_one_skeleton_rips_from_correlation_matrix.cpp b/src/Rips_complex/example/example_one_skeleton_rips_from_correlation_matrix.cpp
index 1343f24d..05bacb9f 100644
--- a/src/Rips_complex/example/example_one_skeleton_rips_from_correlation_matrix.cpp
+++ b/src/Rips_complex/example/example_one_skeleton_rips_from_correlation_matrix.cpp
@@ -15,11 +15,11 @@ int main() {
using Distance_matrix = std::vector<std::vector<Filtration_value>>;
// User defined correlation matrix is:
- // |1 0.06 0.23 0.01 0.89|
- // |0.06 1 0.74 0.01 0.61|
- // |0.23 0.74 1 0.72 0.03|
- // |0.01 0.01 0.72 1 0.7 |
- // |0.89 0.61 0.03 0.7 1 |
+ // |1 0.06 0.23 0.01 0.89|
+ // |0.06 1 0.74 0.01 0.61|
+ // |0.23 0.74 1 0.72 0.03|
+ // |0.01 0.01 0.72 1 0.7 |
+ // |0.89 0.61 0.03 0.7 1 |
Distance_matrix correlations;
correlations.push_back({});
diff --git a/src/Rips_complex/include/gudhi/Sparse_rips_complex.h b/src/Rips_complex/include/gudhi/Sparse_rips_complex.h
index 19a44b28..4dcc08ed 100644
--- a/src/Rips_complex/include/gudhi/Sparse_rips_complex.h
+++ b/src/Rips_complex/include/gudhi/Sparse_rips_complex.h
@@ -54,7 +54,7 @@ namespace rips_complex {
template <typename Filtration_value>
class Sparse_rips_complex {
private:
- // TODO: use a different graph where we know we can safely insert in parallel.
+ // TODO(MG): use a different graph where we know we can safely insert in parallel.
typedef typename boost::adjacency_list<boost::vecS, boost::vecS, boost::undirectedS,
boost::property<vertex_filtration_t, Filtration_value>,
boost::property<edge_filtration_t, Filtration_value>>
@@ -140,7 +140,7 @@ class Sparse_rips_complex {
put(vertex_filtration_t(), graph_, v, 0);
}
- // TODO:
+ // TODO(MG):
// - make it parallel
// - only test near-enough neighbors
for (int i = 0; i < n; ++i)
diff --git a/src/Rips_complex/test/CMakeLists.txt b/src/Rips_complex/test/CMakeLists.txt
index 3da9c90d..745d953c 100644
--- a/src/Rips_complex/test/CMakeLists.txt
+++ b/src/Rips_complex/test/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Rips_complex_tests)
include(GUDHI_test_coverage)
diff --git a/src/Rips_complex/utilities/CMakeLists.txt b/src/Rips_complex/utilities/CMakeLists.txt
index deb73ff0..4b565628 100644
--- a/src/Rips_complex/utilities/CMakeLists.txt
+++ b/src/Rips_complex/utilities/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Rips_complex_utilities)
add_executable(rips_distance_matrix_persistence rips_distance_matrix_persistence.cpp)
diff --git a/src/Rips_complex/utilities/rips_correlation_matrix_persistence.cpp b/src/Rips_complex/utilities/rips_correlation_matrix_persistence.cpp
index d4671b45..287e8915 100644
--- a/src/Rips_complex/utilities/rips_correlation_matrix_persistence.cpp
+++ b/src/Rips_complex/utilities/rips_correlation_matrix_persistence.cpp
@@ -31,6 +31,7 @@
#include <string>
#include <vector>
#include <limits> // infinity
+#include <algorithm> // for sort
// Types definition
using Simplex_tree = Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_fast_persistence>;
@@ -165,6 +166,6 @@ void program_options(int argc, char* argv[], std::string& csv_matrix_file, std::
std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
std::cout << visible << std::endl;
- std::abort();
+ exit(-1);
}
}
diff --git a/src/Rips_complex/utilities/rips_distance_matrix_persistence.cpp b/src/Rips_complex/utilities/rips_distance_matrix_persistence.cpp
index 53191ca7..c73152cf 100644
--- a/src/Rips_complex/utilities/rips_distance_matrix_persistence.cpp
+++ b/src/Rips_complex/utilities/rips_distance_matrix_persistence.cpp
@@ -128,6 +128,6 @@ void program_options(int argc, char* argv[], std::string& csv_matrix_file, std::
std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
std::cout << visible << std::endl;
- std::abort();
+ exit(-1);
}
}
diff --git a/src/Rips_complex/utilities/rips_persistence.cpp b/src/Rips_complex/utilities/rips_persistence.cpp
index 7cee927e..9410b9c2 100644
--- a/src/Rips_complex/utilities/rips_persistence.cpp
+++ b/src/Rips_complex/utilities/rips_persistence.cpp
@@ -130,6 +130,6 @@ void program_options(int argc, char* argv[], std::string& off_file_points, std::
std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
std::cout << visible << std::endl;
- std::abort();
+ exit(-1);
}
}
diff --git a/src/Rips_complex/utilities/sparse_rips_persistence.cpp b/src/Rips_complex/utilities/sparse_rips_persistence.cpp
index bcd5c2c5..6d4d86fd 100644
--- a/src/Rips_complex/utilities/sparse_rips_persistence.cpp
+++ b/src/Rips_complex/utilities/sparse_rips_persistence.cpp
@@ -128,6 +128,6 @@ void program_options(int argc, char* argv[], std::string& off_file_points, std::
std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
std::cout << visible << std::endl;
- std::abort();
+ exit(-1);
}
}
diff --git a/src/Simplex_tree/example/CMakeLists.txt b/src/Simplex_tree/example/CMakeLists.txt
index b33b2d05..857e8518 100644
--- a/src/Simplex_tree/example/CMakeLists.txt
+++ b/src/Simplex_tree/example/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Simplex_tree_examples)
add_executable ( Simplex_tree_example_from_cliques_of_graph simplex_tree_from_cliques_of_graph.cpp )
diff --git a/src/Simplex_tree/example/cech_complex_cgal_mini_sphere_3d.cpp b/src/Simplex_tree/example/cech_complex_cgal_mini_sphere_3d.cpp
index 08ed74bb..34092ef6 100644
--- a/src/Simplex_tree/example/cech_complex_cgal_mini_sphere_3d.cpp
+++ b/src/Simplex_tree/example/cech_complex_cgal_mini_sphere_3d.cpp
@@ -171,7 +171,7 @@ void program_options(int argc, char* argv[], std::string& off_file_points, Filtr
std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
std::cout << visible << std::endl;
- std::abort();
+ exit(-1);
}
}
diff --git a/src/Simplex_tree/example/graph_expansion_with_blocker.cpp b/src/Simplex_tree/example/graph_expansion_with_blocker.cpp
index f675e353..f39de31f 100644
--- a/src/Simplex_tree/example/graph_expansion_with_blocker.cpp
+++ b/src/Simplex_tree/example/graph_expansion_with_blocker.cpp
@@ -29,33 +29,33 @@ using Simplex_handle = Simplex_tree::Simplex_handle;
int main(int argc, char* const argv[]) {
// Construct the Simplex Tree with a 1-skeleton graph example
- Simplex_tree simplexTree;
+ Simplex_tree stree;
- simplexTree.insert_simplex({0, 1}, 0.);
- simplexTree.insert_simplex({0, 2}, 1.);
- simplexTree.insert_simplex({0, 3}, 2.);
- simplexTree.insert_simplex({1, 2}, 3.);
- simplexTree.insert_simplex({1, 3}, 4.);
- simplexTree.insert_simplex({2, 3}, 5.);
- simplexTree.insert_simplex({2, 4}, 6.);
- simplexTree.insert_simplex({3, 6}, 7.);
- simplexTree.insert_simplex({4, 5}, 8.);
- simplexTree.insert_simplex({4, 6}, 9.);
- simplexTree.insert_simplex({5, 6}, 10.);
- simplexTree.insert_simplex({6}, 10.);
+ stree.insert_simplex({0, 1}, 0.);
+ stree.insert_simplex({0, 2}, 1.);
+ stree.insert_simplex({0, 3}, 2.);
+ stree.insert_simplex({1, 2}, 3.);
+ stree.insert_simplex({1, 3}, 4.);
+ stree.insert_simplex({2, 3}, 5.);
+ stree.insert_simplex({2, 4}, 6.);
+ stree.insert_simplex({3, 6}, 7.);
+ stree.insert_simplex({4, 5}, 8.);
+ stree.insert_simplex({4, 6}, 9.);
+ stree.insert_simplex({5, 6}, 10.);
+ stree.insert_simplex({6}, 10.);
- simplexTree.expansion_with_blockers(3, [&](Simplex_handle sh) {
+ stree.expansion_with_blockers(3, [&](Simplex_handle sh) {
bool result = false;
std::cout << "Blocker on [";
// User can loop on the vertices from the given simplex_handle i.e.
- for (auto vertex : simplexTree.simplex_vertex_range(sh)) {
+ for (auto vertex : stree.simplex_vertex_range(sh)) {
// We block the expansion, if the vertex '6' is in the given list of vertices
if (vertex == 6) result = true;
std::cout << vertex << ", ";
}
- std::cout << "] ( " << simplexTree.filtration(sh);
+ std::cout << "] ( " << stree.filtration(sh);
     // User can re-assign a new filtration value directly in the blocker (default is the maximal value of boundaries)
- simplexTree.assign_filtration(sh, simplexTree.filtration(sh) + 1.);
+ stree.assign_filtration(sh, stree.filtration(sh) + 1.);
std::cout << " + 1. ) = " << result << std::endl;
@@ -63,13 +63,13 @@ int main(int argc, char* const argv[]) {
});
std::cout << "********************************************************************\n";
- std::cout << "* The complex contains " << simplexTree.num_simplices() << " simplices";
- std::cout << " - dimension " << simplexTree.dimension() << "\n";
+ std::cout << "* The complex contains " << stree.num_simplices() << " simplices";
+ std::cout << " - dimension " << stree.dimension() << "\n";
std::cout << "* Iterator on Simplices in the filtration, with [filtration value]:\n";
- for (auto f_simplex : simplexTree.filtration_simplex_range()) {
+ for (auto f_simplex : stree.filtration_simplex_range()) {
std::cout << " "
- << "[" << simplexTree.filtration(f_simplex) << "] ";
- for (auto vertex : simplexTree.simplex_vertex_range(f_simplex)) std::cout << "(" << vertex << ")";
+ << "[" << stree.filtration(f_simplex) << "] ";
+ for (auto vertex : stree.simplex_vertex_range(f_simplex)) std::cout << "(" << vertex << ")";
std::cout << std::endl;
}
diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree.h b/src/Simplex_tree/include/gudhi/Simplex_tree.h
index 5d4ea30c..ee96d5a2 100644
--- a/src/Simplex_tree/include/gudhi/Simplex_tree.h
+++ b/src/Simplex_tree/include/gudhi/Simplex_tree.h
@@ -689,7 +689,11 @@ class Simplex_tree {
return { null_simplex(), true }; // ----->>
// Copy before sorting
- thread_local std::vector<Vertex_handle> copy;
+ // Thread local is not available on XCode version < V.8 - It will slow down computation
+#ifdef GUDHI_CAN_USE_CXX11_THREAD_LOCAL
+ thread_local
+#endif // GUDHI_CAN_USE_CXX11_THREAD_LOCAL
+ std::vector<Vertex_handle> copy;
copy.clear();
copy.insert(copy.end(), first, last);
std::sort(std::begin(copy), std::end(copy));
@@ -1238,9 +1242,8 @@ class Simplex_tree {
}
public:
- /** \brief Browse the simplex tree to ensure the filtration is not decreasing.
- * The simplex tree is browsed starting from the root until the leaf, and the filtration values are set with their
- * parent value (increased), in case the values are decreasing.
+  /** \brief This function ensures that each simplex has a filtration value at least as large as that of each of its
+   * faces, by increasing the filtration values.
* @return The filtration modification information.
* \post Some simplex tree functions require the filtration to be valid. `make_filtration_non_decreasing()`
* function is not launching `initialize_filtration()` but returns the filtration modification information. If the
diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_iterators.h b/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_iterators.h
index 335bac1e..02c8bb64 100644
--- a/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_iterators.h
+++ b/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_iterators.h
@@ -101,7 +101,9 @@ class Simplex_tree_boundary_simplex_iterator : public boost::iterator_facade<
// any end() iterator
explicit Simplex_tree_boundary_simplex_iterator(SimplexTree * st)
- : sib_(nullptr),
+ : last_(st->null_vertex()),
+ next_(st->null_vertex()),
+ sib_(nullptr),
sh_(st->null_simplex()),
st_(st) {
}
@@ -109,7 +111,9 @@ class Simplex_tree_boundary_simplex_iterator : public boost::iterator_facade<
template<class SimplexHandle>
Simplex_tree_boundary_simplex_iterator(SimplexTree * st, SimplexHandle sh)
: last_(sh->first),
+ next_(st->null_vertex()),
sib_(nullptr),
+ sh_(st->null_simplex()),
st_(st) {
// Only check once at the beginning instead of for every increment, as this is expensive.
if (SimplexTree::Options::contiguous_vertices)
@@ -123,9 +127,7 @@ class Simplex_tree_boundary_simplex_iterator : public boost::iterator_facade<
sh_ = sib_->members_.begin()+next_;
else
sh_ = sib_->find(next_);
- } else {
- sh_ = st->null_simplex();
- } // vertex: == end()
+ }
}
private:
diff --git a/src/Simplex_tree/test/CMakeLists.txt b/src/Simplex_tree/test/CMakeLists.txt
index 8684ad2a..c63d8532 100644
--- a/src/Simplex_tree/test/CMakeLists.txt
+++ b/src/Simplex_tree/test/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Simplex_tree_tests)
include(GUDHI_test_coverage)
diff --git a/src/Skeleton_blocker/example/CMakeLists.txt b/src/Skeleton_blocker/example/CMakeLists.txt
index de70f089..0e5d2f11 100644
--- a/src/Skeleton_blocker/example/CMakeLists.txt
+++ b/src/Skeleton_blocker/example/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Skeleton_blocker_examples)
add_executable(Skeleton_blocker_example_from_simplices Skeleton_blocker_from_simplices.cpp)
diff --git a/src/Skeleton_blocker/test/CMakeLists.txt b/src/Skeleton_blocker/test/CMakeLists.txt
index 4a363294..19c65871 100644
--- a/src/Skeleton_blocker/test/CMakeLists.txt
+++ b/src/Skeleton_blocker/test/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Skeleton_blocker_tests)
include(GUDHI_test_coverage)
diff --git a/src/Spatial_searching/example/CMakeLists.txt b/src/Spatial_searching/example/CMakeLists.txt
index 4cf3d863..0f799987 100644
--- a/src/Spatial_searching/example/CMakeLists.txt
+++ b/src/Spatial_searching/example/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Spatial_searching_examples)
if(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
diff --git a/src/Spatial_searching/test/CMakeLists.txt b/src/Spatial_searching/test/CMakeLists.txt
index b9da7b4e..b60ab1e3 100644
--- a/src/Spatial_searching/test/CMakeLists.txt
+++ b/src/Spatial_searching/test/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Spatial_searching_tests)
if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
diff --git a/src/Subsampling/example/CMakeLists.txt b/src/Subsampling/example/CMakeLists.txt
index 34400b1e..f26d107f 100644
--- a/src/Subsampling/example/CMakeLists.txt
+++ b/src/Subsampling/example/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Subsampling_examples)
if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
diff --git a/src/Subsampling/test/CMakeLists.txt b/src/Subsampling/test/CMakeLists.txt
index dbf97db3..924f0925 100644
--- a/src/Subsampling/test/CMakeLists.txt
+++ b/src/Subsampling/test/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Subsampling_tests)
if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
diff --git a/src/Tangential_complex/benchmark/CMakeLists.txt b/src/Tangential_complex/benchmark/CMakeLists.txt
index 8729e394..f136ab27 100644
--- a/src/Tangential_complex/benchmark/CMakeLists.txt
+++ b/src/Tangential_complex/benchmark/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Tangential_complex_benchmark)
if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
diff --git a/src/Tangential_complex/example/CMakeLists.txt b/src/Tangential_complex/example/CMakeLists.txt
index 16d1339d..af0dac51 100644
--- a/src/Tangential_complex/example/CMakeLists.txt
+++ b/src/Tangential_complex/example/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Tangential_complex_examples)
if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
diff --git a/src/Tangential_complex/include/gudhi/Tangential_complex.h b/src/Tangential_complex/include/gudhi/Tangential_complex.h
index d8356520..d1c846cf 100644
--- a/src/Tangential_complex/include/gudhi/Tangential_complex.h
+++ b/src/Tangential_complex/include/gudhi/Tangential_complex.h
@@ -83,16 +83,11 @@ using namespace internal;
class Vertex_data {
public:
- Vertex_data(std::size_t data = (std::numeric_limits<std::size_t>::max)())
- : m_data(data) { }
+ Vertex_data(std::size_t data = (std::numeric_limits<std::size_t>::max)()) : m_data(data) {}
- operator std::size_t() {
- return m_data;
- }
+ operator std::size_t() { return m_data; }
- operator std::size_t() const {
- return m_data;
- }
+ operator std::size_t() const { return m_data; }
private:
std::size_t m_data;
@@ -101,9 +96,9 @@ class Vertex_data {
/**
* \class Tangential_complex Tangential_complex.h gudhi/Tangential_complex.h
* \brief Tangential complex data structure.
- *
+ *
* \ingroup tangential_complex
- *
+ *
* \details
* The class Tangential_complex represents a tangential complex.
* After the computation of the complex, an optional post-processing called perturbation can
@@ -118,17 +113,14 @@ class Vertex_data {
* or <a target="_blank"
* href="http://doc.cgal.org/latest/Kernel_23/classCGAL_1_1Dynamic__dimension__tag.html">CGAL::Dynamic_dimension_tag</a>
* if you don't.
- * \tparam Concurrency_tag enables sequential versus parallel computation. Possible values are `CGAL::Parallel_tag` (the default) and `CGAL::Sequential_tag`.
- * \tparam Triangulation_ is the type used for storing the local regular triangulations. We highly recommend to use the default value (`CGAL::Regular_triangulation`).
+ * \tparam Concurrency_tag enables sequential versus parallel computation. Possible values are `CGAL::Parallel_tag` (the
+ * default) and `CGAL::Sequential_tag`. \tparam Triangulation_ is the type used for storing the local regular
+ * triangulations. We highly recommend using the default value (`CGAL::Regular_triangulation`).
*
*/
-template
-<
- typename Kernel_, // ambiant kernel
- typename DimensionTag, // intrinsic dimension
- typename Concurrency_tag = CGAL::Parallel_tag,
- typename Triangulation_ = CGAL::Default
->
+template <typename Kernel_,  // ambient kernel
+ typename DimensionTag, // intrinsic dimension
+ typename Concurrency_tag = CGAL::Parallel_tag, typename Triangulation_ = CGAL::Default>
class Tangential_complex {
typedef Kernel_ K;
typedef typename K::FT FT;
@@ -136,23 +128,16 @@ class Tangential_complex {
typedef typename K::Weighted_point_d Weighted_point;
typedef typename K::Vector_d Vector;
- typedef typename CGAL::Default::Get
- <
- Triangulation_,
- CGAL::Regular_triangulation
- <
- CGAL::Epick_d<DimensionTag>,
- CGAL::Triangulation_data_structure
- <
- typename CGAL::Epick_d<DimensionTag>::Dimension,
- CGAL::Triangulation_vertex
- <
- CGAL::Regular_triangulation_traits_adapter< CGAL::Epick_d<DimensionTag> >, Vertex_data
- >,
- CGAL::Triangulation_full_cell<CGAL::Regular_triangulation_traits_adapter< CGAL::Epick_d<DimensionTag> > >
- >
- >
- >::type Triangulation;
+ typedef typename CGAL::Default::Get<
+ Triangulation_,
+ CGAL::Regular_triangulation<
+ CGAL::Epick_d<DimensionTag>,
+ CGAL::Triangulation_data_structure<
+ typename CGAL::Epick_d<DimensionTag>::Dimension,
+ CGAL::Triangulation_vertex<CGAL::Regular_triangulation_traits_adapter<CGAL::Epick_d<DimensionTag> >,
+ Vertex_data>,
+ CGAL::Triangulation_full_cell<
+ CGAL::Regular_triangulation_traits_adapter<CGAL::Epick_d<DimensionTag> > > > > >::type Triangulation;
typedef typename Triangulation::Geom_traits Tr_traits;
typedef typename Triangulation::Weighted_point Tr_point;
typedef typename Tr_traits::Base::Point_d Tr_bare_point;
@@ -174,17 +159,13 @@ class Tangential_complex {
struct Tr_and_VH {
public:
- Tr_and_VH()
- : m_tr(NULL) { }
+ Tr_and_VH() : m_tr(NULL) {}
- Tr_and_VH(int dim)
- : m_tr(new Triangulation(dim)) { }
+ Tr_and_VH(int dim) : m_tr(new Triangulation(dim)) {}
- ~Tr_and_VH() {
- destroy_triangulation();
- }
+ ~Tr_and_VH() { destroy_triangulation(); }
- Triangulation & construct_triangulation(int dim) {
+ Triangulation &construct_triangulation(int dim) {
delete m_tr;
m_tr = new Triangulation(dim);
return tr();
@@ -195,24 +176,16 @@ class Tangential_complex {
m_tr = NULL;
}
- Triangulation & tr() {
- return *m_tr;
- }
+ Triangulation &tr() { return *m_tr; }
- Triangulation const& tr() const {
- return *m_tr;
- }
+ Triangulation const &tr() const { return *m_tr; }
- Tr_vertex_handle const& center_vertex() const {
- return m_center_vertex;
- }
+ Tr_vertex_handle const &center_vertex() const { return m_center_vertex; }
- Tr_vertex_handle & center_vertex() {
- return m_center_vertex;
- }
+ Tr_vertex_handle &center_vertex() { return m_center_vertex; }
private:
- Triangulation* m_tr;
+ Triangulation *m_tr;
Tr_vertex_handle m_center_vertex;
};
@@ -243,9 +216,7 @@ class Tangential_complex {
// For transform_iterator
- static const Tr_point &vertex_handle_to_point(Tr_vertex_handle vh) {
- return vh->point();
- }
+ static const Tr_point &vertex_handle_to_point(Tr_vertex_handle vh) { return vh->point(); }
template <typename P, typename VH>
static const P &vertex_handle_to_point(VH vh) {
@@ -265,111 +236,97 @@ class Tangential_complex {
* @param[in] k Kernel instance.
*/
template <typename Point_range>
- Tangential_complex(Point_range points,
- int intrinsic_dimension,
+ Tangential_complex(Point_range points, int intrinsic_dimension,
#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM
- InputIterator first_for_tse, InputIterator last_for_tse,
+ InputIterator first_for_tse, InputIterator last_for_tse,
#endif
- const K &k = K()
- )
+ const K &k = K())
: m_k(k),
- m_intrinsic_dim(intrinsic_dimension),
- m_ambient_dim(points.empty() ? 0 : k.point_dimension_d_object()(*points.begin())),
- m_points(points.begin(), points.end()),
- m_weights(m_points.size(), FT(0))
+ m_intrinsic_dim(intrinsic_dimension),
+ m_ambient_dim(points.empty() ? 0 : k.point_dimension_d_object()(*points.begin())),
+ m_points(points.begin(), points.end()),
+ m_weights(m_points.size(), FT(0))
#if defined(GUDHI_USE_TBB) && defined(GUDHI_TC_PERTURB_POSITION)
- , m_p_perturb_mutexes(NULL)
-#endif
- , m_points_ds(m_points)
- , m_last_max_perturb(0.)
- , m_are_tangent_spaces_computed(m_points.size(), false)
- , m_tangent_spaces(m_points.size(), Tangent_space_basis())
+ ,
+ m_p_perturb_mutexes(NULL)
+#endif
+ ,
+ m_points_ds(m_points),
+ m_last_max_perturb(0.),
+ m_are_tangent_spaces_computed(m_points.size(), false),
+ m_tangent_spaces(m_points.size(), Tangent_space_basis())
#ifdef GUDHI_TC_EXPORT_NORMALS
- , m_orth_spaces(m_points.size(), Orthogonal_space_basis())
+ ,
+ m_orth_spaces(m_points.size(), Orthogonal_space_basis())
#endif
#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM
- , m_points_for_tse(first_for_tse, last_for_tse)
- , m_points_ds_for_tse(m_points_for_tse)
+ ,
+ m_points_for_tse(first_for_tse, last_for_tse),
+ m_points_ds_for_tse(m_points_for_tse)
#endif
- { }
+ {
+ }
/// Destructor
~Tangential_complex() {
#if defined(GUDHI_USE_TBB) && defined(GUDHI_TC_PERTURB_POSITION)
- delete [] m_p_perturb_mutexes;
+ delete[] m_p_perturb_mutexes;
#endif
}
/// Returns the intrinsic dimension of the manifold.
- int intrinsic_dimension() const {
- return m_intrinsic_dim;
- }
+ int intrinsic_dimension() const { return m_intrinsic_dim; }
/// Returns the ambient dimension.
- int ambient_dimension() const {
- return m_ambient_dim;
- }
+ int ambient_dimension() const { return m_ambient_dim; }
- Points const& points() const {
- return m_points;
- }
+ Points const &points() const { return m_points; }
/** \brief Returns the point corresponding to the vertex given as parameter.
*
* @param[in] vertex Vertex handle of the point to retrieve.
* @return The point found.
*/
- Point get_point(std::size_t vertex) const {
- return m_points[vertex];
- }
+ Point get_point(std::size_t vertex) const { return m_points[vertex]; }
/** \brief Returns the perturbed position of the point corresponding to the vertex given as parameter.
*
* @param[in] vertex Vertex handle of the point to retrieve.
* @return The perturbed position of the point found.
*/
- Point get_perturbed_point(std::size_t vertex) const {
- return compute_perturbed_point(vertex);
- }
+ Point get_perturbed_point(std::size_t vertex) const { return compute_perturbed_point(vertex); }
/// Returns the number of vertices.
- std::size_t number_of_vertices() const {
- return m_points.size();
- }
+ std::size_t number_of_vertices() const { return m_points.size(); }
- void set_weights(const Weights& weights) {
- m_weights = weights;
- }
+ void set_weights(const Weights &weights) { m_weights = weights; }
- void set_tangent_planes(const TS_container& tangent_spaces
+ void set_tangent_planes(const TS_container &tangent_spaces
#ifdef GUDHI_TC_EXPORT_NORMALS
- , const OS_container& orthogonal_spaces
+ ,
+ const OS_container &orthogonal_spaces
#endif
- ) {
+ ) {
#ifdef GUDHI_TC_EXPORT_NORMALS
- GUDHI_CHECK(
- m_points.size() == tangent_spaces.size()
- && m_points.size() == orthogonal_spaces.size(),
+ GUDHI_CHECK(m_points.size() == tangent_spaces.size() && m_points.size() == orthogonal_spaces.size(),
std::logic_error("Wrong sizes"));
#else
- GUDHI_CHECK(
- m_points.size() == tangent_spaces.size(),
- std::logic_error("Wrong sizes"));
+ GUDHI_CHECK(m_points.size() == tangent_spaces.size(), std::logic_error("Wrong sizes"));
#endif
m_tangent_spaces = tangent_spaces;
#ifdef GUDHI_TC_EXPORT_NORMALS
m_orth_spaces = orthogonal_spaces;
#endif
- for (std::size_t i = 0; i < m_points.size(); ++i)
- m_are_tangent_spaces_computed[i] = true;
+ for (std::size_t i = 0; i < m_points.size(); ++i) m_are_tangent_spaces_computed[i] = true;
}
/// Computes the tangential complex.
void compute_tangential_complex() {
#ifdef GUDHI_TC_PERFORM_EXTRA_CHECKS
std::cerr << red << "WARNING: GUDHI_TC_PERFORM_EXTRA_CHECKS is defined. "
- << "Computation might be slower than usual.\n" << white;
+ << "Computation might be slower than usual.\n"
+ << white;
#endif
#if defined(GUDHI_TC_PROFILING) && defined(GUDHI_USE_TBB)
@@ -386,10 +343,9 @@ class Tangential_complex {
if (m_points.empty())
m_translations.clear();
else
- m_translations.resize(m_points.size(),
- m_k.construct_vector_d_object()(m_ambient_dim));
+ m_translations.resize(m_points.size(), m_k.construct_vector_d_object()(m_ambient_dim));
#if defined(GUDHI_USE_TBB)
- delete [] m_p_perturb_mutexes;
+ delete[] m_p_perturb_mutexes;
m_p_perturb_mutexes = new Mutex_for_perturb[m_points.size()];
#endif
#endif
@@ -397,21 +353,18 @@ class Tangential_complex {
#ifdef GUDHI_USE_TBB
// Parallel
if (boost::is_convertible<Concurrency_tag, CGAL::Parallel_tag>::value) {
- tbb::parallel_for(tbb::blocked_range<size_t>(0, m_points.size()),
- Compute_tangent_triangulation(*this));
+ tbb::parallel_for(tbb::blocked_range<size_t>(0, m_points.size()), Compute_tangent_triangulation(*this));
} else {
#endif // GUDHI_USE_TBB
// Sequential
- for (std::size_t i = 0; i < m_points.size(); ++i)
- compute_tangent_triangulation(i);
+ for (std::size_t i = 0; i < m_points.size(); ++i) compute_tangent_triangulation(i);
#ifdef GUDHI_USE_TBB
}
#endif // GUDHI_USE_TBB
#if defined(GUDHI_TC_PROFILING) && defined(GUDHI_USE_TBB)
t.end();
- std::cerr << "Tangential complex computed in " << t.num_seconds()
- << " seconds.\n";
+ std::cerr << "Tangential complex computed in " << t.num_seconds() << " seconds.\n";
#endif
}
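// Illustrative usage sketch of the interface touched above (construction, computation of the
// complex, inconsistency fixing, export to a Simplex_tree). It is modeled on the GUDHI
// Tangential_complex example; the template parameters, the sphere generator and the
// 0.05 / 30 s perturbation budget are assumptions made for the sketch, not values taken from this patch.
#include <gudhi/Tangential_complex.h>
#include <gudhi/Simplex_tree.h>
#include <CGAL/Epick_d.h>
#include <CGAL/point_generators_d.h>
#include <iostream>
#include <vector>

int main() {
  using Kernel = CGAL::Epick_d<CGAL::Dynamic_dimension_tag>;
  using Point = Kernel::Point_d;
  using TC = Gudhi::tangential_complex::Tangential_complex<Kernel, CGAL::Dynamic_dimension_tag,
                                                           CGAL::Parallel_tag>;
  const int intrinsic_dim = 2, ambient_dim = 3;

  // Sample points on a 2-sphere embedded in R^3.
  CGAL::Random_points_on_sphere_d<Point> generator(ambient_dim, 3.);
  std::vector<Point> points;
  for (int i = 0; i < 300; ++i) points.push_back(*generator++);

  TC tc(points, intrinsic_dim, Kernel());
  tc.compute_tangential_complex();                       // one local star per input point
  std::cout << "Inconsistent stars: "
            << tc.number_of_inconsistent_simplices().num_inconsistent_stars << "\n";
  tc.fix_inconsistencies_using_perturbation(0.05, 30.);  // max perturbation, time limit (s)

  Gudhi::Simplex_tree<> stree;
  tc.create_complex(stree);                              // export the stars into a Simplex_tree
  std::cout << stree.num_simplices() << " simplices exported\n";
  return 0;
}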
@@ -437,14 +390,12 @@ class Tangential_complex {
Fix_inconsistencies_info fix_inconsistencies_using_perturbation(double max_perturb, double time_limit = -1.) {
Fix_inconsistencies_info info;
- if (time_limit == 0.)
- return info;
+ if (time_limit == 0.) return info;
Gudhi::Clock t;
#ifdef GUDHI_TC_SHOW_DETAILED_STATS_FOR_INCONSISTENCIES
- std::tuple<std::size_t, std::size_t, std::size_t> stats_before =
- number_of_inconsistent_simplices(false);
+ std::tuple<std::size_t, std::size_t, std::size_t> stats_before = number_of_inconsistent_simplices(false);
if (std::get<1>(stats_before) == 0) {
#ifdef DEBUG_TRACES
@@ -462,22 +413,17 @@ class Tangential_complex {
info.num_steps = 0;
while (!done) {
#ifdef GUDHI_TC_SHOW_DETAILED_STATS_FOR_INCONSISTENCIES
- std::cerr
- << "\nBefore fix step:\n"
- << " * Total number of simplices in stars (incl. duplicates): "
- << std::get<0>(stats_before) << "\n"
- << " * Num inconsistent simplices in stars (incl. duplicates): "
- << red << std::get<1>(stats_before) << white << " ("
- << 100. * std::get<1>(stats_before) / std::get<0>(stats_before) << "%)\n"
- << " * Number of stars containing inconsistent simplices: "
- << red << std::get<2>(stats_before) << white << " ("
- << 100. * std::get<2>(stats_before) / m_points.size() << "%)\n";
+ std::cerr << "\nBefore fix step:\n"
+ << " * Total number of simplices in stars (incl. duplicates): " << std::get<0>(stats_before) << "\n"
+ << " * Num inconsistent simplices in stars (incl. duplicates): " << red << std::get<1>(stats_before)
+ << white << " (" << 100. * std::get<1>(stats_before) / std::get<0>(stats_before) << "%)\n"
+ << " * Number of stars containing inconsistent simplices: " << red << std::get<2>(stats_before)
+ << white << " (" << 100. * std::get<2>(stats_before) / m_points.size() << "%)\n";
#endif
#if defined(DEBUG_TRACES) || defined(GUDHI_TC_PROFILING)
- std::cerr << yellow
- << "\nAttempt to fix inconsistencies using perturbations - step #"
- << info.num_steps + 1 << "... " << white;
+ std::cerr << yellow << "\nAttempt to fix inconsistencies using perturbations - step #" << info.num_steps + 1
+ << "... " << white;
#endif
std::size_t num_inconsistent_stars = 0;
@@ -492,29 +438,24 @@ class Tangential_complex {
if (boost::is_convertible<Concurrency_tag, CGAL::Parallel_tag>::value) {
tbb::combinable<std::size_t> num_inconsistencies;
tbb::combinable<std::vector<std::size_t> > tls_updated_points;
- tbb::parallel_for(
- tbb::blocked_range<size_t>(0, m_triangulations.size()),
- Try_to_solve_inconsistencies_in_a_local_triangulation(*this, max_perturb,
- num_inconsistencies,
+ tbb::parallel_for(tbb::blocked_range<size_t>(0, m_triangulations.size()),
+ Try_to_solve_inconsistencies_in_a_local_triangulation(*this, max_perturb, num_inconsistencies,
tls_updated_points));
- num_inconsistent_stars =
- num_inconsistencies.combine(std::plus<std::size_t>());
- updated_points = tls_updated_points.combine(
- [](std::vector<std::size_t> const& x,
- std::vector<std::size_t> const& y) {
- std::vector<std::size_t> res;
- res.reserve(x.size() + y.size());
- res.insert(res.end(), x.begin(), x.end());
- res.insert(res.end(), y.begin(), y.end());
- return res;
- });
+ num_inconsistent_stars = num_inconsistencies.combine(std::plus<std::size_t>());
+ updated_points =
+ tls_updated_points.combine([](std::vector<std::size_t> const &x, std::vector<std::size_t> const &y) {
+ std::vector<std::size_t> res;
+ res.reserve(x.size() + y.size());
+ res.insert(res.end(), x.begin(), x.end());
+ res.insert(res.end(), y.begin(), y.end());
+ return res;
+ });
} else {
#endif // GUDHI_USE_TBB
// Sequential
for (std::size_t i = 0; i < m_triangulations.size(); ++i) {
num_inconsistent_stars +=
- try_to_solve_inconsistencies_in_a_local_triangulation(i, max_perturb,
- std::back_inserter(updated_points));
+ try_to_solve_inconsistencies_in_a_local_triangulation(i, max_perturb, std::back_inserter(updated_points));
}
#if defined(GUDHI_USE_TBB)
}
@@ -525,57 +466,44 @@ class Tangential_complex {
#endif
#if defined(GUDHI_TC_SHOW_DETAILED_STATS_FOR_INCONSISTENCIES) || defined(DEBUG_TRACES)
- std::cerr
- << "\nEncountered during fix:\n"
- << " * Num stars containing inconsistent simplices: "
- << red << num_inconsistent_stars << white
- << " (" << 100. * num_inconsistent_stars / m_points.size() << "%)\n";
+ std::cerr << "\nEncountered during fix:\n"
+ << " * Num stars containing inconsistent simplices: " << red << num_inconsistent_stars << white << " ("
+ << 100. * num_inconsistent_stars / m_points.size() << "%)\n";
#endif
#ifdef GUDHI_TC_PROFILING
- std::cerr << yellow << "done in " << t_fix_step.num_seconds()
- << " seconds.\n" << white;
+ std::cerr << yellow << "done in " << t_fix_step.num_seconds() << " seconds.\n" << white;
#elif defined(DEBUG_TRACES)
std::cerr << yellow << "done.\n" << white;
#endif
- if (num_inconsistent_stars > 0)
- refresh_tangential_complex(updated_points);
+ if (num_inconsistent_stars > 0) refresh_tangential_complex(updated_points);
#ifdef GUDHI_TC_PERFORM_EXTRA_CHECKS
// Confirm that all stars were actually refreshed
- std::size_t num_inc_1 =
- std::get<1>(number_of_inconsistent_simplices(false));
+ std::size_t num_inc_1 = std::get<1>(number_of_inconsistent_simplices(false));
refresh_tangential_complex();
- std::size_t num_inc_2 =
- std::get<1>(number_of_inconsistent_simplices(false));
+ std::size_t num_inc_2 = std::get<1>(number_of_inconsistent_simplices(false));
if (num_inc_1 != num_inc_2)
- std::cerr << red << "REFRESHMENT CHECK: FAILED. ("
- << num_inc_1 << " vs " << num_inc_2 << ")\n" << white;
+ std::cerr << red << "REFRESHMENT CHECK: FAILED. (" << num_inc_1 << " vs " << num_inc_2 << ")\n" << white;
else
std::cerr << green << "REFRESHMENT CHECK: PASSED.\n" << white;
#endif
#ifdef GUDHI_TC_SHOW_DETAILED_STATS_FOR_INCONSISTENCIES
- std::tuple<std::size_t, std::size_t, std::size_t> stats_after =
- number_of_inconsistent_simplices(false);
-
- std::cerr
- << "\nAfter fix:\n"
- << " * Total number of simplices in stars (incl. duplicates): "
- << std::get<0>(stats_after) << "\n"
- << " * Num inconsistent simplices in stars (incl. duplicates): "
- << red << std::get<1>(stats_after) << white << " ("
- << 100. * std::get<1>(stats_after) / std::get<0>(stats_after) << "%)\n"
- << " * Number of stars containing inconsistent simplices: "
- << red << std::get<2>(stats_after) << white << " ("
- << 100. * std::get<2>(stats_after) / m_points.size() << "%)\n";
+ std::tuple<std::size_t, std::size_t, std::size_t> stats_after = number_of_inconsistent_simplices(false);
+
+ std::cerr << "\nAfter fix:\n"
+ << " * Total number of simplices in stars (incl. duplicates): " << std::get<0>(stats_after) << "\n"
+ << " * Num inconsistent simplices in stars (incl. duplicates): " << red << std::get<1>(stats_after)
+ << white << " (" << 100. * std::get<1>(stats_after) / std::get<0>(stats_after) << "%)\n"
+ << " * Number of stars containing inconsistent simplices: " << red << std::get<2>(stats_after) << white
+ << " (" << 100. * std::get<2>(stats_after) / m_points.size() << "%)\n";
stats_before = stats_after;
#endif
- if (info.num_steps == 0)
- info.initial_num_inconsistent_stars = num_inconsistent_stars;
+ if (info.num_steps == 0) info.initial_num_inconsistent_stars = num_inconsistent_stars;
if (num_inconsistent_stars < info.best_num_inconsistent_stars)
info.best_num_inconsistent_stars = num_inconsistent_stars;
@@ -615,8 +543,7 @@ class Tangential_complex {
/// Returns the number of inconsistencies
/// @param[in] verbose If true, outputs a message into `std::cerr`.
- Num_inconsistencies
- number_of_inconsistent_simplices(
+ Num_inconsistencies number_of_inconsistent_simplices(
#ifdef DEBUG_TRACES
bool verbose = true
#else
@@ -634,8 +561,7 @@ class Tangential_complex {
Star::const_iterator it_inc_simplex_end = m_stars[idx].end();
for (; it_inc_simplex != it_inc_simplex_end; ++it_inc_simplex) {
// Don't check infinite cells
- if (is_infinite(*it_inc_simplex))
- continue;
+ if (is_infinite(*it_inc_simplex)) continue;
Simplex c = *it_inc_simplex;
c.insert(idx); // Add the missing index
@@ -651,18 +577,15 @@ class Tangential_complex {
}
if (verbose) {
- std::cerr
- << "\n==========================================================\n"
- << "Inconsistencies:\n"
- << " * Total number of simplices in stars (incl. duplicates): "
- << stats.num_simplices << "\n"
- << " * Number of inconsistent simplices in stars (incl. duplicates): "
- << stats.num_inconsistent_simplices << " ("
- << 100. * stats.num_inconsistent_simplices / stats.num_simplices << "%)\n"
- << " * Number of stars containing inconsistent simplices: "
- << stats.num_inconsistent_stars << " ("
- << 100. * stats.num_inconsistent_stars / m_points.size() << "%)\n"
- << "==========================================================\n";
+ std::cerr << "\n==========================================================\n"
+ << "Inconsistencies:\n"
+ << " * Total number of simplices in stars (incl. duplicates): " << stats.num_simplices << "\n"
+ << " * Number of inconsistent simplices in stars (incl. duplicates): "
+ << stats.num_inconsistent_simplices << " ("
+ << 100. * stats.num_inconsistent_simplices / stats.num_simplices << "%)\n"
+ << " * Number of stars containing inconsistent simplices: " << stats.num_inconsistent_stars << " ("
+ << 100. * stats.num_inconsistent_stars / m_points.size() << "%)\n"
+ << "==========================================================\n";
}
return stats;
@@ -672,23 +595,22 @@ class Tangential_complex {
*
* \tparam Simplex_tree_ must be a `Simplex_tree`.
*
- * @param[out] tree The result, where each `Vertex_handle` is the index of the
+ * @param[out] tree The result, where each `Vertex_handle` is the index of the
* corresponding point in the range provided to the constructor (it can also be
   * retrieved through the `Tangential_complex::get_point` function).
* @param[in] export_inconsistent_simplices Also export inconsistent simplices or not?
* @return The maximal dimension of the simplices.
*/
template <typename Simplex_tree_>
- int create_complex(Simplex_tree_ &tree
- , bool export_inconsistent_simplices = true
+ int create_complex(Simplex_tree_ &tree,
+ bool export_inconsistent_simplices = true
/// \cond ADVANCED_PARAMETERS
- , bool export_infinite_simplices = false
- , Simplex_set *p_inconsistent_simplices = NULL
+ ,
+ bool export_infinite_simplices = false, Simplex_set *p_inconsistent_simplices = NULL
/// \endcond
) const {
#if defined(DEBUG_TRACES) || defined(GUDHI_TC_PROFILING)
- std::cerr << yellow
- << "\nExporting the TC as a Simplex_tree... " << white;
+ std::cerr << yellow << "\nExporting the TC as a Simplex_tree... " << white;
#endif
#ifdef GUDHI_TC_PROFILING
Gudhi::Clock t;
@@ -705,14 +627,11 @@ class Tangential_complex {
Simplex c = *it_inc_simplex;
// Don't export infinite cells
- if (!export_infinite_simplices && is_infinite(c))
- continue;
+ if (!export_infinite_simplices && is_infinite(c)) continue;
- if (!export_inconsistent_simplices && !is_simplex_consistent(c))
- continue;
+ if (!export_inconsistent_simplices && !is_simplex_consistent(c)) continue;
- if (static_cast<int> (c.size()) > max_dim)
- max_dim = static_cast<int> (c.size());
+ if (static_cast<int>(c.size()) > max_dim) max_dim = static_cast<int>(c.size());
// Add the missing center vertex
c.insert(idx);
@@ -728,8 +647,7 @@ class Tangential_complex {
#ifdef GUDHI_TC_PROFILING
t.end();
- std::cerr << yellow << "done in " << t.num_seconds()
- << " seconds.\n" << white;
+ std::cerr << yellow << "done in " << t.num_seconds() << " seconds.\n" << white;
#elif defined(DEBUG_TRACES)
std::cerr << yellow << "done.\n" << white;
#endif
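// Illustrative fragment (not part of the patch) for the create_complex() documentation above:
// each Vertex_handle of the exported tree is the index of the corresponding input point, so the
// coordinates can be recovered through get_point(). `tc` and `stree` are assumed to be set up as
// in the earlier sketch.
int max_dim = tc.create_complex(stree);  // returns the maximal simplex dimension
std::cout << "max dimension: " << max_dim << "\n";
for (auto sh : stree.complex_simplex_range()) {
  for (auto v : stree.simplex_vertex_range(sh)) {
    auto p = tc.get_point(v);  // v indexes the point range given to the constructor
    (void)p;                   // e.g. hand the coordinates to a viewer
  }
}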
@@ -747,14 +665,11 @@ class Tangential_complex {
// simplex whose dimension is different from the previous ones.
// N.B.: The check is quite expensive.
- int create_complex(Simplicial_complex &complex,
- bool export_inconsistent_simplices = true,
- bool export_infinite_simplices = false,
- int check_lower_and_higher_dim_simplices = 2,
+ int create_complex(Simplicial_complex &complex, bool export_inconsistent_simplices = true,
+ bool export_infinite_simplices = false, int check_lower_and_higher_dim_simplices = 2,
Simplex_set *p_inconsistent_simplices = NULL) const {
#if defined(DEBUG_TRACES) || defined(GUDHI_TC_PROFILING)
- std::cerr << yellow
- << "\nExporting the TC as a Simplicial_complex... " << white;
+ std::cerr << yellow << "\nExporting the TC as a Simplicial_complex... " << white;
#endif
#ifdef GUDHI_TC_PROFILING
Gudhi::Clock t;
@@ -772,31 +687,26 @@ class Tangential_complex {
Simplex c = *it_inc_simplex;
// Don't export infinite cells
- if (!export_infinite_simplices && is_infinite(c))
- continue;
+ if (!export_infinite_simplices && is_infinite(c)) continue;
- if (!export_inconsistent_simplices && !is_simplex_consistent(c))
- continue;
+ if (!export_inconsistent_simplices && !is_simplex_consistent(c)) continue;
// Unusual simplex dim?
- if (check_lower_and_higher_dim_simplices == 2
- && max_dim != -1
- && static_cast<int> (c.size()) != max_dim) {
+ if (check_lower_and_higher_dim_simplices == 2 && max_dim != -1 && static_cast<int>(c.size()) != max_dim) {
// Let's activate the check
- std::cerr << red <<
- "Info: check_lower_and_higher_dim_simplices ACTIVATED. "
- "Export might be take some time...\n" << white;
+ std::cerr << red
+ << "Info: check_lower_and_higher_dim_simplices ACTIVATED. "
+                       "Export might take some time...\n"
+ << white;
check_lower_and_higher_dim_simplices = 1;
}
- if (static_cast<int> (c.size()) > max_dim)
- max_dim = static_cast<int> (c.size());
+ if (static_cast<int>(c.size()) > max_dim) max_dim = static_cast<int>(c.size());
// Add the missing center vertex
c.insert(idx);
// Try to insert the simplex
- bool added =
- complex.add_simplex(c, check_lower_and_higher_dim_simplices == 1);
+ bool added = complex.add_simplex(c, check_lower_and_higher_dim_simplices == 1);
// Inconsistent?
if (p_inconsistent_simplices && added && !is_simplex_consistent(c)) {
@@ -807,8 +717,7 @@ class Tangential_complex {
#ifdef GUDHI_TC_PROFILING
t.end();
- std::cerr << yellow << "done in " << t.num_seconds()
- << " seconds.\n" << white;
+ std::cerr << yellow << "done in " << t.num_seconds() << " seconds.\n" << white;
#elif defined(DEBUG_TRACES)
std::cerr << yellow << "done.\n" << white;
#endif
@@ -816,29 +725,24 @@ class Tangential_complex {
return max_dim;
}
- template<typename ProjectionFunctor = CGAL::Identity<Point> >
- std::ostream &export_to_off(
- const Simplicial_complex &complex, std::ostream & os,
+ template <typename ProjectionFunctor = CGAL::Identity<Point> >
+ std::ostream &export_to_off(const Simplicial_complex &complex, std::ostream &os,
Simplex_set const *p_simpl_to_color_in_red = NULL,
Simplex_set const *p_simpl_to_color_in_green = NULL,
Simplex_set const *p_simpl_to_color_in_blue = NULL,
- ProjectionFunctor const& point_projection = ProjectionFunctor())
- const {
- return export_to_off(
- os, false, p_simpl_to_color_in_red, p_simpl_to_color_in_green,
- p_simpl_to_color_in_blue, &complex, point_projection);
+ ProjectionFunctor const &point_projection = ProjectionFunctor()) const {
+ return export_to_off(os, false, p_simpl_to_color_in_red, p_simpl_to_color_in_green, p_simpl_to_color_in_blue,
+ &complex, point_projection);
}
- template<typename ProjectionFunctor = CGAL::Identity<Point> >
- std::ostream &export_to_off(
- std::ostream & os, bool color_inconsistencies = false,
+ template <typename ProjectionFunctor = CGAL::Identity<Point> >
+ std::ostream &export_to_off(std::ostream &os, bool color_inconsistencies = false,
Simplex_set const *p_simpl_to_color_in_red = NULL,
Simplex_set const *p_simpl_to_color_in_green = NULL,
Simplex_set const *p_simpl_to_color_in_blue = NULL,
const Simplicial_complex *p_complex = NULL,
- ProjectionFunctor const& point_projection = ProjectionFunctor()) const {
- if (m_points.empty())
- return os;
+ ProjectionFunctor const &point_projection = ProjectionFunctor()) const {
+ if (m_points.empty()) return os;
if (m_ambient_dim < 2) {
std::cerr << "Error: export_to_off => ambient dimension should be >= 2.\n";
@@ -847,14 +751,14 @@ class Tangential_complex {
}
if (m_ambient_dim > 3) {
std::cerr << "Warning: export_to_off => ambient dimension should be "
- "<= 3. Only the first 3 coordinates will be exported.\n";
+ "<= 3. Only the first 3 coordinates will be exported.\n";
}
if (m_intrinsic_dim < 1 || m_intrinsic_dim > 3) {
std::cerr << "Error: export_to_off => intrinsic dimension should be "
- "between 1 and 3.\n";
+ "between 1 and 3.\n";
os << "Error: export_to_off => intrinsic dimension should be "
- "between 1 and 3.\n";
+ "between 1 and 3.\n";
return os;
}
@@ -862,12 +766,10 @@ class Tangential_complex {
std::size_t num_simplices, num_vertices;
export_vertices_to_off(output, num_vertices, false, point_projection);
if (p_complex) {
- export_simplices_to_off(
- *p_complex, output, num_simplices, p_simpl_to_color_in_red,
- p_simpl_to_color_in_green, p_simpl_to_color_in_blue);
+ export_simplices_to_off(*p_complex, output, num_simplices, p_simpl_to_color_in_red, p_simpl_to_color_in_green,
+ p_simpl_to_color_in_blue);
} else {
- export_simplices_to_off(
- output, num_simplices, color_inconsistencies, p_simpl_to_color_in_red,
+ export_simplices_to_off(output, num_simplices, color_inconsistencies, p_simpl_to_color_in_red,
p_simpl_to_color_in_green, p_simpl_to_color_in_blue);
}
@@ -876,10 +778,9 @@ class Tangential_complex {
#endif
os << "OFF \n"
- << num_vertices << " "
- << num_simplices << " "
- << "0 \n"
- << output.str();
+ << num_vertices << " " << num_simplices << " "
+ << "0 \n"
+ << output.str();
return os;
}
@@ -896,21 +797,18 @@ class Tangential_complex {
#ifdef GUDHI_USE_TBB
// Parallel
if (boost::is_convertible<Concurrency_tag, CGAL::Parallel_tag>::value) {
- tbb::parallel_for(tbb::blocked_range<size_t>(0, m_points.size()),
- Compute_tangent_triangulation(*this));
+ tbb::parallel_for(tbb::blocked_range<size_t>(0, m_points.size()), Compute_tangent_triangulation(*this));
} else {
#endif // GUDHI_USE_TBB
// Sequential
- for (std::size_t i = 0; i < m_points.size(); ++i)
- compute_tangent_triangulation(i);
+ for (std::size_t i = 0; i < m_points.size(); ++i) compute_tangent_triangulation(i);
#ifdef GUDHI_USE_TBB
}
#endif // GUDHI_USE_TBB
#ifdef GUDHI_TC_PROFILING
t.end();
- std::cerr << yellow << "done in " << t.num_seconds()
- << " seconds.\n" << white;
+ std::cerr << yellow << "done in " << t.num_seconds() << " seconds.\n" << white;
#elif defined(DEBUG_TRACES)
std::cerr << yellow << "done.\n" << white;
#endif
@@ -918,8 +816,7 @@ class Tangential_complex {
// If the list of perturbed points is provided, it is much faster
template <typename Point_indices_range>
- void refresh_tangential_complex(
- Point_indices_range const& perturbed_points_indices) {
+ void refresh_tangential_complex(Point_indices_range const &perturbed_points_indices) {
#if defined(DEBUG_TRACES) || defined(GUDHI_TC_PROFILING)
std::cerr << yellow << "\nRefreshing TC... " << white;
#endif
@@ -939,22 +836,20 @@ class Tangential_complex {
} else {
#endif // GUDHI_USE_TBB
// Sequential
- for (std::size_t i = 0; i < m_points.size(); ++i)
- refresh_tangent_triangulation(i, updated_pts_ds);
+ for (std::size_t i = 0; i < m_points.size(); ++i) refresh_tangent_triangulation(i, updated_pts_ds);
#ifdef GUDHI_USE_TBB
}
#endif // GUDHI_USE_TBB
#ifdef GUDHI_TC_PROFILING
t.end();
- std::cerr << yellow << "done in " << t.num_seconds()
- << " seconds.\n" << white;
+ std::cerr << yellow << "done in " << t.num_seconds() << " seconds.\n" << white;
#elif defined(DEBUG_TRACES)
std::cerr << yellow << "done.\n" << white;
#endif
}
- void export_inconsistent_stars_to_OFF_files(std::string const& filename_base) const {
+ void export_inconsistent_stars_to_OFF_files(std::string const &filename_base) const {
// For each triangulation
for (std::size_t idx = 0; idx < m_points.size(); ++idx) {
// We build a SC along the way in case it's inconsistent
@@ -963,11 +858,9 @@ class Tangential_complex {
bool is_inconsistent = false;
Star::const_iterator it_inc_simplex = m_stars[idx].begin();
Star::const_iterator it_inc_simplex_end = m_stars[idx].end();
- for (; it_inc_simplex != it_inc_simplex_end;
- ++it_inc_simplex) {
+ for (; it_inc_simplex != it_inc_simplex_end; ++it_inc_simplex) {
// Skip infinite cells
- if (is_infinite(*it_inc_simplex))
- continue;
+ if (is_infinite(*it_inc_simplex)) continue;
Simplex c = *it_inc_simplex;
c.insert(idx); // Add the missing index
@@ -975,8 +868,7 @@ class Tangential_complex {
sc.add_simplex(c);
// If we do not already know this star is inconsistent, test it
- if (!is_inconsistent && !is_simplex_consistent(c))
- is_inconsistent = true;
+ if (!is_inconsistent && !is_simplex_consistent(c)) is_inconsistent = true;
}
if (is_inconsistent) {
@@ -991,66 +883,58 @@ class Tangential_complex {
class Compare_distance_to_ref_point {
public:
- Compare_distance_to_ref_point(Point const& ref, K const& k)
- : m_ref(ref), m_k(k) { }
+ Compare_distance_to_ref_point(Point const &ref, K const &k) : m_ref(ref), m_k(k) {}
- bool operator()(Point const& p1, Point const& p2) {
- typename K::Squared_distance_d sqdist =
- m_k.squared_distance_d_object();
+ bool operator()(Point const &p1, Point const &p2) {
+ typename K::Squared_distance_d sqdist = m_k.squared_distance_d_object();
return sqdist(p1, m_ref) < sqdist(p2, m_ref);
}
private:
- Point const& m_ref;
- K const& m_k;
+ Point const &m_ref;
+ K const &m_k;
};
#ifdef GUDHI_USE_TBB
// Functor for compute_tangential_complex function
class Compute_tangent_triangulation {
- Tangential_complex & m_tc;
+ Tangential_complex &m_tc;
public:
// Constructor
- Compute_tangent_triangulation(Tangential_complex &tc)
- : m_tc(tc) { }
+ Compute_tangent_triangulation(Tangential_complex &tc) : m_tc(tc) {}
// Constructor
- Compute_tangent_triangulation(const Compute_tangent_triangulation &ctt)
- : m_tc(ctt.m_tc) { }
+ Compute_tangent_triangulation(const Compute_tangent_triangulation &ctt) : m_tc(ctt.m_tc) {}
// operator()
- void operator()(const tbb::blocked_range<size_t>& r) const {
- for (size_t i = r.begin(); i != r.end(); ++i)
- m_tc.compute_tangent_triangulation(i);
+ void operator()(const tbb::blocked_range<size_t> &r) const {
+ for (size_t i = r.begin(); i != r.end(); ++i) m_tc.compute_tangent_triangulation(i);
}
};
// Functor for refresh_tangential_complex function
class Refresh_tangent_triangulation {
- Tangential_complex & m_tc;
- Points_ds const& m_updated_pts_ds;
+ Tangential_complex &m_tc;
+ Points_ds const &m_updated_pts_ds;
public:
// Constructor
- Refresh_tangent_triangulation(Tangential_complex &tc, Points_ds const& updated_pts_ds)
- : m_tc(tc), m_updated_pts_ds(updated_pts_ds) { }
+ Refresh_tangent_triangulation(Tangential_complex &tc, Points_ds const &updated_pts_ds)
+ : m_tc(tc), m_updated_pts_ds(updated_pts_ds) {}
// Constructor
Refresh_tangent_triangulation(const Refresh_tangent_triangulation &ctt)
- : m_tc(ctt.m_tc), m_updated_pts_ds(ctt.m_updated_pts_ds) { }
+ : m_tc(ctt.m_tc), m_updated_pts_ds(ctt.m_updated_pts_ds) {}
// operator()
- void operator()(const tbb::blocked_range<size_t>& r) const {
- for (size_t i = r.begin(); i != r.end(); ++i)
- m_tc.refresh_tangent_triangulation(i, m_updated_pts_ds);
+ void operator()(const tbb::blocked_range<size_t> &r) const {
+ for (size_t i = r.begin(); i != r.end(); ++i) m_tc.refresh_tangent_triangulation(i, m_updated_pts_ds);
}
};
#endif // GUDHI_USE_TBB
- bool is_infinite(Simplex const& s) const {
- return *s.rbegin() == (std::numeric_limits<std::size_t>::max)();
- }
+ bool is_infinite(Simplex const &s) const { return *s.rbegin() == (std::numeric_limits<std::size_t>::max)(); }
// Output: "triangulation" is a Regular Triangulation containing at least the
// star of "center_pt"
@@ -1076,17 +960,16 @@ class Tangential_complex {
Tr_point proj_wp;
if (i == tsb.origin()) {
// Insert {(0, 0, 0...), m_weights[i]}
- proj_wp = local_tr_traits.construct_weighted_point_d_object()(local_tr_traits.construct_point_d_object()(tangent_space_dim, CGAL::ORIGIN),
- m_weights[i]);
+ proj_wp = local_tr_traits.construct_weighted_point_d_object()(
+ local_tr_traits.construct_point_d_object()(tangent_space_dim, CGAL::ORIGIN), m_weights[i]);
} else {
- const Weighted_point& wp = compute_perturbed_weighted_point(i);
+ const Weighted_point &wp = compute_perturbed_weighted_point(i);
proj_wp = project_point_and_compute_weight(wp, tsb, local_tr_traits);
}
Tr_vertex_handle center_vertex = triangulation.insert(proj_wp);
center_vertex->data() = i;
- if (verbose)
- std::cerr << "* Inserted point #" << i << "\n";
+ if (verbose) std::cerr << "* Inserted point #" << i << "\n";
#ifdef GUDHI_TC_VERY_VERBOSE
std::size_t num_attempts_to_insert_points = 1;
@@ -1100,12 +983,13 @@ class Tangential_complex {
// of the sphere "star sphere" centered at "center_vertex"
// and which contains all the
// circumspheres of the star of "center_vertex"
- boost::optional<FT> squared_star_sphere_radius_plus_margin;
+ boost::optional<FT> squared_star_sphere_radius_plus_margin = boost::make_optional(false, FT());
+      // This is the (strange) way boost recommends to get rid of the "may be used uninitialized in this function" warning.
+      // The former code was:
+ // boost::optional<FT> squared_star_sphere_radius_plus_margin;
// Insert points until we find a point which is outside "star sphere"
- for (auto nn_it = ins_range.begin();
- nn_it != ins_range.end();
- ++nn_it) {
+ for (auto nn_it = ins_range.begin(); nn_it != ins_range.end(); ++nn_it) {
std::size_t neighbor_point_idx = nn_it->first;
// ith point = p, which is already inserted
@@ -1120,22 +1004,19 @@ class Tangential_complex {
k_sqdist(center_pt, neighbor_pt) > *squared_star_sphere_radius_plus_margin)
break;
- Tr_point proj_pt = project_point_and_compute_weight(neighbor_pt, neighbor_weight, tsb,
- local_tr_traits);
+ Tr_point proj_pt = project_point_and_compute_weight(neighbor_pt, neighbor_weight, tsb, local_tr_traits);
#ifdef GUDHI_TC_VERY_VERBOSE
++num_attempts_to_insert_points;
#endif
-
Tr_vertex_handle vh = triangulation.insert_if_in_star(proj_pt, center_vertex);
// Tr_vertex_handle vh = triangulation.insert(proj_pt);
if (vh != Tr_vertex_handle() && vh->data() == (std::numeric_limits<std::size_t>::max)()) {
#ifdef GUDHI_TC_VERY_VERBOSE
++num_inserted_points;
#endif
- if (verbose)
- std::cerr << "* Inserted point #" << neighbor_point_idx << "\n";
+ if (verbose) std::cerr << "* Inserted point #" << neighbor_point_idx << "\n";
vh->data() = neighbor_point_idx;
@@ -1144,11 +1025,9 @@ class Tangential_complex {
squared_star_sphere_radius_plus_margin = boost::none;
// Get the incident cells and look for the biggest circumsphere
std::vector<Tr_full_cell_handle> incident_cells;
- triangulation.incident_full_cells(
- center_vertex,
- std::back_inserter(incident_cells));
- for (typename std::vector<Tr_full_cell_handle>::iterator cit =
- incident_cells.begin(); cit != incident_cells.end(); ++cit) {
+ triangulation.incident_full_cells(center_vertex, std::back_inserter(incident_cells));
+ for (typename std::vector<Tr_full_cell_handle>::iterator cit = incident_cells.begin();
+ cit != incident_cells.end(); ++cit) {
Tr_full_cell_handle cell = *cit;
if (triangulation.is_infinite(cell)) {
squared_star_sphere_radius_plus_margin = boost::none;
@@ -1156,12 +1035,11 @@ class Tangential_complex {
} else {
// Note that this uses the perturbed point since it uses
// the points of the local triangulation
- Tr_point c = power_center(boost::make_transform_iterator(cell->vertices_begin(),
- vertex_handle_to_point<Tr_point,
- Tr_vertex_handle>),
- boost::make_transform_iterator(cell->vertices_end(),
- vertex_handle_to_point<Tr_point,
- Tr_vertex_handle>));
+ Tr_point c =
+ power_center(boost::make_transform_iterator(cell->vertices_begin(),
+ vertex_handle_to_point<Tr_point, Tr_vertex_handle>),
+ boost::make_transform_iterator(cell->vertices_end(),
+ vertex_handle_to_point<Tr_point, Tr_vertex_handle>));
FT sq_power_sphere_diam = 4 * point_weight(c);
@@ -1176,12 +1054,11 @@ class Tangential_complex {
// The value depends on whether we perturb weight or position
if (squared_star_sphere_radius_plus_margin) {
// "2*m_last_max_perturb" because both points can be perturbed
- squared_star_sphere_radius_plus_margin = CGAL::square(std::sqrt(*squared_star_sphere_radius_plus_margin)
- + 2 * m_last_max_perturb);
+ squared_star_sphere_radius_plus_margin =
+ CGAL::square(std::sqrt(*squared_star_sphere_radius_plus_margin) + 2 * m_last_max_perturb);
// Save it in `m_squared_star_spheres_radii_incl_margin`
- m_squared_star_spheres_radii_incl_margin[i] =
- *squared_star_sphere_radius_plus_margin;
+ m_squared_star_spheres_radii_incl_margin[i] = *squared_star_sphere_radius_plus_margin;
} else {
m_squared_star_spheres_radii_incl_margin[i] = FT(-1);
}
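// On the `+ 2 * m_last_max_perturb` margin above (a short justification, not text from the
// patch): if r denotes the current star-sphere radius and each of the two points entering a
// distance test may still be moved by at most m_last_max_perturb, then their distance can grow
// by at most 2 * m_last_max_perturb, so (r + 2 * m_last_max_perturb)^2 is a squared radius that
// remains valid after such perturbations. This keeps the early exit
// "k_sqdist(center_pt, neighbor_pt) > *squared_star_sphere_radius_plus_margin" conservative, and
// the same stored value lets refresh_tangent_triangulation() decide whether a perturbed point
// can possibly affect star #i.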
@@ -1193,36 +1070,28 @@ class Tangential_complex {
return center_vertex;
}
- void refresh_tangent_triangulation(std::size_t i, Points_ds const& updated_pts_ds, bool verbose = false) {
- if (verbose)
- std::cerr << "** Refreshing tangent tri #" << i << " **\n";
+ void refresh_tangent_triangulation(std::size_t i, Points_ds const &updated_pts_ds, bool verbose = false) {
+ if (verbose) std::cerr << "** Refreshing tangent tri #" << i << " **\n";
- if (m_squared_star_spheres_radii_incl_margin[i] == FT(-1))
- return compute_tangent_triangulation(i, verbose);
+ if (m_squared_star_spheres_radii_incl_margin[i] == FT(-1)) return compute_tangent_triangulation(i, verbose);
Point center_point = compute_perturbed_point(i);
    // Among the updated points, which one is closest to our center point?
- std::size_t closest_pt_index =
- updated_pts_ds.k_nearest_neighbors(center_point, 1, false).begin()->first;
+ std::size_t closest_pt_index = updated_pts_ds.k_nearest_neighbors(center_point, 1, false).begin()->first;
- typename K::Construct_weighted_point_d k_constr_wp =
- m_k.construct_weighted_point_d_object();
+ typename K::Construct_weighted_point_d k_constr_wp = m_k.construct_weighted_point_d_object();
typename K::Power_distance_d k_power_dist = m_k.power_distance_d_object();
// Construct a weighted point equivalent to the star sphere
- Weighted_point star_sphere = k_constr_wp(compute_perturbed_point(i),
- m_squared_star_spheres_radii_incl_margin[i]);
- Weighted_point closest_updated_point =
- compute_perturbed_weighted_point(closest_pt_index);
+ Weighted_point star_sphere = k_constr_wp(compute_perturbed_point(i), m_squared_star_spheres_radii_incl_margin[i]);
+ Weighted_point closest_updated_point = compute_perturbed_weighted_point(closest_pt_index);
// Is the "closest point" inside our star sphere?
- if (k_power_dist(star_sphere, closest_updated_point) <= FT(0))
- compute_tangent_triangulation(i, verbose);
+ if (k_power_dist(star_sphere, closest_updated_point) <= FT(0)) compute_tangent_triangulation(i, verbose);
}
void compute_tangent_triangulation(std::size_t i, bool verbose = false) {
- if (verbose)
- std::cerr << "** Computing tangent tri #" << i << " **\n";
+ if (verbose) std::cerr << "** Computing tangent tri #" << i << " **\n";
// std::cerr << "***********************************************\n";
// No need to lock the mutex here since this will not be called while
@@ -1233,7 +1102,7 @@ class Tangential_complex {
// Estimate the tangent space
if (!m_are_tangent_spaces_computed[i]) {
#ifdef GUDHI_TC_EXPORT_NORMALS
- tsb = compute_tangent_space(center_pt, i, true /*normalize*/, &m_orth_spaces[i]);
+ tsb = compute_tangent_space(center_pt, i, true /*normalize*/, &m_orth_spaces[i]);
#else
tsb = compute_tangent_space(center_pt, i);
#endif
@@ -1243,11 +1112,9 @@ class Tangential_complex {
Gudhi::Clock t;
#endif
int tangent_space_dim = tangent_basis_dim(i);
- Triangulation &local_tr =
- m_triangulations[i].construct_triangulation(tangent_space_dim);
+ Triangulation &local_tr = m_triangulations[i].construct_triangulation(tangent_space_dim);
- m_triangulations[i].center_vertex() =
- compute_star(i, center_pt, tsb, local_tr, verbose);
+ m_triangulations[i].center_vertex() = compute_star(i, center_pt, tsb, local_tr, verbose);
#if defined(GUDHI_TC_PROFILING) && defined(GUDHI_TC_VERY_VERBOSE)
t.end();
@@ -1256,8 +1123,8 @@ class Tangential_complex {
#endif
#ifdef GUDHI_TC_VERY_VERBOSE
- std::cerr << "Inserted " << num_inserted_points << " points / "
- << num_attempts_to_insert_points << " attemps to compute the star\n";
+ std::cerr << "Inserted " << num_inserted_points << " points / " << num_attempts_to_insert_points
+              << " attempts to compute the star\n";
#endif
update_star(i);
@@ -1278,8 +1145,7 @@ class Tangential_complex {
int cur_dim_plus_1 = local_tr.current_dimension() + 1;
std::vector<Tr_full_cell_handle> incident_cells;
- local_tr.incident_full_cells(
- center_vertex, std::back_inserter(incident_cells));
+ local_tr.incident_full_cells(center_vertex, std::back_inserter(incident_cells));
typename std::vector<Tr_full_cell_handle>::const_iterator it_c = incident_cells.begin();
typename std::vector<Tr_full_cell_handle>::const_iterator it_c_end = incident_cells.end();
@@ -1289,30 +1155,25 @@ class Tangential_complex {
Incident_simplex incident_simplex;
for (int j = 0; j < cur_dim_plus_1; ++j) {
std::size_t index = (*it_c)->vertex(j)->data();
- if (index != i)
- incident_simplex.insert(index);
+ if (index != i) incident_simplex.insert(index);
}
GUDHI_CHECK(incident_simplex.size() == cur_dim_plus_1 - 1,
- std::logic_error("update_star: wrong size of incident simplex"));
+ std::logic_error("update_star: wrong size of incident simplex"));
star.push_back(incident_simplex);
}
}
// Estimates tangent subspaces using PCA
- Tangent_space_basis compute_tangent_space(const Point &p
- , const std::size_t i
- , bool normalize_basis = true
- , Orthogonal_space_basis *p_orth_space_basis = NULL
- ) {
- unsigned int num_pts_for_pca = (std::min)(static_cast<unsigned int> (std::pow(GUDHI_TC_BASE_VALUE_FOR_PCA, m_intrinsic_dim)),
- static_cast<unsigned int> (m_points.size()));
+ Tangent_space_basis compute_tangent_space(const Point &p, const std::size_t i, bool normalize_basis = true,
+ Orthogonal_space_basis *p_orth_space_basis = NULL) {
+ unsigned int num_pts_for_pca =
+ (std::min)(static_cast<unsigned int>(std::pow(GUDHI_TC_BASE_VALUE_FOR_PCA, m_intrinsic_dim)),
+ static_cast<unsigned int>(m_points.size()));
// Kernel functors
- typename K::Construct_vector_d constr_vec =
- m_k.construct_vector_d_object();
- typename K::Compute_coordinate_d coord =
- m_k.compute_coordinate_d_object();
+ typename K::Construct_vector_d constr_vec = m_k.construct_vector_d_object();
+ typename K::Compute_coordinate_d coord = m_k.compute_coordinate_d_object();
#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM
KNS_range kns_range = m_points_ds_for_tse.k_nearest_neighbors(p, num_pts_for_pca, false);
@@ -1325,9 +1186,7 @@ class Tangential_complex {
// One row = one point
Eigen::MatrixXd mat_points(num_pts_for_pca, m_ambient_dim);
auto nn_it = kns_range.begin();
- for (unsigned int j = 0;
- j < num_pts_for_pca && nn_it != kns_range.end();
- ++j, ++nn_it) {
+ for (unsigned int j = 0; j < num_pts_for_pca && nn_it != kns_range.end(); ++j, ++nn_it) {
for (int i = 0; i < m_ambient_dim; ++i) {
mat_points(j, i) = CGAL::to_double(coord(points_for_pca[nn_it->first], i));
}
@@ -1340,36 +1199,26 @@ class Tangential_complex {
// The eigenvectors are sorted in increasing order of their corresponding
// eigenvalues
- for (int j = m_ambient_dim - 1;
- j >= m_ambient_dim - m_intrinsic_dim;
- --j) {
+ for (int j = m_ambient_dim - 1; j >= m_ambient_dim - m_intrinsic_dim; --j) {
if (normalize_basis) {
- Vector v = constr_vec(m_ambient_dim,
- eig.eigenvectors().col(j).data(),
+ Vector v = constr_vec(m_ambient_dim, eig.eigenvectors().col(j).data(),
eig.eigenvectors().col(j).data() + m_ambient_dim);
tsb.push_back(normalize_vector(v, m_k));
} else {
- tsb.push_back(constr_vec(
- m_ambient_dim,
- eig.eigenvectors().col(j).data(),
+ tsb.push_back(constr_vec(m_ambient_dim, eig.eigenvectors().col(j).data(),
eig.eigenvectors().col(j).data() + m_ambient_dim));
}
}
if (p_orth_space_basis) {
p_orth_space_basis->set_origin(i);
- for (int j = m_ambient_dim - m_intrinsic_dim - 1;
- j >= 0;
- --j) {
+ for (int j = m_ambient_dim - m_intrinsic_dim - 1; j >= 0; --j) {
if (normalize_basis) {
- Vector v = constr_vec(m_ambient_dim,
- eig.eigenvectors().col(j).data(),
+ Vector v = constr_vec(m_ambient_dim, eig.eigenvectors().col(j).data(),
eig.eigenvectors().col(j).data() + m_ambient_dim);
p_orth_space_basis->push_back(normalize_vector(v, m_k));
} else {
- p_orth_space_basis->push_back(constr_vec(
- m_ambient_dim,
- eig.eigenvectors().col(j).data(),
+ p_orth_space_basis->push_back(constr_vec(m_ambient_dim, eig.eigenvectors().col(j).data(),
eig.eigenvectors().col(j).data() + m_ambient_dim));
}
}
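// A minimal, self-contained sketch of the PCA step used by compute_tangent_space() above,
// assuming `neighbors` holds the coordinates of the nearest neighbors of a point, one point per
// row. Eigen's SelfAdjointEigenSolver returns eigenvalues in increasing order, which is why the
// code above reads the eigenvectors from column (m_ambient_dim - 1) downwards: the last
// `intrinsic_dim` columns span the estimated tangent space, the remaining ones its orthogonal
// complement. The function name and signature are illustrative only.
#include <Eigen/Dense>

// Returns an (ambient_dim x intrinsic_dim) matrix whose columns form an orthonormal basis of the
// tangent space estimated from `neighbors` (num_neighbors x ambient_dim).
Eigen::MatrixXd estimate_tangent_basis(const Eigen::MatrixXd &neighbors, int intrinsic_dim) {
  Eigen::MatrixXd centered = neighbors.rowwise() - neighbors.colwise().mean();
  Eigen::MatrixXd cov = centered.adjoint() * centered;      // scatter matrix
  Eigen::SelfAdjointEigenSolver<Eigen::MatrixXd> eig(cov);  // eigenvalues sorted increasingly
  return eig.eigenvectors().rightCols(intrinsic_dim);       // largest-variance directions
}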
@@ -1386,29 +1235,23 @@ class Tangential_complex {
// on it. Note that most points are duplicated.
Tangent_space_basis compute_tangent_space(const Simplex &s, bool normalize_basis = true) {
- unsigned int num_pts_for_pca = (std::min)(static_cast<unsigned int> (std::pow(GUDHI_TC_BASE_VALUE_FOR_PCA, m_intrinsic_dim)),
- static_cast<unsigned int> (m_points.size()));
+ unsigned int num_pts_for_pca =
+ (std::min)(static_cast<unsigned int>(std::pow(GUDHI_TC_BASE_VALUE_FOR_PCA, m_intrinsic_dim)),
+ static_cast<unsigned int>(m_points.size()));
// Kernel functors
- typename K::Construct_vector_d constr_vec =
- m_k.construct_vector_d_object();
- typename K::Compute_coordinate_d coord =
- m_k.compute_coordinate_d_object();
- typename K::Squared_length_d sqlen =
- m_k.squared_length_d_object();
- typename K::Scaled_vector_d scaled_vec =
- m_k.scaled_vector_d_object();
- typename K::Scalar_product_d scalar_pdct =
- m_k.scalar_product_d_object();
- typename K::Difference_of_vectors_d diff_vec =
- m_k.difference_of_vectors_d_object();
+ typename K::Construct_vector_d constr_vec = m_k.construct_vector_d_object();
+ typename K::Compute_coordinate_d coord = m_k.compute_coordinate_d_object();
+ typename K::Squared_length_d sqlen = m_k.squared_length_d_object();
+ typename K::Scaled_vector_d scaled_vec = m_k.scaled_vector_d_object();
+ typename K::Scalar_product_d scalar_pdct = m_k.scalar_product_d_object();
+ typename K::Difference_of_vectors_d diff_vec = m_k.difference_of_vectors_d_object();
// One row = one point
Eigen::MatrixXd mat_points(s.size() * num_pts_for_pca, m_ambient_dim);
unsigned int current_row = 0;
- for (Simplex::const_iterator it_index = s.begin();
- it_index != s.end(); ++it_index) {
+ for (Simplex::const_iterator it_index = s.begin(); it_index != s.end(); ++it_index) {
const Point &p = m_points[*it_index];
#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM
@@ -1420,12 +1263,9 @@ class Tangential_complex {
#endif
auto nn_it = kns_range.begin();
- for (;
- current_row < num_pts_for_pca && nn_it != kns_range.end();
- ++current_row, ++nn_it) {
+ for (; current_row < num_pts_for_pca && nn_it != kns_range.end(); ++current_row, ++nn_it) {
for (int i = 0; i < m_ambient_dim; ++i) {
- mat_points(current_row, i) =
- CGAL::to_double(coord(points_for_pca[nn_it->first], i));
+ mat_points(current_row, i) = CGAL::to_double(coord(points_for_pca[nn_it->first], i));
}
}
}
@@ -1437,18 +1277,13 @@ class Tangential_complex {
// The eigenvectors are sorted in increasing order of their corresponding
// eigenvalues
- for (int j = m_ambient_dim - 1;
- j >= m_ambient_dim - m_intrinsic_dim;
- --j) {
+ for (int j = m_ambient_dim - 1; j >= m_ambient_dim - m_intrinsic_dim; --j) {
if (normalize_basis) {
- Vector v = constr_vec(m_ambient_dim,
- eig.eigenvectors().col(j).data(),
+ Vector v = constr_vec(m_ambient_dim, eig.eigenvectors().col(j).data(),
eig.eigenvectors().col(j).data() + m_ambient_dim);
tsb.push_back(normalize_vector(v, m_k));
} else {
- tsb.push_back(constr_vec(
- m_ambient_dim,
- eig.eigenvectors().col(j).data(),
+ tsb.push_back(constr_vec(m_ambient_dim, eig.eigenvectors().col(j).data(),
eig.eigenvectors().col(j).data() + m_ambient_dim));
}
}
@@ -1458,14 +1293,11 @@ class Tangential_complex {
// Returns the dimension of the ith local triangulation
- int tangent_basis_dim(std::size_t i) const {
- return m_tangent_spaces[i].dimension();
- }
+ int tangent_basis_dim(std::size_t i) const { return m_tangent_spaces[i].dimension(); }
Point compute_perturbed_point(std::size_t pt_idx) const {
#ifdef GUDHI_TC_PERTURB_POSITION
- return m_k.translated_point_d_object()(
- m_points[pt_idx], m_translations[pt_idx]);
+ return m_k.translated_point_d_object()(m_points[pt_idx], m_translations[pt_idx]);
#else
return m_points[pt_idx];
#endif
@@ -1473,8 +1305,7 @@ class Tangential_complex {
void compute_perturbed_weighted_point(std::size_t pt_idx, Point &p, FT &w) const {
#ifdef GUDHI_TC_PERTURB_POSITION
- p = m_k.translated_point_d_object()(
- m_points[pt_idx], m_translations[pt_idx]);
+ p = m_k.translated_point_d_object()(m_points[pt_idx], m_translations[pt_idx]);
#else
p = m_points[pt_idx];
#endif
@@ -1482,8 +1313,7 @@ class Tangential_complex {
}
Weighted_point compute_perturbed_weighted_point(std::size_t pt_idx) const {
- typename K::Construct_weighted_point_d k_constr_wp =
- m_k.construct_weighted_point_d_object();
+ typename K::Construct_weighted_point_d k_constr_wp = m_k.construct_weighted_point_d_object();
Weighted_point wp = k_constr_wp(
#ifdef GUDHI_TC_PERTURB_POSITION
@@ -1496,33 +1326,22 @@ class Tangential_complex {
return wp;
}
- Point unproject_point(const Tr_point &p,
- const Tangent_space_basis &tsb,
- const Tr_traits &tr_traits) const {
- typename K::Translated_point_d k_transl =
- m_k.translated_point_d_object();
- typename K::Scaled_vector_d k_scaled_vec =
- m_k.scaled_vector_d_object();
- typename Tr_traits::Compute_coordinate_d coord =
- tr_traits.compute_coordinate_d_object();
+ Point unproject_point(const Tr_point &p, const Tangent_space_basis &tsb, const Tr_traits &tr_traits) const {
+ typename K::Translated_point_d k_transl = m_k.translated_point_d_object();
+ typename K::Scaled_vector_d k_scaled_vec = m_k.scaled_vector_d_object();
+ typename Tr_traits::Compute_coordinate_d coord = tr_traits.compute_coordinate_d_object();
Point global_point = compute_perturbed_point(tsb.origin());
- for (int i = 0; i < m_intrinsic_dim; ++i)
- global_point = k_transl(global_point,
- k_scaled_vec(tsb[i], coord(p, i)));
+ for (int i = 0; i < m_intrinsic_dim; ++i) global_point = k_transl(global_point, k_scaled_vec(tsb[i], coord(p, i)));
return global_point;
}
// Project the point in the tangent space
// Resulting point coords are expressed in tsb's space
- Tr_bare_point project_point(const Point &p,
- const Tangent_space_basis &tsb,
- const Tr_traits &tr_traits) const {
- typename K::Scalar_product_d scalar_pdct =
- m_k.scalar_product_d_object();
- typename K::Difference_of_points_d diff_points =
- m_k.difference_of_points_d_object();
+ Tr_bare_point project_point(const Point &p, const Tangent_space_basis &tsb, const Tr_traits &tr_traits) const {
+ typename K::Scalar_product_d scalar_pdct = m_k.scalar_product_d_object();
+ typename K::Difference_of_points_d diff_points = m_k.difference_of_points_d_object();
Vector v = diff_points(p, compute_perturbed_point(tsb.origin()));
@@ -1535,41 +1354,30 @@ class Tangential_complex {
coords.push_back(coord);
}
- return tr_traits.construct_point_d_object()(
- static_cast<int> (coords.size()), coords.begin(), coords.end());
+ return tr_traits.construct_point_d_object()(static_cast<int>(coords.size()), coords.begin(), coords.end());
}
// Project the point in the tangent space
  // The resulting weight is the input weight minus the squared distance between p and its projection (see the note below)
// Resulting point coords are expressed in tsb's space
- Tr_point project_point_and_compute_weight(const Weighted_point &wp,
- const Tangent_space_basis &tsb,
+ Tr_point project_point_and_compute_weight(const Weighted_point &wp, const Tangent_space_basis &tsb,
const Tr_traits &tr_traits) const {
- typename K::Point_drop_weight_d k_drop_w =
- m_k.point_drop_weight_d_object();
- typename K::Compute_weight_d k_point_weight =
- m_k.compute_weight_d_object();
- return project_point_and_compute_weight(
- k_drop_w(wp), k_point_weight(wp), tsb, tr_traits);
+ typename K::Point_drop_weight_d k_drop_w = m_k.point_drop_weight_d_object();
+ typename K::Compute_weight_d k_point_weight = m_k.compute_weight_d_object();
+ return project_point_and_compute_weight(k_drop_w(wp), k_point_weight(wp), tsb, tr_traits);
}
// Same as above, with slightly different parameters
- Tr_point project_point_and_compute_weight(const Point &p, const FT w,
- const Tangent_space_basis &tsb,
+ Tr_point project_point_and_compute_weight(const Point &p, const FT w, const Tangent_space_basis &tsb,
const Tr_traits &tr_traits) const {
const int point_dim = m_k.point_dimension_d_object()(p);
- typename K::Construct_point_d constr_pt =
- m_k.construct_point_d_object();
- typename K::Scalar_product_d scalar_pdct =
- m_k.scalar_product_d_object();
- typename K::Difference_of_points_d diff_points =
- m_k.difference_of_points_d_object();
- typename K::Compute_coordinate_d coord =
- m_k.compute_coordinate_d_object();
- typename K::Construct_cartesian_const_iterator_d ccci =
- m_k.construct_cartesian_const_iterator_d_object();
+ typename K::Construct_point_d constr_pt = m_k.construct_point_d_object();
+ typename K::Scalar_product_d scalar_pdct = m_k.scalar_product_d_object();
+ typename K::Difference_of_points_d diff_points = m_k.difference_of_points_d_object();
+ typename K::Compute_coordinate_d coord = m_k.compute_coordinate_d_object();
+ typename K::Construct_cartesian_const_iterator_d ccci = m_k.construct_cartesian_const_iterator_d_object();
Point origin = compute_perturbed_point(tsb.origin());
Vector v = diff_points(p, origin);
@@ -1588,8 +1396,7 @@ class Tangential_complex {
// p_proj += c * tsb[i]
if (!same_dim) {
- for (int j = 0; j < point_dim; ++j)
- p_proj[j] += c * coord(tsb[i], j);
+ for (int j = 0; j < point_dim; ++j) p_proj[j] += c * coord(tsb[i], j);
}
}
@@ -1600,24 +1407,21 @@ class Tangential_complex {
sq_dist_to_proj_pt = m_k.squared_distance_d_object()(p, projected_pt);
}
- return tr_traits.construct_weighted_point_d_object()
- (tr_traits.construct_point_d_object()(static_cast<int> (coords.size()), coords.begin(), coords.end()),
- w - sq_dist_to_proj_pt);
+ return tr_traits.construct_weighted_point_d_object()(
+ tr_traits.construct_point_d_object()(static_cast<int>(coords.size()), coords.begin(), coords.end()),
+ w - sq_dist_to_proj_pt);
}
// Project all the points in the tangent space
template <typename Indexed_point_range>
- std::vector<Tr_point> project_points_and_compute_weights(
- const Indexed_point_range &point_indices,
+ std::vector<Tr_point> project_points_and_compute_weights(const Indexed_point_range &point_indices,
const Tangent_space_basis &tsb,
const Tr_traits &tr_traits) const {
std::vector<Tr_point> ret;
- for (typename Indexed_point_range::const_iterator
- it = point_indices.begin(), it_end = point_indices.end();
+ for (typename Indexed_point_range::const_iterator it = point_indices.begin(), it_end = point_indices.end();
it != it_end; ++it) {
- ret.push_back(project_point_and_compute_weight(
- compute_perturbed_weighted_point(*it), tsb, tr_traits));
+ ret.push_back(project_point_and_compute_weight(compute_perturbed_weighted_point(*it), tsb, tr_traits));
}
return ret;
}
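// Why the projected weight is `w - sq_dist_to_proj_pt` in project_point_and_compute_weight()
// above (a short justification, not text from the patch): write q = proj_T(p) for the orthogonal
// projection of p onto the tangent flat T. For any x in T, p - q is orthogonal to x - q, so by
// Pythagoras
//   pow(x, (p, w)) = ||x - p||^2 - w
//                  = ||x - q||^2 + ||p - q||^2 - w
//                  = pow(x, (q, w - ||p - q||^2)).
// Hence replacing (p, w) by (q, w - ||p - q||^2) preserves power distances to every point of the
// tangent space, which is what the local regular (weighted Delaunay) triangulations rely on.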
@@ -1636,7 +1440,7 @@ class Tangential_complex {
// A simplex here is a list of point indices
// TODO(CJ): improve it like the other "is_simplex_consistent" below
- bool is_simplex_consistent(Simplex const& simplex) const {
+ bool is_simplex_consistent(Simplex const &simplex) const {
// Check if the simplex is in the stars of all its vertices
Simplex::const_iterator it_point_idx = simplex.begin();
// For each point p of the simplex, we parse the incidents cells of p
@@ -1644,18 +1448,16 @@ class Tangential_complex {
for (; it_point_idx != simplex.end(); ++it_point_idx) {
std::size_t point_idx = *it_point_idx;
// Don't check infinite simplices
- if (point_idx == (std::numeric_limits<std::size_t>::max)())
- continue;
+ if (point_idx == (std::numeric_limits<std::size_t>::max)()) continue;
- Star const& star = m_stars[point_idx];
+ Star const &star = m_stars[point_idx];
// What we're looking for is "simplex" \ point_idx
Incident_simplex is_to_find = simplex;
is_to_find.erase(point_idx);
// For each cell
- if (std::find(star.begin(), star.end(), is_to_find) == star.end())
- return false;
+ if (std::find(star.begin(), star.end(), is_to_find) == star.end()) return false;
}
return true;
@@ -1668,9 +1470,8 @@ class Tangential_complex {
// star(center_point)
template <typename OutputIterator> // value_type = std::size_t
- bool is_simplex_consistent(
- std::size_t center_point,
- Incident_simplex const& s, // without "center_point"
+ bool is_simplex_consistent(std::size_t center_point,
+ Incident_simplex const &s, // without "center_point"
OutputIterator points_whose_star_does_not_contain_s,
bool check_also_in_non_maximal_faces = false) const {
Simplex full_simplex = s;
@@ -1683,10 +1484,9 @@ class Tangential_complex {
for (; it_point_idx != s.end(); ++it_point_idx) {
std::size_t point_idx = *it_point_idx;
// Don't check infinite simplices
- if (point_idx == (std::numeric_limits<std::size_t>::max)())
- continue;
+ if (point_idx == (std::numeric_limits<std::size_t>::max)()) continue;
- Star const& star = m_stars[point_idx];
+ Star const &star = m_stars[point_idx];
// What we're looking for is full_simplex \ point_idx
Incident_simplex is_to_find = full_simplex;
@@ -1696,15 +1496,11 @@ class Tangential_complex {
      // For each simplex "is" of the star, check if is_to_find is
      // included in "is"
bool found = false;
- for (Star::const_iterator is = star.begin(), is_end = star.end();
- !found && is != is_end; ++is) {
- if (std::includes(is->begin(), is->end(),
- is_to_find.begin(), is_to_find.end()))
- found = true;
+ for (Star::const_iterator is = star.begin(), is_end = star.end(); !found && is != is_end; ++is) {
+ if (std::includes(is->begin(), is->end(), is_to_find.begin(), is_to_find.end())) found = true;
}
- if (!found)
- *points_whose_star_does_not_contain_s++ = point_idx;
+ if (!found) *points_whose_star_does_not_contain_s++ = point_idx;
} else {
// Does the star contain is_to_find?
if (std::find(star.begin(), star.end(), is_to_find) == star.end())
@@ -1718,19 +1514,15 @@ class Tangential_complex {
// A simplex here is a list of point indices
// It looks for s in star(p).
// "s" contains all the points of the simplex except p.
- bool is_simplex_in_star(std::size_t p,
- Incident_simplex const& s,
- bool check_also_in_non_maximal_faces = true) const {
- Star const& star = m_stars[p];
+ bool is_simplex_in_star(std::size_t p, Incident_simplex const &s, bool check_also_in_non_maximal_faces = true) const {
+ Star const &star = m_stars[p];
if (check_also_in_non_maximal_faces) {
      // For each simplex "is" of the star, check if s is
      // included in "is"
bool found = false;
- for (Star::const_iterator is = star.begin(), is_end = star.end();
- !found && is != is_end; ++is) {
- if (std::includes(is->begin(), is->end(), s.begin(), s.end()))
- found = true;
+ for (Star::const_iterator is = star.begin(), is_end = star.end(); !found && is != is_end; ++is) {
+ if (std::includes(is->begin(), is->end(), s.begin(), s.end())) found = true;
}
return found;
@@ -1742,64 +1534,55 @@ class Tangential_complex {
#ifdef GUDHI_USE_TBB
// Functor for try_to_solve_inconsistencies_in_a_local_triangulation function
class Try_to_solve_inconsistencies_in_a_local_triangulation {
- Tangential_complex & m_tc;
+ Tangential_complex &m_tc;
double m_max_perturb;
tbb::combinable<std::size_t> &m_num_inconsistencies;
tbb::combinable<std::vector<std::size_t> > &m_updated_points;
public:
// Constructor
- Try_to_solve_inconsistencies_in_a_local_triangulation(Tangential_complex &tc,
- double max_perturb,
+ Try_to_solve_inconsistencies_in_a_local_triangulation(Tangential_complex &tc, double max_perturb,
tbb::combinable<std::size_t> &num_inconsistencies,
tbb::combinable<std::vector<std::size_t> > &updated_points)
: m_tc(tc),
- m_max_perturb(max_perturb),
- m_num_inconsistencies(num_inconsistencies),
- m_updated_points(updated_points) { }
+ m_max_perturb(max_perturb),
+ m_num_inconsistencies(num_inconsistencies),
+ m_updated_points(updated_points) {}
// Constructor
- Try_to_solve_inconsistencies_in_a_local_triangulation(const Try_to_solve_inconsistencies_in_a_local_triangulation&
- tsilt)
+ Try_to_solve_inconsistencies_in_a_local_triangulation(
+ const Try_to_solve_inconsistencies_in_a_local_triangulation &tsilt)
: m_tc(tsilt.m_tc),
- m_max_perturb(tsilt.m_max_perturb),
- m_num_inconsistencies(tsilt.m_num_inconsistencies),
- m_updated_points(tsilt.m_updated_points) { }
+ m_max_perturb(tsilt.m_max_perturb),
+ m_num_inconsistencies(tsilt.m_num_inconsistencies),
+ m_updated_points(tsilt.m_updated_points) {}
// operator()
- void operator()(const tbb::blocked_range<size_t>& r) const {
+ void operator()(const tbb::blocked_range<size_t> &r) const {
for (size_t i = r.begin(); i != r.end(); ++i) {
- m_num_inconsistencies.local() +=
- m_tc.try_to_solve_inconsistencies_in_a_local_triangulation(i, m_max_perturb,
- std::back_inserter(m_updated_points.local()));
+ m_num_inconsistencies.local() += m_tc.try_to_solve_inconsistencies_in_a_local_triangulation(
+ i, m_max_perturb, std::back_inserter(m_updated_points.local()));
}
}
};
#endif // GUDHI_USE_TBB
void perturb(std::size_t point_idx, double max_perturb) {
- const Tr_traits &local_tr_traits =
- m_triangulations[point_idx].tr().geom_traits();
- typename Tr_traits::Compute_coordinate_d coord =
- local_tr_traits.compute_coordinate_d_object();
- typename K::Translated_point_d k_transl =
- m_k.translated_point_d_object();
- typename K::Construct_vector_d k_constr_vec =
- m_k.construct_vector_d_object();
- typename K::Scaled_vector_d k_scaled_vec =
- m_k.scaled_vector_d_object();
-
- CGAL::Random_points_in_ball_d<Tr_bare_point>
- tr_point_in_ball_generator(m_intrinsic_dim,
- m_random_generator.get_double(0., max_perturb));
+ const Tr_traits &local_tr_traits = m_triangulations[point_idx].tr().geom_traits();
+ typename Tr_traits::Compute_coordinate_d coord = local_tr_traits.compute_coordinate_d_object();
+ typename K::Translated_point_d k_transl = m_k.translated_point_d_object();
+ typename K::Construct_vector_d k_constr_vec = m_k.construct_vector_d_object();
+ typename K::Scaled_vector_d k_scaled_vec = m_k.scaled_vector_d_object();
+
+ CGAL::Random_points_in_ball_d<Tr_bare_point> tr_point_in_ball_generator(
+ m_intrinsic_dim, m_random_generator.get_double(0., max_perturb));
Tr_point local_random_transl =
local_tr_traits.construct_weighted_point_d_object()(*tr_point_in_ball_generator++, 0);
Translation_for_perturb global_transl = k_constr_vec(m_ambient_dim);
const Tangent_space_basis &tsb = m_tangent_spaces[point_idx];
for (int i = 0; i < m_intrinsic_dim; ++i) {
- global_transl = k_transl(global_transl,
- k_scaled_vec(tsb[i], coord(local_random_transl, i)));
+ global_transl = k_transl(global_transl, k_scaled_vec(tsb[i], coord(local_random_transl, i)));
}
// Parallel
#if defined(GUDHI_USE_TBB)
@@ -1814,12 +1597,11 @@ class Tangential_complex {
// Return true if inconsistencies were found
template <typename OutputIt>
- bool try_to_solve_inconsistencies_in_a_local_triangulation(std::size_t tr_index,
- double max_perturb,
- OutputIt perturbed_pts_indices = CGAL::Emptyset_iterator()) {
+ bool try_to_solve_inconsistencies_in_a_local_triangulation(
+ std::size_t tr_index, double max_perturb, OutputIt perturbed_pts_indices = CGAL::Emptyset_iterator()) {
bool is_inconsistent = false;
- Star const& star = m_stars[tr_index];
+ Star const &star = m_stars[tr_index];
// For each incident simplex
Star::const_iterator it_inc_simplex = star.begin();
@@ -1828,8 +1610,7 @@ class Tangential_complex {
const Incident_simplex &incident_simplex = *it_inc_simplex;
// Don't check infinite cells
- if (is_infinite(incident_simplex))
- continue;
+ if (is_infinite(incident_simplex)) continue;
Simplex c = incident_simplex;
c.insert(tr_index); // Add the missing index
@@ -1851,31 +1632,24 @@ class Tangential_complex {
return is_inconsistent;
}
-
// 1st line: number of points
// Then one point per line
- std::ostream &export_point_set(std::ostream & os,
- bool use_perturbed_points = false,
+ std::ostream &export_point_set(std::ostream &os, bool use_perturbed_points = false,
const char *coord_separator = " ") const {
if (use_perturbed_points) {
std::vector<Point> perturbed_points;
perturbed_points.reserve(m_points.size());
- for (std::size_t i = 0; i < m_points.size(); ++i)
- perturbed_points.push_back(compute_perturbed_point(i));
+ for (std::size_t i = 0; i < m_points.size(); ++i) perturbed_points.push_back(compute_perturbed_point(i));
- return export_point_set(
- m_k, perturbed_points, os, coord_separator);
+ return export_point_set(m_k, perturbed_points, os, coord_separator);
} else {
- return export_point_set(
- m_k, m_points, os, coord_separator);
+ return export_point_set(m_k, m_points, os, coord_separator);
}
}
- template<typename ProjectionFunctor = CGAL::Identity<Point> >
- std::ostream &export_vertices_to_off(
- std::ostream & os, std::size_t &num_vertices,
- bool use_perturbed_points = false,
- ProjectionFunctor const& point_projection = ProjectionFunctor()) const {
+ template <typename ProjectionFunctor = CGAL::Identity<Point> >
+ std::ostream &export_vertices_to_off(std::ostream &os, std::size_t &num_vertices, bool use_perturbed_points = false,
+ ProjectionFunctor const &point_projection = ProjectionFunctor()) const {
if (m_points.empty()) {
num_vertices = 0;
return os;
@@ -1887,8 +1661,7 @@ class Tangential_complex {
const int N = (m_intrinsic_dim == 1 ? 2 : 1);
// Kernel functors
- typename K::Compute_coordinate_d coord =
- m_k.compute_coordinate_d_object();
+ typename K::Compute_coordinate_d coord = m_k.compute_coordinate_d_object();
#ifdef GUDHI_TC_EXPORT_ALL_COORDS_IN_OFF
int num_coords = m_ambient_dim;
@@ -1903,18 +1676,14 @@ class Tangential_complex {
typename Points::const_iterator it_p_end = m_points.end();
// For each point p
for (std::size_t i = 0; it_p != it_p_end; ++it_p, ++i) {
- Point p = point_projection(
- use_perturbed_points ? compute_perturbed_point(i) : *it_p);
+ Point p = point_projection(use_perturbed_points ? compute_perturbed_point(i) : *it_p);
for (int ii = 0; ii < N; ++ii) {
int j = 0;
- for (; j < num_coords; ++j)
- os << CGAL::to_double(coord(p, j)) << " ";
- if (j == 2)
- os << "0";
+ for (; j < num_coords; ++j) os << CGAL::to_double(coord(p, j)) << " ";
+ if (j == 2) os << "0";
#ifdef GUDHI_TC_EXPORT_NORMALS
- for (j = 0; j < num_coords; ++j)
- os << " " << CGAL::to_double(coord(*it_os->begin(), j));
+ for (j = 0; j < num_coords; ++j) os << " " << CGAL::to_double(coord(*it_os->begin(), j));
#endif
os << "\n";
}
@@ -1927,12 +1696,11 @@ class Tangential_complex {
return os;
}
- std::ostream &export_simplices_to_off(std::ostream & os, std::size_t &num_OFF_simplices,
+ std::ostream &export_simplices_to_off(std::ostream &os, std::size_t &num_OFF_simplices,
bool color_inconsistencies = false,
Simplex_set const *p_simpl_to_color_in_red = NULL,
Simplex_set const *p_simpl_to_color_in_green = NULL,
- Simplex_set const *p_simpl_to_color_in_blue = NULL)
- const {
+ Simplex_set const *p_simpl_to_color_in_blue = NULL) const {
// If m_intrinsic_dim = 1, each point is output two times
// (see export_vertices_to_off)
num_OFF_simplices = 0;
@@ -1945,10 +1713,9 @@ class Tangential_complex {
for (std::size_t idx = 0; it_tr != it_tr_end; ++it_tr, ++idx) {
bool is_star_inconsistent = false;
- Triangulation const& tr = it_tr->tr();
+ Triangulation const &tr = it_tr->tr();
- if (tr.current_dimension() < m_intrinsic_dim)
- continue;
+ if (tr.current_dimension() < m_intrinsic_dim) continue;
// Color for this star
std::stringstream color;
@@ -1974,23 +1741,16 @@ class Tangential_complex {
color_simplex = 0;
is_star_inconsistent = true;
} else {
- if (p_simpl_to_color_in_red &&
- std::find(
- p_simpl_to_color_in_red->begin(),
- p_simpl_to_color_in_red->end(),
- c) != p_simpl_to_color_in_red->end()) {
+ if (p_simpl_to_color_in_red && std::find(p_simpl_to_color_in_red->begin(), p_simpl_to_color_in_red->end(),
+ c) != p_simpl_to_color_in_red->end()) {
color_simplex = 1;
} else if (p_simpl_to_color_in_green &&
- std::find(
- p_simpl_to_color_in_green->begin(),
- p_simpl_to_color_in_green->end(),
- c) != p_simpl_to_color_in_green->end()) {
+ std::find(p_simpl_to_color_in_green->begin(), p_simpl_to_color_in_green->end(), c) !=
+ p_simpl_to_color_in_green->end()) {
color_simplex = 2;
} else if (p_simpl_to_color_in_blue &&
- std::find(
- p_simpl_to_color_in_blue->begin(),
- p_simpl_to_color_in_blue->end(),
- c) != p_simpl_to_color_in_blue->end()) {
+ std::find(p_simpl_to_color_in_blue->begin(), p_simpl_to_color_in_blue->end(), c) !=
+ p_simpl_to_color_in_blue->end()) {
color_simplex = 3;
}
}
@@ -2002,10 +1762,8 @@ class Tangential_complex {
if (m_intrinsic_dim == 1) {
Simplex tmp_c;
Simplex::iterator it = c.begin();
- for (; it != c.end(); ++it)
- tmp_c.insert(*it * 2);
- if (num_vertices == 2)
- tmp_c.insert(*tmp_c.rbegin() + 1);
+ for (; it != c.end(); ++it) tmp_c.insert(*it * 2);
+ if (num_vertices == 2) tmp_c.insert(*tmp_c.rbegin() + 1);
c = tmp_c;
}
@@ -2020,26 +1778,21 @@ class Tangential_complex {
Simplex triangle;
Simplex::iterator it = c.begin();
for (int i = 0; it != c.end(); ++i, ++it) {
- if (booleans[i])
- triangle.insert(*it);
+ if (booleans[i]) triangle.insert(*it);
}
- star_using_triangles.push_back(
- std::make_pair(triangle, color_simplex));
+ star_using_triangles.push_back(std::make_pair(triangle, color_simplex));
} while (std::next_permutation(booleans.begin(), booleans.end()));
}
}
// For each cell
- Star_using_triangles::const_iterator it_simplex =
- star_using_triangles.begin();
- Star_using_triangles::const_iterator it_simplex_end =
- star_using_triangles.end();
+ Star_using_triangles::const_iterator it_simplex = star_using_triangles.begin();
+ Star_using_triangles::const_iterator it_simplex_end = star_using_triangles.end();
for (; it_simplex != it_simplex_end; ++it_simplex) {
const Simplex &c = it_simplex->first;
// Don't export infinite cells
- if (is_infinite(c))
- continue;
+ if (is_infinite(c)) continue;
int color_simplex = it_simplex->second;
@@ -2051,46 +1804,42 @@ class Tangential_complex {
}
os << 3 << " " << sstr_c.str();
- if (color_inconsistencies || p_simpl_to_color_in_red
- || p_simpl_to_color_in_green || p_simpl_to_color_in_blue) {
+ if (color_inconsistencies || p_simpl_to_color_in_red || p_simpl_to_color_in_green || p_simpl_to_color_in_blue) {
switch (color_simplex) {
- case 0: os << " 255 255 0";
+ case 0:
+ os << " 255 255 0";
break;
- case 1: os << " 255 0 0";
+ case 1:
+ os << " 255 0 0";
break;
- case 2: os << " 0 255 0";
+ case 2:
+ os << " 0 255 0";
break;
- case 3: os << " 0 0 255";
+ case 3:
+ os << " 0 0 255";
break;
- default: os << " " << color.str();
+ default:
+ os << " " << color.str();
break;
}
}
++num_OFF_simplices;
os << "\n";
}
- if (is_star_inconsistent)
- ++num_inconsistent_stars;
+ if (is_star_inconsistent) ++num_inconsistent_stars;
}
#ifdef DEBUG_TRACES
- std::cerr
- << "\n==========================================================\n"
- << "Export from list of stars to OFF:\n"
- << " * Number of vertices: " << m_points.size() << "\n"
- << " * Total number of maximal simplices: " << num_maximal_simplices
- << "\n";
+ std::cerr << "\n==========================================================\n"
+ << "Export from list of stars to OFF:\n"
+ << " * Number of vertices: " << m_points.size() << "\n"
+ << " * Total number of maximal simplices: " << num_maximal_simplices << "\n";
if (color_inconsistencies) {
- std::cerr
- << " * Number of inconsistent stars: "
- << num_inconsistent_stars << " ("
- << (m_points.size() > 0 ?
- 100. * num_inconsistent_stars / m_points.size() : 0.) << "%)\n"
- << " * Number of inconsistent maximal simplices: "
- << num_inconsistent_maximal_simplices << " ("
- << (num_maximal_simplices > 0 ?
- 100. * num_inconsistent_maximal_simplices / num_maximal_simplices
- : 0.) << "%)\n";
+ std::cerr << " * Number of inconsistent stars: " << num_inconsistent_stars << " ("
+ << (m_points.size() > 0 ? 100. * num_inconsistent_stars / m_points.size() : 0.) << "%)\n"
+ << " * Number of inconsistent maximal simplices: " << num_inconsistent_maximal_simplices << " ("
+ << (num_maximal_simplices > 0 ? 100. * num_inconsistent_maximal_simplices / num_maximal_simplices : 0.)
+ << "%)\n";
}
std::cerr << "==========================================================\n";
#endif
@@ -2099,13 +1848,11 @@ class Tangential_complex {
}
public:
- std::ostream &export_simplices_to_off(
- const Simplicial_complex &complex,
- std::ostream & os, std::size_t &num_OFF_simplices,
+ std::ostream &export_simplices_to_off(const Simplicial_complex &complex, std::ostream &os,
+ std::size_t &num_OFF_simplices,
Simplex_set const *p_simpl_to_color_in_red = NULL,
Simplex_set const *p_simpl_to_color_in_green = NULL,
- Simplex_set const *p_simpl_to_color_in_blue = NULL)
- const {
+ Simplex_set const *p_simpl_to_color_in_blue = NULL) const {
typedef Simplicial_complex::Simplex Simplex;
typedef Simplicial_complex::Simplex_set Simplex_set;
@@ -2114,31 +1861,24 @@ class Tangential_complex {
num_OFF_simplices = 0;
std::size_t num_maximal_simplices = 0;
- typename Simplex_set::const_iterator it_s =
- complex.simplex_range().begin();
- typename Simplex_set::const_iterator it_s_end =
- complex.simplex_range().end();
+ typename Simplex_set::const_iterator it_s = complex.simplex_range().begin();
+ typename Simplex_set::const_iterator it_s_end = complex.simplex_range().end();
// For each simplex
for (; it_s != it_s_end; ++it_s) {
Simplex c = *it_s;
++num_maximal_simplices;
int color_simplex = -1; // -1=no color, 0=yellow, 1=red, 2=green, 3=blue
- if (p_simpl_to_color_in_red &&
- std::find(
- p_simpl_to_color_in_red->begin(),
- p_simpl_to_color_in_red->end(),
- c) != p_simpl_to_color_in_red->end()) {
+ if (p_simpl_to_color_in_red && std::find(p_simpl_to_color_in_red->begin(), p_simpl_to_color_in_red->end(), c) !=
+ p_simpl_to_color_in_red->end()) {
color_simplex = 1;
} else if (p_simpl_to_color_in_green &&
- std::find(p_simpl_to_color_in_green->begin(),
- p_simpl_to_color_in_green->end(),
- c) != p_simpl_to_color_in_green->end()) {
+ std::find(p_simpl_to_color_in_green->begin(), p_simpl_to_color_in_green->end(), c) !=
+ p_simpl_to_color_in_green->end()) {
color_simplex = 2;
} else if (p_simpl_to_color_in_blue &&
- std::find(p_simpl_to_color_in_blue->begin(),
- p_simpl_to_color_in_blue->end(),
- c) != p_simpl_to_color_in_blue->end()) {
+ std::find(p_simpl_to_color_in_blue->begin(), p_simpl_to_color_in_blue->end(), c) !=
+ p_simpl_to_color_in_blue->end()) {
color_simplex = 3;
}
@@ -2148,8 +1888,7 @@ class Tangential_complex {
int num_vertices = static_cast<int>(c.size());
// Do not export smaller dimension simplices
- if (num_vertices < m_intrinsic_dim + 1)
- continue;
+ if (num_vertices < m_intrinsic_dim + 1) continue;
// If m_intrinsic_dim = 1, each point is output two times,
// so we need to multiply each index by 2
@@ -2158,10 +1897,8 @@ class Tangential_complex {
if (m_intrinsic_dim == 1) {
Simplex tmp_c;
Simplex::iterator it = c.begin();
- for (; it != c.end(); ++it)
- tmp_c.insert(*it * 2);
- if (num_vertices == 2)
- tmp_c.insert(*tmp_c.rbegin() + 1);
+ for (; it != c.end(); ++it) tmp_c.insert(*it * 2);
+ if (num_vertices == 2) tmp_c.insert(*tmp_c.rbegin() + 1);
c = tmp_c;
}
@@ -2176,11 +1913,10 @@ class Tangential_complex {
Simplex triangle;
Simplex::iterator it = c.begin();
for (int i = 0; it != c.end(); ++i, ++it) {
- if (booleans[i])
- triangle.insert(*it);
+ if (booleans[i]) triangle.insert(*it);
}
triangles.push_back(triangle);
- } while (std::next_permutation(booleans.begin(), booleans.end()));
+ } while (std::next_permutation(booleans.begin(), booleans.end()));
}
// For each cell
@@ -2188,8 +1924,7 @@ class Tangential_complex {
Triangles::const_iterator it_tri_end = triangles.end();
for (; it_tri != it_tri_end; ++it_tri) {
// Don't export infinite cells
- if (is_infinite(*it_tri))
- continue;
+ if (is_infinite(*it_tri)) continue;
os << 3 << " ";
Simplex::const_iterator it_point_idx = it_tri->begin();
@@ -2197,18 +1932,22 @@ class Tangential_complex {
os << *it_point_idx << " ";
}
- if (p_simpl_to_color_in_red || p_simpl_to_color_in_green
- || p_simpl_to_color_in_blue) {
+ if (p_simpl_to_color_in_red || p_simpl_to_color_in_green || p_simpl_to_color_in_blue) {
switch (color_simplex) {
- case 0: os << " 255 255 0";
+ case 0:
+ os << " 255 255 0";
break;
- case 1: os << " 255 0 0";
+ case 1:
+ os << " 255 0 0";
break;
- case 2: os << " 0 255 0";
+ case 2:
+ os << " 0 255 0";
break;
- case 3: os << " 0 0 255";
+ case 3:
+ os << " 0 0 255";
break;
- default: os << " 128 128 128";
+ default:
+ os << " 128 128 128";
break;
}
}
@@ -2219,13 +1958,11 @@ class Tangential_complex {
}
#ifdef DEBUG_TRACES
- std::cerr
- << "\n==========================================================\n"
- << "Export from complex to OFF:\n"
- << " * Number of vertices: " << m_points.size() << "\n"
- << " * Total number of maximal simplices: " << num_maximal_simplices
- << "\n"
- << "==========================================================\n";
+ std::cerr << "\n==========================================================\n"
+ << "Export from complex to OFF:\n"
+ << " * Number of vertices: " << m_points.size() << "\n"
+ << " * Total number of maximal simplices: " << num_maximal_simplices << "\n"
+ << "==========================================================\n";
#endif
return os;
diff --git a/src/Tangential_complex/test/CMakeLists.txt b/src/Tangential_complex/test/CMakeLists.txt
index 1948c8f6..902f19af 100644
--- a/src/Tangential_complex/test/CMakeLists.txt
+++ b/src/Tangential_complex/test/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Tangential_complex_tests)
if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
diff --git a/src/Witness_complex/example/CMakeLists.txt b/src/Witness_complex/example/CMakeLists.txt
index a8231392..3d838c0d 100644
--- a/src/Witness_complex/example/CMakeLists.txt
+++ b/src/Witness_complex/example/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Witness_complex_examples)
add_executable ( Witness_complex_example_nearest_landmark_table example_nearest_landmark_table.cpp )
diff --git a/src/Witness_complex/example/generators.h b/src/Witness_complex/example/generators.h
index 81566824..4b755daa 100644
--- a/src/Witness_complex/example/generators.h
+++ b/src/Witness_complex/example/generators.h
@@ -20,8 +20,8 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-#ifndef EXAMPLE_WITNESS_COMPLEX_GENERATORS_H_
-#define EXAMPLE_WITNESS_COMPLEX_GENERATORS_H_
+#ifndef GENERATORS_H_
+#define GENERATORS_H_
#include <CGAL/Epick_d.h>
#include <CGAL/point_generators_d.h>
@@ -163,4 +163,4 @@ void generate_points_torus(Point_Vector& W, int nbP, int dim) {
}
}
-#endif // EXAMPLE_WITNESS_COMPLEX_GENERATORS_H_
+#endif // GENERATORS_H_
diff --git a/src/Witness_complex/test/CMakeLists.txt b/src/Witness_complex/test/CMakeLists.txt
index 0b523eaf..58ac60c5 100644
--- a/src/Witness_complex/test/CMakeLists.txt
+++ b/src/Witness_complex/test/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Witness_complex_tests)
include(GUDHI_test_coverage)
diff --git a/src/Witness_complex/utilities/CMakeLists.txt b/src/Witness_complex/utilities/CMakeLists.txt
index 125a41ff..ce5e29f2 100644
--- a/src/Witness_complex/utilities/CMakeLists.txt
+++ b/src/Witness_complex/utilities/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Witness_complex_utilities)
# CGAL and Eigen3 are required for Euclidean version of Witness
diff --git a/src/Witness_complex/utilities/strong_witness_persistence.cpp b/src/Witness_complex/utilities/strong_witness_persistence.cpp
index 9d23df74..f386e992 100644
--- a/src/Witness_complex/utilities/strong_witness_persistence.cpp
+++ b/src/Witness_complex/utilities/strong_witness_persistence.cpp
@@ -151,6 +151,6 @@ void program_options(int argc, char* argv[], int& nbL, std::string& file_name, s
std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
std::cout << visible << std::endl;
- std::abort();
+ exit(-1);
}
}
diff --git a/src/Witness_complex/utilities/weak_witness_persistence.cpp b/src/Witness_complex/utilities/weak_witness_persistence.cpp
index 1315d2ba..ea00cfe7 100644
--- a/src/Witness_complex/utilities/weak_witness_persistence.cpp
+++ b/src/Witness_complex/utilities/weak_witness_persistence.cpp
@@ -151,6 +151,6 @@ void program_options(int argc, char* argv[], int& nbL, std::string& file_name, s
std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
std::cout << visible << std::endl;
- std::abort();
+ exit(-1);
}
}
diff --git a/src/cmake/modules/FindCython.cmake b/src/cmake/modules/FindCython.cmake
deleted file mode 100644
index 04aed1f8..00000000
--- a/src/cmake/modules/FindCython.cmake
+++ /dev/null
@@ -1,44 +0,0 @@
-# Find the Cython compiler.
-#
-# This code sets the following variables:
-#
-# CYTHON_EXECUTABLE
-#
-# See also UseCython.cmake
-
-#=============================================================================
-# Copyright 2011 Kitware, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#=============================================================================
-
-# Use the Cython executable that lives next to the Python executable
-# if it is a local installation.
-find_package( PythonInterp )
-if( PYTHONINTERP_FOUND )
- get_filename_component( _python_path ${PYTHON_EXECUTABLE} PATH )
- find_program( CYTHON_EXECUTABLE
- NAMES cython cython.bat cython3
- HINTS ${_python_path}
- )
-else()
- find_program( CYTHON_EXECUTABLE
- NAMES cython cython.bat cython3
- )
-endif()
-
-
-include( FindPackageHandleStandardArgs )
-FIND_PACKAGE_HANDLE_STANDARD_ARGS( Cython REQUIRED_VARS CYTHON_EXECUTABLE )
-
-mark_as_advanced( CYTHON_EXECUTABLE )
diff --git a/src/cmake/modules/GUDHI_compilation_flags.cmake b/src/cmake/modules/GUDHI_compilation_flags.cmake
new file mode 100644
index 00000000..a01d6e13
--- /dev/null
+++ b/src/cmake/modules/GUDHI_compilation_flags.cmake
@@ -0,0 +1,72 @@
+# This file manages the compilation flags required by GUDHI
+
+include(TestCXXAcceptsFlag)
+include(CheckCXXSourceCompiles)
+
+# add a compiler flag only if it is accepted
+macro(add_cxx_compiler_flag _flag)
+ string(REPLACE "-" "_" _flag_var ${_flag})
+ check_cxx_accepts_flag("${_flag}" CXX_COMPILER_${_flag_var}_OK)
+ if(CXX_COMPILER_${_flag_var}_OK)
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${_flag}")
+ endif()
+endmacro()
+
+function(can_cgal_use_cxx11_thread_local)
+ # This is because of https://github.com/CGAL/cgal/blob/master/Installation/include/CGAL/tss.h
+ # CGAL is using boost thread if thread_local is not ready (requires XCode 8 for Mac).
+ # The test in https://github.com/CGAL/cgal/blob/master/Installation/include/CGAL/config.h
+ # #if __has_feature(cxx_thread_local) || \
+ # ( (__GNUC__ * 100 + __GNUC_MINOR__) >= 408 && __cplusplus >= 201103L ) || \
+ # ( _MSC_VER >= 1900 )
+ # #define CGAL_CAN_USE_CXX11_THREAD_LOCAL
+ # #endif
+ set(CGAL_CAN_USE_CXX11_THREAD_LOCAL "
+ int main() {
+ #ifndef __has_feature
+ #define __has_feature(x) 0 // Compatibility with non-clang compilers.
+ #endif
+ #if __has_feature(cxx_thread_local) || \
+ ( (__GNUC__ * 100 + __GNUC_MINOR__) >= 408 && __cplusplus >= 201103L ) || \
+ ( _MSC_VER >= 1900 )
+ bool has_feature_thread_local = true;
+ #else
+  // Deliberate compilation error for the CMake test: has_feature_thread_local is not defined
+ #endif
+ bool result = has_feature_thread_local;
+ } ")
+ check_cxx_source_compiles("${CGAL_CAN_USE_CXX11_THREAD_LOCAL}" CGAL_CAN_USE_CXX11_THREAD_LOCAL_RESULT)
+endfunction()
+
+set (CMAKE_CXX_STANDARD 11)
+
+enable_testing()
+
+if(MSVC)
+ # Turn off some VC++ warnings
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4267 /wd4668 /wd4311 /wd4800 /wd4820 /wd4503 /wd4244 /wd4345 /wd4996 /wd4396 /wd4018")
+endif()
+
+add_cxx_compiler_flag("-Wall")
+
+if (DEBUG_TRACES)
+ # For programs to be more verbose
+ message(STATUS "DEBUG_TRACES are activated")
+ add_definitions(-DDEBUG_TRACES)
+endif()
+
+set(GUDHI_CAN_USE_CXX11_THREAD_LOCAL "
+ int main() {
+ thread_local int result = 0;
+ return result;
+ } ")
+check_cxx_source_compiles("${GUDHI_CAN_USE_CXX11_THREAD_LOCAL}" GUDHI_CAN_USE_CXX11_THREAD_LOCAL_RESULT)
+if (GUDHI_CAN_USE_CXX11_THREAD_LOCAL_RESULT)
+ add_definitions(-DGUDHI_CAN_USE_CXX11_THREAD_LOCAL)
+endif()
+
+if(CMAKE_BUILD_TYPE MATCHES Debug)
+ message("++ Debug compilation flags are: ${CMAKE_CXX_FLAGS} ${CMAKE_CXX_FLAGS_DEBUG}")
+else()
+ message("++ Release compilation flags are: ${CMAKE_CXX_FLAGS} ${CMAKE_CXX_FLAGS_RELEASE}")
+endif()
diff --git a/src/cmake/modules/GUDHI_doxygen_target.cmake b/src/cmake/modules/GUDHI_doxygen_target.cmake
index f3e2d9f5..9e10e566 100644
--- a/src/cmake/modules/GUDHI_doxygen_target.cmake
+++ b/src/cmake/modules/GUDHI_doxygen_target.cmake
@@ -3,14 +3,17 @@ find_package(Doxygen)
if(DOXYGEN_FOUND)
# configure_file(${CMAKE_CURRENT_SOURCE_DIR}/Doxyfile.in ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile @ONLY)
- #starting from cmake 3.9 the usage of DOXYGEN_EXECUTABLE is deprecated
+ # starting from cmake 3.9 the usage of DOXYGEN_EXECUTABLE is deprecated
if(TARGET Doxygen::doxygen)
get_property(DOXYGEN_EXECUTABLE TARGET Doxygen::doxygen PROPERTY IMPORTED_LOCATION)
endif()
add_custom_target(doxygen ${DOXYGEN_EXECUTABLE} ${GUDHI_USER_VERSION_DIR}/Doxyfile
WORKING_DIRECTORY ${GUDHI_USER_VERSION_DIR}
- DEPENDS ${GUDHI_USER_VERSION_DIR}/Doxyfile ${GUDHI_DOXYGEN_DEPENDENCY}
COMMENT "Generating API documentation with Doxygen in ${GUDHI_USER_VERSION_DIR}/doc/html/" VERBATIM)
+ if(TARGET user_version)
+    # In the dev version, the doxygen target depends on the user_version target (which does not exist in the user version)
+ add_dependencies(doxygen user_version)
+ endif()
endif(DOXYGEN_FOUND)
diff --git a/src/cmake/modules/GUDHI_third_party_libraries.cmake b/src/cmake/modules/GUDHI_third_party_libraries.cmake
index a008dd0a..f03c2177 100644
--- a/src/cmake/modules/GUDHI_third_party_libraries.cmake
+++ b/src/cmake/modules/GUDHI_third_party_libraries.cmake
@@ -8,11 +8,9 @@ endif(NOT Boost_FOUND)
find_package(GMP)
if(GMP_FOUND)
- message(STATUS "GMP_LIBRARIES = ${GMP_LIBRARIES}")
INCLUDE_DIRECTORIES(${GMP_INCLUDE_DIR})
find_package(GMPXX)
if(GMPXX_FOUND)
- message(STATUS "GMPXX_LIBRARIES = ${GMPXX_LIBRARIES}")
INCLUDE_DIRECTORIES(${GMPXX_INCLUDE_DIR})
endif()
endif()
@@ -54,12 +52,12 @@ if(CGAL_FOUND)
endforeach(CGAL_INCLUDE_DIR ${CGAL_INCLUDE_DIRS})
endif(NOT CGAL_VERSION VERSION_GREATER 4.9.0)
- if (NOT CGAL_VERSION VERSION_GREATER 4.11.0)
+ if (CGAL_VERSION VERSION_LESS 4.11.0)
# For dev version
include_directories(BEFORE "src/common/include/gudhi_patches")
# For user version
include_directories(BEFORE "include/gudhi_patches")
- endif (NOT CGAL_VERSION VERSION_GREATER 4.11.0)
+ endif ()
endif()
endif()
@@ -79,7 +77,6 @@ endif(WITH_GUDHI_USE_TBB)
set(CGAL_WITH_EIGEN3_VERSION 0.0.0)
find_package(Eigen3 3.1.0)
if (EIGEN3_FOUND)
- message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.")
include( ${EIGEN3_USE_FILE} )
set(CGAL_WITH_EIGEN3_VERSION ${CGAL_VERSION})
endif (EIGEN3_FOUND)
@@ -119,7 +116,34 @@ message(STATUS "boost library dirs:" ${Boost_LIBRARY_DIRS})
# Find the correct Python interpreter.
# Can be set with -DPYTHON_EXECUTABLE=/usr/bin/python3 or -DPython_ADDITIONAL_VERSIONS=3 for instance.
-find_package(Cython)
+find_package( PythonInterp )
+
+# find_python_module tries to import module in Python interpreter and to retrieve its version number
+# returns ${PYTHON_MODULE_NAME_UP}_VERSION and ${PYTHON_MODULE_NAME_UP}_FOUND
+function( find_python_module PYTHON_MODULE_NAME )
+ string(TOUPPER ${PYTHON_MODULE_NAME} PYTHON_MODULE_NAME_UP)
+ execute_process(
+ COMMAND ${PYTHON_EXECUTABLE} -c "import ${PYTHON_MODULE_NAME}; print(${PYTHON_MODULE_NAME}.__version__)"
+ RESULT_VARIABLE PYTHON_MODULE_RESULT
+ OUTPUT_VARIABLE PYTHON_MODULE_VERSION
+ ERROR_VARIABLE PYTHON_MODULE_ERROR)
+ if(PYTHON_MODULE_RESULT EQUAL 0)
+ # Remove carriage return
+ string(STRIP ${PYTHON_MODULE_VERSION} PYTHON_MODULE_VERSION)
+ set(${PYTHON_MODULE_NAME_UP}_VERSION ${PYTHON_MODULE_VERSION} PARENT_SCOPE)
+ set(${PYTHON_MODULE_NAME_UP}_FOUND TRUE PARENT_SCOPE)
+ else()
+ unset(${PYTHON_MODULE_NAME_UP}_VERSION PARENT_SCOPE)
+ set(${PYTHON_MODULE_NAME_UP}_FOUND FALSE PARENT_SCOPE)
+ endif()
+endfunction( find_python_module )
+
+if( PYTHONINTERP_FOUND )
+ find_python_module("cython")
+ find_python_module("pytest")
+ find_python_module("matplotlib")
+ find_python_module("numpy")
+endif()
if(NOT GUDHI_CYTHON_PATH)
message(FATAL_ERROR "ERROR: GUDHI_CYTHON_PATH is not valid.")
diff --git a/src/cmake/modules/GUDHI_user_version_target.cmake b/src/cmake/modules/GUDHI_user_version_target.cmake
index 4abc2574..d43a6fa6 100644
--- a/src/cmake/modules/GUDHI_user_version_target.cmake
+++ b/src/cmake/modules/GUDHI_user_version_target.cmake
@@ -1,94 +1,95 @@
-# Some functionnalities requires CMake 2.8.11 minimum
-if (NOT CMAKE_VERSION VERSION_LESS 2.8.11)
+# Definition of the custom target user_version
+add_custom_target(user_version)
- # Definition of the custom target user_version
- add_custom_target(user_version)
-
- if(DEFINED USER_VERSION_DIR)
- # set the GUDHI_USER_VERSION_DIR with USER_VERSION_DIR defined by the user
- set(GUDHI_USER_VERSION_DIR ${CMAKE_CURRENT_BINARY_DIR}/${USER_VERSION_DIR})
- else()
- # set the GUDHI_USER_VERSION_DIR with timestamp and Gudhi version number
- string(TIMESTAMP DATE_AND_TIME "%Y-%m-%d-%H-%M-%S")
- set(GUDHI_USER_VERSION_DIR ${CMAKE_CURRENT_BINARY_DIR}/${DATE_AND_TIME}_GUDHI_${GUDHI_VERSION})
- endif()
+if(DEFINED USER_VERSION_DIR)
+ # set the GUDHI_USER_VERSION_DIR with USER_VERSION_DIR defined by the user
+ set(GUDHI_USER_VERSION_DIR ${CMAKE_CURRENT_BINARY_DIR}/${USER_VERSION_DIR})
+else()
+ # set the GUDHI_USER_VERSION_DIR with timestamp and Gudhi version number
+ string(TIMESTAMP DATE_AND_TIME "%Y-%m-%d-%H-%M-%S")
+ set(GUDHI_USER_VERSION_DIR ${CMAKE_CURRENT_BINARY_DIR}/${DATE_AND_TIME}_GUDHI_${GUDHI_VERSION})
+endif()
- set(GUDHI_DOXYGEN_DEPENDENCY user_version)
+add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ make_directory ${GUDHI_USER_VERSION_DIR}
+ COMMENT "user_version creation in ${GUDHI_USER_VERSION_DIR}")
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- make_directory ${GUDHI_USER_VERSION_DIR}
- COMMENT "user_version creation in ${GUDHI_USER_VERSION_DIR}")
-
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy ${CMAKE_SOURCE_DIR}/Conventions.txt ${GUDHI_USER_VERSION_DIR}/Conventions.txt)
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy ${CMAKE_SOURCE_DIR}/README ${GUDHI_USER_VERSION_DIR}/README)
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy ${CMAKE_SOURCE_DIR}/COPYING ${GUDHI_USER_VERSION_DIR}/COPYING)
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy ${CMAKE_SOURCE_DIR}/src/CMakeLists.txt ${GUDHI_USER_VERSION_DIR}/CMakeLists.txt)
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy ${CMAKE_SOURCE_DIR}/src/Doxyfile ${GUDHI_USER_VERSION_DIR}/Doxyfile)
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy ${CMAKE_SOURCE_DIR}/src/GUDHIConfigVersion.cmake.in ${GUDHI_USER_VERSION_DIR}/GUDHIConfigVersion.cmake.in)
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy ${CMAKE_SOURCE_DIR}/src/GUDHIConfig.cmake.in ${GUDHI_USER_VERSION_DIR}/GUDHIConfig.cmake.in)
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy ${CMAKE_SOURCE_DIR}/CMakeGUDHIVersion.txt ${GUDHI_USER_VERSION_DIR}/CMakeGUDHIVersion.txt)
-
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy_directory ${CMAKE_SOURCE_DIR}/biblio ${GUDHI_USER_VERSION_DIR}/biblio)
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy_directory ${CMAKE_SOURCE_DIR}/src/cython ${GUDHI_USER_VERSION_DIR}/cython)
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy_directory ${CMAKE_SOURCE_DIR}/data ${GUDHI_USER_VERSION_DIR}/data)
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy_directory ${CMAKE_SOURCE_DIR}/src/cmake ${GUDHI_USER_VERSION_DIR}/cmake)
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy_directory ${CMAKE_SOURCE_DIR}/src/GudhUI ${GUDHI_USER_VERSION_DIR}/GudhUI)
-
- set(GUDHI_DIRECTORIES "doc;example;concept;utilities")
- if (NOT CGAL_VERSION VERSION_GREATER 4.11.0)
- set(GUDHI_INCLUDE_DIRECTORIES "include/gudhi;include/gudhi_patches")
- else ()
- set(GUDHI_INCLUDE_DIRECTORIES "include/gudhi")
- endif ()
+foreach(GUDHI_MODULE ${GUDHI_MODULES_FULL_LIST})
+ set(GUDHI_DOXYGEN_IMAGE_PATH "${GUDHI_DOXYGEN_IMAGE_PATH} doc/${GUDHI_MODULE}/ \\ \n")
+endforeach(GUDHI_MODULE ${GUDHI_MODULES_FULL_LIST})
- foreach(GUDHI_MODULE ${GUDHI_MODULES_FULL_LIST})
- foreach(GUDHI_DIRECTORY ${GUDHI_DIRECTORIES})
- # Find files
- file(GLOB GUDHI_FILES ${CMAKE_SOURCE_DIR}/src/${GUDHI_MODULE}/${GUDHI_DIRECTORY}/*)
+# Generate the Doxyfile for the user version from src/Doxyfile.in
+configure_file(${CMAKE_SOURCE_DIR}/src/Doxyfile.in "${CMAKE_CURRENT_BINARY_DIR}/src/Doxyfile" @ONLY)
- foreach(GUDHI_FILE ${GUDHI_FILES})
- get_filename_component(GUDHI_FILE_NAME ${GUDHI_FILE} NAME)
- # GUDHI_FILE can be a file or a directory
- if(IS_DIRECTORY ${GUDHI_FILE})
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy_directory ${GUDHI_FILE} ${GUDHI_USER_VERSION_DIR}/${GUDHI_DIRECTORY}/${GUDHI_MODULE}/${GUDHI_FILE_NAME})
- else()
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy ${GUDHI_FILE} ${GUDHI_USER_VERSION_DIR}/${GUDHI_DIRECTORY}/${GUDHI_MODULE}/${GUDHI_FILE_NAME})
- endif()
- endforeach()
- endforeach(GUDHI_DIRECTORY ${GUDHI_DIRECTORIES})
+add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy ${CMAKE_CURRENT_BINARY_DIR}/src/Doxyfile ${GUDHI_USER_VERSION_DIR}/Doxyfile)
- foreach(GUDHI_INCLUDE_DIRECTORY ${GUDHI_INCLUDE_DIRECTORIES})
- # include files
- file(GLOB GUDHI_INCLUDE_FILES ${CMAKE_SOURCE_DIR}/src/${GUDHI_MODULE}/${GUDHI_INCLUDE_DIRECTORY}/*)
+add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy ${CMAKE_SOURCE_DIR}/Conventions.txt ${GUDHI_USER_VERSION_DIR}/Conventions.txt)
+add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy ${CMAKE_SOURCE_DIR}/README ${GUDHI_USER_VERSION_DIR}/README)
+add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy ${CMAKE_SOURCE_DIR}/COPYING ${GUDHI_USER_VERSION_DIR}/COPYING)
+add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy ${CMAKE_SOURCE_DIR}/src/CMakeLists.txt ${GUDHI_USER_VERSION_DIR}/CMakeLists.txt)
+add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy ${CMAKE_SOURCE_DIR}/src/GUDHIConfigVersion.cmake.in ${GUDHI_USER_VERSION_DIR}/GUDHIConfigVersion.cmake.in)
+add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy ${CMAKE_SOURCE_DIR}/src/GUDHIConfig.cmake.in ${GUDHI_USER_VERSION_DIR}/GUDHIConfig.cmake.in)
+add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy ${CMAKE_SOURCE_DIR}/CMakeGUDHIVersion.txt ${GUDHI_USER_VERSION_DIR}/CMakeGUDHIVersion.txt)
- foreach(GUDHI_INCLUDE_FILE ${GUDHI_INCLUDE_FILES})
- get_filename_component(GUDHI_INCLUDE_FILE_NAME ${GUDHI_INCLUDE_FILE} NAME)
- # GUDHI_INCLUDE_FILE can be a file or a directory
- if(IS_DIRECTORY ${GUDHI_INCLUDE_FILE})
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy_directory ${GUDHI_INCLUDE_FILE} ${GUDHI_USER_VERSION_DIR}/${GUDHI_INCLUDE_DIRECTORY}/${GUDHI_INCLUDE_FILE_NAME})
- else()
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy ${GUDHI_INCLUDE_FILE} ${GUDHI_USER_VERSION_DIR}/${GUDHI_INCLUDE_DIRECTORY}/${GUDHI_INCLUDE_FILE_NAME})
- endif()
- endforeach()
- endforeach(GUDHI_INCLUDE_DIRECTORY ${GUDHI_INCLUDE_DIRECTORIES})
+add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy_directory ${CMAKE_SOURCE_DIR}/biblio ${GUDHI_USER_VERSION_DIR}/biblio)
+add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy_directory ${CMAKE_SOURCE_DIR}/src/cython ${GUDHI_USER_VERSION_DIR}/cython)
+add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy_directory ${CMAKE_SOURCE_DIR}/data ${GUDHI_USER_VERSION_DIR}/data)
+add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy_directory ${CMAKE_SOURCE_DIR}/src/cmake ${GUDHI_USER_VERSION_DIR}/cmake)
+add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy_directory ${CMAKE_SOURCE_DIR}/src/GudhUI ${GUDHI_USER_VERSION_DIR}/GudhUI)
- endforeach(GUDHI_MODULE ${GUDHI_MODULES_FULL_LIST})
+set(GUDHI_DIRECTORIES "doc;example;concept;utilities")
+if (CGAL_VERSION VERSION_LESS 4.11.0)
+ set(GUDHI_INCLUDE_DIRECTORIES "include/gudhi;include/gudhi_patches")
+else ()
+ set(GUDHI_INCLUDE_DIRECTORIES "include/gudhi")
+endif ()
-endif()
+foreach(GUDHI_MODULE ${GUDHI_MODULES_FULL_LIST})
+ foreach(GUDHI_DIRECTORY ${GUDHI_DIRECTORIES})
+ # Find files
+ file(GLOB GUDHI_FILES ${CMAKE_SOURCE_DIR}/src/${GUDHI_MODULE}/${GUDHI_DIRECTORY}/*)
+
+ foreach(GUDHI_FILE ${GUDHI_FILES})
+ get_filename_component(GUDHI_FILE_NAME ${GUDHI_FILE} NAME)
+ # GUDHI_FILE can be a file or a directory
+ if(IS_DIRECTORY ${GUDHI_FILE})
+ add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy_directory ${GUDHI_FILE} ${GUDHI_USER_VERSION_DIR}/${GUDHI_DIRECTORY}/${GUDHI_MODULE}/${GUDHI_FILE_NAME})
+ else()
+ add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy ${GUDHI_FILE} ${GUDHI_USER_VERSION_DIR}/${GUDHI_DIRECTORY}/${GUDHI_MODULE}/${GUDHI_FILE_NAME})
+ endif()
+ endforeach()
+ endforeach(GUDHI_DIRECTORY ${GUDHI_DIRECTORIES})
+
+ foreach(GUDHI_INCLUDE_DIRECTORY ${GUDHI_INCLUDE_DIRECTORIES})
+ # include files
+ file(GLOB GUDHI_INCLUDE_FILES ${CMAKE_SOURCE_DIR}/src/${GUDHI_MODULE}/${GUDHI_INCLUDE_DIRECTORY}/*)
+
+ foreach(GUDHI_INCLUDE_FILE ${GUDHI_INCLUDE_FILES})
+ get_filename_component(GUDHI_INCLUDE_FILE_NAME ${GUDHI_INCLUDE_FILE} NAME)
+ # GUDHI_INCLUDE_FILE can be a file or a directory
+ if(IS_DIRECTORY ${GUDHI_INCLUDE_FILE})
+ add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy_directory ${GUDHI_INCLUDE_FILE} ${GUDHI_USER_VERSION_DIR}/${GUDHI_INCLUDE_DIRECTORY}/${GUDHI_INCLUDE_FILE_NAME})
+ else()
+ add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy ${GUDHI_INCLUDE_FILE} ${GUDHI_USER_VERSION_DIR}/${GUDHI_INCLUDE_DIRECTORY}/${GUDHI_INCLUDE_FILE_NAME})
+ endif()
+ endforeach()
+ endforeach(GUDHI_INCLUDE_DIRECTORY ${GUDHI_INCLUDE_DIRECTORIES})
+
+endforeach(GUDHI_MODULE ${GUDHI_MODULES_FULL_LIST}) \ No newline at end of file
diff --git a/src/common/doc/header.html b/src/common/doc/header.html
index 2f54e68d..c12d2816 100644
--- a/src/common/doc/header.html
+++ b/src/common/doc/header.html
@@ -9,7 +9,7 @@
<!--BEGIN PROJECT_NAME--><title>$projectname: $title</title><!--END PROJECT_NAME-->
<!--BEGIN !PROJECT_NAME--><title>$title</title><!--END !PROJECT_NAME-->
<!-- GUDHI website css for header BEGIN -->
-<link rel="stylesheet" type="text/css" href="http://gudhi.gforge.inria.fr/assets/css/styles_feeling_responsive.css" />
+<link rel="stylesheet" type="text/css" href="https://gudhi.inria.fr/assets/css/styles_feeling_responsive.css" />
<!-- GUDHI website css for header END -->
<link href="$relpath^tabs.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="$relpath^jquery.js"></script>
@@ -24,60 +24,61 @@ $extrastylesheet
<!-- GUDHI website header BEGIN -->
<div id="navigation" class="sticky">
- <nav class="top-bar" role="navigation" data-topbar>
- <ul class="title-area">
- <li class="name">
- <h1 class="show-for-small-only"><a href="http://gudhi.gforge.inria.fr" class="icon-tree"> GUDHI C++ library</a></h1>
- </li>
- <!-- Remove the class "menu-icon" to get rid of menu icon. Take out "Menu" to just have icon alone -->
- <li class="toggle-topbar menu-icon"><a href="#"><span>Navigation</span></a></li>
- </ul>
- <section class="top-bar-section">
- <ul class="right">
- <li class="divider"></li>
- <li><a href="http://gudhi.gforge.inria.fr/contact/">Contact</a></li>
- </ul>
- <ul class="left">
- <li><a href="http://gudhi.gforge.inria.fr/"> <img src="http://gudhi.gforge.inria.fr/assets/img/home.png" alt="&nbsp;&nbsp;GUDHI">&nbsp;&nbsp;GUDHI </a></li>
- <li class="divider"></li>
- <li class="has-dropdown">
- <a href="#">Project</a>
- <ul class="dropdown">
- <li><a href="http://gudhi.gforge.inria.fr/people/">People</a></li>
- <li><a href="http://gudhi.gforge.inria.fr/keepintouch/">Keep in touch</a></li>
- <li><a href="http://gudhi.gforge.inria.fr/partners/">Partners and Funding</a></li>
- <li><a href="http://gudhi.gforge.inria.fr/relatedprojects/">Related projects</a></li>
- <li><a href="http://gudhi.gforge.inria.fr/theyaretalkingaboutus/">They are talking about us</a></li>
- </ul>
- </li>
- <li class="divider"></li>
- <li class="has-dropdown">
- <a href="#">Download</a>
- <ul class="dropdown">
- <li><a href="http://gudhi.gforge.inria.fr/licensing/">Licensing</a></li>
- <li><a href="https://gforge.inria.fr/frs/?group_id=3865" target="_blank">Get the sources</a></li>
- <li><a href="https://gforge.inria.fr/frs/download.php/file/37365/2018-02-01-16-59-31_GUDHI_2.1.0_OSX_UTILS.tar.gz" target="_blank">Utils for Mac OSx</a></li>
- <li><a href="https://gforge.inria.fr/frs/download.php/file/37366/2018-01-31-09-25-53_GUDHI_2.1.0_WIN64_UTILS.zip" target="_blank">Utils for Win x64</a></li>
- </ul>
- </li>
- <li class="divider"></li>
- <li class="has-dropdown">
- <a href="#">Documentation</a>
- <ul class="dropdown">
- <li><a href="http://gudhi.gforge.inria.fr/doc/latest/">C++ documentation</a></li>
- <li><a href="http://gudhi.gforge.inria.fr/doc/latest/installation.html">C++ installation manual</a></li>
- <li><a href="http://gudhi.gforge.inria.fr/python/latest/">Python documentation</a></li>
- <li><a href="http://gudhi.gforge.inria.fr/python/latest/installation.html">Python installation manual</a></li>
- <li><a href="http://gudhi.gforge.inria.fr/utils/">Utilities</a></li>
- <li><a href="http://bertrand.michel.perso.math.cnrs.fr/Enseignements/TDA-Gudhi-Python.html" target="_blank">Tutorial</a></li>
- </ul>
- </li>
- <li class="divider"></li>
- <li><a href="http://gudhi.gforge.inria.fr/interfaces/">Interfaces</a></li>
- <li class="divider"></li>
- </ul>
- </section>
- </nav>
+ <nav class="top-bar" role="navigation" data-topbar>
+ <ul class="title-area">
+ <li class="name">
+ <h1 class="show-for-small-only"><a href="" class="icon-tree"> GUDHI C++ library</a></h1>
+ </li>
+ <!-- Remove the class "menu-icon" to get rid of menu icon. Take out "Menu" to just have icon alone -->
+ <li class="toggle-topbar menu-icon"><a href="#"><span>Navigation</span></a></li>
+ </ul>
+ <section class="top-bar-section">
+ <ul class="right">
+ <li class="divider"></li>
+ <li><a href="/contact/">Contact</a></li>
+ </ul>
+ <ul class="left">
+ <li><a href="/"> <img src="/assets/img/home.png" alt="&nbsp;&nbsp;GUDHI">&nbsp;&nbsp;GUDHI </a></li>
+ <li class="divider"></li>
+ <li class="has-dropdown">
+ <a href="#">Project</a>
+ <ul class="dropdown">
+ <li><a href="/people/">People</a></li>
+ <li><a href="/keepintouch/">Keep in touch</a></li>
+ <li><a href="/partners/">Partners and Funding</a></li>
+ <li><a href="/relatedprojects/">Related projects</a></li>
+ <li><a href="/theyaretalkingaboutus/">They are talking about us</a></li>
+ </ul>
+ </li>
+ <li class="divider"></li>
+ <li class="has-dropdown">
+ <a href="#">Download</a>
+ <ul class="dropdown">
+ <li><a href="/licensing/">Licensing</a></li>
+ <li><a href="https://gforge.inria.fr/frs/download.php/latestzip/5253/library-latest.zip" target="_blank">Get the latest sources</a></li>
+ <li><a href="https://gforge.inria.fr/frs/download.php/latestzip/5280/utils_osx-latest.zip" target="_blank">Utils for Mac OSx</a></li>
+ <li><a href="https://gforge.inria.fr/frs/download.php/latestzip/5279/utils_win64-latest.zip" target="_blank">Utils for Win x64</a></li>
+ </ul>
+ </li>
+ <li class="divider"></li>
+ <li class="has-dropdown">
+ <a href="#">Documentation</a>
+ <ul class="dropdown">
+ <li><a href="/doc/latest/">C++ documentation</a></li>
+ <li><a href="/doc/latest/installation.html">C++ installation manual</a></li>
+ <li><a href="/python/latest/">Python documentation</a></li>
+ <li><a href="/python/latest/installation.html">Python installation manual</a></li>
+ <li><a href="/utils/">Utilities</a></li>
+ <li><a href="/tutorials/">Tutorials</a></li>
+ <li><a href="/dockerfile/">Dockerfile</a></li>
+ </ul>
+ </li>
+ <li class="divider"></li>
+ <li><a href="/interfaces/">Interfaces</a></li>
+ <li class="divider"></li>
+ </ul>
+ </section>
+ </nav>
</div><!-- /#navigation -->
<!-- GUDHI website header BEGIN -->
diff --git a/src/common/doc/installation.h b/src/common/doc/installation.h
index 25675cc5..c27e4f56 100644
--- a/src/common/doc/installation.h
+++ b/src/common/doc/installation.h
@@ -2,21 +2,34 @@
* \tableofcontents
* As GUDHI is a header only library, there is no need to install the library.
*
- * Examples of GUDHI headers inclusion can be found in \ref demos.
+ * Examples of how to include GUDHI headers can be found in \ref utilities.
*
* \section compiling Compiling
- * The library uses c++11 and requires <a target="_blank" href="http://www.boost.org/">Boost</a> with version 1.48.0 or
- * more recent. It is a multi-platform library and compiles on Linux, Mac OSX and Visual Studio 2015.
+ * The library uses C++11 and requires <a target="_blank" href="http://www.boost.org/">Boost</a> &ge; 1.48.0
+ * and <a target="_blank" href="https://www.cmake.org/">CMake</a> &ge; 3.1.
+ * It is a multi-platform library and compiles on Linux, Mac OSX and Visual Studio 2015.
*
- * \subsection demos Demos and examples
- * To build the demos and examples, run the following commands in a terminal:
+ * \subsection utilities Utilities and examples
+ * To build the utilities, run the following commands in a terminal:
\verbatim cd /path-to-gudhi/
mkdir build
cd build/
cmake ..
make \endverbatim
- * A list of examples is available <a href="examples.html">here</a>.
+ * By default, examples are disabled. You can activate their compilation with
+ * <a href="https://cmake.org/cmake/help/v3.0/manual/ccmake.1.html">ccmake</a> (on Linux and Mac OSX),
+ * <a href="https://cmake.org/cmake/help/v3.0/manual/cmake-gui.1.html">cmake-gui</a> (on Windows) or y mofifying the
+ * cmake command as follows :
+\verbatim cmake -DWITH_GUDHI_EXAMPLE=ON ..
+make \endverbatim
+ * A list of utilities and examples is available <a href="examples.html">here</a>.
*
+ * \subsection libraryinstallation Installation
+ * To install the library (headers and activated utilities), run the following command in a terminal:
+ * \verbatim make install \endverbatim
+ * Depending on the operating system and on
+ * <a href="https://cmake.org/cmake/help/v3.0/variable/CMAKE_INSTALL_PREFIX.html">CMAKE_INSTALL_PREFIX</a>, this action may require administrator (sudo) privileges.
+ *
* \subsection testsuites Test suites
* To test your build, run the following command in a terminal:
* \verbatim make test \endverbatim
@@ -30,6 +43,10 @@ make doxygen
# You can customize the directory name by calling `cmake -DUSER_VERSION_DIR=/my/custom/folder`
\endverbatim
*
+ * \subsection helloworld Hello world !
+ * The <a target="_blank" href="https://gitlab.inria.fr/GUDHI/hello-gudhi-world">Hello world for GUDHI</a>
+ * project is an example that helps developers set up their own C++ project on top of the GUDHI library.
+ *
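A minimal program in the spirit of that project might look like the sketch below. It is not taken from the hello-gudhi-world repository; it only assumes the default Gudhi::Simplex_tree<> interface:

#include <gudhi/Simplex_tree.h>
#include <iostream>
#include <vector>

int main() {
  Gudhi::Simplex_tree<> st;
  std::vector<int> triangle = {0, 1, 2};
  st.insert_simplex_and_subfaces(triangle, 0.);  // a filled triangle and all of its faces
  std::cout << st.num_vertices() << " vertices, " << st.num_simplices() << " simplices\n";
  return 0;
}

Since GUDHI is header-only, compiling such a program only needs the GUDHI and Boost include paths.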
* \section optionallibrary Optional third-party library
* \subsection gmp GMP
* The multi-field persistent homology algorithm requires GMP which is a free library for arbitrary-precision
diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h
index b3e9ea03..db1e80ce 100644
--- a/src/common/doc/main_page.h
+++ b/src/common/doc/main_page.h
@@ -42,6 +42,22 @@
</td>
</tr>
</table>
+ \subsection CechComplexDataStructure Čech complex
+ \image html "cech_complex_representation.png" "Čech complex representation"
+<table border="0">
+ <tr>
+ <td width="25%">
+ <b>Author:</b> Vincent Rouvreau<br>
+ <b>Introduced in:</b> GUDHI 2.2.0<br>
+ <b>Copyright:</b> GPL v3<br>
+ </td>
+ <td width="75%">
+ The Čech complex is a simplicial complex constructed from a proximity graph.<br>
+ The set of all simplices is filtered by the radius of their minimal enclosing ball.<br>
+ <b>User manual:</b> \ref cech_complex - <b>Reference manual:</b> Gudhi::cech_complex::Cech_complex
+ </td>
+ </tr>
+</table>
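The module also ships a complete example, cech_complex_example_from_points.cpp, added elsewhere in this patch. A condensed sketch of the same construction is given below; the template parameters, the (points, max_radius) constructor and the create_complex() call are assumed from the reference manual entry above rather than quoted from that example:

#include <gudhi/Cech_complex.h>
#include <gudhi/Simplex_tree.h>
#include <iostream>
#include <vector>

int main() {
  using Simplex_tree = Gudhi::Simplex_tree<>;
  using Filtration_value = Simplex_tree::Filtration_value;
  using Point = std::vector<Filtration_value>;
  using Point_cloud = std::vector<Point>;
  // Assumed template parameters: the simplicial complex type and the point range type.
  using Cech_complex = Gudhi::cech_complex::Cech_complex<Simplex_tree, Point_cloud>;

  Point_cloud points = {{0., 0.}, {1., 0.}, {0., 1.}, {1., 1.}};
  Filtration_value max_radius = 1.;
  Cech_complex cech(points, max_radius);

  Simplex_tree stree;
  cech.create_complex(stree, 2);  // simplices are filtered by their minimal enclosing ball radius
  std::cout << stree.num_simplices() << " simplices\n";
  return 0;
}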
\subsection CubicalComplexDataStructure Cubical complex
\image html "Cubical_complex_representation.png" "Cubical complex representation"
<table border="0">
@@ -57,12 +73,13 @@
<b>User manual:</b> \ref cubical_complex - <b>Reference manual:</b> Gudhi::cubical_complex::Bitmap_cubical_complex
</td>
</tr>
+</table>
\subsection RipsComplexDataStructure Rips complex
\image html "rips_complex_representation.png" "Rips complex representation"
<table border="0">
<tr>
<td width="25%">
- <b>Author:</b> Cl&eacute;ment Maria, Pawel Dlotko, Vincent Rouvreau<br>
+ <b>Author:</b> Cl&eacute;ment Maria, Pawel Dlotko, Vincent Rouvreau, Marc Glisse<br>
<b>Introduced in:</b> GUDHI 2.0.0<br>
<b>Copyright:</b> GPL v3<br>
</td>
@@ -75,7 +92,6 @@
</td>
</tr>
</table>
-</table>
\subsection SimplexTreeDataStructure Simplex tree
\image html "Simplex_tree_representation.png" "Simplex tree representation"
<table border="0">
diff --git a/src/common/example/CMakeLists.txt b/src/common/example/CMakeLists.txt
index 1273c699..04015cdc 100644
--- a/src/common/example/CMakeLists.txt
+++ b/src/common/example/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Common_examples)
add_executable ( vector_double_off_reader example_vector_double_points_off_reader.cpp )
diff --git a/src/common/include/gudhi/Off_reader.h b/src/common/include/gudhi/Off_reader.h
index 024f0568..05a1e145 100644
--- a/src/common/include/gudhi/Off_reader.h
+++ b/src/common/include/gudhi/Off_reader.h
@@ -137,7 +137,7 @@ class Off_reader {
if (!std::ifstream::sentry(stream_)) return false;
std::getline(stream_, uncomment_line);
} while (uncomment_line[0] == '#');
- return (bool)stream_;
+ return static_cast<bool>(stream_);
}
template<typename OffVisitor>
diff --git a/src/common/include/gudhi/distance_functions.h b/src/common/include/gudhi/distance_functions.h
index f7baed6f..5ef12f2e 100644
--- a/src/common/include/gudhi/distance_functions.h
+++ b/src/common/include/gudhi/distance_functions.h
@@ -25,7 +25,10 @@
#include <gudhi/Debug_utils.h>
+#include <gudhi/Miniball.hpp>
+
#include <boost/range/metafunctions.hpp>
+#include <boost/range/size.hpp>
#include <cmath> // for std::sqrt
#include <type_traits> // for std::decay
@@ -68,6 +71,53 @@ class Euclidean_distance {
}
};
+/** @brief Compute the radius of the minimal enclosing ball of two points, or of a point cloud, where each point is
+ * given by a range of Cartesian coordinates. The points are assumed to have the same dimension. */
+class Minimal_enclosing_ball_radius {
+ public:
+ /** \brief Minimal_enclosing_ball_radius from two points.
+ *
+ * @param[in] point_1 First point.
+   * @param[in] point_2 Second point.
+   * @return The minimal enclosing ball radius for the two points (i.e., half the Euclidean distance).
+ *
+ * \tparam Point must be a range of Cartesian coordinates.
+ *
+ */
+ template< typename Point >
+ typename std::iterator_traits<typename boost::range_iterator<Point>::type>::value_type
+ operator()(const Point& point_1, const Point& point_2) const {
+ return Euclidean_distance()(point_1, point_2) / 2.;
+ }
+ /** \brief Minimal_enclosing_ball_radius from a point cloud.
+ *
+ * @param[in] point_cloud The points.
+ * @return The minimal enclosing ball radius for the points.
+ *
+ * \tparam Point_cloud must be a range of points with Cartesian coordinates.
+ * Point_cloud is a range over a range of Coordinate.
+ *
+ */
+ template< typename Point_cloud,
+ typename Point_iterator = typename boost::range_const_iterator<Point_cloud>::type,
+ typename Point = typename std::iterator_traits<Point_iterator>::value_type,
+ typename Coordinate_iterator = typename boost::range_const_iterator<Point>::type,
+ typename Coordinate = typename std::iterator_traits<Coordinate_iterator>::value_type>
+ Coordinate
+ operator()(const Point_cloud& point_cloud) const {
+ using Min_sphere = Miniball::Miniball<Miniball::CoordAccessor<Point_iterator, Coordinate_iterator>>;
+
+ Min_sphere ms(boost::size(*point_cloud.begin()), point_cloud.begin(), point_cloud.end());
+#ifdef DEBUG_TRACES
+ std::cout << "Minimal_enclosing_ball_radius = " << std::sqrt(ms.squared_radius()) << " | nb points = "
+ << boost::size(point_cloud) << " | dimension = "
+ << boost::size(*point_cloud.begin()) << std::endl;
+#endif // DEBUG_TRACES
+
+ return std::sqrt(ms.squared_radius());
+ }
+};
+
} // namespace Gudhi
#endif // DISTANCE_FUNCTIONS_H_
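Both overloads declared above can be exercised directly. The sketch below uses only what this hunk introduces, namely Gudhi::Minimal_enclosing_ball_radius from gudhi/distance_functions.h:

#include <gudhi/distance_functions.h>
#include <iostream>
#include <vector>

int main() {
  using Point = std::vector<double>;
  std::vector<Point> cloud = {{0., 0.}, {2., 0.}, {1., 1.}};
  Gudhi::Minimal_enclosing_ball_radius meb;
  std::cout << meb(cloud[0], cloud[1]) << "\n";  // two-point overload: half the Euclidean distance, here 1
  std::cout << meb(cloud) << "\n";               // point-cloud overload: radius of the smallest enclosing ball
  return 0;
}

The two-point overload returns half the Euclidean distance, which is what allows the functor to serve as a drop-in distance when building a proximity graph.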
diff --git a/src/common/include/gudhi/graph_simplicial_complex.h b/src/common/include/gudhi/graph_simplicial_complex.h
index 6ab7b0b4..49fe56cc 100644
--- a/src/common/include/gudhi/graph_simplicial_complex.h
+++ b/src/common/include/gudhi/graph_simplicial_complex.h
@@ -42,6 +42,12 @@ struct vertex_filtration_t {
typedef boost::vertex_property_tag kind;
};
+/** \brief Proximity_graph contains the vertices and edges, together with their filtration values, and stores the
+ * result of the `Gudhi::compute_proximity_graph` function.
+ *
+ * \tparam SimplicialComplexForProximityGraph furnishes the `Filtration_value` type definition.
+ *
+ */
template <typename SimplicialComplexForProximityGraph>
using Proximity_graph = typename boost::adjacency_list < boost::vecS, boost::vecS, boost::undirectedS
, boost::property < vertex_filtration_t, typename SimplicialComplexForProximityGraph::Filtration_value >
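A possible usage sketch for the documented alias, assuming the compute_proximity_graph(points, threshold, distance) calling convention that the Rips module relies on (check the reference manual for the exact signature):

#include <gudhi/Simplex_tree.h>
#include <gudhi/distance_functions.h>
#include <gudhi/graph_simplicial_complex.h>
#include <iostream>
#include <vector>

int main() {
  using Simplex_tree = Gudhi::Simplex_tree<>;
  std::vector<std::vector<double>> points = {{0., 0.}, {1., 0.}, {0., 1.}};
  // An edge is created between any two points whose distance is below the threshold.
  auto graph = Gudhi::compute_proximity_graph<Simplex_tree>(points, 2., Gudhi::Euclidean_distance());
  Simplex_tree st;
  st.insert_graph(graph);  // the proximity graph becomes the 1-skeleton of the complex
  std::cout << st.num_simplices() << " simplices in the 1-skeleton\n";
  return 0;
}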
diff --git a/src/common/include/gudhi/random_point_generators.h b/src/common/include/gudhi/random_point_generators.h
index 1f8f2cd8..f8107c8b 100644
--- a/src/common/include/gudhi/random_point_generators.h
+++ b/src/common/include/gudhi/random_point_generators.h
@@ -190,7 +190,8 @@ template <typename Kernel, typename OutputIterator>
static void generate_uniform_points_on_torus_d(const Kernel &k, int dim, std::size_t num_slices,
OutputIterator out,
double radius_noise_percentage = 0.,
- std::vector<typename Kernel::FT> current_point = std::vector<typename Kernel::FT>()) {
+ std::vector<typename Kernel::FT> current_point =
+ std::vector<typename Kernel::FT>()) {
CGAL::Random rng;
int point_size = static_cast<int>(current_point.size());
if (point_size == 2 * dim) {
diff --git a/src/common/include/gudhi/writing_persistence_to_file.h b/src/common/include/gudhi/writing_persistence_to_file.h
index 4c5ce918..34448576 100644
--- a/src/common/include/gudhi/writing_persistence_to_file.h
+++ b/src/common/include/gudhi/writing_persistence_to_file.h
@@ -20,8 +20,8 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-#ifndef WRITING_PERSISTENCE_TO_FILE_H
-#define WRITING_PERSISTENCE_TO_FILE_H
+#ifndef WRITING_PERSISTENCE_TO_FILE_H_
+#define WRITING_PERSISTENCE_TO_FILE_H_
#include <iostream>
#include <string>
@@ -112,6 +112,6 @@ void write_persistence_intervals_to_stream(const Persistence_interval_range& int
}
}
-}
+} // namespace Gudhi
-#endif // WRITING_PERSISTENCE_TO_FILE_H
+#endif // WRITING_PERSISTENCE_TO_FILE_H_
diff --git a/src/common/test/CMakeLists.txt b/src/common/test/CMakeLists.txt
index de3e765a..0b49fa1e 100644
--- a/src/common/test/CMakeLists.txt
+++ b/src/common/test/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(Common_tests)
include(GUDHI_test_coverage)
diff --git a/src/common/utilities/CMakeLists.txt b/src/common/utilities/CMakeLists.txt
index b3e4b436..7f1d1cd7 100644
--- a/src/common/utilities/CMakeLists.txt
+++ b/src/common/utilities/CMakeLists.txt
@@ -1,4 +1,3 @@
-cmake_minimum_required(VERSION 2.6)
project(off_file_from_shape_generator)
if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0)
diff --git a/src/cython/CMakeLists.txt b/src/cython/CMakeLists.txt
index b19cc550..1849a6ec 100644
--- a/src/cython/CMakeLists.txt
+++ b/src/cython/CMakeLists.txt
@@ -1,8 +1,5 @@
-cmake_minimum_required(VERSION 2.8)
project(Cython)
-include(CheckCXXSourceCompiles)
-
function( add_gudhi_cython_lib THE_LIB )
if(EXISTS ${THE_LIB})
get_filename_component(THE_LIB_FILE_NAME ${THE_LIB} NAME_WE)
@@ -19,17 +16,51 @@ endfunction( add_gudhi_cython_lib )
# THE_TEST is the python test file name (without .py extension) containing tests functions
function( add_gudhi_py_test THE_TEST )
- # use ${PYTHON_EXECUTABLE} -B, otherwise a __pycache__ directory is created in sources by python
- # use py.test no cache provider, otherwise a .cache file is created in sources by py.test
- add_test(NAME ${THE_TEST}_py_test
- WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${PYTHON_EXECUTABLE} -B -m pytest -p no:cacheprovider ${CMAKE_CURRENT_SOURCE_DIR}/test/${THE_TEST}.py)
+ if(PYTEST_FOUND)
+ # use ${PYTHON_EXECUTABLE} -B, otherwise a __pycache__ directory is created in sources by python
+    # disable the py.test cache provider, otherwise a .cache file is created in sources by py.test
+ add_test(NAME ${THE_TEST}_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${PYTHON_EXECUTABLE} -B -m pytest -p no:cacheprovider ${CMAKE_CURRENT_SOURCE_DIR}/test/${THE_TEST}.py)
+ endif()
endfunction( add_gudhi_py_test )
+# Set gudhi.__debug_info__
+# WARNING: must be called before gudhi.pyx.in is passed to configure_file
+function( add_gudhi_debug_info DEBUG_INFO )
+ set(GUDHI_CYTHON_DEBUG_INFO "${GUDHI_CYTHON_DEBUG_INFO} \"${DEBUG_INFO}\\n\" \\\n" PARENT_SCOPE)
+endfunction( add_gudhi_debug_info )
+
if(CYTHON_FOUND)
- message("++ ${PYTHON_EXECUTABLE} v.${PYTHON_VERSION_STRING} - Cython is ${CYTHON_EXECUTABLE} - Sphinx is ${SPHINX_PATH}")
+ set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}off_reader;")
+ set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}simplex_tree;")
+ set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}rips_complex;")
+ set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}cubical_complex;")
+ set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}periodic_cubical_complex;")
+ set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}persistence_graphical_tools;")
+ set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}reader_utils;")
+ set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}witness_complex;")
+ set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}strong_witness_complex;")
+
+ add_gudhi_debug_info("Python version ${PYTHON_VERSION_STRING}")
+ add_gudhi_debug_info("Cython version ${CYTHON_VERSION}")
+ if(PYTEST_FOUND)
+ add_gudhi_debug_info("Pytest version ${PYTEST_VERSION}")
+ endif()
+ if(MATPLOTLIB_FOUND)
+ add_gudhi_debug_info("Matplotlib version ${MATPLOTLIB_VERSION}")
+ endif()
+ if(NUMPY_FOUND)
+ add_gudhi_debug_info("Numpy version ${NUMPY_VERSION}")
+ endif()
+ if(MATPLOTLIB_FOUND AND NUMPY_FOUND)
+ set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}persistence_graphical_tools;")
+ else()
+    set(GUDHI_CYTHON_MISSING_MODULES "${GUDHI_CYTHON_MISSING_MODULES}persistence_graphical_tools;")
+ endif()
+ message("++ ${PYTHON_EXECUTABLE} v.${PYTHON_VERSION_STRING} - Cython is ${CYTHON_VERSION} - Sphinx is ${SPHINX_PATH}")
set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DBOOST_RESULT_OF_USE_DECLTYPE', ")
set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DBOOST_ALL_NO_LIB', ")
set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DBOOST_SYSTEM_NO_DEPRECATED', ")
@@ -52,60 +83,63 @@ if(CYTHON_FOUND)
endif()
if (EIGEN3_FOUND)
+ add_gudhi_debug_info("Eigen3 version ${EIGEN3_VERSION}")
# No problem, even if no CGAL found
set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_EIGEN3_ENABLED', ")
endif (EIGEN3_FOUND)
if (NOT CGAL_VERSION VERSION_LESS 4.8.1)
set(GUDHI_CYTHON_BOTTLENECK_DISTANCE "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/bottleneck_distance.pyx'")
- endif (NOT CGAL_VERSION VERSION_LESS 4.8.1)
+ set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}bottleneck_distance;")
+ set(GUDHI_CYTHON_NERVE_GIC "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/nerve_gic.pyx'")
+ set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}nerve_gic;")
+ else()
+ set(GUDHI_CYTHON_MISSING_MODULES "${GUDHI_CYTHON_MISSING_MODULES}bottleneck_distance;")
+ set(GUDHI_CYTHON_MISSING_MODULES "${GUDHI_CYTHON_MISSING_MODULES}nerve_gic;")
+ endif ()
if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
set(GUDHI_CYTHON_SUBSAMPLING "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/subsampling.pyx'")
+ set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}subsampling;")
set(GUDHI_CYTHON_TANGENTIAL_COMPLEX "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/tangential_complex.pyx'")
- endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
+ set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}tangential_complex;")
+ else()
+ set(GUDHI_CYTHON_MISSING_MODULES "${GUDHI_CYTHON_MISSING_MODULES}subsampling;")
+ set(GUDHI_CYTHON_MISSING_MODULES "${GUDHI_CYTHON_MISSING_MODULES}tangential_complex;")
+ endif ()
if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
set(GUDHI_CYTHON_ALPHA_COMPLEX "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/alpha_complex.pyx'")
- endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
+ set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}alpha_complex;")
+ else()
+ set(GUDHI_CYTHON_MISSING_MODULES "${GUDHI_CYTHON_MISSING_MODULES}alpha_complex;")
+ endif ()
if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0)
set(GUDHI_CYTHON_EUCLIDEAN_WITNESS_COMPLEX
"include '${CMAKE_CURRENT_SOURCE_DIR}/cython/euclidean_witness_complex.pyx'\ninclude '${CMAKE_CURRENT_SOURCE_DIR}/cython/euclidean_strong_witness_complex.pyx'\n")
- endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0)
+ set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}euclidean_witness_complex;")
+ set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}euclidean_strong_witness_complex;")
+ else()
+ set(GUDHI_CYTHON_MISSING_MODULES "${GUDHI_CYTHON_MISSING_MODULES}euclidean_witness_complex;")
+ set(GUDHI_CYTHON_MISSING_MODULES "${GUDHI_CYTHON_MISSING_MODULES}euclidean_strong_witness_complex;")
+ endif ()
+
+ add_gudhi_debug_info("Installed modules are: ${GUDHI_CYTHON_MODULES}")
+ if(GUDHI_CYTHON_MISSING_MODULES)
+ add_gudhi_debug_info("Missing modules are: ${GUDHI_CYTHON_MISSING_MODULES}")
+ endif()
if(CGAL_FOUND)
- # This is because of https://github.com/CGAL/cgal/blob/master/Installation/include/CGAL/tss.h
- # CGAL is using boost thread if thread_local is not ready (requires XCode 8 for Mac).
- # The test in https://github.com/CGAL/cgal/blob/master/Installation/include/CGAL/config.h
- # #if __has_feature(cxx_thread_local) || \
- # ( (__GNUC__ * 100 + __GNUC_MINOR__) >= 408 && __cplusplus >= 201103L ) || \
- # ( _MSC_VER >= 1900 )
- # #define CGAL_CAN_USE_CXX11_THREAD_LOCAL
- # #endif
- set(CGAL_CAN_USE_CXX11_THREAD_LOCAL "
- int main() {
- #ifndef __has_feature
- #define __has_feature(x) 0 // Compatibility with non-clang compilers.
- #endif
- #if __has_feature(cxx_thread_local) || \
- ( (__GNUC__ * 100 + __GNUC_MINOR__) >= 408 && __cplusplus >= 201103L ) || \
- ( _MSC_VER >= 1900 )
- bool has_feature_thread_local = true;
- #else
- // Explicit error of compilation for CMake test purpose - has_feature_thread_local is not defined
- #endif
- bool result = has_feature_thread_local;
- } ")
- check_cxx_source_compiles("${CGAL_CAN_USE_CXX11_THREAD_LOCAL}" CGAL_CAN_USE_CXX11_THREAD_LOCAL_RESULT)
-
+ can_cgal_use_cxx11_thread_local()
if (NOT CGAL_CAN_USE_CXX11_THREAD_LOCAL_RESULT)
add_gudhi_cython_lib(${Boost_THREAD_LIBRARY})
set(GUDHI_CYTHON_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}'${Boost_LIBRARY_DIRS}', ")
endif()
-
# Add CGAL compilation args
if(CGAL_HEADER_ONLY)
+ add_gudhi_debug_info("CGAL header only version ${CGAL_VERSION}")
set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_HEADER_ONLY', ")
else(CGAL_HEADER_ONLY)
- add_gudhi_cython_lib(${CGAL_LIBRARIES})
+ add_gudhi_debug_info("CGAL version ${CGAL_VERSION}")
+ add_gudhi_cython_lib(${CGAL_LIBRARY})
set(GUDHI_CYTHON_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}'${CGAL_LIBRARIES_DIR}', ")
# If CGAL is not header only, the CGAL library may link with boost system, so add it as well
add_gudhi_cython_lib(${Boost_SYSTEM_LIBRARY})
@@ -113,10 +147,12 @@ if(CYTHON_FOUND)
endif(CGAL_HEADER_ONLY)
# GMP and GMPXX are not required, but if present, CGAL will link with them.
if(GMP_FOUND)
+ add_gudhi_debug_info("GMP_LIBRARIES = ${GMP_LIBRARIES}")
set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_USE_GMP', ")
add_gudhi_cython_lib(${GMP_LIBRARIES})
set(GUDHI_CYTHON_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}'${GMP_LIBRARIES_DIR}', ")
if(GMPXX_FOUND)
+ add_gudhi_debug_info("GMPXX_LIBRARIES = ${GMPXX_LIBRARIES}")
set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_USE_GMPXX', ")
add_gudhi_cython_lib(${GMPXX_LIBRARIES})
set(GUDHI_CYTHON_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}'${GMPXX_LIBRARIES_DIR}', ")
@@ -138,6 +174,7 @@ if(CYTHON_FOUND)
set(GUDHI_CYTHON_INCLUDE_DIRS "${GUDHI_CYTHON_INCLUDE_DIRS}'${CMAKE_SOURCE_DIR}/${GUDHI_CYTHON_PATH}/include', ")
if (TBB_FOUND AND WITH_GUDHI_USE_TBB)
+ add_gudhi_debug_info("TBB version ${TBB_INTERFACE_VERSION} found and used")
set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DGUDHI_USE_TBB', ")
add_gudhi_cython_lib(${TBB_RELEASE_LIBRARY})
add_gudhi_cython_lib(${TBB_MALLOC_RELEASE_LIBRARY})
@@ -178,40 +215,73 @@ if(CYTHON_FOUND)
${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/alpha_rips_persistence_bottleneck_distance.py"
-f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -t 0.15 -d 3)
- # Tangential
- add_test(NAME tangential_complex_plain_homology_from_off_file_example_py_test
+ if(MATPLOTLIB_FOUND AND NUMPY_FOUND)
+ # Tangential
+ add_test(NAME tangential_complex_plain_homology_from_off_file_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/tangential_complex_plain_homology_from_off_file_example.py"
+ --no-diagram -i 2 -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off)
+
+ add_gudhi_py_test(test_tangential_complex)
+
+ # Witness complex AND Subsampling
+ add_test(NAME euclidean_strong_witness_complex_diagram_persistence_from_off_file_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py"
+ --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -a 1.0 -n 20 -d 2)
+
+ add_test(NAME euclidean_witness_complex_diagram_persistence_from_off_file_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py"
+ --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -a 1.0 -n 20 -d 2)
+ endif()
+
+ # Subsampling
+ add_gudhi_py_test(test_subsampling)
+
+ endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
+ if (NOT CGAL_VERSION VERSION_LESS 4.8.1)
+ # Bottleneck
+ add_test(NAME bottleneck_basic_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
- ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/tangential_complex_plain_homology_from_off_file_example.py"
- --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off)
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/bottleneck_basic_example.py")
- add_gudhi_py_test(test_tangential_complex)
+ add_gudhi_py_test(test_bottleneck_distance)
- # Witness complex AND Subsampling
- add_test(NAME euclidean_strong_witness_complex_diagram_persistence_from_off_file_example_py_test
+ # Cover complex
+ file(COPY ${CMAKE_SOURCE_DIR}/data/points/human.off DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+ file(COPY ${CMAKE_SOURCE_DIR}/data/points/COIL_database/lucky_cat.off DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+ file(COPY ${CMAKE_SOURCE_DIR}/data/points/COIL_database/lucky_cat_PCA1 DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+ add_test(NAME cover_complex_nerve_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
- ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py"
- --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -a 1.0 -n 20 -d 2)
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/nerve_of_a_covering.py"
+ -f human.off -c 2 -r 10 -g 0.3)
- add_test(NAME euclidean_witness_complex_diagram_persistence_from_off_file_example_py_test
+ add_test(NAME cover_complex_coordinate_gic_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
- ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py"
- --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -a 1.0 -n 20 -d 2)
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/coordinate_graph_induced_complex.py"
+ -f human.off -c 0 -v)
- # Subsampling
- add_gudhi_py_test(test_subsampling)
+ add_test(NAME cover_complex_functional_gic_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/functional_graph_induced_complex.py"
+ -o lucky_cat.off
+ -f lucky_cat_PCA1 -v)
- endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
- if (NOT CGAL_VERSION VERSION_LESS 4.8.1)
- # Bottleneck
- add_test(NAME bottleneck_basic_example_py_test
+ add_test(NAME cover_complex_voronoi_gic_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
- ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/bottleneck_basic_example.py")
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/voronoi_graph_induced_complex.py"
+ -f human.off -n 700 -v)
- add_gudhi_py_test(test_bottleneck_distance)
+ add_gudhi_py_test(test_cover_complex)
endif (NOT CGAL_VERSION VERSION_LESS 4.8.1)
if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
@@ -221,11 +291,13 @@ if(CYTHON_FOUND)
COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/alpha_complex_from_points_example.py")
- add_test(NAME alpha_complex_diagram_persistence_from_off_file_example_py_test
+ if(MATPLOTLIB_FOUND AND NUMPY_FOUND)
+ add_test(NAME alpha_complex_diagram_persistence_from_off_file_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/alpha_complex_diagram_persistence_from_off_file_example.py"
--no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -a 0.6)
+ endif()
add_gudhi_py_test(test_alpha_complex)
@@ -244,26 +316,30 @@ if(CYTHON_FOUND)
${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py"
--no-barcode -f ${CMAKE_SOURCE_DIR}/data/bitmap/CubicalTwoSphere.txt)
- add_test(NAME random_cubical_complex_persistence_example_py_test
- WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
- ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/random_cubical_complex_persistence_example.py"
- 10 10 10)
+ if(NUMPY_FOUND)
+ add_test(NAME random_cubical_complex_persistence_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/random_cubical_complex_persistence_example.py"
+ 10 10 10)
+ endif()
add_gudhi_py_test(test_cubical_complex)
# Rips
- add_test(NAME rips_complex_diagram_persistence_from_distance_matrix_file_example_py_test
- WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
- ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py"
- --no-diagram -f ${CMAKE_SOURCE_DIR}/data/distance_matrix/lower_triangular_distance_matrix.csv -e 12.0 -d 3)
-
- add_test(NAME rips_complex_diagram_persistence_from_off_file_example_py_test
- WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
- ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/example/rips_complex_diagram_persistence_from_off_file_example.py
- --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -e 0.25 -d 3)
+ if(MATPLOTLIB_FOUND AND NUMPY_FOUND)
+ add_test(NAME rips_complex_diagram_persistence_from_distance_matrix_file_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py"
+ --no-diagram -f ${CMAKE_SOURCE_DIR}/data/distance_matrix/lower_triangular_distance_matrix.csv -e 12.0 -d 3)
+
+ add_test(NAME rips_complex_diagram_persistence_from_off_file_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/example/rips_complex_diagram_persistence_from_off_file_example.py
+ --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -e 0.25 -d 3)
+ endif()
add_test(NAME rips_complex_from_points_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
@@ -292,7 +368,7 @@ if(CYTHON_FOUND)
add_gudhi_py_test(test_reader_utils)
# Documentation generation is available through sphinx - requires all modules
- if(SPHINX_PATH AND NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
+ if(SPHINX_PATH AND MATPLOTLIB_FOUND AND NUMPY_FOUND AND NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
set (GUDHI_SPHINX_MESSAGE "Generating API documentation with Sphinx in ${CMAKE_CURRENT_BINARY_DIR}/sphinx/")
# User warning - Sphinx is a static page generator, configured to work fine with user_version
# Image and biblio warnings occur because they are not found in the developer version
@@ -312,5 +388,5 @@ if(CYTHON_FOUND)
COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
${SPHINX_PATH} -b doctest ${CMAKE_CURRENT_SOURCE_DIR}/doc ${CMAKE_CURRENT_BINARY_DIR}/doctest)
- endif(SPHINX_PATH AND NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
+ endif()
endif(CYTHON_FOUND)
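Note: the add_gudhi_debug_info() calls above accumulate build-time information (tool versions, installed and missing modules) into the string configured as gudhi.__debug_info__. A minimal sketch of how to inspect it once the module has been built with this CMakeLists.txt (no API beyond the __debug_info__ attribute configured above is assumed):

    # Print the build information collected by the add_gudhi_debug_info() calls:
    # one line per call, e.g. "Python version ...", "Installed modules are: ...".
    import gudhi
    print(gudhi.__debug_info__)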
diff --git a/src/cython/cython/cubical_complex.pyx b/src/cython/cython/cubical_complex.pyx
index a98a3ec3..e94cd539 100644
--- a/src/cython/cython/cubical_complex.pyx
+++ b/src/cython/cython/cubical_complex.pyx
@@ -104,22 +104,21 @@ cdef class CubicalComplex:
return self.pcohptr != NULL
def num_simplices(self):
- """This function returns the number of simplices of the simplicial
- complex.
+ """This function returns the number of all cubes in the complex.
- :returns: int -- the simplicial complex number of simplices.
+ :returns: int -- the number of all cubes in the complex.
"""
return self.thisptr.num_simplices()
def dimension(self):
- """This function returns the dimension of the simplicial complex.
+ """This function returns the dimension of the complex.
- :returns: int -- the simplicial complex dimension.
+ :returns: int -- the complex dimension.
"""
return self.thisptr.dimension()
def persistence(self, homology_coeff_field=11, min_persistence=0):
- """This function returns the persistence of the simplicial complex.
+ """This function returns the persistence of the complex.
:param homology_coeff_field: The homology coefficient field. Must be a
prime number
@@ -130,7 +129,7 @@ cdef class CubicalComplex:
Sets min_persistence to -1.0 to see all values.
:type min_persistence: float.
:returns: list of pairs(dimension, pair(birth, death)) -- the
- persistence of the simplicial complex.
+ persistence of the complex.
"""
if self.pcohptr != NULL:
del self.pcohptr
@@ -142,12 +141,15 @@ cdef class CubicalComplex:
return persistence_result
def betti_numbers(self):
- """This function returns the Betti numbers of the simplicial complex.
+ """This function returns the Betti numbers of the complex.
:returns: list of int -- The Betti numbers ([B0, B1, ..., Bn]).
:note: betti_numbers function requires persistence function to be
launched first.
+
+ :note: betti_numbers function always returns [1, 0, 0, ...] because cubes
+ with an infinite filtration value are not removed from the complex.
"""
cdef vector[int] bn_result
if self.pcohptr != NULL:
@@ -155,8 +157,7 @@ cdef class CubicalComplex:
return bn_result
def persistent_betti_numbers(self, from_value, to_value):
- """This function returns the persistent Betti numbers of the
- simplicial complex.
+ """This function returns the persistent Betti numbers of the complex.
:param from_value: The persistence birth limit to be added in the
numbers (persistent birth <= from_value).
@@ -177,8 +178,8 @@ cdef class CubicalComplex:
return pbn_result
def persistence_intervals_in_dimension(self, dimension):
- """This function returns the persistence intervals of the simplicial
- complex in a specific dimension.
+ """This function returns the persistence intervals of the complex in a
+ specific dimension.
:param dimension: The specific dimension.
:type dimension: int.
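Since the docstrings above now consistently speak of cubes rather than simplices, a short usage sketch may help; the dimensions/top_dimensional_cells constructor arguments belong to the existing CubicalComplex API and are not introduced by this diff:

    import gudhi
    # A 2x2 grid of top-dimensional cells with explicit filtration values.
    cc = gudhi.CubicalComplex(dimensions=[2, 2],
                              top_dimensional_cells=[1.0, 2.0, 3.0, 4.0])
    print(cc.num_simplices())   # total number of cubes in the complex
    print(cc.dimension())       # 2
    cc.persistence()            # must be launched before betti_numbers()
    print(cc.betti_numbers())   # [1, 0, 0] here, as explained in the note above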
diff --git a/src/cython/cython/nerve_gic.pyx b/src/cython/cython/nerve_gic.pyx
new file mode 100644
index 00000000..01dd0a4b
--- /dev/null
+++ b/src/cython/cython/nerve_gic.pyx
@@ -0,0 +1,410 @@
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.utility cimport pair
+from libcpp.string cimport string
+from libcpp cimport bool
+import os
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2018 Inria
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2018 Inria"
+__license__ = "GPL v3"
+
+cdef extern from "Nerve_gic_interface.h" namespace "Gudhi":
+ cdef cppclass Nerve_gic_interface "Gudhi::cover_complex::Nerve_gic_interface":
+ Nerve_gic_interface()
+ double compute_confidence_level_from_distance(double distance)
+ double compute_distance_from_confidence_level(double alpha)
+ void compute_distribution(int N)
+ double compute_p_value()
+ vector[pair[double, double]] compute_PD()
+ void find_simplices()
+ void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree)
+ bool read_point_cloud(string off_file_name)
+ double set_automatic_resolution()
+ void set_color_from_coordinate(int k)
+ void set_color_from_file(string color_file_name)
+ void set_color_from_range(vector[double] color)
+ void set_cover_from_file(string cover_file_name)
+ void set_cover_from_function()
+ void set_cover_from_Euclidean_Voronoi(int m)
+ void set_function_from_coordinate(int k)
+ void set_function_from_file(string func_file_name)
+ void set_function_from_range(vector[double] function)
+ void set_gain(double g)
+ double set_graph_from_automatic_euclidean_rips(int N)
+ void set_graph_from_file(string graph_file_name)
+ void set_graph_from_OFF()
+ void set_graph_from_euclidean_rips(double threshold)
+ void set_mask(int nodemask)
+ void set_resolution_with_interval_length(double resolution)
+ void set_resolution_with_interval_number(int resolution)
+ void set_subsampling(double constant, double power)
+ void set_type(string type)
+ void set_verbose(bool verbose)
+ vector[int] subpopulation(int c)
+ void write_info()
+ void plot_DOT()
+ void plot_OFF()
+ void set_point_cloud_from_range(vector[vector[double]] cloud)
+
+# CoverComplex python interface
+cdef class CoverComplex:
+ """Cover complex data structure.
+
+ The data structure is a simplicial complex, representing a Graph Induced
+ simplicial Complex (GIC) or a Nerve, and whose simplices are computed with
+ a cover C of a point cloud P, which often comes from the preimages of
+ intervals covering the image of a function f defined on P. These intervals
+ are parameterized by their resolution (either their length or their number)
+ and their gain (percentage of overlap). To compute a GIC, one also needs a
+ graph G built on top of P, whose cliques with vertices belonging to
+ different elements of C correspond to the simplices of the GIC.
+ """
+
+ cdef Nerve_gic_interface * thisptr
+
+ # Fake constructor that does nothing but documenting the constructor
+ def __init__(self):
+ """CoverComplex constructor.
+ """
+
+ # The real cython constructor
+ def __cinit__(self):
+ self.thisptr = new Nerve_gic_interface()
+
+ def __dealloc__(self):
+ if self.thisptr != NULL:
+ del self.thisptr
+
+ def __is_defined(self):
+ """Returns true if CoverComplex pointer is not NULL.
+ """
+ return self.thisptr != NULL
+
+ def set_point_cloud_from_range(self, cloud):
+ """ Reads and stores the input point cloud from a vector stored in memory.
+
+ :param cloud: Input vector containing the point cloud.
+ :type cloud: vector[vector[double]]
+ """
+ return self.thisptr.set_point_cloud_from_range(cloud)
+
+ def compute_confidence_level_from_distance(self, distance):
+ """Computes the confidence level of a specific bottleneck distance
+ threshold.
+
+ :param distance: Bottleneck distance.
+ :type distance: double
+ :rtype: double
+ :returns: Confidence level.
+ """
+ return self.thisptr.compute_confidence_level_from_distance(distance)
+
+ def compute_distance_from_confidence_level(self, alpha):
+ """Computes the bottleneck distance threshold corresponding to a
+ specific confidence level.
+
+ :param alpha: Confidence level.
+ :type alpha: double
+ :rtype: double
+ :returns: Bottleneck distance.
+ """
+ return self.thisptr.compute_distance_from_confidence_level(alpha)
+
+ def compute_distribution(self, N=100):
+ """Computes bootstrapped distances distribution.
+
+ :param N: Loop number (default value is 100).
+ :type N: int
+ """
+ self.thisptr.compute_distribution(N)
+
+ def compute_p_value(self):
+ """Computes the p-value, i.e. the opposite of the confidence level of
+ the largest bottleneck distance preserving the points in the
+ persistence diagram of the output simplicial complex.
+
+ :rtype: double
+ :returns: p-value.
+ """
+ return self.thisptr.compute_p_value()
+
+ def compute_PD(self):
+ """Computes the extended persistence diagram of the complex.
+ """
+ return self.thisptr.compute_PD()
+
+ def create_simplex_tree(self):
+ """
+ :returns: A simplex tree created from the Cover complex.
+ :rtype: SimplexTree
+ """
+ simplex_tree = SimplexTree()
+ self.thisptr.create_simplex_tree(simplex_tree.thisptr)
+ return simplex_tree
+
+ def find_simplices(self):
+ """Computes the simplices of the simplicial complex.
+ """
+ self.thisptr.find_simplices()
+
+ def read_point_cloud(self, off_file):
+ """Reads and stores the input point cloud from .(n)OFF file.
+
+ :param off_file: Name of the input .OFF or .nOFF file.
+ :type off_file: string
+ :rtype: bool
+ :returns: Read file status.
+ """
+ if os.path.isfile(off_file):
+ return self.thisptr.read_point_cloud(str.encode(off_file))
+ else:
+ print("file " + off_file + " not found.")
+ return False
+
+ def set_automatic_resolution(self):
+ """Computes the optimal length of intervals (i.e. the smallest interval
+ length avoiding discretization artifacts, see :cite:`Carriere17c`) for a
+ functional cover.
+
+ :rtype: double
+ :returns: The interval length used to compute the cover.
+ """
+ return self.thisptr.set_automatic_resolution()
+
+ def set_color_from_coordinate(self, k=0):
+ """Computes the function used to color the nodes of the simplicial
+ complex from the k-th coordinate.
+
+ :param k: Coordinate to use (start at 0). Default value is 0.
+ :type k: int
+ """
+ return self.thisptr.set_color_from_coordinate(k)
+
+ def set_color_from_file(self, color_file_name):
+ """Computes the function used to color the nodes of the simplicial
+ complex from a file containing the function values.
+
+ :param color_file_name: Name of the input color file.
+ :type color_file_name: string
+ """
+ if os.path.isfile(color_file_name):
+ self.thisptr.set_color_from_file(str.encode(color_file_name))
+ else:
+ print("file " + color_file_name + " not found.")
+
+ def set_color_from_range(self, color):
+ """Computes the function used to color the nodes of the simplicial
+ complex from a vector stored in memory.
+
+ :param color: Input vector of values.
+ :type color: vector[double]
+ """
+ self.thisptr.set_color_from_range(color)
+
+ def set_cover_from_file(self, cover_file_name):
+ """Creates the cover C from a file containing the cover elements of
+ each point (the order has to be the same as in the input file!).
+
+ :param cover_file_name: Name of the input cover file.
+ :type cover_file_name: string
+ """
+ if os.path.isfile(cover_file_name):
+ self.thisptr.set_cover_from_file(str.encode(cover_file_name))
+ else:
+ print("file " + cover_file_name + " not found.")
+
+ def set_cover_from_function(self):
+ """Creates a cover C from the preimages of the function f.
+ """
+ self.thisptr.set_cover_from_function()
+
+ def set_cover_from_Voronoi(self, m=100):
+ """Creates the cover C from the Voronoï cells of a subsampling of the
+ point cloud.
+
+ :param m: Number of points in the subsample. Default value is 100.
+ :type m: int
+ """
+ self.thisptr.set_cover_from_Euclidean_Voronoi(m)
+
+ def set_function_from_coordinate(self, k):
+ """Creates the function f from the k-th coordinate of the point cloud.
+
+ :param k: Coordinate to use (start at 0).
+ :type k: int
+ """
+ self.thisptr.set_function_from_coordinate(k)
+
+ def set_function_from_file(self, func_file_name):
+ """Creates the function f from a file containing the function values.
+
+ :param func_file_name: Name of the input function file.
+ :type func_file_name: string
+ """
+ if os.path.isfile(func_file_name):
+ self.thisptr.set_function_from_file(str.encode(func_file_name))
+ else:
+ print("file " + func_file_name + " not found.")
+
+ def set_function_from_range(self, function):
+ """Creates the function f from a vector stored in memory.
+
+ :param function: Input vector of values.
+ :type function: vector[double]
+ """
+ self.thisptr.set_function_from_range(function)
+
+ def set_gain(self, g = 0.3):
+ """Sets a gain from a value stored in memory.
+
+ :param g: Gain (default value is 0.3).
+ :type g: double
+ """
+ self.thisptr.set_gain(g)
+
+ def set_graph_from_automatic_rips(self, N=100):
+ """Creates a graph G from a Rips complex whose threshold value is
+ automatically tuned with subsampling (see :cite:`Carriere17c`).
+
+ :param N: Number of subsampling iterations (the default reasonable value
+ is 100, but there is no guarantee on how to choose it).
+ :type N: int
+ :rtype: double
+ :returns: Delta threshold used for computing the Rips complex.
+ """
+ return self.thisptr.set_graph_from_automatic_euclidean_rips(N)
+
+ def set_graph_from_file(self, graph_file_name):
+ """Creates a graph G from a file containing the edges.
+
+ :param graph_file_name: Name of the input graph file. The graph file
+ contains one edge per line, each edge being represented by the IDs of
+ its two nodes.
+ :type graph_file_name: string
+ """
+ if os.path.isfile(graph_file_name):
+ self.thisptr.set_graph_from_file(str.encode(graph_file_name))
+ else:
+ print("file " + graph_file_name + " not found.")
+
+ def set_graph_from_OFF(self):
+ """Creates a graph G from the triangulation given by the input OFF
+ file.
+ """
+ self.thisptr.set_graph_from_OFF()
+
+ def set_graph_from_rips(self, threshold):
+ """Creates a graph G from a Rips complex.
+
+ :param threshold: Threshold value for the Rips complex.
+ :type threshold: double
+ """
+ self.thisptr.set_graph_from_euclidean_rips(threshold)
+
+ def set_mask(self, nodemask):
+ """Sets the mask, which is a threshold integer such that nodes in the
+ complex that contain a number of data points which is less than or
+ equal to this threshold are not displayed.
+
+ :param nodemask: Threshold.
+ :type nodemask: int
+ """
+ self.thisptr.set_mask(nodemask)
+
+ def set_resolution_with_interval_length(self, resolution):
+ """Sets a length of intervals from a value stored in memory.
+
+ :param resolution: Length of intervals.
+ :type resolution: double
+ """
+ self.thisptr.set_resolution_with_interval_length(resolution)
+
+ def set_resolution_with_interval_number(self, resolution):
+ """Sets a number of intervals from a value stored in memory.
+
+ :param resolution: Number of intervals.
+ :type resolution: int
+ """
+ self.thisptr.set_resolution_with_interval_number(resolution)
+
+ def set_subsampling(self, constant, power):
+ """Sets the constants used to subsample the data set. These constants
+ are explained in :cite:`Carriere17c`.
+
+ :param constant: Constant.
+ :type constant: double
+ :param power: Power.
+ :type power: double
+ """
+ self.thisptr.set_subsampling(constant, power)
+
+ def set_type(self, type):
+ """Specifies whether the type of the output simplicial complex.
+
+ :param type: either "GIC" or "Nerve".
+ :type type: string
+ """
+ self.thisptr.set_type(str.encode(type))
+
+ def set_verbose(self, verbose):
+ """Specifies whether the program should display information or not.
+
+ :param verbose: true = display info, false = do not display info.
+ :type verbose: boolean
+ """
+ self.thisptr.set_verbose(verbose)
+
+ def subpopulation(self, c):
+ """Returns the data subset corresponding to a specific node of the
+ created complex.
+
+ :param c: ID of the node.
+ :type c: int
+ :rtype: vector[int]
+ :returns: Vector of IDs of data points.
+ """
+ return self.thisptr.subpopulation(c)
+
+ def write_info(self):
+ """Creates a .txt file called SC.txt describing the 1-skeleton, which can
+ then be plotted with e.g. KeplerMapper.
+ """
+ return self.thisptr.write_info()
+
+ def plot_dot(self):
+ """Creates a .dot file called SC.dot for neato (part of the graphviz
+ package) once the simplicial complex is computed to get a visualization of
+ its 1-skeleton in a .pdf file.
+ """
+ return self.thisptr.plot_DOT()
+
+ def plot_off(self):
+ """Creates a .off file called SC.off for 3D visualization, which contains
+ the 2-skeleton of the GIC. This function assumes that the cover has been
+ computed with Voronoi. If data points are in 1D or 2D, the remaining
+ coordinates of the points embedded in 3D are set to 0.
+ """
+ return self.thisptr.plot_OFF()
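The CoverComplex bindings above expose the whole Nerve/GIC pipeline. Below is a minimal sketch of a graph induced complex built from an OFF file, using only methods declared in this file; human.off and the parameter values are taken from the tests added to CMakeLists.txt above and are purely illustrative:

    import gudhi
    cc = gudhi.CoverComplex()
    cc.set_type("GIC")                    # "GIC" or "Nerve"
    cc.set_verbose(True)
    if cc.read_point_cloud("human.off"):
        cc.set_color_from_coordinate(0)   # color nodes by the first coordinate
        cc.set_function_from_coordinate(0)
        cc.set_graph_from_automatic_rips()
        cc.set_automatic_resolution()
        cc.set_gain(0.3)
        cc.set_cover_from_function()
        cc.find_simplices()
        cc.write_info()                   # writes SC.txt, see write_info() above
        st = cc.create_simplex_tree()
        print(st.num_simplices())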
diff --git a/src/cython/cython/periodic_cubical_complex.pyx b/src/cython/cython/periodic_cubical_complex.pyx
index c25b83e9..e626950b 100644
--- a/src/cython/cython/periodic_cubical_complex.pyx
+++ b/src/cython/cython/periodic_cubical_complex.pyx
@@ -106,22 +106,21 @@ cdef class PeriodicCubicalComplex:
return self.pcohptr != NULL
def num_simplices(self):
- """This function returns the number of simplices of the simplicial
- complex.
+ """This function returns the number of all cubes in the complex.
- :returns: int -- the simplicial complex number of simplices.
+ :returns: int -- the number of all cubes in the complex.
"""
return self.thisptr.num_simplices()
def dimension(self):
- """This function returns the dimension of the simplicial complex.
+ """This function returns the dimension of the complex.
- :returns: int -- the simplicial complex dimension.
+ :returns: int -- the complex dimension.
"""
return self.thisptr.dimension()
def persistence(self, homology_coeff_field=11, min_persistence=0):
- """This function returns the persistence of the simplicial complex.
+ """This function returns the persistence of the complex.
:param homology_coeff_field: The homology coefficient field. Must be a
prime number
@@ -132,7 +131,7 @@ cdef class PeriodicCubicalComplex:
Sets min_persistence to -1.0 to see all values.
:type min_persistence: float.
:returns: list of pairs(dimension, pair(birth, death)) -- the
- persistence of the simplicial complex.
+ persistence of the complex.
"""
if self.pcohptr != NULL:
del self.pcohptr
@@ -144,12 +143,15 @@ cdef class PeriodicCubicalComplex:
return persistence_result
def betti_numbers(self):
- """This function returns the Betti numbers of the simplicial complex.
+ """This function returns the Betti numbers of the complex.
:returns: list of int -- The Betti numbers ([B0, B1, ..., Bn]).
:note: betti_numbers function requires persistence function to be
launched first.
+
+ :note: betti_numbers function always returns [1, 0, 0, ...] because cubes
+ with an infinite filtration value are not removed from the complex.
"""
cdef vector[int] bn_result
if self.pcohptr != NULL:
@@ -157,8 +159,7 @@ cdef class PeriodicCubicalComplex:
return bn_result
def persistent_betti_numbers(self, from_value, to_value):
- """This function returns the persistent Betti numbers of the
- simplicial complex.
+ """This function returns the persistent Betti numbers of the complex.
:param from_value: The persistence birth limit to be added in the
numbers (persistent birth <= from_value).
@@ -179,8 +180,8 @@ cdef class PeriodicCubicalComplex:
return pbn_result
def persistence_intervals_in_dimension(self, dimension):
- """This function returns the persistence intervals of the simplicial
- complex in a specific dimension.
+ """This function returns the persistence intervals of the complex in a
+ specific dimension.
:param dimension: The specific dimension.
:type dimension: int.
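The periodic variant receives the same docstring rewording. A short sketch, assuming the existing periodic_dimensions constructor argument (not part of this diff):

    import gudhi
    # A 3x3 grid of top-dimensional cells, periodic in both directions.
    pcc = gudhi.PeriodicCubicalComplex(dimensions=[3, 3],
                                       top_dimensional_cells=[0.0] * 9,
                                       periodic_dimensions=[True, True])
    pcc.persistence()           # must be launched before betti_numbers()
    print(pcc.betti_numbers())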
diff --git a/src/cython/cython/persistence_graphical_tools.py b/src/cython/cython/persistence_graphical_tools.py
index e2405e96..314bd6db 100755
--- a/src/cython/cython/persistence_graphical_tools.py
+++ b/src/cython/cython/persistence_graphical_tools.py
@@ -1,7 +1,3 @@
-import matplotlib.pyplot as plt
-import numpy as np
-import os
-
"""This file is part of the Gudhi Library. The Gudhi library
(Geometric Understanding in Higher Dimensions) is a generic C++
library for computational topology.
@@ -28,175 +24,197 @@ __author__ = "Vincent Rouvreau, Bertrand Michel"
__copyright__ = "Copyright (C) 2016 Inria"
__license__ = "GPL v3"
-def __min_birth_max_death(persistence, band_boot=0.):
- """This function returns (min_birth, max_death) from the persistence.
-
- :param persistence: The persistence to plot.
- :type persistence: list of tuples(dimension, tuple(birth, death)).
- :param band_boot: bootstrap band
- :type band_boot: float.
- :returns: (float, float) -- (min_birth, max_death).
- """
- # Look for minimum birth date and maximum death date for plot optimisation
- max_death = 0
- min_birth = persistence[0][1][0]
- for interval in reversed(persistence):
- if float(interval[1][1]) != float('inf'):
- if float(interval[1][1]) > max_death:
- max_death = float(interval[1][1])
- if float(interval[1][0]) > max_death:
- max_death = float(interval[1][0])
- if float(interval[1][0]) < min_birth:
- min_birth = float(interval[1][0])
- if band_boot > 0.:
- max_death += band_boot
- return (min_birth, max_death)
+try:
+ import matplotlib.pyplot as plt
+ import matplotlib.patches as mpatches
+ import numpy as np
+ import os
+
+ def __min_birth_max_death(persistence, band=0.):
+ """This function returns (min_birth, max_death) from the persistence.
+
+ :param persistence: The persistence to plot.
+ :type persistence: list of tuples(dimension, tuple(birth, death)).
+ :param band: band
+ :type band: float.
+ :returns: (float, float) -- (min_birth, max_death).
+ """
+ # Look for minimum birth date and maximum death date for plot optimisation
+ max_death = 0
+ min_birth = persistence[0][1][0]
+ for interval in reversed(persistence):
+ if float(interval[1][1]) != float('inf'):
+ if float(interval[1][1]) > max_death:
+ max_death = float(interval[1][1])
+ if float(interval[1][0]) > max_death:
+ max_death = float(interval[1][0])
+ if float(interval[1][0]) < min_birth:
+ min_birth = float(interval[1][0])
+ if band > 0.:
+ max_death += band
+ return (min_birth, max_death)
-"""
-Only 13 colors for the palette
-"""
-palette = ['#ff0000', '#00ff00', '#0000ff', '#00ffff', '#ff00ff', '#ffff00',
- '#000000', '#880000', '#008800', '#000088', '#888800', '#880088',
- '#008888']
-
-def show_palette_values(alpha=0.6):
- """This function shows palette color values in function of the dimension.
-
- :param alpha: alpha value in [0.0, 1.0] for horizontal bars (default is 0.6).
- :type alpha: float.
- :returns: plot the dimension palette values.
- """
- colors = []
- for color in palette:
- colors.append(color)
-
- y_pos = np.arange(len(palette))
-
- plt.barh(y_pos, y_pos + 1, align='center', alpha=alpha, color=colors)
- plt.ylabel('Dimension')
- plt.title('Dimension palette values')
- return plt
-
-def plot_persistence_barcode(persistence=[], persistence_file='', alpha=0.6, max_barcodes=0):
- """This function plots the persistence bar code.
-
- :param persistence: The persistence to plot.
- :type persistence: list of tuples(dimension, tuple(birth, death)).
- :param persistence_file: A persistence file style name (reset persistence if both are set).
- :type persistence_file: string
- :param alpha: alpha value in [0.0, 1.0] for horizontal bars (default is 0.6).
- :type alpha: float.
- :param max_barcodes: number of maximal barcodes to be displayed
- (persistence will be sorted by life time if max_barcodes is set)
- :type max_barcodes: int.
- :returns: plot -- An horizontal bar plot of persistence.
"""
- if persistence_file is not '':
- if os.path.isfile(persistence_file):
- # Reset persistence
- persistence = []
- diag = read_persistence_intervals_grouped_by_dimension(persistence_file=persistence_file)
- for key in diag.keys():
- for persistence_interval in diag[key]:
- persistence.append((key, persistence_interval))
- else:
- print("file " + persistence_file + " not found.")
- return None
-
- if max_barcodes > 0 and max_barcodes < len(persistence):
- # Sort by life time, then takes only the max_plots elements
- persistence = sorted(persistence, key=lambda life_time: life_time[1][1]-life_time[1][0], reverse=True)[:max_barcodes]
-
- (min_birth, max_death) = __min_birth_max_death(persistence)
- ind = 0
- delta = ((max_death - min_birth) / 10.0)
- # Replace infinity values with max_death + delta for bar code to be more
- # readable
- infinity = max_death + delta
- axis_start = min_birth - delta
- # Draw horizontal bars in loop
- for interval in reversed(persistence):
- if float(interval[1][1]) != float('inf'):
- # Finite death case
- plt.barh(ind, (interval[1][1] - interval[1][0]), height=0.8,
- left = interval[1][0], alpha=alpha,
- color = palette[interval[0]])
- else:
- # Infinite death case for diagram to be nicer
- plt.barh(ind, (infinity - interval[1][0]), height=0.8,
- left = interval[1][0], alpha=alpha,
- color = palette[interval[0]])
- ind = ind + 1
-
- plt.title('Persistence barcode')
- # Ends plot on infinity value and starts a little bit before min_birth
- plt.axis([axis_start, infinity, 0, ind])
- return plt
-
-def plot_persistence_diagram(persistence=[], persistence_file='', alpha=0.6, band_boot=0., max_plots=0):
- """This function plots the persistence diagram with an optional confidence band.
-
- :param persistence: The persistence to plot.
- :type persistence: list of tuples(dimension, tuple(birth, death)).
- :param persistence_file: A persistence file style name (reset persistence if both are set).
- :type persistence_file: string
- :param alpha: alpha value in [0.0, 1.0] for points and horizontal infinity line (default is 0.6).
- :type alpha: float.
- :param band_boot: bootstrap band (not displayed if :math:`\leq` 0.)
- :type band_boot: float.
- :param max_plots: number of maximal plots to be displayed
- :type max_plots: int.
- :returns: plot -- A diagram plot of persistence.
+ Only 13 colors for the palette
"""
- if persistence_file is not '':
- if os.path.isfile(persistence_file):
- # Reset persistence
- persistence = []
- diag = read_persistence_intervals_grouped_by_dimension(persistence_file=persistence_file)
- for key in diag.keys():
- for persistence_interval in diag[key]:
- persistence.append((key, persistence_interval))
- else:
- print("file " + persistence_file + " not found.")
- return None
-
- if max_plots > 0 and max_plots < len(persistence):
- # Sort by life time, then takes only the max_plots elements
- persistence = sorted(persistence, key=lambda life_time: life_time[1][1]-life_time[1][0], reverse=True)[:max_plots]
-
- (min_birth, max_death) = __min_birth_max_death(persistence, band_boot)
- ind = 0
- delta = ((max_death - min_birth) / 10.0)
- # Replace infinity values with max_death + delta for diagram to be more
- # readable
- infinity = max_death + delta
- axis_start = min_birth - delta
-
- # line display of equation : birth = death
- x = np.linspace(axis_start, infinity, 1000)
- # infinity line and text
- plt.plot(x, x, color='k', linewidth=1.0)
- plt.plot(x, [infinity] * len(x), linewidth=1.0, color='k', alpha=alpha)
- plt.text(axis_start, infinity, r'$\infty$', color='k', alpha=alpha)
- # bootstrap band
- if band_boot > 0.:
- plt.fill_between(x, x, x+band_boot, alpha=alpha, facecolor='red')
-
- # Draw points in loop
- for interval in reversed(persistence):
- if float(interval[1][1]) != float('inf'):
- # Finite death case
- plt.scatter(interval[1][0], interval[1][1], alpha=alpha,
- color = palette[interval[0]])
- else:
- # Infinite death case for diagram to be nicer
- plt.scatter(interval[1][0], infinity, alpha=alpha,
- color = palette[interval[0]])
- ind = ind + 1
-
- plt.title('Persistence diagram')
- plt.xlabel('Birth')
- plt.ylabel('Death')
- # Ends plot on infinity value and starts a little bit before min_birth
- plt.axis([axis_start, infinity, axis_start, infinity + delta])
- return plt
+ palette = ['#ff0000', '#00ff00', '#0000ff', '#00ffff', '#ff00ff', '#ffff00',
+ '#000000', '#880000', '#008800', '#000088', '#888800', '#880088',
+ '#008888']
+
+ def plot_persistence_barcode(persistence=[], persistence_file='', alpha=0.6,
+ max_barcodes=1000, inf_delta=0.1, legend=False):
+ """This function plots the persistence bar code from persistence values list
+ or from a :doc:`persistence file <fileformats>`.
+
+ :param persistence: Persistence values list.
+ :type persistence: list of tuples(dimension, tuple(birth, death)).
+ :param persistence_file: A :doc:`persistence file <fileformats>` style name
+ (reset persistence if both are set).
+ :type persistence_file: string
+ :param alpha: barcode transparency value (0.0 transparent through 1.0 opaque - default is 0.6).
+ :type alpha: float.
+ :param max_barcodes: maximal number of barcodes to be displayed.
+ Set it to 0 to see all of them. Default value is 1000.
+ (persistence will be sorted by life time if max_barcodes is set)
+ :type max_barcodes: int.
+ :param inf_delta: Infinity is placed at max_death + ((max_death - min_birth) x inf_delta).
+ A reasonable value is between 0.05 and 0.5 - default is 0.1.
+ :type inf_delta: float.
+ :returns: A matplotlib object containing a horizontal bar plot of the persistence
+ (call its `show()` method to display it).
+ """
+ if persistence_file is not '':
+ if os.path.isfile(persistence_file):
+ # Reset persistence
+ persistence = []
+ diag = read_persistence_intervals_grouped_by_dimension(persistence_file=persistence_file)
+ for key in diag.keys():
+ for persistence_interval in diag[key]:
+ persistence.append((key, persistence_interval))
+ else:
+ print("file " + persistence_file + " not found.")
+ return None
+
+ if max_barcodes > 0 and max_barcodes < len(persistence):
+ # Sort by life time, then keep only the first max_barcodes elements
+ persistence = sorted(persistence, key=lambda life_time: life_time[1][1]-life_time[1][0], reverse=True)[:max_barcodes]
+
+ persistence = sorted(persistence, key=lambda birth: birth[1][0])
+
+ (min_birth, max_death) = __min_birth_max_death(persistence)
+ ind = 0
+ delta = ((max_death - min_birth) * inf_delta)
+ # Replace infinity values with max_death + delta for bar code to be more
+ # readable
+ infinity = max_death + delta
+ axis_start = min_birth - delta
+ # Draw horizontal bars in loop
+ for interval in reversed(persistence):
+ if float(interval[1][1]) != float('inf'):
+ # Finite death case
+ plt.barh(ind, (interval[1][1] - interval[1][0]), height=0.8,
+ left = interval[1][0], alpha=alpha,
+ color = palette[interval[0]],
+ linewidth=0)
+ else:
+ # Infinite death case for diagram to be nicer
+ plt.barh(ind, (infinity - interval[1][0]), height=0.8,
+ left = interval[1][0], alpha=alpha,
+ color = palette[interval[0]],
+ linewidth=0)
+ ind = ind + 1
+
+ if legend:
+ dimensions = list(set(item[0] for item in persistence))
+ plt.legend(handles=[mpatches.Patch(color=palette[dim],
+ label=str(dim)) for dim in dimensions],
+ loc='lower right')
+ plt.title('Persistence barcode')
+ # Ends plot on infinity value and starts a little bit before min_birth
+ plt.axis([axis_start, infinity, 0, ind])
+ return plt
+
+ def plot_persistence_diagram(persistence=[], persistence_file='', alpha=0.6,
+ band=0., max_plots=1000, inf_delta=0.1, legend=False):
+ """This function plots the persistence diagram from persistence values list
+ or from a :doc:`persistence file <fileformats>`.
+
+ :param persistence: Persistence values list.
+ :type persistence: list of tuples(dimension, tuple(birth, death)).
+ :param persistence_file: A :doc:`persistence file <fileformats>` style name
+ (reset persistence if both are set).
+ :type persistence_file: string
+ :param alpha: plot transparency value (0.0 transparent through 1.0 opaque - default is 0.6).
+ :type alpha: float.
+ :param band: bootstrap band (not displayed if :math:`\leq` 0. - default is 0.)
+ :type band: float.
+ :param max_plots: maximal number of plots to be displayed.
+ Set it to 0 to see all of them. Default value is 1000.
+ (persistence will be sorted by life time if max_plots is set)
+ :type max_plots: int.
+ :param inf_delta: Infinity is placed at max_death + ((max_death - min_birth) x inf_delta).
+ A reasonable value is between 0.05 and 0.5 - default is 0.1.
+ :type inf_delta: float.
+ :returns: A matplotlib object containing a diagram plot of the persistence
+ (call its `show()` method to display it).
+ """
+ if persistence_file is not '':
+ if os.path.isfile(persistence_file):
+ # Reset persistence
+ persistence = []
+ diag = read_persistence_intervals_grouped_by_dimension(persistence_file=persistence_file)
+ for key in diag.keys():
+ for persistence_interval in diag[key]:
+ persistence.append((key, persistence_interval))
+ else:
+ print("file " + persistence_file + " not found.")
+ return None
+
+ if max_plots > 0 and max_plots < len(persistence):
+ # Sort by life time, then keep only the first max_plots elements
+ persistence = sorted(persistence, key=lambda life_time: life_time[1][1]-life_time[1][0], reverse=True)[:max_plots]
+
+ (min_birth, max_death) = __min_birth_max_death(persistence, band)
+ ind = 0
+ delta = ((max_death - min_birth) * inf_delta)
+ # Replace infinity values with max_death + delta for diagram to be more
+ # readable
+ infinity = max_death + delta
+ axis_start = min_birth - delta
+
+ # line display of equation : birth = death
+ x = np.linspace(axis_start, infinity, 1000)
+ # infinity line and text
+ plt.plot(x, x, color='k', linewidth=1.0)
+ plt.plot(x, [infinity] * len(x), linewidth=1.0, color='k', alpha=alpha)
+ plt.text(axis_start, infinity, r'$\infty$', color='k', alpha=alpha)
+ # bootstrap band
+ if band > 0.:
+ plt.fill_between(x, x, x+band, alpha=alpha, facecolor='red')
+
+ # Draw points in loop
+ for interval in reversed(persistence):
+ if float(interval[1][1]) != float('inf'):
+ # Finite death case
+ plt.scatter(interval[1][0], interval[1][1], alpha=alpha,
+ color = palette[interval[0]])
+ else:
+ # Infinite death case for diagram to be nicer
+ plt.scatter(interval[1][0], infinity, alpha=alpha,
+ color = palette[interval[0]])
+ ind = ind + 1
+
+ if legend:
+ dimensions = list(set(item[0] for item in persistence))
+ plt.legend(handles=[mpatches.Patch(color=palette[dim], label=str(dim)) for dim in dimensions])
+
+ plt.title('Persistence diagram')
+ plt.xlabel('Birth')
+ plt.ylabel('Death')
+ # Ends plot on infinity value and starts a little bit before min_birth
+ plt.axis([axis_start, infinity, axis_start, infinity + delta])
+ return plt
+
+except ImportError:
+ # Continue in case of import error, functions won't be available
+ pass
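With the plotting helpers now defined inside a try/except around the matplotlib and numpy imports, and with the new band, inf_delta and legend parameters, a typical call looks like the sketch below (the persistence list is hand-made for illustration; matplotlib and numpy must be installed, otherwise the functions are silently skipped as shown above):

    import gudhi
    # Hand-made persistence list: (dimension, (birth, death)) tuples.
    diag = [(0, (0.0, float('inf'))), (0, (0.0, 0.25)), (1, (0.3, 0.8))]
    plt = gudhi.plot_persistence_barcode(persistence=diag, legend=True)
    plt.show()
    plt = gudhi.plot_persistence_diagram(persistence=diag, band=0.1, legend=True)
    plt.show()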
diff --git a/src/cython/cython/rips_complex.pyx b/src/cython/cython/rips_complex.pyx
index 59c16bff..30ca4443 100644
--- a/src/cython/cython/rips_complex.pyx
+++ b/src/cython/cython/rips_complex.pyx
@@ -51,7 +51,7 @@ cdef class RipsComplex:
"""RipsComplex constructor.
:param max_edge_length: Rips value.
- :type max_edge_length: int
+ :type max_edge_length: float
:param points: A list of points in d-Dimension.
:type points: list of list of double
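The max_edge_length fix above only touches the docstring type; for reference, a construction sketch with a float threshold (create_simplex_tree belongs to the existing RipsComplex API and is not shown in this hunk):

    import gudhi
    rips = gudhi.RipsComplex(points=[[0.0, 0.0], [1.0, 0.0], [0.0, 1.0]],
                             max_edge_length=1.5)   # a float, as documented above
    st = rips.create_simplex_tree(max_dimension=2)
    print(st.num_simplices())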
diff --git a/src/cython/cython/simplex_tree.pyx b/src/cython/cython/simplex_tree.pyx
index 8abeb5f8..e302486b 100644
--- a/src/cython/cython/simplex_tree.pyx
+++ b/src/cython/cython/simplex_tree.pyx
@@ -55,6 +55,7 @@ cdef extern from "Simplex_tree_interface.h" namespace "Gudhi":
void expansion(int max_dim)
void remove_maximal_simplex(vector[int] simplex)
bool prune_above_filtration(double filtration)
+ bool make_filtration_non_decreasing()
cdef extern from "Persistent_cohomology_interface.h" namespace "Gudhi":
cdef cppclass Simplex_tree_persistence_interface "Gudhi::Persistent_cohomology_interface<Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_full_featured>>":
@@ -64,6 +65,7 @@ cdef extern from "Persistent_cohomology_interface.h" namespace "Gudhi":
vector[int] persistent_betti_numbers(double from_value, double to_value)
vector[pair[double,double]] intervals_in_dimension(int dimension)
void write_output_diagram(string diagram_file_name)
+ vector[pair[vector[int], vector[int]]] persistence_pairs()
# SimplexTree python interface
cdef class SimplexTree:
@@ -399,6 +401,26 @@ cdef class SimplexTree:
"""
self.thisptr.expansion(max_dim)
+ def make_filtration_non_decreasing(self):
+ """This function ensures that each simplex has a higher filtration
+ value than its faces by increasing the filtration values.
+
+ :returns: The filtration modification information.
+ :rtype: bint
+
+
+ .. note::
+
+ Some simplex tree functions require the filtration to be valid.
+ make_filtration_non_decreasing does not call
+ :func:`initialize_filtration()<gudhi.SimplexTree.initialize_filtration>`
+ but only returns the filtration modification
+ information. If the complex has changed, please call
+ :func:`initialize_filtration()<gudhi.SimplexTree.initialize_filtration>`
+ to recompute it.
+ """
+ return self.thisptr.make_filtration_non_decreasing()
+
def persistence(self, homology_coeff_field=11, min_persistence=0, persistence_dim_max = False):
"""This function returns the persistence of the simplicial complex.
@@ -486,6 +508,25 @@ cdef class SimplexTree:
" to be launched first.")
return intervals_result
+ def persistence_pairs(self):
+ """This function returns the persistence pairs of the simplicial
+ complex.
+
+ :returns: The persistence pairs.
+ :rtype: list of pair of list of int
+
+ :note: persistence_pairs function requires
+ :func:`persistence()<gudhi.SimplexTree.persistence>`
+ function to be launched first.
+ """
+ cdef vector[pair[vector[int],vector[int]]] persistence_pairs_result
+ if self.pcohptr != NULL:
+ persistence_pairs_result = self.pcohptr.persistence_pairs()
+ else:
+ print("persistence_pairs function requires persistence function"
+ " to be launched first.")
+ return persistence_pairs_result
+
def write_persistence_diagram(self, persistence_file=''):
"""This function writes the persistence intervals of the simplicial
complex in a user given file name.
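The two additions above (make_filtration_non_decreasing and persistence_pairs) combine naturally; here is a minimal sketch, where insert, initialize_filtration and persistence are pre-existing SimplexTree methods:

    import gudhi
    st = gudhi.SimplexTree()
    st.insert([0], filtration=2.0)
    st.insert([0, 1, 2], filtration=0.5)   # vertex 0 keeps its larger value
    # The triangle now has a smaller filtration value than one of its faces.
    if st.make_filtration_non_decreasing():
        st.initialize_filtration()         # recompute, as the note above asks
    st.persistence()
    print(st.persistence_pairs())          # pairs of simplices as lists of vertex ids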
diff --git a/src/cython/cython/subsampling.pyx b/src/cython/cython/subsampling.pyx
index ac09b7a3..e9d61a37 100644
--- a/src/cython/cython/subsampling.pyx
+++ b/src/cython/cython/subsampling.pyx
@@ -112,7 +112,8 @@ def pick_n_random_points(points=None, off_file='', nb_points=0):
return subsampling_n_random_points(points, nb_points)
def sparsify_point_set(points=None, off_file='', min_squared_dist=0.0):
- """Subsample a point set by picking random vertices.
+ """Outputs a subset of the input points so that the squared distance
+ between any two points is greater than or equal to min_squared_dist.
:param points: The input point set.
:type points: vector[vector[double]].
@@ -122,8 +123,9 @@ def sparsify_point_set(points=None, off_file='', min_squared_dist=0.0):
:param off_file: An OFF file style name.
:type off_file: string
- :param min_squared_dist: Number of points of the subsample.
- :type min_squared_dist: unsigned.
+ :param min_squared_dist: Minimum squared distance separating the output \
+ points.
+ :type min_squared_dist: float.
:returns: The subsample point set.
:rtype: vector[vector[double]]
"""
diff --git a/src/cython/cython/tangential_complex.pyx b/src/cython/cython/tangential_complex.pyx
index 10fa1468..4bb07076 100644
--- a/src/cython/cython/tangential_complex.pyx
+++ b/src/cython/cython/tangential_complex.pyx
@@ -33,9 +33,9 @@ __license__ = "GPL v3"
cdef extern from "Tangential_complex_interface.h" namespace "Gudhi":
cdef cppclass Tangential_complex_interface "Gudhi::tangential_complex::Tangential_complex_interface":
- Tangential_complex_interface(vector[vector[double]] points)
+ Tangential_complex_interface(int intrisic_dim, vector[vector[double]] points)
# bool from_file is a workaround for cython to find the correct signature
- Tangential_complex_interface(string off_file, bool from_file)
+ Tangential_complex_interface(int intrisic_dim, string off_file, bool from_file)
vector[double] get_point(unsigned vertex)
unsigned number_of_vertices()
unsigned number_of_simplices()
@@ -54,9 +54,12 @@ cdef class TangentialComplex:
cdef Tangential_complex_interface * thisptr
# Fake constructor that does nothing but documenting the constructor
- def __init__(self, points=None, off_file=''):
+ def __init__(self, intrisic_dim, points=None, off_file=''):
"""TangentialComplex constructor.
+ :param intrisic_dim: Intrinsic dimension of the manifold.
+ :type intrisic_dim: integer
+
:param points: A list of points in d-Dimension.
:type points: list of list of double
@@ -67,17 +70,17 @@ cdef class TangentialComplex:
"""
# The real cython constructor
- def __cinit__(self, points=None, off_file=''):
+ def __cinit__(self, intrisic_dim, points=None, off_file=''):
if off_file is not '':
if os.path.isfile(off_file):
- self.thisptr = new Tangential_complex_interface(str.encode(off_file), True)
+ self.thisptr = new Tangential_complex_interface(intrisic_dim, str.encode(off_file), True)
else:
print("file " + off_file + " not found.")
else:
if points is None:
# Empty tangential construction
points=[]
- self.thisptr = new Tangential_complex_interface(points)
+ self.thisptr = new Tangential_complex_interface(intrisic_dim, points)
def __dealloc__(self):
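Since the TangentialComplex constructor now takes the intrinsic dimension as its first argument, a usage sketch (create_simplex_tree is part of the existing TangentialComplex API, not shown in this hunk):

    import gudhi
    # Points sampled on a curve (intrinsic dimension 1) embedded in the plane.
    pts = [[1.0, 0.0], [0.0, 1.0], [-1.0, 0.0], [0.0, -1.0]]
    tc = gudhi.TangentialComplex(1, points=pts)   # intrisic_dim comes first now
    st = tc.create_simplex_tree()
    print(st.num_simplices())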
diff --git a/src/cython/doc/_templates/layout.html b/src/cython/doc/_templates/layout.html
index c9356116..bc0e9658 100644
--- a/src/cython/doc/_templates/layout.html
+++ b/src/cython/doc/_templates/layout.html
@@ -56,6 +56,12 @@
</a></p>
{%- endif %}
{%- endblock %}
+ <h2><a href="index.html">GUDHI</a></h2>
+ <h2><a href="fileformats.html">File formats</a></h2>
+ <h2><a href="installation.html">GUDHI installation</a></h2>
+ <h2><a href="citation.html">Acknowledging the GUDHI library</a></h2>
+ <h2><a href="genindex.html">Index</a></h2>
+ <h2><a href="examples.html">Examples</a></h2>
{%- if sidebars != None %}
{#- new style sidebar: explicitly include/exclude templates #}
{%- for sidebartemplate in sidebars %}
@@ -64,13 +70,6 @@
{%- else %}
{#- old style sidebars: using blocks -- should be deprecated #}
{%- block sidebartoc %}
-<h2><a href="index.html">GUDHI</a></h2>
-<h2><a href="fileformats.html">File formats</a></h2>
-<h2><a href="installation.html">GUDHI installation</a></h2>
-<h2><a href="citation.html">Acknowledging the GUDHI library</a></h2>
-<h2><a href="genindex.html">Index</a></h2>
-<h2><a href="examples.html">Examples</a></h2>
-
{%- include "localtoc.html" %}
{%- endblock %}
{%- block sidebarrel %}
@@ -108,7 +107,7 @@
{%- macro css() %}
<!-- GUDHI website css for header BEGIN -->
-<link rel="stylesheet" type="text/css" href="http://gudhi.gforge.inria.fr/assets/css/styles_feeling_responsive.css" />
+<link rel="stylesheet" type="text/css" href="https://gudhi.inria.fr/assets/css/styles_feeling_responsive.css" />
<!-- GUDHI website css for header END -->
<link rel="stylesheet" href="{{ pathto('_static/' + style, 1) }}" type="text/css" />
<link rel="stylesheet" href="{{ pathto('_static/pygments.css', 1) }}" type="text/css" />
@@ -166,60 +165,61 @@
<body role="document">
<!-- GUDHI website header BEGIN -->
<div id="navigation" class="sticky">
- <nav class="top-bar" role="navigation" data-topbar>
- <ul class="title-area">
- <li class="name">
- <h1 class="show-for-small-only"><a href="http://gudhi.gforge.inria.fr" class="icon-tree"> GUDHI C++ library</a></h1>
- </li>
- <!-- Remove the class "menu-icon" to get rid of menu icon. Take out "Menu" to just have icon alone -->
- <li class="toggle-topbar menu-icon"><a href="#"><span>Navigation</span></a></li>
- </ul>
- <section class="top-bar-section">
- <ul class="right">
- <li class="divider"></li>
- <li><a href="http://gudhi.gforge.inria.fr/contact/">Contact</a></li>
- </ul>
- <ul class="left">
- <li><a href="http://gudhi.gforge.inria.fr/"> <img src="http://gudhi.gforge.inria.fr/assets/img/home.png" alt="&nbsp;&nbsp;GUDHI">&nbsp;&nbsp;GUDHI </a></li>
- <li class="divider"></li>
- <li class="has-dropdown">
- <a href="#">Project</a>
- <ul class="dropdown">
- <li><a href="http://gudhi.gforge.inria.fr/people/">People</a></li>
- <li><a href="http://gudhi.gforge.inria.fr/keepintouch/">Keep in touch</a></li>
- <li><a href="http://gudhi.gforge.inria.fr/partners/">Partners and Funding</a></li>
- <li><a href="http://gudhi.gforge.inria.fr/relatedprojects/">Related projects</a></li>
- <li><a href="http://gudhi.gforge.inria.fr/theyaretalkingaboutus/">They are talking about us</a></li>
- </ul>
- </li>
- <li class="divider"></li>
- <li class="has-dropdown">
- <a href="#">Download</a>
- <ul class="dropdown">
- <li><a href="http://gudhi.gforge.inria.fr/licensing/">Licensing</a></li>
- <li><a href="https://gforge.inria.fr/frs/?group_id=3865" target="_blank">Get the sources</a></li>
- <li><a href="https://gforge.inria.fr/frs/download.php/file/37365/2018-02-01-16-59-31_GUDHI_2.1.0_OSX_UTILS.tar.gz" target="_blank">Utils for Mac OSx</a></li>
- <li><a href="https://gforge.inria.fr/frs/download.php/file/37366/2018-01-31-09-25-53_GUDHI_2.1.0_WIN64_UTILS.zip" target="_blank">Utils for Win x64</a></li>
- </ul>
- </li>
- <li class="divider"></li>
- <li class="has-dropdown">
- <a href="#">Documentation</a>
- <ul class="dropdown">
- <li><a href="http://gudhi.gforge.inria.fr/doc/latest/">C++ documentation</a></li>
- <li><a href="http://gudhi.gforge.inria.fr/doc/latest/installation.html">C++ installation manual</a></li>
- <li><a href="http://gudhi.gforge.inria.fr/python/latest/">Python documentation</a></li>
- <li><a href="http://gudhi.gforge.inria.fr/python/latest/installation.html">Python installation manual</a></li>
- <li><a href="http://gudhi.gforge.inria.fr/utils/">Utilities</a></li>
- <li><a href="http://bertrand.michel.perso.math.cnrs.fr/Enseignements/TDA-Gudhi-Python.html" target="_blank">Tutorial</a></li>
- </ul>
- </li>
- <li class="divider"></li>
- <li><a href="http://gudhi.gforge.inria.fr/interfaces/">Interfaces</a></li>
- <li class="divider"></li>
- </ul>
- </section>
- </nav>
+ <nav class="top-bar" role="navigation" data-topbar>
+ <ul class="title-area">
+ <li class="name">
+ <h1 class="show-for-small-only"><a href="" class="icon-tree"> GUDHI C++ library</a></h1>
+ </li>
+ <!-- Remove the class "menu-icon" to get rid of menu icon. Take out "Menu" to just have icon alone -->
+ <li class="toggle-topbar menu-icon"><a href="#"><span>Navigation</span></a></li>
+ </ul>
+ <section class="top-bar-section">
+ <ul class="right">
+ <li class="divider"></li>
+ <li><a href="/contact/">Contact</a></li>
+ </ul>
+ <ul class="left">
+ <li><a href="/"> <img src="/assets/img/home.png" alt="&nbsp;&nbsp;GUDHI">&nbsp;&nbsp;GUDHI </a></li>
+ <li class="divider"></li>
+ <li class="has-dropdown">
+ <a href="#">Project</a>
+ <ul class="dropdown">
+ <li><a href="/people/">People</a></li>
+ <li><a href="/keepintouch/">Keep in touch</a></li>
+ <li><a href="/partners/">Partners and Funding</a></li>
+ <li><a href="/relatedprojects/">Related projects</a></li>
+ <li><a href="/theyaretalkingaboutus/">They are talking about us</a></li>
+ </ul>
+ </li>
+ <li class="divider"></li>
+ <li class="has-dropdown">
+ <a href="#">Download</a>
+ <ul class="dropdown">
+ <li><a href="/licensing/">Licensing</a></li>
+ <li><a href="https://gforge.inria.fr/frs/download.php/latestzip/5253/library-latest.zip" target="_blank">Get the latest sources</a></li>
+ <li><a href="https://gforge.inria.fr/frs/download.php/latestzip/5280/utils_osx-latest.zip" target="_blank">Utils for Mac OSx</a></li>
+ <li><a href="https://gforge.inria.fr/frs/download.php/latestzip/5279/utils_win64-latest.zip" target="_blank">Utils for Win x64</a></li>
+ </ul>
+ </li>
+ <li class="divider"></li>
+ <li class="has-dropdown">
+ <a href="#">Documentation</a>
+ <ul class="dropdown">
+ <li><a href="/doc/latest/">C++ documentation</a></li>
+ <li><a href="/doc/latest/installation.html">C++ installation manual</a></li>
+ <li><a href="/python/latest/">Python documentation</a></li>
+ <li><a href="/python/latest/installation.html">Python installation manual</a></li>
+ <li><a href="/utils/">Utilities</a></li>
+ <li><a href="/tutorials/">Tutorials</a></li>
+ <li><a href="/dockerfile/">Dockerfile</a></li>
+ </ul>
+ </li>
+ <li class="divider"></li>
+ <li><a href="/interfaces/">Interfaces</a></li>
+ <li class="divider"></li>
+ </ul>
+ </section>
+ </nav>
</div><!-- /#navigation -->
<!-- GUDHI website header BEGIN -->
diff --git a/src/cython/doc/alpha_complex_ref.rst b/src/cython/doc/alpha_complex_ref.rst
index 6a122b09..7da79543 100644
--- a/src/cython/doc/alpha_complex_ref.rst
+++ b/src/cython/doc/alpha_complex_ref.rst
@@ -1,3 +1,7 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
==============================
Alpha complex reference manual
==============================
diff --git a/src/cython/doc/alpha_complex_sum.rst b/src/cython/doc/alpha_complex_sum.inc
index 1680a712..1680a712 100644
--- a/src/cython/doc/alpha_complex_sum.rst
+++ b/src/cython/doc/alpha_complex_sum.inc
diff --git a/src/cython/doc/alpha_complex_user.rst b/src/cython/doc/alpha_complex_user.rst
index db7edd6f..d1e9c7cd 100644
--- a/src/cython/doc/alpha_complex_user.rst
+++ b/src/cython/doc/alpha_complex_user.rst
@@ -1,11 +1,15 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
Alpha complex user manual
=========================
Definition
----------
-.. include:: alpha_complex_sum.rst
+.. include:: alpha_complex_sum.inc
-Alpha_complex is constructing a :doc:`Simplex_tree <simplex_tree_sum>` using
+Alpha_complex is constructing a :doc:`Simplex_tree <simplex_tree_ref>` using
`Delaunay Triangulation <http://doc.cgal.org/latest/Triangulation/index.html#Chapter_Triangulations>`_
:cite:`cgal:hdj-t-15b` from `CGAL <http://www.cgal.org/>`_ (the Computational Geometry Algorithms Library
:cite:`cgal:eb-15b`).
@@ -99,9 +103,9 @@ Filtration value computation algorithm
**end for**
**end for**
**end for**
-
+
make_filtration_non_decreasing()
-
+
prune_above_filtration()
Dimension 2
diff --git a/src/cython/doc/bottleneck_distance_sum.rst b/src/cython/doc/bottleneck_distance_sum.inc
index 030fad9e..030fad9e 100644
--- a/src/cython/doc/bottleneck_distance_sum.rst
+++ b/src/cython/doc/bottleneck_distance_sum.inc
diff --git a/src/cython/doc/bottleneck_distance_user.rst b/src/cython/doc/bottleneck_distance_user.rst
index 7692dce2..605db022 100644
--- a/src/cython/doc/bottleneck_distance_user.rst
+++ b/src/cython/doc/bottleneck_distance_user.rst
@@ -1,9 +1,13 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
Bottleneck distance user manual
===============================
Definition
----------
-.. include:: bottleneck_distance_sum.rst
+.. include:: bottleneck_distance_sum.inc
Function
--------
diff --git a/src/cython/doc/citation.rst b/src/cython/doc/citation.rst
index f4fdf83b..117eb9dd 100644
--- a/src/cython/doc/citation.rst
+++ b/src/cython/doc/citation.rst
@@ -1,3 +1,7 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
Acknowledging the GUDHI library
###############################
diff --git a/src/cython/doc/conf.py b/src/cython/doc/conf.py
index a13c9751..4a54d4fd 100755
--- a/src/cython/doc/conf.py
+++ b/src/cython/doc/conf.py
@@ -85,7 +85,7 @@ version = gudhi.__version__
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
-exclude_patterns = ['_build']
+exclude_patterns = ['_build', '*.inc']
# The reST default role (used for this markup: `text`) to use for all
# documents.
@@ -125,6 +125,7 @@ html_theme_options = {
"sidebarbgcolor": "#A1ADCD",
"sidebartextcolor": "black",
"sidebarlinkcolor": "#334D5C",
+ "body_max_width": "1200px",
}
# Add any paths that contain custom themes here, relative to this directory.
diff --git a/src/cython/doc/cubical_complex_ref.rst b/src/cython/doc/cubical_complex_ref.rst
index 84aa4223..1fe9d5fb 100644
--- a/src/cython/doc/cubical_complex_ref.rst
+++ b/src/cython/doc/cubical_complex_ref.rst
@@ -1,3 +1,7 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
Cubical complex reference manual
################################
diff --git a/src/cython/doc/cubical_complex_sum.rst b/src/cython/doc/cubical_complex_sum.inc
index 280ad0e0..280ad0e0 100644
--- a/src/cython/doc/cubical_complex_sum.rst
+++ b/src/cython/doc/cubical_complex_sum.inc
diff --git a/src/cython/doc/cubical_complex_user.rst b/src/cython/doc/cubical_complex_user.rst
index dd82ad93..320bd79b 100644
--- a/src/cython/doc/cubical_complex_user.rst
+++ b/src/cython/doc/cubical_complex_user.rst
@@ -1,3 +1,7 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
Cubical complex user manual
===========================
Definition
@@ -144,6 +148,7 @@ the program output is:
.. testoutput::
Periodic cubical complex is of dimension 2 - 42 simplices.
+
Examples.
---------
diff --git a/src/cython/doc/euclidean_strong_witness_complex_ref.rst b/src/cython/doc/euclidean_strong_witness_complex_ref.rst
index bebf0f9a..1a602cd5 100644
--- a/src/cython/doc/euclidean_strong_witness_complex_ref.rst
+++ b/src/cython/doc/euclidean_strong_witness_complex_ref.rst
@@ -1,3 +1,7 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
=================================================
Euclidean strong witness complex reference manual
=================================================
diff --git a/src/cython/doc/euclidean_witness_complex_ref.rst b/src/cython/doc/euclidean_witness_complex_ref.rst
index 29b8806f..28daf965 100644
--- a/src/cython/doc/euclidean_witness_complex_ref.rst
+++ b/src/cython/doc/euclidean_witness_complex_ref.rst
@@ -1,3 +1,7 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
==========================================
Euclidean witness complex reference manual
==========================================
diff --git a/src/cython/doc/examples.rst b/src/cython/doc/examples.rst
index 1e596e18..1f02f8a2 100644
--- a/src/cython/doc/examples.rst
+++ b/src/cython/doc/examples.rst
@@ -1,3 +1,7 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
Examples
########
@@ -19,3 +23,7 @@ Examples
* :download:`rips_complex_diagram_persistence_from_distance_matrix_file_example.py <../example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py>`
* :download:`rips_persistence_diagram.py <../example/rips_persistence_diagram.py>`
* :download:`random_cubical_complex_persistence_example.py <../example/random_cubical_complex_persistence_example.py>`
+ * :download:`coordinate_graph_induced_complex.py <../example/coordinate_graph_induced_complex.py>`
+ * :download:`functional_graph_induced_complex.py <../example/functional_graph_induced_complex.py>`
+ * :download:`voronoi_graph_induced_complex.py <../example/voronoi_graph_induced_complex.py>`
+ * :download:`nerve_of_a_covering.py <../example/nerve_of_a_covering.py>`
diff --git a/src/cython/doc/fileformats.rst b/src/cython/doc/fileformats.rst
index 4f0b6f6d..ff20f26e 100644
--- a/src/cython/doc/fileformats.rst
+++ b/src/cython/doc/fileformats.rst
@@ -1,3 +1,7 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
File formats
############
diff --git a/src/cython/doc/index.rst b/src/cython/doc/index.rst
index 3945d72a..15cbe267 100644
--- a/src/cython/doc/index.rst
+++ b/src/cython/doc/index.rst
@@ -34,32 +34,37 @@ Data structures
Alpha complex
=============
-.. include:: alpha_complex_sum.rst
+.. include:: alpha_complex_sum.inc
+
+Cover complexes
+===============
+
+.. include:: nerve_gic_complex_sum.rst
Cubical complex
===============
-.. include:: cubical_complex_sum.rst
+.. include:: cubical_complex_sum.inc
Rips complex
============
-.. include:: rips_complex_sum.rst
+.. include:: rips_complex_sum.inc
Simplex tree
============
-.. include:: simplex_tree_sum.rst
+.. include:: simplex_tree_sum.inc
Tangential complex
==================
-.. include:: tangential_complex_sum.rst
+.. include:: tangential_complex_sum.inc
Witness complex
===============
-.. include:: witness_complex_sum.rst
+.. include:: witness_complex_sum.inc
Toolbox
@@ -68,17 +73,17 @@ Toolbox
Bottleneck distance
===================
-.. include:: bottleneck_distance_sum.rst
+.. include:: bottleneck_distance_sum.inc
Persistence cohomology
======================
-.. include:: persistent_cohomology_sum.rst
+.. include:: persistent_cohomology_sum.inc
Persistence graphical tools
===========================
-.. include:: persistence_graphical_tools_sum.rst
+.. include:: persistence_graphical_tools_sum.inc
Bibliography
************
diff --git a/src/cython/doc/installation.rst b/src/cython/doc/installation.rst
index c182f176..43576ec9 100644
--- a/src/cython/doc/installation.rst
+++ b/src/cython/doc/installation.rst
@@ -1,12 +1,16 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
Installation
############
Compiling
*********
-
-The library uses c++11 and requires `Boost <http://www.boost.org/>`_ with
-version 1.48.0 or more recent. It is a multi-platform library and compiles on
-Linux, Mac OSX and Visual Studio 2015.
+The library uses C++11 and requires `Boost <https://www.boost.org/>`_ ≥ 1.48.0
+and `CMake <https://www.cmake.org/>`_ ≥ 3.1.
+It is a multi-platform library and compiles on Linux, Mac OSX and Visual
+Studio 2015.
It also requires cmake to generate makefiles, and cython to compile the
library.
@@ -43,6 +47,61 @@ following command in a terminal:
export PYTHONPATH='$PYTHONPATH:/path-to-gudhi/build/cython'
ctest -R py_test
+Debugging issues
+================
+
+If tests fail, please check your PYTHONPATH, try to :code:`import gudhi`,
+and check the errors.
+The problem can come from a bad link to, or a bad installation of, a
+third-party library.
+
+If :code:`import gudhi` succeeds, please have a look at the debug information:
+
+.. code-block:: python
+
+ import gudhi
+ print(gudhi.__debug_info__)
+
+You should get something like:
+
+.. code-block:: none
+
+ Python version 2.7.15
+ Cython version 0.26.1
+ Eigen3 version 3.1.1
+ Installed modules are: off_reader;simplex_tree;rips_complex;cubical_complex;periodic_cubical_complex;
+ persistence_graphical_tools;reader_utils;witness_complex;strong_witness_complex;alpha_complex;
+ euclidean_witness_complex;euclidean_strong_witness_complex;
+ Missing modules are: bottleneck_distance;nerve_gic;subsampling;tangential_complex;persistence_graphical_tools;
+ CGAL version 4.7.1000
+ GMP_LIBRARIES = /usr/lib/x86_64-linux-gnu/libgmp.so
+ GMPXX_LIBRARIES = /usr/lib/x86_64-linux-gnu/libgmpxx.so
+ TBB version 9107 found and used
+
+Here, you can see that bottleneck_distance, nerve_gic, subsampling and
+tangential_complex are missing because of the CGAL version.
+persistence_graphical_tools is not available as numpy and matplotlib are not
+available.
+Unit tests cannot be run because pytest is missing.
+
+A complete configuration would be:
+
+.. code-block:: none
+
+ Python version 3.6.5
+ Cython version 0.28.2
+ Pytest version 3.3.2
+ Matplotlib version 2.2.2
+ Numpy version 1.14.5
+ Eigen3 version 3.3.4
+ Installed modules are: off_reader;simplex_tree;rips_complex;cubical_complex;periodic_cubical_complex;
+ persistence_graphical_tools;reader_utils;witness_complex;strong_witness_complex;persistence_graphical_tools;
+ bottleneck_distance;nerve_gic;subsampling;tangential_complex;alpha_complex;euclidean_witness_complex;
+ euclidean_strong_witness_complex;
+ CGAL header only version 4.11.0
+ GMP_LIBRARIES = /usr/lib/x86_64-linux-gnu/libgmp.so
+ GMPXX_LIBRARIES = /usr/lib/x86_64-linux-gnu/libgmpxx.so
+ TBB version 9107 found and used
+
Documentation
=============
diff --git a/src/cython/doc/nerve_gic_complex_ref.rst b/src/cython/doc/nerve_gic_complex_ref.rst
new file mode 100644
index 00000000..e24e01fc
--- /dev/null
+++ b/src/cython/doc/nerve_gic_complex_ref.rst
@@ -0,0 +1,10 @@
+================================
+Cover complexes reference manual
+================================
+
+.. autoclass:: gudhi.CoverComplex
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+ .. automethod:: gudhi.CoverComplex.__init__
diff --git a/src/cython/doc/nerve_gic_complex_sum.rst b/src/cython/doc/nerve_gic_complex_sum.rst
new file mode 100644
index 00000000..72782c7a
--- /dev/null
+++ b/src/cython/doc/nerve_gic_complex_sum.rst
@@ -0,0 +1,15 @@
+================================================================= =================================== ===================================
+:Author: Mathieu Carrière :Introduced in: GUDHI 2.1.0 :Copyright: GPL v3
+:Requires: CGAL :math:`\geq` 4.8.1
+================================================================= =================================== ===================================
+
++----------------------------------------------------------------+------------------------------------------------------------------------+
+| .. figure:: | Nerves and Graph Induced Complexes are cover complexes, i.e. |
+| ../../doc/Nerve_GIC/gicvisu.jpg | simplicial complexes that provably contain topological information |
+| :alt: Graph Induced Complex of a point cloud. | about the input data. They can be computed with a cover of the data, |
+| :figclass: align-center | that comes i.e. from the preimage of a family of intervals covering |
+| | the image of a scalar-valued function defined on the data. |
+| Graph Induced Complex of a point cloud. | |
++----------------------------------------------------------------+------------------------------------------------------------------------+
+| :doc:`nerve_gic_complex_user` | :doc:`nerve_gic_complex_ref` |
++----------------------------------------------------------------+------------------------------------------------------------------------+
diff --git a/src/cython/doc/nerve_gic_complex_user.rst b/src/cython/doc/nerve_gic_complex_user.rst
new file mode 100644
index 00000000..d774827e
--- /dev/null
+++ b/src/cython/doc/nerve_gic_complex_user.rst
@@ -0,0 +1,312 @@
+Cover complexes user manual
+===========================
+Definition
+----------
+
+.. include:: nerve_gic_complex_sum.rst
+
+Visualizations of the simplicial complexes can be done with
+neato (from `graphviz <http://www.graphviz.org/>`_),
+`geomview <http://www.geomview.org/>`_,
+or `KeplerMapper <https://github.com/MLWave/kepler-mapper>`_.
+Input point clouds are assumed to be
+`OFF files <http://www.geomview.org/docs/html/OFF.html>`_.
+
+Covers
+------
+
+Nerves and Graph Induced Complexes require a cover C of the input point cloud P,
+that is, a set of subsets of P whose union is P itself.
+Very often, this cover is obtained from the preimage of a family of intervals covering
+the image of some scalar-valued function f defined on P. This family is parameterized
+by its resolution, which can be either the number or the length of the intervals,
+and its gain, which is the overlap percentage between consecutive intervals (ordered by their first values).
+
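+The following sketch (independent of the library internals) shows one way such
+a family of intervals can be produced from the number of intervals and the
+gain; compare with the intervals printed in the Nerve example below.
+
+.. code-block:: python
+
+    def uniform_intervals(minf, maxf, nb_intervals, gain):
+        # base length of each interval before extension
+        length = (maxf - minf) / nb_intervals
+        # extend each interval on both sides so that the overlap ratio
+        # between two consecutive intervals equals the gain
+        extension = gain * length / (2 * (1 - gain))
+        return [(max(minf, minf + i * length - extension),
+                 min(maxf, minf + (i + 1) * length + extension))
+                for i in range(nb_intervals)]
+
+    for i, (a, b) in enumerate(uniform_intervals(-0.979672, 0.816414, 10, 0.3)):
+        print('Interval %d = [%g, %g]' % (i, a, b))
+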
+Nerves
+------
+
+Nerve definition
+^^^^^^^^^^^^^^^^
+
+Assume you are given a cover C of your point cloud P. Then, the Nerve of this cover
+is the simplicial complex that has one (k-1)-simplex per k-fold intersection of cover elements.
+See also `Wikipedia <https://en.wikipedia.org/wiki/Nerve_of_a_covering>`_.
+
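+As an illustration, the short standalone sketch below (independent of GUDHI)
+lists the simplices of the nerve of a toy cover given as sets of point indices:
+
+.. code-block:: python
+
+    from itertools import combinations
+
+    def nerve(cover, max_dim=2):
+        # cover maps a cover element id to the set of points it contains
+        ids = list(cover)
+        simplices = [(i,) for i in ids]
+        for k in range(2, max_dim + 2):
+            for subset in combinations(ids, k):
+                if set.intersection(*(cover[i] for i in subset)):
+                    simplices.append(subset)
+        return simplices
+
+    cover = {'a': {0, 1, 2}, 'b': {2, 3}, 'c': {3, 4, 5}}
+    print(nerve(cover))  # [('a',), ('b',), ('c',), ('a', 'b'), ('b', 'c')]
+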
+.. figure::
+ ../../doc/Nerve_GIC/nerve.png
+ :figclass: align-center
+ :alt: Nerve of a double torus
+
+ Nerve of a double torus
+
+Example
+^^^^^^^
+
+This example builds the Nerve of a point cloud sampled on a 3D human shape (human.off).
+The cover C comes from the preimages of intervals (10 intervals with gain 0.3)
+covering the height function (coordinate 2),
+which are then refined into their connected components using the triangulation of the .OFF file.
+
+.. testcode::
+
+ import gudhi
+ nerve_complex = gudhi.CoverComplex()
+ nerve_complex.set_verbose(True)
+
+ if (nerve_complex.read_point_cloud(gudhi.__root_source_dir__ + \
+ '/data/points/human.off')):
+ nerve_complex.set_type('Nerve')
+ nerve_complex.set_color_from_coordinate(2)
+ nerve_complex.set_function_from_coordinate(2)
+ nerve_complex.set_graph_from_OFF()
+ nerve_complex.set_resolution_with_interval_number(10)
+ nerve_complex.set_gain(0.3)
+ nerve_complex.set_cover_from_function()
+ nerve_complex.find_simplices()
+ nerve_complex.write_info()
+ simplex_tree = nerve_complex.create_simplex_tree()
+ nerve_complex.compute_PD()
+ result_str = 'Nerve is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \
+ repr(simplex_tree.num_simplices()) + ' simplices - ' + \
+ repr(simplex_tree.num_vertices()) + ' vertices.'
+ print(result_str)
+ for filtered_value in simplex_tree.get_filtration():
+ print(filtered_value[0])
+
+the program output is:
+
+.. code-block:: none
+
+ Min function value = -0.979672 and Max function value = 0.816414
+ Interval 0 = [-0.979672, -0.761576]
+ Interval 1 = [-0.838551, -0.581967]
+ Interval 2 = [-0.658942, -0.402359]
+ Interval 3 = [-0.479334, -0.22275]
+ Interval 4 = [-0.299725, -0.0431414]
+ Interval 5 = [-0.120117, 0.136467]
+ Interval 6 = [0.059492, 0.316076]
+ Interval 7 = [0.239101, 0.495684]
+ Interval 8 = [0.418709, 0.675293]
+ Interval 9 = [0.598318, 0.816414]
+ Computing preimages...
+ Computing connected components...
+ 5 interval(s) in dimension 0:
+ [-0.909111, 0.0081753]
+ [-0.171433, 0.367393]
+ [-0.171433, 0.367393]
+ [-0.909111, 0.745853]
+ 0 interval(s) in dimension 1:
+
+.. testoutput::
+
+ Nerve is of dimension 1 - 41 simplices - 21 vertices.
+ [0]
+ [1]
+ [4]
+ [1, 4]
+ [2]
+ [0, 2]
+ [8]
+ [2, 8]
+ [5]
+ [4, 5]
+ [9]
+ [8, 9]
+ [13]
+ [5, 13]
+ [14]
+ [9, 14]
+ [19]
+ [13, 19]
+ [25]
+ [32]
+ [20]
+ [20, 32]
+ [33]
+ [25, 33]
+ [26]
+ [14, 26]
+ [19, 26]
+ [42]
+ [26, 42]
+ [34]
+ [33, 34]
+ [27]
+ [20, 27]
+ [35]
+ [27, 35]
+ [34, 35]
+ [35, 42]
+ [44]
+ [35, 44]
+ [54]
+ [44, 54]
+
+
+The program also writes a file ../../data/points/human.off_sc.txt. The first
+three lines in this file are the location of the input point cloud and the
+function used to compute the cover.
+The fourth line contains the number of vertices nv and edges ne of the Nerve.
+The next nv lines represent the vertices. Each line contains the vertex ID,
+the number of data points it contains, and their average color function value.
+Finally, the next ne lines represent the edges, each characterized by the IDs
+of its two vertices.
+
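+A rough sketch of how this file can be read back, assuming whitespace-separated
+fields as described above:
+
+.. code-block:: python
+
+    with open('../../data/points/human.off_sc.txt') as f:
+        # location of the input point cloud and of the cover function
+        header = [next(f).rstrip() for _ in range(3)]
+        nv, ne = map(int, next(f).split())
+        # vertex id, number of data points, average color function value
+        vertices = [next(f).split() for _ in range(nv)]
+        # each edge is given by the ids of its two vertices
+        edges = [tuple(map(int, next(f).split())) for _ in range(ne)]
+    print('%d vertices and %d edges in the Nerve' % (nv, ne))
+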
+Using KeplerMapper, one can obtain the following visualization:
+
+.. figure::
+ ../../doc/Nerve_GIC/nervevisu.jpg
+ :figclass: align-center
+ :alt: Visualization with KeplerMapper
+
+ Visualization with KeplerMapper
+
+Graph Induced Complexes (GIC)
+-----------------------------
+
+GIC definition
+^^^^^^^^^^^^^^
+
+Again, assume you are given a cover C of your point cloud P. Moreover, assume
+you are also given a graph G built on top of P. Then, for any clique in G
+whose nodes all belong to different elements of C, the GIC includes a
+corresponding simplex, whose dimension is the number of nodes in the clique
+minus one.
+See :cite:`Dey13` for more details.
+
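+As an illustration, here is a naive standalone sketch of this definition, only
+meant for toy inputs (it enumerates all candidate simplices explicitly):
+
+.. code-block:: python
+
+    from itertools import combinations, product
+
+    def gic(edges, cover, max_dim=2):
+        # cover maps a cover element id to the set of graph nodes it contains
+        def is_clique(nodes):
+            return all((a, b) in edges or (b, a) in edges
+                       for a, b in combinations(nodes, 2))
+        simplices = [(c,) for c in cover]
+        for k in range(2, max_dim + 2):
+            for elems in combinations(list(cover), k):
+                # look for a clique of G made of distinct nodes, one node
+                # lying in each of the selected cover elements
+                if any(len(set(nodes)) == k and is_clique(nodes)
+                       for nodes in product(*(cover[c] for c in elems))):
+                    simplices.append(elems)
+        return simplices
+
+    edges = {(0, 1), (1, 2), (2, 3)}
+    cover = {'a': {0, 1}, 'b': {1, 2}, 'c': {2, 3}}
+    print(gic(edges, cover))
+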
+.. figure::
+ ../../doc/Nerve_GIC/GIC.jpg
+ :figclass: align-center
+ :alt: GIC of a point cloud
+
+ GIC of a point cloud
+
+Example with cover from Voronoï
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+This example builds the GIC of a point cloud sampled on a 3D human shape
+(human.off).
+We randomly subsample points in the point cloud (700 in the code below), which
+act as seeds of a geodesic Voronoï diagram. Each cell of the diagram is then an
+element of C.
+The graph G (used to compute both the geodesics for Voronoï and the GIC)
+comes from the triangulation of the human shape. Note that the resulting
+simplicial complex has dimension 3 in this example.
+
+.. testcode::
+
+ import gudhi
+ nerve_complex = gudhi.CoverComplex()
+
+ if (nerve_complex.read_point_cloud(gudhi.__root_source_dir__ + \
+ '/data/points/human.off')):
+ nerve_complex.set_type('GIC')
+ nerve_complex.set_color_from_coordinate()
+ nerve_complex.set_graph_from_OFF()
+ nerve_complex.set_cover_from_Voronoi(700)
+ nerve_complex.find_simplices()
+ nerve_complex.plot_off()
+
+the program writes the simplicial complex in an OFF file (human.off_sc.off here). Using e.g.
+
+.. code-block:: none
+
+ geomview ../../data/points/human.off_sc.off
+
+one can obtain the following visualization:
+
+.. figure::
+ ../../doc/Nerve_GIC/gicvoronoivisu.jpg
+ :figclass: align-center
+ :alt: Visualization with Geomview
+
+ Visualization with Geomview
+
+Functional GIC
+^^^^^^^^^^^^^^
+
+If one restricts to the cliques in G whose nodes all belong to preimages of
+consecutive intervals (assuming the cover of the height function is minimal,
+i.e. no more than two intervals can intersect at a time), the GIC is of
+dimension one, i.e. a graph.
+We call this graph the functional GIC. See :cite:`Carriere16` for more details.
+
+Example
+^^^^^^^
+
+Functional GIC comes with automatic selection of the Rips threshold,
+the resolution and the gain of the function cover. See :cite:`Carriere17c` for
+more details. In this example, we compute the functional GIC of a Klein bottle
+embedded in R^5, where the graph G comes from a Rips complex with automatic
+threshold, and the cover C comes from the preimages of intervals covering the
+first coordinate, with automatic resolution and gain. Note that automatic
+threshold, resolution and gain can be computed as well for the Nerve.
+
+.. testcode::
+
+ import gudhi
+ nerve_complex = gudhi.CoverComplex()
+
+ if (nerve_complex.read_point_cloud(gudhi.__root_source_dir__ + \
+ '/data/points/KleinBottle5D.off')):
+ nerve_complex.set_type('GIC')
+ nerve_complex.set_color_from_coordinate(0)
+ nerve_complex.set_function_from_coordinate(0)
+ nerve_complex.set_graph_from_automatic_rips()
+ nerve_complex.set_automatic_resolution()
+ nerve_complex.set_gain()
+ nerve_complex.set_cover_from_function()
+ nerve_complex.find_simplices()
+ nerve_complex.plot_dot()
+
+the program writes the simplicial complex in a DOT file (KleinBottle5D.off_sc.dot here). Using e.g.
+
+.. code-block:: none
+
+ neato ../../data/points/KleinBottle5D.off_sc.dot -Tpdf -o ../../data/points/KleinBottle5D.off_sc.pdf
+
+one can obtain the following visualization:
+
+.. figure::
+ ../../doc/Nerve_GIC/coordGICvisu2.jpg
+ :figclass: align-center
+ :alt: Visualization with neato
+
+ Visualization with neato
+
+where nodes are colored by the filter function values and, for each node, the
+first number is its ID and the second is the number of data points that it
+contains.
+
+We also provide an example on a set of 72 pictures taken around the same object
+(lucky_cat.off).
+The function is now the first eigenfunction given by PCA, whose values are
+written in a file (lucky_cat_PCA1). Threshold, resolution and gain are
+automatically selected as before.
+
+.. testcode::
+
+ import gudhi
+ nerve_complex = gudhi.CoverComplex()
+
+ if (nerve_complex.read_point_cloud(gudhi.__root_source_dir__ + \
+ '/data/points/COIL_database/lucky_cat.off')):
+ nerve_complex.set_type('GIC')
+ pca_file = gudhi.__root_source_dir__ + \
+ '/data/points/COIL_database/lucky_cat_PCA1'
+ nerve_complex.set_color_from_file(pca_file)
+ nerve_complex.set_function_from_file(pca_file)
+ nerve_complex.set_graph_from_automatic_rips()
+ nerve_complex.set_automatic_resolution()
+ nerve_complex.set_gain()
+ nerve_complex.set_cover_from_function()
+ nerve_complex.find_simplices()
+ nerve_complex.plot_dot()
+
+the program again writes an _sc.dot file, which gives the following visualization after using neato:
+
+.. figure::
+ ../../doc/Nerve_GIC/funcGICvisu.jpg
+ :figclass: align-center
+ :alt: Visualization with neato
+
+ Visualization with neato
diff --git a/src/cython/doc/periodic_cubical_complex_ref.rst b/src/cython/doc/periodic_cubical_complex_ref.rst
index c6190a1b..4b831647 100644
--- a/src/cython/doc/periodic_cubical_complex_ref.rst
+++ b/src/cython/doc/periodic_cubical_complex_ref.rst
@@ -1,3 +1,7 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
Periodic cubical complex reference manual
#########################################
diff --git a/src/cython/doc/persistence_graphical_tools_ref.rst b/src/cython/doc/persistence_graphical_tools_ref.rst
index 27c2f68a..a2c6bcef 100644
--- a/src/cython/doc/persistence_graphical_tools_ref.rst
+++ b/src/cython/doc/persistence_graphical_tools_ref.rst
@@ -1,8 +1,11 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
============================================
Persistence graphical tools reference manual
============================================
.. autofunction:: gudhi.__min_birth_max_death
-.. autofunction:: gudhi.show_palette_values
.. autofunction:: gudhi.plot_persistence_barcode
.. autofunction:: gudhi.plot_persistence_diagram
diff --git a/src/cython/doc/persistence_graphical_tools_sum.rst b/src/cython/doc/persistence_graphical_tools_sum.inc
index d602daa7..d602daa7 100644
--- a/src/cython/doc/persistence_graphical_tools_sum.rst
+++ b/src/cython/doc/persistence_graphical_tools_sum.inc
diff --git a/src/cython/doc/persistence_graphical_tools_user.rst b/src/cython/doc/persistence_graphical_tools_user.rst
index a5523d23..292915eb 100644
--- a/src/cython/doc/persistence_graphical_tools_user.rst
+++ b/src/cython/doc/persistence_graphical_tools_user.rst
@@ -1,49 +1,26 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
Persistence graphical tools user manual
=======================================
Definition
----------
-.. include:: persistence_graphical_tools_sum.rst
-
-
-Show palette values
--------------------
-
-This function is useful to show the color palette values of dimension:
-
+.. include:: persistence_graphical_tools_sum.inc
-.. testcode::
-
- import gudhi
- plt = gudhi.show_palette_values(alpha=1.0)
- plt.show()
-
-.. plot::
-
- import gudhi
- plt = gudhi.show_palette_values(alpha=1.0)
- plt.show()
Show persistence as a barcode
-----------------------------
This function can display the persistence result as a barcode:
-.. testcode::
-
- import gudhi
-
- periodic_cc = gudhi.PeriodicCubicalComplex(perseus_file=gudhi.__root_source_dir__ + \
- '/data/bitmap/3d_torus.txt')
- diag = periodic_cc.persistence()
- plt = gudhi.plot_persistence_barcode(diag)
- plt.show()
-
.. plot::
+ :include-source:
import gudhi
- periodic_cc = gudhi.PeriodicCubicalComplex(perseus_file=gudhi.__root_source_dir__ + \
- '/data/bitmap/3d_torus.txt')
+ perseus_file = gudhi.__root_source_dir__ + '/data/bitmap/3d_torus.txt'
+ periodic_cc = gudhi.PeriodicCubicalComplex(perseus_file=perseus_file)
diag = periodic_cc.persistence()
print("diag = ", diag)
plt = gudhi.plot_persistence_barcode(diag)
@@ -54,24 +31,32 @@ Show persistence as a diagram
This function can display the persistence result as a diagram:
-.. testcode::
+.. plot::
+ :include-source:
import gudhi
-
- point_cloud = gudhi.read_off(off_file=gudhi.__root_source_dir__ + '/data/points/tore3D_1307.off')
- rips_complex = gudhi.RipsComplex(points=point_cloud, max_edge_length=0.2)
- simplex_tree = rips_complex.create_simplex_tree(max_dimension=3)
- diag = simplex_tree.persistence()
- plt = gudhi.plot_persistence_diagram(diag, band_boot=0.13)
+
+ # rips_on_tore3D_1307.pers obtained from write_persistence_diagram method
+ persistence_file=gudhi.__root_source_dir__ + \
+ '/data/persistence_diagram/rips_on_tore3D_1307.pers'
+ plt = gudhi.plot_persistence_diagram(persistence_file=persistence_file,
+ legend=True)
plt.show()
+If you want more information on a specific dimension, for instance:
+
.. plot::
+ :include-source:
import gudhi
- point_cloud = gudhi.read_off(off_file=gudhi.__root_source_dir__ + '/data/points/tore3D_1307.off')
- rips_complex = gudhi.RipsComplex(points=point_cloud, max_edge_length=0.2)
- simplex_tree = rips_complex.create_simplex_tree(max_dimension=3)
- diag = simplex_tree.persistence()
- plt = gudhi.plot_persistence_diagram(diag, band_boot=0.13)
+ persistence_file=gudhi.__root_source_dir__ + \
+ '/data/persistence_diagram/rips_on_tore3D_1307.pers'
+ diag = \
+ gudhi.read_persistence_intervals_grouped_by_dimension(persistence_file=\
+ persistence_file)
+ dim = 1
+ # Display all points with some transparency
+ plt = gudhi.plot_persistence_diagram([(dim,interval) for interval in diag[dim]],
+ max_plots=0, alpha=0.1)
plt.show()
diff --git a/src/cython/doc/persistent_cohomology_sum.rst b/src/cython/doc/persistent_cohomology_sum.inc
index a26df1dc..a26df1dc 100644
--- a/src/cython/doc/persistent_cohomology_sum.rst
+++ b/src/cython/doc/persistent_cohomology_sum.inc
diff --git a/src/cython/doc/persistent_cohomology_user.rst b/src/cython/doc/persistent_cohomology_user.rst
index bf90c163..ce7fc685 100644
--- a/src/cython/doc/persistent_cohomology_user.rst
+++ b/src/cython/doc/persistent_cohomology_user.rst
@@ -1,3 +1,7 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
Persistent cohomology user manual
=================================
Definition
diff --git a/src/cython/doc/pyplots/barcode_persistence.py b/src/cython/doc/pyplots/barcode_persistence.py
deleted file mode 100755
index de33d506..00000000
--- a/src/cython/doc/pyplots/barcode_persistence.py
+++ /dev/null
@@ -1,7 +0,0 @@
-import gudhi
-
-periodic_cc = gudhi.PeriodicCubicalComplex(perseus_file=gudhi.__root_source_dir__ + \
- '/data/bitmap/3d_torus.txt')
-diag = periodic_cc.persistence()
-plt = gudhi.plot_persistence_barcode(diag)
-plt.show()
diff --git a/src/cython/doc/pyplots/diagram_persistence.py b/src/cython/doc/pyplots/diagram_persistence.py
deleted file mode 100755
index ac20bf47..00000000
--- a/src/cython/doc/pyplots/diagram_persistence.py
+++ /dev/null
@@ -1,9 +0,0 @@
-import gudhi
-
-point_cloud = gudhi.read_off(off_file=gudhi.__root_source_dir__ + \
- '/data/points/tore3D_1307.off')
-rips_complex = gudhi.RipsComplex(points=point_cloud, max_edge_length=0.2)
-simplex_tree = rips_complex.create_simplex_tree(max_dimension=3)
-diag = simplex_tree.persistence()
-plt = gudhi.plot_persistence_diagram(diag, band_boot=0.13)
-plt.show()
diff --git a/src/cython/doc/pyplots/show_palette_values.py b/src/cython/doc/pyplots/show_palette_values.py
deleted file mode 100755
index fdf9645f..00000000
--- a/src/cython/doc/pyplots/show_palette_values.py
+++ /dev/null
@@ -1,3 +0,0 @@
-import gudhi
-plt = gudhi.show_palette_values(alpha=1.0)
-plt.show()
diff --git a/src/cython/doc/reader_utils_ref.rst b/src/cython/doc/reader_utils_ref.rst
index 9c1ea6fc..f3ecebad 100644
--- a/src/cython/doc/reader_utils_ref.rst
+++ b/src/cython/doc/reader_utils_ref.rst
@@ -1,3 +1,7 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
=============================
Reader utils reference manual
=============================
diff --git a/src/cython/doc/rips_complex_ref.rst b/src/cython/doc/rips_complex_ref.rst
index b17dc4e0..22b5616c 100644
--- a/src/cython/doc/rips_complex_ref.rst
+++ b/src/cython/doc/rips_complex_ref.rst
@@ -1,3 +1,7 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
=============================
Rips complex reference manual
=============================
diff --git a/src/cython/doc/rips_complex_sum.rst b/src/cython/doc/rips_complex_sum.inc
index 5616bfa9..5616bfa9 100644
--- a/src/cython/doc/rips_complex_sum.rst
+++ b/src/cython/doc/rips_complex_sum.inc
diff --git a/src/cython/doc/rips_complex_user.rst b/src/cython/doc/rips_complex_user.rst
index 7738aef0..a8c06cf9 100644
--- a/src/cython/doc/rips_complex_user.rst
+++ b/src/cython/doc/rips_complex_user.rst
@@ -1,3 +1,7 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
Rips complex user manual
=========================
Definition
@@ -243,7 +247,7 @@ the program output is:
[3, 6] -> 11.00
Correlation matrix
----------------
+------------------
Example from a correlation matrix
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/src/cython/doc/simplex_tree_ref.rst b/src/cython/doc/simplex_tree_ref.rst
index 6d196843..9eb8c199 100644
--- a/src/cython/doc/simplex_tree_ref.rst
+++ b/src/cython/doc/simplex_tree_ref.rst
@@ -1,3 +1,7 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
=============================
Simplex tree reference manual
=============================
diff --git a/src/cython/doc/simplex_tree_sum.rst b/src/cython/doc/simplex_tree_sum.inc
index fb0e54c1..fb0e54c1 100644
--- a/src/cython/doc/simplex_tree_sum.rst
+++ b/src/cython/doc/simplex_tree_sum.inc
diff --git a/src/cython/doc/simplex_tree_user.rst b/src/cython/doc/simplex_tree_user.rst
index 4b1dde19..aebeb29f 100644
--- a/src/cython/doc/simplex_tree_user.rst
+++ b/src/cython/doc/simplex_tree_user.rst
@@ -1,9 +1,13 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
Simplex tree user manual
========================
Definition
----------
-.. include:: simplex_tree_sum.rst
+.. include:: simplex_tree_sum.inc
A simplicial complex :math:`\mathbf{K}` on a set of vertices :math:`V = \{1, \cdots ,|V|\}` is a collection of
simplices :math:`\{\sigma\}`, :math:`\sigma \subseteq V` such that
diff --git a/src/cython/doc/strong_witness_complex_ref.rst b/src/cython/doc/strong_witness_complex_ref.rst
index 4ed4fe46..d624d711 100644
--- a/src/cython/doc/strong_witness_complex_ref.rst
+++ b/src/cython/doc/strong_witness_complex_ref.rst
@@ -1,3 +1,7 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
=======================================
Strong witness complex reference manual
=======================================
diff --git a/src/cython/doc/tangential_complex_ref.rst b/src/cython/doc/tangential_complex_ref.rst
index 35589475..cdfda082 100644
--- a/src/cython/doc/tangential_complex_ref.rst
+++ b/src/cython/doc/tangential_complex_ref.rst
@@ -1,3 +1,7 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
===================================
Tangential complex reference manual
===================================
diff --git a/src/cython/doc/tangential_complex_sum.rst b/src/cython/doc/tangential_complex_sum.inc
index 72b4d7ba..72b4d7ba 100644
--- a/src/cython/doc/tangential_complex_sum.rst
+++ b/src/cython/doc/tangential_complex_sum.inc
diff --git a/src/cython/doc/tangential_complex_user.rst b/src/cython/doc/tangential_complex_user.rst
index efa6d7ce..5ce69e86 100644
--- a/src/cython/doc/tangential_complex_user.rst
+++ b/src/cython/doc/tangential_complex_user.rst
@@ -1,6 +1,10 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
Tangential complex user manual
==============================
-.. include:: tangential_complex_sum.rst
+.. include:: tangential_complex_sum.inc
Definition
----------
@@ -122,8 +126,8 @@ This example builds the Tangential complex of point set read in an OFF file.
.. testcode::
import gudhi
- tc = gudhi.TangentialComplex(off_file=gudhi.__root_source_dir__ + \
- '/data/points/alphacomplexdoc.off')
+ tc = gudhi.TangentialComplex(intrisic_dim = 1,
+ off_file=gudhi.__root_source_dir__ + '/data/points/alphacomplexdoc.off')
result_str = 'Tangential contains ' + repr(tc.num_simplices()) + \
' simplices - ' + repr(tc.num_vertices()) + ' vertices.'
print(result_str)
@@ -169,7 +173,8 @@ simplices.
.. testcode::
import gudhi
- tc = gudhi.TangentialComplex(points=[[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]])
+ tc = gudhi.TangentialComplex(intrisic_dim = 1,
+ points=[[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]])
result_str = 'Tangential contains ' + repr(tc.num_vertices()) + ' vertices.'
print(result_str)
diff --git a/src/cython/doc/todos.rst b/src/cython/doc/todos.rst
index 78972a4c..ca274ced 100644
--- a/src/cython/doc/todos.rst
+++ b/src/cython/doc/todos.rst
@@ -1,3 +1,7 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
==========
To be done
==========
diff --git a/src/cython/doc/witness_complex_ref.rst b/src/cython/doc/witness_complex_ref.rst
index c78760cb..9987d3fd 100644
--- a/src/cython/doc/witness_complex_ref.rst
+++ b/src/cython/doc/witness_complex_ref.rst
@@ -1,3 +1,7 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
================================
Witness complex reference manual
================================
diff --git a/src/cython/doc/witness_complex_sum.rst b/src/cython/doc/witness_complex_sum.inc
index a8a126a0..a8a126a0 100644
--- a/src/cython/doc/witness_complex_sum.rst
+++ b/src/cython/doc/witness_complex_sum.inc
diff --git a/src/cython/doc/witness_complex_user.rst b/src/cython/doc/witness_complex_user.rst
index 99be5185..40e94134 100644
--- a/src/cython/doc/witness_complex_user.rst
+++ b/src/cython/doc/witness_complex_user.rst
@@ -1,7 +1,11 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
Witness complex user manual
===========================
-.. include:: witness_complex_sum.rst
+.. include:: witness_complex_sum.inc
Definitions
-----------
diff --git a/src/cython/example/alpha_complex_diagram_persistence_from_off_file_example.py b/src/cython/example/alpha_complex_diagram_persistence_from_off_file_example.py
index 27550025..4abe22d4 100755
--- a/src/cython/example/alpha_complex_diagram_persistence_from_off_file_example.py
+++ b/src/cython/example/alpha_complex_diagram_persistence_from_off_file_example.py
@@ -38,7 +38,7 @@ parser = argparse.ArgumentParser(description='AlphaComplex creation from '
'points from the given OFF file.')
parser.add_argument("-f", "--file", type=str, required=True)
parser.add_argument("-a", "--max_alpha_square", type=float, default=0.5)
-parser.add_argument("-b", "--band_boot", type=float, default=0.)
+parser.add_argument("-b", "--band", type=float, default=0.)
parser.add_argument('--no-diagram', default=False, action='store_true' , help='Flag for not to display the diagrams')
args = parser.parse_args()
@@ -64,7 +64,7 @@ with open(args.file, 'r') as f:
print(simplex_tree.betti_numbers())
if args.no_diagram == False:
- pplot = gudhi.plot_persistence_diagram(diag, band_boot=args.band_boot)
+ pplot = gudhi.plot_persistence_diagram(diag, band=args.band)
pplot.show()
else:
print(args.file, "is not a valid OFF file")
diff --git a/src/cython/example/coordinate_graph_induced_complex.py b/src/cython/example/coordinate_graph_induced_complex.py
new file mode 100755
index 00000000..9e93109a
--- /dev/null
+++ b/src/cython/example/coordinate_graph_induced_complex.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python
+
+import gudhi
+import argparse
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2018 Inria
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2018 Inria"
+__license__ = "GPL v3"
+
+parser = argparse.ArgumentParser(description='Coordinate GIC '
+                                 'from points read in an OFF file.',
+ epilog='Example: '
+ 'example/coordinate_graph_induced_complex.py '
+                                 '-f ../data/points/KleinBottle5D.off -c 0 -v '
+ '- Constructs the coordinate GIC with the '
+ 'points from the given OFF file.')
+parser.add_argument("-f", "--file", type=str, required=True)
+parser.add_argument("-c", "--coordinate", type=int, default=0)
+parser.add_argument("-v", "--verbose", default=False, action='store_true' , help='Flag for program verbosity')
+
+args = parser.parse_args()
+
+nerve_complex = gudhi.CoverComplex()
+nerve_complex.set_verbose(args.verbose)
+
+if (nerve_complex.read_point_cloud(args.file)):
+ nerve_complex.set_type('GIC')
+ nerve_complex.set_color_from_coordinate(args.coordinate)
+ nerve_complex.set_function_from_coordinate(args.coordinate)
+ nerve_complex.set_graph_from_automatic_rips()
+ nerve_complex.set_automatic_resolution()
+ nerve_complex.set_gain()
+ nerve_complex.set_cover_from_function()
+ nerve_complex.find_simplices()
+ nerve_complex.plot_dot()
+ simplex_tree = nerve_complex.create_simplex_tree()
+ nerve_complex.compute_PD()
+ if (args.verbose):
+ print('Iterator on coordinate GIC simplices')
+ result_str = 'Coordinate GIC is of dimension ' + \
+ repr(simplex_tree.dimension()) + ' - ' + \
+ repr(simplex_tree.num_simplices()) + ' simplices - ' + \
+ repr(simplex_tree.num_vertices()) + ' vertices.'
+ print(result_str)
+ for filtered_value in simplex_tree.get_filtration():
+ print(filtered_value[0])
diff --git a/src/cython/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py b/src/cython/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py
index 1c142d9a..3b29781f 100755
--- a/src/cython/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py
+++ b/src/cython/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py
@@ -40,7 +40,7 @@ parser.add_argument("-f", "--file", type=str, required=True)
parser.add_argument("-a", "--max_alpha_square", type=float, required=True)
parser.add_argument("-n", "--number_of_landmarks", type=int, required=True)
parser.add_argument("-d", "--limit_dimension", type=int, required=True)
-parser.add_argument("-b", "--band_boot", type=float, default=0.)
+parser.add_argument("-b", "--band", type=float, default=0.)
parser.add_argument('--no-diagram', default=False, action='store_true' , help='Flag for not to display the diagrams')
args = parser.parse_args()
@@ -71,7 +71,7 @@ with open(args.file, 'r') as f:
print(simplex_tree.betti_numbers())
if args.no_diagram == False:
- pplot = gudhi.plot_persistence_diagram(diag, band_boot=args.band_boot)
+ pplot = gudhi.plot_persistence_diagram(diag, band=args.band)
pplot.show()
else:
print(args.file, "is not a valid OFF file")
diff --git a/src/cython/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py b/src/cython/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py
index 216fcff2..db34962d 100755
--- a/src/cython/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py
+++ b/src/cython/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py
@@ -40,7 +40,7 @@ parser.add_argument("-f", "--file", type=str, required=True)
parser.add_argument("-a", "--max_alpha_square", type=float, required=True)
parser.add_argument("-n", "--number_of_landmarks", type=int, required=True)
parser.add_argument("-d", "--limit_dimension", type=int, required=True)
-parser.add_argument("-b", "--band_boot", type=float, default=0.)
+parser.add_argument("-b", "--band", type=float, default=0.)
parser.add_argument('--no-diagram', default=False, action='store_true' , help='Flag for not to display the diagrams')
args = parser.parse_args()
@@ -71,7 +71,7 @@ with open(args.file, 'r') as f:
print(simplex_tree.betti_numbers())
if args.no_diagram == False:
- pplot = gudhi.plot_persistence_diagram(diag, band_boot=args.band_boot)
+ pplot = gudhi.plot_persistence_diagram(diag, band=args.band)
pplot.show()
else:
print(args.file, "is not a valid OFF file")
diff --git a/src/cython/example/functional_graph_induced_complex.py b/src/cython/example/functional_graph_induced_complex.py
new file mode 100755
index 00000000..6ad7c2ec
--- /dev/null
+++ b/src/cython/example/functional_graph_induced_complex.py
@@ -0,0 +1,69 @@
+#!/usr/bin/env python
+
+import gudhi
+import argparse
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2018 Inria
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2018 Inria"
+__license__ = "GPL v3"
+
+parser = argparse.ArgumentParser(description='Functional GIC '
+                                 'from points read in an OFF file.',
+ epilog='Example: '
+ 'example/functional_graph_induced_complex.py '
+ '-o ../data/points/COIL_database/lucky_cat.off '
+                                 '-f ../data/points/COIL_database/lucky_cat_PCA1 '
+ '- Constructs the functional GIC with the '
+ 'points from the given OFF and function files.')
+parser.add_argument("-o", "--off-file", type=str, required=True)
+parser.add_argument("-f", "--function-file", type=str, required=True)
+parser.add_argument("-v", "--verbose", default=False, action='store_true' , help='Flag for program verbosity')
+
+args = parser.parse_args()
+
+nerve_complex = gudhi.CoverComplex()
+nerve_complex.set_verbose(args.verbose)
+
+if (nerve_complex.read_point_cloud(args.off_file)):
+ nerve_complex.set_type('GIC')
+ nerve_complex.set_color_from_file(args.function_file)
+ nerve_complex.set_function_from_file(args.function_file)
+ nerve_complex.set_graph_from_automatic_rips()
+ nerve_complex.set_automatic_resolution()
+ nerve_complex.set_gain()
+ nerve_complex.set_cover_from_function()
+ nerve_complex.find_simplices()
+ nerve_complex.plot_dot()
+ simplex_tree = nerve_complex.create_simplex_tree()
+ nerve_complex.compute_PD()
+ if (args.verbose):
+ print('Iterator on functional GIC simplices')
+ result_str = 'Functional GIC is of dimension ' + \
+ repr(simplex_tree.dimension()) + ' - ' + \
+ repr(simplex_tree.num_simplices()) + ' simplices - ' + \
+ repr(simplex_tree.num_vertices()) + ' vertices.'
+ print(result_str)
+ for filtered_value in simplex_tree.get_filtration():
+ print(filtered_value[0])
diff --git a/src/cython/example/gudhi_graphical_tools_example.py b/src/cython/example/gudhi_graphical_tools_example.py
index 9f37efc0..ac3d146c 100755
--- a/src/cython/example/gudhi_graphical_tools_example.py
+++ b/src/cython/example/gudhi_graphical_tools_example.py
@@ -29,11 +29,6 @@ __copyright__ = "Copyright (C) 2016 Inria"
__license__ = "GPL v3"
print("#####################################################################")
-print("Show palette colors values for dimension")
-
-gudhi.show_palette_values()
-
-print("#####################################################################")
print("Show barcode persistence example")
persistence = [(2, (1.0, float('inf'))), (1, (1.4142135623730951, float('inf'))),
@@ -50,5 +45,5 @@ pplot.show()
print("#####################################################################")
print("Show diagram persistence example with a confidence band")
-pplot = gudhi.plot_persistence_diagram(persistence, band_boot=0.2)
+pplot = gudhi.plot_persistence_diagram(persistence, band=0.2)
pplot.show()
diff --git a/src/cython/example/nerve_of_a_covering.py b/src/cython/example/nerve_of_a_covering.py
new file mode 100755
index 00000000..c5577cb1
--- /dev/null
+++ b/src/cython/example/nerve_of_a_covering.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+
+import gudhi
+import argparse
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2018 Inria
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2018 Inria"
+__license__ = "GPL v3"
+
+parser = argparse.ArgumentParser(description='Nerve of a covering creation '
+                                 'from points read in an OFF file.',
+ epilog='Example: '
+ 'example/nerve_of_a_covering.py '
+                                 '-f ../data/points/human.off -c 2 -r 10 -g 0.3 '
+ '- Constructs Nerve of a covering with the '
+ 'points from the given OFF file.')
+parser.add_argument("-f", "--file", type=str, required=True)
+parser.add_argument("-c", "--coordinate", type=int, default=0)
+parser.add_argument("-r", "--resolution", type=int, default=10)
+parser.add_argument("-g", "--gain", type=float, default=0.3)
+parser.add_argument("-v", "--verbose", default=False, action='store_true' , help='Flag for program verbosity')
+
+args = parser.parse_args()
+
+nerve_complex = gudhi.CoverComplex()
+nerve_complex.set_verbose(args.verbose)
+
+if (nerve_complex.read_point_cloud(args.file)):
+ nerve_complex.set_type('Nerve')
+ nerve_complex.set_color_from_coordinate(args.coordinate)
+ nerve_complex.set_function_from_coordinate(args.coordinate)
+ nerve_complex.set_graph_from_OFF()
+ nerve_complex.set_resolution_with_interval_number(args.resolution)
+ nerve_complex.set_gain(args.gain)
+ nerve_complex.set_cover_from_function()
+ nerve_complex.find_simplices()
+ nerve_complex.write_info()
+ simplex_tree = nerve_complex.create_simplex_tree()
+ nerve_complex.compute_PD()
+ if (args.verbose):
+        print('Iterator on nerve simplices')
+ result_str = 'Nerve is of dimension ' + \
+ repr(simplex_tree.dimension()) + ' - ' + \
+ repr(simplex_tree.num_simplices()) + ' simplices - ' + \
+ repr(simplex_tree.num_vertices()) + ' vertices.'
+ print(result_str)
+ for filtered_value in simplex_tree.get_filtration():
+ print(filtered_value[0])
diff --git a/src/cython/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py b/src/cython/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py
index 4142fa99..0c9dfc43 100755
--- a/src/cython/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py
+++ b/src/cython/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py
@@ -40,7 +40,7 @@ parser = argparse.ArgumentParser(description='RipsComplex creation from '
parser.add_argument("-f", "--file", type=str, required=True)
parser.add_argument("-c", "--min_edge_correlation", type=float, default=0.5)
parser.add_argument("-d", "--max_dimension", type=int, default=1)
-parser.add_argument("-b", "--band_boot", type=float, default=0.)
+parser.add_argument("-b", "--band", type=float, default=0.)
parser.add_argument('--no-diagram', default=False, action='store_true' , help='Flag for not to display the diagrams')
args = parser.parse_args()
@@ -80,5 +80,5 @@ print(simplex_tree.betti_numbers())
invert_diag = [(diag[pers][0],(1.-diag[pers][1][0], 1.-diag[pers][1][1])) for pers in range(len(diag))]
if args.no_diagram == False:
- pplot = gudhi.plot_persistence_diagram(invert_diag, band_boot=args.band_boot)
+ pplot = gudhi.plot_persistence_diagram(invert_diag, band=args.band)
pplot.show()
diff --git a/src/cython/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py b/src/cython/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py
index 01d1f38a..4d2ed577 100755
--- a/src/cython/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py
+++ b/src/cython/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py
@@ -39,7 +39,7 @@ parser = argparse.ArgumentParser(description='RipsComplex creation from '
parser.add_argument("-f", "--file", type=str, required=True)
parser.add_argument("-e", "--max_edge_length", type=float, default=0.5)
parser.add_argument("-d", "--max_dimension", type=int, default=1)
-parser.add_argument("-b", "--band_boot", type=float, default=0.)
+parser.add_argument("-b", "--band", type=float, default=0.)
parser.add_argument('--no-diagram', default=False, action='store_true' , help='Flag for not to display the diagrams')
args = parser.parse_args()
@@ -63,5 +63,5 @@ print("betti_numbers()=")
print(simplex_tree.betti_numbers())
if args.no_diagram == False:
- pplot = gudhi.plot_persistence_diagram(diag, band_boot=args.band_boot)
+ pplot = gudhi.plot_persistence_diagram(diag, band=args.band)
pplot.show()
diff --git a/src/cython/example/rips_complex_diagram_persistence_from_off_file_example.py b/src/cython/example/rips_complex_diagram_persistence_from_off_file_example.py
index 865c66b6..d15d5eb0 100755
--- a/src/cython/example/rips_complex_diagram_persistence_from_off_file_example.py
+++ b/src/cython/example/rips_complex_diagram_persistence_from_off_file_example.py
@@ -39,7 +39,7 @@ parser = argparse.ArgumentParser(description='RipsComplex creation from '
parser.add_argument("-f", "--file", type=str, required=True)
parser.add_argument("-e", "--max_edge_length", type=float, default=0.5)
parser.add_argument("-d", "--max_dimension", type=int, default=1)
-parser.add_argument("-b", "--band_boot", type=float, default=0.)
+parser.add_argument("-b", "--band", type=float, default=0.)
parser.add_argument('--no-diagram', default=False, action='store_true' , help='Flag for not to display the diagrams')
args = parser.parse_args()
@@ -66,7 +66,7 @@ with open(args.file, 'r') as f:
print(simplex_tree.betti_numbers())
if args.no_diagram == False:
- pplot = gudhi.plot_persistence_diagram(diag, band_boot=args.band_boot)
+ pplot = gudhi.plot_persistence_diagram(diag, band=args.band)
pplot.show()
else:
print(args.file, "is not a valid OFF file")
diff --git a/src/cython/example/tangential_complex_plain_homology_from_off_file_example.py b/src/cython/example/tangential_complex_plain_homology_from_off_file_example.py
index 680a8bf8..0f8f5e80 100755
--- a/src/cython/example/tangential_complex_plain_homology_from_off_file_example.py
+++ b/src/cython/example/tangential_complex_plain_homology_from_off_file_example.py
@@ -33,11 +33,12 @@ parser = argparse.ArgumentParser(description='TangentialComplex creation from '
'points read in a OFF file.',
epilog='Example: '
'example/tangential_complex_plain_homology_from_off_file_example.py '
- '-f ../data/points/tore3D_300.off'
+ '-f ../data/points/tore3D_300.off -i 3'
'- Constructs a tangential complex with the '
'points from the given OFF file')
parser.add_argument("-f", "--file", type=str, required=True)
-parser.add_argument("-b", "--band_boot", type=float, default=0.)
+parser.add_argument("-i", "--intrisic_dim", type=int, required=True)
+parser.add_argument("-b", "--band", type=float, default=0.)
parser.add_argument('--no-diagram', default=False, action='store_true' , help='Flag for not to display the diagrams')
args = parser.parse_args()
@@ -48,7 +49,7 @@ with open(args.file, 'r') as f:
print("#####################################################################")
print("TangentialComplex creation from points read in a OFF file")
- tc = gudhi.TangentialComplex(off_file=args.file)
+ tc = gudhi.TangentialComplex(intrisic_dim = args.intrisic_dim, off_file=args.file)
st = tc.create_simplex_tree()
message = "Number of simplices=" + repr(st.num_simplices())
@@ -60,7 +61,7 @@ with open(args.file, 'r') as f:
print(st.betti_numbers())
if args.no_diagram == False:
- pplot = gudhi.plot_persistence_diagram(diag, band_boot=args.band_boot)
+ pplot = gudhi.plot_persistence_diagram(diag, band=args.band)
pplot.show()
else:
print(args.file, "is not a valid OFF file")
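
Note that gudhi.TangentialComplex now requires the intrinsic dimension up front (the keyword keeps the library's intrisic_dim spelling); the previous behaviour of guessing it as the ambient dimension minus one is removed in the interface change further down. A minimal sketch:

    import gudhi

    # Four points in the plane treated as a sample of a 1-dimensional manifold;
    # intrisic_dim is now a required constructor argument.
    points = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]]
    tc = gudhi.TangentialComplex(intrisic_dim=1, points=points)
    st = tc.create_simplex_tree()
    print('Number of simplices =', st.num_simplices())
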
diff --git a/src/cython/example/voronoi_graph_induced_complex.py b/src/cython/example/voronoi_graph_induced_complex.py
new file mode 100755
index 00000000..8266a0e4
--- /dev/null
+++ b/src/cython/example/voronoi_graph_induced_complex.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+
+import gudhi
+import argparse
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2018 Inria
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2018 Inria"
+__license__ = "GPL v3"
+
+parser = argparse.ArgumentParser(description='Voronoi GIC creation '
+                                 'from points read in an OFF file.',
+                                 epilog='Example: '
+                                 'example/voronoi_graph_induced_complex.py '
+                                 '-f ../data/points/human.off -n 700 -v '
+                                 '- Constructs the Voronoi GIC with the '
+                                 'points from the given OFF file.')
+parser.add_argument("-f", "--file", type=str, required=True)
+parser.add_argument("-n", "--subsample-nb-points", type=int, default=100)
+parser.add_argument("-v", "--verbose", default=False, action='store_true' , help='Flag for program verbosity')
+
+args = parser.parse_args()
+
+nerve_complex = gudhi.CoverComplex()
+nerve_complex.set_verbose(args.verbose)
+
+if (nerve_complex.read_point_cloud(args.file)):
+ nerve_complex.set_type('GIC')
+ nerve_complex.set_color_from_coordinate()
+ nerve_complex.set_graph_from_OFF()
+ nerve_complex.set_cover_from_Voronoi(args.subsample_nb_points)
+ nerve_complex.find_simplices()
+ nerve_complex.plot_off()
+ simplex_tree = nerve_complex.create_simplex_tree()
+ nerve_complex.compute_PD()
+ if (args.verbose):
+ print('Iterator on graph induced complex simplices')
+ result_str = 'Graph induced complex is of dimension ' + \
+ repr(simplex_tree.dimension()) + ' - ' + \
+ repr(simplex_tree.num_simplices()) + ' simplices - ' + \
+ repr(simplex_tree.num_vertices()) + ' vertices.'
+ print(result_str)
+ for filtered_value in simplex_tree.get_filtration():
+ print(filtered_value[0])
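
As with the Nerve example, the argument-parsing boilerplate hides a short pipeline; a condensed sketch, assuming the human.off point cloud and the 700-point subsample from the epilog (the argument of set_cover_from_Voronoi is the number of subsampled seed points):

    import gudhi

    # Condensed Voronoi graph induced complex pipeline from the example above.
    gic = gudhi.CoverComplex()
    if gic.read_point_cloud('../data/points/human.off'):
        gic.set_type('GIC')
        gic.set_color_from_coordinate()
        gic.set_graph_from_OFF()
        gic.set_cover_from_Voronoi(700)   # cover from 700 Voronoi cells
        gic.find_simplices()
        stree = gic.create_simplex_tree()
        print('Voronoi GIC has', stree.num_vertices(), 'vertices')
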
diff --git a/src/cython/gudhi.pyx.in b/src/cython/gudhi.pyx.in
index b94f2251..0d4b966b 100644
--- a/src/cython/gudhi.pyx.in
+++ b/src/cython/gudhi.pyx.in
@@ -26,6 +26,7 @@ __license__ = "GPL v3"
__version__ = "@GUDHI_VERSION@"
# This variable is used by doctest to find files
__root_source_dir__ = "@CMAKE_SOURCE_DIR@"
+__debug_info__ = @GUDHI_CYTHON_DEBUG_INFO@
include '@CMAKE_CURRENT_SOURCE_DIR@/cython/off_reader.pyx'
include '@CMAKE_CURRENT_SOURCE_DIR@/cython/simplex_tree.pyx'
@@ -41,3 +42,4 @@ include '@CMAKE_CURRENT_SOURCE_DIR@/cython/strong_witness_complex.pyx'
@GUDHI_CYTHON_SUBSAMPLING@
@GUDHI_CYTHON_TANGENTIAL_COMPLEX@
@GUDHI_CYTHON_BOTTLENECK_DISTANCE@
+@GUDHI_CYTHON_NERVE_GIC@
diff --git a/src/cython/include/Nerve_gic_interface.h b/src/cython/include/Nerve_gic_interface.h
new file mode 100644
index 00000000..aa71e2a6
--- /dev/null
+++ b/src/cython/include/Nerve_gic_interface.h
@@ -0,0 +1,61 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2018 Inria
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef INCLUDE_NERVE_GIC_INTERFACE_H_
+#define INCLUDE_NERVE_GIC_INTERFACE_H_
+
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/distance_functions.h>
+#include <gudhi/GIC.h>
+
+#include "Simplex_tree_interface.h"
+
+#include <iostream>
+#include <vector>
+#include <string>
+
+namespace Gudhi {
+
+namespace cover_complex {
+
+class Nerve_gic_interface : public Cover_complex<std::vector<double>> {
+ public:
+ void create_simplex_tree(Simplex_tree_interface<>* simplex_tree) {
+ create_complex(*simplex_tree);
+ simplex_tree->initialize_filtration();
+ }
+ void set_cover_from_Euclidean_Voronoi(int m) {
+ set_cover_from_Voronoi(Gudhi::Euclidean_distance(), m);
+ }
+ double set_graph_from_automatic_euclidean_rips(int N) {
+ return set_graph_from_automatic_rips(Gudhi::Euclidean_distance(), N);
+ }
+ void set_graph_from_euclidean_rips(double threshold) {
+ set_graph_from_rips(threshold, Gudhi::Euclidean_distance());
+ }
+};
+
+} // namespace cover_complex
+
+} // namespace Gudhi
+
+#endif // INCLUDE_NERVE_GIC_INTERFACE_H_
diff --git a/src/cython/include/Persistent_cohomology_interface.h b/src/cython/include/Persistent_cohomology_interface.h
index a86b1187..8cf71a4e 100644
--- a/src/cython/include/Persistent_cohomology_interface.h
+++ b/src/cython/include/Persistent_cohomology_interface.h
@@ -85,6 +85,32 @@ persistent_cohomology::Persistent_cohomology<FilteredComplex, persistent_cohomol
return persistence;
}
+ std::vector<std::pair<std::vector<int>, std::vector<int>>> persistence_pairs() {
+ auto pairs = persistent_cohomology::Persistent_cohomology<FilteredComplex,
+ persistent_cohomology::Field_Zp>::get_persistent_pairs();
+
+ std::vector<std::pair<std::vector<int>, std::vector<int>>> persistence_pairs;
+ persistence_pairs.reserve(pairs.size());
+ for (auto pair : pairs) {
+ std::vector<int> birth;
+ if (get<0>(pair) != stptr_->null_simplex()) {
+ for (auto vertex : stptr_->simplex_vertex_range(get<0>(pair))) {
+ birth.push_back(vertex);
+ }
+ }
+
+ std::vector<int> death;
+ if (get<1>(pair) != stptr_->null_simplex()) {
+ for (auto vertex : stptr_->simplex_vertex_range(get<1>(pair))) {
+ death.push_back(vertex);
+ }
+ }
+
+ persistence_pairs.push_back(std::make_pair(birth, death));
+ }
+ return persistence_pairs;
+ }
+
private:
// A copy
FilteredComplex* stptr_;
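
On the Python side this new persistence_pairs() entry point returns, for each interval, the vertex lists of the birth and death simplices, with an empty death list marking an essential class. A hedged sketch, assuming the matching Cython binding exposes it on SimplexTree and that persistence() has been computed first:

    import gudhi

    # Hypothetical usage of the persistence_pairs() binding sketched above.
    st = gudhi.SimplexTree()
    st.insert([0, 1, 2], filtration=1.0)
    st.persistence()                         # pairs are only available afterwards
    for birth_simplex, death_simplex in st.persistence_pairs():
        # birth_simplex / death_simplex are lists of vertex ids; an empty
        # death_simplex corresponds to an infinite interval
        print(birth_simplex, '->', death_simplex)
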
diff --git a/src/cython/include/Tangential_complex_interface.h b/src/cython/include/Tangential_complex_interface.h
index 2772460a..71418886 100644
--- a/src/cython/include/Tangential_complex_interface.h
+++ b/src/cython/include/Tangential_complex_interface.h
@@ -45,24 +45,19 @@ class Tangential_complex_interface {
using TC = Tangential_complex<Dynamic_kernel, CGAL::Dynamic_dimension_tag, CGAL::Parallel_tag>;
public:
- Tangential_complex_interface(const std::vector<std::vector<double>>& points) {
+ Tangential_complex_interface(int intrisic_dim, const std::vector<std::vector<double>>& points) {
Dynamic_kernel k;
- unsigned intrisic_dim = 0;
- if (points.size() > 0)
- intrisic_dim = points[0].size() - 1;
tangential_complex_ = new TC(points, intrisic_dim, k);
tangential_complex_->compute_tangential_complex();
num_inconsistencies_ = tangential_complex_->number_of_inconsistent_simplices();
}
- Tangential_complex_interface(const std::string& off_file_name, bool from_file = true) {
- Gudhi::Points_off_reader<Point_d> off_reader(off_file_name);
+ Tangential_complex_interface(int intrisic_dim, const std::string& off_file_name, bool from_file = true) {
Dynamic_kernel k;
- unsigned intrisic_dim = 0;
+
+ Gudhi::Points_off_reader<Point_d> off_reader(off_file_name);
std::vector<Point_d> points = off_reader.get_point_cloud();
- if (points.size() > 0)
- intrisic_dim = points[0].size() - 1;
tangential_complex_ = new TC(points, intrisic_dim, k);
tangential_complex_->compute_tangential_complex();
diff --git a/src/cython/setup.py.in b/src/cython/setup.py.in
index b6ca4bcb..4037aab6 100644
--- a/src/cython/setup.py.in
+++ b/src/cython/setup.py.in
@@ -46,4 +46,5 @@ setup(
version='@GUDHI_VERSION@',
url='http://gudhi.gforge.inria.fr/',
ext_modules = cythonize(gudhi),
+ install_requires = ["cython",],
)
diff --git a/src/cython/test/test_cover_complex.py b/src/cython/test/test_cover_complex.py
new file mode 100755
index 00000000..58935264
--- /dev/null
+++ b/src/cython/test/test_cover_complex.py
@@ -0,0 +1,92 @@
+from gudhi import CoverComplex
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2018 Inria
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2018 Inria"
+__license__ = "GPL v3"
+
+
+def test_empty_constructor():
+ # Try to create an empty CoverComplex
+ cover = CoverComplex()
+ assert cover.__is_defined() == True
+
+def test_non_existing_file_read():
+ # Try to open a non existing file
+ cover = CoverComplex()
+ assert (cover.read_point_cloud('pouetpouettralala.toubiloubabdou') == False)
+
+def test_files_creation():
+ # Create test file
+ cloud_file = open('cloud', 'w')
+ cloud_file.write('nOFF\n3\n3 0 0\n0 0 0\n2 1 0\n4 0 0')
+ cloud_file.close()
+ cover_file = open('cover', 'w')
+ cover_file.write('1\n2\n3')
+ cover_file.close()
+ graph_file = open('graph', 'w')
+ graph_file.write('0 1\n0 2\n1 2')
+ graph_file.close()
+
+def test_nerve():
+ nerve = CoverComplex()
+ nerve.set_type('Nerve')
+ assert (nerve.read_point_cloud('cloud') == True)
+ nerve.set_color_from_coordinate()
+ nerve.set_graph_from_file('graph')
+ nerve.set_cover_from_file('cover')
+ nerve.find_simplices()
+ stree = nerve.create_simplex_tree()
+
+ assert (stree.num_vertices() == 3)
+ assert ((stree.num_simplices() - stree.num_vertices()) == 0)
+ assert (stree.dimension() == 0)
+
+def test_graph_induced_complex():
+ gic = CoverComplex()
+ gic.set_type('GIC')
+ assert (gic.read_point_cloud('cloud') == True)
+ gic.set_color_from_coordinate()
+ gic.set_graph_from_file('graph')
+ gic.set_cover_from_file('cover')
+ gic.find_simplices()
+ stree = gic.create_simplex_tree()
+
+ assert (stree.num_vertices() == 3)
+ assert ((stree.num_simplices() - stree.num_vertices()) == 4)
+ assert (stree.dimension() == 2)
+
+def test_voronoi_graph_induced_complex():
+ gic = CoverComplex()
+ gic.set_type('GIC')
+ assert (gic.read_point_cloud('cloud') == True)
+ gic.set_color_from_coordinate()
+ gic.set_graph_from_file('graph')
+ gic.set_cover_from_Voronoi(2)
+ gic.find_simplices()
+ stree = gic.create_simplex_tree()
+
+ assert (stree.num_vertices() == 2)
+ assert ((stree.num_simplices() - stree.num_vertices()) == 1)
+ assert (stree.dimension() == 1)
diff --git a/src/cython/test/test_cubical_complex.py b/src/cython/test/test_cubical_complex.py
index 79d39aa8..92e591e9 100755
--- a/src/cython/test/test_cubical_complex.py
+++ b/src/cython/test/test_cubical_complex.py
@@ -72,6 +72,17 @@ def test_dimension_simple_constructor():
assert cub.betti_numbers() == [1, 0, 0]
assert cub.persistent_betti_numbers(0, 1000) == [0, 0, 0]
+def test_user_case_simple_constructor():
+ cub = CubicalComplex(dimensions=[3, 3],
+ top_dimensional_cells = [float('inf'), 0.,0.,0.,1.,0.,0.,0.,0.])
+ assert cub.__is_defined() == True
+ assert cub.__is_persistence_defined() == False
+ assert cub.persistence() == [(1, (0.0, 1.0)), (0, (0.0, float('inf')))]
+ assert cub.__is_persistence_defined() == True
+ other_cub = CubicalComplex(dimensions=[3, 3],
+ top_dimensional_cells = [1000., 0.,0.,0.,1.,0.,0.,0.,0.])
+ assert other_cub.persistence() == [(1, (0.0, 1.0)), (0, (0.0, float('inf')))]
+
def test_dimension_file_constructor():
# Create test file
test_file = open('CubicalOneSphere.txt', 'w')
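
The new cubical test above exercises an infinite top-dimensional cell; writing the 3x3 grid out row by row makes it easier to see where the asserted intervals come from. A sketch reusing the same cell values, with 1000. in place of +inf as in the second assertion:

    from gudhi import CubicalComplex

    # Top-dimensional cells of the 3x3 grid from the test above:
    #   1000.  0.  0.
    #      0.  1.  0.
    #      0.  0.  0.
    # The centre cell (value 1.) fills in the hole present at filtration 0,
    # giving the finite interval (0., 1.) in dimension 1; the corner cell does
    # not change the reported intervals whether it is 1000. or +inf.
    cells = [1000., 0., 0.,
             0.,    1., 0.,
             0.,    0., 0.]
    cub = CubicalComplex(dimensions=[3, 3], top_dimensional_cells=cells)
    print(cub.persistence())  # [(1, (0.0, 1.0)), (0, (0.0, inf))]
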
diff --git a/src/cython/test/test_simplex_tree.py b/src/cython/test/test_simplex_tree.py
index 029e7729..cb701c9a 100755
--- a/src/cython/test/test_simplex_tree.py
+++ b/src/cython/test/test_simplex_tree.py
@@ -161,3 +161,47 @@ def test_automatic_dimension():
assert st.upper_bound_dimension() == 2
assert st.dimension() == 1
assert st.upper_bound_dimension() == 1
+
+def test_make_filtration_non_decreasing():
+ st = SimplexTree()
+ assert st.__is_defined() == True
+ assert st.__is_persistence_defined() == False
+
+ # Inserted simplex:
+ # 1
+ # o
+ # /X\
+ # o---o---o---o
+ # 2 0 3\X/4
+ # o
+ # 5
+ assert st.insert([2, 1, 0], filtration=2.0) == True
+ assert st.insert([3, 0], filtration=2.0) == True
+ assert st.insert([3, 4, 5], filtration=2.0) == True
+
+ assert st.make_filtration_non_decreasing() == False
+
+    # Because of the non-decreasing property of the simplex tree, { 0 }, { 1 }
+    # and { 0, 1 } are going to be set from value 2.0 to 1.0
+    st.insert([0, 1, 6, 7], filtration=1.0)
+
+ assert st.make_filtration_non_decreasing() == False
+
+ # Modify specific values to test make_filtration_non_decreasing
+    st.assign_filtration([0, 1, 6, 7], 0.8)
+    st.assign_filtration([0, 1, 6], 0.9)
+    st.assign_filtration([0, 6], 0.6)
+    st.assign_filtration([3, 4, 5], 1.2)
+    st.assign_filtration([3, 4], 1.1)
+    st.assign_filtration([4, 5], 1.99)
+
+ assert st.make_filtration_non_decreasing() == True
+
+ assert st.filtration([0,1,6,7]) == 1.
+ assert st.filtration([0,1,6]) == 1.
+ assert st.filtration([0,1]) == 1.
+ assert st.filtration([0]) == 1.
+ assert st.filtration([1]) == 1.
+ assert st.filtration([3,4,5]) == 2.
+ assert st.filtration([3,4]) == 2.
+ assert st.filtration([4,5]) == 2.
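
As the assertions above suggest, make_filtration_non_decreasing() raises any simplex whose filtration value is lower than that of one of its faces and returns True only when something was modified. A minimal sketch of the same behaviour on a single triangle:

    from gudhi import SimplexTree

    # A triangle at filtration 2.0 whose edge [0, 1] is forced below its
    # vertices; the repair raises the edge back to 2.0.
    st = SimplexTree()
    st.insert([0, 1, 2], filtration=2.0)
    st.assign_filtration([0, 1], 1.0)
    assert st.make_filtration_non_decreasing() == True   # the edge was raised
    assert st.filtration([0, 1]) == 2.0
    assert st.make_filtration_non_decreasing() == False  # already consistent
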
diff --git a/src/cython/test/test_tangential_complex.py b/src/cython/test/test_tangential_complex.py
index fe623c7b..5385a0d3 100755
--- a/src/cython/test/test_tangential_complex.py
+++ b/src/cython/test/test_tangential_complex.py
@@ -29,7 +29,7 @@ __license__ = "GPL v3"
def test_tangential():
point_list = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]]
- tc = TangentialComplex(points=point_list)
+ tc = TangentialComplex(intrisic_dim = 1, points=point_list)
assert tc.__is_defined() == True
assert tc.num_vertices() == 4