author     Gard Spreemann <gspreemann@gmail.com>  2017-10-08 11:15:17 +0200
committer  Gard Spreemann <gspreemann@gmail.com>  2017-10-08 11:15:17 +0200
commit     866f6ce614e9c09c97fed12c8c0c2c9fb84fad3f (patch)
tree       0c90eb9bab09ccc9785cdf2dc59f0ec861670b85
parent     8d7329f3e5ad843e553c3c5503cecc28ef2eead6 (diff)

GUDHI 2.0.1 as released by upstream in a tarball. (upstream/2.0.1)
-rw-r--r--  CMakeGUDHIVersion.txt | 2
-rw-r--r--  CMakeLists.txt | 53
-rw-r--r--  Doxyfile | 10
-rw-r--r--  GudhUI/CMakeLists.txt | 34
-rw-r--r--  cmake/modules/GUDHI_modules.cmake | 41
-rw-r--r--  cmake/modules/GUDHI_third_party_libraries.cmake | 28
-rw-r--r--  cmake/modules/GUDHI_user_version_target.cmake (renamed from cmake/modules/GUDHI_user_version_target.txt) | 7
-rw-r--r--  cython/CMakeLists.txt | 376
-rwxr-xr-x  cython/cython/persistence_graphical_tools.py | 76
-rw-r--r--  cython/cython/reader_utils.pyx | 95
-rw-r--r--  cython/cython/simplex_tree.pyx | 81
-rw-r--r--  cython/doc/Makefile.in | 44
-rw-r--r--  cython/doc/_templates/layout.html | 1
-rw-r--r--  cython/doc/alpha_complex_sum.rst | 4
-rw-r--r--  cython/doc/alpha_complex_user.rst | 11
-rw-r--r--  cython/doc/bottleneck_distance_sum.rst | 2
-rw-r--r--  cython/doc/bottleneck_distance_user.rst | 4
-rw-r--r--  cython/doc/citation.rst | 2
-rwxr-xr-x  cython/doc/conf.py | 90
-rw-r--r--  cython/doc/cubical_complex_sum.rst | 22
-rw-r--r--  cython/doc/cubical_complex_user.rst | 20
-rw-r--r--  cython/doc/examples.rst | 19
-rw-r--r--  cython/doc/fileformats.rst | 33
-rwxr-xr-x  cython/doc/generate_examples.py | 43
-rw-r--r--  cython/doc/index.rst | 27
-rw-r--r--  cython/doc/installation.rst | 23
-rw-r--r--  cython/doc/make.bat.in | 67
-rw-r--r--  cython/doc/persistence_graphical_tools_user.rst | 31
-rw-r--r--  cython/doc/persistent_cohomology_sum.rst | 2
-rw-r--r--  cython/doc/persistent_cohomology_user.rst | 2
-rwxr-xr-x  cython/doc/pyplots/barcode_persistence.py | 6
-rwxr-xr-x  cython/doc/pyplots/diagram_persistence.py | 9
-rwxr-xr-x  cython/doc/pyplots/show_palette_values.py | 3
-rwxr-xr-x  cython/doc/python3-sphinx-build.py (renamed from cython/doc/python3-sphinx-build) | 0
-rw-r--r--  cython/doc/reader_utils_ref.rst | 11
-rw-r--r--  cython/doc/rips_complex_sum.rst | 2
-rw-r--r--  cython/doc/rips_complex_user.rst | 8
-rw-r--r--  cython/doc/simplex_tree_sum.rst | 2
-rw-r--r--  cython/doc/tangential_complex_sum.rst | 4
-rw-r--r--  cython/doc/tangential_complex_user.rst | 17
-rw-r--r--  cython/doc/witness_complex_sum.rst | 26
-rw-r--r--  cython/doc/witness_complex_user.rst | 4
-rwxr-xr-x  cython/example/alpha_complex_diagram_persistence_from_off_file_example.py | 4
-rwxr-xr-x  cython/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py | 5
-rwxr-xr-x  cython/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py | 5
-rwxr-xr-x  cython/example/gudhi_graphical_tools_example.py | 9
-rwxr-xr-x  cython/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py | 47
-rwxr-xr-x  cython/example/rips_complex_diagram_persistence_from_off_file_example.py | 4
-rwxr-xr-x  cython/example/rips_persistence_diagram.py | 3
-rwxr-xr-x  cython/example/simplex_tree_example.py | 1
-rwxr-xr-x  cython/example/tangential_complex_plain_homology_from_off_file_example.py | 4
-rw-r--r--  cython/gudhi.pyx.in | 20
-rw-r--r--  cython/include/Reader_utils_interface.h | 56
-rw-r--r--  cython/include/Rips_complex_interface.h | 6
-rw-r--r--  cython/include/Simplex_tree_interface.h | 8
-rw-r--r--  cython/setup.py.in (renamed from cython/cythonize_gudhi.py.in) | 5
-rwxr-xr-x  cython/test/test_cubical_complex.py | 4
-rwxr-xr-x  cython/test/test_reader_utils.py | 88
-rwxr-xr-x  cython/test/test_simplex_tree.py | 1
-rw-r--r--  data/persistence_diagram/first.pers | 5
-rw-r--r--  data/persistence_diagram/second.pers | 3
-rw-r--r--  data/points/SO3.COPYRIGHT | 2
-rw-r--r--  data/points/generator/CMakeLists.txt | 15
-rw-r--r--  data/points/generator/README | 29
-rw-r--r--  doc/Alpha_complex/Intro_alpha_complex.h | 83
-rw-r--r--  doc/Persistent_cohomology/Intro_persistent_cohomology.h | 11
-rw-r--r--  doc/Simplex_tree/Intro_simplex_tree.h | 2
-rw-r--r--  doc/Spatial_searching/Intro_spatial_searching.h | 2
-rw-r--r--  doc/common/MathJax.COPYRIGHT | 55
-rw-r--r--  doc/common/MathJax.js | 53
-rw-r--r--  doc/common/file_formats.h | 59
-rw-r--r--  doc/common/main_page.h | 27
-rw-r--r--  example/Alpha_complex/CMakeLists.txt | 8
-rw-r--r--  example/Bitmap_cubical_complex/Bitmap_cubical_complex.cpp | 24
-rw-r--r--  example/Bitmap_cubical_complex/Bitmap_cubical_complex_periodic_boundary_conditions.cpp | 22
-rw-r--r--  example/Bitmap_cubical_complex/CMakeLists.txt | 7
-rw-r--r--  example/Bottleneck_distance/CMakeLists.txt | 28
-rw-r--r--  example/Bottleneck_distance/bottleneck_read_file_example.cpp | 48
-rw-r--r--  example/Contraction/CMakeLists.txt | 7
-rw-r--r--  example/Contraction/Garland_heckbert.cpp | 6
-rw-r--r--  example/Contraction/Rips_contraction.cpp | 7
-rw-r--r--  example/Persistent_cohomology/CMakeLists.txt | 50
-rw-r--r--  example/Persistent_cohomology/README | 2
-rw-r--r--  example/Persistent_cohomology/alpha_complex_3d_persistence.cpp | 7
-rw-r--r--  example/Persistent_cohomology/exact_alpha_complex_3d_persistence.cpp | 7
-rw-r--r--  example/Persistent_cohomology/periodic_alpha_complex_3d_persistence.cpp | 14
-rw-r--r--  example/Persistent_cohomology/persistence_from_file.cpp | 3
-rw-r--r--  example/Persistent_cohomology/persistence_from_simple_simplex_tree.cpp | 3
-rw-r--r--  example/Persistent_cohomology/rips_distance_matrix_persistence.cpp | 2
-rw-r--r--  example/Persistent_cohomology/weighted_alpha_complex_3d_persistence.cpp | 14
-rw-r--r--  example/Rips_complex/CMakeLists.txt | 9
-rw-r--r--  example/Rips_complex/example_rips_complex_from_csv_distance_matrix_file.cpp | 2
-rw-r--r--  example/Simplex_tree/CMakeLists.txt | 8
-rw-r--r--  example/Simplex_tree/simple_simplex_tree.cpp | 3
-rw-r--r--  example/Skeleton_blocker/CMakeLists.txt | 6
-rw-r--r--  example/Skeleton_blocker/Skeleton_blocker_iteration.cpp | 7
-rw-r--r--  example/Spatial_searching/CMakeLists.txt | 1
-rw-r--r--  example/Spatial_searching/example_spatial_searching.cpp | 24
-rw-r--r--  example/Subsampling/CMakeLists.txt | 6
-rw-r--r--  example/Tangential_complex/CMakeLists.txt | 7
-rw-r--r--  example/Witness_complex/CMakeLists.txt | 17
-rw-r--r--  example/common/CMakeLists.txt | 14
-rw-r--r--  include/gudhi/Bitmap_cubical_complex.h | 27
-rw-r--r--  include/gudhi/Bottleneck.h | 4
-rw-r--r--  include/gudhi/Clock.h | 20
-rw-r--r--  include/gudhi/Euclidean_strong_witness_complex.h | 2
-rw-r--r--  include/gudhi/Euclidean_witness_complex.h | 2
-rw-r--r--  include/gudhi/Hasse_complex.h | 11
-rw-r--r--  include/gudhi/Kd_tree_search.h | 81
-rw-r--r--  include/gudhi/Neighbors_finder.h | 8
-rw-r--r--  include/gudhi/Persistence_graph.h | 8
-rw-r--r--  include/gudhi/Persistent_cohomology.h | 13
-rw-r--r--  include/gudhi/Simplex_tree.h | 26
-rw-r--r--  include/gudhi/Strong_witness_complex.h | 4
-rw-r--r--  include/gudhi/Tangential_complex.h | 21
-rw-r--r--  include/gudhi/Witness_complex.h | 4
-rw-r--r--  include/gudhi/pick_n_random_points.h | 4
-rw-r--r--  include/gudhi/random_point_generators.h | 32
-rw-r--r--  include/gudhi/reader_utils.h | 145
-rw-r--r--  include/gudhi/sparsify_point_set.h | 2
-rw-r--r--  utilities/common/CMakeLists.txt | 17
-rw-r--r--  utilities/common/README | 19
-rw-r--r--  utilities/common/off_file_from_shape_generator.cpp (renamed from data/points/generator/hypergenerator.cpp) | 92
123 files changed, 1765 insertions, 1098 deletions
diff --git a/CMakeGUDHIVersion.txt b/CMakeGUDHIVersion.txt
index bd0c57ac..bfef1590 100644
--- a/CMakeGUDHIVersion.txt
+++ b/CMakeGUDHIVersion.txt
@@ -1,6 +1,6 @@
set (GUDHI_MAJOR_VERSION 2)
set (GUDHI_MINOR_VERSION 0)
-set (GUDHI_PATCH_VERSION 0)
+set (GUDHI_PATCH_VERSION 1)
set(GUDHI_VERSION ${GUDHI_MAJOR_VERSION}.${GUDHI_MINOR_VERSION}.${GUDHI_PATCH_VERSION})
message(STATUS "GUDHI version : ${GUDHI_VERSION}")
diff --git a/CMakeLists.txt b/CMakeLists.txt
index ebcb6888..795005b1 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -7,7 +7,27 @@ enable_testing()
list(APPEND CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake/modules/")
-# For "make doxygen"
+# Add your new module in the list, order is not important
+include(GUDHI_modules)
+
+add_gudhi_module(common)
+add_gudhi_module(Alpha_complex)
+add_gudhi_module(Bitmap_cubical_complex)
+add_gudhi_module(Bottleneck_distance)
+add_gudhi_module(Contraction)
+add_gudhi_module(Hasse_complex)
+add_gudhi_module(Persistent_cohomology)
+add_gudhi_module(Rips_complex)
+add_gudhi_module(Simplex_tree)
+add_gudhi_module(Skeleton_blocker)
+add_gudhi_module(Spatial_searching)
+add_gudhi_module(Subsampling)
+add_gudhi_module(Tangential_complex)
+add_gudhi_module(Witness_complex)
+
+message("++ GUDHI_MODULES list is:\"${GUDHI_MODULES}\"")
+
+# For "make doxygen" - Requires GUDHI_USER_VERSION_DIR to be set
set(GUDHI_USER_VERSION_DIR ${CMAKE_SOURCE_DIR})
include(GUDHI_doxygen_target)
@@ -39,27 +59,22 @@ endif()
# Gudhi compilation part
include_directories(include)
-add_subdirectory(example/common)
-add_subdirectory(example/Simplex_tree)
-add_subdirectory(example/Persistent_cohomology)
-add_subdirectory(example/Skeleton_blocker)
-add_subdirectory(example/Contraction)
-add_subdirectory(example/Bitmap_cubical_complex)
-add_subdirectory(example/Witness_complex)
-add_subdirectory(example/Alpha_complex)
-add_subdirectory(example/Rips_complex)
-add_subdirectory(example/Spatial_searching)
-add_subdirectory(example/Subsampling)
-add_subdirectory(example/Tangential_complex)
-add_subdirectory(example/Bottleneck_distance)
-
-# data points generator
-add_subdirectory(data/points/generator)
+# Include module CMake subdirectories
+# GUDHI_SUB_DIRECTORIES is managed in CMAKE_MODULE_PATH/GUDHI_modules.cmake
+foreach(GUDHI_MODULE ${GUDHI_MODULES})
+ foreach(GUDHI_SUB_DIRECTORY ${GUDHI_SUB_DIRECTORIES})
+ if(EXISTS ${CMAKE_SOURCE_DIR}/${GUDHI_SUB_DIRECTORY}/${GUDHI_MODULE}/CMakeLists.txt)
+ add_subdirectory(${CMAKE_SOURCE_DIR}/${GUDHI_SUB_DIRECTORY}/${GUDHI_MODULE}/)
+ endif()
+ endforeach()
+endforeach()
add_subdirectory(GudhUI)
-# specific for cython module
-add_subdirectory(${GUDHI_CYTHON_PATH})
+if (WITH_GUDHI_PYTHON)
+ # specific for cython module
+ add_subdirectory(${GUDHI_CYTHON_PATH})
+endif()
#---------------------------------------------------------------------------------------
#---------------------------------------------------------------------------------------
diff --git a/Doxyfile b/Doxyfile
index d2d0a447..0ef81e5c 100644
--- a/Doxyfile
+++ b/Doxyfile
@@ -38,7 +38,7 @@ PROJECT_NAME = "GUDHI"
# could be handy for archiving the generated documentation or if some version
# control system is used.
-PROJECT_NUMBER = "2.0.0"
+PROJECT_NUMBER = "2.0.1"
# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a
@@ -1427,7 +1427,7 @@ FORMULA_TRANSPARENT = YES
# The default value is: NO.
# This tag requires that the tag GENERATE_HTML is set to YES.
-USE_MATHJAX = NO
+USE_MATHJAX = YES
# When MathJax is enabled you can set the default output format to be used for
# the MathJax output. See the MathJax site (see:
@@ -1450,14 +1450,14 @@ MATHJAX_FORMAT = HTML-CSS
# The default value is: http://cdn.mathjax.org/mathjax/latest.
# This tag requires that the tag USE_MATHJAX is set to YES.
-MATHJAX_RELPATH = http://cdn.mathjax.org/mathjax/latest
+MATHJAX_RELPATH = ../common
# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax
# extension names that should be enabled during MathJax rendering. For example
# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols
# This tag requires that the tag USE_MATHJAX is set to YES.
-MATHJAX_EXTENSIONS =
+MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols
# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces
# of code that will be used on startup of the MathJax code. See the MathJax site
@@ -1612,7 +1612,7 @@ PAPER_TYPE = a4
# If left blank no extra packages will be included.
# This tag requires that the tag GENERATE_LATEX is set to YES.
-EXTRA_PACKAGES = amsfonts amsmath amssymb algorithm algpseudocode
+EXTRA_PACKAGES = amsfonts amsmath amssymb
# The LATEX_HEADER tag can be used to specify a personal LaTeX header for the
# generated LaTeX document. The header should contain everything until the first
diff --git a/GudhUI/CMakeLists.txt b/GudhUI/CMakeLists.txt
index ca2e47c1..374195d0 100644
--- a/GudhUI/CMakeLists.txt
+++ b/GudhUI/CMakeLists.txt
@@ -5,28 +5,6 @@ find_package(Qt5 COMPONENTS Widgets Xml OpenGL)
find_package(QGLViewer)
find_package(OpenGL)
-if (CGAL_VERSION VERSION_EQUAL 4.8.0)
- message(ERROR " GudhUI does not compile with CGAL 4.8.0. 4.8.1, 4.8.2 and 4.9 are OK.")
-endif()
-
-if (NOT CGAL_FOUND)
- message(ERROR " GudhUI requires CGAL and will not be compiled.")
-endif()
-
-if (NOT Qt5_FOUND)
- message(ERROR " GudhUI requires Qt5 and will not be compiled.")
-endif()
-
-if (NOT OPENGL_FOUND)
- message(ERROR " GudhUI requires OpenGL and will not be compiled.")
-endif()
-
-if (NOT QGLVIEWER_FOUND)
- message(ERROR " GudhUI requires QGLViewer and will not be compiled.")
-endif()
-
-
-
if ( CGAL_FOUND AND Qt5_FOUND AND OPENGL_FOUND AND QGLVIEWER_FOUND AND NOT CGAL_VERSION VERSION_EQUAL 4.8.0)
set(CMAKE_AUTOMOC ON)
@@ -37,8 +15,6 @@ if ( CGAL_FOUND AND Qt5_FOUND AND OPENGL_FOUND AND QGLVIEWER_FOUND AND NOT CGAL_
SET(Boost_USE_MULTITHREAD OFF)
include_directories (${QGLVIEWER_INCLUDE_DIR})
- #####################################################################
-
add_executable ( GudhUI
gui/gudhui.cpp
gui/MainWindow.cpp
@@ -52,12 +28,10 @@ if ( CGAL_FOUND AND Qt5_FOUND AND OPENGL_FOUND AND QGLVIEWER_FOUND AND NOT CGAL_
target_link_libraries( GudhUI Qt5::Widgets Qt5::Xml Qt5::OpenGL )
target_link_libraries( GudhUI ${QGLVIEWER_LIBRARIES} )
target_link_libraries( GudhUI ${OPENGL_gl_LIBRARY} ${OPENGL_glu_LIBRARY} )
-if (TBB_FOUND)
- target_link_libraries( GudhUI ${TBB_LIBRARIES})
-endif()
+ if (TBB_FOUND)
+ target_link_libraries( GudhUI ${TBB_LIBRARIES})
+ endif()
-###############################################################################
+ install(TARGETS GudhUI DESTINATION bin)
-else()
- message(STATUS "NOTICE: GudhUI requires CGAL, the QGLViewer, OpenGL and Qt5, and will not be compiled.")
endif()
diff --git a/cmake/modules/GUDHI_modules.cmake b/cmake/modules/GUDHI_modules.cmake
new file mode 100644
index 00000000..f95d0c34
--- /dev/null
+++ b/cmake/modules/GUDHI_modules.cmake
@@ -0,0 +1,41 @@
+# A function to add a new module in GUDHI
+
+set(GUDHI_MODULES "")
+set(GUDHI_MODULES_FULL_LIST "")
+function(add_gudhi_module file_path)
+ option("WITH_MODULE_GUDHI_${file_path}" "Activate/desactivate ${file_path} compilation and installation" ON)
+ if (WITH_MODULE_GUDHI_${file_path})
+ set(GUDHI_MODULES ${GUDHI_MODULES} ${file_path} PARENT_SCOPE)
+ endif()
+ # Required by user_version
+ set(GUDHI_MODULES_FULL_LIST ${GUDHI_MODULES_FULL_LIST} ${file_path} PARENT_SCOPE)
+ # Including module headers is independent - you may, for instance, disable the Alpha complex module but still build the Python interface
+ if(IS_DIRECTORY ${CMAKE_SOURCE_DIR}/src/${file_path}/include/)
+ include_directories(src/${file_path}/include/)
+ endif()
+
+endfunction(add_gudhi_module)
+
+option(WITH_GUDHI_BENCHMARK "Activate/deactivate benchmark compilation" OFF)
+option(WITH_GUDHI_EXAMPLE "Activate/deactivate examples compilation and installation" OFF)
+option(WITH_GUDHI_PYTHON "Activate/deactivate python module compilation and installation" ON)
+option(WITH_GUDHI_TEST "Activate/deactivate tests compilation and installation" ON)
+option(WITH_GUDHI_UTILITIES "Activate/deactivate utilities compilation and installation" ON)
+
+if (WITH_GUDHI_BENCHMARK)
+ set(GUDHI_SUB_DIRECTORIES "${GUDHI_SUB_DIRECTORIES};benchmark")
+endif()
+if (WITH_GUDHI_EXAMPLE)
+ set(GUDHI_SUB_DIRECTORIES "${GUDHI_SUB_DIRECTORIES};example")
+endif()
+if (WITH_GUDHI_TEST)
+ set(GUDHI_SUB_DIRECTORIES "${GUDHI_SUB_DIRECTORIES};test")
+endif()
+if (WITH_GUDHI_UTILITIES)
+ set(GUDHI_SUB_DIRECTORIES "${GUDHI_SUB_DIRECTORIES};utilities")
+endif()
+
+message("++ GUDHI_SUB_DIRECTORIES list is:\"${GUDHI_SUB_DIRECTORIES}\"")
+
+
+
diff --git a/cmake/modules/GUDHI_third_party_libraries.cmake b/cmake/modules/GUDHI_third_party_libraries.cmake
index 5f84c602..f2bbafdc 100644
--- a/cmake/modules/GUDHI_third_party_libraries.cmake
+++ b/cmake/modules/GUDHI_third_party_libraries.cmake
@@ -1,6 +1,6 @@
# This files manage third party libraries required by GUDHI
-find_package(Boost REQUIRED COMPONENTS system filesystem unit_test_framework chrono timer date_time program_options thread)
+find_package(Boost REQUIRED COMPONENTS system filesystem unit_test_framework program_options thread)
if(NOT Boost_FOUND)
message(FATAL_ERROR "NOTICE: This program requires Boost and will not be compiled.")
@@ -99,6 +99,8 @@ add_definitions(-DBOOST_RESULT_OF_USE_DECLTYPE)
add_definitions(-DBOOST_ALL_NO_LIB)
# problem with Visual Studio link on Boost program_options
add_definitions( -DBOOST_ALL_DYN_LINK )
+# problem on Mac with boost_system and boost_thread
+add_definitions( -DBOOST_SYSTEM_NO_DEPRECATED )
INCLUDE_DIRECTORIES(${Boost_INCLUDE_DIRS})
LINK_DIRECTORIES(${Boost_LIBRARY_DIRS})
@@ -106,19 +108,6 @@ LINK_DIRECTORIES(${Boost_LIBRARY_DIRS})
message(STATUS "boost include dirs:" ${Boost_INCLUDE_DIRS})
message(STATUS "boost library dirs:" ${Boost_LIBRARY_DIRS})
-macro( find_the_lib placeholder THE_LIBS )
- set (THE_LIB_WE_FOUND "NO")
- foreach(THE_LIB ${THE_LIBS})
- if(EXISTS ${THE_LIB})
- get_filename_component(THE_LIB_WE ${THE_LIB} NAME_WE)
- if (NOT THE_LIB_WE_FOUND)
- set (THE_LIB_WE_FOUND "YES")
- set(returnValue "${THE_LIB_WE}")
- endif(NOT THE_LIB_WE_FOUND)
- endif(EXISTS ${THE_LIB})
- endforeach(THE_LIB ${THE_LIBS})
-endmacro( find_the_lib )
-
# Find the correct Python interpreter.
# Can be set with -DPYTHON_EXECUTABLE=/usr/bin/python3 or -DPython_ADDITIONAL_VERSIONS=3 for instance.
find_package(Cython)
@@ -128,17 +117,20 @@ if(NOT GUDHI_CYTHON_PATH)
endif(NOT GUDHI_CYTHON_PATH)
if(PYTHONINTERP_FOUND AND CYTHON_FOUND)
- # Unitary tests are available through py.test
- find_program( PYTEST_PATH py.test )
# Default found version 2
if(PYTHON_VERSION_MAJOR EQUAL 2)
# Documentation generation is available through sphinx
find_program( SPHINX_PATH sphinx-build )
elseif(PYTHON_VERSION_MAJOR EQUAL 3)
- # Documentation generation is available through sphinx
- set(SPHINX_PATH "${CMAKE_SOURCE_DIR}/${GUDHI_CYTHON_PATH}/doc/python3-sphinx-build")
+ # No sphinx-build for Python3, so call the python3-sphinx-build.py helper instead
+ set(SPHINX_PATH "${PYTHON_EXECUTABLE}" "${CMAKE_CURRENT_SOURCE_DIR}/${GUDHI_CYTHON_PATH}/doc/python3-sphinx-build.py")
else()
message(FATAL_ERROR "ERROR: Try to compile the Cython interface. Python version ${PYTHON_VERSION_STRING} is not valid.")
endif(PYTHON_VERSION_MAJOR EQUAL 2)
+ # get PYTHON_SITE_PACKAGES relative path from a python command line
+ execute_process(
+ COMMAND "${PYTHON_EXECUTABLE}" -c "from distutils.sysconfig import get_python_lib; print (get_python_lib(prefix='', plat_specific=True))"
+ OUTPUT_VARIABLE PYTHON_SITE_PACKAGES
+ OUTPUT_STRIP_TRAILING_WHITESPACE)
endif(PYTHONINTERP_FOUND AND CYTHON_FOUND)
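Note: the PYTHON_SITE_PACKAGES value introduced above is obtained by shelling out to the Python interpreter. The stand-alone sketch below runs the same query by hand; the printed path is only an illustration and varies with the interpreter and platform.

# Stand-alone version of the one-liner the build system runs to compute
# PYTHON_SITE_PACKAGES: the site-packages path relative to the install prefix.
from distutils.sysconfig import get_python_lib

print(get_python_lib(prefix='', plat_specific=True))
# e.g. 'lib/python2.7/site-packages' on a typical Linux Python 2 install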
diff --git a/cmake/modules/GUDHI_user_version_target.txt b/cmake/modules/GUDHI_user_version_target.cmake
index ca6bcd34..cff64ad2 100644
--- a/cmake/modules/GUDHI_user_version_target.txt
+++ b/cmake/modules/GUDHI_user_version_target.cmake
@@ -47,11 +47,10 @@ if (NOT CMAKE_VERSION VERSION_LESS 2.8.11)
add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
copy_directory ${CMAKE_SOURCE_DIR}/src/GudhUI ${GUDHI_USER_VERSION_DIR}/GudhUI)
- set(GUDHI_MODULES "common;Alpha_complex;Bitmap_cubical_complex;Bottleneck_distance;Contraction;Hasse_complex;Persistent_cohomology;Rips_complex;Simplex_tree;Skeleton_blocker;Spatial_searching;Subsampling;Tangential_complex;Witness_complex")
- set(GUDHI_DIRECTORIES "doc;example;concept")
+ set(GUDHI_DIRECTORIES "doc;example;concept;utilities")
set(GUDHI_INCLUDE_DIRECTORIES "include/gudhi;include/gudhi_patches")
- foreach(GUDHI_MODULE ${GUDHI_MODULES})
+ foreach(GUDHI_MODULE ${GUDHI_MODULES_FULL_LIST})
foreach(GUDHI_DIRECTORY ${GUDHI_DIRECTORIES})
# Find files
file(GLOB GUDHI_FILES ${CMAKE_SOURCE_DIR}/src/${GUDHI_MODULE}/${GUDHI_DIRECTORY}/*)
@@ -86,6 +85,6 @@ if (NOT CMAKE_VERSION VERSION_LESS 2.8.11)
endforeach()
endforeach(GUDHI_INCLUDE_DIRECTORY ${GUDHI_INCLUDE_DIRECTORIES})
- endforeach(GUDHI_MODULE ${GUDHI_MODULES})
+ endforeach(GUDHI_MODULE ${GUDHI_MODULES_FULL_LIST})
endif()
diff --git a/cython/CMakeLists.txt b/cython/CMakeLists.txt
index 96c2acb3..afca9d60 100644
--- a/cython/CMakeLists.txt
+++ b/cython/CMakeLists.txt
@@ -1,20 +1,65 @@
cmake_minimum_required(VERSION 2.8)
project(Cython)
+function( add_gudhi_cython_lib THE_LIB )
+ if(EXISTS ${THE_LIB})
+ get_filename_component(THE_LIB_FILE_NAME ${THE_LIB} NAME_WE)
+ if(WIN32)
+ message("++ ${THE_LIB} => THE_LIB_FILE_NAME = ${THE_LIB_FILE_NAME}")
+ set(GUDHI_CYTHON_LIBRARIES "${GUDHI_CYTHON_LIBRARIES}'${THE_LIB_FILE_NAME}', " PARENT_SCOPE)
+ else(WIN32)
+ STRING(REGEX REPLACE "lib" "" UNIX_LIB_FILE_NAME ${THE_LIB_FILE_NAME})
+ message("++ ${THE_LIB} => UNIX_LIB_FILE_NAME = ${UNIX_LIB_FILE_NAME}")
+ set(GUDHI_CYTHON_LIBRARIES "${GUDHI_CYTHON_LIBRARIES}'${UNIX_LIB_FILE_NAME}', " PARENT_SCOPE)
+ endif(WIN32)
+ endif(EXISTS ${THE_LIB})
+endfunction( add_gudhi_cython_lib )
+
+# THE_TEST is the python test file name (without .py extension) containing tests functions
+function( add_gudhi_py_test THE_TEST )
+ # use ${PYTHON_EXECUTABLE} -B, otherwise a __pycache__ directory is created in sources by python
+ # use py.test no cache provider, otherwise a .cache file is created in sources by py.test
+ add_test(NAME ${THE_TEST}_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${PYTHON_EXECUTABLE} -B -m pytest -p no:cacheprovider ${CMAKE_CURRENT_SOURCE_DIR}/test/${THE_TEST}.py)
+endfunction( add_gudhi_py_test )
+
+
if(CYTHON_FOUND)
- message("++ ${PYTHON_EXECUTABLE} v.${PYTHON_VERSION_STRING} - Cython is ${CYTHON_EXECUTABLE} - py.test is ${PYTEST_PATH} - Sphinx is ${SPHINX_PATH}")
+ message("++ ${PYTHON_EXECUTABLE} v.${PYTHON_VERSION_STRING} - Cython is ${CYTHON_EXECUTABLE} - Sphinx is ${SPHINX_PATH}")
set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DBOOST_RESULT_OF_USE_DECLTYPE', ")
set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DBOOST_ALL_NO_LIB', ")
- set(GUDHI_CYTHON_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}'${Boost_LIBRARY_DIRS}', ")
- if(WIN32)
- set( returnValue "" )
- find_the_lib (${returnValue} ${Boost_SYSTEM_LIBRARY})
- set(BOOST_SYSTEM_LIB_NAME ${returnValue})
- else()
- set(BOOST_SYSTEM_LIB_NAME "boost_system")
- endif()
- set(GUDHI_CYTHON_LIBRARIES "${GUDHI_CYTHON_LIBRARIES}'${BOOST_SYSTEM_LIB_NAME}', ")
+ set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DBOOST_SYSTEM_NO_DEPRECATED', ")
+
+ # This is because of https://github.com/CGAL/cgal/blob/master/Installation/include/CGAL/tss.h
+ # CGAL is using boost thread if thread_local is not ready (requires XCode 8 for Mac).
+ # The test in https://github.com/CGAL/cgal/blob/master/Installation/include/CGAL/config.h
+ # #if __has_feature(cxx_thread_local) || \
+ # ( (__GNUC__ * 100 + __GNUC_MINOR__) >= 408 && __cplusplus >= 201103L ) || \
+ # ( _MSC_VER >= 1900 )
+ # #define CGAL_CAN_USE_CXX11_THREAD_LOCAL
+ # #endif
+ set(CGAL_CAN_USE_CXX11_THREAD_LOCAL "
+ int main() {
+ #ifndef __has_feature
+ #define __has_feature(x) 0 // Compatibility with non-clang compilers.
+ #endif
+ #if __has_feature(cxx_thread_local) || \
+ ( (__GNUC__ * 100 + __GNUC_MINOR__) >= 408 && __cplusplus >= 201103L ) || \
+ ( _MSC_VER >= 1900 )
+ bool has_feature_thread_local = true;
+ #else
+ // Explicit error of compilation for CMake test purpose - has_feature_thread_local is not defined
+ #endif
+ bool result = has_feature_thread_local;
+ } ")
+ check_cxx_source_compiles("${CGAL_CAN_USE_CXX11_THREAD_LOCAL}" CGAL_CAN_USE_CXX11_THREAD_LOCAL_RESULT)
+
+ if (NOT CGAL_CAN_USE_CXX11_THREAD_LOCAL_RESULT)
+ add_gudhi_cython_lib(${Boost_THREAD_LIBRARY})
+ set(GUDHI_CYTHON_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}'${Boost_LIBRARY_DIRS}', ")
+ endif()
# Gudhi and CGAL compilation option
if(MSVC)
@@ -25,9 +70,9 @@ if(CYTHON_FOUND)
if(CMAKE_COMPILER_IS_GNUCXX)
set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-frounding-math', ")
endif(CMAKE_COMPILER_IS_GNUCXX)
- if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Intel")
+ if (CMAKE_CXX_COMPILER_ID MATCHES Intel)
set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-fp-model strict', ")
- endif("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Intel")
+ endif(CMAKE_CXX_COMPILER_ID MATCHES Intel)
if (DEBUG_TRACES)
# For programs to be more verbose
set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DDEBUG_TRACES', ")
@@ -38,101 +83,40 @@ if(CYTHON_FOUND)
set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_EIGEN3_ENABLED', ")
endif (EIGEN3_FOUND)
- # Copy recursively include, cython, example, doc and test repositories before packages finding
- # Some tests and doc files are removed in case some packages are not found
- file(COPY include DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
- file(COPY cython DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
- file(COPY example DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
- file(COPY test DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
- file(COPY doc DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
- # Developper version for doc images
- file(GLOB GUDHI_DEV_DOC_IMAGES "${CMAKE_SOURCE_DIR}/src/*/doc/*.png")
- file(COPY ${GUDHI_DEV_DOC_IMAGES} DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/doc/img")
- file(GLOB GUDHI_DEV_DOC_IMAGES "${CMAKE_SOURCE_DIR}/src/*/doc/*.svg")
- file(COPY ${GUDHI_DEV_DOC_IMAGES} DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/doc/img")
- # User version for doc images
- file(GLOB GUDHI_USER_DOC_IMAGES "${CMAKE_SOURCE_DIR}/doc/*/*.png")
- file(COPY ${GUDHI_USER_DOC_IMAGES} DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/doc/img")
- file(GLOB GUDHI_USER_DOC_IMAGES "${CMAKE_SOURCE_DIR}/doc/*/*.svg")
- file(COPY ${GUDHI_USER_DOC_IMAGES} DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/doc/img")
- # Biblio
- file(GLOB GUDHI_BIB_FILES "${CMAKE_SOURCE_DIR}/biblio/*.bib")
- file(COPY ${GUDHI_BIB_FILES} DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/doc/")
- # Cubical complex perseus doc example
- file(GLOB GUDHI_CUBICAL_PERSEUS_FILES "${CMAKE_SOURCE_DIR}/data/bitmap/*cubicalcomplexdoc.txt")
- file(COPY ${GUDHI_CUBICAL_PERSEUS_FILES} DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/doc/")
- file(COPY "${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/doc/")
- file(COPY "${CMAKE_SOURCE_DIR}/data/distance_matrix/full_square_distance_matrix.csv" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/doc/")
- # Persistence graphical tools examples
- file(COPY "${CMAKE_SOURCE_DIR}/data/bitmap/3d_torus.txt" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/doc/")
- file(COPY "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/doc/")
-
if (NOT CGAL_VERSION VERSION_LESS 4.8.1)
- # If CGAL_VERSION >= 4.8.1, include subsampling
- set(GUDHI_CYTHON_SUBSAMPLING "include 'cython/subsampling.pyx'")
- set(GUDHI_CYTHON_TANGENTIAL_COMPLEX "include 'cython/tangential_complex.pyx'")
- set(GUDHI_CYTHON_BOTTLENECK_DISTANCE "include 'cython/bottleneck_distance.pyx'")
- else (NOT CGAL_VERSION VERSION_LESS 4.8.1)
- # Remove subsampling unitary tests
- file(REMOVE ${CMAKE_CURRENT_BINARY_DIR}/test/test_subsampling.py)
- file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/subsampling_ref.rst")
- file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/subsampling_sum.rst")
- file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/subsampling_user.rst")
- # Remove tangential complex and bottleneck unitary tests
- file(REMOVE ${CMAKE_CURRENT_BINARY_DIR}/test/test_tangential_complex.py)
- file(REMOVE ${CMAKE_CURRENT_BINARY_DIR}/test/test_bottleneck_distance.py)
- file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/bottleneck_distance_ref.rst")
- file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/bottleneck_distance_sum.rst")
- file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/bottleneck_distance_user.rst")
- file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/tangential_complex_ref.rst")
- file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/tangential_complex_sum.rst")
- file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/tangential_complex_user.rst")
+ set(GUDHI_CYTHON_BOTTLENECK_DISTANCE "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/bottleneck_distance.pyx'")
endif (NOT CGAL_VERSION VERSION_LESS 4.8.1)
- if (NOT CGAL_VERSION VERSION_LESS 4.7.0)
- # If CGAL_VERSION >= 4.7.0, include alpha
- set(GUDHI_CYTHON_ALPHA_COMPLEX "include 'cython/alpha_complex.pyx'")
- else (NOT CGAL_VERSION VERSION_LESS 4.7.0)
- # Remove alpha complex unitary tests
- file(REMOVE ${CMAKE_CURRENT_BINARY_DIR}/test/test_alpha_complex.py)
- file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/alpha_complex_ref.rst")
- file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/alpha_complex_sum.rst")
- file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/alpha_complex_user.rst")
- endif (NOT CGAL_VERSION VERSION_LESS 4.7.0)
- if (NOT CGAL_VERSION VERSION_LESS 4.6.0)
- # If CGAL_VERSION >= 4.6.0, include euclidean versions of witness complex
+ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
+ set(GUDHI_CYTHON_SUBSAMPLING "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/subsampling.pyx'")
+ set(GUDHI_CYTHON_TANGENTIAL_COMPLEX "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/tangential_complex.pyx'")
+ endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
+ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
+ set(GUDHI_CYTHON_ALPHA_COMPLEX "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/alpha_complex.pyx'")
+ endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
+ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0)
set(GUDHI_CYTHON_EUCLIDEAN_WITNESS_COMPLEX
- "include 'cython/euclidean_witness_complex.pyx'\ninclude 'cython/euclidean_strong_witness_complex.pyx'\n")
- else (NOT CGAL_VERSION VERSION_LESS 4.6.0)
- # Remove alpha complex unitary tests
- file(REMOVE ${CMAKE_CURRENT_BINARY_DIR}/test/test_euclidean_witness_complex.py)
- file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/euclidean_witness_complex_ref.rst")
- file(REMOVE "${CMAKE_CURRENT_BINARY_DIR}/doc/euclidean_strong_witness_complex_ref.rst")
- endif (NOT CGAL_VERSION VERSION_LESS 4.6.0)
+ "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/euclidean_witness_complex.pyx'\ninclude '${CMAKE_CURRENT_SOURCE_DIR}/cython/euclidean_strong_witness_complex.pyx'\n")
+ endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0)
if(CGAL_FOUND)
# Add CGAL compilation args
if(CGAL_HEADER_ONLY)
set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_HEADER_ONLY', ")
else(CGAL_HEADER_ONLY)
- if(WIN32)
- set(GUDHI_CYTHON_LIBRARIES "${GUDHI_CYTHON_LIBRARIES}'CGAL-vc140-mt-4.7', ")
- else(WIN32)
- set(GUDHI_CYTHON_LIBRARIES "${GUDHI_CYTHON_LIBRARIES}'CGAL', ")
- endif(WIN32)
+ add_gudhi_cython_lib(${CGAL_LIBRARIES})
set(GUDHI_CYTHON_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}'${CGAL_LIBRARIES_DIR}', ")
+ # If CGAL is not header only, CGAL library may link with boost system,
+ add_gudhi_cython_lib(${Boost_SYSTEM_LIBRARY})
+ set(GUDHI_CYTHON_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}'${Boost_LIBRARY_DIRS}', ")
endif(CGAL_HEADER_ONLY)
# GMP and GMPXX are not required, but if present, CGAL will link with them.
if(GMP_FOUND)
set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_USE_GMP', ")
- if(WIN32)
- set(GUDHI_CYTHON_LIBRARIES "${GUDHI_CYTHON_LIBRARIES}'libgmp-10', ")
- else(WIN32)
- set(GUDHI_CYTHON_LIBRARIES "${GUDHI_CYTHON_LIBRARIES}'gmp', ")
- endif(WIN32)
- set(GUDHI_CYTHON_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}'${GMP_LIBRARIES_DIR}', ")
+ add_gudhi_cython_lib(${GMP_LIBRARIES})
+ set(GUDHI_CYTHON_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}'${GMP_LIBRARIES_DIR}', ")
if(GMPXX_FOUND)
set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_USE_GMPXX', ")
- set(GUDHI_CYTHON_LIBRARIES "${GUDHI_CYTHON_LIBRARIES}'gmpxx', ")
+ add_gudhi_cython_lib(${GMPXX_LIBRARIES})
set(GUDHI_CYTHON_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}'${GMPXX_LIBRARIES_DIR}', ")
endif(GMPXX_FOUND)
endif(GMP_FOUND)
@@ -153,132 +137,178 @@ if(CYTHON_FOUND)
if (TBB_FOUND)
set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DGUDHI_USE_TBB', ")
- set(GUDHI_CYTHON_LIBRARIES "${GUDHI_CYTHON_LIBRARIES}'tbb', 'tbbmalloc', ")
+ add_gudhi_cython_lib(${TBB_RELEASE_LIBRARY})
+ add_gudhi_cython_lib(${TBB_MALLOC_RELEASE_LIBRARY})
set(GUDHI_CYTHON_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}'${TBB_LIBRARY_DIRS}', ")
set(GUDHI_CYTHON_INCLUDE_DIRS "${GUDHI_CYTHON_INCLUDE_DIRS}'${TBB_INCLUDE_DIRS}', ")
endif()
- # set sphinx-build in make files
- configure_file(doc/Makefile.in "${CMAKE_CURRENT_BINARY_DIR}/doc/Makefile" @ONLY)
- configure_file(doc/make.bat.in "${CMAKE_CURRENT_BINARY_DIR}/doc/make.bat" @ONLY)
+ if(UNIX)
+ set( GUDHI_CYTHON_RUNTIME_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}")
+ endif(UNIX)
- # Generate cythonize_gudhi.py file to cythonize Gudhi
- configure_file(cythonize_gudhi.py.in "${CMAKE_CURRENT_BINARY_DIR}/cythonize_gudhi.py" @ONLY)
+ # Generate setup.py file to cythonize Gudhi - This file must be named setup.py by convention
+ configure_file(setup.py.in "${CMAKE_CURRENT_BINARY_DIR}/setup.py" @ONLY)
# Generate gudhi.pyx - Gudhi cython file
configure_file(gudhi.pyx.in "${CMAKE_CURRENT_BINARY_DIR}/gudhi.pyx" @ONLY)
add_custom_command(
- OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/gudhi.so"
+ OUTPUT gudhi.so
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_BINARY_DIR}/cythonize_gudhi.py" "build_ext" "--inplace")
+ COMMAND ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_BINARY_DIR}/setup.py" "build_ext" "--inplace")
- add_custom_target(cython ALL DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/gudhi.so"
+ add_custom_target(cython ALL DEPENDS gudhi.so
COMMENT "Do not forget to add ${CMAKE_CURRENT_BINARY_DIR}/ to your PYTHONPATH before using examples or tests")
- if(UNIX)
- set( ENV{PYTHONPATH} $ENV{PYTHONPATH}:${CMAKE_CURRENT_BINARY_DIR}/ )
- endif(UNIX)
+ # For installation purpose
+ # TODO(VR) : files matching pattern mechanism is copying all cython directory
+ install(DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}" DESTINATION "${PYTHON_SITE_PACKAGES}/" FILES_MATCHING
+ PATTERN "*.so"
+ PATTERN "*.dylib"
+ PATTERN "*.pyd")
# Test examples
- add_test(NAME alpha_complex_from_points_example_py_test
- WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_BINARY_DIR}/example/alpha_complex_from_points_example.py")
- set_tests_properties(alpha_complex_from_points_example_py_test PROPERTIES ENVIRONMENT "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}")
+ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
+ # Bottleneck and Alpha
+ add_test(NAME alpha_rips_persistence_bottleneck_distance_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/alpha_rips_persistence_bottleneck_distance.py"
+ -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -t 0.15 -d 3)
- add_test(NAME alpha_complex_diagram_persistence_from_off_file_example_py_test
- WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_BINARY_DIR}/example/alpha_complex_diagram_persistence_from_off_file_example.py"
- --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -a 0.6)
- set_tests_properties(alpha_complex_diagram_persistence_from_off_file_example_py_test PROPERTIES ENVIRONMENT "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}")
+ # Tangential
+ add_test(NAME tangential_complex_plain_homology_from_off_file_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/tangential_complex_plain_homology_from_off_file_example.py"
+ --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off)
- add_test(NAME alpha_rips_persistence_bottleneck_distance_py_test
- WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_BINARY_DIR}/example/alpha_rips_persistence_bottleneck_distance.py"
- -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -t 0.15 -d 3)
- set_tests_properties(alpha_rips_persistence_bottleneck_distance_py_test PROPERTIES ENVIRONMENT "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}")
+ add_gudhi_py_test(test_tangential_complex)
- add_test(NAME bottleneck_basic_example_py_test
- WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_BINARY_DIR}/example/bottleneck_basic_example.py")
- set_tests_properties(bottleneck_basic_example_py_test PROPERTIES ENVIRONMENT "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}")
+ # Witness complex AND Subsampling
+ add_test(NAME euclidean_strong_witness_complex_diagram_persistence_from_off_file_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py"
+ --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -a 1.0 -n 20 -d 2)
- add_test(NAME euclidean_strong_witness_complex_diagram_persistence_from_off_file_example_py_test
- WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_BINARY_DIR}/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py"
- --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -a 1.0 -n 20 -d 2)
- set_tests_properties(euclidean_strong_witness_complex_diagram_persistence_from_off_file_example_py_test PROPERTIES ENVIRONMENT "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}")
+ add_test(NAME euclidean_witness_complex_diagram_persistence_from_off_file_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py"
+ --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -a 1.0 -n 20 -d 2)
- add_test(NAME euclidean_witness_complex_diagram_persistence_from_off_file_example_py_test
- WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_BINARY_DIR}/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py"
- --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -a 1.0 -n 20 -d 2)
- set_tests_properties(euclidean_witness_complex_diagram_persistence_from_off_file_example_py_test PROPERTIES ENVIRONMENT "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}")
+ # Subsampling
+ add_gudhi_py_test(test_subsampling)
+ endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
+ if (NOT CGAL_VERSION VERSION_LESS 4.8.1)
+ # Bottleneck
+ add_test(NAME bottleneck_basic_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/bottleneck_basic_example.py")
+
+ add_gudhi_py_test(test_bottleneck_distance)
+ endif (NOT CGAL_VERSION VERSION_LESS 4.8.1)
+
+ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
+ # Alpha
+ add_test(NAME alpha_complex_from_points_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/alpha_complex_from_points_example.py")
+
+ add_test(NAME alpha_complex_diagram_persistence_from_off_file_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/alpha_complex_diagram_persistence_from_off_file_example.py"
+ --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -a 0.6)
+
+ add_gudhi_py_test(test_alpha_complex)
+
+ endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
+
+ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0)
+ # Euclidean witness
+ add_gudhi_py_test(test_euclidean_witness_complex)
+
+ endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0)
+
+ # Cubical
add_test(NAME periodic_cubical_complex_barcode_persistence_from_perseus_file_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_BINARY_DIR}/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py"
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py"
--no-barcode -f ${CMAKE_SOURCE_DIR}/data/bitmap/CubicalTwoSphere.txt)
- set_tests_properties(periodic_cubical_complex_barcode_persistence_from_perseus_file_example_py_test PROPERTIES ENVIRONMENT "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}")
add_test(NAME random_cubical_complex_persistence_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_BINARY_DIR}/example/random_cubical_complex_persistence_example.py"
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/random_cubical_complex_persistence_example.py"
10 10 10)
- set_tests_properties(random_cubical_complex_persistence_example_py_test PROPERTIES ENVIRONMENT "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}")
+ add_gudhi_py_test(test_cubical_complex)
+
+ # Rips
add_test(NAME rips_complex_diagram_persistence_from_distance_matrix_file_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_BINARY_DIR}/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py"
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py"
--no-diagram -f ${CMAKE_SOURCE_DIR}/data/distance_matrix/lower_triangular_distance_matrix.csv -e 12.0 -d 3)
- set_tests_properties(rips_complex_diagram_persistence_from_distance_matrix_file_example_py_test PROPERTIES ENVIRONMENT "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}")
add_test(NAME rips_complex_diagram_persistence_from_off_file_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_BINARY_DIR}/example/rips_complex_diagram_persistence_from_off_file_example.py
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/example/rips_complex_diagram_persistence_from_off_file_example.py
--no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -e 0.25 -d 3)
- set_tests_properties(rips_complex_diagram_persistence_from_off_file_example_py_test PROPERTIES ENVIRONMENT "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}")
add_test(NAME rips_complex_from_points_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_BINARY_DIR}/example/rips_complex_from_points_example.py)
- set_tests_properties(rips_complex_from_points_example_py_test PROPERTIES ENVIRONMENT "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}")
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/example/rips_complex_from_points_example.py)
+
+ add_gudhi_py_test(test_rips_complex)
+ # Simplex tree
add_test(NAME simplex_tree_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_BINARY_DIR}/example/simplex_tree_example.py)
- set_tests_properties(simplex_tree_example_py_test PROPERTIES ENVIRONMENT "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}")
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/example/simplex_tree_example.py)
- add_test(NAME tangential_complex_plain_homology_from_off_file_example_py_test
- WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_BINARY_DIR}/example/tangential_complex_plain_homology_from_off_file_example.py"
- --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off)
- set_tests_properties(tangential_complex_plain_homology_from_off_file_example_py_test PROPERTIES ENVIRONMENT "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}")
+ add_gudhi_py_test(test_simplex_tree)
+ # Witness
add_test(NAME witness_complex_from_nearest_landmark_table_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_BINARY_DIR}/example/witness_complex_from_nearest_landmark_table.py)
- set_tests_properties(witness_complex_from_nearest_landmark_table_py_test PROPERTIES ENVIRONMENT "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}")
-
- # Unitary tests are available through py.test
- if(PYTEST_PATH)
- add_test(
- NAME gudhi_cython_py_test
- WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${PYTHON_EXECUTABLE} "${PYTEST_PATH}")
- set_tests_properties(gudhi_cython_py_test PROPERTIES ENVIRONMENT "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}")
- endif(PYTEST_PATH)
-
- # Documentation generation is available through sphinx
- if(SPHINX_PATH)
- if (UNIX)
- add_custom_target(sphinx
- WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/doc
- DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/gudhi.so"
- COMMAND make html doctest)
- else (UNIX)
- add_custom_target(sphinx
- WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/doc
- COMMAND make.bat html doctest)
- endif (UNIX)
- endif(SPHINX_PATH)
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/example/witness_complex_from_nearest_landmark_table.py)
+
+ add_gudhi_py_test(test_witness_complex)
+
+ # Reader utils
+ add_gudhi_py_test(test_reader_utils)
+
+ # Documentation generation is available through sphinx - requires all modules
+ if(SPHINX_PATH AND NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
+ set (GUDHI_SPHINX_MESSAGE "Generating API documentation with Sphinx in ${CMAKE_CURRENT_BINARY_DIR}/sphinx/")
+ # User warning - Sphinx is a static page generator, and is configured to work with user_version
+ # Images and biblio warnings appear because they are not found in the developer version
+ if (GUDHI_CYTHON_PATH STREQUAL "src/cython")
+ set (GUDHI_SPHINX_MESSAGE "${GUDHI_SPHINX_MESSAGE} \n WARNING : Sphinx is configured for the user version, but you are running it on the developer version. Images and biblio will be missing")
+ endif()
+ # sphinx target requires gudhi.so, because conf.py reads gudhi version from it
+ add_custom_target(sphinx
+ WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/doc
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${SPHINX_PATH} -b html ${CMAKE_CURRENT_SOURCE_DIR}/doc ${CMAKE_CURRENT_BINARY_DIR}/sphinx
+ DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/gudhi.so"
+ COMMENT "${GUDHI_SPHINX_MESSAGE}" VERBATIM)
+
+ add_test(NAME sphinx_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${SPHINX_PATH} -b doctest ${CMAKE_CURRENT_SOURCE_DIR}/doc ${CMAKE_CURRENT_BINARY_DIR}/doctest)
+
+ endif(SPHINX_PATH AND NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
endif(CYTHON_FOUND)
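Note: the cython target above only prints a reminder that the build directory must be on PYTHONPATH before the examples or tests can import the freshly built gudhi.so. A minimal sketch of doing that from a script; the build path is a placeholder, not something defined by this diff.

import sys

# Placeholder for your own ${CMAKE_CURRENT_BINARY_DIR} of the cython module.
GUDHI_BUILD_DIR = "/path/to/gudhi/build/cython"

# Put the build directory first on the module search path, as the target's
# comment advises, so that the locally built gudhi.so is the one imported.
sys.path.insert(0, GUDHI_BUILD_DIR)

import gudhi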
diff --git a/cython/cython/persistence_graphical_tools.py b/cython/cython/persistence_graphical_tools.py
index a984633e..fb837e29 100755
--- a/cython/cython/persistence_graphical_tools.py
+++ b/cython/cython/persistence_graphical_tools.py
@@ -1,11 +1,12 @@
import matplotlib.pyplot as plt
import numpy as np
+import os
"""This file is part of the Gudhi Library. The Gudhi library
(Geometric Understanding in Higher Dimensions) is a generic C++
library for computational topology.
- Author(s): Vincent Rouvreau
+ Author(s): Vincent Rouvreau, Bertrand Michel
Copyright (C) 2016 INRIA
@@ -23,15 +24,17 @@ import numpy as np
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
-__author__ = "Vincent Rouvreau"
+__author__ = "Vincent Rouvreau, Bertrand Michel"
__copyright__ = "Copyright (C) 2016 INRIA"
__license__ = "GPL v3"
-def __min_birth_max_death(persistence):
+def __min_birth_max_death(persistence, band_boot=0.):
"""This function returns (min_birth, max_death) from the persistence.
:param persistence: The persistence to plot.
:type persistence: list of tuples(dimension, tuple(birth, death)).
+ :param band_boot: bootstrap band
+ :type band_boot: float.
:returns: (float, float) -- (min_birth, max_death).
"""
# Look for minimum birth date and maximum death date for plot optimisation
@@ -45,6 +48,8 @@ def __min_birth_max_death(persistence):
max_death = float(interval[1][0])
if float(interval[1][0]) < min_birth:
min_birth = float(interval[1][0])
+ if band_boot > 0.:
+ max_death += band_boot
return (min_birth, max_death)
"""
@@ -59,7 +64,7 @@ def show_palette_values(alpha=0.6):
:param alpha: alpha value in [0.0, 1.0] for horizontal bars (default is 0.6).
:type alpha: float.
- :returns: plot -- An horizontal bar plot of dimensions color.
+ :returns: plot -- A bar plot of the dimension palette values.
"""
colors = []
for color in palette:
@@ -70,18 +75,38 @@ def show_palette_values(alpha=0.6):
plt.barh(y_pos, y_pos + 1, align='center', alpha=alpha, color=colors)
plt.ylabel('Dimension')
plt.title('Dimension palette values')
+ return plt
- plt.show()
-
-def plot_persistence_barcode(persistence, alpha=0.6):
+def plot_persistence_barcode(persistence=[], persistence_file='', alpha=0.6, max_barcodes=0):
"""This function plots the persistence bar code.
:param persistence: The persistence to plot.
:type persistence: list of tuples(dimension, tuple(birth, death)).
+ :param persistence_file: A persistence file style name (reset persistence if both are set).
+ :type persistence_file: string
:param alpha: alpha value in [0.0, 1.0] for horizontal bars (default is 0.6).
:type alpha: float.
+ :param max_barcodes: maximal number of barcodes to be displayed
+ (persistence will be sorted by life time if max_barcodes is set)
+ :type max_barcodes: int.
:returns: plot -- An horizontal bar plot of persistence.
"""
+ if persistence_file is not '':
+ if os.path.isfile(persistence_file):
+ # Reset persistence
+ persistence = []
+ diag = read_persistence_intervals_grouped_by_dimension(persistence_file=persistence_file)
+ for key in diag.keys():
+ for persistence_interval in diag[key]:
+ persistence.append((key, persistence_interval))
+ else:
+ print("file " + persistence_file + " not found.")
+ return None
+
+ if max_barcodes > 0 and max_barcodes < len(persistence):
+ # Sort by life time, then takes only the max_plots elements
+ persistence = sorted(persistence, key=lambda life_time: life_time[1][1]-life_time[1][0], reverse=True)[:max_barcodes]
+
(min_birth, max_death) = __min_birth_max_death(persistence)
ind = 0
delta = ((max_death - min_birth) / 10.0)
@@ -106,18 +131,40 @@ def plot_persistence_barcode(persistence, alpha=0.6):
plt.title('Persistence barcode')
# Ends plot on infinity value and starts a little bit before min_birth
plt.axis([axis_start, infinity, 0, ind])
- plt.show()
+ return plt
-def plot_persistence_diagram(persistence, alpha=0.6):
- """This function plots the persistence diagram.
+def plot_persistence_diagram(persistence=[], persistence_file='', alpha=0.6, band_boot=0., max_plots=0):
+ """This function plots the persistence diagram with an optional confidence band.
:param persistence: The persistence to plot.
:type persistence: list of tuples(dimension, tuple(birth, death)).
+ :param persistence_file: A persistence file style name (reset persistence if both are set).
+ :type persistence_file: string
:param alpha: alpha value in [0.0, 1.0] for points and horizontal infinity line (default is 0.6).
:type alpha: float.
- :returns: plot -- An diagram plot of persistence.
+ :param band_boot: bootstrap band (not displayed if :math:`\leq` 0.)
+ :type band_boot: float.
+ :param max_plots: maximal number of plots to be displayed
+ :type max_plots: int.
+ :returns: plot -- A diagram plot of persistence.
"""
- (min_birth, max_death) = __min_birth_max_death(persistence)
+ if persistence_file is not '':
+ if os.path.isfile(persistence_file):
+ # Reset persistence
+ persistence = []
+ diag = read_persistence_intervals_grouped_by_dimension(persistence_file=persistence_file)
+ for key in diag.keys():
+ for persistence_interval in diag[key]:
+ persistence.append((key, persistence_interval))
+ else:
+ print("file " + persistence_file + " not found.")
+ return None
+
+ if max_plots > 0 and max_plots < len(persistence):
+ # Sort by life time, then takes only the max_plots elements
+ persistence = sorted(persistence, key=lambda life_time: life_time[1][1]-life_time[1][0], reverse=True)[:max_plots]
+
+ (min_birth, max_death) = __min_birth_max_death(persistence, band_boot)
ind = 0
delta = ((max_death - min_birth) / 10.0)
# Replace infinity values with max_death + delta for diagram to be more
@@ -131,6 +178,9 @@ def plot_persistence_diagram(persistence, alpha=0.6):
plt.plot(x, x, color='k', linewidth=1.0)
plt.plot(x, [infinity] * len(x), linewidth=1.0, color='k', alpha=alpha)
plt.text(axis_start, infinity, r'$\infty$', color='k', alpha=alpha)
+ # bootstrap band
+ if band_boot > 0.:
+ plt.fill_between(x, x, x+band_boot, alpha=alpha, facecolor='red')
# Draw points in loop
for interval in reversed(persistence):
@@ -149,4 +199,4 @@ def plot_persistence_diagram(persistence, alpha=0.6):
plt.ylabel('Death')
# Ends plot on infinity value and starts a little bit before min_birth
plt.axis([axis_start, infinity, axis_start, infinity + delta])
- plt.show()
+ return plt
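Note: after this change the plotting helpers return the pyplot module instead of calling plt.show() themselves, and they can read a persistence file directly, cap the number of displayed intervals, and draw a bootstrap band. A usage sketch, assuming the functions are re-exported by the gudhi module and using a placeholder file name:

import gudhi

# 'diagram.pers' is a placeholder persistence file with lines of the form
# "[[field] dimension] birth death", as read by the new reader utilities.
plt = gudhi.plot_persistence_diagram(persistence_file="diagram.pers",
                                     alpha=0.6, band_boot=0.05, max_plots=100)
plt.show()  # the caller now decides when (or whether) to display the figure

# The same data as a barcode, keeping only the 50 longest-lived intervals.
plt = gudhi.plot_persistence_barcode(persistence_file="diagram.pers",
                                     max_barcodes=50)
plt.show()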
diff --git a/cython/cython/reader_utils.pyx b/cython/cython/reader_utils.pyx
new file mode 100644
index 00000000..3a17c5a0
--- /dev/null
+++ b/cython/cython/reader_utils.pyx
@@ -0,0 +1,95 @@
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.string cimport string
+from libcpp.map cimport map
+from libcpp.pair cimport pair
+import os
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2017 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2017 INRIA"
+__license__ = "GPL v3"
+
+cdef extern from "Reader_utils_interface.h" namespace "Gudhi":
+ vector[vector[double]] read_matrix_from_csv_file(string off_file, char separator)
+ map[int, vector[pair[double, double]]] read_pers_intervals_grouped_by_dimension(string filename)
+ vector[pair[double, double]] read_pers_intervals_in_dimension(string filename, int only_this_dim)
+
+def read_lower_triangular_matrix_from_csv_file(csv_file='', separator=';'):
+ """Read lower triangular matrix from a CSV style file.
+
+ :param csv_file: A CSV file style name.
+ :type csv_file: string
+ :param separator: The value separator in the CSV file. Default value is ';'
+ :type separator: char
+
+ :returns: The lower triangular matrix.
+ :rtype: vector[vector[double]]
+ """
+ if csv_file is not '':
+ if os.path.isfile(csv_file):
+ return read_matrix_from_csv_file(str.encode(csv_file), ord(separator[0]))
+ print("file " + csv_file + " not set or not found.")
+ return []
+
+def read_persistence_intervals_grouped_by_dimension(persistence_file=''):
+ """Reads a file containing persistence intervals.
+ Each line might contain 2, 3 or 4 values: [[field] dimension] birth death
+ The return value is a `map[dim, vector[pair[birth, death]]]`
+ where `dim` is an `int`, `birth` a `double`, and `death` a `double`.
+ Note: the function does not check that birth <= death.
+
+ :param persistence_file: A persistence file style name.
+ :type persistence_file: string
+
+ :returns: The persistence pairs grouped by dimension.
+ :rtype: map[int, vector[pair[double, double]]]
+ """
+ if persistence_file is not '':
+ if os.path.isfile(persistence_file):
+ return read_pers_intervals_grouped_by_dimension(str.encode(persistence_file))
+ print("file " + persistence_file + " not set or not found.")
+ return []
+
+def read_persistence_intervals_in_dimension(persistence_file='', only_this_dim=-1):
+ """Reads a file containing persistence intervals.
+ Each line might contain 2, 3 or 4 values: [[field] dimension] birth death
+ If `only_this_dim` = -1, dimension is ignored and all lines are returned.
+ If `only_this_dim` is >= 0, only the lines where dimension = `only_this_dim`
+ (or where dimension is not specified) are returned.
+    The return value is a `vector[pair[birth, death]]`
+    where `birth` and `death` are `double`.
+ Note: the function does not check that birth <= death.
+
+    :param persistence_file: Path to a persistence diagram file.
+ :type persistence_file: string
+
+    :returns: The persistence intervals.
+    :rtype: vector[pair[double, double]]
+ """
+    if persistence_file != '':
+ if os.path.isfile(persistence_file):
+ return read_pers_intervals_in_dimension(str.encode(persistence_file), only_this_dim)
+ print("file " + persistence_file + " not set or not found.")
+ return []
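A minimal usage sketch for the CSV reader added above; the file name and its contents are assumptions, not part of this patch:

    import gudhi

    # Hypothetical lower-triangular distance matrix stored as CSV, one row per
    # line, values separated by ';' (the reader's default separator).
    csv_path = 'lower_triangular_distance_matrix.csv'

    matrix = gudhi.read_lower_triangular_matrix_from_csv_file(csv_file=csv_path,
                                                              separator=';')
    # When the file is missing, the reader prints a message and returns [].
    print("matrix =", matrix)
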
diff --git a/cython/cython/simplex_tree.pyx b/cython/cython/simplex_tree.pyx
index 9d40a8b5..45487158 100644
--- a/cython/cython/simplex_tree.pyx
+++ b/cython/cython/simplex_tree.pyx
@@ -2,6 +2,7 @@ from cython cimport numeric
from libcpp.vector cimport vector
from libcpp.utility cimport pair
from libcpp cimport bool
+from libcpp.string cimport string
"""This file is part of the Gudhi Library. The Gudhi library
(Geometric Understanding in Higher Dimensions) is a generic C++
@@ -35,9 +36,7 @@ cdef extern from "Simplex_tree_interface.h" namespace "Gudhi":
cdef cppclass Simplex_tree_interface_full_featured "Gudhi::Simplex_tree_interface<Gudhi::Simplex_tree_options_full_featured>":
Simplex_tree()
- double filtration()
double simplex_filtration(vector[int] simplex)
- void set_filtration(double filtration)
void initialize_filtration()
int num_vertices()
int num_simplices()
@@ -61,6 +60,7 @@ cdef extern from "Persistent_cohomology_interface.h" namespace "Gudhi":
vector[int] betti_numbers()
vector[int] persistent_betti_numbers(double from_value, double to_value)
vector[pair[double,double]] intervals_in_dimension(int dimension)
+ void write_output_diagram(string diagram_file_name)
# SimplexTree python interface
cdef class SimplexTree:
@@ -113,14 +113,6 @@ cdef class SimplexTree:
"""
return self.thisptr.simplex_filtration(simplex)
- def set_filtration(self, filtration):
- """This function sets the main simplicial complex filtration value.
-
- :param filtration: The filtration value.
- :type filtration: float.
- """
- self.thisptr.set_filtration(<double> filtration)
-
def initialize_filtration(self):
"""This function initializes and sorts the simplicial complex
filtration vector.
@@ -183,10 +175,10 @@ cdef class SimplexTree:
:returns: true if the simplex was found, false otherwise.
:rtype: bool
"""
- cdef vector[int] complex
+ cdef vector[int] csimplex
for i in simplex:
- complex.push_back(i)
- return self.thisptr.find_simplex(complex)
+ csimplex.push_back(i)
+ return self.thisptr.find_simplex(csimplex)
def insert(self, simplex, filtration=0.0):
"""This function inserts the given N-simplex and its subfaces with the
@@ -200,10 +192,10 @@ cdef class SimplexTree:
:returns: true if the simplex was found, false otherwise.
:rtype: bool
"""
- cdef vector[int] complex
+ cdef vector[int] csimplex
for i in simplex:
- complex.push_back(i)
- return self.thisptr.insert_simplex_and_subfaces(complex,
+ csimplex.push_back(i)
+ return self.thisptr.insert_simplex_and_subfaces(csimplex,
<double>filtration)
def get_filtration(self):
@@ -232,35 +224,35 @@ cdef class SimplexTree:
:returns: The (simplices of the) skeleton of a maximum dimension.
:rtype: list of tuples(simplex, filtration)
"""
- cdef vector[pair[vector[int], double]] skeletons \
+ cdef vector[pair[vector[int], double]] skeleton \
= self.thisptr.get_skeleton(<int>dimension)
ct = []
- for filtered_complex in skeletons:
+ for filtered_simplex in skeleton:
v = []
- for vertex in filtered_complex.first:
+ for vertex in filtered_simplex.first:
v.append(vertex)
- ct.append((v, filtered_complex.second))
+ ct.append((v, filtered_simplex.second))
return ct
def get_star(self, simplex):
- """This function returns the stars of a given N-simplex.
+ """This function returns the star of a given N-simplex.
:param simplex: The N-simplex, represented by a list of vertex.
:type simplex: list of int.
:returns: The (simplices of the) star of a simplex.
:rtype: list of tuples(simplex, filtration)
"""
- cdef vector[int] complex
+ cdef vector[int] csimplex
for i in simplex:
- complex.push_back(i)
- cdef vector[pair[vector[int], double]] stars \
- = self.thisptr.get_star(complex)
+ csimplex.push_back(i)
+ cdef vector[pair[vector[int], double]] star \
+ = self.thisptr.get_star(csimplex)
ct = []
- for filtered_complex in stars:
+ for filtered_simplex in star:
v = []
- for vertex in filtered_complex.first:
+ for vertex in filtered_simplex.first:
v.append(vertex)
- ct.append((v, filtered_complex.second))
+ ct.append((v, filtered_simplex.second))
return ct
def get_cofaces(self, simplex, codimension):
@@ -275,17 +267,17 @@ cdef class SimplexTree:
:returns: The (simplices of the) cofaces of a simplex
:rtype: list of tuples(simplex, filtration)
"""
- cdef vector[int] complex
+ cdef vector[int] csimplex
for i in simplex:
- complex.push_back(i)
+ csimplex.push_back(i)
cdef vector[pair[vector[int], double]] cofaces \
- = self.thisptr.get_cofaces(complex, <int>codimension)
+ = self.thisptr.get_cofaces(csimplex, <int>codimension)
ct = []
- for filtered_complex in cofaces:
+ for filtered_simplex in cofaces:
v = []
- for vertex in filtered_complex.first:
+ for vertex in filtered_simplex.first:
v.append(vertex)
- ct.append((v, filtered_complex.second))
+ ct.append((v, filtered_simplex.second))
return ct
def remove_maximal_simplex(self, simplex):
@@ -385,7 +377,7 @@ cdef class SimplexTree:
complex in a specific dimension.
:param dimension: The specific dimension.
- :type from_value: int.
+ :type dimension: int.
:returns: The persistence intervals.
:rtype: list of pair of float
@@ -399,3 +391,22 @@ cdef class SimplexTree:
print("intervals_in_dim function requires persistence function"
" to be launched first.")
return intervals_result
+
+ def write_persistence_diagram(self, persistence_file=''):
+ """This function writes the persistence intervals of the simplicial
+        complex in a user-given file.
+
+        :param persistence_file: Name of the file where the persistence intervals are written.
+ :type persistence_file: string.
+
+        :note: this function requires the persistence function to be
+            launched first.
+ """
+ if self.pcohptr != NULL:
+ if persistence_file != '':
+ self.pcohptr.write_output_diagram(str.encode(persistence_file))
+ else:
+ print("persistence_file must be specified")
+ else:
+            print("write_persistence_diagram function requires persistence"
+                " function to be launched first.")
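A sketch of the new write_persistence_diagram method in use: persistence() must be launched first, and the intervals are then written in the persistence diagram file format (see fileformats.rst below). The simplices and the output file name are arbitrary:

    import gudhi

    st = gudhi.SimplexTree()
    st.insert([0, 1], filtration=0.5)
    st.insert([1, 2], filtration=1.0)
    st.insert([0, 1, 2], filtration=2.0)

    # write_persistence_diagram requires persistence() to be launched first
    st.persistence()
    st.write_persistence_diagram('simplex_tree.pers')
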
diff --git a/cython/doc/Makefile.in b/cython/doc/Makefile.in
deleted file mode 100644
index 526350b3..00000000
--- a/cython/doc/Makefile.in
+++ /dev/null
@@ -1,44 +0,0 @@
-# Makefile for Sphinx documentation
-#
-
-# You can set these variables from the command line.
-SPHINXOPTS =
-SPHINXBUILD = @SPHINX_PATH@
-PAPER =
-BUILDDIR = _build
-
-# User-friendly check for sphinx-build
-ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
-$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
-endif
-
-# Internal variables.
-PAPEROPT_a4 = -D latex_paper_size=a4
-PAPEROPT_letter = -D latex_paper_size=letter
-ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
-# the i18n builder cannot share the environment and doctrees with the others
-I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
-
-.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
-
-help:
- @echo "Please use \`make <target>' where <target> is one of"
- @echo " html to make standalone HTML files"
- @echo " doctest to run all doctests embedded in the documentation (if enabled)"
-
-clean:
- rm -f examples.inc
- rm -rf $(BUILDDIR)/*
-
-# GUDHI specific : Examples.inc is generated with generate_examples.py (and deleted on clean)
-
-html:
- ./generate_examples.py
- $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
- @echo
- @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
-
-doctest:
- $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
- @echo "Testing of doctests in the sources finished, look at the " \
- "results in $(BUILDDIR)/doctest/output.txt."
diff --git a/cython/doc/_templates/layout.html b/cython/doc/_templates/layout.html
index b11c1236..243f33c6 100644
--- a/cython/doc/_templates/layout.html
+++ b/cython/doc/_templates/layout.html
@@ -65,6 +65,7 @@
{#- old style sidebars: using blocks -- should be deprecated #}
{%- block sidebartoc %}
<h2><a href="index.html">GUDHI</a></h2>
+<h2><a href="fileformats.html">File formats</a></h2>
<h2><a href="installation.html">GUDHI installation</a></h2>
<h2><a href="citation.html">Acknowledging the GUDHI library</a></h2>
<h2><a href="genindex.html">Index</a></h2>
diff --git a/cython/doc/alpha_complex_sum.rst b/cython/doc/alpha_complex_sum.rst
index 8437e901..1680a712 100644
--- a/cython/doc/alpha_complex_sum.rst
+++ b/cython/doc/alpha_complex_sum.rst
@@ -5,7 +5,7 @@
+----------------------------------------------------------------+------------------------------------------------------------------------+
| .. figure:: | Alpha_complex is a simplicial complex constructed from the finite |
-| img/alpha_complex_representation.png | cells of a Delaunay Triangulation. |
+| ../../doc/Alpha_complex/alpha_complex_representation.png | cells of a Delaunay Triangulation. |
| :alt: Alpha complex representation | |
| :figclass: align-center | The filtration value of each simplex is computed as the square of the |
| | circumradius of the simplex if the circumsphere is empty (the simplex |
@@ -16,7 +16,7 @@
| | complex. |
| | |
| | This package requires having CGAL version 4.7 or higher (4.8.1 is |
-| | advised for better perfomances). |
+| | advised for better performance). |
+----------------------------------------------------------------+------------------------------------------------------------------------+
| :doc:`alpha_complex_user` | :doc:`alpha_complex_ref` |
+----------------------------------------------------------------+------------------------------------------------------------------------+
diff --git a/cython/doc/alpha_complex_user.rst b/cython/doc/alpha_complex_user.rst
index e8268ef1..db7edd6f 100644
--- a/cython/doc/alpha_complex_user.rst
+++ b/cython/doc/alpha_complex_user.rst
@@ -75,7 +75,7 @@ In order to build the alpha complex, first, a Simplex tree is built from the cel
(The filtration value is set to NaN, which stands for unknown value):
.. figure::
- img/alpha_complex_doc.png
+ ../../doc/Alpha_complex/alpha_complex_doc.png
:figclass: align-center
:alt: Simplex tree structure construction example
@@ -112,7 +112,7 @@ computes the filtration value of the triangle, and then propagates the filtratio
here:
.. figure::
- img/alpha_complex_doc_420.png
+ ../../doc/Alpha_complex/alpha_complex_doc_420.png
:figclass: align-center
:alt: Filtration value propagation example
@@ -142,7 +142,7 @@ Prune above given filtration value
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The simplex tree is pruned from the given maximum alpha squared value (cf. `Simplex_tree::prune_above_filtration()`
-int he `C++ version <http://gudhi.gforge.inria.fr/doc/latest/index.html>`_).
+in the `C++ version <http://gudhi.gforge.inria.fr/doc/latest/index.html>`_).
In the following example, the value is given by the user as argument of the program.
@@ -158,7 +158,8 @@ Then, it is asked to display information about the alpha complex:
.. testcode::
import gudhi
- alpha_complex = gudhi.AlphaComplex(off_file='alphacomplexdoc.off')
+ alpha_complex = gudhi.AlphaComplex(off_file=gudhi.__root_source_dir__ + \
+ '/data/points/alphacomplexdoc.off')
simplex_tree = alpha_complex.create_simplex_tree(max_alpha_square=59.0)
result_str = 'Alpha complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \
repr(simplex_tree.num_simplices()) + ' simplices - ' + \
@@ -200,6 +201,6 @@ the program output is:
CGAL citations
==============
-.. bibliography:: how_to_cite_cgal.bib
+.. bibliography:: ../../biblio/how_to_cite_cgal.bib
:filter: docnames
:style: unsrt
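The alpha complex can also be built directly from a list of points, as in alpha_complex_from_points_example.py, which avoids the OFF-file path handling shown above. A small sketch, assuming a CGAL-enabled build and arbitrary coordinates:

    import gudhi

    alpha_complex = gudhi.AlphaComplex(points=[[0.0, 0.0], [1.0, 0.0],
                                               [0.0, 1.0], [1.0, 1.0]])
    simplex_tree = alpha_complex.create_simplex_tree(max_alpha_square=2.0)
    print('Alpha complex is of dimension', simplex_tree.dimension(),
          '-', simplex_tree.num_simplices(), 'simplices')
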
diff --git a/cython/doc/bottleneck_distance_sum.rst b/cython/doc/bottleneck_distance_sum.rst
index 5c475d0d..030fad9e 100644
--- a/cython/doc/bottleneck_distance_sum.rst
+++ b/cython/doc/bottleneck_distance_sum.rst
@@ -5,7 +5,7 @@
+-----------------------------------------------------------------+----------------------------------------------------------------------+
| .. figure:: | Bottleneck distance measures the similarity between two persistence |
-| img/perturb_pd.png | diagrams. It's the shortest distance b for which there exists a |
+| ../../doc/Bottleneck_distance/perturb_pd.png | diagrams. It's the shortest distance b for which there exists a |
| :figclass: align-center | perfect matching between the points of the two diagrams (+ all the |
| | diagonal points) such that any couple of matched points are at |
| Bottleneck distance is the length of | distance at most b. |
diff --git a/cython/doc/bottleneck_distance_user.rst b/cython/doc/bottleneck_distance_user.rst
index 0066992f..7692dce2 100644
--- a/cython/doc/bottleneck_distance_user.rst
+++ b/cython/doc/bottleneck_distance_user.rst
@@ -25,7 +25,7 @@ This example computes the bottleneck distance from 2 persistence diagrams:
message = "Bottleneck distance approximation=" + '%.2f' % gudhi.bottleneck_distance(diag1, diag2, 0.1)
print(message)
- message = "Bottleneck distance exact value=" + '%.2f' % gudhi.bottleneck_distance(diag1, diag2, 0)
+ message = "Bottleneck distance value=" + '%.2f' % gudhi.bottleneck_distance(diag1, diag2)
print(message)
The output is:
@@ -33,4 +33,4 @@ The output is:
.. testoutput::
Bottleneck distance approximation=0.81
- Bottleneck distance exact value=0.75
+ Bottleneck distance value=0.75
diff --git a/cython/doc/citation.rst b/cython/doc/citation.rst
index 6cdfb7cc..f4fdf83b 100644
--- a/cython/doc/citation.rst
+++ b/cython/doc/citation.rst
@@ -12,4 +12,4 @@ Manual, as well as for publications directly related to the GUDHI library.
GUDHI bibtex
************
-.. literalinclude:: how_to_cite_gudhi.bib
+.. literalinclude:: ../../biblio/how_to_cite_gudhi.bib
diff --git a/cython/doc/conf.py b/cython/doc/conf.py
index 42bfd59c..19a880d4 100755
--- a/cython/doc/conf.py
+++ b/cython/doc/conf.py
@@ -21,7 +21,7 @@ import os
#sys.path.insert(0, os.path.abspath('.'))
# Path to Gudhi.so from source path
-sys.path.insert(0, os.path.abspath('..'))
+sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
@@ -58,18 +58,20 @@ source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
+import gudhi
+
# General information about the project.
-project = u'GUDHI'
-copyright = u'2016, GUDHI Editorial Board'
+project = gudhi.__name__
+copyright = gudhi.__copyright__
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
-version = '2.0'
+version = gudhi.__version__
# The full version, including alpha/beta/rc tags.
-release = '2.0.0'
+#release = '2.0.1-rc1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
@@ -198,81 +200,3 @@ html_static_path = ['_static']
# Output file base name for HTML help builder.
htmlhelp_basename = 'GUDHIdoc'
-
-# -- Options for LaTeX output ---------------------------------------------
-
-latex_elements = {
-# The paper size ('letterpaper' or 'a4paper').
-#'papersize': 'letterpaper',
-
-# The font size ('10pt', '11pt' or '12pt').
-#'pointsize': '10pt',
-
-# Additional stuff for the LaTeX preamble.
-#'preamble': '',
-}
-
-# Grouping the document tree into LaTeX files. List of tuples
-# (source start file, target name, title,
-# author, documentclass [howto, manual, or own class]).
-latex_documents = [
- ('index', 'GUDHI.tex', u'GUDHI Documentation',
- u'Vincent Rouvreau', 'manual'),
-]
-
-# The name of an image file (relative to this directory) to place at the top of
-# the title page.
-#latex_logo = None
-
-# For "manual" documents, if this is true, then toplevel headings are parts,
-# not chapters.
-#latex_use_parts = False
-
-# If true, show page references after internal links.
-#latex_show_pagerefs = False
-
-# If true, show URL addresses after external links.
-#latex_show_urls = False
-
-# Documents to append as an appendix to all manuals.
-#latex_appendices = []
-
-# If false, no module index is generated.
-#latex_domain_indices = True
-
-
-# -- Options for manual page output ---------------------------------------
-
-# One entry per manual page. List of tuples
-# (source start file, name, description, authors, manual section).
-man_pages = [
- ('index', 'gudhi', u'GUDHI Documentation',
- [u'Vincent Rouvreau'], 1)
-]
-
-# If true, show URL addresses after external links.
-#man_show_urls = False
-
-
-# -- Options for Texinfo output -------------------------------------------
-
-# Grouping the document tree into Texinfo files. List of tuples
-# (source start file, target name, title, author,
-# dir menu entry, description, category)
-texinfo_documents = [
- ('index', 'GUDHI', u'GUDHI Documentation',
- u'Vincent Rouvreau', 'GUDHI', 'One line description of project.',
- 'Miscellaneous'),
-]
-
-# Documents to append as an appendix to all manuals.
-#texinfo_appendices = []
-
-# If false, no module index is generated.
-#texinfo_domain_indices = True
-
-# How to display URL addresses: 'footnote', 'no', or 'inline'.
-#texinfo_show_urls = 'footnote'
-
-# If true, do not generate a @detailmenu in the "Top" node's menu.
-#texinfo_no_detailmenu = False
diff --git a/cython/doc/cubical_complex_sum.rst b/cython/doc/cubical_complex_sum.rst
index 3ddf6375..280ad0e0 100644
--- a/cython/doc/cubical_complex_sum.rst
+++ b/cython/doc/cubical_complex_sum.rst
@@ -2,14 +2,14 @@
:Author: Pawel Dlotko :Introduced in: GUDHI 2.0.0 :Copyright: GPL v3
================================================================= =================================== ===================================
-+-----------------------------------------------------------------+----------------------------------------------------------------------+
-| .. figure:: | The cubical complex is an example of a structured complex useful in |
-| img/Cubical_complex_representation.png | computational mathematics (specially rigorous numerics) and image |
-| :alt: Cubical complex representation | analysis. |
-| :figclass: align-center | |
-| | |
-| Cubical complex representation | |
-+-----------------------------------------------------------------+----------------------------------------------------------------------+
-| :doc:`cubical_complex_user` | * :doc:`cubical_complex_ref` |
-| | * :doc:`periodic_cubical_complex_ref` |
-+-----------------------------------------------------------------+----------------------------------------------------------------------+
++--------------------------------------------------------------------------+----------------------------------------------------------------------+
+| .. figure:: | The cubical complex is an example of a structured complex useful in |
+| ../../doc/Bitmap_cubical_complex/Cubical_complex_representation.png | computational mathematics (specially rigorous numerics) and image |
+| :alt: Cubical complex representation | analysis. |
+| :figclass: align-center | |
+| | |
+| Cubical complex representation | |
++--------------------------------------------------------------------------+----------------------------------------------------------------------+
+| :doc:`cubical_complex_user` | * :doc:`cubical_complex_ref` |
+| | * :doc:`periodic_cubical_complex_ref` |
++--------------------------------------------------------------------------+----------------------------------------------------------------------+
diff --git a/cython/doc/cubical_complex_user.rst b/cython/doc/cubical_complex_user.rst
index 344b9554..2bfac62a 100644
--- a/cython/doc/cubical_complex_user.rst
+++ b/cython/doc/cubical_complex_user.rst
@@ -59,7 +59,7 @@ directions, allows to determine, dimension, neighborhood, boundary and coboundar
:math:`C \in \mathcal{K}`.
.. figure::
- img/Cubical_complex_representation.png
+ ../../doc/Bitmap_cubical_complex/Cubical_complex_representation.png
:alt: Cubical complex.
:figclass: align-center
@@ -87,7 +87,7 @@ in the example below). Next, in lexicographical order, the filtration of top dim
20 4 7 6 5 in the example below).
.. figure::
- img/exampleBitmap.png
+ ../../doc/Bitmap_cubical_complex/exampleBitmap.png
:alt: Example of a input data.
:figclass: align-center
@@ -95,14 +95,15 @@ in the example below). Next, in lexicographical order, the filtration of top dim
The input file for the following complex is:
-.. literalinclude:: cubicalcomplexdoc.txt
+.. literalinclude:: ../../data/bitmap/cubicalcomplexdoc.txt
-.. centered:: cubicalcomplexdoc.txt
+.. centered:: ../../data/bitmap/cubicalcomplexdoc.txt
.. testcode::
import gudhi
- cubical_complex = gudhi.CubicalComplex(perseus_file='cubicalcomplexdoc.txt')
+ cubical_complex = gudhi.CubicalComplex(perseus_file=gudhi.__root_source_dir__ + \
+ '/data/bitmap/cubicalcomplexdoc.txt')
result_str = 'Cubical complex is of dimension ' + repr(cubical_complex.dimension()) + ' - ' + \
repr(cubical_complex.num_simplices()) + ' simplices.'
print(result_str)
@@ -127,16 +128,17 @@ complex with periodic boundary conditions. One can also use Perseus style input
conditions in a given direction, then number of top dimensional cells in this direction have to be multiplied by -1.
For instance:
-.. literalinclude:: periodiccubicalcomplexdoc.txt
+.. literalinclude:: ../../data/bitmap/periodiccubicalcomplexdoc.txt
-.. centered:: periodiccubicalcomplexdoc.txt
+.. centered:: ../../data/bitmap/periodiccubicalcomplexdoc.txt
Indicate that we have imposed periodic boundary conditions in the direction x, but not in the direction y.
.. testcode::
import gudhi
- periodic_cc = gudhi.PeriodicCubicalComplex(perseus_file='periodiccubicalcomplexdoc.txt')
+ periodic_cc = gudhi.PeriodicCubicalComplex(perseus_file=gudhi.__root_source_dir__ + \
+ '/data/bitmap/periodiccubicalcomplexdoc.txt')
result_str = 'Periodic cubical complex is of dimension ' + repr(periodic_cc.dimension()) + ' - ' + \
repr(periodic_cc.num_simplices()) + ' simplices.'
print(result_str)
@@ -155,6 +157,6 @@ End user programs are available in cython/example/ folder.
Bibliography
============
-.. bibliography:: bibliography.bib
+.. bibliography:: ../../bibliography.bib
:filter: docnames
:style: unsrt
diff --git a/cython/doc/examples.rst b/cython/doc/examples.rst
index a89e0596..1e596e18 100644
--- a/cython/doc/examples.rst
+++ b/cython/doc/examples.rst
@@ -1,4 +1,21 @@
Examples
########
-.. include:: examples.inc
+.. only:: builder_html
+
+ * :download:`rips_complex_from_points_example.py <../example/rips_complex_from_points_example.py>`
+ * :download:`alpha_complex_from_points_example.py <../example/alpha_complex_from_points_example.py>`
+ * :download:`simplex_tree_example.py <../example/simplex_tree_example.py>`
+ * :download:`alpha_rips_persistence_bottleneck_distance.py <../example/alpha_rips_persistence_bottleneck_distance.py>`
+ * :download:`tangential_complex_plain_homology_from_off_file_example.py <../example/tangential_complex_plain_homology_from_off_file_example.py>`
+ * :download:`alpha_complex_diagram_persistence_from_off_file_example.py <../example/alpha_complex_diagram_persistence_from_off_file_example.py>`
+ * :download:`periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py <../example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py>`
+ * :download:`bottleneck_basic_example.py <../example/bottleneck_basic_example.py>`
+ * :download:`gudhi_graphical_tools_example.py <../example/gudhi_graphical_tools_example.py>`
+ * :download:`witness_complex_from_nearest_landmark_table.py <../example/witness_complex_from_nearest_landmark_table.py>`
+ * :download:`euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py>`
+ * :download:`euclidean_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py>`
+ * :download:`rips_complex_diagram_persistence_from_off_file_example.py <../example/rips_complex_diagram_persistence_from_off_file_example.py>`
+ * :download:`rips_complex_diagram_persistence_from_distance_matrix_file_example.py <../example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py>`
+ * :download:`rips_persistence_diagram.py <../example/rips_persistence_diagram.py>`
+ * :download:`random_cubical_complex_persistence_example.py <../example/random_cubical_complex_persistence_example.py>`
diff --git a/cython/doc/fileformats.rst b/cython/doc/fileformats.rst
new file mode 100644
index 00000000..156ef4e4
--- /dev/null
+++ b/cython/doc/fileformats.rst
@@ -0,0 +1,33 @@
+File formats
+############
+
+Persistence Diagram
+*******************
+
+Such a file, whose extension is usually ``.pers``, contains a list of
+persistence intervals.
+
+Lines starting with ``#`` are ignored (comments).
+
+Other lines might contain 2, 3 or 4 values (the number of values on each line
+must be the same for all lines)::
+
+ [[field] dimension] birth death
+
+Here is a simple sample file::
+
+ # Persistence diagram example
+ 2 2.7 3.7
+ 2 9.6 14.
+ # Some comments
+ 3 34.2 34.974
+ 4 3. inf
+
+Other sample files can be found in the data/persistence_diagram folder.
+
+Such files can be generated with
+:meth:`gudhi.SimplexTree.write_persistence_diagram`, read with
+:func:`gudhi.read_persistence_intervals_grouped_by_dimension` or
+:func:`gudhi.read_persistence_intervals_in_dimension`, and displayed with
+:func:`gudhi.plot_persistence_barcode` or
+:func:`gudhi.plot_persistence_diagram`.
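To illustrate the format, the sample diagram above can be written out by hand and read back with one of the readers added in this release; a sketch with an arbitrary file name:

    import gudhi

    with open('example.pers', 'w') as pers_file:
        pers_file.write("# Persistence diagram example\n"
                        "2 2.7 3.7\n"
                        "2 9.6 14.\n"
                        "3 34.2 34.974\n"
                        "4 3. inf\n")

    # Prints the intervals grouped by dimension, as {dim: [(birth, death), ...]}
    print(gudhi.read_persistence_intervals_grouped_by_dimension(
        persistence_file='example.pers'))
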
diff --git a/cython/doc/generate_examples.py b/cython/doc/generate_examples.py
deleted file mode 100755
index d64d506c..00000000
--- a/cython/doc/generate_examples.py
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/env python
-
-from os import listdir
-
-"""This file is part of the Gudhi Library. The Gudhi library
- (Geometric Understanding in Higher Dimensions) is a generic C++
- library for computational topology.
-
- Author(s): Vincent Rouvreau
-
- Copyright (C) 2017 INRIA
-
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <http://www.gnu.org/licenses/>.
-"""
-
-__author__ = "Vincent Rouvreau"
-__copyright__ = "Copyright (C) 2017 INRIA"
-__license__ = "GPL v3"
-
-"""
-generate_examples.py generates examples.inc to be included in examples.rst.
-Refer to Makefile and make.bat to see if it is correctly launched.
-"""
-
-output_file = open('examples.inc','w')
-
-output_file.write('.. only:: builder_html\n\n')
-
-for file in listdir('../example/'):
- output_file.write(" * :download:`" + file + " <../example/" + file + ">`\n")
-
-output_file.close()
diff --git a/cython/doc/index.rst b/cython/doc/index.rst
index 94dbc65a..3945d72a 100644
--- a/cython/doc/index.rst
+++ b/cython/doc/index.rst
@@ -1,15 +1,17 @@
-GUDHI documentation
-###################
+GUDHI Python module documentation
+#################################
-.. image:: img/Gudhi_banner.png
- :align: center
+.. figure::
+ ../../doc/common/Gudhi_banner.png
+ :alt: Gudhi banner
+ :figclass: align-center
Introduction
************
-The Gudhi library (Geometry Understanding in Higher Dimensions) is a generic
-open source `C++ library <http://gudhi.gforge.inria.fr/doc/latest/>`_, with a
-`Python interface <http://gudhi.gforge.inria.fr/python/latest/>`_, for
+The Python interface for the Gudhi library (Geometry Understanding in Higher
+Dimensions) is a generic open source
+`Python module <http://gudhi.gforge.inria.fr/python/latest/>`_, for
Computational Topology and Topological Data Analysis
(`TDA <https://en.wikipedia.org/wiki/Topological_data_analysis>`_).
The GUDHI library intends to help the development of new algorithmic solutions
@@ -20,12 +22,11 @@ data structures.
The current release of the GUDHI library includes:
* Data structures to represent, construct and manipulate simplicial complexes.
-* Algorithms to compute persistent homology and multi-field persistent homology.
-* Simplication of simplicial complexes by edge contraction.
+* Simplification of simplicial complexes by edge contraction.
+* Algorithms to compute persistent homology and bottleneck distance.
-All data-structures are generic and several of their aspects can be
-parameterized via template classes. We refer to :cite:`gudhilibrary_ICMS14`
-for a detailed description of the design of the library.
+We refer to :cite:`gudhilibrary_ICMS14` for a detailed description of the
+design of the library.
Data structures
***************
@@ -82,6 +83,6 @@ Persistence graphical tools
Bibliography
************
-.. bibliography:: bibliography.bib
+.. bibliography:: ../../biblio/bibliography.bib
:filter: docnames
:style: unsrt
diff --git a/cython/doc/installation.rst b/cython/doc/installation.rst
index f98a5039..c182f176 100644
--- a/cython/doc/installation.rst
+++ b/cython/doc/installation.rst
@@ -68,31 +68,32 @@ The :doc:`Alpha complex </alpha_complex_user>`,
C++ library which provides easy access to efficient and reliable geometric
algorithms.
-Having CGAL version 4.6.0 or higher installed is recommended. The procedure to
-install this library according to your operating system is detailed
+Having CGAL, the Computational Geometry Algorithms Library, version 4.7.0 or
+higher installed is recommended. The procedure to install this library
+according to your operating system is detailed
`here <http://doc.cgal.org/latest/Manual/installation.html>`_.
-The following examples require the Computational Geometry Algorithms Library:
-
-.. only:: builder_html
-
- * :download:`euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py>`
- * :download:`euclidean_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py>`
-
-The following example requires CGAL version ≥ 4.7.0:
+The following examples require CGAL version ≥ 4.7.0:
.. only:: builder_html
* :download:`alpha_complex_diagram_persistence_from_off_file_example.py <../example/alpha_complex_diagram_persistence_from_off_file_example.py>`
* :download:`alpha_complex_from_points_example.py <../example/alpha_complex_from_points_example.py>`
-The following example requires CGAL version ≥ 4.8.0:
+The following examples require CGAL version ≥ 4.8.0:
.. only:: builder_html
* :download:`bottleneck_basic_example.py <../example/bottleneck_basic_example.py>`
* :download:`tangential_complex_plain_homology_from_off_file_example.py <../example/tangential_complex_plain_homology_from_off_file_example.py>`
+The following examples require CGAL version ≥ 4.8.1:
+
+.. only:: builder_html
+
+ * :download:`euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py>`
+ * :download:`euclidean_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py>`
+
Eigen3
======
diff --git a/cython/doc/make.bat.in b/cython/doc/make.bat.in
deleted file mode 100644
index ff1a6d56..00000000
--- a/cython/doc/make.bat.in
+++ /dev/null
@@ -1,67 +0,0 @@
-@ECHO OFF
-
-REM Command file for Sphinx documentation
-
-if "%SPHINXBUILD%" == "" (
- set SPHINXBUILD=@SPHINX_PATH@
-)
-set BUILDDIR=_build
-set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
-set I18NSPHINXOPTS=%SPHINXOPTS% .
-if NOT "%PAPER%" == "" (
- set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
- set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
-)
-
-if "%1" == "" goto help
-
-if "%1" == "help" (
- :help
- echo.Please use `make ^<target^>` where ^<target^> is one of
- echo. html to make standalone HTML files
- echo. doctest to run all doctests embedded in the documentation if enabled
- goto end
-)
-
-if "%1" == "clean" (
- del examples.inc
- for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
- del /q /s %BUILDDIR%\*
- goto end
-)
-
-
-%SPHINXBUILD% 2> nul
-if errorlevel 9009 (
- echo.
- echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
- echo.installed, then set the SPHINXBUILD environment variable to point
- echo.to the full path of the 'sphinx-build' executable. Alternatively you
- echo.may add the Sphinx directory to PATH.
- echo.
- echo.If you don't have Sphinx installed, grab it from
- echo.http://sphinx-doc.org/
- exit /b 1
-)
-
-:: GUDHI specific : Examples.inc is generated with generate_examples.py (and deleted on clean)
-
-if "%1" == "html" (
- generate_examples.py
- %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The HTML pages are in %BUILDDIR%/html.
- goto end
-)
-
-if "%1" == "doctest" (
- %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
- if errorlevel 1 exit /b 1
- echo.
- echo.Testing of doctests in the sources finished, look at the ^
-results in %BUILDDIR%/doctest/output.txt.
- goto end
-)
-
-:end
diff --git a/cython/doc/persistence_graphical_tools_user.rst b/cython/doc/persistence_graphical_tools_user.rst
index cae18323..9033331f 100644
--- a/cython/doc/persistence_graphical_tools_user.rst
+++ b/cython/doc/persistence_graphical_tools_user.rst
@@ -14,12 +14,14 @@ This function is useful to show the color palette values of dimension:
.. testcode::
import gudhi
- gudhi.show_palette_values(alpha=1.0)
+ plt = gudhi.show_palette_values(alpha=1.0)
+ plt.show()
.. plot::
import gudhi
- gudhi.show_palette_values(alpha=1.0)
+ plt = gudhi.show_palette_values(alpha=1.0)
+ plt.show()
Show persistence as a barcode
-----------------------------
@@ -30,17 +32,22 @@ This function can display the persistence result as a barcode:
import gudhi
- periodic_cc = gudhi.PeriodicCubicalComplex(perseus_file='3d_torus.txt')
+ periodic_cc = gudhi.PeriodicCubicalComplex(perseus_file=gudhi.__root_source_dir__ + \
+ '/data/bitmap/3d_torus.txt')
diag = periodic_cc.persistence()
- gudhi.plot_persistence_barcode(diag)
+ plt = gudhi.plot_persistence_barcode(diag)
+ plt.show()
.. plot::
import gudhi
- periodic_cc = gudhi.PeriodicCubicalComplex(perseus_file='3d_torus.txt')
+ periodic_cc = gudhi.PeriodicCubicalComplex(perseus_file=gudhi.__root_source_dir__ + \
+ '/data/bitmap/3d_torus.txt')
diag = periodic_cc.persistence()
- gudhi.plot_persistence_barcode(diag)
+ print("diag = ", diag)
+ plt = gudhi.plot_persistence_barcode(diag)
+ plt.show()
Show persistence as a diagram
-----------------------------
@@ -51,16 +58,20 @@ This function can display the persistence result as a diagram:
import gudhi
- rips_complex = gudhi.RipsComplex(off_file='tore3D_300.off', max_edge_length=2.0)
+ rips_complex = gudhi.RipsComplex(off_file=gudhi.__root_source_dir__ + \
+ '/data/points/tore3D_1307.off', max_edge_length=0.2)
simplex_tree = rips_complex.create_simplex_tree(max_dimension=3)
diag = simplex_tree.persistence()
- gudhi.plot_persistence_diagram(diag)
+ plt = gudhi.plot_persistence_diagram(diag, band_boot=0.13)
+ plt.show()
.. plot::
import gudhi
- rips_complex = gudhi.RipsComplex(off_file='tore3D_300.off', max_edge_length=2.0)
+ rips_complex = gudhi.RipsComplex(off_file=gudhi.__root_source_dir__ + \
+ '/data/points/tore3D_1307.off', max_edge_length=0.2)
simplex_tree = rips_complex.create_simplex_tree(max_dimension=3)
diag = simplex_tree.persistence()
- gudhi.plot_persistence_diagram(diag)
+ plt = gudhi.plot_persistence_diagram(diag, band_boot=0.13)
+ plt.show()
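Because the plotting helpers now return the pyplot module instead of calling show() themselves (cf. the `return plt` change at the top of this patch), a caller can just as well save the figure to disk. A short sketch, assuming matplotlib is available; the point cloud and the output file name are arbitrary:

    import gudhi

    rips_complex = gudhi.RipsComplex(points=[[0, 0], [1, 0], [0, 1], [1, 1]],
                                     max_edge_length=2.0)
    simplex_tree = rips_complex.create_simplex_tree(max_dimension=2)
    diag = simplex_tree.persistence()

    plt = gudhi.plot_persistence_diagram(diag)
    plt.savefig('persistence_diagram.png')  # instead of plt.show()
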
diff --git a/cython/doc/persistent_cohomology_sum.rst b/cython/doc/persistent_cohomology_sum.rst
index d1f79cb4..a26df1dc 100644
--- a/cython/doc/persistent_cohomology_sum.rst
+++ b/cython/doc/persistent_cohomology_sum.rst
@@ -4,7 +4,7 @@
+-----------------------------------------------------------------+-----------------------------------------------------------------------+
| .. figure:: | The theory of homology consists in attaching to a topological space |
-| img/3DTorus_poch.png | a sequence of (homology) groups, capturing global topological |
+| ../../doc/Persistent_cohomology/3DTorus_poch.png | a sequence of (homology) groups, capturing global topological |
| :figclass: align-center | features like connected components, holes, cavities, etc. Persistent |
| | homology studies the evolution -- birth, life and death -- of these |
| Rips Persistent Cohomology on a 3D | features when the topological space is changing. Consequently, the |
diff --git a/cython/doc/persistent_cohomology_user.rst b/cython/doc/persistent_cohomology_user.rst
index 72f1a7f7..bf90c163 100644
--- a/cython/doc/persistent_cohomology_user.rst
+++ b/cython/doc/persistent_cohomology_user.rst
@@ -109,6 +109,6 @@ We provide several example files: run these examples with -h for details on thei
Bibliography
============
-.. bibliography:: bibliography.bib
+.. bibliography:: ../../biblio/bibliography.bib
:filter: docnames
:style: unsrt
diff --git a/cython/doc/pyplots/barcode_persistence.py b/cython/doc/pyplots/barcode_persistence.py
index c06ac5a7..de33d506 100755
--- a/cython/doc/pyplots/barcode_persistence.py
+++ b/cython/doc/pyplots/barcode_persistence.py
@@ -1,5 +1,7 @@
import gudhi
-periodic_cc = gudhi.PeriodicCubicalComplex(perseus_file='../3d_torus.txt')
+periodic_cc = gudhi.PeriodicCubicalComplex(perseus_file=gudhi.__root_source_dir__ + \
+ '/data/bitmap/3d_torus.txt')
diag = periodic_cc.persistence()
-gudhi.plot_persistence_barcode(diag)
+plt = gudhi.plot_persistence_barcode(diag)
+plt.show()
diff --git a/cython/doc/pyplots/diagram_persistence.py b/cython/doc/pyplots/diagram_persistence.py
index b4714fe3..c2fbf801 100755
--- a/cython/doc/pyplots/diagram_persistence.py
+++ b/cython/doc/pyplots/diagram_persistence.py
@@ -1,5 +1,8 @@
import gudhi
-alpha_complex = gudhi.AlphaComplex(off_file='../tore3D_300.off')
-diag = alpha_complex.persistence()
-gudhi.plot_persistence_diagram(diag)
+rips_complex = gudhi.RipsComplex(off_file=gudhi.__root_source_dir__ + \
+ '/data/points/tore3D_1307.off', max_edge_length=0.2)
+simplex_tree = rips_complex.create_simplex_tree(max_dimension=3)
+diag = simplex_tree.persistence()
+plt = gudhi.plot_persistence_diagram(diag, band_boot=0.13)
+plt.show()
diff --git a/cython/doc/pyplots/show_palette_values.py b/cython/doc/pyplots/show_palette_values.py
index e72a55fd..fdf9645f 100755
--- a/cython/doc/pyplots/show_palette_values.py
+++ b/cython/doc/pyplots/show_palette_values.py
@@ -1,2 +1,3 @@
import gudhi
-gudhi.show_palette_values(alpha=1.0)
+plt = gudhi.show_palette_values(alpha=1.0)
+plt.show()
diff --git a/cython/doc/python3-sphinx-build b/cython/doc/python3-sphinx-build.py
index 44b94169..44b94169 100755
--- a/cython/doc/python3-sphinx-build
+++ b/cython/doc/python3-sphinx-build.py
diff --git a/cython/doc/reader_utils_ref.rst b/cython/doc/reader_utils_ref.rst
new file mode 100644
index 00000000..9c1ea6fc
--- /dev/null
+++ b/cython/doc/reader_utils_ref.rst
@@ -0,0 +1,11 @@
+=============================
+Reader utils reference manual
+=============================
+
+.. autofunction:: gudhi.read_off
+
+.. autofunction:: gudhi.read_lower_triangular_matrix_from_csv_file
+
+.. autofunction:: gudhi.read_persistence_intervals_grouped_by_dimension
+
+.. autofunction:: gudhi.read_persistence_intervals_in_dimension
diff --git a/cython/doc/rips_complex_sum.rst b/cython/doc/rips_complex_sum.rst
index 2b65fc19..5616bfa9 100644
--- a/cython/doc/rips_complex_sum.rst
+++ b/cython/doc/rips_complex_sum.rst
@@ -4,7 +4,7 @@
+----------------------------------------------------------------+------------------------------------------------------------------------+
| .. figure:: | Rips complex is a simplicial complex constructed from a one skeleton |
-| img/rips_complex_representation.png | graph. |
+| ../../doc/Rips_complex/rips_complex_representation.png | graph. |
| :figclass: align-center | |
| | The filtration value of each edge is computed from a user-given |
| Rips complex representation | distance function and is inserted until a user-given threshold |
diff --git a/cython/doc/rips_complex_user.rst b/cython/doc/rips_complex_user.rst
index f9760976..96ba9944 100644
--- a/cython/doc/rips_complex_user.rst
+++ b/cython/doc/rips_complex_user.rst
@@ -26,7 +26,7 @@ structure, and then expands the simplicial complex when required.
Vertex name correspond to the index of the point in the given range (aka. the point cloud).
.. figure::
- img/rips_complex_representation.png
+ ../../doc/Rips_complex/rips_complex_representation.png
:align: center
Rips-complex one skeleton graph representation
@@ -101,7 +101,8 @@ Finally, it is asked to display information about the Rips complex.
.. testcode::
import gudhi
- rips_complex = gudhi.RipsComplex(off_file='alphacomplexdoc.off', max_edge_length=12.0)
+ rips_complex = gudhi.RipsComplex(off_file=gudhi.__root_source_dir__ + \
+ '/data/points/alphacomplexdoc.off', max_edge_length=12.0)
simplex_tree = rips_complex.create_simplex_tree(max_dimension=1)
result_str = 'Rips complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \
repr(simplex_tree.num_simplices()) + ' simplices - ' + \
@@ -205,7 +206,8 @@ Finally, it is asked to display information about the Rips complex.
.. testcode::
import gudhi
- rips_complex = gudhi.RipsComplex(csv_file='full_square_distance_matrix.csv', max_edge_length=12.0)
+ rips_complex = gudhi.RipsComplex(csv_file=gudhi.__root_source_dir__ + \
+ '/data/distance_matrix/full_square_distance_matrix.csv', max_edge_length=12.0)
simplex_tree = rips_complex.create_simplex_tree(max_dimension=1)
result_str = 'Rips complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \
repr(simplex_tree.num_simplices()) + ' simplices - ' + \
diff --git a/cython/doc/simplex_tree_sum.rst b/cython/doc/simplex_tree_sum.rst
index 3174fb62..fb0e54c1 100644
--- a/cython/doc/simplex_tree_sum.rst
+++ b/cython/doc/simplex_tree_sum.rst
@@ -4,7 +4,7 @@
+----------------------------------------------------------------+------------------------------------------------------------------------+
| .. figure:: | The simplex tree is an efficient and flexible data structure for |
-| img/Simplex_tree_representation.png | representing general (filtered) simplicial complexes. |
+| ../../doc/Simplex_tree/Simplex_tree_representation.png | representing general (filtered) simplicial complexes. |
| :alt: Simplex tree representation | |
| :figclass: align-center | The data structure is described in |
| | :cite:`boissonnatmariasimplextreealgorithmica` |
diff --git a/cython/doc/tangential_complex_sum.rst b/cython/doc/tangential_complex_sum.rst
index 2b05bc10..72b4d7ba 100644
--- a/cython/doc/tangential_complex_sum.rst
+++ b/cython/doc/tangential_complex_sum.rst
@@ -5,10 +5,10 @@
+----------------------------------------------------------------+------------------------------------------------------------------------+
| .. figure:: | A Tangential Delaunay complex is a simplicial complex designed to |
-| img/tc_examples.png | reconstruct a :math:`k`-dimensional manifold embedded in :math:`d`- |
+| ../../doc/Tangential_complex/tc_examples.png | reconstruct a :math:`k`-dimensional manifold embedded in :math:`d`- |
| :figclass: align-center | dimensional Euclidean space. The input is a point sample coming from |
| | an unknown manifold. The running time depends only linearly on the |
-| **Tangential complex representation** | extrinsic dimension :math:`d` and exponentially on the intrinsic |
+| Tangential complex representation | extrinsic dimension :math:`d` and exponentially on the intrinsic |
| | dimension :math:`k`. |
+----------------------------------------------------------------+------------------------------------------------------------------------+
| :doc:`tangential_complex_user` | :doc:`tangential_complex_ref` |
diff --git a/cython/doc/tangential_complex_user.rst b/cython/doc/tangential_complex_user.rst
index 03f9fea6..efa6d7ce 100644
--- a/cython/doc/tangential_complex_user.rst
+++ b/cython/doc/tangential_complex_user.rst
@@ -22,7 +22,7 @@ Let us start with the description of the Tangential complex of a simple
example, with :math:`k = 1` and :math:`d = 2`. The input data is 4 points
:math:`P` located on a curve embedded in 2D.
-.. figure:: img/tc_example_01.png
+.. figure:: ../../doc/Tangential_complex/tc_example_01.png
:alt: The input
:figclass: align-center
@@ -31,7 +31,7 @@ example, with :math:`k = 1` and :math:`d = 2`. The input data is 4 points
For each point :math:`p`, estimate its tangent subspace :math:`T_p` (e.g.
using PCA).
-.. figure:: img/tc_example_02.png
+.. figure:: ../../doc/Tangential_complex/tc_example_02.png
:alt: The estimated normals
:figclass: align-center
@@ -42,7 +42,7 @@ Let us add the Voronoi diagram of the points in orange. For each point
:math:`p`, construct its star in the Delaunay triangulation of :math:`P`
restricted to :math:`T_p`.
-.. figure:: img/tc_example_03.png
+.. figure:: ../../doc/Tangential_complex/tc_example_03.png
:alt: The Voronoi diagram
:figclass: align-center
@@ -62,7 +62,7 @@ simplex is not in the star of all its vertices.
Let us take the same example.
-.. figure:: img/tc_example_07_before.png
+.. figure:: ../../doc/Tangential_complex/tc_example_07_before.png
:alt: Before
:figclass: align-center
@@ -70,7 +70,7 @@ Let us take the same example.
Let us slightly move the tangent subspace :math:`T_q`
-.. figure:: img/tc_example_07_after.png
+.. figure:: ../../doc/Tangential_complex/tc_example_07_after.png
:alt: After
:figclass: align-center
@@ -79,7 +79,7 @@ Let us slightly move the tangent subspace :math:`T_q`
Now, the star of :math:`Q` contains :math:`QP`, but the star of :math:`P` does
not contain :math:`QP`. We have an inconsistency.
-.. figure:: img/tc_example_08.png
+.. figure:: ../../doc/Tangential_complex/tc_example_08.png
:alt: After
:figclass: align-center
@@ -122,7 +122,8 @@ This example builds the Tangential complex of point set read in an OFF file.
.. testcode::
import gudhi
- tc = gudhi.TangentialComplex(off_file='alphacomplexdoc.off')
+ tc = gudhi.TangentialComplex(off_file=gudhi.__root_source_dir__ + \
+ '/data/points/alphacomplexdoc.off')
result_str = 'Tangential contains ' + repr(tc.num_simplices()) + \
' simplices - ' + repr(tc.num_vertices()) + ' vertices.'
print(result_str)
@@ -190,6 +191,6 @@ The output is:
Bibliography
============
-.. bibliography:: bibliography.bib
+.. bibliography:: ../../biblio/bibliography.bib
:filter: docnames
:style: unsrt
diff --git a/cython/doc/witness_complex_sum.rst b/cython/doc/witness_complex_sum.rst
index b65522ba..a8a126a0 100644
--- a/cython/doc/witness_complex_sum.rst
+++ b/cython/doc/witness_complex_sum.rst
@@ -3,15 +3,17 @@
:Euclidean version requires: CGAL :math:`\geq` 4.6.0 Eigen3
================================================================= =================================== ===================================
-+-----------------------------------------------------------------+----------------------------------------------------------------------+
-| .. image:: | Witness complex :math:`Wit(W,L)` is a simplicial complex defined on |
-| img/Witness_complex_representation.png | two sets of points in :math:`\mathbb{R}^D`. |
-| | |
-| | The data structure is described in |
-| | :cite:`boissonnatmariasimplextreealgorithmica`. |
-+-----------------------------------------------------------------+----------------------------------------------------------------------+
-| :doc:`witness_complex_user` | * :doc:`witness_complex_ref` |
-| | * :doc:`strong_witness_complex_ref` |
-| | * :doc:`euclidean_witness_complex_ref` |
-| | * :doc:`euclidean_strong_witness_complex_ref` |
-+-----------------------------------------------------------------+----------------------------------------------------------------------+
++-------------------------------------------------------------------+----------------------------------------------------------------------+
+| .. figure:: | Witness complex :math:`Wit(W,L)` is a simplicial complex defined on |
+| ../../doc/Witness_complex/Witness_complex_representation.png | two sets of points in :math:`\mathbb{R}^D`. |
+| :alt: Witness complex representation | |
+| :figclass: align-center | The data structure is described in |
+| | :cite:`boissonnatmariasimplextreealgorithmica`. |
+| | |
+| Witness complex representation | |
++-------------------------------------------------------------------+----------------------------------------------------------------------+
+| :doc:`witness_complex_user` | * :doc:`witness_complex_ref` |
+| | * :doc:`strong_witness_complex_ref` |
+| | * :doc:`euclidean_witness_complex_ref` |
+| | * :doc:`euclidean_strong_witness_complex_ref` |
++-------------------------------------------------------------------+----------------------------------------------------------------------+
diff --git a/cython/doc/witness_complex_user.rst b/cython/doc/witness_complex_user.rst
index aa9cbb2c..29413269 100644
--- a/cython/doc/witness_complex_user.rst
+++ b/cython/doc/witness_complex_user.rst
@@ -33,7 +33,7 @@ Both definitions can be relaxed by a real value :math:`\alpha`:
which leads to definitions of **weak relaxed witness complex** (or just relaxed witness complex for short) and
**strong relaxed witness complex** respectively.
-.. figure:: img/swit.svg
+.. figure:: ../../doc/Witness_complex/swit.svg
:alt: Strongly witnessed simplex
:figclass: align-center
@@ -126,6 +126,6 @@ Here is an example of constructing a strong witness complex filtration and compu
Bibliography
============
-.. bibliography:: bibliography.bib
+.. bibliography:: ../../biblio/bibliography.bib
:filter: docnames
:style: unsrt
diff --git a/cython/example/alpha_complex_diagram_persistence_from_off_file_example.py b/cython/example/alpha_complex_diagram_persistence_from_off_file_example.py
index adedc7d2..b4487be4 100755
--- a/cython/example/alpha_complex_diagram_persistence_from_off_file_example.py
+++ b/cython/example/alpha_complex_diagram_persistence_from_off_file_example.py
@@ -38,6 +38,7 @@ parser = argparse.ArgumentParser(description='AlphaComplex creation from '
'points from the given OFF file.')
parser.add_argument("-f", "--file", type=str, required=True)
parser.add_argument("-a", "--max_alpha_square", type=float, default=0.5)
+parser.add_argument("-b", "--band_boot", type=float, default=0.)
parser.add_argument('--no-diagram', default=False, action='store_true' , help='Flag for not to display the diagrams')
args = parser.parse_args()
@@ -63,7 +64,8 @@ with open(args.file, 'r') as f:
print(simplex_tree.betti_numbers())
if args.no_diagram == False:
- gudhi.plot_persistence_diagram(diag)
+ pplot = gudhi.plot_persistence_diagram(diag, band_boot=args.band_boot)
+ pplot.show()
else:
print(args.file, "is not a valid OFF file")
diff --git a/cython/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py b/cython/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py
index 2371c36c..e3f362dc 100755
--- a/cython/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py
+++ b/cython/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py
@@ -40,6 +40,7 @@ parser.add_argument("-f", "--file", type=str, required=True)
parser.add_argument("-a", "--max_alpha_square", type=float, required=True)
parser.add_argument("-n", "--number_of_landmarks", type=int, required=True)
parser.add_argument("-d", "--limit_dimension", type=int, required=True)
+parser.add_argument("-b", "--band_boot", type=float, default=0.)
parser.add_argument('--no-diagram', default=False, action='store_true' , help='Flag for not to display the diagrams')
args = parser.parse_args()
@@ -70,8 +71,8 @@ with open(args.file, 'r') as f:
print(simplex_tree.betti_numbers())
if args.no_diagram == False:
- gudhi.plot_persistence_diagram(diag)
-
+ pplot = gudhi.plot_persistence_diagram(diag, band_boot=args.band_boot)
+ pplot.show()
else:
print(args.file, "is not a valid OFF file")
diff --git a/cython/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py b/cython/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py
index 5748aa8a..c236d992 100755
--- a/cython/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py
+++ b/cython/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py
@@ -40,6 +40,7 @@ parser.add_argument("-f", "--file", type=str, required=True)
parser.add_argument("-a", "--max_alpha_square", type=float, required=True)
parser.add_argument("-n", "--number_of_landmarks", type=int, required=True)
parser.add_argument("-d", "--limit_dimension", type=int, required=True)
+parser.add_argument("-b", "--band_boot", type=float, default=0.)
parser.add_argument('--no-diagram', default=False, action='store_true' , help='Flag for not to display the diagrams')
args = parser.parse_args()
@@ -70,8 +71,8 @@ with open(args.file, 'r') as f:
print(simplex_tree.betti_numbers())
if args.no_diagram == False:
- gudhi.plot_persistence_diagram(diag)
-
+ pplot = gudhi.plot_persistence_diagram(diag, band_boot=args.band_boot)
+ pplot.show()
else:
print(args.file, "is not a valid OFF file")
diff --git a/cython/example/gudhi_graphical_tools_example.py b/cython/example/gudhi_graphical_tools_example.py
index bc3b16ec..ed87806b 100755
--- a/cython/example/gudhi_graphical_tools_example.py
+++ b/cython/example/gudhi_graphical_tools_example.py
@@ -44,4 +44,11 @@ gudhi.plot_persistence_barcode(persistence)
print("#####################################################################")
print("Show diagram persistence example")
-gudhi.plot_persistence_diagram(persistence)
+pplot = gudhi.plot_persistence_diagram(persistence)
+pplot.show()
+
+print("#####################################################################")
+print("Show diagram persistence example with a confidence band")
+
+pplot = gudhi.plot_persistence_diagram(persistence, band_boot=0.2)
+pplot.show()
diff --git a/cython/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py b/cython/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py
index 664eb5c4..3baebd17 100755
--- a/cython/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py
+++ b/cython/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py
@@ -39,33 +39,28 @@ parser = argparse.ArgumentParser(description='RipsComplex creation from '
parser.add_argument("-f", "--file", type=str, required=True)
parser.add_argument("-e", "--max_edge_length", type=float, default=0.5)
parser.add_argument("-d", "--max_dimension", type=int, default=1)
+parser.add_argument("-b", "--band_boot", type=float, default=0.)
parser.add_argument('--no-diagram', default=False, action='store_true' , help='Flag for not to display the diagrams')
args = parser.parse_args()
-with open(args.file, 'r') as f:
- first_line = f.readline()
- if (first_line == 'OFF\n') or (first_line == 'nOFF\n'):
- print("#####################################################################")
- print("RipsComplex creation from distance matrix read in a csv file")
-
- message = "RipsComplex with max_edge_length=" + repr(args.max_edge_length)
- print(message)
-
- rips_complex = gudhi.RipsComplex(off_file=args.file, max_edge_length=args.max_edge_length)
- simplex_tree = rips_complex.create_simplex_tree(max_dimension=args.max_dimension)
-
- message = "Number of simplices=" + repr(simplex_tree.num_simplices())
- print(message)
-
- diag = simplex_tree.persistence()
-
- print("betti_numbers()=")
- print(simplex_tree.betti_numbers())
-
- if args.no_diagram == False:
- gudhi.plot_persistence_diagram(diag)
- else:
- print(args.file, "is not a valid OFF file")
-
- f.close()
+print("#####################################################################")
+print("RipsComplex creation from distance matrix read in a csv file")
+
+message = "RipsComplex with max_edge_length=" + repr(args.max_edge_length)
+print(message)
+
+rips_complex = gudhi.RipsComplex(csv_file=args.file, max_edge_length=args.max_edge_length)
+simplex_tree = rips_complex.create_simplex_tree(max_dimension=args.max_dimension)
+
+message = "Number of simplices=" + repr(simplex_tree.num_simplices())
+print(message)
+
+diag = simplex_tree.persistence()
+
+print("betti_numbers()=")
+print(simplex_tree.betti_numbers())
+
+if args.no_diagram == False:
+ pplot = gudhi.plot_persistence_diagram(diag, band_boot=args.band_boot)
+ pplot.show()
diff --git a/cython/example/rips_complex_diagram_persistence_from_off_file_example.py b/cython/example/rips_complex_diagram_persistence_from_off_file_example.py
index 4c21b98e..5951eedf 100755
--- a/cython/example/rips_complex_diagram_persistence_from_off_file_example.py
+++ b/cython/example/rips_complex_diagram_persistence_from_off_file_example.py
@@ -39,6 +39,7 @@ parser = argparse.ArgumentParser(description='RipsComplex creation from '
parser.add_argument("-f", "--file", type=str, required=True)
parser.add_argument("-e", "--max_edge_length", type=float, default=0.5)
parser.add_argument("-d", "--max_dimension", type=int, default=1)
+parser.add_argument("-b", "--band_boot", type=float, default=0.)
parser.add_argument('--no-diagram', default=False, action='store_true' , help='Flag for not to display the diagrams')
args = parser.parse_args()
@@ -64,7 +65,8 @@ with open(args.file, 'r') as f:
print(simplex_tree.betti_numbers())
if args.no_diagram == False:
- gudhi.plot_persistence_diagram(diag)
+ pplot = gudhi.plot_persistence_diagram(diag, band_boot=args.band_boot)
+ pplot.show()
else:
print(args.file, "is not a valid OFF file")
diff --git a/cython/example/rips_persistence_diagram.py b/cython/example/rips_persistence_diagram.py
index 4e5cd2c8..9bfea41c 100755
--- a/cython/example/rips_persistence_diagram.py
+++ b/cython/example/rips_persistence_diagram.py
@@ -39,4 +39,5 @@ simplex_tree = rips.create_simplex_tree(max_dimension=1)
diag = simplex_tree.persistence(homology_coeff_field=2, min_persistence=0)
print("diag=", diag)
-gudhi.plot_persistence_diagram(diag)
+pplot = gudhi.plot_persistence_diagram(diag)
+pplot.show()
diff --git a/cython/example/simplex_tree_example.py b/cython/example/simplex_tree_example.py
index 3af20fcf..831d9da8 100755
--- a/cython/example/simplex_tree_example.py
+++ b/cython/example/simplex_tree_example.py
@@ -52,7 +52,6 @@ else:
st.set_dimension(3)
print("dimension=", st.dimension())
-st.set_filtration(4.0)
st.initialize_filtration()
print("filtration=", st.get_filtration())
print("filtration[1, 2]=", st.filtration([1, 2]))
diff --git a/cython/example/tangential_complex_plain_homology_from_off_file_example.py b/cython/example/tangential_complex_plain_homology_from_off_file_example.py
index 4845eb47..6145e7f2 100755
--- a/cython/example/tangential_complex_plain_homology_from_off_file_example.py
+++ b/cython/example/tangential_complex_plain_homology_from_off_file_example.py
@@ -37,6 +37,7 @@ parser = argparse.ArgumentParser(description='TangentialComplex creation from '
'- Constructs a tangential complex with the '
'points from the given OFF file')
parser.add_argument("-f", "--file", type=str, required=True)
+parser.add_argument("-b", "--band_boot", type=float, default=0.)
parser.add_argument('--no-diagram', default=False, action='store_true' , help='Flag for not to display the diagrams')
args = parser.parse_args()
@@ -59,7 +60,8 @@ with open(args.file, 'r') as f:
print(st.betti_numbers())
if args.no_diagram == False:
- gudhi.plot_persistence_diagram(diag)
+ pplot = gudhi.plot_persistence_diagram(diag, band_boot=args.band_boot)
+ pplot.show()
else:
print(args.file, "is not a valid OFF file")
diff --git a/cython/gudhi.pyx.in b/cython/gudhi.pyx.in
index 34d7c3b5..a8dd9f80 100644
--- a/cython/gudhi.pyx.in
+++ b/cython/gudhi.pyx.in
@@ -23,15 +23,19 @@
__author__ = "Vincent Rouvreau"
__copyright__ = "Copyright (C) 2016 INRIA"
__license__ = "GPL v3"
+__version__ = "@GUDHI_VERSION@"
+# This variable is used by doctest to find files
+__root_source_dir__ = "@CMAKE_SOURCE_DIR@"
-include "cython/off_reader.pyx"
-include "cython/simplex_tree.pyx"
-include "cython/rips_complex.pyx"
-include "cython/cubical_complex.pyx"
-include "cython/periodic_cubical_complex.pyx"
-include "cython/persistence_graphical_tools.py"
-include "cython/witness_complex.pyx"
-include "cython/strong_witness_complex.pyx"
+include '@CMAKE_CURRENT_SOURCE_DIR@/cython/off_reader.pyx'
+include '@CMAKE_CURRENT_SOURCE_DIR@/cython/simplex_tree.pyx'
+include '@CMAKE_CURRENT_SOURCE_DIR@/cython/rips_complex.pyx'
+include '@CMAKE_CURRENT_SOURCE_DIR@/cython/cubical_complex.pyx'
+include '@CMAKE_CURRENT_SOURCE_DIR@/cython/periodic_cubical_complex.pyx'
+include '@CMAKE_CURRENT_SOURCE_DIR@/cython/persistence_graphical_tools.py'
+include '@CMAKE_CURRENT_SOURCE_DIR@/cython/reader_utils.pyx'
+include '@CMAKE_CURRENT_SOURCE_DIR@/cython/witness_complex.pyx'
+include '@CMAKE_CURRENT_SOURCE_DIR@/cython/strong_witness_complex.pyx'
@GUDHI_CYTHON_ALPHA_COMPLEX@
@GUDHI_CYTHON_EUCLIDEAN_WITNESS_COMPLEX@
@GUDHI_CYTHON_SUBSAMPLING@
diff --git a/cython/include/Reader_utils_interface.h b/cython/include/Reader_utils_interface.h
new file mode 100644
index 00000000..8ec34f61
--- /dev/null
+++ b/cython/include/Reader_utils_interface.h
@@ -0,0 +1,56 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2017 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef INCLUDE_READER_UTILS_INTERFACE_H_
+#define INCLUDE_READER_UTILS_INTERFACE_H_
+
+#include <gudhi/reader_utils.h>
+
+#include <iostream>
+#include <vector>
+#include <string>
+#include <map>
+#include <utility> // for pair<>
+
+namespace Gudhi {
+
+// Redefine functions with a different name so that the original name can be used in the Python version.
+std::vector<std::vector<double>> read_matrix_from_csv_file(const std::string& filename,
+ const char separator = ';') {
+ return read_lower_triangular_matrix_from_csv_file<double>(filename, separator);
+}
+
+inline std::map<int, std::vector<std::pair<double, double>>>
+ read_pers_intervals_grouped_by_dimension(std::string const& filename) {
+ return read_persistence_intervals_grouped_by_dimension(filename);
+}
+
+inline std::vector<std::pair<double, double>>
+ read_pers_intervals_in_dimension(std::string const& filename, int only_this_dim = -1) {
+ return read_persistence_intervals_in_dimension(filename, only_this_dim);
+}
+
+
+} // namespace Gudhi
+
+
+#endif // INCLUDE_READER_UTILS_INTERFACE_H_
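
These wrappers back the readers newly exposed on the Python side (exercised in cython/test/test_reader_utils.py further down in this patch). A short sketch of the corresponding Python calls, with hypothetical file names:

import gudhi

# Lower-triangular distance matrix, one row per line, ';' separated by default.
matrix = gudhi.read_lower_triangular_matrix_from_csv_file(
    csv_file='distance_matrix.csv', separator=';')

# Persistence intervals, either as a flat list or restricted to one dimension...
intervals = gudhi.read_persistence_intervals_in_dimension(
    persistence_file='diagram.pers')
dim1_intervals = gudhi.read_persistence_intervals_in_dimension(
    persistence_file='diagram.pers', only_this_dim=1)

# ...or grouped into a {dimension: [(birth, death), ...]} dictionary.
grouped = gudhi.read_persistence_intervals_grouped_by_dimension(
    persistence_file='diagram.pers')
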
diff --git a/cython/include/Rips_complex_interface.h b/cython/include/Rips_complex_interface.h
index 6d813f4a..02985727 100644
--- a/cython/include/Rips_complex_interface.h
+++ b/cython/include/Rips_complex_interface.h
@@ -66,11 +66,15 @@ class Rips_complex_interface {
} else {
// Rips construction where values is a distance matrix
Distance_matrix distances =
- read_lower_triangular_matrix_from_csv_file<Simplex_tree_interface<>::Filtration_value>(file_name);
+ Gudhi::read_lower_triangular_matrix_from_csv_file<Simplex_tree_interface<>::Filtration_value>(file_name);
rips_complex_ = new Rips_complex<Simplex_tree_interface<>::Filtration_value>(distances, threshold);
}
}
+ ~Rips_complex_interface() {
+ delete rips_complex_;
+ }
+
void create_simplex_tree(Simplex_tree_interface<>* simplex_tree, int dim_max) {
rips_complex_->create_complex(*simplex_tree, dim_max);
simplex_tree->initialize_filtration();
diff --git a/cython/include/Simplex_tree_interface.h b/cython/include/Simplex_tree_interface.h
index 45ce1916..09e7e992 100644
--- a/cython/include/Simplex_tree_interface.h
+++ b/cython/include/Simplex_tree_interface.h
@@ -70,14 +70,14 @@ class Simplex_tree_interface : public Simplex_tree<SimplexTreeOptions> {
}
// Do not interface this function, only used in strong witness interface for complex creation
- bool insert_simplex(const std::vector<std::size_t>& complex, Filtration_value filtration = 0) {
- Insertion_result result = Base::insert_simplex(complex, filtration);
+ bool insert_simplex(const std::vector<std::size_t>& simplex, Filtration_value filtration = 0) {
+ Insertion_result result = Base::insert_simplex(simplex, filtration);
return (result.second);
}
// Do not interface this function, only used in strong witness interface for complex creation
- bool insert_simplex_and_subfaces(const std::vector<std::size_t>& complex, Filtration_value filtration = 0) {
- Insertion_result result = Base::insert_simplex_and_subfaces(complex, filtration);
+ bool insert_simplex_and_subfaces(const std::vector<std::size_t>& simplex, Filtration_value filtration = 0) {
+ Insertion_result result = Base::insert_simplex_and_subfaces(simplex, filtration);
return (result.second);
}
diff --git a/cython/cythonize_gudhi.py.in b/cython/setup.py.in
index 5a97e9f3..fefa36bb 100644
--- a/cython/cythonize_gudhi.py.in
+++ b/cython/setup.py.in
@@ -29,20 +29,21 @@ __license__ = "GPL v3"
gudhi = Extension(
"gudhi",
- sources = ['gudhi.pyx',],
+ sources = ['@CMAKE_CURRENT_BINARY_DIR@/gudhi.pyx',],
language = 'c++',
extra_compile_args=[@GUDHI_CYTHON_EXTRA_COMPILE_ARGS@],
extra_link_args=[@GUDHI_CYTHON_EXTRA_LINK_ARGS@],
libraries=[@GUDHI_CYTHON_LIBRARIES@],
library_dirs=[@GUDHI_CYTHON_LIBRARY_DIRS@],
include_dirs = [@GUDHI_CYTHON_INCLUDE_DIRS@],
+ runtime_library_dirs=[@GUDHI_CYTHON_RUNTIME_LIBRARY_DIRS@],
)
setup(
name = 'gudhi',
author='Vincent Rouvreau',
author_email='gudhi-contact@lists.gforge.inria.fr',
- version='0.1.0',
+ version='@GUDHI_VERSION@',
url='http://gudhi.gforge.inria.fr/',
ext_modules = cythonize(gudhi),
)
diff --git a/cython/test/test_cubical_complex.py b/cython/test/test_cubical_complex.py
index 2e281ee4..9a365823 100755
--- a/cython/test/test_cubical_complex.py
+++ b/cython/test/test_cubical_complex.py
@@ -67,7 +67,7 @@ def test_dimension_constructor():
top_dimensional_cells = [1,2,3,4,5,6,7,8,9])
assert cub.__is_defined() == True
assert cub.__is_persistence_defined() == False
- assert cub.persistence() == [(1, (0.0, 100.0)), (0, (0.0, 1.8446744073709552e+19))]
+ assert cub.persistence() == [(1, (0.0, 100.0)), (0, (0.0, float('inf')))]
assert cub.__is_persistence_defined() == True
assert cub.betti_numbers() == [1, 0]
assert cub.persistent_betti_numbers(0, 1000) == [0, 0]
@@ -80,7 +80,7 @@ def test_dimension_constructor():
cub = CubicalComplex(perseus_file='CubicalOneSphere.txt')
assert cub.__is_defined() == True
assert cub.__is_persistence_defined() == False
- assert cub.persistence() == [(1, (0.0, 100.0)), (0, (0.0, 1.8446744073709552e+19))]
+ assert cub.persistence() == [(1, (0.0, 100.0)), (0, (0.0, float('inf')))]
assert cub.__is_persistence_defined() == True
assert cub.betti_numbers() == [1, 0, 0]
assert cub.persistent_betti_numbers(0, 1000) == [1, 0, 0]
diff --git a/cython/test/test_reader_utils.py b/cython/test/test_reader_utils.py
new file mode 100755
index 00000000..25591fb3
--- /dev/null
+++ b/cython/test/test_reader_utils.py
@@ -0,0 +1,88 @@
+import gudhi
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2017 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2017 INRIA"
+__license__ = "GPL v3"
+
+
+def test_non_existing_csv_file():
+ # Try to open a non existing file
+ matrix = gudhi.read_lower_triangular_matrix_from_csv_file(csv_file='pouetpouettralala.toubiloubabdou')
+ assert matrix == []
+
+def test_full_square_distance_matrix_csv_file():
+ # Create test file
+ test_file = open('full_square_distance_matrix.csv', 'w')
+ test_file.write('0;1;2;3;\n1;0;4;5;\n2;4;0;6;\n3;5;6;0;')
+ test_file.close()
+ matrix = gudhi.read_lower_triangular_matrix_from_csv_file(csv_file="full_square_distance_matrix.csv")
+ assert matrix == [[], [1.0], [2.0, 4.0], [3.0, 5.0, 6.0]]
+
+def test_lower_triangular_distance_matrix_csv_file():
+ # Create test file
+ test_file = open('lower_triangular_distance_matrix.csv', 'w')
+ test_file.write('\n1,\n2,3,\n4,5,6,\n7,8,9,10,')
+ test_file.close()
+ matrix = gudhi.read_lower_triangular_matrix_from_csv_file(csv_file="lower_triangular_distance_matrix.csv", separator=",")
+ assert matrix == [[], [1.0], [2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9.0, 10.0]]
+
+def test_non_existing_persistence_file():
+ # Try to open a non existing file
+ persistence = gudhi.read_persistence_intervals_grouped_by_dimension(persistence_file='pouetpouettralala.toubiloubabdou')
+ assert persistence == []
+ persistence = gudhi.read_persistence_intervals_in_dimension(persistence_file='pouetpouettralala.toubiloubabdou', only_this_dim=1)
+ assert persistence == []
+
+def test_read_persistence_intervals_without_dimension():
+ # Create test file
+ test_file = open('persistence_intervals_without_dimension.pers', 'w')
+ test_file.write('# Simple persistence diagram without dimension\n2.7 3.7\n9.6 14.\n34.2 34.974\n3. inf')
+ test_file.close()
+ persistence = gudhi.read_persistence_intervals_in_dimension(persistence_file='persistence_intervals_without_dimension.pers')
+ assert persistence == [(2.7, 3.7), (9.6, 14.), (34.2, 34.974), (3., float('Inf'))]
+ persistence = gudhi.read_persistence_intervals_in_dimension(persistence_file='persistence_intervals_without_dimension.pers', only_this_dim=0)
+ assert persistence == []
+ persistence = gudhi.read_persistence_intervals_in_dimension(persistence_file='persistence_intervals_without_dimension.pers', only_this_dim=1)
+ assert persistence == []
+ persistence = gudhi.read_persistence_intervals_grouped_by_dimension(persistence_file='persistence_intervals_without_dimension.pers')
+ assert persistence == {-1: [(2.7, 3.7), (9.6, 14.0), (34.2, 34.974), (3.0, float('Inf'))]}
+
+def test_read_persistence_intervals_with_dimension():
+ # Create test file
+ test_file = open('persistence_intervals_with_dimension.pers', 'w')
+ test_file.write('# Simple persistence diagram with dimension\n0 2.7 3.7\n1 9.6 14.\n3 34.2 34.974\n1 3. inf')
+ test_file.close()
+ persistence = gudhi.read_persistence_intervals_in_dimension(persistence_file='persistence_intervals_with_dimension.pers')
+ assert persistence == [(2.7, 3.7), (9.6, 14.), (34.2, 34.974), (3., float('Inf'))]
+ persistence = gudhi.read_persistence_intervals_in_dimension(persistence_file='persistence_intervals_with_dimension.pers', only_this_dim=0)
+ assert persistence == [(2.7, 3.7)]
+ persistence = gudhi.read_persistence_intervals_in_dimension(persistence_file='persistence_intervals_with_dimension.pers', only_this_dim=1)
+ assert persistence == [(9.6, 14.), (3., float('Inf'))]
+ persistence = gudhi.read_persistence_intervals_in_dimension(persistence_file='persistence_intervals_with_dimension.pers', only_this_dim=2)
+ assert persistence == []
+ persistence = gudhi.read_persistence_intervals_in_dimension(persistence_file='persistence_intervals_with_dimension.pers', only_this_dim=3)
+ assert persistence == [(34.2, 34.974)]
+ persistence = gudhi.read_persistence_intervals_grouped_by_dimension(persistence_file='persistence_intervals_with_dimension.pers')
+ assert persistence == {0: [(2.7, 3.7)], 1: [(9.6, 14.0), (3.0, float('Inf'))], 3: [(34.2, 34.974)]}
diff --git a/cython/test/test_simplex_tree.py b/cython/test/test_simplex_tree.py
index 3ae537e3..4d452d7d 100755
--- a/cython/test/test_simplex_tree.py
+++ b/cython/test/test_simplex_tree.py
@@ -53,7 +53,6 @@ def test_insertion():
assert st.find([2, 3]) == False
# filtration test
- st.set_filtration(5.0)
st.initialize_filtration()
assert st.filtration([0, 1, 2]) == 4.0
assert st.filtration([0, 2]) == 4.0
diff --git a/data/persistence_diagram/first.pers b/data/persistence_diagram/first.pers
new file mode 100644
index 00000000..193c60ba
--- /dev/null
+++ b/data/persistence_diagram/first.pers
@@ -0,0 +1,5 @@
+# Simple persistence diagram
+2.7 3.7
+9.6 14.
+34.2 34.974
+3. inf
\ No newline at end of file
diff --git a/data/persistence_diagram/second.pers b/data/persistence_diagram/second.pers
new file mode 100644
index 00000000..6292fde6
--- /dev/null
+++ b/data/persistence_diagram/second.pers
@@ -0,0 +1,3 @@
+2.8 4.45
+9.5 14.1
+3.2 inf
\ No newline at end of file
diff --git a/data/points/SO3.COPYRIGHT b/data/points/SO3.COPYRIGHT
index 26bbb0ad..a9383f8f 100644
--- a/data/points/SO3.COPYRIGHT
+++ b/data/points/SO3.COPYRIGHT
@@ -7,3 +7,5 @@ This software is an implementation of the two following papers:
J. C. Mitchell. Discrete Uniform Sampling of Rotation Groups Using Orthogonal Images. SIAM Journal of Scientific Computing, 30(1):525-547, 2007.
A. Yershova, S. Jain, S. M. LaValle, and J. C. Mitchell. Generating Uniform Incremental Grids on SO(3) Using the Hopf Fibration International Journal of Robotics Research, November 2009.
+
+This software is copyrighted under the GNU General Public License, version 2, as published by the Free Software Foundation.
diff --git a/data/points/generator/CMakeLists.txt b/data/points/generator/CMakeLists.txt
deleted file mode 100644
index 88d377a7..00000000
--- a/data/points/generator/CMakeLists.txt
+++ /dev/null
@@ -1,15 +0,0 @@
-cmake_minimum_required(VERSION 2.6)
-project(data_points_generator)
-
-if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0)
- add_executable ( data_points_generator hypergenerator.cpp )
- target_link_libraries(data_points_generator ${Boost_SYSTEM_LIBRARY})
- add_test(NAME data_points_generator_on_sphere_1000_3_15.2 COMMAND $<TARGET_FILE:data_points_generator>
- "on" "sphere" "onSphere.off" "1000" "3" "15.2")
- add_test(NAME data_points_generator_in_sphere_100_2 COMMAND $<TARGET_FILE:data_points_generator>
- "in" "sphere" "inSphere.off" "100" "2")
-
- # on cube is not available in CGAL
- add_test(NAME data_points_generator_in_cube_10000_3_5.8 COMMAND $<TARGET_FILE:data_points_generator>
- "in" "cube" "inCube.off" "10000" "3" "5.8")
-endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0)
diff --git a/data/points/generator/README b/data/points/generator/README
index 951bcfe1..3183a51f 100644
--- a/data/points/generator/README
+++ b/data/points/generator/README
@@ -1,32 +1,3 @@
-=========================== C++ generators =====================================
-
-To build the C++ generators, run in a Terminal:
-
-cd /path-to-gudhi/
-cmake .
-cd /path-to-data-generator/
-make
-
-======================= data_points_generator ==================================
-
-Example of use :
-
-*** Hyper sphere|cube generator
-
-./data_points_generator on sphere onSphere.off 1000 3 15.2
-
- => generates a onSphere.off file with 1000 points randomized on a sphere of dimension 3 and radius 15.2
-
-./data_points_generator in sphere inSphere.off 100 2
-
- => generates a inSphere.off file with 100 points randomized in a sphere of dimension 2 (circle) and radius 1.0 (default)
-
-./data_points_generator in cube inCube.off 10000 3 5.8
-
- => generates a inCube.off file with 10000 points randomized in a cube of dimension 3 and side 5.8
-
-!! Warning: hypegenerator on cube is not available !!
-
===================== aurelien_alvarez_surfaces_in_R8 ==========================
This generator is written in Python.
diff --git a/doc/Alpha_complex/Intro_alpha_complex.h b/doc/Alpha_complex/Intro_alpha_complex.h
index 69959fc5..cf1a946a 100644
--- a/doc/Alpha_complex/Intro_alpha_complex.h
+++ b/doc/Alpha_complex/Intro_alpha_complex.h
@@ -89,31 +89,64 @@ namespace alpha_complex {
* \image html "alpha_complex_doc.png" "Simplicial complex structure construction example"
*
* \subsection filtrationcomputation Filtration value computation algorithm
- *
- * \f{algorithm}{
- * \caption{Filtration value computation algorithm}\label{alpha}
- * \begin{algorithmic}
- * \For{i : dimension $\rightarrow$ 0}
- * \ForAll{$\sigma$ of dimension i}
- * \If {filtration($\sigma$) is NaN}
- * \State filtration($\sigma$) = $\alpha^2(\sigma)$
- * \EndIf
- * \ForAll{$\tau$ face of $\sigma$} \Comment{propagate alpha filtration value}
- * \If {filtration($\tau$) is not NaN}
- * \State filtration($\tau$) = min (filtration($\tau$), filtration($\sigma$))
- * \Else
- * \If {$\tau$ is not Gabriel for $\sigma$}
- * \State filtration($\tau$) = filtration($\sigma$)
- * \EndIf
- * \EndIf
- * \EndFor
- * \EndFor
- * \EndFor
- * \State make\_filtration\_non\_decreasing()
- * \State prune\_above\_filtration()
- * \end{algorithmic}
- * \f}
- *
+ *
+ *
+ *
+ * <ul>
+ * <li style="list-style-type: none;">\f$ \textbf{for } i : dimension \rightarrow 0 \textbf{ do} \f$
+ * <ul>
+ * <li style="list-style-type: none;">\f$\textbf{for all } \sigma \textrm{ of dimension } i \f$
+ * <ul>
+ * <li style="list-style-type: none;">\f$\textbf{if } filtration( \sigma ) \textrm{ is NaN} \textbf{ then} \f$
+ * <ul>
+ * <li style="list-style-type: none;">\f$ filtration( \sigma ) = \alpha^2( \sigma ) \f$
+ * </li>
+ * </ul>
+ * </li>
+ * <li style="list-style-type: none;">\f$\textbf{end if}\f$
+ * </li>
+ * <li style="list-style-type: none;">\f$\textbf{for all } \tau \textrm{ face of } \sigma \textbf{ do} \f$
+ * &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;// propagate alpha filtration value
+ * <ul>
+ * <li style="list-style-type: none;">\f$\textbf{if } filtration( \tau ) \textrm{ is not NaN} \textbf{ then} \f$
+ * <ul>
+ * <li style="list-style-type: none;">\f$ filtration( \tau ) = min ( filtration( \tau ), filtration( \sigma ) ) \f$
+ * </li>
+ * </ul>
+ * </li>
+ * <li style="list-style-type: none;">\f$\textbf{else}\f$
+ * <ul>
+ * <li style="list-style-type: none;">\f$\textbf{if } \tau \textrm{ is not Gabriel for } \sigma \textbf{ then} \f$
+ * <ul>
+ * <li style="list-style-type: none;">\f$ filtration( \tau ) = filtration( \sigma ) \f$
+ * </li>
+ * </ul>
+ * </li>
+ * <li style="list-style-type: none;">\f$\textbf{end if}\f$
+ * </li>
+ * </ul>
+ * </li>
+ * <li style="list-style-type: none;">\f$\textbf{end if}\f$
+ * </li>
+ * </ul>
+ * </li>
+ * <li style="list-style-type: none;">\f$\textbf{end for}\f$
+ * </li>
+ * </ul>
+ * </li>
+ * <li style="list-style-type: none;">\f$\textbf{end for}\f$
+ * </li>
+ * </ul>
+ * </li>
+ * <li style="list-style-type: none;">\f$\textbf{end for}\f$
+ * </li>
+ * <li style="list-style-type: none;">\f$make\_filtration\_non\_decreasing()\f$
+ * </li>
+ * <li style="list-style-type: none;">\f$prune\_above\_filtration()\f$
+ * </li>
+ * </ul>
+ *
+ *
* \subsubsection dimension2 Dimension 2
*
* From the example above, it means the algorithm looks into each triangle ([0,1,2], [0,2,4], [1,2,3], ...),
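
The HTML list above transcribes the filtration-computation algorithm that the removed LaTeX block used to typeset. As a reading aid, here is the same loop as Python-style pseudocode; st stands for a simplex-tree-like object, and the geometric predicates alpha_squared and is_gabriel are placeholders supplied by the caller, not GUDHI API:

import math

def compute_alpha_filtration(st, dimension, alpha_squared, is_gabriel):
    for i in range(dimension, -1, -1):
        for sigma in st.simplices_of_dimension(i):
            if math.isnan(st.filtration(sigma)):
                st.assign_filtration(sigma, alpha_squared(sigma))
            for tau in st.faces(sigma):  # propagate the alpha filtration value
                if not math.isnan(st.filtration(tau)):
                    st.assign_filtration(
                        tau, min(st.filtration(tau), st.filtration(sigma)))
                elif not is_gabriel(tau, sigma):
                    st.assign_filtration(tau, st.filtration(sigma))
    st.make_filtration_non_decreasing()
    st.prune_above_filtration()
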
diff --git a/doc/Persistent_cohomology/Intro_persistent_cohomology.h b/doc/Persistent_cohomology/Intro_persistent_cohomology.h
index e17e5926..6400116b 100644
--- a/doc/Persistent_cohomology/Intro_persistent_cohomology.h
+++ b/doc/Persistent_cohomology/Intro_persistent_cohomology.h
@@ -189,10 +189,10 @@ and a weights file.
\code $> ./weighted_alpha_complex_3d_persistence ../../data/points/tore3D_300.off
../../data/points/tore3D_300.weights 2 0.45 \endcode
\code Simplex_tree dim: 3
-2 -0 0 inf
-2 1 0.0682162 1.0001
-2 1 0.0934117 1.00003
-2 2 0.56444 1.03938 \endcode
+2 0 -1 inf
+2 1 -0.931784 0.000103311
+2 1 -0.906588 2.60165e-05
+2 2 -0.43556 0.0393798 \endcode
\li <a href="_persistent_cohomology_2alpha_complex_persistence_8cpp-example.html">
Persistent_cohomology/alpha_complex_persistence.cpp</a> computes the persistent homology with
@@ -208,7 +208,8 @@ Simplex_tree dim: 3
\li <a href="_persistent_cohomology_2periodic_alpha_complex_3d_persistence_8cpp-example.html">
Persistent_cohomology/periodic_alpha_complex_3d_persistence.cpp</a> computes the persistent homology with
\f$\mathbb{Z}/2\mathbb{Z}\f$ coefficients of the periodic alpha complex on points sampling from an OFF file.
-\code $> ./periodic_alpha_complex_3d_persistence ../../data/points/grid_10_10_10_in_0_1.off 3 1.0 \endcode
+\code $> ./periodic_alpha_complex_3d_persistence ../../data/points/grid_10_10_10_in_0_1.off
+../../data/points/iso_cuboid_3_in_0_1.txt 3 1.0 \endcode
\code Periodic Delaunay computed.
Simplex_tree dim: 3
3 0 0 inf
diff --git a/doc/Simplex_tree/Intro_simplex_tree.h b/doc/Simplex_tree/Intro_simplex_tree.h
index 940dd694..f5b72ff6 100644
--- a/doc/Simplex_tree/Intro_simplex_tree.h
+++ b/doc/Simplex_tree/Intro_simplex_tree.h
@@ -32,7 +32,7 @@ namespace Gudhi {
* \author Cl&eacute;ment Maria
*
* A simplicial complex \f$\mathbf{K}\f$ on a set of vertices \f$V = \{1, \cdots ,|V|\}\f$ is a collection of
- * implices \f$\{\sigma\}\f$, \f$\sigma \subseteq V\f$ such that
+ * simplices \f$\{\sigma\}\f$, \f$\sigma \subseteq V\f$ such that
* \f$\tau \subseteq \sigma \in \mathbf{K} \rightarrow \tau \in \mathbf{K}\f$. The dimension \f$n=|\sigma|-1\f$ of
* \f$\sigma\f$ is its number of elements minus \f$1\f$.
*
diff --git a/doc/Spatial_searching/Intro_spatial_searching.h b/doc/Spatial_searching/Intro_spatial_searching.h
index 23705378..1ee5e92e 100644
--- a/doc/Spatial_searching/Intro_spatial_searching.h
+++ b/doc/Spatial_searching/Intro_spatial_searching.h
@@ -46,7 +46,7 @@ namespace spatial_searching {
*
* \section spatial_searching_examples Example
*
- * This example generates 500 random points, then performs queries for nearest and farthest points using different methods.
+ * This example generates 500 random points, then performs all-near-neighbors searches, and queries for nearest and furthest neighbors using different methods.
*
* \include Spatial_searching/example_spatial_searching.cpp
*
diff --git a/doc/common/MathJax.COPYRIGHT b/doc/common/MathJax.COPYRIGHT
new file mode 100644
index 00000000..077d6dc5
--- /dev/null
+++ b/doc/common/MathJax.COPYRIGHT
@@ -0,0 +1,55 @@
+Apache License
+
+Version 2.0, January 2004
+
+http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
+
+"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
+
+"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
+
+"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
+
+"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
+
+"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
+
+"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
+
+"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
+
+"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
+
+"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
+
+ You must give any other recipients of the Work or Derivative Works a copy of this License; and
+ You must cause any modified files to carry prominent notices stating that You changed the files; and
+ You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
+ If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
diff --git a/doc/common/MathJax.js b/doc/common/MathJax.js
new file mode 100644
index 00000000..35e1994e
--- /dev/null
+++ b/doc/common/MathJax.js
@@ -0,0 +1,53 @@
+(function () {
+ var newMathJax = 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.1/MathJax.js';
+ var oldMathJax = 'cdn.mathjax.org/mathjax/latest/MathJax.js';
+
+ var replaceScript = function (script, src) {
+ //
+ // Make redirected script
+ //
+ var newScript = document.createElement('script');
+ newScript.src = newMathJax + src.replace(/.*?(\?|$)/, '$1');
+ //
+ // Move onload and onerror handlers to new script
+ //
+ newScript.onload = script.onload;
+ newScript.onerror = script.onerror;
+ script.onload = script.onerror = null;
+ //
+ // Move any content (old-style configuration scripts)
+ //
+ while (script.firstChild) newScript.appendChild(script.firstChild);
+ //
+ // Copy script id
+ //
+ if (script.id != null) newScript.id = script.id;
+ //
+ // Replace original script with new one
+ //
+ script.parentNode.replaceChild(newScript, script);
+ //
+ // Issue a console warning
+ //
+ console.warn('WARNING: cdn.mathjax.org has been retired. Check https://www.mathjax.org/cdn-shutting-down/ for migration tips.')
+ }
+
+ if (document.currentScript) {
+ var script = document.currentScript;
+ replaceScript(script, script.src);
+ } else {
+ //
+ // Look for current script by searching for one with the right source
+ //
+ var n = oldMathJax.length;
+ var scripts = document.getElementsByTagName('script');
+ for (var i = 0; i < scripts.length; i++) {
+ var script = scripts[i];
+ var src = (script.src || '').replace(/.*?:\/\//,'');
+ if (src.substr(0, n) === oldMathJax) {
+ replaceScript(script, src);
+ break;
+ }
+ }
+ }
+})();
\ No newline at end of file
diff --git a/doc/common/file_formats.h b/doc/common/file_formats.h
new file mode 100644
index 00000000..d715aa4d
--- /dev/null
+++ b/doc/common/file_formats.h
@@ -0,0 +1,59 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+* (Geometric Understanding in Higher Dimensions) is a generic C++
+* library for computational topology.
+*
+* Author(s): Clément Jamin
+*
+* Copyright (C) 2017 INRIA
+*
+* This program is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+*
+* This program is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with this program. If not, see <http://www.gnu.org/licenses/>.
+*/
+
+#ifndef DOC_COMMON_FILE_FORMAT_H_
+#define DOC_COMMON_FILE_FORMAT_H_
+
+namespace Gudhi {
+
+/*! \page fileformats File formats
+
+ \tableofcontents
+
+ \section FileFormatsPers Persistence Diagram
+
+ Such a file, whose extension is usually `.pers`, contains a list of persistence intervals.<br>
+ Lines starting with `#` are ignored (comments).<br>
+ Other lines might contain 2, 3 or 4 values (the number of values on each line must be the same for all lines):
+ \verbatim
+ [[field] dimension] birth death
+ \endverbatim
+
+ Here is a simple sample file:
+ \verbatim
+ # Persistence diagram example
+ 2 2.7 3.7
+ 2 9.6 14.
+ # Some comments
+ 3 34.2 34.974
+ 4 3. inf
+ \endverbatim
+
+ Other sample files can be found in the `data/persistence_diagram` folder.
+
+ Such files can be generated with `Gudhi::persistent_cohomology::Persistent_cohomology::output_diagram()` and read with
+ `Gudhi::read_persistence_intervals_and_dimension()`, `Gudhi::read_persistence_intervals_grouped_by_dimension()` or
+ `Gudhi::read_persistence_intervals_in_dimension()`.
+*/
+} // namespace Gudhi
+
+#endif // DOC_COMMON_FILE_FORMAT_H_
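
For illustration only, a hand-rolled reader for the format documented above (blank lines and '#' comments ignored, an optional [[field] dimension] prefix, then birth and death, with 'inf' allowed); in practice the Gudhi::read_persistence_intervals_* functions mentioned on the page do this work:

def read_pers_file(path):
    intervals = []
    with open(path) as pers_file:
        for line in pers_file:
            line = line.strip()
            if not line or line.startswith('#'):
                continue  # blank lines and comments are skipped
            values = line.split()
            # The last two values are birth and death ('inf' parses as float);
            # anything before them is the optional [[field] dimension] prefix.
            birth, death = float(values[-2]), float(values[-1])
            prefix = [int(v) for v in values[:-2]]
            intervals.append((prefix, birth, death))
    return intervals

On the sample above, the line '2 9.6 14.' yields ([2], 9.6, 14.0).
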
diff --git a/doc/common/main_page.h b/doc/common/main_page.h
index 2e45e5aa..1a7994a5 100644
--- a/doc/common/main_page.h
+++ b/doc/common/main_page.h
@@ -1,11 +1,10 @@
-/*! \mainpage
+/*! \mainpage The C++ library
* \tableofcontents
* \image html "Gudhi_banner.png" "" width=20cm
*
* \section Introduction Introduction
* The GUDHI library (Geometry Understanding in Higher Dimensions) is a generic open source
- * <a class="el" target="_blank" href="http://gudhi.gforge.inria.fr/doc/latest/">C++ library</a>, with a
- * <a class="el" target="_blank" href="http://gudhi.gforge.inria.fr/python/latest/">Python interface</a>, for
+ * <a class="el" target="_blank" href="http://gudhi.gforge.inria.fr/doc/latest/">C++ library</a> for
* Computational Topology and Topological Data Analysis
* (<a class="el" target="_blank" href="https://en.wikipedia.org/wiki/Topological_data_analysis">TDA</a>).
* The GUDHI library intends to help the development of new algorithmic solutions in TDA and their transfer to
@@ -16,7 +15,7 @@
*
* \li Data structures to represent, construct and manipulate simplicial complexes.
* \li Simplification of simplicial complexes by edge contraction.
- * \li Algorithms to compute persistent homology persistent homology.
+ * \li Algorithms to compute persistent homology and bottleneck distance.
*
* All data-structures are generic and several of their aspects can be parameterized via template classes.
* We refer to \cite gudhilibrary_ICMS14 for a detailed description of the design of the library.
@@ -161,7 +160,7 @@
<b>Author:</b> Fran&ccedil;ois Godi<br>
<b>Introduced in:</b> GUDHI 2.0.0<br>
<b>Copyright:</b> GPL v3<br>
- <b>Requires:</b> \ref cgal &ge; 4.8.1 and \ref eigen3
+ <b>Requires:</b> \ref cgal &ge; 4.8.1
</td>
<td width="75%">
Bottleneck distance measures the similarity between two persistence diagrams.
@@ -330,13 +329,29 @@ make doxygen
* Alpha_complex/Alpha_complex_from_off.cpp</a>
* \li <a href="_alpha_complex_2_alpha_complex_from_points_8cpp-example.html">
* Alpha_complex/Alpha_complex_from_points.cpp</a>
+ * \li <a href="_bottleneck_distance_2alpha_rips_persistence_bottleneck_distance_8cpp-example.html">
+ * Bottleneck_distance/alpha_rips_persistence_bottleneck_distance.cpp</a>
* \li <a href="_persistent_cohomology_2alpha_complex_persistence_8cpp-example.html">
* Persistent_cohomology/alpha_complex_persistence.cpp</a>
* \li <a href="_persistent_cohomology_2periodic_alpha_complex_3d_persistence_8cpp-example.html">
* Persistent_cohomology/periodic_alpha_complex_3d_persistence.cpp</a>
* \li <a href="_persistent_cohomology_2custom_persistence_sort_8cpp-example.html">
* Persistent_cohomology/custom_persistence_sort.cpp</a>
- *
+ * \li <a href="_spatial_searching_2example_spatial_searching_8cpp-example.html">
+ * Spatial_searching/example_spatial_searching.cpp</a>
+ * \li <a href="_subsampling_2example_choose_n_farthest_points_8cpp-example.html">
+ * Subsampling/example_choose_n_farthest_points.cpp</a>
+ * \li <a href="_subsampling_2example_custom_kernel_8cpp-example.html">
+ * Subsampling/example_custom_kernel.cpp</a>
+ * \li <a href="_subsampling_2example_pick_n_random_points_8cpp-example.html">
+ * Subsampling/example_pick_n_random_points.cpp</a>
+ * \li <a href="_subsampling_2example_sparsify_point_set_8cpp-example.html">
+ * Subsampling/example_sparsify_point_set.cpp</a>
+ * \li <a href="_tangential_complex_2example_basic_8cpp-example.html">
+ * Tangential_complex/example_basic.cpp</a>
+ * \li <a href="_tangential_complex_2example_with_perturb_8cpp-example.html">
+ * Tangential_complex/example_with_perturb.cpp</a>
+ *
* \subsection tbb Threading Building Blocks
* <a target="_blank" href="https://www.threadingbuildingblocks.org/">Intel&reg; TBB</a> lets you easily write parallel
* C++ programs that take full advantage of multicore performance, that are portable and composable, and that have
diff --git a/example/Alpha_complex/CMakeLists.txt b/example/Alpha_complex/CMakeLists.txt
index a4853d78..5bf553e9 100644
--- a/example/Alpha_complex/CMakeLists.txt
+++ b/example/Alpha_complex/CMakeLists.txt
@@ -5,9 +5,9 @@ project(Alpha_complex_examples)
# cmake -DCGAL_DIR=~/workspace/CGAL-4.7 ..
if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
add_executable ( Alpha_complex_example_from_points Alpha_complex_from_points.cpp )
- target_link_libraries(Alpha_complex_example_from_points ${Boost_SYSTEM_LIBRARY} ${Boost_THREAD_LIBRARY} ${CGAL_LIBRARY})
+ target_link_libraries(Alpha_complex_example_from_points ${CGAL_LIBRARY})
add_executable ( Alpha_complex_example_from_off Alpha_complex_from_off.cpp )
- target_link_libraries(Alpha_complex_example_from_off ${Boost_SYSTEM_LIBRARY} ${Boost_THREAD_LIBRARY} ${CGAL_LIBRARY})
+ target_link_libraries(Alpha_complex_example_from_off ${CGAL_LIBRARY})
if (TBB_FOUND)
target_link_libraries(Alpha_complex_example_from_points ${TBB_LIBRARIES})
target_link_libraries(Alpha_complex_example_from_off ${TBB_LIBRARIES})
@@ -29,4 +29,8 @@ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
add_test(Alpha_complex_example_from_off_32_diff_files ${DIFF_PATH}
${CMAKE_CURRENT_BINARY_DIR}/alphaoffreader_result_32.txt ${CMAKE_CURRENT_BINARY_DIR}/alphaoffreader_for_doc_32.txt)
endif()
+
+ install(TARGETS Alpha_complex_example_from_points DESTINATION bin)
+ install(TARGETS Alpha_complex_example_from_off DESTINATION bin)
+
endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
diff --git a/example/Bitmap_cubical_complex/Bitmap_cubical_complex.cpp b/example/Bitmap_cubical_complex/Bitmap_cubical_complex.cpp
index e6bc6648..67735ba1 100644
--- a/example/Bitmap_cubical_complex/Bitmap_cubical_complex.cpp
+++ b/example/Bitmap_cubical_complex/Bitmap_cubical_complex.cpp
@@ -27,8 +27,9 @@
// standard stuff
#include <iostream>
-#include <sstream>
+#include <string>
#include <vector>
+#include <cstddef>
int main(int argc, char** argv) {
std::cout << "This program computes persistent homology, by using bitmap_cubical_complex class, of cubical " <<
@@ -38,9 +39,6 @@ int main(int argc, char** argv) {
"filtrations of top dimensional cells. We assume that the cells are in the lexicographical order. See " <<
"CubicalOneSphere.txt or CubicalTwoSphere.txt for example.\n" << std::endl;
- int p = 2;
- double min_persistence = 0;
-
if (argc != 2) {
std::cerr << "Wrong number of parameters. Please provide the name of a file with a Perseus style bitmap at " <<
"the input. The program will now terminate.\n";
@@ -56,16 +54,26 @@ int main(int argc, char** argv) {
// Compute the persistence diagram of the complex
Persistent_cohomology pcoh(b);
+ int p = 2;
+ double min_persistence = 0;
+
pcoh.init_coefficients(p); // initializes the coefficient field for homology
pcoh.compute_persistent_cohomology(min_persistence);
- std::stringstream ss;
- ss << argv[1] << "_persistence";
- std::ofstream out(ss.str().c_str());
+ std::string output_file_name(argv[1]);
+ output_file_name += "_persistence";
+
+ std::size_t last_in_path = output_file_name.find_last_of("/\\");
+
+ if (last_in_path != std::string::npos) {
+ output_file_name = output_file_name.substr(last_in_path+1);
+ }
+
+ std::ofstream out(output_file_name.c_str());
pcoh.output_diagram(out);
out.close();
- std::cout << "Result in file: " << ss.str().c_str() << "\n";
+ std::cout << "Result in file: " << output_file_name << "\n";
return 0;
}
diff --git a/example/Bitmap_cubical_complex/Bitmap_cubical_complex_periodic_boundary_conditions.cpp b/example/Bitmap_cubical_complex/Bitmap_cubical_complex_periodic_boundary_conditions.cpp
index 839a4c89..122160a2 100644
--- a/example/Bitmap_cubical_complex/Bitmap_cubical_complex_periodic_boundary_conditions.cpp
+++ b/example/Bitmap_cubical_complex/Bitmap_cubical_complex_periodic_boundary_conditions.cpp
@@ -30,6 +30,7 @@
#include <iostream>
#include <sstream>
#include <vector>
+#include <string>
int main(int argc, char** argv) {
std::cout << "This program computes persistent homology, by using " <<
@@ -40,9 +41,6 @@ int main(int argc, char** argv) {
"assume that the cells are in the lexicographical order. See CubicalOneSphere.txt or CubicalTwoSphere.txt for" <<
" example.\n" << std::endl;
- int p = 2;
- double min_persistence = 0;
-
if (argc != 2) {
std::cerr << "Wrong number of parameters. Please provide the name of a file with a Perseus style bitmap at " <<
"the input. The program will now terminate.\n";
@@ -58,16 +56,26 @@ int main(int argc, char** argv) {
typedef Gudhi::persistent_cohomology::Persistent_cohomology<Bitmap_cubical_complex, Field_Zp> Persistent_cohomology;
// Compute the persistence diagram of the complex
Persistent_cohomology pcoh(b, true);
+
+ int p = 2;
+ double min_persistence = 0;
pcoh.init_coefficients(p); // initializes the coefficient field for homology
pcoh.compute_persistent_cohomology(min_persistence);
- std::stringstream ss;
- ss << argv[1] << "_persistence";
- std::ofstream out(ss.str().c_str());
+ std::string output_file_name(argv[1]);
+ output_file_name += "_persistence";
+
+ std::size_t last_in_path = output_file_name.find_last_of("/\\");
+
+ if (last_in_path != std::string::npos) {
+ output_file_name = output_file_name.substr(last_in_path+1);
+ }
+
+ std::ofstream out(output_file_name.c_str());
pcoh.output_diagram(out);
out.close();
- std::cout << "Result in file: " << ss.str().c_str() << "\n";
+ std::cout << "Result in file: " << output_file_name << "\n";
return 0;
}
diff --git a/example/Bitmap_cubical_complex/CMakeLists.txt b/example/Bitmap_cubical_complex/CMakeLists.txt
index 241a11e5..a0401619 100644
--- a/example/Bitmap_cubical_complex/CMakeLists.txt
+++ b/example/Bitmap_cubical_complex/CMakeLists.txt
@@ -2,7 +2,6 @@ cmake_minimum_required(VERSION 2.6)
project(Bitmap_cubical_complex_examples)
add_executable ( Bitmap_cubical_complex Bitmap_cubical_complex.cpp )
-target_link_libraries(Bitmap_cubical_complex ${Boost_SYSTEM_LIBRARY})
if (TBB_FOUND)
target_link_libraries(Bitmap_cubical_complex ${TBB_LIBRARIES})
endif()
@@ -14,7 +13,6 @@ add_test(NAME Bitmap_cubical_complex_example_persistence_two_sphere COMMAND $<TA
"${CMAKE_SOURCE_DIR}/data/bitmap/CubicalTwoSphere.txt")
add_executable ( Random_bitmap_cubical_complex Random_bitmap_cubical_complex.cpp )
-target_link_libraries(Random_bitmap_cubical_complex ${Boost_SYSTEM_LIBRARY})
if (TBB_FOUND)
target_link_libraries(Random_bitmap_cubical_complex ${TBB_LIBRARIES})
endif()
@@ -22,7 +20,6 @@ add_test(NAME Bitmap_cubical_complex_example_random COMMAND $<TARGET_FILE:Random
"2" "100" "100")
add_executable ( Bitmap_cubical_complex_periodic_boundary_conditions Bitmap_cubical_complex_periodic_boundary_conditions.cpp )
-target_link_libraries(Bitmap_cubical_complex_periodic_boundary_conditions ${Boost_SYSTEM_LIBRARY})
if (TBB_FOUND)
target_link_libraries(Bitmap_cubical_complex_periodic_boundary_conditions ${TBB_LIBRARIES})
endif()
@@ -34,3 +31,7 @@ add_test(NAME Bitmap_cubical_complex_example_periodic_boundary_conditions_2d_tor
add_test(NAME Bitmap_cubical_complex_example_periodic_boundary_conditions_3d_torus
COMMAND $<TARGET_FILE:Bitmap_cubical_complex_periodic_boundary_conditions>
"${CMAKE_SOURCE_DIR}/data/bitmap/3d_torus.txt")
+
+install(TARGETS Bitmap_cubical_complex DESTINATION bin)
+install(TARGETS Random_bitmap_cubical_complex DESTINATION bin)
+install(TARGETS Bitmap_cubical_complex_periodic_boundary_conditions DESTINATION bin)
diff --git a/example/Bottleneck_distance/CMakeLists.txt b/example/Bottleneck_distance/CMakeLists.txt
index 0d0bff45..eac617db 100644
--- a/example/Bottleneck_distance/CMakeLists.txt
+++ b/example/Bottleneck_distance/CMakeLists.txt
@@ -1,22 +1,38 @@
cmake_minimum_required(VERSION 2.6)
project(Bottleneck_distance_examples)
-if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
+if (NOT CGAL_VERSION VERSION_LESS 4.8.1)
add_executable (bottleneck_read_file_example bottleneck_read_file_example.cpp)
add_executable (bottleneck_basic_example bottleneck_basic_example.cpp)
- add_executable (alpha_rips_persistence_bottleneck_distance alpha_rips_persistence_bottleneck_distance.cpp)
- target_link_libraries(alpha_rips_persistence_bottleneck_distance ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY})
if (TBB_FOUND)
target_link_libraries(bottleneck_read_file_example ${TBB_LIBRARIES})
target_link_libraries(bottleneck_basic_example ${TBB_LIBRARIES})
- target_link_libraries(alpha_rips_persistence_bottleneck_distance ${TBB_LIBRARIES})
endif(TBB_FOUND)
add_test(NAME Bottleneck_distance_example_basic COMMAND $<TARGET_FILE:bottleneck_basic_example>)
+ add_test(NAME Bottleneck_read_file_example
+ COMMAND $<TARGET_FILE:bottleneck_read_file_example>
+ "${CMAKE_SOURCE_DIR}/data/persistence_diagram/first.pers" "${CMAKE_SOURCE_DIR}/data/persistence_diagram/second.pers")
+
+ install(TARGETS bottleneck_read_file_example DESTINATION bin)
+ install(TARGETS bottleneck_basic_example DESTINATION bin)
+
+endif (NOT CGAL_VERSION VERSION_LESS 4.8.1)
+
+# Eigen3 and CGAL >= 4.7.0 are required for the alpha complex
+# CGAL >= 4.8.1 is required for the bottleneck distance
+if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
+ add_executable (alpha_rips_persistence_bottleneck_distance alpha_rips_persistence_bottleneck_distance.cpp)
+ target_link_libraries(alpha_rips_persistence_bottleneck_distance ${Boost_PROGRAM_OPTIONS_LIBRARY})
+
add_test(NAME Bottleneck_distance_example_alpha_rips_persistence_bottleneck
- COMMAND $<TARGET_FILE:alpha_rips_persistence_bottleneck_distance>
- "${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" "-r" "0.15" "-m" "0.12" "-d" "3" "-p" "3")
+ COMMAND $<TARGET_FILE:alpha_rips_persistence_bottleneck_distance>
+ "${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" "-r" "0.15" "-m" "0.12" "-d" "3" "-p" "3")
+ install(TARGETS alpha_rips_persistence_bottleneck_distance DESTINATION bin)
+ if (TBB_FOUND)
+ target_link_libraries(alpha_rips_persistence_bottleneck_distance ${TBB_LIBRARIES})
+ endif(TBB_FOUND)
endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
diff --git a/example/Bottleneck_distance/bottleneck_read_file_example.cpp b/example/Bottleneck_distance/bottleneck_read_file_example.cpp
index bde05825..24d73c57 100644
--- a/example/Bottleneck_distance/bottleneck_read_file_example.cpp
+++ b/example/Bottleneck_distance/bottleneck_read_file_example.cpp
@@ -20,53 +20,31 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-#define CGAL_HAS_THREADS
-
#include <gudhi/Bottleneck.h>
+#include <gudhi/reader_utils.h>
#include <iostream>
#include <vector>
#include <utility> // for pair
-#include <fstream>
-#include <sstream>
#include <string>
-
-std::vector< std::pair<double, double> > read_diagram_from_file(const char* filename) {
- std::ifstream in;
- in.open(filename);
- std::vector< std::pair<double, double> > result;
- if (!in.is_open()) {
- std::cerr << "File : " << filename << " do not exist. The program will now terminate \n";
- throw "File do not exist \n";
- }
-
- std::string line;
- while (!in.eof()) {
- getline(in, line);
- if (line.length() != 0) {
- std::stringstream lineSS;
- lineSS << line;
- double beginn, endd;
- lineSS >> beginn;
- lineSS >> endd;
- result.push_back(std::make_pair(beginn, endd));
- }
- }
- in.close();
- return result;
-} // read_diagram_from_file
+#include <limits> // for numeric_limits
int main(int argc, char** argv) {
if (argc < 3) {
- std::cout << "To run this program please provide as an input two files with persistence diagrams. Each file " <<
- "should contain a birth-death pair per line. Third, optional parameter is an error bound on a bottleneck" <<
- " distance (set by default to zero). The program will now terminate \n";
+ std::cout << "To run this program please provide as an input two files with persistence diagrams. Each file" <<
+ " should contain a birth-death pair per line. Third, optional parameter is an error bound on a bottleneck" <<
+ " distance (set by default to the smallest positive double value). If you set the error bound to 0, be" <<
+ " aware this version is exact but expensive. The program will now terminate \n";
+ return -1;
}
- std::vector< std::pair< double, double > > diag1 = read_diagram_from_file(argv[1]);
- std::vector< std::pair< double, double > > diag2 = read_diagram_from_file(argv[2]);
- double tolerance = 0.;
+ std::vector<std::pair<double, double>> diag1 = Gudhi::read_persistence_intervals_in_dimension(argv[1]);
+ std::vector<std::pair<double, double>> diag2 = Gudhi::read_persistence_intervals_in_dimension(argv[2]);
+
+ double tolerance = std::numeric_limits<double>::min();
if (argc == 4) {
tolerance = atof(argv[3]);
}
double b = Gudhi::persistence_diagram::bottleneck_distance(diag1, diag2, tolerance);
std::cout << "The distance between the diagrams is : " << b << ". The tolerance is : " << tolerance << std::endl;
+
+ return 0;
}
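
The rewritten example now delegates the parsing to the shared reader and compares the two diagrams with a small default tolerance. A minimal standalone sketch of the same pattern, with placeholder file names (omitting the third argument of bottleneck_distance falls back to the smallest positive double, as in the hunk above):

    // Minimal sketch; "diagram_1.pers" and "diagram_2.pers" are placeholders.
    #include <gudhi/Bottleneck.h>
    #include <gudhi/reader_utils.h>
    #include <iostream>
    #include <utility>
    #include <vector>

    int main() {
      std::vector<std::pair<double, double>> diag1 =
          Gudhi::read_persistence_intervals_in_dimension("diagram_1.pers");
      std::vector<std::pair<double, double>> diag2 =
          Gudhi::read_persistence_intervals_in_dimension("diagram_2.pers");
      // No third argument: the default tolerance is std::numeric_limits<double>::min().
      double b = Gudhi::persistence_diagram::bottleneck_distance(diag1, diag2);
      std::cout << "Bottleneck distance = " << b << std::endl;
      return 0;
    }
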
diff --git a/example/Contraction/CMakeLists.txt b/example/Contraction/CMakeLists.txt
index 51a6832d..83594c0e 100644
--- a/example/Contraction/CMakeLists.txt
+++ b/example/Contraction/CMakeLists.txt
@@ -5,10 +5,6 @@ project(Contraction_examples)
add_executable(RipsContraction Rips_contraction.cpp)
add_executable(GarlandHeckbert Garland_heckbert.cpp)
-target_link_libraries(RipsContraction ${Boost_TIMER_LIBRARY} ${Boost_SYSTEM_LIBRARY})
-target_link_libraries(GarlandHeckbert ${Boost_TIMER_LIBRARY} ${Boost_SYSTEM_LIBRARY})
-
-
add_test(NAME Contraction_example_tore3D_0.2 COMMAND $<TARGET_FILE:RipsContraction>
"${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" "0.2")
# TODO(DS) : These tests are too long under Windows
@@ -16,3 +12,6 @@ add_test(NAME Contraction_example_tore3D_0.2 COMMAND $<TARGET_FILE:RipsContracti
# "${CMAKE_SOURCE_DIR}/data/points/sphere3D_2646.off" "0.2")
#add_test(NAME Contraction_example_SO3_0.3 COMMAND $<TARGET_FILE:RipsContraction>
# "${CMAKE_SOURCE_DIR}/data/points/SO3_10000.off" "0.3")
+
+install(TARGETS RipsContraction DESTINATION bin)
+install(TARGETS GarlandHeckbert DESTINATION bin)
diff --git a/example/Contraction/Garland_heckbert.cpp b/example/Contraction/Garland_heckbert.cpp
index 8b5a6a6c..f0cde95e 100644
--- a/example/Contraction/Garland_heckbert.cpp
+++ b/example/Contraction/Garland_heckbert.cpp
@@ -29,8 +29,8 @@
#include <gudhi/Edge_contraction.h>
#include <gudhi/Skeleton_blocker.h>
#include <gudhi/Off_reader.h>
+#include <gudhi/Clock.h>
-#include <boost/timer/timer.hpp>
#include <iostream>
#include "Garland_heckbert/Error_quadric.h"
@@ -165,7 +165,7 @@ int main(int argc, char *argv[]) {
int num_contractions = atoi(argv[3]);
- boost::timer::auto_cpu_timer t;
+ Gudhi::Clock contraction_chrono("Time to simplify and enumerate simplices");
// constructs the contractor object with Garland Heckbert policies.
Complex_contractor contractor(complex,
@@ -182,6 +182,8 @@ int main(int argc, char *argv[]) {
complex.num_edges() << " edges and " <<
complex.num_triangles() << " triangles." << std::endl;
+ std::cout << contraction_chrono;
+
// write simplified complex
Gudhi::skeleton_blocker::Skeleton_blocker_off_writer<Complex> off_writer(argv[2], complex);
diff --git a/example/Contraction/Rips_contraction.cpp b/example/Contraction/Rips_contraction.cpp
index 8289b1d3..501b0e87 100644
--- a/example/Contraction/Rips_contraction.cpp
+++ b/example/Contraction/Rips_contraction.cpp
@@ -23,8 +23,8 @@
#include <gudhi/Skeleton_blocker.h>
#include <gudhi/Off_reader.h>
#include <gudhi/Point.h>
+#include <gudhi/Clock.h>
-#include <boost/timer/timer.hpp>
#include <iostream>
struct Geometry_trait {
@@ -68,7 +68,7 @@ int main(int argc, char *argv[]) {
build_rips(complex, atof(argv[2]));
- boost::timer::auto_cpu_timer t;
+ Gudhi::Clock contraction_chrono("Time to simplify and enumerate simplices");
std::cout << "Initial complex has " <<
complex.num_vertices() << " vertices and " <<
@@ -90,8 +90,7 @@ int main(int argc, char *argv[]) {
complex.num_blockers() << " blockers and " <<
num_simplices << " simplices" << std::endl;
-
- std::cout << "Time to simplify and enumerate simplices:\n";
+ std::cout << contraction_chrono;
return EXIT_SUCCESS;
}
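
Both contraction examples switch from boost::timer to Gudhi::Clock. A minimal usage sketch, assuming only what the Clock.h hunk further down exposes: construction starts the timer, end() freezes it, and streaming prints the message followed by the elapsed seconds.

    // Minimal Gudhi::Clock sketch; the timed work is a placeholder comment.
    #include <gudhi/Clock.h>
    #include <iostream>

    int main() {
      Gudhi::Clock chrono("Time to do the work");
      // ... work being timed goes here ...
      chrono.end();                 // optional: freeze the measurement
      std::cout << chrono;          // prints "Time to do the work: <n>s"
      std::cout << chrono.num_seconds() << " seconds elapsed\n";
      return 0;
    }
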
diff --git a/example/Persistent_cohomology/CMakeLists.txt b/example/Persistent_cohomology/CMakeLists.txt
index 3c45e79b..f47de4c3 100644
--- a/example/Persistent_cohomology/CMakeLists.txt
+++ b/example/Persistent_cohomology/CMakeLists.txt
@@ -2,25 +2,23 @@ cmake_minimum_required(VERSION 2.6)
project(Persistent_cohomology_examples)
add_executable(plain_homology plain_homology.cpp)
-target_link_libraries(plain_homology ${Boost_SYSTEM_LIBRARY})
add_executable(persistence_from_simple_simplex_tree persistence_from_simple_simplex_tree.cpp)
-target_link_libraries(persistence_from_simple_simplex_tree ${Boost_SYSTEM_LIBRARY})
add_executable(rips_distance_matrix_persistence rips_distance_matrix_persistence.cpp)
-target_link_libraries(rips_distance_matrix_persistence ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY})
+target_link_libraries(rips_distance_matrix_persistence ${Boost_PROGRAM_OPTIONS_LIBRARY})
add_executable(rips_persistence rips_persistence.cpp)
-target_link_libraries(rips_persistence ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY})
+target_link_libraries(rips_persistence ${Boost_PROGRAM_OPTIONS_LIBRARY})
add_executable(rips_persistence_step_by_step rips_persistence_step_by_step.cpp)
-target_link_libraries(rips_persistence_step_by_step ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY})
+target_link_libraries(rips_persistence_step_by_step ${Boost_PROGRAM_OPTIONS_LIBRARY})
add_executable(rips_persistence_via_boundary_matrix rips_persistence_via_boundary_matrix.cpp)
-target_link_libraries(rips_persistence_via_boundary_matrix ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY})
+target_link_libraries(rips_persistence_via_boundary_matrix ${Boost_PROGRAM_OPTIONS_LIBRARY})
add_executable(persistence_from_file persistence_from_file.cpp)
-target_link_libraries(persistence_from_file ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY})
+target_link_libraries(persistence_from_file ${Boost_PROGRAM_OPTIONS_LIBRARY})
if (TBB_FOUND)
target_link_libraries(plain_homology ${TBB_LIBRARIES})
@@ -47,27 +45,36 @@ add_test(NAME Persistent_cohomology_example_from_file_3_2_0 COMMAND $<TARGET_FIL
"${CMAKE_SOURCE_DIR}/data/filtered_simplicial_complex/bunny_5000_complex.fsc" "-p" "2" "-m" "0")
add_test(NAME Persistent_cohomology_example_from_file_3_3_100 COMMAND $<TARGET_FILE:persistence_from_file>
"${CMAKE_SOURCE_DIR}/data/filtered_simplicial_complex/bunny_5000_complex.fsc" "-p" "3" "-m" "100")
-
+
+install(TARGETS plain_homology DESTINATION bin)
+install(TARGETS persistence_from_simple_simplex_tree DESTINATION bin)
+install(TARGETS rips_distance_matrix_persistence DESTINATION bin)
+install(TARGETS rips_persistence DESTINATION bin)
+install(TARGETS rips_persistence_step_by_step DESTINATION bin)
+install(TARGETS rips_persistence_via_boundary_matrix DESTINATION bin)
+install(TARGETS persistence_from_file DESTINATION bin)
+
if(GMP_FOUND)
if(GMPXX_FOUND)
add_executable(rips_multifield_persistence rips_multifield_persistence.cpp )
target_link_libraries(rips_multifield_persistence
- ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY} ${GMPXX_LIBRARIES} ${GMP_LIBRARIES})
+ ${Boost_PROGRAM_OPTIONS_LIBRARY} ${GMPXX_LIBRARIES} ${GMP_LIBRARIES})
if (TBB_FOUND)
target_link_libraries(rips_multifield_persistence ${TBB_LIBRARIES})
endif(TBB_FOUND)
- add_test(NAME Persistent_cohomology_example_multifield_2_71 COMMAND $<TARGET_FILE:rips_multifield_persistence>
- "${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" "-r" "0.25" "-m" "0.5" "-d" "3" "-p" "2" "-q" "71")
+ add_test(NAME Persistent_cohomology_example_multifield_2_71 COMMAND $<TARGET_FILE:rips_multifield_persistence>
+ "${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" "-r" "0.25" "-m" "0.5" "-d" "3" "-p" "2" "-q" "71")
+ install(TARGETS rips_multifield_persistence DESTINATION bin)
endif(GMPXX_FOUND)
endif(GMP_FOUND)
if(CGAL_FOUND)
add_executable(alpha_complex_3d_persistence alpha_complex_3d_persistence.cpp)
- target_link_libraries(alpha_complex_3d_persistence ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY})
+ target_link_libraries(alpha_complex_3d_persistence ${CGAL_LIBRARY})
add_executable(exact_alpha_complex_3d_persistence exact_alpha_complex_3d_persistence.cpp)
- target_link_libraries(exact_alpha_complex_3d_persistence ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY})
+ target_link_libraries(exact_alpha_complex_3d_persistence ${CGAL_LIBRARY})
add_executable(weighted_alpha_complex_3d_persistence weighted_alpha_complex_3d_persistence.cpp)
- target_link_libraries(weighted_alpha_complex_3d_persistence ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY})
+ target_link_libraries(weighted_alpha_complex_3d_persistence ${CGAL_LIBRARY})
if (TBB_FOUND)
target_link_libraries(alpha_complex_3d_persistence ${TBB_LIBRARIES})
@@ -81,16 +88,20 @@ if(CGAL_FOUND)
add_test(NAME Persistent_cohomology_example_weighted_alpha_complex_3d COMMAND $<TARGET_FILE:weighted_alpha_complex_3d_persistence>
"${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off" "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.weights" "2" "0.45")
+ install(TARGETS alpha_complex_3d_persistence DESTINATION bin)
+ install(TARGETS exact_alpha_complex_3d_persistence DESTINATION bin)
+ install(TARGETS weighted_alpha_complex_3d_persistence DESTINATION bin)
+
if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
add_executable (alpha_complex_persistence alpha_complex_persistence.cpp)
target_link_libraries(alpha_complex_persistence
- ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY})
+ ${CGAL_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY})
add_executable(periodic_alpha_complex_3d_persistence periodic_alpha_complex_3d_persistence.cpp)
- target_link_libraries(periodic_alpha_complex_3d_persistence ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY})
+ target_link_libraries(periodic_alpha_complex_3d_persistence ${CGAL_LIBRARY})
add_executable(custom_persistence_sort custom_persistence_sort.cpp)
- target_link_libraries(custom_persistence_sort ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY})
+ target_link_libraries(custom_persistence_sort ${CGAL_LIBRARY})
if (TBB_FOUND)
target_link_libraries(alpha_complex_persistence ${TBB_LIBRARIES})
@@ -102,5 +113,10 @@ if(CGAL_FOUND)
add_test(NAME Persistent_cohomology_example_periodic_alpha_complex_3d COMMAND $<TARGET_FILE:periodic_alpha_complex_3d_persistence>
"${CMAKE_SOURCE_DIR}/data/points/grid_10_10_10_in_0_1.off" "${CMAKE_SOURCE_DIR}/data/points/iso_cuboid_3_in_0_1.txt" "2" "0")
add_test(NAME Persistent_cohomology_example_custom_persistence_sort COMMAND $<TARGET_FILE:custom_persistence_sort>)
+
+ install(TARGETS alpha_complex_persistence DESTINATION bin)
+ install(TARGETS periodic_alpha_complex_3d_persistence DESTINATION bin)
+ install(TARGETS custom_persistence_sort DESTINATION bin)
+
endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
endif(CGAL_FOUND)
diff --git a/example/Persistent_cohomology/README b/example/Persistent_cohomology/README
index 2ac79398..794b94ae 100644
--- a/example/Persistent_cohomology/README
+++ b/example/Persistent_cohomology/README
@@ -121,7 +121,7 @@ N.B.: - alpha_complex_persistence accepts OFF files in d-Dimension.
3) 3D periodic special case
---------------------------
-./periodic_alpha_complex_3d_persistence ../../data/points/grid_10_10_10_in_0_1.off 3 1.0
+./periodic_alpha_complex_3d_persistence ../../data/points/grid_10_10_10_in_0_1.off ../../data/points/iso_cuboid_3_in_0_1.txt 3 1.0
output:
Periodic Delaunay computed.
diff --git a/example/Persistent_cohomology/alpha_complex_3d_persistence.cpp b/example/Persistent_cohomology/alpha_complex_3d_persistence.cpp
index fd227b82..f63ff0f6 100644
--- a/example/Persistent_cohomology/alpha_complex_3d_persistence.cpp
+++ b/example/Persistent_cohomology/alpha_complex_3d_persistence.cpp
@@ -76,8 +76,9 @@ using Simplex_tree_vector_vertex = std::vector< Simplex_tree_vertex >;
using PCOH = Gudhi::persistent_cohomology::Persistent_cohomology< ST, Gudhi::persistent_cohomology::Field_Zp >;
void usage(const std::string& progName) {
- std::cerr << "Usage: " << progName <<
- " path_to_file_graph coeff_field_characteristic[integer > 0] min_persistence[float >= -1.0]\n";
+ std::cerr << "Usage:\n" << progName << " path_to_OFF_file coeff_field_characteristic[integer " <<
+ "> 0] min_persistence[float >= -1.0]\n";
+ std::cerr << " path_to_OFF_file is the path to your points cloud in OFF format.\n";
exit(-1);
}
@@ -202,7 +203,6 @@ int main(int argc, char * const argv[]) {
else
std::cout << "This shall not happen" << std::endl;
}
- simplex_tree.set_filtration(filtration_max);
simplex_tree.set_dimension(dim_max);
#ifdef DEBUG_TRACES
@@ -216,7 +216,6 @@ int main(int argc, char * const argv[]) {
std::cout << " Number of vertices = " << simplex_tree.num_vertices() << " ";
std::cout << " Number of simplices = " << simplex_tree.num_simplices() << std::endl << std::endl;
std::cout << " Dimension = " << simplex_tree.dimension() << " ";
- std::cout << " filtration = " << simplex_tree.filtration() << std::endl << std::endl;
#endif // DEBUG_TRACES
#ifdef DEBUG_TRACES
diff --git a/example/Persistent_cohomology/exact_alpha_complex_3d_persistence.cpp b/example/Persistent_cohomology/exact_alpha_complex_3d_persistence.cpp
index 8a335075..09561d03 100644
--- a/example/Persistent_cohomology/exact_alpha_complex_3d_persistence.cpp
+++ b/example/Persistent_cohomology/exact_alpha_complex_3d_persistence.cpp
@@ -77,8 +77,9 @@ using Simplex_tree_vector_vertex = std::vector< Simplex_tree_vertex >;
using PCOH = Gudhi::persistent_cohomology::Persistent_cohomology< ST, Gudhi::persistent_cohomology::Field_Zp >;
void usage(char * const progName) {
- std::cerr << "Usage: " << progName <<
- " path_to_file_graph coeff_field_characteristic[integer > 0] min_persistence[float >= -1.0]\n";
+ std::cerr << "Usage:\n" << progName << " path_to_OFF_file coeff_field_characteristic[integer " <<
+ "> 0] min_persistence[float >= -1.0]\n";
+ std::cerr << " path_to_OFF_file is the path to your points cloud in OFF format.\n";
exit(-1);
}
@@ -204,7 +205,6 @@ int main(int argc, char * const argv[]) {
else
std::cout << "This shall not happen" << std::endl;
}
- simplex_tree.set_filtration(filtration_max);
simplex_tree.set_dimension(dim_max);
#ifdef DEBUG_TRACES
@@ -218,7 +218,6 @@ int main(int argc, char * const argv[]) {
std::cout << " Number of vertices = " << simplex_tree.num_vertices() << " ";
std::cout << " Number of simplices = " << simplex_tree.num_simplices() << std::endl << std::endl;
std::cout << " Dimension = " << simplex_tree.dimension() << " ";
- std::cout << " filtration = " << simplex_tree.filtration() << std::endl << std::endl;
#endif // DEBUG_TRACES
#ifdef DEBUG_TRACES
diff --git a/example/Persistent_cohomology/periodic_alpha_complex_3d_persistence.cpp b/example/Persistent_cohomology/periodic_alpha_complex_3d_persistence.cpp
index 8928cfc2..8140a3c5 100644
--- a/example/Persistent_cohomology/periodic_alpha_complex_3d_persistence.cpp
+++ b/example/Persistent_cohomology/periodic_alpha_complex_3d_persistence.cpp
@@ -84,8 +84,16 @@ using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomolog
ST, Gudhi::persistent_cohomology::Field_Zp >;
void usage(char * const progName) {
- std::cerr << "Usage: " << progName <<
- " path_to_file_graph path_to_iso_cuboid_3_file coeff_field_characteristic[integer > 0] min_persistence[float >= -1.0]\n";
+ std::cerr << "Usage:\n" << progName << " path_to_OFF_file path_to_iso_cuboid_3_file coeff_field_characteristic[" <<
+ "integer > 0] min_persistence[float >= -1.0]\n" <<
+ " path_to_OFF_file is the path to your points cloud in OFF format.\n" <<
+ " path_to_iso_cuboid_3_file is the path to the iso cuboid file with the following format :\n" <<
+ " x_min y_min z_min x_max y_max z_max\n" <<
+ " In this example, the periodic cube will be " <<
+ "{ x = [x_min,x_max]; y = [y_min,y_max]; z = [z_min,z_max] }.\n" <<
+ " For more information, please refer to\n" <<
+ " https://doc.cgal.org/latest/Kernel_23/classCGAL_1_1Iso__cuboid__3.html\n";
+
exit(-1);
}
@@ -221,7 +229,6 @@ int main(int argc, char * const argv[]) {
else
std::cout << "This shall not happen" << std::endl;
}
- simplex_tree.set_filtration(filtration_max);
simplex_tree.set_dimension(dim_max);
#ifdef DEBUG_TRACES
@@ -235,7 +242,6 @@ int main(int argc, char * const argv[]) {
std::cout << " Number of vertices = " << simplex_tree.num_vertices() << " ";
std::cout << " Number of simplices = " << simplex_tree.num_simplices() << std::endl << std::endl;
std::cout << " Dimension = " << simplex_tree.dimension() << " ";
- std::cout << " filtration = " << simplex_tree.filtration() << std::endl << std::endl;
#endif // DEBUG_TRACES
#ifdef DEBUG_TRACES
diff --git a/example/Persistent_cohomology/persistence_from_file.cpp b/example/Persistent_cohomology/persistence_from_file.cpp
index 67235467..eafa3fd5 100644
--- a/example/Persistent_cohomology/persistence_from_file.cpp
+++ b/example/Persistent_cohomology/persistence_from_file.cpp
@@ -61,8 +61,7 @@ int main(int argc, char * argv[]) {
simplex_tree_stream >> simplex_tree;
std::cout << "The complex contains " << simplex_tree.num_simplices() << " simplices" << std::endl;
- std::cout << " - dimension " << simplex_tree.dimension() << " - filtration " << simplex_tree.filtration()
- << std::endl;
+ std::cout << " - dimension " << simplex_tree.dimension() << std::endl;
/*
std::cout << std::endl << std::endl << "Iterator on Simplices in the filtration, with [filtration value]:" << std::endl;
diff --git a/example/Persistent_cohomology/persistence_from_simple_simplex_tree.cpp b/example/Persistent_cohomology/persistence_from_simple_simplex_tree.cpp
index 7ca9410a..8214d66a 100644
--- a/example/Persistent_cohomology/persistence_from_simple_simplex_tree.cpp
+++ b/example/Persistent_cohomology/persistence_from_simple_simplex_tree.cpp
@@ -143,11 +143,10 @@ int main(int argc, char * const argv[]) {
/* An edge [10,12,2] */
st.set_dimension(2);
- st.set_filtration(0.4);
std::cout << "The complex contains " << st.num_simplices() << " simplices - " << st.num_vertices() << " vertices "
<< std::endl;
- std::cout << " - dimension " << st.dimension() << " - filtration " << st.filtration() << std::endl;
+ std::cout << " - dimension " << st.dimension() << std::endl;
std::cout << std::endl << std::endl << "Iterator on Simplices in the filtration, with [filtration value]:"
<< std::endl;
std::cout << "**************************************************************" << std::endl;
diff --git a/example/Persistent_cohomology/rips_distance_matrix_persistence.cpp b/example/Persistent_cohomology/rips_distance_matrix_persistence.cpp
index 8517e7f6..d38808c7 100644
--- a/example/Persistent_cohomology/rips_distance_matrix_persistence.cpp
+++ b/example/Persistent_cohomology/rips_distance_matrix_persistence.cpp
@@ -57,7 +57,7 @@ int main(int argc, char * argv[]) {
program_options(argc, argv, csv_matrix_file, filediag, threshold, dim_max, p, min_persistence);
- Distance_matrix distances = read_lower_triangular_matrix_from_csv_file<Filtration_value>(csv_matrix_file);
+ Distance_matrix distances = Gudhi::read_lower_triangular_matrix_from_csv_file<Filtration_value>(csv_matrix_file);
Rips_complex rips_complex_from_file(distances, threshold);
// Construct the Rips complex in a Simplex Tree
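
The CSV reader is now called through the Gudhi namespace. A hedged sketch of the read-then-build pattern around this call; the path, threshold and dimension are placeholders, and the type aliases follow the usual Rips example setup rather than this exact file:

    // Sketch only: "distance_matrix.csv" and the numeric parameters are made up.
    #include <gudhi/reader_utils.h>
    #include <gudhi/Rips_complex.h>
    #include <gudhi/Simplex_tree.h>
    #include <iostream>
    #include <vector>

    using Simplex_tree = Gudhi::Simplex_tree<>;
    using Filtration_value = Simplex_tree::Filtration_value;
    using Rips_complex = Gudhi::rips_complex::Rips_complex<Filtration_value>;
    using Distance_matrix = std::vector<std::vector<Filtration_value>>;

    int main() {
      Distance_matrix distances =
          Gudhi::read_lower_triangular_matrix_from_csv_file<Filtration_value>("distance_matrix.csv");
      Rips_complex rips_complex_from_file(distances, 1.0 /* threshold */);
      Simplex_tree simplex_tree;
      rips_complex_from_file.create_complex(simplex_tree, 2 /* dim_max */);
      std::cout << "The complex contains " << simplex_tree.num_simplices() << " simplices\n";
      return 0;
    }
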
diff --git a/example/Persistent_cohomology/weighted_alpha_complex_3d_persistence.cpp b/example/Persistent_cohomology/weighted_alpha_complex_3d_persistence.cpp
index 34b90933..72268511 100644
--- a/example/Persistent_cohomology/weighted_alpha_complex_3d_persistence.cpp
+++ b/example/Persistent_cohomology/weighted_alpha_complex_3d_persistence.cpp
@@ -81,8 +81,13 @@ using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomolog
ST, Gudhi::persistent_cohomology::Field_Zp >;
void usage(char * const progName) {
- std::cerr << "Usage: " << progName <<
- " path_to_file_graph path_to_weight_file coeff_field_characteristic[integer > 0] min_persistence[float >= -1.0]\n";
+ std::cerr << "Usage:\n" << progName << " path_to_OFF_file path_to_weight_file coeff_field_characteristic[integer " <<
+ "> 0] min_persistence[float >= -1.0]\n";
+ std::cerr << " path_to_OFF_file is the path to your points cloud in OFF format.\n";
+ std::cerr << " path_to_weight_file is the path to the weights of your points cloud (one value per line.)\n";
+ std::cerr << " Weights values are explained on CGAL documentation:\n";
+ std::cerr << " https://doc.cgal.org/latest/Alpha_shapes_3/index.html#title0\n";
+ std::cerr << " https://doc.cgal.org/latest/Triangulation_3/index.html#Triangulation3secclassRegulartriangulation\n";
exit(-1);
}
@@ -115,6 +120,7 @@ int main(int argc, char * const argv[]) {
if (weights_ifstr.good()) {
double weight = 0.0;
std::size_t index = 0;
+ wp.reserve(lp.size());
// Attempt read the weight in a double format, return false if it fails
while ((weights_ifstr >> weight) && (index < lp.size())) {
wp.push_back(Weighted_point_3(lp[index], weight));
@@ -130,7 +136,7 @@ int main(int argc, char * const argv[]) {
}
// alpha shape construction from points. CGAL has a strange behavior in REGULARIZED mode.
- Alpha_shape_3 as(lp.begin(), lp.end(), 0, Alpha_shape_3::GENERAL);
+ Alpha_shape_3 as(wp.begin(), wp.end(), 0, Alpha_shape_3::GENERAL);
#ifdef DEBUG_TRACES
std::cout << "Alpha shape computed in GENERAL mode" << std::endl;
#endif // DEBUG_TRACES
@@ -222,7 +228,6 @@ int main(int argc, char * const argv[]) {
else
std::cout << "This shall not happen" << std::endl;
}
- simplex_tree.set_filtration(filtration_max);
simplex_tree.set_dimension(dim_max);
#ifdef DEBUG_TRACES
@@ -236,7 +241,6 @@ int main(int argc, char * const argv[]) {
std::cout << " Number of vertices = " << simplex_tree.num_vertices() << " ";
std::cout << " Number of simplices = " << simplex_tree.num_simplices() << std::endl << std::endl;
std::cout << " Dimension = " << simplex_tree.dimension() << " ";
- std::cout << " filtration = " << simplex_tree.filtration() << std::endl << std::endl;
#endif // DEBUG_TRACES
#ifdef DEBUG_TRACES
diff --git a/example/Rips_complex/CMakeLists.txt b/example/Rips_complex/CMakeLists.txt
index 8aee79e2..2940f164 100644
--- a/example/Rips_complex/CMakeLists.txt
+++ b/example/Rips_complex/CMakeLists.txt
@@ -3,17 +3,13 @@ project(Rips_complex_examples)
# Point cloud
add_executable ( Rips_complex_example_from_off example_rips_complex_from_off_file.cpp )
-target_link_libraries(Rips_complex_example_from_off ${Boost_SYSTEM_LIBRARY})
add_executable ( Rips_complex_example_one_skeleton_from_points example_one_skeleton_rips_from_points.cpp )
-target_link_libraries(Rips_complex_example_one_skeleton_from_points ${Boost_SYSTEM_LIBRARY})
# Distance matrix
add_executable ( Rips_complex_example_one_skeleton_from_distance_matrix example_one_skeleton_rips_from_distance_matrix.cpp )
-target_link_libraries(Rips_complex_example_one_skeleton_from_distance_matrix ${Boost_SYSTEM_LIBRARY})
add_executable ( Rips_complex_example_from_csv_distance_matrix example_rips_complex_from_csv_distance_matrix_file.cpp )
-target_link_libraries(Rips_complex_example_from_csv_distance_matrix ${Boost_SYSTEM_LIBRARY})
if (TBB_FOUND)
target_link_libraries(Rips_complex_example_from_off ${TBB_LIBRARIES})
@@ -56,3 +52,8 @@ if (DIFF_PATH)
${CMAKE_CURRENT_BINARY_DIR}/ripscsvreader_result_12_3.txt
${CMAKE_CURRENT_BINARY_DIR}/full_skeleton_rips_for_doc.txt)
endif()
+
+install(TARGETS Rips_complex_example_from_off DESTINATION bin)
+install(TARGETS Rips_complex_example_one_skeleton_from_points DESTINATION bin)
+install(TARGETS Rips_complex_example_one_skeleton_from_distance_matrix DESTINATION bin)
+install(TARGETS Rips_complex_example_from_csv_distance_matrix DESTINATION bin)
diff --git a/example/Rips_complex/example_rips_complex_from_csv_distance_matrix_file.cpp b/example/Rips_complex/example_rips_complex_from_csv_distance_matrix_file.cpp
index 7ae8126f..9e182f1e 100644
--- a/example/Rips_complex/example_rips_complex_from_csv_distance_matrix_file.cpp
+++ b/example/Rips_complex/example_rips_complex_from_csv_distance_matrix_file.cpp
@@ -32,7 +32,7 @@ int main(int argc, char **argv) {
// Init of a Rips complex from a distance matrix in a csv file
// Default separator is ';'
// ----------------------------------------------------------------------------
- Distance_matrix distances = read_lower_triangular_matrix_from_csv_file<Filtration_value>(csv_file_name);
+ Distance_matrix distances = Gudhi::read_lower_triangular_matrix_from_csv_file<Filtration_value>(csv_file_name);
Rips_complex rips_complex_from_file(distances, threshold);
std::streambuf* streambufffer;
diff --git a/example/Simplex_tree/CMakeLists.txt b/example/Simplex_tree/CMakeLists.txt
index b1ea98d4..e22cc92c 100644
--- a/example/Simplex_tree/CMakeLists.txt
+++ b/example/Simplex_tree/CMakeLists.txt
@@ -19,14 +19,20 @@ add_test(NAME Simplex_tree_example_simple_simplex_tree COMMAND $<TARGET_FILE:Sim
add_executable ( Simplex_tree_example_mini_simplex_tree mini_simplex_tree.cpp )
add_test(NAME Simplex_tree_example_mini_simplex_tree COMMAND $<TARGET_FILE:Simplex_tree_example_mini_simplex_tree>)
+install(TARGETS Simplex_tree_example_from_cliques_of_graph DESTINATION bin)
+install(TARGETS Simplex_tree_example_simple_simplex_tree DESTINATION bin)
+install(TARGETS Simplex_tree_example_mini_simplex_tree DESTINATION bin)
# An example with Simplex-tree using CGAL alpha_shapes_3
if(GMP_FOUND AND CGAL_FOUND)
add_executable ( Simplex_tree_example_alpha_shapes_3_from_off example_alpha_shapes_3_simplex_tree_from_off_file.cpp )
- target_link_libraries(Simplex_tree_example_alpha_shapes_3_from_off ${GMP_LIBRARIES} ${CGAL_LIBRARY} ${Boost_SYSTEM_LIBRARY})
+ target_link_libraries(Simplex_tree_example_alpha_shapes_3_from_off ${GMP_LIBRARIES} ${CGAL_LIBRARY})
if (TBB_FOUND)
target_link_libraries(Simplex_tree_example_alpha_shapes_3_from_off ${TBB_LIBRARIES})
endif()
add_test(NAME Simplex_tree_example_alpha_shapes_3_from_off COMMAND $<TARGET_FILE:Simplex_tree_example_alpha_shapes_3_from_off>
"${CMAKE_SOURCE_DIR}/data/points/bunny_5000.off")
+
+ install(TARGETS Simplex_tree_example_alpha_shapes_3_from_off DESTINATION bin)
+
endif()
diff --git a/example/Simplex_tree/simple_simplex_tree.cpp b/example/Simplex_tree/simple_simplex_tree.cpp
index 60f9a35e..d8318f03 100644
--- a/example/Simplex_tree/simple_simplex_tree.cpp
+++ b/example/Simplex_tree/simple_simplex_tree.cpp
@@ -185,13 +185,12 @@ int main(int argc, char * const argv[]) {
}
// ++ GENERAL VARIABLE SET
- simplexTree.set_filtration(FOURTH_FILTRATION_VALUE); // Max filtration value
simplexTree.set_dimension(2); // Max dimension = 2 -> (2,1,0)
std::cout << "********************************************************************\n";
// Display the Simplex_tree - Can not be done in the middle of 2 inserts
std::cout << "* The complex contains " << simplexTree.num_simplices() << " simplices\n";
- std::cout << " - dimension " << simplexTree.dimension() << " - filtration " << simplexTree.filtration() << "\n";
+ std::cout << " - dimension " << simplexTree.dimension() << "\n";
std::cout << "* Iterator on Simplices in the filtration, with [filtration value]:\n";
for (auto f_simplex : simplexTree.filtration_simplex_range()) {
std::cout << " " << "[" << simplexTree.filtration(f_simplex) << "] ";
diff --git a/example/Skeleton_blocker/CMakeLists.txt b/example/Skeleton_blocker/CMakeLists.txt
index c887e408..de70f089 100644
--- a/example/Skeleton_blocker/CMakeLists.txt
+++ b/example/Skeleton_blocker/CMakeLists.txt
@@ -5,8 +5,10 @@ add_executable(Skeleton_blocker_example_from_simplices Skeleton_blocker_from_sim
add_executable(Skeleton_blocker_example_iteration Skeleton_blocker_iteration.cpp)
add_executable(Skeleton_blocker_example_link Skeleton_blocker_link.cpp)
-target_link_libraries(Skeleton_blocker_example_iteration ${Boost_TIMER_LIBRARY} ${Boost_SYSTEM_LIBRARY})
-
add_test(NAME Skeleton_blocker_example_from_simplices COMMAND $<TARGET_FILE:Skeleton_blocker_example_from_simplices>)
add_test(NAME Skeleton_blocker_example_iteration COMMAND $<TARGET_FILE:Skeleton_blocker_example_iteration>)
add_test(NAME Skeleton_blocker_example_link COMMAND $<TARGET_FILE:Skeleton_blocker_example_link>)
+
+install(TARGETS Skeleton_blocker_example_from_simplices DESTINATION bin)
+install(TARGETS Skeleton_blocker_example_iteration DESTINATION bin)
+install(TARGETS Skeleton_blocker_example_link DESTINATION bin)
diff --git a/example/Skeleton_blocker/Skeleton_blocker_iteration.cpp b/example/Skeleton_blocker/Skeleton_blocker_iteration.cpp
index 6a1bc480..08ff0264 100644
--- a/example/Skeleton_blocker/Skeleton_blocker_iteration.cpp
+++ b/example/Skeleton_blocker/Skeleton_blocker_iteration.cpp
@@ -21,8 +21,7 @@
*/
#include <gudhi/Skeleton_blocker.h>
-
-#include <boost/timer/timer.hpp>
+#include <gudhi/Clock.h>
#include <stdio.h>
#include <stdlib.h>
@@ -47,8 +46,7 @@ Complex build_complete_complex(int n) {
}
int main(int argc, char *argv[]) {
- boost::timer::auto_cpu_timer t;
-
+ Gudhi::Clock skbl_chrono("Time to build the complete complex, enumerate simplices and Euler Characteristic");
const int n = 15;
// build a full complex with n vertices and 2^n-1 simplices
@@ -82,5 +80,6 @@ int main(int argc, char *argv[]) {
std::cout << "Saw " << num_vertices << " vertices, " << num_edges << " edges and " << num_simplices << " simplices"
<< std::endl;
std::cout << "The Euler Characteristic is " << euler << std::endl;
+ std::cout << skbl_chrono;
return EXIT_SUCCESS;
}
diff --git a/example/Spatial_searching/CMakeLists.txt b/example/Spatial_searching/CMakeLists.txt
index f4b9f3cb..4cf3d863 100644
--- a/example/Spatial_searching/CMakeLists.txt
+++ b/example/Spatial_searching/CMakeLists.txt
@@ -6,4 +6,5 @@ if(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
target_link_libraries(Spatial_searching_example_spatial_searching ${CGAL_LIBRARY})
add_test(NAME Spatial_searching_example_spatial_searching
COMMAND $<TARGET_FILE:Spatial_searching_example_spatial_searching>)
+ install(TARGETS Spatial_searching_example_spatial_searching DESTINATION bin)
endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
diff --git a/example/Spatial_searching/example_spatial_searching.cpp b/example/Spatial_searching/example_spatial_searching.cpp
index 14b324ae..034ad24a 100644
--- a/example/Spatial_searching/example_spatial_searching.cpp
+++ b/example/Spatial_searching/example_spatial_searching.cpp
@@ -24,29 +24,37 @@ int main(void) {
// 10-nearest neighbor query
std::cout << "10 nearest neighbors from points[20]:\n";
- auto knn_range = points_ds.query_k_nearest_neighbors(points[20], 10, true);
+ auto knn_range = points_ds.k_nearest_neighbors(points[20], 10, true);
for (auto const& nghb : knn_range)
std::cout << nghb.first << " (sq. dist. = " << nghb.second << ")\n";
// Incremental nearest neighbor query
std::cout << "Incremental nearest neighbors:\n";
- auto inn_range = points_ds.query_incremental_nearest_neighbors(points[45]);
+ auto inn_range = points_ds.incremental_nearest_neighbors(points[45]);
// Get the neighbors in distance order until we hit the first point
for (auto ins_iterator = inn_range.begin(); ins_iterator->first != 0; ++ins_iterator)
std::cout << ins_iterator->first << " (sq. dist. = " << ins_iterator->second << ")\n";
- // 10-farthest neighbor query
- std::cout << "10 farthest neighbors from points[20]:\n";
- auto kfn_range = points_ds.query_k_farthest_neighbors(points[20], 10, true);
+ // 10-furthest neighbor query
+ std::cout << "10 furthest neighbors from points[20]:\n";
+ auto kfn_range = points_ds.k_furthest_neighbors(points[20], 10, true);
for (auto const& nghb : kfn_range)
std::cout << nghb.first << " (sq. dist. = " << nghb.second << ")\n";
- // Incremental farthest neighbor query
- std::cout << "Incremental farthest neighbors:\n";
- auto ifn_range = points_ds.query_incremental_farthest_neighbors(points[45]);
+ // Incremental furthest neighbor query
+ std::cout << "Incremental furthest neighbors:\n";
+ auto ifn_range = points_ds.incremental_furthest_neighbors(points[45]);
// Get the neighbors in distance reverse order until we hit the first point
for (auto ifs_iterator = ifn_range.begin(); ifs_iterator->first != 0; ++ifs_iterator)
std::cout << ifs_iterator->first << " (sq. dist. = " << ifs_iterator->second << ")\n";
+ // All-near-neighbors search
+ std::cout << "All-near-neighbors search:\n";
+ std::vector<std::size_t> rs_result;
+ points_ds.all_near_neighbors(points[45], 0.5, std::back_inserter(rs_result));
+ K k;
+ for (auto const& p_idx : rs_result)
+ std::cout << p_idx << " (sq. dist. = " << k.squared_distance_d_object()(points[p_idx], points[45]) << ")\n";
+
return 0;
}
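
The example above exercises the renamed Kd_tree_search queries. A condensed, self-contained sketch under the same assumptions the example usually makes (an Epick_d kernel over random 4D points); the former names are noted in comments:

    // Condensed sketch of the renamed spatial searching API.
    #include <gudhi/Kd_tree_search.h>
    #include <CGAL/Epick_d.h>
    #include <CGAL/Random.h>
    #include <iostream>
    #include <iterator>
    #include <vector>

    typedef CGAL::Epick_d<CGAL::Dimension_tag<4>> K;
    typedef K::Point_d Point;
    typedef std::vector<Point> Points;
    typedef Gudhi::spatial_searching::Kd_tree_search<K, Points> Points_ds;

    int main() {
      CGAL::Random rd;
      Points points;
      for (int i = 0; i < 100; ++i)
        points.push_back(Point(rd.get_double(-1., 1.), rd.get_double(-1., 1.),
                               rd.get_double(-1., 1.), rd.get_double(-1., 1.)));
      Points_ds points_ds(points);

      auto knn = points_ds.k_nearest_neighbors(points[20], 10, true);   // was query_k_nearest_neighbors
      auto inn = points_ds.incremental_nearest_neighbors(points[45]);   // was query_incremental_nearest_neighbors
      auto kfn = points_ds.k_furthest_neighbors(points[20], 10, true);  // was query_k_farthest_neighbors
      std::cout << "Nearest neighbor of points[20]: " << knn.begin()->first << "\n";

      std::vector<std::size_t> in_ball;  // new call: every point within radius 0.5 of points[45]
      points_ds.all_near_neighbors(points[45], 0.5, std::back_inserter(in_ball));
      std::cout << in_ball.size() << " points within radius 0.5 of points[45]\n";
      return 0;
    }
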
diff --git a/example/Subsampling/CMakeLists.txt b/example/Subsampling/CMakeLists.txt
index 71b8d2e8..34400b1e 100644
--- a/example/Subsampling/CMakeLists.txt
+++ b/example/Subsampling/CMakeLists.txt
@@ -14,4 +14,10 @@ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
COMMAND $<TARGET_FILE:Subsampling_example_choose_n_farthest_points>)
add_test(NAME Subsampling_example_sparsify_point_set
COMMAND $<TARGET_FILE:Subsampling_example_sparsify_point_set>)
+
+ install(TARGETS Subsampling_example_pick_n_random_points DESTINATION bin)
+ install(TARGETS Subsampling_example_choose_n_farthest_points DESTINATION bin)
+ install(TARGETS Subsampling_example_custom_kernel DESTINATION bin)
+ install(TARGETS Subsampling_example_sparsify_point_set DESTINATION bin)
+
endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
diff --git a/example/Tangential_complex/CMakeLists.txt b/example/Tangential_complex/CMakeLists.txt
index 339d0581..16d1339d 100644
--- a/example/Tangential_complex/CMakeLists.txt
+++ b/example/Tangential_complex/CMakeLists.txt
@@ -3,9 +3,9 @@ project(Tangential_complex_examples)
if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
add_executable( Tangential_complex_example_basic example_basic.cpp )
- target_link_libraries(Tangential_complex_example_basic ${CGAL_LIBRARY} ${Boost_DATE_TIME_LIBRARY})
+ target_link_libraries(Tangential_complex_example_basic ${CGAL_LIBRARY})
add_executable( Tangential_complex_example_with_perturb example_with_perturb.cpp )
- target_link_libraries(Tangential_complex_example_with_perturb ${CGAL_LIBRARY} ${Boost_DATE_TIME_LIBRARY})
+ target_link_libraries(Tangential_complex_example_with_perturb ${CGAL_LIBRARY})
if (TBB_FOUND)
target_link_libraries(Tangential_complex_example_basic ${TBB_LIBRARIES})
target_link_libraries(Tangential_complex_example_with_perturb ${TBB_LIBRARIES})
@@ -15,4 +15,7 @@ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
COMMAND $<TARGET_FILE:Tangential_complex_example_basic>)
add_test(NAME Tangential_complex_example_with_perturb
COMMAND $<TARGET_FILE:Tangential_complex_example_with_perturb>)
+
+ install(TARGETS Tangential_complex_example_basic DESTINATION bin)
+ install(TARGETS Tangential_complex_example_with_perturb DESTINATION bin)
endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
diff --git a/example/Witness_complex/CMakeLists.txt b/example/Witness_complex/CMakeLists.txt
index 670651ce..cbc53902 100644
--- a/example/Witness_complex/CMakeLists.txt
+++ b/example/Witness_complex/CMakeLists.txt
@@ -2,27 +2,25 @@ cmake_minimum_required(VERSION 2.6)
project(Witness_complex_examples)
add_executable ( Witness_complex_example_nearest_landmark_table example_nearest_landmark_table.cpp )
-target_link_libraries(Witness_complex_example_nearest_landmark_table ${Boost_SYSTEM_LIBRARY})
if (TBB_FOUND)
target_link_libraries(Witness_complex_example_nearest_landmark_table ${TBB_LIBRARIES})
endif()
add_test(NAME Witness_complex_example_nearest_landmark_table
COMMAND $<TARGET_FILE:Witness_complex_example_nearest_landmark_table>)
+install(TARGETS Witness_complex_example_nearest_landmark_table DESTINATION bin)
+
# CGAL and Eigen3 are required for Euclidean version of Witness
if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0)
add_executable( Witness_complex_example_off example_witness_complex_off.cpp )
- target_link_libraries(Witness_complex_example_off ${Boost_SYSTEM_LIBRARY})
add_executable( Witness_complex_example_strong_off example_strong_witness_complex_off.cpp )
- target_link_libraries(Witness_complex_example_strong_off ${Boost_SYSTEM_LIBRARY})
add_executable ( Witness_complex_example_sphere example_witness_complex_sphere.cpp )
- target_link_libraries(Witness_complex_example_sphere ${Boost_SYSTEM_LIBRARY})
add_executable ( Witness_complex_example_witness_persistence example_witness_complex_persistence.cpp )
- target_link_libraries(Witness_complex_example_witness_persistence ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY})
+ target_link_libraries(Witness_complex_example_witness_persistence ${Boost_PROGRAM_OPTIONS_LIBRARY})
add_executable ( Witness_complex_example_strong_witness_persistence example_strong_witness_persistence.cpp )
- target_link_libraries(Witness_complex_example_strong_witness_persistence ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY})
+ target_link_libraries(Witness_complex_example_strong_witness_persistence ${Boost_PROGRAM_OPTIONS_LIBRARY})
if (TBB_FOUND)
target_link_libraries(Witness_complex_example_witness_persistence ${TBB_LIBRARIES})
@@ -43,4 +41,11 @@ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0)
add_test(NAME Witness_complex_example_strong_test_torus_persistence
COMMAND $<TARGET_FILE:Witness_complex_example_strong_witness_persistence>
"${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" "-l" "20" "-a" "0.5")
+
+ install(TARGETS Witness_complex_example_off DESTINATION bin)
+ install(TARGETS Witness_complex_example_strong_off DESTINATION bin)
+ install(TARGETS Witness_complex_example_sphere DESTINATION bin)
+ install(TARGETS Witness_complex_example_witness_persistence DESTINATION bin)
+ install(TARGETS Witness_complex_example_strong_witness_persistence DESTINATION bin)
+
endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0)
diff --git a/example/common/CMakeLists.txt b/example/common/CMakeLists.txt
index d5311b18..afe865d4 100644
--- a/example/common/CMakeLists.txt
+++ b/example/common/CMakeLists.txt
@@ -2,21 +2,27 @@ cmake_minimum_required(VERSION 2.6)
project(Common_examples)
add_executable ( vector_double_off_reader example_vector_double_points_off_reader.cpp )
-target_link_libraries(vector_double_off_reader ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY})
+target_link_libraries(vector_double_off_reader ${CGAL_LIBRARY})
add_test(NAME Common_example_vector_double_off_reader COMMAND $<TARGET_FILE:vector_double_off_reader>
"${CMAKE_SOURCE_DIR}/data/points/SO3_10000.off")
+install(TARGETS vector_double_off_reader DESTINATION bin)
+
if(CGAL_FOUND)
add_executable ( cgal_3D_off_reader example_CGAL_3D_points_off_reader.cpp )
- target_link_libraries(cgal_3D_off_reader ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY})
+ target_link_libraries(cgal_3D_off_reader ${CGAL_LIBRARY})
add_test(NAME Common_example_vector_cgal_3D_off_reader COMMAND $<TARGET_FILE:cgal_3D_off_reader>
"${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off")
- # need CGAL 4.7and Eigen3
+ install(TARGETS cgal_3D_off_reader DESTINATION bin)
+
+ # need CGAL 4.7 and Eigen3
if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
add_executable ( cgal_off_reader example_CGAL_points_off_reader.cpp )
- target_link_libraries(cgal_off_reader ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY})
+ target_link_libraries(cgal_off_reader ${CGAL_LIBRARY})
add_test(NAME Common_example_vector_cgal_off_reader COMMAND $<TARGET_FILE:cgal_off_reader>
"${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off")
+ install(TARGETS cgal_off_reader DESTINATION bin)
endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
+
endif()
diff --git a/include/gudhi/Bitmap_cubical_complex.h b/include/gudhi/Bitmap_cubical_complex.h
index 5a87b9b8..f395de65 100644
--- a/include/gudhi/Bitmap_cubical_complex.h
+++ b/include/gudhi/Bitmap_cubical_complex.h
@@ -97,7 +97,7 @@ class Bitmap_cubical_complex : public T {
* with filtration on top dimensional cells.
**/
Bitmap_cubical_complex(const std::vector<unsigned>& dimensions,
- const std::vector<typename T::filtration_type>& top_dimensional_cells) :
+ const std::vector<Filtration_value>& top_dimensional_cells) :
T(dimensions, top_dimensional_cells),
key_associated_to_simplex(this->total_number_of_cells + 1) {
for (size_t i = 0; i != this->total_number_of_cells; ++i) {
@@ -111,13 +111,13 @@ class Bitmap_cubical_complex : public T {
/**
* Constructor that requires vector of elements of type unsigned, which gives number of top dimensional cells
- * in the following directions and vector of element of a type T::filtration_type
+   * in the following directions and a vector of elements of type Filtration_value
* with filtration on top dimensional cells. The last parameter of the constructor is a vector of boolean of a length
* equal to the dimension of cubical complex.
* If the position i on this vector is true, then we impose periodic boundary conditions in this direction.
**/
Bitmap_cubical_complex(const std::vector<unsigned>& dimensions,
- const std::vector<typename T::filtration_type>& top_dimensional_cells,
+ const std::vector<Filtration_value>& top_dimensional_cells,
std::vector< bool > directions_in_which_periodic_b_cond_are_to_be_imposed) :
T(dimensions, top_dimensional_cells, directions_in_which_periodic_b_cond_are_to_be_imposed),
key_associated_to_simplex(this->total_number_of_cells + 1) {
@@ -170,20 +170,20 @@ class Bitmap_cubical_complex : public T {
if (globalDbg) {
std::cerr << "unsigned dimension(const Simplex_handle& sh)\n";
}
- if (sh != std::numeric_limits<Simplex_handle>::max()) return this->get_dimension_of_a_cell(sh);
+ if (sh != null_simplex()) return this->get_dimension_of_a_cell(sh);
return -1;
}
/**
* Return the filtration of a cell pointed by the Simplex_handle.
**/
- typename T::filtration_type filtration(Simplex_handle sh) {
+ Filtration_value filtration(Simplex_handle sh) {
if (globalDbg) {
- std::cerr << "T::filtration_type filtration(const Simplex_handle& sh)\n";
+ std::cerr << "Filtration_value filtration(const Simplex_handle& sh)\n";
}
// Returns the filtration value of a simplex.
- if (sh != std::numeric_limits<Simplex_handle>::max()) return this->data[sh];
- return std::numeric_limits<Simplex_handle>::max();
+ if (sh != null_simplex()) return this->data[sh];
+ return std::numeric_limits<Filtration_value>::infinity();
}
/**
@@ -203,7 +203,7 @@ class Bitmap_cubical_complex : public T {
if (globalDbg) {
std::cerr << "Simplex_key key(const Simplex_handle& sh)\n";
}
- if (sh != std::numeric_limits<Simplex_handle>::max()) {
+ if (sh != null_simplex()) {
return this->key_associated_to_simplex[sh];
}
return this->null_key();
@@ -216,7 +216,7 @@ class Bitmap_cubical_complex : public T {
if (globalDbg) {
std::cerr << "Simplex_handle simplex(Simplex_key key)\n";
}
- if (key != std::numeric_limits<Simplex_handle>::max()) {
+ if (key != null_key()) {
return this->simplex_associated_to_key[ key ];
}
return null_simplex();
@@ -229,7 +229,7 @@ class Bitmap_cubical_complex : public T {
if (globalDbg) {
std::cerr << "void assign_key(Simplex_handle& sh, Simplex_key key)\n";
}
- if (key == std::numeric_limits<Simplex_handle>::max()) return;
+ if (key == null_key()) return;
this->key_associated_to_simplex[sh] = key;
this->simplex_associated_to_key[key] = sh;
}
@@ -566,8 +566,9 @@ class is_before_in_filtration {
bool operator()(const typename Bitmap_cubical_complex<T>::Simplex_handle& sh1,
const typename Bitmap_cubical_complex<T>::Simplex_handle& sh2) const {
// Not using st_->filtration(sh1) because it uselessly tests for null_simplex.
- typename T::filtration_type fil1 = CC_->data[sh1];
- typename T::filtration_type fil2 = CC_->data[sh2];
+ typedef typename T::filtration_type Filtration_value;
+ Filtration_value fil1 = CC_->data[sh1];
+ Filtration_value fil2 = CC_->data[sh2];
if (fil1 != fil2) {
return fil1 < fil2;
}
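
With this hunk the cubical complex exposes a plain Filtration_value and returns +infinity for the null simplex instead of a sentinel key. An illustrative sketch, not taken from the patch, assuming the usual Gudhi::cubical_complex aliases and a made-up 2x2 grid of top-dimensional cells:

    // Illustrative only; the sizes and filtration values are invented.
    #include <gudhi/Bitmap_cubical_complex.h>
    #include <iostream>
    #include <vector>

    using Base = Gudhi::cubical_complex::Bitmap_cubical_complex_base<double>;
    using Cubical_complex = Gudhi::cubical_complex::Bitmap_cubical_complex<Base>;

    int main() {
      std::vector<unsigned> sizes = {2, 2};              // 2x2 grid of top-dimensional cells
      std::vector<double> cells = {0.1, 0.2, 0.3, 0.4};  // one filtration value per top cell
      Cubical_complex cc(sizes, cells);
      std::cout << "Number of cells: " << cc.num_simplices() << "\n";
      std::cout << "Filtration of the null simplex: " << cc.filtration(cc.null_simplex()) << "\n";  // inf
      return 0;
    }
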
diff --git a/include/gudhi/Bottleneck.h b/include/gudhi/Bottleneck.h
index b90a0ee0..8c97dce9 100644
--- a/include/gudhi/Bottleneck.h
+++ b/include/gudhi/Bottleneck.h
@@ -101,11 +101,11 @@ double bottleneck_distance_exact(Persistence_graph& g) {
*/
template<typename Persistence_diagram1, typename Persistence_diagram2>
double bottleneck_distance(const Persistence_diagram1 &diag1, const Persistence_diagram2 &diag2,
- double e = std::numeric_limits<double>::min()) {
+ double e = (std::numeric_limits<double>::min)()) {
Persistence_graph g(diag1, diag2, e);
if (g.bottleneck_alive() == std::numeric_limits<double>::infinity())
return std::numeric_limits<double>::infinity();
- return std::max(g.bottleneck_alive(), e == 0. ? bottleneck_distance_exact(g) : bottleneck_distance_approx(g, e));
+ return (std::max)(g.bottleneck_alive(), e == 0. ? bottleneck_distance_exact(g) : bottleneck_distance_approx(g, e));
}
} // namespace persistence_diagram
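
The extra parentheses around std::max and std::numeric_limits<double>::min are, presumably, the usual guard against function-style min/max macros (for instance from <windows.h> when NOMINMAX is not defined) expanding at the call site. A tiny sketch of that guard:

    // The parentheses prevent a macro named min or max from being expanded.
    #include <algorithm>
    #include <limits>

    double smallest_positive_double() {
      return (std::numeric_limits<double>::min)();  // safe even if min is a macro
    }

    double larger_of(double a, double b) {
      return (std::max)(a, b);                      // safe even if max is a macro
    }
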
diff --git a/include/gudhi/Clock.h b/include/gudhi/Clock.h
index 77f196ca..b83de2f5 100644
--- a/include/gudhi/Clock.h
+++ b/include/gudhi/Clock.h
@@ -23,9 +23,9 @@
#ifndef CLOCK_H_
#define CLOCK_H_
-#include <boost/date_time/posix_time/posix_time.hpp>
-
+#include <iostream>
#include <string>
+#include <chrono>
namespace Gudhi {
@@ -33,20 +33,20 @@ class Clock {
public:
// Construct and start the timer
Clock(const std::string& msg_ = std::string())
- : startTime(boost::posix_time::microsec_clock::local_time()),
+ : startTime(std::chrono::system_clock::now()),
end_called(false),
msg(msg_) { }
// Restart the timer
void begin() const {
end_called = false;
- startTime = boost::posix_time::microsec_clock::local_time();
+ startTime = std::chrono::system_clock::now();
}
// Stop the timer
void end() const {
end_called = true;
- endTime = boost::posix_time::microsec_clock::local_time();
+ endTime = std::chrono::system_clock::now();
}
std::string message() const {
@@ -62,7 +62,7 @@ class Clock {
if (!clock.msg.empty())
stream << clock.msg << ": ";
- stream << clock.num_seconds() << "s";
+ stream << clock.num_seconds() << "s\n";
return stream;
}
@@ -71,15 +71,15 @@ class Clock {
// - or now otherwise. In this case, the timer is not stopped.
double num_seconds() const {
if (!end_called) {
- auto end = boost::posix_time::microsec_clock::local_time();
- return (end - startTime).total_milliseconds() / 1000.;
+ auto end = std::chrono::system_clock::now();
+ return std::chrono::duration_cast<std::chrono::milliseconds>(end-startTime).count() / 1000.;
} else {
- return (endTime - startTime).total_milliseconds() / 1000.;
+ return std::chrono::duration_cast<std::chrono::milliseconds>(endTime-startTime).count() / 1000.;
}
}
private:
- mutable boost::posix_time::ptime startTime, endTime;
+ mutable std::chrono::time_point<std::chrono::system_clock> startTime, endTime;
mutable bool end_called;
std::string msg;
};
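
Clock now relies on std::chrono instead of boost::posix_time. A standalone sketch of the same millisecond-based measurement, independent of the class:

    // Standalone sketch of the timing pattern used by the reworked Clock.
    #include <chrono>
    #include <iostream>

    int main() {
      auto start = std::chrono::system_clock::now();
      // ... timed work ...
      auto end = std::chrono::system_clock::now();
      double seconds =
          std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count() / 1000.;
      std::cout << seconds << "s\n";
      return 0;
    }
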
diff --git a/include/gudhi/Euclidean_strong_witness_complex.h b/include/gudhi/Euclidean_strong_witness_complex.h
index fb669ef8..4f3cef4f 100644
--- a/include/gudhi/Euclidean_strong_witness_complex.h
+++ b/include/gudhi/Euclidean_strong_witness_complex.h
@@ -84,7 +84,7 @@ class Euclidean_strong_witness_complex
: landmarks_(std::begin(landmarks), std::end(landmarks)), landmark_tree_(landmarks_) {
nearest_landmark_table_.reserve(boost::size(witnesses));
for (auto w : witnesses)
- nearest_landmark_table_.push_back(landmark_tree_.query_incremental_nearest_neighbors(w));
+ nearest_landmark_table_.push_back(landmark_tree_.incremental_nearest_neighbors(w));
}
/** \brief Returns the point corresponding to the given vertex.
diff --git a/include/gudhi/Euclidean_witness_complex.h b/include/gudhi/Euclidean_witness_complex.h
index 6afe9a5d..ff8bb139 100644
--- a/include/gudhi/Euclidean_witness_complex.h
+++ b/include/gudhi/Euclidean_witness_complex.h
@@ -86,7 +86,7 @@ class Euclidean_witness_complex
: landmarks_(std::begin(landmarks), std::end(landmarks)), landmark_tree_(landmarks) {
nearest_landmark_table_.reserve(boost::size(witnesses));
for (auto w : witnesses)
- nearest_landmark_table_.push_back(landmark_tree_.query_incremental_nearest_neighbors(w));
+ nearest_landmark_table_.push_back(landmark_tree_.incremental_nearest_neighbors(w));
}
/** \brief Returns the point corresponding to the given vertex.
diff --git a/include/gudhi/Hasse_complex.h b/include/gudhi/Hasse_complex.h
index 8b06b771..e67f7609 100644
--- a/include/gudhi/Hasse_complex.h
+++ b/include/gudhi/Hasse_complex.h
@@ -30,6 +30,7 @@
#include <algorithm>
#include <utility> // for pair
#include <vector>
+#include <limits> // for infinity value
#ifdef GUDHI_USE_TBB
#include <tbb/parallel_for.h>
@@ -104,7 +105,6 @@ class Hasse_complex {
Hasse_complex(Complex_ds & cpx)
: complex_(cpx.num_simplices())
, vertices_()
- , threshold_(cpx.filtration())
, num_vertices_()
, dim_max_(cpx.dimension()) {
int size = complex_.size();
@@ -125,7 +125,6 @@ class Hasse_complex {
Hasse_complex()
: complex_()
, vertices_()
- , threshold_(0)
, num_vertices_(0)
, dim_max_(-1) { }
@@ -157,15 +156,11 @@ class Hasse_complex {
Filtration_value filtration(Simplex_handle sh) {
if (sh == null_simplex()) {
- return filtration();
+ return std::numeric_limits<Filtration_value>::infinity();
}
return complex_[sh].filtration_;
}
- Filtration_value filtration() {
- return threshold_;
- }
-
int dimension(Simplex_handle sh) {
if (complex_[sh].boundary_.empty()) return 0;
return complex_[sh].boundary_.size() - 1;
@@ -206,7 +201,6 @@ class Hasse_complex {
std::vector< Hasse_simp, Gudhi::no_init_allocator<Hasse_simp> > complex_;
std::vector<Simplex_handle> vertices_;
- Filtration_value threshold_;
size_t num_vertices_;
int dim_max_;
};
@@ -245,7 +239,6 @@ std::istream& operator>>(std::istream & is
}
hcpx.dim_max_ = max_dim;
- hcpx.threshold_ = max_fil;
return is;
}
diff --git a/include/gudhi/Kd_tree_search.h b/include/gudhi/Kd_tree_search.h
index 6728d56e..ef428002 100644
--- a/include/gudhi/Kd_tree_search.h
+++ b/include/gudhi/Kd_tree_search.h
@@ -27,6 +27,7 @@
#include <CGAL/Orthogonal_incremental_neighbor_search.h>
#include <CGAL/Search_traits.h>
#include <CGAL/Search_traits_adapter.h>
+#include <CGAL/Fuzzy_sphere.h>
#include <CGAL/property_map.h>
#include <boost/property_map/property_map.hpp>
@@ -41,19 +42,19 @@ namespace spatial_searching {
/**
* \class Kd_tree_search Kd_tree_search.h gudhi/Kd_tree_search.h
- * \brief Spatial tree data structure to perform (approximate) nearest and farthest neighbor search.
+ * \brief Spatial tree data structure to perform (approximate) nearest and furthest neighbor search.
*
* \ingroup spatial_searching
*
* \details
* The class Kd_tree_search is a tree-based data structure, based on
* <a target="_blank" href="http://doc.cgal.org/latest/Spatial_searching/index.html">CGAL dD spatial searching data structures</a>.
- * It provides a simplified API to perform (approximate) nearest and farthest neighbor searches. Contrary to CGAL default behavior, the tree
+ * It provides a simplified API to perform (approximate) nearest and furthest neighbor searches. Contrary to CGAL default behavior, the tree
* does not store the points themselves, but stores indices.
*
- * There are two types of queries: the <i>k-nearest or k-farthest neighbor query</i>, where <i>k</i> is fixed and the <i>k</i> nearest
- * or farthest points are computed right away,
- * and the <i>incremental nearest or farthest neighbor query</i>, where no number of neighbors is provided during the call, as the
+ * There are two types of queries: the <i>k-nearest or k-furthest neighbor query</i>, where <i>k</i> is fixed and the <i>k</i> nearest
+ * or furthest points are computed right away,
+ * and the <i>incremental nearest or furthest neighbor query</i>, where no number of neighbors is provided during the call, as the
* neighbors will be computed incrementally when the iterator on the range is incremented.
*
* \tparam Search_traits must be a model of the <a target="_blank"
@@ -87,11 +88,15 @@ class Kd_tree_search {
std::ptrdiff_t,
Point_property_map,
Traits_base> STraits;
+ typedef CGAL::Distance_adapter<
+ std::ptrdiff_t,
+ Point_property_map,
+ CGAL::Euclidean_distance<Traits_base> > Orthogonal_distance;
typedef CGAL::Orthogonal_k_neighbor_search<STraits> K_neighbor_search;
typedef typename K_neighbor_search::Tree Tree;
typedef typename K_neighbor_search::Distance Distance;
- /// \brief The range returned by a k-nearest or k-farthest neighbor search.
+ /// \brief The range returned by a k-nearest or k-furthest neighbor search.
/// Its value type is `std::pair<std::size_t, FT>` where `first` is the index
/// of a point P and `second` is the squared distance between P and the query point.
typedef K_neighbor_search KNS_range;
@@ -99,11 +104,12 @@ class Kd_tree_search {
typedef CGAL::Orthogonal_incremental_neighbor_search<
STraits, Distance, CGAL::Sliding_midpoint<STraits>, Tree>
Incremental_neighbor_search;
- /// \brief The range returned by an incremental nearest or farthest neighbor search.
+ /// \brief The range returned by an incremental nearest or furthest neighbor search.
/// Its value type is `std::pair<std::size_t, FT>` where `first` is the index
/// of a point P and `second` is the squared distance between P and the query point.
typedef Incremental_neighbor_search INS_range;
+ typedef CGAL::Fuzzy_sphere<STraits> Fuzzy_sphere;
/// \brief Constructor
/// @param[in] points Const reference to the point range. This range
/// is not copied, so it should not be destroyed or modified afterwards.
@@ -164,9 +170,9 @@ class Kd_tree_search {
/// @param[in] k Number of nearest points to search.
/// @param[in] sorted Indicates if the computed sequence of k-nearest neighbors needs to be sorted.
/// @param[in] eps Approximation factor.
- /// @return A range containing the k-nearest neighbors.
- KNS_range query_k_nearest_neighbors(const
- Point &p,
+ /// @return A range (whose `value_type` is `std::size_t`) containing the k-nearest neighbors.
+ KNS_range k_nearest_neighbors(
+ Point const& p,
unsigned int k,
bool sorted = true,
FT eps = FT(0)) const {
@@ -179,8 +185,7 @@ class Kd_tree_search {
k,
eps,
true,
- CGAL::Distance_adapter<std::ptrdiff_t, Point_property_map, CGAL::Euclidean_distance<Traits_base> >(
- std::begin(m_points)), sorted);
+ Orthogonal_distance(std::begin(m_points)), sorted);
return search;
}
@@ -188,10 +193,11 @@ class Kd_tree_search {
/// \brief Search incrementally for the nearest neighbors from a query point.
/// @param[in] p The query point.
/// @param[in] eps Approximation factor.
- /// @return A range containing the neighbors sorted by their distance to p.
+ /// @return A range (whose `value_type` is `std::size_t`) containing the
+ /// neighbors sorted by their distance to p.
/// All the neighbors are not computed by this function, but they will be
/// computed incrementally when the iterator on the range is incremented.
- INS_range query_incremental_nearest_neighbors(const Point &p, FT eps = FT(0)) const {
+ INS_range incremental_nearest_neighbors(Point const& p, FT eps = FT(0)) const {
// Initialize the search structure, and search all N points
// Note that we need to pass the Distance explicitly since it needs to
// know the property map
@@ -200,20 +206,19 @@ class Kd_tree_search {
p,
eps,
true,
- CGAL::Distance_adapter<std::ptrdiff_t, Point_property_map, CGAL::Euclidean_distance<Traits_base> >(
- std::begin(m_points)) );
+ Orthogonal_distance(std::begin(m_points)) );
return search;
}
- /// \brief Search for the k-farthest points from a query point.
+ /// \brief Search for the k-furthest points from a query point.
/// @param[in] p The query point.
- /// @param[in] k Number of farthest points to search.
- /// @param[in] sorted Indicates if the computed sequence of k-farthest neighbors needs to be sorted.
+ /// @param[in] k Number of furthest points to search.
+ /// @param[in] sorted Indicates if the computed sequence of k-furthest neighbors needs to be sorted.
/// @param[in] eps Approximation factor.
- /// @return A range containing the k-farthest neighbors.
- KNS_range query_k_farthest_neighbors(const
- Point &p,
+ /// @return A range (whose `value_type` is `std::size_t`) containing the k-furthest neighbors.
+ KNS_range k_furthest_neighbors(
+ Point const& p,
unsigned int k,
bool sorted = true,
FT eps = FT(0)) const {
@@ -226,19 +231,19 @@ class Kd_tree_search {
k,
eps,
false,
- CGAL::Distance_adapter<std::ptrdiff_t, Point_property_map, CGAL::Euclidean_distance<Traits_base> >(
- std::begin(m_points)), sorted);
+ Orthogonal_distance(std::begin(m_points)), sorted);
return search;
}
- /// \brief Search incrementally for the farthest neighbors from a query point.
+ /// \brief Search incrementally for the furthest neighbors from a query point.
/// @param[in] p The query point.
/// @param[in] eps Approximation factor.
- /// @return A range containing the neighbors sorted by their distance to p.
+ /// @return A range (whose `value_type` is `std::size_t`)
+ /// containing the neighbors sorted by their distance to p.
/// All the neighbors are not computed by this function, but they will be
/// computed incrementally when the iterator on the range is incremented.
- INS_range query_incremental_farthest_neighbors(const Point &p, FT eps = FT(0)) const {
+ INS_range incremental_furthest_neighbors(Point const& p, FT eps = FT(0)) const {
// Initialize the search structure, and search all N points
// Note that we need to pass the Distance explicitly since it needs to
// know the property map
@@ -247,12 +252,30 @@ class Kd_tree_search {
p,
eps,
false,
- CGAL::Distance_adapter<std::ptrdiff_t, Point_property_map, CGAL::Euclidean_distance<Traits_base> >(
- std::begin(m_points)) );
+ Orthogonal_distance(std::begin(m_points)) );
return search;
}
+ /// \brief Search for all the neighbors in a ball.
+ /// @param[in] p The query point.
+  /// @param[in] radius The search radius.
+ /// @param[out] it The points that lie inside the sphere of center `p` and radius `radius`.
+ /// Note: `it` is used this way: `*it++ = each_point`.
+ /// @param[in] eps Approximation factor.
+ template <typename OutputIterator>
+ void all_near_neighbors(Point const& p,
+ FT radius,
+ OutputIterator it,
+ FT eps = FT(0)) const {
+ m_tree.search(it, Fuzzy_sphere(p, radius, eps, m_tree.traits()));
+ }
+
+ int tree_depth() const
+ {
+ return m_tree.root()->depth();
+ }
+
private:
Point_range const& m_points;
Tree m_tree;
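
A minimal usage sketch of the renamed Kd_tree_search interface (not part of the patch), assuming a CGAL Epick_d kernel and a std::vector of its points with illustrative values; the neighbor ranges are consumed as (point index, squared distance) pairs, which matches how the patched Tangential_complex.h reads them further below:

    #include <gudhi/Kd_tree_search.h>
    #include <CGAL/Epick_d.h>
    #include <iostream>
    #include <iterator>
    #include <vector>

    int main() {
      typedef CGAL::Epick_d<CGAL::Dimension_tag<3>> K;
      typedef K::Point_d Point;
      typedef std::vector<Point> Points;
      typedef Gudhi::spatial_searching::Kd_tree_search<K, Points> Points_ds;

      Points points = {Point(0., 0., 0.), Point(1., 0., 0.), Point(0., 1., 0.), Point(2., 2., 2.)};
      Points_ds points_ds(points);

      // k-nearest / k-furthest searches return ranges of (point index, squared distance) pairs.
      for (auto const& nb : points_ds.k_nearest_neighbors(points[0], 2, /*sorted=*/true))
        std::cout << "near: index " << nb.first << " at squared distance " << nb.second << "\n";
      for (auto const& nb : points_ds.k_furthest_neighbors(points[0], 2))
        std::cout << "far: index " << nb.first << "\n";

      // Incremental search: neighbors are only computed as the iterator advances.
      auto inc = points_ds.incremental_nearest_neighbors(points[0]);
      std::cout << "closest index: " << inc.begin()->first << "\n";

      // New in this patch: indices of all points within a given radius, and the kd-tree depth.
      std::vector<std::size_t> in_ball;
      points_ds.all_near_neighbors(points[0], 1.5, std::back_inserter(in_ball));
      std::cout << in_ball.size() << " points in the ball, tree depth " << points_ds.tree_depth() << "\n";
    }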
diff --git a/include/gudhi/Neighbors_finder.h b/include/gudhi/Neighbors_finder.h
index bdc47578..a6b9b021 100644
--- a/include/gudhi/Neighbors_finder.h
+++ b/include/gudhi/Neighbors_finder.h
@@ -44,16 +44,16 @@ struct Square_query {
typedef Internal_point Point_d;
typedef double FT;
bool contains(Point_d p) const {
- return std::abs(p.x()-c.x())<=size && std::abs(p.y()-c.y())<=size;
+ return std::abs(p.x()-c.x()) <= size && std::abs(p.y()-c.y()) <= size;
}
- bool inner_range_intersects(CGAL::Kd_tree_rectangle<FT,D> const&r) const {
+ bool inner_range_intersects(CGAL::Kd_tree_rectangle<FT, D> const&r) const {
return
r.max_coord(0) >= c.x() - size &&
r.min_coord(0) <= c.x() + size &&
r.max_coord(1) >= c.y() - size &&
r.min_coord(1) <= c.y() + size;
}
- bool outer_range_contains(CGAL::Kd_tree_rectangle<FT,D> const&r) const {
+ bool outer_range_contains(CGAL::Kd_tree_rectangle<FT, D> const&r) const {
return
r.min_coord(0) >= c.x() - size &&
r.max_coord(0) <= c.x() + size &&
@@ -146,7 +146,7 @@ inline int Neighbors_finder::pull_near(int u_point_index) {
// Is the query point near a V point in the plane?
Internal_point u_point = g.get_u_point(u_point_index);
auto neighbor = kd_t.search_any_point(Square_query{u_point, r});
- if(!neighbor)
+ if (!neighbor)
return null_point_index();
tmp = neighbor->point_index;
auto point = g.get_v_point(tmp);
diff --git a/include/gudhi/Persistence_graph.h b/include/gudhi/Persistence_graph.h
index 44f4b827..622b0691 100644
--- a/include/gudhi/Persistence_graph.h
+++ b/include/gudhi/Persistence_graph.h
@@ -102,7 +102,7 @@ Persistence_graph::Persistence_graph(const Persistence_diagram1 &diag1,
b_alive = std::numeric_limits<double>::infinity();
} else {
for (auto it_u = u_alive.cbegin(), it_v = v_alive.cbegin(); it_u != u_alive.cend(); ++it_u, ++it_v)
- b_alive = std::max(b_alive, std::fabs(*it_u - *it_v));
+ b_alive = (std::max)(b_alive, std::fabs(*it_u - *it_v));
}
}
@@ -129,7 +129,7 @@ inline double Persistence_graph::distance(int u_point_index, int v_point_index)
return 0.;
Internal_point p_u = get_u_point(u_point_index);
Internal_point p_v = get_v_point(v_point_index);
- return std::max(std::fabs(p_u.x() - p_v.x()), std::fabs(p_u.y() - p_v.y()));
+ return (std::max)(std::fabs(p_u.x() - p_v.x()), std::fabs(p_u.y() - p_v.y()));
}
inline int Persistence_graph::size() const {
@@ -175,9 +175,9 @@ inline Internal_point Persistence_graph::get_v_point(int v_point_index) const {
inline double Persistence_graph::diameter_bound() const {
double max = 0.;
for (auto it = u.cbegin(); it != u.cend(); it++)
- max = std::max(max, it->y());
+ max = (std::max)(max, it->y());
for (auto it = v.cbegin(); it != v.cend(); it++)
- max = std::max(max, it->y());
+ max = (std::max)(max, it->y());
return max;
}
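
The systematic change from std::max(...) to (std::max)(...) is a portability guard: <windows.h> defines function-like min/max macros unless NOMINMAX is set, and wrapping the function name in parentheses keeps the preprocessor from matching them. A minimal illustration of the idiom (the windows.h include is hypothetical, not part of this file):

    #include <algorithm>
    // #include <windows.h>  // defines max(a, b) as a macro unless NOMINMAX is set

    double linf_distance_component(double dx, double dy) {
      // 'std::max(dx, dy)' can be rewritten by the max macro and then fails to compile;
      // '(std::max)(dx, dy)' is never parsed as a macro invocation, so the real function is called.
      return (std::max)(dx, dy);
    }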
diff --git a/include/gudhi/Persistent_cohomology.h b/include/gudhi/Persistent_cohomology.h
index 672fda48..e0a147b3 100644
--- a/include/gudhi/Persistent_cohomology.h
+++ b/include/gudhi/Persistent_cohomology.h
@@ -591,10 +591,17 @@ class Persistent_cohomology {
std::ofstream diagram_out(diagram_name.c_str());
cmp_intervals_by_length cmp(cpx_);
std::sort(std::begin(persistent_pairs_), std::end(persistent_pairs_), cmp);
+ bool has_infinity = std::numeric_limits<Filtration_value>::has_infinity;
for (auto pair : persistent_pairs_) {
- diagram_out << cpx_->dimension(get<0>(pair)) << " "
- << cpx_->filtration(get<0>(pair)) << " "
- << cpx_->filtration(get<1>(pair)) << std::endl;
+      // Special case on Windows, where infinity is printed as "1.#INF"
+ if (has_infinity && cpx_->filtration(get<1>(pair)) == std::numeric_limits<Filtration_value>::infinity()) {
+ diagram_out << cpx_->dimension(get<0>(pair)) << " "
+ << cpx_->filtration(get<0>(pair)) << " inf" << std::endl;
+ } else {
+ diagram_out << cpx_->dimension(get<0>(pair)) << " "
+ << cpx_->filtration(get<0>(pair)) << " "
+ << cpx_->filtration(get<1>(pair)) << std::endl;
+ }
}
}
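
The new branch makes the text output portable: an essential interval (death at infinity) is now written as the token "inf" on every platform instead of MSVC's "1.#INF". A minimal sketch of the same guard in isolation, assuming Filtration_value is double (the class itself is generic):

    #include <iostream>
    #include <limits>

    void print_interval(int dim, double birth, double death) {
      const bool has_infinity = std::numeric_limits<double>::has_infinity;
      if (has_infinity && death == std::numeric_limits<double>::infinity())
        std::cout << dim << " " << birth << " inf" << std::endl;  // portable spelling of an essential class
      else
        std::cout << dim << " " << birth << " " << death << std::endl;
    }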
diff --git a/include/gudhi/Simplex_tree.h b/include/gudhi/Simplex_tree.h
index 317bce23..37b3ea97 100644
--- a/include/gudhi/Simplex_tree.h
+++ b/include/gudhi/Simplex_tree.h
@@ -289,7 +289,6 @@ class Simplex_tree {
/** \brief Constructs an empty simplex tree. */
Simplex_tree()
: null_vertex_(-1),
- threshold_(0),
root_(nullptr, null_vertex_),
filtration_vect_(),
dimension_(-1) { }
@@ -297,7 +296,6 @@ class Simplex_tree {
/** \brief User-defined copy constructor reproduces the whole tree structure. */
Simplex_tree(const Simplex_tree& simplex_source)
: null_vertex_(simplex_source.null_vertex_),
- threshold_(simplex_source.threshold_),
root_(nullptr, null_vertex_ , simplex_source.root_.members_),
filtration_vect_(),
dimension_(simplex_source.dimension_) {
@@ -323,12 +321,10 @@ class Simplex_tree {
/** \brief User-defined move constructor moves the whole tree structure. */
Simplex_tree(Simplex_tree && old)
: null_vertex_(std::move(old.null_vertex_)),
- threshold_(std::move(old.threshold_)),
root_(std::move(old.root_)),
filtration_vect_(std::move(old.filtration_vect_)),
dimension_(std::move(old.dimension_)) {
old.dimension_ = -1;
- old.threshold_ = 0;
old.root_ = Siblings(nullptr, null_vertex_);
}
@@ -356,7 +352,6 @@ class Simplex_tree {
/** \brief Checks if two simplex trees are equal. */
bool operator==(Simplex_tree& st2) {
if ((null_vertex_ != st2.null_vertex_) ||
- (threshold_ != st2.threshold_) ||
(dimension_ != st2.dimension_))
return false;
return rec_equal(&root_, &st2.root_);
@@ -407,14 +402,14 @@ class Simplex_tree {
/** \brief Returns the filtration value of a simplex.
*
- * Called on the null_simplex, returns INFINITY.
+ * Called on the null_simplex, it returns infinity.
* If SimplexTreeOptions::store_filtration is false, returns 0.
*/
static Filtration_value filtration(Simplex_handle sh) {
if (sh != null_simplex()) {
return sh->second.filtration();
} else {
- return INFINITY;
+ return std::numeric_limits<Filtration_value>::infinity();
}
}
@@ -427,11 +422,6 @@ class Simplex_tree {
sh->second.assign_filtration(fv);
}
- /** \brief Returns an upper bound of the filtration values of the simplices. */
- Filtration_value filtration() const {
- return threshold_;
- }
-
/** \brief Returns a Simplex_handle different from all Simplex_handles
* associated to the simplices in the simplicial complex.
*
@@ -757,11 +747,6 @@ class Simplex_tree {
return &root_;
}
- /** Set an upper bound for the filtration values. */
- void set_filtration(Filtration_value fil) {
- threshold_ = fil;
- }
-
/** Set a dimension for the simplicial complex. */
void set_dimension(int dimension) {
dimension_ = dimension;
@@ -1215,8 +1200,6 @@ class Simplex_tree {
private:
Vertex_handle null_vertex_;
- /** \brief Upper bound on the filtration values of the simplices.*/
- Filtration_value threshold_;
/** \brief Total number of simplices in the complex, without the empty simplex.*/
/** \brief Set of simplex tree Nodes representing the vertices.*/
Siblings root_;
@@ -1244,7 +1227,6 @@ std::istream& operator>>(std::istream & is, Simplex_tree<T...> & st) {
typedef Simplex_tree<T...> ST;
std::vector<typename ST::Vertex_handle> simplex;
typename ST::Filtration_value fil;
- typename ST::Filtration_value max_fil = 0;
int max_dim = -1;
while (read_simplex(is, simplex, fil)) {
// read all simplices in the file as a list of vertices
@@ -1253,15 +1235,11 @@ std::istream& operator>>(std::istream & is, Simplex_tree<T...> & st) {
if (max_dim < dim) {
max_dim = dim;
}
- if (max_fil < fil) {
- max_fil = fil;
- }
// insert every simplex in the simplex tree
st.insert_simplex(simplex, fil);
simplex.clear();
}
st.set_dimension(max_dim);
- st.set_filtration(max_fil);
return is;
}
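
With the threshold member gone, a Simplex_tree no longer exposes set_filtration() or the argument-less filtration(); an upper bound on the filtration values has to be computed by the caller, and filtration(sh) on the null simplex now returns a typed infinity instead of the C macro INFINITY. A minimal sketch against the default Simplex_tree<> options (chosen only for illustration):

    #include <gudhi/Simplex_tree.h>
    #include <algorithm>
    #include <iostream>
    #include <limits>
    #include <vector>

    int main() {
      Gudhi::Simplex_tree<> st;
      std::vector<int> triangle = {0, 1, 2};
      st.insert_simplex_and_subfaces(triangle, 0.3);

      // Per-simplex filtration values are unchanged; the global upper bound is now
      // computed explicitly instead of being read from the removed threshold_ member.
      double max_filtration = 0.;
      for (auto sh : st.filtration_simplex_range())
        max_filtration = (std::max)(max_filtration, st.filtration(sh));
      std::cout << "max filtration value: " << max_filtration << "\n";

      // The null simplex reports a proper infinity of type Filtration_value.
      typedef Gudhi::Simplex_tree<>::Filtration_value Filtration_value;
      std::cout << std::boolalpha
                << (st.filtration(st.null_simplex()) == std::numeric_limits<Filtration_value>::infinity())
                << "\n";
    }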
diff --git a/include/gudhi/Strong_witness_complex.h b/include/gudhi/Strong_witness_complex.h
index a973ddb7..6f4bcf60 100644
--- a/include/gudhi/Strong_witness_complex.h
+++ b/include/gudhi/Strong_witness_complex.h
@@ -105,10 +105,6 @@ class Strong_witness_complex {
<< "non-negative.\n";
return false;
}
- if (limit_dimension < 0) {
- std::cerr << "Strong witness complex cannot create complex - limit dimension must be non-negative.\n";
- return false;
- }
for (auto w : nearest_landmark_table_) {
ActiveWitness aw(w);
typeVectorVertex simplex;
diff --git a/include/gudhi/Tangential_complex.h b/include/gudhi/Tangential_complex.h
index cfc82eb1..a5cefd6a 100644
--- a/include/gudhi/Tangential_complex.h
+++ b/include/gudhi/Tangential_complex.h
@@ -1059,7 +1059,6 @@ class Tangential_complex {
Triangulation &triangulation, bool verbose = false) {
int tangent_space_dim = tsb.dimension();
const Tr_traits &local_tr_traits = triangulation.geom_traits();
- Tr_vertex_handle center_vertex;
// Kernel functor & objects
typename K::Squared_distance_d k_sqdist = m_k.squared_distance_d_object();
@@ -1084,7 +1083,7 @@ class Tangential_complex {
proj_wp = project_point_and_compute_weight(wp, tsb, local_tr_traits);
}
- center_vertex = triangulation.insert(proj_wp);
+ Tr_vertex_handle center_vertex = triangulation.insert(proj_wp);
center_vertex->data() = i;
if (verbose)
std::cerr << "* Inserted point #" << i << "\n";
@@ -1094,8 +1093,8 @@ class Tangential_complex {
std::size_t num_inserted_points = 1;
#endif
// const int NUM_NEIGHBORS = 150;
- // KNS_range ins_range = m_points_ds.query_k_nearest_neighbors(center_pt, NUM_NEIGHBORS);
- INS_range ins_range = m_points_ds.query_incremental_nearest_neighbors(center_pt);
+ // KNS_range ins_range = m_points_ds.k_nearest_neighbors(center_pt, NUM_NEIGHBORS);
+ INS_range ins_range = m_points_ds.incremental_nearest_neighbors(center_pt);
// While building the local triangulation, we keep the radius
// of the sphere "star sphere" centered at "center_vertex"
@@ -1204,7 +1203,7 @@ class Tangential_complex {
Point center_point = compute_perturbed_point(i);
// Among updated point, what is the closer from our center point?
std::size_t closest_pt_index =
- updated_pts_ds.query_k_nearest_neighbors(center_point, 1, false).begin()->first;
+ updated_pts_ds.k_nearest_neighbors(center_point, 1, false).begin()->first;
typename K::Construct_weighted_point_d k_constr_wp =
m_k.construct_weighted_point_d_object();
@@ -1316,11 +1315,10 @@ class Tangential_complex {
m_k.compute_coordinate_d_object();
#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM
- KNS_range kns_range = m_points_ds_for_tse.query_k_nearest_neighbors(
- p, num_pts_for_pca, false);
+ KNS_range kns_range = m_points_ds_for_tse.k_nearest_neighbors(p, num_pts_for_pca, false);
const Points &points_for_pca = m_points_for_tse;
#else
- KNS_range kns_range = m_points_ds.query_k_nearest_neighbors(p, num_pts_for_pca, false);
+ KNS_range kns_range = m_points_ds.k_nearest_neighbors(p, num_pts_for_pca, false);
const Points &points_for_pca = m_points;
#endif
@@ -1414,11 +1412,10 @@ class Tangential_complex {
const Point &p = m_points[*it_index];
#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM
- KNS_range kns_range = m_points_ds_for_tse.query_k_nearest_neighbors(
- p, num_pts_for_pca, false);
+ KNS_range kns_range = m_points_ds_for_tse.k_nearest_neighbors(p, num_pts_for_pca, false);
const Points &points_for_pca = m_points_for_tse;
#else
- KNS_range kns_range = m_points_ds.query_k_nearest_neighbors(p, num_pts_for_pca, false);
+ KNS_range kns_range = m_points_ds.k_nearest_neighbors(p, num_pts_for_pca, false);
const Points &points_for_pca = m_points;
#endif
@@ -1823,7 +1820,6 @@ class Tangential_complex {
bool is_inconsistent = false;
Star const& star = m_stars[tr_index];
- Tr_vertex_handle center_vh = m_triangulations[tr_index].center_vertex();
// For each incident simplex
Star::const_iterator it_inc_simplex = star.begin();
@@ -1950,7 +1946,6 @@ class Tangential_complex {
bool is_star_inconsistent = false;
Triangulation const& tr = it_tr->tr();
- Tr_vertex_handle center_vh = it_tr->center_vertex();
if (tr.current_dimension() < m_intrinsic_dim)
continue;
diff --git a/include/gudhi/Witness_complex.h b/include/gudhi/Witness_complex.h
index 63f03687..bcfe8484 100644
--- a/include/gudhi/Witness_complex.h
+++ b/include/gudhi/Witness_complex.h
@@ -106,10 +106,6 @@ class Witness_complex {
std::cerr << "Witness complex cannot create complex - squared relaxation parameter must be non-negative.\n";
return false;
}
- if (limit_dimension < 0) {
- std::cerr << "Witness complex cannot create complex - limit dimension must be non-negative.\n";
- return false;
- }
ActiveWitnessList active_witnesses;
Landmark_id k = 0; /* current dimension in iterative construction */
for (auto w : nearest_landmark_table_)
diff --git a/include/gudhi/pick_n_random_points.h b/include/gudhi/pick_n_random_points.h
index f0e3f1f1..8c90b6bf 100644
--- a/include/gudhi/pick_n_random_points.h
+++ b/include/gudhi/pick_n_random_points.h
@@ -52,7 +52,7 @@ typename OutputIterator>
void pick_n_random_points(Point_container const &points,
std::size_t final_size,
OutputIterator output_it) {
-#ifdef GUDHI_SUBS_PROFILING
+#ifdef GUDHI_SUBSAMPLING_PROFILING
Gudhi::Clock t;
#endif
@@ -72,7 +72,7 @@ void pick_n_random_points(Point_container const &points,
for (int l : landmarks)
*output_it++ = points[l];
-#ifdef GUDHI_SUBS_PROFILING
+#ifdef GUDHI_SUBSAMPLING_PROFILING
t.end();
std::cerr << "Random landmark choice took " << t.num_seconds()
<< " seconds." << std::endl;
diff --git a/include/gudhi/random_point_generators.h b/include/gudhi/random_point_generators.h
index 2ec465ef..9df77760 100644
--- a/include/gudhi/random_point_generators.h
+++ b/include/gudhi/random_point_generators.h
@@ -282,6 +282,38 @@ std::vector<typename Kernel::Point_d> generate_points_on_sphere_d(std::size_t nu
}
template <typename Kernel>
+std::vector<typename Kernel::Point_d> generate_points_in_ball_d(std::size_t num_points, int dim, double radius) {
+ typedef typename Kernel::Point_d Point;
+ Kernel k;
+ CGAL::Random rng;
+ CGAL::Random_points_in_ball_d<Point> generator(dim, radius);
+ std::vector<Point> points;
+ points.reserve(num_points);
+ for (std::size_t i = 0; i < num_points;) {
+ Point p = *generator++;
+ points.push_back(p);
+ ++i;
+ }
+ return points;
+}
+
+template <typename Kernel>
+std::vector<typename Kernel::Point_d> generate_points_in_cube_d(std::size_t num_points, int dim, double radius) {
+ typedef typename Kernel::Point_d Point;
+ Kernel k;
+ CGAL::Random rng;
+ CGAL::Random_points_in_cube_d<Point> generator(dim, radius);
+ std::vector<Point> points;
+ points.reserve(num_points);
+ for (std::size_t i = 0; i < num_points;) {
+ Point p = *generator++;
+ points.push_back(p);
+ ++i;
+ }
+ return points;
+}
+
+template <typename Kernel>
std::vector<typename Kernel::Point_d> generate_points_on_two_spheres_d(std::size_t num_points, int dim, double radius,
double distance_between_centers,
double radius_noise_percentage = 0.) {
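
The two new generators follow the pattern of generate_points_on_sphere_d, but sample the interior of a ball or of a cube through CGAL's Random_points_in_ball_d and Random_points_in_cube_d. A minimal sketch with a dynamic-dimension Epick_d kernel (the kernel choice and the parameter values are illustrative):

    #include <gudhi/random_point_generators.h>
    #include <CGAL/Epick_d.h>
    #include <iostream>
    #include <vector>

    int main() {
      typedef CGAL::Epick_d<CGAL::Dynamic_dimension_tag> K;
      typedef K::Point_d Point;

      // 100 points inside a ball of dimension 3 and radius 2, and 100 points inside a cube
      // parameterised by the same size value.
      std::vector<Point> in_ball = Gudhi::generate_points_in_ball_d<K>(100, 3, 2.0);
      std::vector<Point> in_cube = Gudhi::generate_points_in_cube_d<K>(100, 3, 2.0);
      std::cout << in_ball.size() << " + " << in_cube.size() << " points generated" << std::endl;
    }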
diff --git a/include/gudhi/reader_utils.h b/include/gudhi/reader_utils.h
index 97a87edd..90be4fc7 100644
--- a/include/gudhi/reader_utils.h
+++ b/include/gudhi/reader_utils.h
@@ -1,8 +1,8 @@
-/* This file is part of the Gudhi Library. The Gudhi library
- * (Geometric Understanding in Higher Dimensions) is a generic C++
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
* library for computational topology.
*
- * Author(s): Clement Maria, Pawel Dlotko
+ * Author(s): Clement Maria, Pawel Dlotko, Clement Jamin
*
* Copyright (C) 2014 INRIA
*
@@ -24,7 +24,9 @@
#define READER_UTILS_H_
#include <gudhi/graph_simplicial_complex.h>
+#include <gudhi/Debug_utils.h>
+#include <boost/function_output_iterator.hpp>
#include <boost/graph/adjacency_list.hpp>
#include <iostream>
@@ -34,6 +36,9 @@
#include <string>
#include <vector>
#include <utility> // for pair
+#include <tuple> // for std::make_tuple
+
+namespace Gudhi {
// Keep this file tag for Doxygen to parse the code, otherwise, functions are not documented.
// It is required for global functions and variables.
@@ -50,7 +55,7 @@
* X21 X22 ... X2d<br>
* etc<br>
*/
-inline void read_points(std::string file_name, std::vector< std::vector< double > > & points) {
+inline void read_points(std::string file_name, std::vector<std::vector<double>>& points) {
std::ifstream in_file(file_name.c_str(), std::ios::in);
if (!in_file.is_open()) {
std::cerr << "Unable to open file " << file_name << std::endl;
@@ -60,14 +65,13 @@ inline void read_points(std::string file_name, std::vector< std::vector< double
std::string line;
double x;
while (getline(in_file, line)) {
- std::vector< double > point;
+ std::vector<double> point;
std::istringstream iss(line);
while (iss >> x) {
point.push_back(x);
}
// Check for empty lines
- if (!point.empty())
- points.push_back(point);
+ if (!point.empty()) points.push_back(point);
}
in_file.close();
}
@@ -88,17 +92,20 @@ inline void read_points(std::string file_name, std::vector< std::vector< double
* Every simplex must appear exactly once.
* Simplices of dimension more than 1 are ignored.
*/
-template< typename Graph_t, typename Filtration_value, typename Vertex_handle >
+template <typename Graph_t, typename Filtration_value, typename Vertex_handle>
Graph_t read_graph(std::string file_name) {
std::ifstream in_(file_name.c_str(), std::ios::in);
if (!in_.is_open()) {
- std::cerr << "Unable to open file " << file_name << std::endl;
+ std::string error_str("read_graph - Unable to open file ");
+ error_str.append(file_name);
+ std::cerr << error_str << std::endl;
+ throw std::invalid_argument(error_str);
}
- typedef std::pair< Vertex_handle, Vertex_handle > Edge_t;
- std::vector< Edge_t > edges;
- std::vector< Filtration_value > edges_fil;
- std::map< Vertex_handle, Filtration_value > vertices;
+ typedef std::pair<Vertex_handle, Vertex_handle> Edge_t;
+ std::vector<Edge_t> edges;
+ std::vector<Filtration_value> edges_fil;
+ std::map<Vertex_handle, Filtration_value> vertices;
std::string line;
int dim;
@@ -108,8 +115,7 @@ Graph_t read_graph(std::string file_name) {
std::istringstream iss(line);
while (iss >> dim) {
switch (dim) {
- case 0:
- {
+ case 0: {
iss >> u;
iss >> fil;
vertices[u] = fil;
@@ -118,8 +124,7 @@ Graph_t read_graph(std::string file_name) {
}
break;
}
- case 1:
- {
+ case 1: {
iss >> u;
iss >> v;
iss >> fil;
@@ -127,16 +132,13 @@ Graph_t read_graph(std::string file_name) {
edges_fil.push_back(fil);
break;
}
- default:
- {
- break;
- }
+ default: { break; }
}
}
}
in_.close();
- if ((size_t) (max_h + 1) != vertices.size()) {
+ if ((size_t)(max_h + 1) != vertices.size()) {
std::cerr << "Error: vertices must be labeled from 0 to n-1 \n";
}
@@ -164,8 +166,8 @@ Graph_t read_graph(std::string file_name) {
* Every simplex must appear exactly once.
* Simplices of dimension more than 1 are ignored.
*/
-template< typename Vertex_handle, typename Filtration_value >
-bool read_simplex(std::istream & in_, std::vector< Vertex_handle > & simplex, Filtration_value & fil) {
+template <typename Vertex_handle, typename Filtration_value>
+bool read_simplex(std::istream& in_, std::vector<Vertex_handle>& simplex, Filtration_value& fil) {
int dim = 0;
if (!(in_ >> dim)) return false;
Vertex_handle v;
@@ -189,8 +191,8 @@ bool read_simplex(std::istream & in_, std::vector< Vertex_handle > & simplex, Fi
* The key of a simplex is its position in the filtration order and also the number of its row in the file.
* Dimi ki1 ki2 ... kiDimi Fili means that the ith simplex in the filtration has dimension Dimi, filtration value
* fil1 and simplices with key ki1 ... kiDimi in its boundary.*/
-template< typename Simplex_key, typename Filtration_value >
-bool read_hasse_simplex(std::istream & in_, std::vector< Simplex_key > & boundary, Filtration_value & fil) {
+template <typename Simplex_key, typename Filtration_value>
+bool read_hasse_simplex(std::istream& in_, std::vector<Simplex_key>& boundary, Filtration_value& fil) {
int dim;
if (!(in_ >> dim)) return false;
if (dim == 0) {
@@ -209,7 +211,7 @@ bool read_hasse_simplex(std::istream & in_, std::vector< Simplex_key > & boundar
/**
* @brief Read a lower triangular distance matrix from a csv file. We assume that the .csv stores the whole
* (square) matrix.
- *
+ *
* @author Pawel Dlotko
*
* Square matrix file format:<br>
@@ -226,13 +228,13 @@ bool read_hasse_simplex(std::istream & in_, std::vector< Simplex_key > & boundar
* Dj1;Dj2;...;Dj(j-1);<br>
*
**/
-template< typename Filtration_value >
-std::vector< std::vector< Filtration_value > > read_lower_triangular_matrix_from_csv_file(const std::string& filename,
- const char separator = ';') {
+template <typename Filtration_value>
+std::vector<std::vector<Filtration_value>> read_lower_triangular_matrix_from_csv_file(const std::string& filename,
+ const char separator = ';') {
#ifdef DEBUG_TRACES
std::cout << "Using procedure read_lower_triangular_matrix_from_csv_file \n";
#endif // DEBUG_TRACES
- std::vector< std::vector< Filtration_value > > result;
+ std::vector<std::vector<Filtration_value>> result;
std::ifstream in;
in.open(filename.c_str());
if (!in.is_open()) {
@@ -243,7 +245,7 @@ std::vector< std::vector< Filtration_value > > read_lower_triangular_matrix_from
// the first line is empty, so we ignore it:
std::getline(in, line);
- std::vector< Filtration_value > values_in_this_line;
+ std::vector<Filtration_value> values_in_this_line;
result.push_back(values_in_this_line);
int number_of_line = 0;
@@ -251,11 +253,10 @@ std::vector< std::vector< Filtration_value > > read_lower_triangular_matrix_from
// first, read the file line by line to a string:
while (std::getline(in, line)) {
// if line is empty, break
- if (line.size() == 0)
- break;
+ if (line.size() == 0) break;
// if the last element of a string is comma:
- if (line[ line.size() - 1 ] == separator) {
+ if (line[line.size() - 1] == separator) {
// then shrink the string by one
line.pop_back();
}
@@ -268,7 +269,7 @@ std::vector< std::vector< Filtration_value > > read_lower_triangular_matrix_from
// and now read the doubles.
int number_of_entry = 0;
- std::vector< Filtration_value > values_in_this_line;
+ std::vector<Filtration_value> values_in_this_line;
while (iss.good()) {
double entry;
iss >> entry;
@@ -277,7 +278,7 @@ std::vector< std::vector< Filtration_value > > read_lower_triangular_matrix_from
}
++number_of_entry;
}
- if (!values_in_this_line.empty())result.push_back(values_in_this_line);
+ if (!values_in_this_line.empty()) result.push_back(values_in_this_line);
++number_of_line;
}
in.close();
@@ -295,4 +296,74 @@ std::vector< std::vector< Filtration_value > > read_lower_triangular_matrix_from
return result;
} // read_lower_triangular_matrix_from_csv_file
+/**
+Reads a file containing persistence intervals.
+Each line might contain 2, 3 or 4 values: [[field] dimension] birth death
+The output iterator `out` is used this way: `*out++ = std::make_tuple(dim, birth, death);`
+where `dim` is an `int`, `birth` a `double`, and `death` a `double`.
+Note: the function does not check that birth <= death.
+**/
+template <typename OutputIterator>
+void read_persistence_intervals_and_dimension(std::string const& filename, OutputIterator out) {
+ std::ifstream in(filename);
+ if (!in.is_open()) {
+ std::string error_str("read_persistence_intervals_and_dimension - Unable to open file ");
+ error_str.append(filename);
+ std::cerr << error_str << std::endl;
+ throw std::invalid_argument(error_str);
+ }
+
+ while (!in.eof()) {
+ std::string line;
+ getline(in, line);
+ if (line.length() != 0 && line[0] != '#') {
+ double numbers[4];
+ int n = sscanf(line.c_str(), "%lf %lf %lf %lf", &numbers[0], &numbers[1], &numbers[2], &numbers[3]);
+ if (n >= 2) {
+ int dim = (n >= 3 ? static_cast<int>(numbers[n - 3]) : -1);
+ *out++ = std::make_tuple(dim, numbers[n - 2], numbers[n - 1]);
+ }
+ }
+ }
+}
+
+/**
+Reads a file containing persistence intervals.
+Each line might contain 2, 3 or 4 values: [[field] dimension] birth death
+The return value is an `std::map<dim, std::vector<std::pair<birth, death>>>`
+where `dim` is an `int`, `birth` a `double`, and `death` a `double`.
+Note: the function does not check that birth <= death.
+**/
+inline std::map<int, std::vector<std::pair<double, double>>> read_persistence_intervals_grouped_by_dimension(
+ std::string const& filename) {
+ std::map<int, std::vector<std::pair<double, double>>> ret;
+ read_persistence_intervals_and_dimension(
+ filename, boost::make_function_output_iterator([&ret](std::tuple<int, double, double> t) {
+ ret[get<0>(t)].push_back(std::make_pair(get<1>(t), get<2>(t)));
+ }));
+ return ret;
+}
+
+/**
+Reads a file containing persistence intervals.
+Each line might contain 2, 3 or 4 values: [[field] dimension] birth death
+If `only_this_dim` = -1, dimension is ignored and all lines are returned.
+If `only_this_dim` is >= 0, only the lines where dimension = `only_this_dim`
+(or where dimension is not specified) are returned.
+The return value is an `std::vector<std::pair<birth, death>>`
+where `birth` and `death` are `double`.
+Note: the function does not check that birth <= death.
+**/
+inline std::vector<std::pair<double, double>> read_persistence_intervals_in_dimension(std::string const& filename,
+ int only_this_dim = -1) {
+ std::vector<std::pair<double, double>> ret;
+ read_persistence_intervals_and_dimension(
+ filename, boost::make_function_output_iterator([only_this_dim, &ret](std::tuple<int, double, double> t) {
+ if (only_this_dim == get<0>(t) || only_this_dim == -1) ret.emplace_back(get<1>(t), get<2>(t));
+ }));
+ return ret;
+}
+
+} // namespace Gudhi
+
#endif // READER_UTILS_H_
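
The three new readers share the same parsing loop: the first streams (dimension, birth, death) tuples to an output iterator, and the other two are convenience wrappers built on top of it. A minimal usage sketch, where "diagram.pers" is a hypothetical file with one "[[field] dimension] birth death" line per interval:

    #include <gudhi/reader_utils.h>
    #include <iostream>

    int main() {
      // Intervals grouped by dimension: std::map<int, std::vector<std::pair<double, double>>>.
      auto by_dim = Gudhi::read_persistence_intervals_grouped_by_dimension("diagram.pers");
      for (auto const& group : by_dim)
        std::cout << group.second.size() << " intervals in dimension " << group.first << std::endl;

      // Only the intervals recorded with dimension equal to 1.
      auto dim1 = Gudhi::read_persistence_intervals_in_dimension("diagram.pers", 1);
      for (auto const& interval : dim1)
        std::cout << interval.first << " " << interval.second << std::endl;
    }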
diff --git a/include/gudhi/sparsify_point_set.h b/include/gudhi/sparsify_point_set.h
index 507f8c79..7d3b97fb 100644
--- a/include/gudhi/sparsify_point_set.h
+++ b/include/gudhi/sparsify_point_set.h
@@ -83,7 +83,7 @@ sparsify_point_set(
*output_it++ = *it_pt;
- auto ins_range = points_ds.query_incremental_nearest_neighbors(*it_pt);
+ auto ins_range = points_ds.incremental_nearest_neighbors(*it_pt);
// If another point Q is closer that min_squared_dist, mark Q to be dropped
for (auto const& neighbor : ins_range) {
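
sparsify_point_set only picks up the renamed incremental_nearest_neighbors here; its own interface is unchanged. A minimal usage sketch, assuming an Epick_d kernel and an illustrative squared-distance threshold:

    #include <gudhi/sparsify_point_set.h>
    #include <CGAL/Epick_d.h>
    #include <iostream>
    #include <iterator>
    #include <vector>

    int main() {
      typedef CGAL::Epick_d<CGAL::Dimension_tag<2>> K;
      typedef K::Point_d Point;

      K k;
      std::vector<Point> points = {Point(0., 0.), Point(0.05, 0.), Point(1., 0.), Point(1., 1.)};
      std::vector<Point> sparse;
      // Greedily keeps points so that no two kept points are closer than the given squared distance.
      Gudhi::subsampling::sparsify_point_set(k, points, 0.25, std::back_inserter(sparse));
      std::cout << sparse.size() << " points kept" << std::endl;
    }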
diff --git a/utilities/common/CMakeLists.txt b/utilities/common/CMakeLists.txt
new file mode 100644
index 00000000..b3e4b436
--- /dev/null
+++ b/utilities/common/CMakeLists.txt
@@ -0,0 +1,17 @@
+cmake_minimum_required(VERSION 2.6)
+project(off_file_from_shape_generator)
+
+if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0)
+ add_executable ( off_file_from_shape_generator off_file_from_shape_generator.cpp )
+ add_test(NAME off_file_from_shape_generator_on_sphere_1000_3_15.2 COMMAND $<TARGET_FILE:off_file_from_shape_generator>
+ "on" "sphere" "onSphere.off" "1000" "3" "15.2")
+ add_test(NAME off_file_from_shape_generator_in_sphere_100_2 COMMAND $<TARGET_FILE:off_file_from_shape_generator>
+ "in" "sphere" "inSphere.off" "100" "2")
+
+ # on cube is not available in CGAL
+ add_test(NAME off_file_from_shape_generator_in_cube_10000_3_5.8 COMMAND $<TARGET_FILE:off_file_from_shape_generator>
+ "in" "cube" "inCube.off" "10000" "3" "5.8")
+
+ install(TARGETS off_file_from_shape_generator DESTINATION bin)
+
+endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0)
diff --git a/utilities/common/README b/utilities/common/README
new file mode 100644
index 00000000..dc841521
--- /dev/null
+++ b/utilities/common/README
@@ -0,0 +1,19 @@
+======================= off_file_from_shape_generator ==================================
+
+Examples of use:
+
+*** on|in sphere|cube|curve|torus|klein generator
+
+./off_file_from_shape_generator on sphere onSphere.off 1000 3 15.2
+
+ => generates an onSphere.off file with 1000 random points on a sphere of dimension 3 and radius 15.2
+
+./off_file_from_shape_generator in sphere inSphere.off 100 2
+
+ => generates an inSphere.off file with 100 random points inside a sphere of dimension 2 (a disc) and radius 1.0 (default)
+
+./off_file_from_shape_generator in cube inCube.off 10000 3 5.8
+
+ => generates an inCube.off file with 10000 random points in a cube of dimension 3 and side 5.8
+
+!! Warning: generating points on a cube is not available !!
diff --git a/data/points/generator/hypergenerator.cpp b/utilities/common/off_file_from_shape_generator.cpp
index 5831de18..afcd558c 100644
--- a/data/points/generator/hypergenerator.cpp
+++ b/utilities/common/off_file_from_shape_generator.cpp
@@ -20,8 +20,9 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
+#include <gudhi/random_point_generators.h>
+
#include <CGAL/Epick_d.h>
-#include <CGAL/point_generators_d.h>
#include <CGAL/algorithm.h>
#include <CGAL/assertions.h>
@@ -76,10 +77,20 @@ int main(int argc, char **argv) {
usage(argv[0]);
}
- bool sphere = false;
+ enum class Data_shape { sphere, cube, curve, torus, klein, undefined};
+
+ Data_shape shape = Data_shape::undefined;
if (memcmp(argv[2], "sphere", sizeof("sphere")) == 0) {
- sphere = true;
- } else if (memcmp(argv[2], "cube", sizeof("cube")) != 0) {
+ shape = Data_shape::sphere;
+ } else if (memcmp(argv[2], "cube", sizeof("cube")) == 0) {
+ shape = Data_shape::cube;
+ } else if (memcmp(argv[2], "curve", sizeof("curve")) == 0) {
+ shape = Data_shape::curve;
+ } else if (memcmp(argv[2], "torus", sizeof("torus")) == 0) {
+ shape = Data_shape::torus;
+ } else if (memcmp(argv[2], "klein", sizeof("klein")) == 0) {
+ shape = Data_shape::klein;
+ } else {
std::cerr << "Error: " << argv[2] << " is not correct" << std::endl;
usage(argv[0]);
}
@@ -94,25 +105,70 @@ int main(int argc, char **argv) {
}
if (diagram_out.is_open()) {
- // Instanciate a random point generator
- CGAL::Random rng(0);
// Generate "points_number" random points in a vector
std::vector<Point> points;
if (in) {
- if (sphere) {
- CGAL::Random_points_in_ball_d<Point> rand_it(dimension, radius, rng);
- CGAL::cpp11::copy_n(rand_it, points_number, std::back_inserter(points));
- } else {
- CGAL::Random_points_in_cube_d<Point> rand_it(dimension, radius, rng);
- CGAL::cpp11::copy_n(rand_it, points_number, std::back_inserter(points));
+ switch (shape) {
+ case Data_shape::sphere:
+ points = Gudhi::generate_points_in_ball_d<K>(points_number, dimension, radius);
+ break;
+ case Data_shape::cube:
+          points = Gudhi::generate_points_in_cube_d<K>(points_number, dimension, radius);
+ break;
+ case Data_shape::curve:
+ std::cerr << "Sorry: in curve is not available" << std::endl;
+ usage(argv[0]);
+ break;
+ case Data_shape::torus:
+ std::cerr << "Sorry: in torus is not available" << std::endl;
+ usage(argv[0]);
+ break;
+ case Data_shape::klein:
+ std::cerr << "Sorry: in klein is not available" << std::endl;
+ usage(argv[0]);
+ break;
+ default:
+ usage(argv[0]);
+ break;
}
} else { // means "on"
- if (sphere) {
- CGAL::Random_points_on_sphere_d<Point> rand_it(dimension, radius, rng);
- CGAL::cpp11::copy_n(rand_it, points_number, std::back_inserter(points));
- } else {
- std::cerr << "Sorry: on cube is not available" << std::endl;
- usage(argv[0]);
+ switch (shape) {
+ case Data_shape::sphere:
+ points = Gudhi::generate_points_on_sphere_d<K>(points_number, dimension, radius);
+ break;
+ case Data_shape::cube:
+ std::cerr << "Sorry: on cube is not available" << std::endl;
+ usage(argv[0]);
+ break;
+ case Data_shape::curve:
+ points = Gudhi::generate_points_on_moment_curve<K>(points_number, dimension, -radius/2., radius/2.);
+ break;
+ case Data_shape::torus:
+ if (dimension == 3)
+ points = Gudhi::generate_points_on_torus_3D<K>(points_number, dimension, radius, radius/2.);
+ else
+ points = Gudhi::generate_points_on_torus_d<K>(points_number, dimension, true);
+ break;
+ case Data_shape::klein:
+ switch (dimension) {
+ case 3:
+ points = Gudhi::generate_points_on_klein_bottle_3D<K>(points_number, radius, radius/2., true);
+ break;
+ case 4:
+ points = Gudhi::generate_points_on_klein_bottle_4D<K>(points_number, radius, radius/2., 0., true);
+ break;
+ case 5:
+ points = Gudhi::generate_points_on_klein_bottle_variant_5D<K>(points_number, radius, radius/2., true);
+ break;
+ default:
+ std::cerr << "Sorry: on klein is only available for dimension 3, 4 and 5" << std::endl;
+ usage(argv[0]);
+ break;
+ }
+ break;
+ default:
+ usage(argv[0]);
+ break;
}
}