Diffstat (limited to 'src/python')
-rw-r--r--  src/python/CMakeLists.txt  432
-rw-r--r--  src/python/CONVENTIONS  9
-rw-r--r--  src/python/README  3
-rw-r--r--  src/python/doc/_templates/layout.html  276
-rw-r--r--  src/python/doc/alpha_complex_ref.rst  14
-rw-r--r--  src/python/doc/alpha_complex_sum.inc  20
-rw-r--r--  src/python/doc/alpha_complex_user.rst  211
-rw-r--r--  src/python/doc/bottleneck_distance_sum.inc  14
-rw-r--r--  src/python/doc/bottleneck_distance_user.rst  67
-rw-r--r--  src/python/doc/citation.rst  19
-rwxr-xr-x  src/python/doc/conf.py  203
-rw-r--r--  src/python/doc/cubical_complex_ref.rst  13
-rw-r--r--  src/python/doc/cubical_complex_sum.inc  14
-rw-r--r--  src/python/doc/cubical_complex_user.rst  168
-rw-r--r--  src/python/doc/euclidean_strong_witness_complex_ref.rst  14
-rw-r--r--  src/python/doc/euclidean_witness_complex_ref.rst  14
-rw-r--r--  src/python/doc/examples.rst  30
-rw-r--r--  src/python/doc/fileformats.rst  127
-rw-r--r--  src/python/doc/img/graphical_tools_representation.png  bin 0 -> 10846 bytes
-rw-r--r--  src/python/doc/index.rst  86
-rw-r--r--  src/python/doc/installation.rst  242
-rw-r--r--  src/python/doc/nerve_gic_complex_ref.rst  14
-rw-r--r--  src/python/doc/nerve_gic_complex_sum.inc  16
-rw-r--r--  src/python/doc/nerve_gic_complex_user.rst  315
-rw-r--r--  src/python/doc/periodic_cubical_complex_ref.rst  13
-rw-r--r--  src/python/doc/persistence_graphical_tools_ref.rst  11
-rw-r--r--  src/python/doc/persistence_graphical_tools_sum.inc  14
-rw-r--r--  src/python/doc/persistence_graphical_tools_user.rst  73
-rw-r--r--  src/python/doc/persistent_cohomology_sum.inc  26
-rw-r--r--  src/python/doc/persistent_cohomology_user.rst  120
-rwxr-xr-x  src/python/doc/python3-sphinx-build.py  11
-rw-r--r--  src/python/doc/reader_utils_ref.rst  15
-rw-r--r--  src/python/doc/rips_complex_ref.rst  14
-rw-r--r--  src/python/doc/rips_complex_sum.inc  16
-rw-r--r--  src/python/doc/rips_complex_user.rst  347
-rw-r--r--  src/python/doc/simplex_tree_ref.rst  14
-rw-r--r--  src/python/doc/simplex_tree_sum.inc  13
-rw-r--r--  src/python/doc/simplex_tree_user.rst  72
-rw-r--r--  src/python/doc/strong_witness_complex_ref.rst  14
-rw-r--r--  src/python/doc/tangential_complex_ref.rst  14
-rw-r--r--  src/python/doc/tangential_complex_sum.inc  14
-rw-r--r--  src/python/doc/tangential_complex_user.rst  204
-rw-r--r--  src/python/doc/todos.rst  9
-rw-r--r--  src/python/doc/witness_complex_ref.rst  14
-rw-r--r--  src/python/doc/witness_complex_sum.inc  18
-rw-r--r--  src/python/doc/witness_complex_user.rst  135
-rwxr-xr-x  src/python/example/alpha_complex_diagram_persistence_from_off_file_example.py  68
-rwxr-xr-x  src/python/example/alpha_complex_from_points_example.py  55
-rwxr-xr-x  src/python/example/alpha_rips_persistence_bottleneck_distance.py  105
-rwxr-xr-x  src/python/example/bottleneck_basic_example.py  37
-rwxr-xr-x  src/python/example/coordinate_graph_induced_complex.py  68
-rwxr-xr-x  src/python/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py  83
-rwxr-xr-x  src/python/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py  82
-rwxr-xr-x  src/python/example/functional_graph_induced_complex.py  69
-rwxr-xr-x  src/python/example/gudhi_graphical_tools_example.py  43
-rwxr-xr-x  src/python/example/nerve_of_a_covering.py  70
-rwxr-xr-x  src/python/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py  74
-rwxr-xr-x  src/python/example/random_cubical_complex_persistence_example.py  49
-rwxr-xr-x  src/python/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py  87
-rwxr-xr-x  src/python/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py  63
-rwxr-xr-x  src/python/example/rips_complex_diagram_persistence_from_off_file_example.py  72
-rwxr-xr-x  src/python/example/rips_complex_from_points_example.py  27
-rwxr-xr-x  src/python/example/rips_persistence_diagram.py  30
-rwxr-xr-x  src/python/example/simplex_tree_example.py  51
-rwxr-xr-x  src/python/example/sparse_rips_persistence_diagram.py  32
-rwxr-xr-x  src/python/example/tangential_complex_plain_homology_from_off_file_example.py  64
-rwxr-xr-x  src/python/example/voronoi_graph_induced_complex.py  65
-rwxr-xr-x  src/python/example/witness_complex_from_nearest_landmark_table.py  36
-rw-r--r--  src/python/gudhi/__init__.py  1
-rw-r--r--  src/python/gudhi/__init__.py.in  40
-rw-r--r--  src/python/gudhi/alpha_complex.pyx  116
-rw-r--r--  src/python/gudhi/bottleneck.pyx  49
-rw-r--r--  src/python/gudhi/cubical_complex.pyx  188
-rw-r--r--  src/python/gudhi/euclidean_strong_witness_complex.pyx  92
-rw-r--r--  src/python/gudhi/euclidean_witness_complex.pyx  92
-rw-r--r--  src/python/gudhi/nerve_gic.pyx  412
-rw-r--r--  src/python/gudhi/off_reader.pyx  38
-rw-r--r--  src/python/gudhi/periodic_cubical_complex.pyx  190
-rw-r--r--  src/python/gudhi/persistence_graphical_tools.py  423
-rw-r--r--  src/python/gudhi/reader_utils.pyx  87
-rw-r--r--  src/python/gudhi/rips_complex.pyx  103
-rw-r--r--  src/python/gudhi/simplex_tree.pxd  56
-rw-r--r--  src/python/gudhi/simplex_tree.pyx  508
-rw-r--r--  src/python/gudhi/strong_witness_complex.pyx  78
-rw-r--r--  src/python/gudhi/subsampling.pyx  130
-rw-r--r--  src/python/gudhi/tangential_complex.pyx  173
-rw-r--r--  src/python/gudhi/witness_complex.pyx  78
-rw-r--r--  src/python/include/Alpha_complex_interface.h  70
-rw-r--r--  src/python/include/Bottleneck_distance_interface.h  41
-rw-r--r--  src/python/include/Cubical_complex_interface.h  50
-rw-r--r--  src/python/include/Euclidean_strong_witness_complex_interface.h  81
-rw-r--r--  src/python/include/Euclidean_witness_complex_interface.h  80
-rw-r--r--  src/python/include/Nerve_gic_interface.h  49
-rw-r--r--  src/python/include/Off_reader_interface.h  30
-rw-r--r--  src/python/include/Persistent_cohomology_interface.h  109
-rw-r--r--  src/python/include/Reader_utils_interface.h  44
-rw-r--r--  src/python/include/Rips_complex_interface.h  70
-rw-r--r--  src/python/include/Simplex_tree_interface.h  142
-rw-r--r--  src/python/include/Strong_witness_complex_interface.h  61
-rw-r--r--  src/python/include/Subsampling_interface.h  107
-rw-r--r--  src/python/include/Tangential_complex_interface.h  109
-rw-r--r--  src/python/include/Witness_complex_interface.h  62
-rw-r--r--  src/python/setup.py.in  53
-rwxr-xr-x  src/python/test/test_alpha_complex.py  90
-rwxr-xr-x  src/python/test/test_bottleneck_distance.py  23
-rwxr-xr-x  src/python/test/test_cover_complex.py  85
-rwxr-xr-x  src/python/test/test_cubical_complex.py  98
-rwxr-xr-x  src/python/test/test_euclidean_witness_complex.py  95
-rwxr-xr-x  src/python/test/test_reader_utils.py  126
-rwxr-xr-x  src/python/test/test_rips_complex.py  133
-rwxr-xr-x  src/python/test/test_simplex_tree.py  250
-rwxr-xr-x  src/python/test/test_subsampling.py  179
-rwxr-xr-x  src/python/test/test_tangential_complex.py  55
-rwxr-xr-x  src/python/test/test_witness_complex.py  62
114 files changed, 10040 insertions, 0 deletions
diff --git a/src/python/CMakeLists.txt b/src/python/CMakeLists.txt
new file mode 100644
index 00000000..9e128d30
--- /dev/null
+++ b/src/python/CMakeLists.txt
@@ -0,0 +1,432 @@
+project(Cython)
+
+function( add_GUDHI_PYTHON_lib THE_LIB )
+ if(EXISTS ${THE_LIB})
+ get_filename_component(THE_LIB_FILE_NAME ${THE_LIB} NAME_WE)
+ if(WIN32)
+ message("++ ${THE_LIB} => THE_LIB_FILE_NAME = ${THE_LIB_FILE_NAME}")
+ set(GUDHI_PYTHON_LIBRARIES "${GUDHI_PYTHON_LIBRARIES}'${THE_LIB_FILE_NAME}', " PARENT_SCOPE)
+ else(WIN32)
+ STRING(REGEX REPLACE "lib" "" UNIX_LIB_FILE_NAME ${THE_LIB_FILE_NAME})
+ message("++ ${THE_LIB} => UNIX_LIB_FILE_NAME = ${UNIX_LIB_FILE_NAME}")
+ set(GUDHI_PYTHON_LIBRARIES "${GUDHI_PYTHON_LIBRARIES}'${UNIX_LIB_FILE_NAME}', " PARENT_SCOPE)
+ endif(WIN32)
+ endif(EXISTS ${THE_LIB})
+endfunction( add_GUDHI_PYTHON_lib )
+
+# THE_TEST is the python test file name (without the .py extension) containing the test functions
+function( add_gudhi_py_test THE_TEST )
+ if(PYTEST_FOUND)
+ # use ${PYTHON_EXECUTABLE} -B, otherwise a __pycache__ directory is created in sources by python
+ # use py.test no cache provider, otherwise a .cache file is created in sources by py.test
+ add_test(NAME ${THE_TEST}_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${PYTHON_EXECUTABLE} -B -m pytest -p no:cacheprovider ${CMAKE_CURRENT_SOURCE_DIR}/test/${THE_TEST}.py)
+ endif()
+endfunction( add_gudhi_py_test )
+
+# Set gudhi.__debug_info__
+# WARNING : to be done before setup.py.in configure_file
+function( add_gudhi_debug_info DEBUG_INFO )
+ set(GUDHI_PYTHON_DEBUG_INFO "${GUDHI_PYTHON_DEBUG_INFO} \"${DEBUG_INFO}\\n\" \\\n" PARENT_SCOPE)
+endfunction( add_gudhi_debug_info )
+
+if(PYTHONINTERP_FOUND)
+ if(CYTHON_FOUND)
+ set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'off_reader', ")
+ set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'simplex_tree', ")
+ set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'rips_complex', ")
+ set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'cubical_complex', ")
+ set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'periodic_cubical_complex', ")
+ set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'persistence_graphical_tools', ")
+ set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'reader_utils', ")
+ set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'witness_complex', ")
+ set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'strong_witness_complex', ")
+ set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'bottleneck', ")
+ set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'nerve_gic', ")
+ set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'subsampling', ")
+ set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'tangential_complex', ")
+ set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'alpha_complex', ")
+ set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'euclidean_witness_complex', ")
+ set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'euclidean_strong_witness_complex', ")
+
+ add_gudhi_debug_info("Python version ${PYTHON_VERSION_STRING}")
+ add_gudhi_debug_info("Cython version ${CYTHON_VERSION}")
+ if(PYTEST_FOUND)
+ add_gudhi_debug_info("Pytest version ${PYTEST_VERSION}")
+ endif()
+ if(MATPLOTLIB_FOUND)
+ add_gudhi_debug_info("Matplotlib version ${MATPLOTLIB_VERSION}")
+ endif()
+ if(NUMPY_FOUND)
+ add_gudhi_debug_info("Numpy version ${NUMPY_VERSION}")
+ endif()
+ if(SCIPY_FOUND)
+ add_gudhi_debug_info("Scipy version ${SCIPY_VERSION}")
+ endif()
+
+ set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-DBOOST_RESULT_OF_USE_DECLTYPE', ")
+ set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-DBOOST_ALL_NO_LIB', ")
+ set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-DBOOST_SYSTEM_NO_DEPRECATED', ")
+
+ # Gudhi and CGAL compilation option
+ if(MSVC)
+ set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'/fp:strict', ")
+ else(MSVC)
+ set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-std=c++11', ")
+ endif(MSVC)
+ if(CMAKE_COMPILER_IS_GNUCXX)
+ set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-frounding-math', ")
+ endif(CMAKE_COMPILER_IS_GNUCXX)
+ if (CMAKE_CXX_COMPILER_ID MATCHES Intel)
+ set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-fp-model strict', ")
+ endif(CMAKE_CXX_COMPILER_ID MATCHES Intel)
+ if (DEBUG_TRACES)
+ # For programs to be more verbose
+ set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-DDEBUG_TRACES', ")
+ endif()
+
+ if (EIGEN3_FOUND)
+ add_gudhi_debug_info("Eigen3 version ${EIGEN3_VERSION}")
+ # No problem, even if CGAL is not found
+ set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_EIGEN3_ENABLED', ")
+ endif (EIGEN3_FOUND)
+
+ set(GUDHI_PYTHON_MODULES_TO_COMPILE "${GUDHI_PYTHON_MODULES_TO_COMPILE}'off_reader', ")
+ set(GUDHI_PYTHON_MODULES_TO_COMPILE "${GUDHI_PYTHON_MODULES_TO_COMPILE}'simplex_tree', ")
+ set(GUDHI_PYTHON_MODULES_TO_COMPILE "${GUDHI_PYTHON_MODULES_TO_COMPILE}'rips_complex', ")
+ set(GUDHI_PYTHON_MODULES_TO_COMPILE "${GUDHI_PYTHON_MODULES_TO_COMPILE}'cubical_complex', ")
+ set(GUDHI_PYTHON_MODULES_TO_COMPILE "${GUDHI_PYTHON_MODULES_TO_COMPILE}'periodic_cubical_complex', ")
+ set(GUDHI_PYTHON_MODULES_TO_COMPILE "${GUDHI_PYTHON_MODULES_TO_COMPILE}'reader_utils', ")
+ set(GUDHI_PYTHON_MODULES_TO_COMPILE "${GUDHI_PYTHON_MODULES_TO_COMPILE}'witness_complex', ")
+ set(GUDHI_PYTHON_MODULES_TO_COMPILE "${GUDHI_PYTHON_MODULES_TO_COMPILE}'strong_witness_complex', ")
+ if (NOT CGAL_VERSION VERSION_LESS 4.11.0)
+ set(GUDHI_PYTHON_MODULES_TO_COMPILE "${GUDHI_PYTHON_MODULES_TO_COMPILE}'bottleneck', ")
+ set(GUDHI_PYTHON_MODULES_TO_COMPILE "${GUDHI_PYTHON_MODULES_TO_COMPILE}'nerve_gic', ")
+ endif ()
+ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
+ set(GUDHI_PYTHON_MODULES_TO_COMPILE "${GUDHI_PYTHON_MODULES_TO_COMPILE}'subsampling', ")
+ set(GUDHI_PYTHON_MODULES_TO_COMPILE "${GUDHI_PYTHON_MODULES_TO_COMPILE}'tangential_complex', ")
+ set(GUDHI_PYTHON_MODULES_TO_COMPILE "${GUDHI_PYTHON_MODULES_TO_COMPILE}'alpha_complex', ")
+ set(GUDHI_PYTHON_MODULES_TO_COMPILE "${GUDHI_PYTHON_MODULES_TO_COMPILE}'euclidean_witness_complex', ")
+ set(GUDHI_PYTHON_MODULES_TO_COMPILE "${GUDHI_PYTHON_MODULES_TO_COMPILE}'euclidean_strong_witness_complex', ")
+ endif ()
+
+ if(CGAL_FOUND)
+ can_cgal_use_cxx11_thread_local()
+ if (NOT CGAL_CAN_USE_CXX11_THREAD_LOCAL_RESULT)
+ if(CMAKE_BUILD_TYPE MATCHES Debug)
+ add_GUDHI_PYTHON_lib("${Boost_THREAD_LIBRARY_DEBUG}")
+ else()
+ add_GUDHI_PYTHON_lib("${Boost_THREAD_LIBRARY_RELEASE}")
+ endif()
+ message("** Add Boost ${Boost_LIBRARY_DIRS}")
+ set(GUDHI_PYTHON_LIBRARY_DIRS "${GUDHI_PYTHON_LIBRARY_DIRS}'${Boost_LIBRARY_DIRS}', ")
+ endif()
+ # Add CGAL compilation args
+ if(CGAL_HEADER_ONLY)
+ add_gudhi_debug_info("CGAL header only version ${CGAL_VERSION}")
+ set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_HEADER_ONLY', ")
+ else(CGAL_HEADER_ONLY)
+ add_gudhi_debug_info("CGAL version ${CGAL_VERSION}")
+ add_GUDHI_PYTHON_lib("${CGAL_LIBRARY}")
+ set(GUDHI_PYTHON_LIBRARY_DIRS "${GUDHI_PYTHON_LIBRARY_DIRS}'${CGAL_LIBRARIES_DIR}', ")
+ message("** Add CGAL ${CGAL_LIBRARIES_DIR}")
+ # If CGAL is not header only, the CGAL library may link with Boost system.
+ if(CMAKE_BUILD_TYPE MATCHES Debug)
+ add_GUDHI_PYTHON_lib("${Boost_SYSTEM_LIBRARY_DEBUG}")
+ else()
+ add_GUDHI_PYTHON_lib("${Boost_SYSTEM_LIBRARY_RELEASE}")
+ endif()
+ set(GUDHI_PYTHON_LIBRARY_DIRS "${GUDHI_PYTHON_LIBRARY_DIRS}'${Boost_LIBRARY_DIRS}', ")
+ message("** Add Boost ${Boost_LIBRARY_DIRS}")
+ endif(CGAL_HEADER_ONLY)
+ # GMP and GMPXX are not required, but if present, CGAL will link with them.
+ if(GMP_FOUND)
+ add_gudhi_debug_info("GMP_LIBRARIES = ${GMP_LIBRARIES}")
+ set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_USE_GMP', ")
+ add_GUDHI_PYTHON_lib("${GMP_LIBRARIES}")
+ set(GUDHI_PYTHON_LIBRARY_DIRS "${GUDHI_PYTHON_LIBRARY_DIRS}'${GMP_LIBRARIES_DIR}', ")
+ message("** Add gmp ${GMP_LIBRARIES_DIR}")
+ if(GMPXX_FOUND)
+ add_gudhi_debug_info("GMPXX_LIBRARIES = ${GMPXX_LIBRARIES}")
+ set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_USE_GMPXX', ")
+ add_GUDHI_PYTHON_lib("${GMPXX_LIBRARIES}")
+ set(GUDHI_PYTHON_LIBRARY_DIRS "${GUDHI_PYTHON_LIBRARY_DIRS}'${GMPXX_LIBRARIES_DIR}', ")
+ message("** Add gmpxx ${GMPXX_LIBRARIES_DIR}")
+ endif(GMPXX_FOUND)
+ endif(GMP_FOUND)
+ endif(CGAL_FOUND)
+
+ # Specific for Mac
+ if (${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
+ set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-mmacosx-version-min=10.12', ")
+ set(GUDHI_PYTHON_EXTRA_LINK_ARGS "${GUDHI_PYTHON_EXTRA_LINK_ARGS}'-mmacosx-version-min=10.12', ")
+ endif(${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
+
+ # Loop on INCLUDE_DIRECTORIES PROPERTY
+ get_property(GUDHI_INCLUDE_DIRECTORIES DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} PROPERTY INCLUDE_DIRECTORIES)
+ foreach(GUDHI_INCLUDE_DIRECTORY ${GUDHI_INCLUDE_DIRECTORIES})
+ set(GUDHI_PYTHON_INCLUDE_DIRS "${GUDHI_PYTHON_INCLUDE_DIRS}'${GUDHI_INCLUDE_DIRECTORY}', ")
+ endforeach()
+ set(GUDHI_PYTHON_INCLUDE_DIRS "${GUDHI_PYTHON_INCLUDE_DIRS}'${CMAKE_SOURCE_DIR}/${GUDHI_PYTHON_PATH}/include', ")
+
+ if (TBB_FOUND AND WITH_GUDHI_USE_TBB)
+ add_gudhi_debug_info("TBB version ${TBB_INTERFACE_VERSION} found and used")
+ set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-DGUDHI_USE_TBB', ")
+ if(CMAKE_BUILD_TYPE MATCHES Debug)
+ add_GUDHI_PYTHON_lib("${TBB_DEBUG_LIBRARY}")
+ add_GUDHI_PYTHON_lib("${TBB_MALLOC_DEBUG_LIBRARY}")
+ else()
+ add_GUDHI_PYTHON_lib("${TBB_RELEASE_LIBRARY}")
+ add_GUDHI_PYTHON_lib("${TBB_MALLOC_RELEASE_LIBRARY}")
+ endif()
+ set(GUDHI_PYTHON_LIBRARY_DIRS "${GUDHI_PYTHON_LIBRARY_DIRS}'${TBB_LIBRARY_DIRS}', ")
+ message("** Add tbb ${TBB_LIBRARY_DIRS}")
+ set(GUDHI_PYTHON_INCLUDE_DIRS "${GUDHI_PYTHON_INCLUDE_DIRS}'${TBB_INCLUDE_DIRS}', ")
+ endif()
+
+ if(UNIX AND WITH_GUDHI_PYTHON_RUNTIME_LIBRARY_DIRS)
+ set( GUDHI_PYTHON_RUNTIME_LIBRARY_DIRS "${GUDHI_PYTHON_LIBRARY_DIRS}")
+ endif(UNIX AND WITH_GUDHI_PYTHON_RUNTIME_LIBRARY_DIRS)
+
+ # Generate setup.py file to cythonize Gudhi - This file must be named setup.py by convention
+ configure_file(setup.py.in "${CMAKE_CURRENT_BINARY_DIR}/setup.py" @ONLY)
+
+ # Generate gudhi/__init__.py
+ file(MAKE_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/gudhi")
+ configure_file("gudhi/__init__.py.in" "${CMAKE_CURRENT_BINARY_DIR}/gudhi/__init__.py" @ONLY)
+
+ # Other .py files
+ file(COPY "gudhi/persistence_graphical_tools.py" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi")
+
+ add_custom_command(
+ OUTPUT gudhi.so
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_BINARY_DIR}/setup.py" "build_ext" "--inplace")
+
+ add_custom_target(python ALL DEPENDS gudhi.so
+ COMMENT "Do not forget to add ${CMAKE_CURRENT_BINARY_DIR}/ to your PYTHONPATH before using examples or tests")
+
+ install(CODE "execute_process(COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_BINARY_DIR}/setup.py install)")
+
+ # Test examples
+ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
+ # Bottleneck and Alpha
+ add_test(NAME alpha_rips_persistence_bottleneck_distance_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/alpha_rips_persistence_bottleneck_distance.py"
+ -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -t 0.15 -d 3)
+
+ if(MATPLOTLIB_FOUND AND NUMPY_FOUND)
+ # Tangential
+ add_test(NAME tangential_complex_plain_homology_from_off_file_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/tangential_complex_plain_homology_from_off_file_example.py"
+ --no-diagram -i 2 -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off)
+
+ add_gudhi_py_test(test_tangential_complex)
+
+ # Witness complex AND Subsampling
+ add_test(NAME euclidean_strong_witness_complex_diagram_persistence_from_off_file_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py"
+ --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -a 1.0 -n 20 -d 2)
+
+ add_test(NAME euclidean_witness_complex_diagram_persistence_from_off_file_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py"
+ --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -a 1.0 -n 20 -d 2)
+ endif()
+
+ # Subsampling
+ add_gudhi_py_test(test_subsampling)
+
+ endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
+ if (NOT CGAL_VERSION VERSION_LESS 4.11.0)
+ # Bottleneck
+ add_test(NAME bottleneck_basic_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/bottleneck_basic_example.py")
+
+ add_gudhi_py_test(test_bottleneck_distance)
+
+ # Cover complex
+ file(COPY ${CMAKE_SOURCE_DIR}/data/points/human.off DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+ file(COPY ${CMAKE_SOURCE_DIR}/data/points/COIL_database/lucky_cat.off DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+ file(COPY ${CMAKE_SOURCE_DIR}/data/points/COIL_database/lucky_cat_PCA1 DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+ add_test(NAME cover_complex_nerve_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/nerve_of_a_covering.py"
+ -f human.off -c 2 -r 10 -g 0.3)
+
+ add_test(NAME cover_complex_coordinate_gic_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/coordinate_graph_induced_complex.py"
+ -f human.off -c 0 -v)
+
+ add_test(NAME cover_complex_functional_gic_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/functional_graph_induced_complex.py"
+ -o lucky_cat.off
+ -f lucky_cat_PCA1 -v)
+
+ add_test(NAME cover_complex_voronoi_gic_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/voronoi_graph_induced_complex.py"
+ -f human.off -n 700 -v)
+
+ add_gudhi_py_test(test_cover_complex)
+ endif (NOT CGAL_VERSION VERSION_LESS 4.11.0)
+
+ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
+ # Alpha
+ add_test(NAME alpha_complex_from_points_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/alpha_complex_from_points_example.py")
+
+ if(MATPLOTLIB_FOUND AND NUMPY_FOUND)
+ add_test(NAME alpha_complex_diagram_persistence_from_off_file_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/alpha_complex_diagram_persistence_from_off_file_example.py"
+ --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -a 0.6)
+ endif()
+
+ add_gudhi_py_test(test_alpha_complex)
+
+ endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
+
+ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
+ # Euclidean witness
+ add_gudhi_py_test(test_euclidean_witness_complex)
+
+ endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
+
+ # Cubical
+ add_test(NAME periodic_cubical_complex_barcode_persistence_from_perseus_file_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py"
+ --no-barcode -f ${CMAKE_SOURCE_DIR}/data/bitmap/CubicalTwoSphere.txt)
+
+ if(NUMPY_FOUND)
+ add_test(NAME random_cubical_complex_persistence_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/random_cubical_complex_persistence_example.py"
+ 10 10 10)
+ endif()
+
+ add_gudhi_py_test(test_cubical_complex)
+
+ # Rips
+ if(MATPLOTLIB_FOUND AND NUMPY_FOUND)
+ add_test(NAME rips_complex_diagram_persistence_from_distance_matrix_file_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py"
+ --no-diagram -f ${CMAKE_SOURCE_DIR}/data/distance_matrix/lower_triangular_distance_matrix.csv -e 12.0 -d 3)
+
+ add_test(NAME rips_complex_diagram_persistence_from_off_file_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/example/rips_complex_diagram_persistence_from_off_file_example.py
+ --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -e 0.25 -d 3)
+ endif()
+
+ add_test(NAME rips_complex_from_points_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/example/rips_complex_from_points_example.py)
+
+ add_gudhi_py_test(test_rips_complex)
+
+ # Simplex tree
+ add_test(NAME simplex_tree_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/example/simplex_tree_example.py)
+
+ add_gudhi_py_test(test_simplex_tree)
+
+ # Witness
+ add_test(NAME witness_complex_from_nearest_landmark_table_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/example/witness_complex_from_nearest_landmark_table.py)
+
+ add_gudhi_py_test(test_witness_complex)
+
+ # Reader utils
+ add_gudhi_py_test(test_reader_utils)
+
+ # Documentation generation is available through sphinx - requires all modules
+ if(SPHINX_PATH)
+ if(MATPLOTLIB_FOUND)
+ if(NUMPY_FOUND)
+ if(SCIPY_FOUND)
+ if(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
+ set (GUDHI_SPHINX_MESSAGE "Generating API documentation with Sphinx in ${CMAKE_CURRENT_BINARY_DIR}/sphinx/")
+ # User warning - Sphinx is a static page generator, configured to work with the user version
+ # Image and bibliography warnings appear because those files are not found in the developer version
+ if (GUDHI_PYTHON_PATH STREQUAL "src/python")
+ set (GUDHI_SPHINX_MESSAGE "${GUDHI_SPHINX_MESSAGE} \n WARNING : Sphinx is configured for the user version, but you are running it on the developer version. Images and bibliography will be missing")
+ endif()
+ # sphinx target requires gudhi.so, because conf.py reads gudhi version from it
+ add_custom_target(sphinx
+ WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/doc
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${SPHINX_PATH} -b html ${CMAKE_CURRENT_SOURCE_DIR}/doc ${CMAKE_CURRENT_BINARY_DIR}/sphinx
+ DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/gudhi.so"
+ COMMENT "${GUDHI_SPHINX_MESSAGE}" VERBATIM)
+
+ add_test(NAME sphinx_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${SPHINX_PATH} -b doctest ${CMAKE_CURRENT_SOURCE_DIR}/doc ${CMAKE_CURRENT_BINARY_DIR}/doctest)
+
+ # Record whether the module is available or missing
+ set(GUDHI_MODULES ${GUDHI_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MODULES")
+ else(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
+ message("++ Python documentation module will not be compiled because it requires a Eigen3 and CGAL version >= 4.11.0")
+ set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES")
+ endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
+ else(SCIPY_FOUND)
+ message("++ Python documentation module will not be compiled because scipy was not found")
+ set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES")
+ endif(SCIPY_FOUND)
+ else(NUMPY_FOUND)
+ message("++ Python documentation module will not be compiled because numpy was not found")
+ set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES")
+ endif(NUMPY_FOUND)
+ else(MATPLOTLIB_FOUND)
+ message("++ Python documentation module will not be compiled because matplotlib was not found")
+ set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES")
+ endif(MATPLOTLIB_FOUND)
+ else(SPHINX_PATH)
+ message("++ Python documentation module will not be compiled because sphinx and sphinxcontrib-bibtex were not found")
+ set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES")
+ endif(SPHINX_PATH)
+
+
+ # Record whether the module is available or missing
+ set(GUDHI_MODULES ${GUDHI_MODULES} "python" CACHE INTERNAL "GUDHI_MODULES")
+ else(CYTHON_FOUND)
+ message("++ Python module will not be compiled because cython was not found")
+ set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python" CACHE INTERNAL "GUDHI_MISSING_MODULES")
+ endif(CYTHON_FOUND)
+else(PYTHONINTERP_FOUND)
+ message("++ Python module will not be compiled because no Python interpreter was found")
+ set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python" CACHE INTERNAL "GUDHI_MISSING_MODULES")
+endif(PYTHONINTERP_FOUND)
diff --git a/src/python/CONVENTIONS b/src/python/CONVENTIONS
new file mode 100644
index 00000000..804e97f3
--- /dev/null
+++ b/src/python/CONVENTIONS
@@ -0,0 +1,9 @@
+Gudhi follows the PEP8 conventions.
+
+Please refer to:
+https://www.python.org/dev/peps/pep-0008/
+
+A summary (a short naming sketch follows this list):
+ - modules (filenames) should have short, all-lowercase names, and they can contain underscores.
+ - packages (directories) should have short, all-lowercase names, preferably without underscores.
+ - classes should use the CapWords convention. \ No newline at end of file
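A short sketch of these naming conventions (the module file name and the class below are hypothetical examples, not part of GUDHI):

    # hypothetical module file: diagram_statistics.py  (short, all-lowercase, underscores allowed)

    class PersistenceSummary:                  # classes use the CapWords convention
        """Collect a few statistics about a persistence diagram."""

        def __init__(self, diagram):
            self.diagram = diagram

        def number_of_points(self):            # functions and methods use lowercase_with_underscores
            return len(self.diagram)

    summary = PersistenceSummary([(0.0, 1.0), (0.5, 2.0)])
    print(summary.number_of_points())          # prints 2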
diff --git a/src/python/README b/src/python/README
new file mode 100644
index 00000000..7d2c4491
--- /dev/null
+++ b/src/python/README
@@ -0,0 +1,3 @@
+
+If you do not want to install the package, just launch the following command to help Python find the compiled package:
+$> export PYTHONPATH=`pwd`:$PYTHONPATH
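As a quick sanity check, assuming the PYTHONPATH above points at the build directory that contains the compiled gudhi package, the following minimal Python snippet should then work (gudhi.__debug_info__ is the build summary assembled by the CMake script via add_gudhi_debug_info; if your build does not expose it, the first two lines are enough):

    import gudhi                  # found through the PYTHONPATH exported above
    print(gudhi.__version__)      # version string of the compiled package
    print(gudhi.__debug_info__)   # build configuration summary filled in by CMake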
diff --git a/src/python/doc/_templates/layout.html b/src/python/doc/_templates/layout.html
new file mode 100644
index 00000000..fe64fb3d
--- /dev/null
+++ b/src/python/doc/_templates/layout.html
@@ -0,0 +1,276 @@
+{#
+ basic/layout.html
+ ~~~~~~~~~~~~~~~~~
+
+ Master layout template for Sphinx themes.
+
+ :copyright: Copyright 2007-2016 by the Sphinx team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+#}
+{%- block doctype -%}
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+{%- endblock %}
+{%- set reldelim1 = reldelim1 is not defined and ' &raquo;' or reldelim1 %}
+{%- set reldelim2 = reldelim2 is not defined and ' |' or reldelim2 %}
+{%- set render_sidebar = (not embedded) and (not theme_nosidebar|tobool) and
+ (sidebars != []) %}
+{%- set url_root = pathto('', 1) %}
+{# XXX necessary? #}
+{%- if url_root == '#' %}{% set url_root = '' %}{% endif %}
+{%- if not embedded and docstitle %}
+ {%- set titlesuffix = " &mdash; "|safe + docstitle|e %}
+{%- else %}
+ {%- set titlesuffix = "" %}
+{%- endif %}
+
+{%- macro relbar() %}
+ <div class="related" role="navigation" aria-label="related navigation">
+ <h3>{{ _('Navigation') }}</h3>
+ <ul>
+ {%- for rellink in rellinks %}
+ <li class="right" {% if loop.first %}style="margin-right: 10px"{% endif %}>
+ <a href="{{ pathto(rellink[0]) }}" title="{{ rellink[1]|striptags|e }}"
+ {{ accesskey(rellink[2]) }}>{{ rellink[3] }}</a>
+ {%- if not loop.first %}{{ reldelim2 }}{% endif %}</li>
+ {%- endfor %}
+ {%- block rootrellink %}
+ <li class="nav-item nav-item-0"><a href="{{ pathto(master_doc) }}">{{ shorttitle|e }}</a>{{ reldelim1 }}</li>
+ {%- endblock %}
+ {%- for parent in parents %}
+ <li class="nav-item nav-item-{{ loop.index }}"><a href="{{ parent.link|e }}" {% if loop.last %}{{ accesskey("U") }}{% endif %}>{{ parent.title }}</a>{{ reldelim1 }}</li>
+ {%- endfor %}
+ {%- block relbaritems %} {% endblock %}
+ </ul>
+ </div>
+{%- endmacro %}
+
+{%- macro sidebar() %}
+ {%- if render_sidebar %}
+ <div class="sphinxsidebar" role="navigation" aria-label="main navigation">
+ <div class="sphinxsidebarwrapper">
+ {%- block sidebarlogo %}
+ {%- if logo %}
+ <p class="logo"><a href="{{ pathto(master_doc) }}">
+ <img class="logo" src="{{ pathto('_static/' + logo, 1) }}" alt="Logo"/>
+ </a></p>
+ {%- endif %}
+ {%- endblock %}
+ <h2><a href="index.html">GUDHI</a></h2>
+ <h2><a href="fileformats.html">File formats</a></h2>
+ <h2><a href="installation.html">GUDHI installation</a></h2>
+ <h2><a href="citation.html">Acknowledging the GUDHI library</a></h2>
+ <h2><a href="genindex.html">Index</a></h2>
+ <h2><a href="examples.html">Examples</a></h2>
+ {%- if sidebars != None %}
+ {#- new style sidebar: explicitly include/exclude templates #}
+ {%- for sidebartemplate in sidebars %}
+ {%- include sidebartemplate %}
+ {%- endfor %}
+ {%- else %}
+ {#- old style sidebars: using blocks -- should be deprecated #}
+ {%- block sidebartoc %}
+ {%- include "localtoc.html" %}
+ {%- endblock %}
+ {%- block sidebarrel %}
+ {%- include "relations.html" %}
+ {%- endblock %}
+ {%- block sidebarsourcelink %}
+ {%- include "sourcelink.html" %}
+ {%- endblock %}
+ {%- if customsidebar %}
+ {%- include customsidebar %}
+ {%- endif %}
+ {%- block sidebarsearch %}
+ {%- include "searchbox.html" %}
+ {%- endblock %}
+ {%- endif %}
+ </div>
+ </div>
+ {%- endif %}
+{%- endmacro %}
+
+{%- macro script() %}
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT: '{{ url_root }}',
+ VERSION: '{{ release|e }}',
+ COLLAPSE_INDEX: false,
+ FILE_SUFFIX: '{{ '' if no_search_suffix else file_suffix }}',
+ HAS_SOURCE: {{ has_source|lower }}
+ };
+ </script>
+ {%- for scriptfile in script_files %}
+ <script type="text/javascript" src="{{ pathto(scriptfile, 1) }}"></script>
+ {%- endfor %}
+{%- endmacro %}
+
+{%- macro css() %}
+<!-- GUDHI website css for header BEGIN -->
+<link rel="stylesheet" type="text/css" href="https://gudhi.inria.fr/assets/css/styles_feeling_responsive.css" />
+<!-- GUDHI website css for header END -->
+ <link rel="stylesheet" href="{{ pathto('_static/' + style, 1) }}" type="text/css" />
+ <link rel="stylesheet" href="{{ pathto('_static/pygments.css', 1) }}" type="text/css" />
+ {%- for cssfile in css_files %}
+ <link rel="stylesheet" href="{{ pathto(cssfile, 1) }}" type="text/css" />
+ {%- endfor %}
+{%- endmacro %}
+<!-- GUDHI website html class for header BEGIN -->
+<html xmlns="http://www.w3.org/1999/xhtml" class="no-js" lang="en">
+<!-- GUDHI website html class for header END -->
+ <head>
+ <meta http-equiv="Content-Type" content="text/html; charset={{ encoding }}" />
+ {{ metatags }}
+ {%- block htmltitle %}
+ <title>{{ title|striptags|e }}{{ titlesuffix }}</title>
+ {%- endblock %}
+ {{ css() }}
+ {%- if not embedded %}
+ {{ script() }}
+ {%- if use_opensearch %}
+ <link rel="search" type="application/opensearchdescription+xml"
+ title="{% trans docstitle=docstitle|e %}Search within {{ docstitle }}{% endtrans %}"
+ href="{{ pathto('_static/opensearch.xml', 1) }}"/>
+ {%- endif %}
+ {%- if favicon %}
+ <link rel="shortcut icon" href="{{ pathto('_static/' + favicon, 1) }}"/>
+ {%- endif %}
+ {%- endif %}
+{%- block linktags %}
+ {%- if hasdoc('about') %}
+ <link rel="author" title="{{ _('About these documents') }}" href="{{ pathto('about') }}" />
+ {%- endif %}
+ {%- if hasdoc('genindex') %}
+ <link rel="index" title="{{ _('Index') }}" href="{{ pathto('genindex') }}" />
+ {%- endif %}
+ {%- if hasdoc('search') %}
+ <link rel="search" title="{{ _('Search') }}" href="{{ pathto('search') }}" />
+ {%- endif %}
+ {%- if hasdoc('copyright') %}
+ <link rel="copyright" title="{{ _('Copyright') }}" href="{{ pathto('copyright') }}" />
+ {%- endif %}
+ <link rel="top" title="{{ docstitle|e }}" href="{{ pathto(master_doc) }}" />
+ {%- if parents %}
+ <link rel="up" title="{{ parents[-1].title|striptags|e }}" href="{{ parents[-1].link|e }}" />
+ {%- endif %}
+ {%- if next %}
+ <link rel="next" title="{{ next.title|striptags|e }}" href="{{ next.link|e }}" />
+ {%- endif %}
+ {%- if prev %}
+ <link rel="prev" title="{{ prev.title|striptags|e }}" href="{{ prev.link|e }}" />
+ {%- endif %}
+{%- endblock %}
+{%- block extrahead %} {% endblock %}
+ </head>
+ <body role="document">
+ <!-- GUDHI website header BEGIN -->
+ <div id="navigation" class="sticky">
+ <nav class="top-bar" role="navigation" data-topbar>
+ <ul class="title-area">
+ <li class="name">
+ <h1 class="show-for-small-only"><a href="" class="icon-tree"> GUDHI library</a></h1>
+ </li>
+ <!-- Remove the class "menu-icon" to get rid of menu icon. Take out "Menu" to just have icon alone -->
+ <li class="toggle-topbar menu-icon"><a href="#"><span>Navigation</span></a></li>
+ </ul>
+ <section class="top-bar-section">
+ <ul class="right">
+ <li class="divider"></li>
+ <li><a href="/contact/">Contact</a></li>
+ </ul>
+ <ul class="left">
+ <li><a href="/"> <img src="/assets/img/home.png" alt="&nbsp;&nbsp;GUDHI">&nbsp;&nbsp;GUDHI </a></li>
+ <li class="divider"></li>
+ <li class="has-dropdown">
+ <a href="#">Project</a>
+ <ul class="dropdown">
+ <li><a href="/people/">People</a></li>
+ <li><a href="/keepintouch/">Keep in touch</a></li>
+ <li><a href="/partners/">Partners and Funding</a></li>
+ <li><a href="/relatedprojects/">Related projects</a></li>
+ <li><a href="/theyaretalkingaboutus/">They are talking about us</a></li>
+ <li><a href="/inaction/">GUDHI in action</a></li>
+ </ul>
+ </li>
+ <li class="divider"></li>
+ <li class="has-dropdown">
+ <a href="#">Download</a>
+ <ul class="dropdown">
+ <li><a href="/licensing/">Licensing</a></li>
+ <li><a href="https://gforge.inria.fr/frs/download.php/latestzip/5253/library-latest.zip" target="_blank">Get the latest sources</a></li>
+ <li><a href="/conda/">Conda package</a></li>
+ <li><a href="/dockerfile/">Dockerfile</a></li>
+ </ul>
+ </li>
+ <li class="divider"></li>
+ <li class="has-dropdown">
+ <a href="#">Documentation</a>
+ <ul class="dropdown">
+ <li><a href="/introduction/">Introduction</a></li>
+ <li><a href="https://gudhi.inria.fr/doc/latest/installation.html">C++ installation manual</a></li>
+ <li><a href="https://gudhi.inria.fr/doc/latest/">C++ documentation</a></li>
+ <li><a href="https://gudhi.inria.fr/python/latest/installation.html">Python installation manual</a></li>
+ <li><a href="https://gudhi.inria.fr/python/latest/">Python documentation</a></li>
+ <li><a href="/utils/">Utilities</a></li>
+ <li><a href="/tutorials/">Tutorials</a></li>
+ </ul>
+ </li>
+ <li class="divider"></li>
+ <li><a href="/interfaces/">Interfaces</a></li>
+ <li class="divider"></li>
+ </ul>
+ </section>
+ </nav>
+ </div><!-- /#navigation -->
+ <!-- GUDHI website header BEGIN -->
+
+
+{%- block header %}{% endblock %}
+
+{%- block relbar1 %}{% endblock %}
+
+{%- block content %}
+ {%- block sidebar1 %} {# possible location for sidebar #} {% endblock %}
+
+ <div class="document">
+ {%- block document %}
+ <div class="documentwrapper">
+ {%- if render_sidebar %}
+ <div class="bodywrapper">
+ {%- endif %}
+ <div class="body" role="main">
+ {% block body %} {% endblock %}
+ </div>
+ {%- if render_sidebar %}
+ </div>
+ {%- endif %}
+ </div>
+ {%- endblock %}
+
+ {%- block sidebar2 %}{{ sidebar() }}{% endblock %}
+ <div class="clearer"></div>
+ </div>
+{%- endblock %}
+
+{%- block relbar2 %}{% endblock %}
+
+{%- block footer %}
+ <div class="footer" role="contentinfo">
+ {%- if show_copyright %}
+ {%- if hasdoc('copyright') %}
+ {% trans path=pathto('copyright'), copyright=copyright|e %}&copy; <a href="{{ path }}">Copyright</a> {{ copyright }}.{% endtrans %}
+ {%- else %}
+ {% trans copyright=copyright|e %} {{ copyright }}.{% endtrans %}
+ {%- endif %}
+ {%- endif %}
+ {%- if last_updated %}
+ {% trans last_updated=last_updated|e %}Last updated on {{ last_updated }}.{% endtrans %}
+ {%- endif %}
+ {%- if show_sphinx %}
+ {% trans sphinx_version=sphinx_version|e %}Created using <a href="http://sphinx-doc.org/">Sphinx</a> {{ sphinx_version }}.{% endtrans %}
+ {%- endif %}
+ </div>
+{%- endblock %}
+ </body>
+</html>
+
diff --git a/src/python/doc/alpha_complex_ref.rst b/src/python/doc/alpha_complex_ref.rst
new file mode 100644
index 00000000..7da79543
--- /dev/null
+++ b/src/python/doc/alpha_complex_ref.rst
@@ -0,0 +1,14 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
+==============================
+Alpha complex reference manual
+==============================
+
+.. autoclass:: gudhi.AlphaComplex
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+ .. automethod:: gudhi.AlphaComplex.__init__
diff --git a/src/python/doc/alpha_complex_sum.inc b/src/python/doc/alpha_complex_sum.inc
new file mode 100644
index 00000000..c5ba9dc7
--- /dev/null
+++ b/src/python/doc/alpha_complex_sum.inc
@@ -0,0 +1,20 @@
+.. table::
+ :widths: 30 50 20
+
+ +----------------------------------------------------------------+------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------+
+ | .. figure:: | Alpha complex is a simplicial complex constructed from the finite | :Author: Vincent Rouvreau |
+ | ../../doc/Alpha_complex/alpha_complex_representation.png | cells of a Delaunay Triangulation. | |
+ | :alt: Alpha complex representation | | :Introduced in: GUDHI 2.0.0 |
+ | :figclass: align-center | The filtration value of each simplex is computed as the square of the | |
+ | | circumradius of the simplex if the circumsphere is empty (the simplex | :Copyright: MIT (`GPL v3 </licensing/>`_) |
+ | | is then said to be Gabriel), and as the minimum of the filtration | |
+ | | values of the codimension 1 cofaces that make it not Gabriel | :Requires: `Eigen <installation.html#eigen>`__ :math:`\geq` 3.1.0 and `CGAL <installation.html#cgal>`__ :math:`\geq` 4.11.0 |
+ | | otherwise. All simplices that have a filtration value strictly | |
+ | | greater than a given alpha squared value are not inserted into the | |
+ | | complex. | |
+ | | | |
+ | | This package requires having CGAL version 4.7 or higher (4.8.1 is | |
+ | | advised for better performance). | |
+ +----------------------------------------------------------------+------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------+
+ | * :doc:`alpha_complex_user` | * :doc:`alpha_complex_ref` |
+ +----------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
diff --git a/src/python/doc/alpha_complex_user.rst b/src/python/doc/alpha_complex_user.rst
new file mode 100644
index 00000000..f9662a6d
--- /dev/null
+++ b/src/python/doc/alpha_complex_user.rst
@@ -0,0 +1,211 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
+Alpha complex user manual
+=========================
+Definition
+----------
+
+.. include:: alpha_complex_sum.inc
+
+Alpha_complex constructs a :doc:`Simplex_tree <simplex_tree_ref>` using
+`Delaunay Triangulation <http://doc.cgal.org/latest/Triangulation/index.html#Chapter_Triangulations>`_
+:cite:`cgal:hdj-t-15b` from `CGAL <http://www.cgal.org/>`_ (the Computational Geometry Algorithms Library
+:cite:`cgal:eb-15b`).
+
+Remarks
+^^^^^^^
+When Alpha_complex is constructed with an infinite value of :math:`\alpha`, the complex is a Delaunay complex.
+
+Example from points
+-------------------
+
+This example builds the Delaunay triangulation from the given points, and initializes the alpha complex with it:
+
+.. testcode::
+
+ import gudhi
+ alpha_complex = gudhi.AlphaComplex(points=[[1, 1], [7, 0], [4, 6], [9, 6], [0, 14], [2, 19], [9, 17]])
+
+ simplex_tree = alpha_complex.create_simplex_tree()
+ result_str = 'Alpha complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \
+ repr(simplex_tree.num_simplices()) + ' simplices - ' + \
+ repr(simplex_tree.num_vertices()) + ' vertices.'
+ print(result_str)
+ fmt = '%s -> %.2f'
+ for filtered_value in simplex_tree.get_filtration():
+ print(fmt % tuple(filtered_value))
+
+The output is:
+
+.. testoutput::
+
+ Alpha complex is of dimension 2 - 25 simplices - 7 vertices.
+ [0] -> 0.00
+ [1] -> 0.00
+ [2] -> 0.00
+ [3] -> 0.00
+ [4] -> 0.00
+ [5] -> 0.00
+ [6] -> 0.00
+ [2, 3] -> 6.25
+ [4, 5] -> 7.25
+ [0, 2] -> 8.50
+ [0, 1] -> 9.25
+ [1, 3] -> 10.00
+ [1, 2] -> 11.25
+ [1, 2, 3] -> 12.50
+ [0, 1, 2] -> 13.00
+ [5, 6] -> 13.25
+ [2, 4] -> 20.00
+ [4, 6] -> 22.74
+ [4, 5, 6] -> 22.74
+ [3, 6] -> 30.25
+ [2, 6] -> 36.50
+ [2, 3, 6] -> 36.50
+ [2, 4, 6] -> 37.24
+ [0, 4] -> 59.71
+ [0, 2, 4] -> 59.71
+
+
+Algorithm
+---------
+
+Data structure
+^^^^^^^^^^^^^^
+
+In order to build the alpha complex, a simplex tree is first built from the cells of a Delaunay triangulation
+(the filtration value of each simplex is set to NaN, which stands for an unknown value):
+
+.. figure::
+ ../../doc/Alpha_complex/alpha_complex_doc.png
+ :figclass: align-center
+ :alt: Simplex tree structure construction example
+
+ Simplex tree structure construction example
+
+Filtration value computation algorithm
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+  **for** i : dimension :math:`\rightarrow` 0 **do**
+    **for all** :math:`\sigma` of dimension i
+      **if** filtration(:math:`\sigma`) is NaN **then**
+        filtration(:math:`\sigma`) = :math:`\alpha^2(\sigma)`
+      **end if**
+
+      *// propagate the alpha filtration value*
+
+      **for all** :math:`\tau` face of :math:`\sigma`
+        **if** filtration(:math:`\tau`) is not NaN **then**
+          filtration(:math:`\tau`) = min( filtration(:math:`\tau`), filtration(:math:`\sigma`) )
+        **else**
+          **if** :math:`\tau` is not Gabriel for :math:`\sigma` **then**
+            filtration(:math:`\tau`) = filtration(:math:`\sigma`)
+          **end if**
+        **end if**
+      **end for**
+    **end for**
+  **end for**
+
+ make_filtration_non_decreasing()
+
+ prune_above_filtration()
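To make the pseudocode concrete, here is a schematic Python sketch of the propagation loop. It assumes a plain dict that maps every simplex of the triangulation (including all of its faces, as sorted tuples of vertices) to a filtration value initialised to NaN, and takes alpha_square and is_gabriel as user-supplied callables. It is only an illustration of the algorithm above, not the GUDHI implementation:

    import math
    from itertools import combinations

    def compute_alpha_filtration(filtration, alpha_square, is_gabriel):
        """Schematic version of the loop above (illustration only).

        filtration   -- dict: simplex (sorted tuple of vertices) -> value, initialised to NaN
        alpha_square -- callable returning the squared circumradius of a simplex
        is_gabriel   -- callable telling whether a codimension 1 face is Gabriel for a simplex
        """
        max_dim = max(len(s) - 1 for s in filtration)
        for dim in range(max_dim, -1, -1):                      # dimension -> 0
            for sigma in [s for s in filtration if len(s) - 1 == dim]:
                if math.isnan(filtration[sigma]):
                    filtration[sigma] = alpha_square(sigma)
                if dim == 0:
                    continue                                    # a vertex has no codimension 1 face here
                # propagate the alpha filtration value to the codimension 1 faces
                for tau in combinations(sigma, dim):
                    if not math.isnan(filtration[tau]):
                        filtration[tau] = min(filtration[tau], filtration[sigma])
                    elif not is_gabriel(tau, sigma):
                        filtration[tau] = filtration[sigma]
        return filtration

After this loop, make_filtration_non_decreasing() and prune_above_filtration() are applied as stated above.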
+
+Dimension 2
+^^^^^^^^^^^
+
+In the example above, the algorithm looks at each triangle ([0,1,2], [0,2,4], [1,2,3], ...),
+computes the filtration value of the triangle, and then propagates that value as described
+here:
+
+.. figure::
+ ../../doc/Alpha_complex/alpha_complex_doc_420.png
+ :figclass: align-center
+ :alt: Filtration value propagation example
+
+ Filtration value propagation example
+
+Dimension 1
+^^^^^^^^^^^
+
+Then the algorithm looks at each edge ([0,1], [0,2], [1,2], ...) and
+computes the filtration value of the edge (in this case, propagation has no effect).
+
+Dimension 0
+^^^^^^^^^^^
+
+Finally, the algorithm looks at each vertex ([0], [1], [2], [3], [4], [5] and [6]) and
+sets its filtration value (0 for a vertex, so propagation has no effect).
+
+Non decreasing filtration values
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+As the squared radii computed by CGAL are an approximation, it might happen that these alpha squared values do not
+quite define a proper filtration (i.e. non-decreasing with respect to inclusion).
+We fix that up by calling `Simplex_tree::make_filtration_non_decreasing()` (cf.
+`C++ version <http://gudhi.gforge.inria.fr/doc/latest/index.html>`_).
+
+Prune above given filtration value
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The simplex tree is pruned above the given maximum alpha squared value (cf. `Simplex_tree::prune_above_filtration()`
+in the `C++ version <http://gudhi.gforge.inria.fr/doc/latest/index.html>`_). Note that this does not provide any kind
+of speed-up, since we always build the full filtered complex first, so it is recommended not to use `max_alpha_square`.
+In the following example, a threshold of 59 is used.
+
+
+Example from OFF file
+^^^^^^^^^^^^^^^^^^^^^
+
+This example builds the Delaunay triangulation from the points given by an OFF file, and initializes the alpha complex
+with it.
+
+
+Then it displays information about the alpha complex:
+
+.. testcode::
+
+ import gudhi
+ alpha_complex = gudhi.AlphaComplex(off_file=gudhi.__root_source_dir__ + \
+ '/data/points/alphacomplexdoc.off')
+ simplex_tree = alpha_complex.create_simplex_tree(max_alpha_square=59.0)
+ result_str = 'Alpha complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \
+ repr(simplex_tree.num_simplices()) + ' simplices - ' + \
+ repr(simplex_tree.num_vertices()) + ' vertices.'
+ print(result_str)
+ fmt = '%s -> %.2f'
+ for filtered_value in simplex_tree.get_filtration():
+ print(fmt % tuple(filtered_value))
+
+The program output is:
+
+.. testoutput::
+
+ Alpha complex is of dimension 2 - 23 simplices - 7 vertices.
+ [0] -> 0.00
+ [1] -> 0.00
+ [2] -> 0.00
+ [3] -> 0.00
+ [4] -> 0.00
+ [5] -> 0.00
+ [6] -> 0.00
+ [2, 3] -> 6.25
+ [4, 5] -> 7.25
+ [0, 2] -> 8.50
+ [0, 1] -> 9.25
+ [1, 3] -> 10.00
+ [1, 2] -> 11.25
+ [1, 2, 3] -> 12.50
+ [0, 1, 2] -> 13.00
+ [5, 6] -> 13.25
+ [2, 4] -> 20.00
+ [4, 6] -> 22.74
+ [4, 5, 6] -> 22.74
+ [3, 6] -> 30.25
+ [2, 6] -> 36.50
+ [2, 3, 6] -> 36.50
+ [2, 4, 6] -> 37.24
+
+CGAL citations
+==============
+
+.. bibliography:: ../../biblio/how_to_cite_cgal.bib
+ :filter: docnames
+ :style: unsrt
diff --git a/src/python/doc/bottleneck_distance_sum.inc b/src/python/doc/bottleneck_distance_sum.inc
new file mode 100644
index 00000000..6eb0ac19
--- /dev/null
+++ b/src/python/doc/bottleneck_distance_sum.inc
@@ -0,0 +1,14 @@
+.. table::
+ :widths: 30 50 20
+
+ +-----------------------------------------------------------------+----------------------------------------------------------------------+------------------------------------------------------------------+
+ | .. figure:: | Bottleneck distance measures the similarity between two persistence | :Author: François Godi |
+ | ../../doc/Bottleneck_distance/perturb_pd.png | diagrams. It's the shortest distance b for which there exists a | |
+ | :figclass: align-center | perfect matching between the points of the two diagrams (+ all the | :Introduced in: GUDHI 2.0.0 |
+ | | diagonal points) such that any couple of matched points are at | |
+ | Bottleneck distance is the length of | distance at most b, where the distance between points is the sup | :Copyright: MIT (`GPL v3 </licensing/>`_) |
+ | the longest edge | norm in :math:`\mathbb{R}^2`. | |
+ | | | :Requires: `CGAL <installation.html#cgal>`__ :math:`\geq` 4.11.0 |
+ +-----------------------------------------------------------------+----------------------------------------------------------------------+------------------------------------------------------------------+
+ | * :doc:`bottleneck_distance_user` | |
+ +-----------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------------+
diff --git a/src/python/doc/bottleneck_distance_user.rst b/src/python/doc/bottleneck_distance_user.rst
new file mode 100644
index 00000000..9435c7f1
--- /dev/null
+++ b/src/python/doc/bottleneck_distance_user.rst
@@ -0,0 +1,67 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
+Bottleneck distance user manual
+===============================
+Definition
+----------
+
+.. include:: bottleneck_distance_sum.inc
+
+This implementation is based on ideas from "Geometry Helps in Bottleneck Matching and Related Problems"
+:cite:`DBLP:journals/algorithmica/EfratIK01`. Another relevant publication, although it was not used here, is
+"Geometry Helps to Compare Persistence Diagrams" :cite:`Kerber:2017:GHC:3047249.3064175`.
+
+Function
+--------
+.. autofunction:: gudhi.bottleneck_distance
+
+Distance computation
+--------------------
+
+The following example explains how the distance is computed:
+
+.. testcode::
+
+ import gudhi
+
+ message = "Bottleneck distance = " + '%.1f' % gudhi.bottleneck_distance([[0., 0.]], [[0., 13.]])
+ print(message)
+
+.. testoutput::
+
+ Bottleneck distance = 6.5
+
+.. figure::
+ ../../doc/Bottleneck_distance/bottleneck_distance_example.png
+ :figclass: align-center
+
+ The point (0, 13) is at distance 6.5 from the diagonal and more
+ specifically from the point (6.5, 6.5)
+
+
+Basic example
+-------------
+
+This other example computes the bottleneck distance between 2 persistence diagrams:
+
+.. testcode::
+
+ import gudhi
+
+ diag1 = [[2.7, 3.7],[9.6, 14.],[34.2, 34.974], [3.,float('Inf')]]
+ diag2 = [[2.8, 4.45],[9.5, 14.1],[3.2,float('Inf')]]
+
+ message = "Bottleneck distance approximation = " + '%.2f' % gudhi.bottleneck_distance(diag1, diag2, 0.1)
+ print(message)
+
+ message = "Bottleneck distance value = " + '%.2f' % gudhi.bottleneck_distance(diag1, diag2)
+ print(message)
+
+The output is:
+
+.. testoutput::
+
+ Bottleneck distance approximation = 0.81
+ Bottleneck distance value = 0.75
diff --git a/src/python/doc/citation.rst b/src/python/doc/citation.rst
new file mode 100644
index 00000000..117eb9dd
--- /dev/null
+++ b/src/python/doc/citation.rst
@@ -0,0 +1,19 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
+Acknowledging the GUDHI library
+###############################
+
+We kindly ask users to cite the GUDHI library as appropriately as possible in
+their papers, to mention the use of the GUDHI library on the web pages of
+their projects, and to provide us with links to these web pages. Feel
+free to contact us in case you have any questions or remarks on this topic.
+
+We provide GUDHI bibtex entries for the modules of the User and Reference
+Manual, as well as for publications directly related to the GUDHI library.
+
+GUDHI bibtex
+************
+
+.. literalinclude:: ../../biblio/how_to_cite_gudhi.bib
diff --git a/src/python/doc/conf.py b/src/python/doc/conf.py
new file mode 100755
index 00000000..e4c718c3
--- /dev/null
+++ b/src/python/doc/conf.py
@@ -0,0 +1,203 @@
+# -*- coding: utf-8 -*-
+#
+# GUDHI documentation build configuration file, created by
+# sphinx-quickstart on Thu Jun 30 09:55:51 2016.
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys
+import os
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#sys.path.insert(0, os.path.abspath('.'))
+
+# Path to Gudhi.so from source path
+sys.path.insert(0, os.path.abspath('.'))
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+ 'matplotlib.sphinxext.plot_directive',
+ 'sphinx.ext.autodoc',
+ 'sphinx.ext.doctest',
+ 'sphinx.ext.todo',
+ 'sphinx.ext.mathjax',
+ 'sphinx.ext.ifconfig',
+ 'sphinx.ext.viewcode',
+ 'sphinxcontrib.bibtex',
+]
+
+todo_include_todos = True
+# plot option : do not show hyperlinks (Source code, png, hires.png, pdf)
+plot_html_show_source_link = False
+plot_html_show_formats = False
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The encoding of source files.
+#source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+import gudhi
+
+# General information about the project.
+project = gudhi.__name__
+copyright = gudhi.__copyright__ + ' - MIT'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = gudhi.__version__
+# The full version, including alpha/beta/rc tags.
+#release = '2.0.1-rc1'
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ['_build', '*.inc']
+
+# The reST default role (used for this markup: `text`) to use for all
+# documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+# If true, keep warnings as "system message" paragraphs in the built documents.
+#keep_warnings = False
+
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+html_theme = 'classic'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+html_theme_options = {
+ "sidebarbgcolor": "#A1ADCD",
+ "sidebartextcolor": "black",
+ "sidebarlinkcolor": "#334D5C",
+ "body_max_width": "100%",
+}
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo =
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon =
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# Add any extra paths that contain custom files (such as robots.txt or
+# .htaccess) here, relative to this directory. These files are copied
+# directly to the root of the documentation.
+#html_extra_path = []
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {'installation': 'installation.html'}
+
+# If false, no module index is generated.
+#html_domain_indices = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+#html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+#html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = None
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'GUDHIdoc'
+
diff --git a/src/python/doc/cubical_complex_ref.rst b/src/python/doc/cubical_complex_ref.rst
new file mode 100644
index 00000000..1fe9d5fb
--- /dev/null
+++ b/src/python/doc/cubical_complex_ref.rst
@@ -0,0 +1,13 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
+Cubical complex reference manual
+################################
+
+.. autoclass:: gudhi.CubicalComplex
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+ .. automethod:: gudhi.CubicalComplex.__init__
diff --git a/src/python/doc/cubical_complex_sum.inc b/src/python/doc/cubical_complex_sum.inc
new file mode 100644
index 00000000..f200e695
--- /dev/null
+++ b/src/python/doc/cubical_complex_sum.inc
@@ -0,0 +1,14 @@
+.. table::
+ :widths: 30 50 20
+
+ +--------------------------------------------------------------------------+----------------------------------------------------------------------+-----------------------------+
+ | .. figure:: | The cubical complex is an example of a structured complex useful in | :Author: Pawel Dlotko |
+ | ../../doc/Bitmap_cubical_complex/Cubical_complex_representation.png | computational mathematics (specially rigorous numerics) and image | |
+ | :alt: Cubical complex representation | analysis. | :Introduced in: GUDHI 2.0.0 |
+ | :figclass: align-center | | |
+ | | | :Copyright: MIT |
+ | | | |
+ +--------------------------------------------------------------------------+----------------------------------------------------------------------+-----------------------------+
+ | * :doc:`cubical_complex_user` | * :doc:`cubical_complex_ref` |
+ | | * :doc:`periodic_cubical_complex_ref` |
+ +--------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------+
diff --git a/src/python/doc/cubical_complex_user.rst b/src/python/doc/cubical_complex_user.rst
new file mode 100644
index 00000000..b13b500e
--- /dev/null
+++ b/src/python/doc/cubical_complex_user.rst
@@ -0,0 +1,168 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
+Cubical complex user manual
+===========================
+Definition
+----------
+
+===================================== ===================================== =====================================
+:Author: Pawel Dlotko :Introduced in: GUDHI PYTHON 2.0.0 :Copyright: GPL v3
+===================================== ===================================== =====================================
+
++---------------------------------------------+----------------------------------------------------------------------+
+| :doc:`cubical_complex_user` | * :doc:`cubical_complex_ref` |
+| | * :doc:`periodic_cubical_complex_ref` |
++---------------------------------------------+----------------------------------------------------------------------+
+
+The cubical complex is an example of a structured complex useful in computational mathematics (specially rigorous
+numerics) and image analysis.
+
+An *elementary interval* is an interval of the form :math:`[n,n+1]` or :math:`[n,n]`, for :math:`n \in \mathbb{Z}`.
+The first one is called *non-degenerate*, while the second one is a *degenerate* interval. The
+*boundary of an elementary interval* is the chain :math:`\partial [n,n+1] = [n+1,n+1]-[n,n]` in the case of a
+non-degenerate elementary interval and :math:`\partial [n,n] = 0` in the case of a degenerate elementary interval. An
+*elementary cube* :math:`C` is a product of elementary intervals, :math:`C=I_1 \times \ldots \times I_n`.
+The *embedding dimension* of a cube is :math:`n`, the number of elementary intervals (degenerate or not) in the product.
+The *dimension of a cube* :math:`C=I_1 \times \ldots \times I_n` is the number of non-degenerate elementary
+intervals in the product. The *boundary of a cube* :math:`C=I_1 \times \ldots \times I_n` is the chain obtained
+in the following way:
+
+.. math::
+
+ \partial C = (\partial I_1 \times \ldots \times I_n) + (I_1 \times \partial I_2 \times \ldots \times I_n) +
+ \ldots + (I_1 \times I_2 \times \ldots \times \partial I_n).
+
+A *cubical complex* :math:`\mathcal{K}` is a collection of cubes closed under the operation of taking boundaries
+(i.e. the boundary of every cube from the collection is in the collection). A cube :math:`C` in a cubical complex
+:math:`\mathcal{K}` is *maximal* if it is not in the boundary of any other cube in :math:`\mathcal{K}`. The
+*support* of a cube :math:`C` is the set in :math:`\mathbb{R}^n` occupied by :math:`C` (:math:`n` is the embedding
+dimension of :math:`C`).
+
+Cubes may be equipped with filtration values, in which case we have a filtered cubical complex. All the cubical
+complexes considered in this implementation are filtered cubical complexes (although the range of a filtration may
+be a set of two elements).
+
+For further details and theory of cubical complexes, please consult :cite:`kaczynski2004computational` as well as the
+following paper :cite:`peikert2012topological`.
+
+Data structure.
+---------------
+
+The implementation of the cubical complex provides a representation of complexes that occupy a rectangular region in
+:math:`\mathbb{R}^n`. This extra assumption allows for a memory-efficient way of storing cubical complexes in the form
+of so-called bitmaps. Let
+:math:`R = [b_1,e_1] \times \ldots \times [b_n,e_n]`, for :math:`b_1,...,b_n,e_1,...,e_n \in \mathbb{Z}`,
+:math:`b_i \leq e_i`, be the considered rectangular region and let :math:`\mathcal{K}` be a filtered
+cubical complex having the rectangle :math:`R` as its support. Note that the structure of the coordinate system gives
+a lexicographical ordering of the cells of :math:`\mathcal{K}`. This ordering is the basis of the presented
+bitmap-based implementation. In this implementation, the whole cubical complex is stored as a vector of the
+filtration values. This, together with the dimension of :math:`\mathcal{K}` and the sizes of :math:`\mathcal{K}` in all
+directions, allows us to determine the dimension, neighborhood, boundary and coboundary of every cube
+:math:`C \in \mathcal{K}`.
+
+.. figure::
+ ../../doc/Bitmap_cubical_complex/Cubical_complex_representation.png
+ :alt: Cubical complex.
+ :figclass: align-center
+
+ Cubical complex.
+
+Note that the cubical complex in the figure above is, in a natural way, a product of one-dimensional cubical
+complexes in :math:`\mathbb{R}`. The total number of cubes in each direction is equal to :math:`2n+1`, where :math:`n` is
+the number of maximal cubes in the considered direction. Let us consider a cube at the position :math:`k` in the
+bitmap.
+Knowing the sizes of the bitmap, by a series of modulo operations, we can determine which elementary intervals are
+present in the product that gives the cube :math:`C`. In a similar way, we can compute the boundary and the coboundary
+of each cube. Further details can be found in the literature.
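+
+The following plain-Python sketch illustrates this decoding. It is not part of the GUDHI API, and
+it assumes a lexicographical ordering in which the first coordinate varies fastest; the internal
+convention may differ:
+
+.. code-block:: python
+
+    def cube_from_position(k, sizes):
+        # sizes[i] is the number of maximal cubes in direction i,
+        # so the bitmap has 2*sizes[i]+1 cells along axis i.
+        coords = []
+        for n in sizes:
+            axis_length = 2 * n + 1
+            coords.append(k % axis_length)
+            k //= axis_length
+        # An odd coordinate corresponds to a non-degenerate elementary interval,
+        # an even one to a degenerate interval, hence the dimension of the cube:
+        dimension = sum(c % 2 for c in coords)
+        return coords, dimension
+
+    # In a bitmap of 3x3 maximal squares (7x7 cells), position 8 is the square (1, 1):
+    print(cube_from_position(8, [3, 3]))  # ([1, 1], 2)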
+
+Input Format.
+-------------
+
+In the current implementation, the filtration is given on the maximal cubes, and it is then extended to all cubes by
+the lower star filtration. There are a number of constructors that can be used to construct a cubical complex by users
+who want to use the code directly. They can be found in the :doc:`cubical_complex_ref`.
+Currently, one input from a text file is supported. It uses a format inspired by the
+`Perseus software <http://www.sas.upenn.edu/~vnanda/perseus/>`_ by Vidit Nanda.
+
+.. note::
+    While Perseus assumes the filtration of all maximal cubes to be non-negative, here we do not
+    enforce this and we allow any filtration values. As a consequence, one cannot use ``-1`` to
+    indicate missing cubes. If you have missing cubes in your complex, please set their filtration
+    to :math:`+\infty` (aka. ``inf`` in the file).
+
+The file format is described in detail in the :ref:`Perseus file format` section.
+
+.. testcode::
+
+ import gudhi
+ cubical_complex = gudhi.CubicalComplex(perseus_file=gudhi.__root_source_dir__ + \
+ '/data/bitmap/cubicalcomplexdoc.txt')
+ result_str = 'Cubical complex is of dimension ' + repr(cubical_complex.dimension()) + ' - ' + \
+ repr(cubical_complex.num_simplices()) + ' simplices.'
+ print(result_str)
+
+The program output is:
+
+.. testoutput::
+
+ Cubical complex is of dimension 2 - 49 simplices.
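+
+As mentioned above, other constructors are available (see :doc:`cubical_complex_ref` for the exact
+signatures). For instance, assuming the constructor accepts the same ``dimensions`` and
+``top_dimensional_cells`` arguments as the periodic variant shown below, a cubical complex can be
+built directly from a list of top-dimensional cell filtration values:
+
+.. code-block:: python
+
+    import gudhi
+
+    # 3x3 maximal cubes, filtration values given in lexicographical order
+    cubical_complex = gudhi.CubicalComplex(dimensions=[3, 3],
+                                           top_dimensional_cells=[1, 4, 6, 8, 20, 4, 7, 6, 5])
+    print('Cubical complex is of dimension', cubical_complex.dimension(),
+          '-', cubical_complex.num_simplices(), 'simplices.')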
+
+Periodic boundary conditions.
+-----------------------------
+
+Often one would like to impose periodic boundary conditions on the cubical complex (cf.
+:doc:`periodic_cubical_complex_ref`).
+Let :math:`I_1\times \ldots \times I_n` be a box that is decomposed with a cubical complex :math:`\mathcal{K}`.
+Imposing periodic boundary conditions in direction :math:`i` means that the left and the right side of the complex
+:math:`\mathcal{K}` are considered the same. In particular, if for a bitmap :math:`\mathcal{K}` periodic boundary
+conditions are imposed in all directions, then the complex :math:`\mathcal{K}` becomes an n-dimensional torus. One can
+use various constructors from the file Bitmap_cubical_complex_periodic_boundary_conditions_base.h to construct a
+cubical complex with periodic boundary conditions.
+
+One can also use Perseus style input files (see :doc:`Perseus <fileformats>`) for the specific periodic case:
+
+.. testcode::
+
+ import gudhi
+ periodic_cc = gudhi.PeriodicCubicalComplex(perseus_file=gudhi.__root_source_dir__ + \
+ '/data/bitmap/periodiccubicalcomplexdoc.txt')
+ result_str = 'Periodic cubical complex is of dimension ' + repr(periodic_cc.dimension()) + ' - ' + \
+ repr(periodic_cc.num_simplices()) + ' simplices.'
+ print(result_str)
+
+The program output is:
+
+.. testoutput::
+
+ Periodic cubical complex is of dimension 2 - 42 simplices.
+
+Or it can be defined as follows:
+
+.. testcode::
+
+ from gudhi import PeriodicCubicalComplex as pcc
+ periodic_cc = pcc(dimensions=[3,3],
+ top_dimensional_cells= [0, 0, 0, 0, 1, 0, 0, 0, 0],
+ periodic_dimensions=[True, False])
+ result_str = 'Periodic cubical complex is of dimension ' + repr(periodic_cc.dimension()) + ' - ' + \
+ repr(periodic_cc.num_simplices()) + ' simplices.'
+ print(result_str)
+
+The program output is:
+
+.. testoutput::
+
+ Periodic cubical complex is of dimension 2 - 42 simplices.
+
+Examples.
+---------
+
+End-user programs are available in the python/example/ folder.
+
+Bibliography
+============
+
+.. bibliography:: ../../biblio/bibliography.bib
+ :filter: docnames
+ :style: unsrt
diff --git a/src/python/doc/euclidean_strong_witness_complex_ref.rst b/src/python/doc/euclidean_strong_witness_complex_ref.rst
new file mode 100644
index 00000000..1a602cd5
--- /dev/null
+++ b/src/python/doc/euclidean_strong_witness_complex_ref.rst
@@ -0,0 +1,14 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
+=================================================
+Euclidean strong witness complex reference manual
+=================================================
+
+.. autoclass:: gudhi.EuclideanStrongWitnessComplex
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+ .. automethod:: gudhi.EuclideanStrongWitnessComplex.__init__
diff --git a/src/python/doc/euclidean_witness_complex_ref.rst b/src/python/doc/euclidean_witness_complex_ref.rst
new file mode 100644
index 00000000..28daf965
--- /dev/null
+++ b/src/python/doc/euclidean_witness_complex_ref.rst
@@ -0,0 +1,14 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
+==========================================
+Euclidean witness complex reference manual
+==========================================
+
+.. autoclass:: gudhi.EuclideanWitnessComplex
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+ .. automethod:: gudhi.EuclideanWitnessComplex.__init__
diff --git a/src/python/doc/examples.rst b/src/python/doc/examples.rst
new file mode 100644
index 00000000..edbc2f72
--- /dev/null
+++ b/src/python/doc/examples.rst
@@ -0,0 +1,30 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
+Examples
+########
+
+.. only:: builder_html
+
+ * :download:`rips_complex_from_points_example.py <../example/rips_complex_from_points_example.py>`
+ * :download:`alpha_complex_from_points_example.py <../example/alpha_complex_from_points_example.py>`
+ * :download:`simplex_tree_example.py <../example/simplex_tree_example.py>`
+ * :download:`alpha_rips_persistence_bottleneck_distance.py <../example/alpha_rips_persistence_bottleneck_distance.py>`
+ * :download:`tangential_complex_plain_homology_from_off_file_example.py <../example/tangential_complex_plain_homology_from_off_file_example.py>`
+ * :download:`alpha_complex_diagram_persistence_from_off_file_example.py <../example/alpha_complex_diagram_persistence_from_off_file_example.py>`
+ * :download:`periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py <../example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py>`
+ * :download:`bottleneck_basic_example.py <../example/bottleneck_basic_example.py>`
+ * :download:`gudhi_graphical_tools_example.py <../example/gudhi_graphical_tools_example.py>`
+ * :download:`witness_complex_from_nearest_landmark_table.py <../example/witness_complex_from_nearest_landmark_table.py>`
+ * :download:`euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py>`
+ * :download:`euclidean_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py>`
+ * :download:`rips_complex_diagram_persistence_from_off_file_example.py <../example/rips_complex_diagram_persistence_from_off_file_example.py>`
+ * :download:`rips_complex_diagram_persistence_from_distance_matrix_file_example.py <../example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py>`
+ * :download:`rips_persistence_diagram.py <../example/rips_persistence_diagram.py>`
+ * :download:`sparse_rips_persistence_diagram.py <../example/sparse_rips_persistence_diagram.py>`
+ * :download:`random_cubical_complex_persistence_example.py <../example/random_cubical_complex_persistence_example.py>`
+ * :download:`coordinate_graph_induced_complex.py <../example/coordinate_graph_induced_complex.py>`
+ * :download:`functional_graph_induced_complex.py <../example/functional_graph_induced_complex.py>`
+ * :download:`voronoi_graph_induced_complex.py <../example/voronoi_graph_induced_complex.py>`
+ * :download:`nerve_of_a_covering.py <../example/nerve_of_a_covering.py>`
diff --git a/src/python/doc/fileformats.rst b/src/python/doc/fileformats.rst
new file mode 100644
index 00000000..345dfdba
--- /dev/null
+++ b/src/python/doc/fileformats.rst
@@ -0,0 +1,127 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
+File formats
+############
+
+OFF file format
+***************
+
+OFF files must conform to the format described here:
+http://www.geomview.org/docs/html/OFF.html
+
+OFF files are mainly used as point cloud inputs. Here is an example of 7 points
+in a 3-dimensional space. As edges and faces are not used for a point set, there
+is no need to specify them (just set their numbers to 0):
+
+.. literalinclude:: ../../data/points/alphacomplexdoc.off
+
+.. centered:: ../../data/points/alphacomplexdoc.off
+
+For dimensions higher than 3, the dimension can be set as follows::
+
+ # Dimension is no more 3
+ nOFF
+ # dimension 4 - 7 vertices - 0 face - 0 edge
+ 4 7 0 0
+ # Point set:
+ 1.0 1.0 0.0 0.0
+ 7.0 0.0 0.0 0.0
+ 4.0 6.0 0.0 0.0
+ 9.0 6.0 0.0 0.0
+ 0.0 14.0 0.0 0.0
+ 2.0 19.0 0.0 0.0
+ 9.0 17.0 0.0 0.0
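+
+Such point clouds can be read from Python with ``gudhi.read_off`` (cf. :doc:`reader_utils_ref`).
+A minimal sketch:
+
+.. code-block:: python
+
+    import gudhi
+
+    # Returns the points as a list of coordinate lists
+    point_cloud = gudhi.read_off(off_file=gudhi.__root_source_dir__ +
+                                 '/data/points/alphacomplexdoc.off')
+    print(len(point_cloud))  # 7 points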
+
+Persistence Diagram
+*******************
+
+Such a file, whose extension is usually ``.pers``, contains a list of
+persistence intervals.
+
+Lines starting with ``#`` are ignored (comments).
+
+Other lines might contain 2, 3 or 4 values (the number of values on each line
+must be the same for all lines)::
+
+ [[field] dimension] birth death
+
+Here is a simple sample file::
+
+ # Persistence diagram example
+ 2 2.7 3.7
+ 2 9.6 14.
+ # Some comments
+ 3 34.2 34.974
+ 4 3. inf
+
+Other sample files can be found in the `data/persistence_diagram` folder.
+
+Such files can be generated with
+:meth:`gudhi.SimplexTree.write_persistence_diagram`, read with
+:meth:`gudhi.read_persistence_intervals_grouped_by_dimension`, or
+:meth:`gudhi.read_persistence_intervals_in_dimension` and displayed with
+:meth:`gudhi.plot_persistence_barcode` or
+:meth:`gudhi.plot_persistence_diagram`.
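+
+As an illustration of the format itself (independently of the GUDHI readers listed above), a
+minimal hand-written parser could look like this; the helper name and the handling of the
+optional columns are ours, not part of the library:
+
+.. code-block:: python
+
+    def read_pers_file(path):
+        """Return a list of (field, dimension, birth, death); missing columns are None."""
+        intervals = []
+        with open(path) as pers_file:
+            for line in pers_file:
+                line = line.strip()
+                if not line or line.startswith('#'):
+                    continue  # skip blank lines and comments
+                values = line.split()
+                birth, death = float(values[-2]), float(values[-1])  # 'inf' parses as infinity
+                dimension = int(values[-3]) if len(values) >= 3 else None
+                field = int(values[-4]) if len(values) == 4 else None
+                intervals.append((field, dimension, birth, death))
+        return intervals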
+
+Iso-cuboid
+**********
+
+Such a file describes an iso-oriented cuboid with diagonal opposite vertices
+(min_x, min_y, min_z,...) and (max_x, max_y, max_z, ...). The format is::
+
+ min_x min_y [min_z ...]
+ max_x max_y [max_z ...]
+
+Here is a simple sample file in the 3D case::
+
+ -1. -1. -1.
+ 1. 1. 1.
+
+
+.. _Perseus file format:
+
+Perseus
+*******
+
+This file format is inspired by the
+`Perseus software <http://www.sas.upenn.edu/~vnanda/perseus/>`_ by Vidit Nanda.
+The first line contains a number d, the dimension of the bitmap (2 in the
+example below). The next d lines give the numbers of top-dimensional cubes in each
+dimension (3 and 3 in the example below). Next, in lexicographical order, the
+filtration values of the top-dimensional cubes are given (1 4 6 8 20 4 7 6 5 in the
+example below).
+
+.. figure::
+    ../../doc/Bitmap_cubical_complex/exampleBitmap.png
+    :alt: Example of input data.
+    :figclass: align-center
+
+    Example of input data.
+
+The input file for the following complex is:
+
+.. literalinclude:: ../../data/bitmap/cubicalcomplexdoc.txt
+
+.. centered:: ../../data/bitmap/cubicalcomplexdoc.txt
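+
+As an illustration, a minimal sketch that writes such a file from a flat list of
+top-dimensional cell filtration values (the helper below is ours, not part of the library):
+
+.. code-block:: python
+
+    def write_perseus_file(path, sizes, top_dimensional_cells):
+        # sizes: number of top-dimensional cubes in each direction, e.g. [3, 3]
+        # top_dimensional_cells: filtration values in lexicographical order
+        with open(path, 'w') as perseus_file:
+            perseus_file.write('%d\n' % len(sizes))   # dimension of the bitmap
+            for size in sizes:
+                perseus_file.write('%d\n' % size)     # top-dimensional cubes in this direction
+            for value in top_dimensional_cells:
+                perseus_file.write('%s\n' % value)    # one filtration value per line
+
+    write_perseus_file('cubicalcomplexdoc.txt', [3, 3], [1, 4, 6, 8, 20, 4, 7, 6, 5])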
+
+To indicate periodic boundary conditions in a given direction, the number of
+top-dimensional cells in this direction has to be multiplied by -1. For
+instance:
+
+.. literalinclude:: ../../data/bitmap/periodiccubicalcomplexdoc.txt
+
+.. centered:: ../../data/bitmap/periodiccubicalcomplexdoc.txt
+
+
+This indicates that periodic boundary conditions are imposed in the x direction,
+but not in the y direction.
+
+Other sample files can be found in the `data/bitmap` folder.
+
+.. note::
+    Unlike in the Perseus format, the filtration on the maximal cubes can be any
+    double precision number. Consequently, one cannot mark the cubes that are
+    not present with ``-1``. To do that, please set their filtration value to
+    :math:`+\infty` (aka. ``inf`` in the file).
\ No newline at end of file
diff --git a/src/python/doc/img/graphical_tools_representation.png b/src/python/doc/img/graphical_tools_representation.png
new file mode 100644
index 00000000..9759f7ba
--- /dev/null
+++ b/src/python/doc/img/graphical_tools_representation.png
Binary files differ
diff --git a/src/python/doc/index.rst b/src/python/doc/index.rst
new file mode 100644
index 00000000..e379bc23
--- /dev/null
+++ b/src/python/doc/index.rst
@@ -0,0 +1,86 @@
+GUDHI Python module documentation
+#################################
+
+.. figure::
+ ../../doc/common/Gudhi_banner.png
+ :alt: Gudhi banner
+ :figclass: align-center
+
+Complexes
+*********
+
+Cubical complexes
+=================
+
+.. include:: cubical_complex_sum.inc
+
+Simplicial complexes
+====================
+
+Alpha complex
+-------------
+
+.. include:: alpha_complex_sum.inc
+
+Rips complex
+-------------
+
+.. include:: rips_complex_sum.inc
+
+Witness complex
+---------------
+
+.. include:: witness_complex_sum.inc
+
+Cover complexes
+===============
+
+.. include:: nerve_gic_complex_sum.inc
+
+Data structures and basic operations
+************************************
+
+Data structures
+===============
+
+Simplex tree
+------------
+
+.. include:: simplex_tree_sum.inc
+
+Topological descriptors computation
+***********************************
+
+Persistent cohomology
+======================
+
+.. include:: persistent_cohomology_sum.inc
+
+Manifold reconstruction
+***********************
+
+Tangential complex
+==================
+
+.. include:: tangential_complex_sum.inc
+
+
+Topological descriptors tools
+*****************************
+
+Bottleneck distance
+===================
+
+.. include:: bottleneck_distance_sum.inc
+
+Persistence graphical tools
+===========================
+
+.. include:: persistence_graphical_tools_sum.inc
+
+Bibliography
+************
+
+.. bibliography:: ../../biblio/bibliography.bib
+ :filter: docnames
+ :style: unsrt
diff --git a/src/python/doc/installation.rst b/src/python/doc/installation.rst
new file mode 100644
index 00000000..d8b6f861
--- /dev/null
+++ b/src/python/doc/installation.rst
@@ -0,0 +1,242 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
+Installation
+############
+
+Conda
+*****
+The easiest way to install the Python version of GUDHI is using
+`conda <https://gudhi.inria.fr/licensing/>`_.
+
+Compiling
+*********
+The library uses C++11 and requires `Boost <https://www.boost.org/>`_ ≥ 1.56.0,
+`CMake <https://www.cmake.org/>`_ ≥ 3.1 to generate makefiles, and
+`NumPy <http://numpy.org>`_ and `Cython <https://www.cython.org/>`_ to compile
+the GUDHI Python module.
+It is a multi-platform library and compiles on Linux, Mac OSX and Windows
+(Visual Studio 2015).
+
+On `Windows <https://wiki.python.org/moin/WindowsCompilers>`_, only Python
+≥ 3.5 is supported because of the required Visual Studio version.
+
+On other systems, if several Python versions are installed, version 2.X
+will be used by default, but you can force Python 3 by adding
+:code:`-DPython_ADDITIONAL_VERSIONS=3` to the cmake command.
+
+GUDHI Python module compilation
+===============================
+
+To build the GUDHI Python module, run the following commands in a terminal:
+
+.. code-block:: bash
+
+ cd /path-to-gudhi/
+ mkdir build
+ cd build/
+ cmake ..
+ cd python
+ make
+
+GUDHI Python module installation
+================================
+
+Once the compilation succeeds, one can add the GUDHI Python module path to the
+PYTHONPATH:
+
+.. code-block:: bash
+
+ # For windows, you have to set PYTHONPATH environment variable
+ export PYTHONPATH='$PYTHONPATH:/path-to-gudhi/build/python'
+
+Or install it permanently in your Python packages folder:
+
+.. code-block:: bash
+
+ cd /path-to-gudhi/build/python
+ # May require sudo or administrator privileges
+ make install
+
+
+Test suites
+===========
+
+To test your build, `py.test <http://doc.pytest.org>`_ is required. Run the
+following commands in a terminal:
+
+.. code-block:: bash
+
+ cd /path-to-gudhi/build/python
+ # For windows, you have to set PYTHONPATH environment variable
+ export PYTHONPATH='$PYTHONPATH:/path-to-gudhi/build/python'
+ make test
+
+Debugging issues
+================
+
+If tests fail, please check your PYTHONPATH, try to :code:`import gudhi`,
+and check the errors.
+The problem may come from a badly linked or badly installed third-party library.
+
+If :code:`import gudhi` succeeds, please have a look at the debug information:
+
+.. code-block:: python
+
+ import gudhi
+ print(gudhi.__debug_info__)
+
+You should get something like:
+
+.. code-block:: none
+
+ Python version 2.7.15
+ Cython version 0.26.1
+ Numpy version 1.14.1
+ Eigen3 version 3.1.1
+ Installed modules are: off_reader;simplex_tree;rips_complex;
+ cubical_complex;periodic_cubical_complex;reader_utils;witness_complex;
+ strong_witness_complex;alpha_complex;
+ Missing modules are: bottleneck_distance;nerve_gic;subsampling;
+ tangential_complex;persistence_graphical_tools;
+ euclidean_witness_complex;euclidean_strong_witness_complex;
+ CGAL version 4.7.1000
+ GMP_LIBRARIES = /usr/lib/x86_64-linux-gnu/libgmp.so
+ GMPXX_LIBRARIES = /usr/lib/x86_64-linux-gnu/libgmpxx.so
+ TBB version 9107 found and used
+
+Here, you can see that bottleneck_distance, nerve_gic, subsampling and
+tangential_complex are missing because of the CGAL version.
+persistence_graphical_tools is not available because matplotlib is not
+available.
+Unit tests cannot be run because pytest is missing.
+
+A complete configuration would be:
+
+.. code-block:: none
+
+ Python version 3.6.5
+ Cython version 0.28.2
+ Pytest version 3.3.2
+ Matplotlib version 2.2.2
+ Numpy version 1.14.5
+ Eigen3 version 3.3.4
+ Installed modules are: off_reader;simplex_tree;rips_complex;
+ cubical_complex;periodic_cubical_complex;persistence_graphical_tools;
+ reader_utils;witness_complex;strong_witness_complex;
+ persistence_graphical_tools;bottleneck_distance;nerve_gic;subsampling;
+ tangential_complex;alpha_complex;euclidean_witness_complex;
+ euclidean_strong_witness_complex;
+ CGAL header only version 4.11.0
+ GMP_LIBRARIES = /usr/lib/x86_64-linux-gnu/libgmp.so
+ GMPXX_LIBRARIES = /usr/lib/x86_64-linux-gnu/libgmpxx.so
+ TBB version 9107 found and used
+
+Documentation
+=============
+
+To build the documentation, `sphinx-doc <http://www.sphinx-doc.org>`_ and
+`sphinxcontrib-bibtex <https://sphinxcontrib-bibtex.readthedocs.io>`_ are
+required. As the documentation is auto-tested, `CGAL`_, `Eigen3`_,
+`Matplotlib`_, `NumPy`_ and `SciPy`_ are also mandatory to build the
+documentation.
+
+Run the following commands in a terminal:
+
+.. code-block:: bash
+
+ cd /path-to-gudhi/build/python
+ make sphinx
+
+Optional third-party library
+****************************
+
+CGAL
+====
+
+Some GUDHI modules (cf. :doc:`modules list </index>`) and a few examples
+require CGAL, a C++ library that provides easy access to efficient and
+reliable geometric algorithms.
+
+
+The procedure to install this library
+according to your operating system is detailed
+`here <http://doc.cgal.org/latest/Manual/installation.html>`_.
+
+The following examples require CGAL version ≥ 4.11.0:
+
+.. only:: builder_html
+
+ * :download:`alpha_complex_diagram_persistence_from_off_file_example.py <../example/alpha_complex_diagram_persistence_from_off_file_example.py>`
+ * :download:`alpha_complex_from_points_example.py <../example/alpha_complex_from_points_example.py>`
+ * :download:`bottleneck_basic_example.py <../example/bottleneck_basic_example.py>`
+ * :download:`tangential_complex_plain_homology_from_off_file_example.py <../example/tangential_complex_plain_homology_from_off_file_example.py>`
+ * :download:`euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py>`
+ * :download:`euclidean_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py>`
+
+Eigen
+=====
+
+Some GUDHI modules (cf. :doc:`modules list </index>`) and a few examples
+require `Eigen <http://eigen.tuxfamily.org/>`_, a C++ template
+library for linear algebra: matrices, vectors, numerical solvers, and related
+algorithms.
+
+The following examples require `Eigen <http://eigen.tuxfamily.org/>`_ version ≥ 3.1.0:
+
+.. only:: builder_html
+
+ * :download:`alpha_complex_diagram_persistence_from_off_file_example.py <../example/alpha_complex_diagram_persistence_from_off_file_example.py>`
+ * :download:`alpha_complex_from_points_example.py <../example/alpha_complex_from_points_example.py>`
+ * :download:`tangential_complex_plain_homology_from_off_file_example.py <../example/tangential_complex_plain_homology_from_off_file_example.py>`
+ * :download:`euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py>`
+ * :download:`euclidean_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py>`
+
+Matplotlib
+==========
+
+The :doc:`persistence graphical tools </persistence_graphical_tools_user>`
+module requires `Matplotlib <http://matplotlib.org>`_, a Python 2D plotting
+library which produces publication quality figures in a variety of hardcopy
+formats and interactive environments across platforms.
+
+The following examples require `Matplotlib <http://matplotlib.org>`_:
+
+.. only:: builder_html
+
+ * :download:`alpha_complex_diagram_persistence_from_off_file_example.py <../example/alpha_complex_diagram_persistence_from_off_file_example.py>`
+ * :download:`gudhi_graphical_tools_example.py <../example/gudhi_graphical_tools_example.py>`
+ * :download:`periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py <../example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py>`
+ * :download:`rips_complex_diagram_persistence_from_off_file_example.py <../example/rips_complex_diagram_persistence_from_off_file_example.py>`
+ * :download:`rips_persistence_diagram.py <../example/rips_persistence_diagram.py>`
+ * :download:`rips_complex_diagram_persistence_from_distance_matrix_file_example.py <../example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py>`
+ * :download:`tangential_complex_plain_homology_from_off_file_example.py <../example/tangential_complex_plain_homology_from_off_file_example.py>`
+ * :download:`euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py>`
+ * :download:`euclidean_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py>`
+
+SciPy
+=====
+
+The :doc:`persistence graphical tools </persistence_graphical_tools_user>`
+module requires `SciPy <http://scipy.org>`_, a Python-based ecosystem of
+open-source software for mathematics, science, and engineering.
+
+Threading Building Blocks
+=========================
+
+`Intel® TBB <https://www.threadingbuildingblocks.org/>`_ lets you easily write
+parallel C++ programs that take full advantage of multicore performance, that
+are portable and composable, and that have future-proof scalability.
+
+Having Intel® TBB installed is recommended to parallelize and accelerate some
+GUDHI computations.
+
+Bug reports and contributions
+*****************************
+
+Please help us improve the quality of the GUDHI library. You may report bugs or suggestions to:
+
+ Contact: gudhi-users@lists.gforge.inria.fr
+
+GUDHI is open to external contributions. If you want to join our development team, please contact us.
diff --git a/src/python/doc/nerve_gic_complex_ref.rst b/src/python/doc/nerve_gic_complex_ref.rst
new file mode 100644
index 00000000..abde2e8c
--- /dev/null
+++ b/src/python/doc/nerve_gic_complex_ref.rst
@@ -0,0 +1,14 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
+================================
+Cover complexes reference manual
+================================
+
+.. autoclass:: gudhi.CoverComplex
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+ .. automethod:: gudhi.CoverComplex.__init__
diff --git a/src/python/doc/nerve_gic_complex_sum.inc b/src/python/doc/nerve_gic_complex_sum.inc
new file mode 100644
index 00000000..d633c4ff
--- /dev/null
+++ b/src/python/doc/nerve_gic_complex_sum.inc
@@ -0,0 +1,16 @@
+.. table::
+ :widths: 30 50 20
+
+ +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------------------------------------+
+ | .. figure:: | Nerves and Graph Induced Complexes are cover complexes, i.e. | :Author: Mathieu Carrière |
+ | ../../doc/Nerve_GIC/gicvisu.jpg | simplicial complexes that provably contain topological information | |
+ | :alt: Graph Induced Complex of a point cloud. | about the input data. They can be computed with a cover of the data, | :Introduced in: GUDHI 2.3.0 |
+ | :figclass: align-center | that comes i.e. from the preimage of a family of intervals covering | |
+ | | the image of a scalar-valued function defined on the data. | :Copyright: MIT (`GPL v3 </licensing/>`_) |
+ | | | |
+ | | | :Requires: `CGAL <installation.html#cgal>`__ :math:`\geq` 4.11.0 |
+ | | | |
+ | | | |
+ +----------------------------------------------------------------+------------------------------------------------------------------------+------------------------------------------------------------------+
+ | * :doc:`nerve_gic_complex_user` | * :doc:`nerve_gic_complex_ref` |
+ +----------------------------------------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------+
diff --git a/src/python/doc/nerve_gic_complex_user.rst b/src/python/doc/nerve_gic_complex_user.rst
new file mode 100644
index 00000000..9101f45d
--- /dev/null
+++ b/src/python/doc/nerve_gic_complex_user.rst
@@ -0,0 +1,315 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
+Cover complexes user manual
+===========================
+Definition
+----------
+
+.. include:: nerve_gic_complex_sum.inc
+
+Visualizations of the simplicial complexes can be done with
+neato (from `graphviz <http://www.graphviz.org/>`_),
+`geomview <http://www.geomview.org/>`_, or
+`KeplerMapper <https://github.com/MLWave/kepler-mapper>`_.
+Input point clouds are assumed to be OFF files (cf. :doc:`fileformats`).
+
+Covers
+------
+
+Nerves and Graph Induced Complexes require a cover C of the input point cloud P,
+that is, a set of subsets of P whose union is P itself.
+Very often, this cover is obtained from the preimage of a family of intervals covering
+the image of some scalar-valued function f defined on P. This family is parameterized
+by its resolution, which can be either the number or the length of the intervals,
+and its gain, which is the overlap percentage between consecutive intervals (ordered by their first values).
+
+Nerves
+------
+
+Nerve definition
+^^^^^^^^^^^^^^^^
+
+Assume you are given a cover C of your point cloud P. Then, the Nerve of this cover
+is the simplicial complex whose k-simplices correspond to the non-empty intersections of k+1 cover elements.
+See also `Wikipedia <https://en.wikipedia.org/wiki/Nerve_of_a_covering>`_.
+
+.. figure::
+ ../../doc/Nerve_GIC/nerve.png
+ :figclass: align-center
+ :alt: Nerve of a double torus
+
+ Nerve of a double torus
+
+Example
+^^^^^^^
+
+This example builds the Nerve of a point cloud sampled on a 3D human shape (human.off).
+The cover C comes from the preimages of intervals (10 intervals with gain 0.3)
+covering the height function (coordinate 2),
+which are then refined into their connected components using the triangulation of the .OFF file.
+
+.. testcode::
+
+ import gudhi
+ nerve_complex = gudhi.CoverComplex()
+ nerve_complex.set_verbose(True)
+
+ if (nerve_complex.read_point_cloud(gudhi.__root_source_dir__ + \
+ '/data/points/human.off')):
+ nerve_complex.set_type('Nerve')
+ nerve_complex.set_color_from_coordinate(2)
+ nerve_complex.set_function_from_coordinate(2)
+ nerve_complex.set_graph_from_OFF()
+ nerve_complex.set_resolution_with_interval_number(10)
+ nerve_complex.set_gain(0.3)
+ nerve_complex.set_cover_from_function()
+ nerve_complex.find_simplices()
+ nerve_complex.write_info()
+ simplex_tree = nerve_complex.create_simplex_tree()
+ nerve_complex.compute_PD()
+ result_str = 'Nerve is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \
+ repr(simplex_tree.num_simplices()) + ' simplices - ' + \
+ repr(simplex_tree.num_vertices()) + ' vertices.'
+ print(result_str)
+ for filtered_value in simplex_tree.get_filtration():
+ print(filtered_value[0])
+
+The program output is:
+
+.. code-block:: none
+
+ Min function value = -0.979672 and Max function value = 0.816414
+ Interval 0 = [-0.979672, -0.761576]
+ Interval 1 = [-0.838551, -0.581967]
+ Interval 2 = [-0.658942, -0.402359]
+ Interval 3 = [-0.479334, -0.22275]
+ Interval 4 = [-0.299725, -0.0431414]
+ Interval 5 = [-0.120117, 0.136467]
+ Interval 6 = [0.059492, 0.316076]
+ Interval 7 = [0.239101, 0.495684]
+ Interval 8 = [0.418709, 0.675293]
+ Interval 9 = [0.598318, 0.816414]
+ Computing preimages...
+ Computing connected components...
+ 5 interval(s) in dimension 0:
+ [-0.909111, 0.0081753]
+ [-0.171433, 0.367393]
+ [-0.171433, 0.367393]
+ [-0.909111, 0.745853]
+ 0 interval(s) in dimension 1:
+
+.. testoutput::
+
+ Nerve is of dimension 1 - 41 simplices - 21 vertices.
+ [0]
+ [1]
+ [4]
+ [1, 4]
+ [2]
+ [0, 2]
+ [8]
+ [2, 8]
+ [5]
+ [4, 5]
+ [9]
+ [8, 9]
+ [13]
+ [5, 13]
+ [14]
+ [9, 14]
+ [19]
+ [13, 19]
+ [25]
+ [32]
+ [20]
+ [20, 32]
+ [33]
+ [25, 33]
+ [26]
+ [14, 26]
+ [19, 26]
+ [42]
+ [26, 42]
+ [34]
+ [33, 34]
+ [27]
+ [20, 27]
+ [35]
+ [27, 35]
+ [34, 35]
+ [35, 42]
+ [44]
+ [35, 44]
+ [54]
+ [44, 54]
+
+
+The program also writes a file ../../data/points/human.off_sc.txt. The first
+three lines in this file are the location of the input point cloud and the
+function used to compute the cover.
+The fourth line contains the number of vertices nv and edges ne of the Nerve.
+The next nv lines represent the vertices. Each line contains the vertex ID,
+the number of data points it contains, and their average color function value.
+Finally, the next ne lines represent the edges, characterized by the IDs of
+their vertices.
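+
+A minimal sketch of a reader for this file (the helper below is ours, not part of the GUDHI
+API, and simply assumes the layout described above):
+
+.. code-block:: python
+
+    def read_sc_txt(path):
+        with open(path) as sc_file:
+            # location of the input point cloud and the cover function
+            header = [sc_file.readline().strip() for _ in range(3)]
+            nv, ne = (int(x) for x in sc_file.readline().split())
+            # one line per vertex: ID, number of data points, average color function value
+            vertices = [sc_file.readline().split() for _ in range(nv)]
+            # one line per edge: IDs of its two vertices
+            edges = [[int(x) for x in sc_file.readline().split()] for _ in range(ne)]
+        return header, vertices, edges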
+
+Using KeplerMapper, one can obtain the following visualization:
+
+.. figure::
+ ../../doc/Nerve_GIC/nervevisu.jpg
+ :figclass: align-center
+ :alt: Visualization with KeplerMapper
+
+ Visualization with KeplerMapper
+
+Graph Induced Complexes (GIC)
+-----------------------------
+
+GIC definition
+^^^^^^^^^^^^^^
+
+Again, assume you are given a cover C of your point cloud P. Moreover, assume
+you are also given a graph G built on top of P. Then, for any clique in G
+whose nodes all belong to different elements of C, the GIC includes a
+corresponding simplex, whose dimension is the number of nodes in the clique
+minus one.
+See :cite:`Dey13` for more details.
+
+.. figure::
+ ../../doc/Nerve_GIC/GIC.jpg
+ :figclass: align-center
+ :alt: GIC of a point cloud
+
+ GIC of a point cloud
+
+Example with cover from Voronoï
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+This example builds the GIC of a point cloud sampled on a 3D human shape
+(human.off).
+We randomly subsampled 100 points in the point cloud, which act as seeds of
+a geodesic Voronoï diagram. Each cell of the diagram is then an element of C.
+The graph G (used to compute both the geodesics for Voronoï and the GIC)
+comes from the triangulation of the human shape. Note that the resulting
+simplicial complex is in dimension 3 in this example.
+
+.. testcode::
+
+ import gudhi
+ nerve_complex = gudhi.CoverComplex()
+
+ if (nerve_complex.read_point_cloud(gudhi.__root_source_dir__ + \
+ '/data/points/human.off')):
+ nerve_complex.set_type('GIC')
+ nerve_complex.set_color_from_coordinate()
+ nerve_complex.set_graph_from_OFF()
+ nerve_complex.set_cover_from_Voronoi(700)
+ nerve_complex.find_simplices()
+ nerve_complex.plot_off()
+
+The program outputs SC.off. Using e.g.
+
+.. code-block:: none
+
+ geomview ../../data/points/human.off_sc.off
+
+one can obtain the following visualization:
+
+.. figure::
+ ../../doc/Nerve_GIC/gicvoronoivisu.jpg
+ :figclass: align-center
+ :alt: Visualization with Geomview
+
+ Visualization with Geomview
+
+Functional GIC
+^^^^^^^^^^^^^^
+
+If one restricts to the cliques in G whose nodes all belong to preimages of
+consecutive intervals (assuming the cover of the height function is minimal,
+i.e. no more than two intervals can intersect at a time), the GIC is of
+dimension one, i.e. a graph.
+We call this graph the functional GIC. See :cite:`Carriere16` for more details.
+
+Example
+^^^^^^^
+
+Functional GIC comes with automatic selection of the Rips threshold,
+the resolution and the gain of the function cover. See :cite:`Carriere17c` for
+more details. In this example, we compute the functional GIC of a Klein bottle
+embedded in R^5, where the graph G comes from a Rips complex with automatic
+threshold, and the cover C comes from the preimages of intervals covering the
+first coordinate, with automatic resolution and gain. Note that automatic
+threshold, resolution and gain can be computed as well for the Nerve.
+
+.. testcode::
+
+ import gudhi
+ nerve_complex = gudhi.CoverComplex()
+
+ if (nerve_complex.read_point_cloud(gudhi.__root_source_dir__ + \
+ '/data/points/KleinBottle5D.off')):
+ nerve_complex.set_type('GIC')
+ nerve_complex.set_color_from_coordinate(0)
+ nerve_complex.set_function_from_coordinate(0)
+ nerve_complex.set_graph_from_automatic_rips()
+ nerve_complex.set_automatic_resolution()
+ nerve_complex.set_gain()
+ nerve_complex.set_cover_from_function()
+ nerve_complex.find_simplices()
+ nerve_complex.plot_dot()
+
+The program outputs SC.dot. Using e.g.
+
+.. code-block:: none
+
+ neato ../../data/points/KleinBottle5D.off_sc.dot -Tpdf -o ../../data/points/KleinBottle5D.off_sc.pdf
+
+one can obtain the following visualization:
+
+.. figure::
+ ../../doc/Nerve_GIC/coordGICvisu2.jpg
+ :figclass: align-center
+ :alt: Visualization with neato
+
+ Visualization with neato
+
+where nodes are colored by the filter function values and, for each node, the
+first number is its ID and the second is the number of data points it
+contains.
+
+We also provide an example on a set of 72 pictures taken around the same object
+(lucky_cat.off).
+The function is now the first eigenfunction given by PCA, whose values are
+written in a file (lucky_cat_PCA1). Threshold, resolution and gain are
+automatically selected as before.
+
+.. testcode::
+
+ import gudhi
+ nerve_complex = gudhi.CoverComplex()
+
+ if (nerve_complex.read_point_cloud(gudhi.__root_source_dir__ + \
+ '/data/points/COIL_database/lucky_cat.off')):
+ nerve_complex.set_type('GIC')
+ pca_file = gudhi.__root_source_dir__ + \
+ '/data/points/COIL_database/lucky_cat_PCA1'
+ nerve_complex.set_color_from_file(pca_file)
+ nerve_complex.set_function_from_file(pca_file)
+ nerve_complex.set_graph_from_automatic_rips()
+ nerve_complex.set_automatic_resolution()
+ nerve_complex.set_gain()
+ nerve_complex.set_cover_from_function()
+ nerve_complex.find_simplices()
+ nerve_complex.plot_dot()
+
+The program again outputs SC.dot, which gives the following visualization after using neato:
+
+.. figure::
+ ../../doc/Nerve_GIC/funcGICvisu.jpg
+ :figclass: align-center
+ :alt: Visualization with neato
+
+ Visualization with neato
diff --git a/src/python/doc/periodic_cubical_complex_ref.rst b/src/python/doc/periodic_cubical_complex_ref.rst
new file mode 100644
index 00000000..4b831647
--- /dev/null
+++ b/src/python/doc/periodic_cubical_complex_ref.rst
@@ -0,0 +1,13 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
+Periodic cubical complex reference manual
+#########################################
+
+.. autoclass:: gudhi.PeriodicCubicalComplex
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+ .. automethod:: gudhi.PeriodicCubicalComplex.__init__
diff --git a/src/python/doc/persistence_graphical_tools_ref.rst b/src/python/doc/persistence_graphical_tools_ref.rst
new file mode 100644
index 00000000..0b0038d9
--- /dev/null
+++ b/src/python/doc/persistence_graphical_tools_ref.rst
@@ -0,0 +1,11 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
+============================================
+Persistence graphical tools reference manual
+============================================
+
+.. autofunction:: gudhi.plot_persistence_barcode
+.. autofunction:: gudhi.plot_persistence_diagram
+.. autofunction:: gudhi.plot_persistence_density
diff --git a/src/python/doc/persistence_graphical_tools_sum.inc b/src/python/doc/persistence_graphical_tools_sum.inc
new file mode 100644
index 00000000..0cdf8072
--- /dev/null
+++ b/src/python/doc/persistence_graphical_tools_sum.inc
@@ -0,0 +1,14 @@
+.. table::
+ :widths: 30 50 20
+
+ +-----------------------------------------------------------------+-----------------------------------------------------------------------+-----------------------------------------------+
+ | .. figure:: | These graphical tools comes on top of persistence results and allows | :Author: Vincent Rouvreau |
+ | img/graphical_tools_representation.png | the user to build easily persistence barcode, diagram or density. | |
+ | | | :Introduced in: GUDHI 2.0.0 |
+ | | | |
+ | | | :Copyright: MIT |
+ | | | |
+ | | | :Requires: matplotlib, numpy and scipy |
+ +-----------------------------------------------------------------+-----------------------------------------------------------------------+-----------------------------------------------+
+ | * :doc:`persistence_graphical_tools_user` | * :doc:`persistence_graphical_tools_ref` |
+ +-----------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+
diff --git a/src/python/doc/persistence_graphical_tools_user.rst b/src/python/doc/persistence_graphical_tools_user.rst
new file mode 100644
index 00000000..b2124fdd
--- /dev/null
+++ b/src/python/doc/persistence_graphical_tools_user.rst
@@ -0,0 +1,73 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
+Persistence graphical tools user manual
+=======================================
+Definition
+----------
+.. include:: persistence_graphical_tools_sum.inc
+
+
+Show persistence as a barcode
+-----------------------------
+
+.. note::
+    This function requires matplotlib and numpy to be available.
+
+This function can display the persistence result as a barcode:
+
+.. plot::
+ :include-source:
+
+ import gudhi
+
+ off_file = gudhi.__root_source_dir__ + '/data/points/tore3D_300.off'
+ point_cloud = gudhi.read_off(off_file=off_file)
+
+ rips_complex = gudhi.RipsComplex(points=point_cloud, max_edge_length=0.7)
+ simplex_tree = rips_complex.create_simplex_tree(max_dimension=3)
+ diag = simplex_tree.persistence(min_persistence=0.4)
+
+ plot = gudhi.plot_persistence_barcode(diag)
+ plot.show()
+
+Show persistence as a diagram
+-----------------------------
+
+.. note::
+    This function requires matplotlib and numpy to be available.
+
+This function can display the persistence result as a diagram:
+
+.. plot::
+ :include-source:
+
+ import gudhi
+
+ # rips_on_tore3D_1307.pers obtained from write_persistence_diagram method
+ persistence_file=gudhi.__root_source_dir__ + \
+ '/data/persistence_diagram/rips_on_tore3D_1307.pers'
+ plt = gudhi.plot_persistence_diagram(persistence_file=persistence_file,
+ legend=True)
+ plt.show()
+
+Persistence density
+-------------------
+
+.. note::
+    This function requires matplotlib, numpy and scipy to be available.
+
+If you want more information on a specific dimension, for instance:
+
+.. plot::
+ :include-source:
+
+ import gudhi
+
+ # rips_on_tore3D_1307.pers obtained from write_persistence_diagram method
+ persistence_file=gudhi.__root_source_dir__ + \
+ '/data/persistence_diagram/rips_on_tore3D_1307.pers'
+ plt = gudhi.plot_persistence_density(persistence_file=persistence_file,
+ max_intervals=0, dimension=1, legend=True)
+ plt.show()
diff --git a/src/python/doc/persistent_cohomology_sum.inc b/src/python/doc/persistent_cohomology_sum.inc
new file mode 100644
index 00000000..4d7b077e
--- /dev/null
+++ b/src/python/doc/persistent_cohomology_sum.inc
@@ -0,0 +1,26 @@
+.. table::
+ :widths: 30 50 20
+
+ +-----------------------------------------------------------------+-----------------------------------------------------------------------+-----------------------------------------------+
+ | .. figure:: | The theory of homology consists in attaching to a topological space | :Author: Clément Maria |
+ | ../../doc/Persistent_cohomology/3DTorus_poch.png | a sequence of (homology) groups, capturing global topological | |
+ | :figclass: align-center | features like connected components, holes, cavities, etc. Persistent | :Introduced in: GUDHI 2.0.0 |
+ | | homology studies the evolution -- birth, life and death -- of these | |
+ | Rips Persistent Cohomology on a 3D | features when the topological space is changing. Consequently, the | :Copyright: MIT |
+ | Torus | theory is essentially composed of three elements: topological spaces, | |
+ | | their homology groups and an evolution scheme. | |
+ | | | |
+ | | Computation of persistent cohomology using the algorithm of | |
+ | | :cite:`DBLP:journals/dcg/SilvaMV11` and | |
+ | | :cite:`DBLP:journals/corr/abs-1208-5018` and the Compressed | |
+ | | Annotation Matrix implementation of | |
+ | | :cite:`DBLP:conf/esa/BoissonnatDM13`. | |
+ | | | |
+ +-----------------------------------------------------------------+-----------------------------------------------------------------------+-----------------------------------------------+
+ | * :doc:`persistent_cohomology_user` | Please refer to each data structure that contains persistence |
+ | | feature for reference: |
+ | | |
+ | | * :doc:`simplex_tree_ref` |
+ | | * :doc:`cubical_complex_ref` |
+ | | * :doc:`periodic_cubical_complex_ref` |
+ +-----------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+
diff --git a/src/python/doc/persistent_cohomology_user.rst b/src/python/doc/persistent_cohomology_user.rst
new file mode 100644
index 00000000..de83cda1
--- /dev/null
+++ b/src/python/doc/persistent_cohomology_user.rst
@@ -0,0 +1,120 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
+Persistent cohomology user manual
+=================================
+Definition
+----------
+===================================== ===================================== =====================================
+:Author: Clément Maria :Introduced in: GUDHI PYTHON 2.0.0 :Copyright: GPL v3
+===================================== ===================================== =====================================
+
++-----------------------------------------------------------------+-----------------------------------------------------------------------+
+| :doc:`persistent_cohomology_user` | Please refer to each data structure that contains persistence |
+| | feature for reference: |
+| | |
+| | * :doc:`simplex_tree_ref` |
+| | * :doc:`cubical_complex_ref` |
+| | * :doc:`periodic_cubical_complex_ref` |
++-----------------------------------------------------------------+-----------------------------------------------------------------------+
+
+
+Computation of persistent cohomology using the algorithm of :cite:`DBLP:journals/dcg/SilvaMV11` and
+:cite:`DBLP:journals/corr/abs-1208-5018` and the Compressed Annotation Matrix implementation of
+:cite:`DBLP:conf/esa/BoissonnatDM13`.
+
+The theory of homology consists in attaching to a topological space a sequence of (homology) groups, capturing global
+topological features like connected components, holes, cavities, etc. Persistent homology studies the evolution --
+birth, life and death -- of these features when the topological space is changing. Consequently, the theory is
+essentially composed of three elements:
+
+* topological spaces
+* their homology groups
+* an evolution scheme.
+
+Topological Spaces
+------------------
+
+Topological spaces are represented by simplicial complexes.
+Let :math:`V = \{1, \cdots ,|V|\}` be a set of *vertices*.
+A *simplex* :math:`\sigma` is a subset of vertices :math:`\sigma \subseteq V`.
+A *simplicial complex* :math:`\mathbf{K}` on :math:`V` is a collection of simplices :math:`\{\sigma\}`,
+:math:`\sigma \subseteq V`, such that :math:`\tau \subseteq \sigma \in \mathbf{K} \Rightarrow \tau \in \mathbf{K}`.
+The dimension :math:`n=|\sigma|-1` of :math:`\sigma` is its number of elements minus 1.
+A *filtration* of a simplicial complex is a function :math:`f:\mathbf{K} \rightarrow \mathbb{R}` satisfying
+:math:`f(\tau)\leq f(\sigma)` whenever :math:`\tau \subseteq \sigma`.
+
+Homology
+--------
+
+For a ring :math:`\mathcal{R}`, the group of *n-chains*, denoted :math:`\mathbf{C}_n(\mathbf{K},\mathcal{R})`, of
+:math:`\mathbf{K}` is the group of formal sums of n-simplices with :math:`\mathcal{R}` coefficients. The
+*boundary operator* is a linear operator
+:math:`\partial_n: \mathbf{C}_n(\mathbf{K},\mathcal{R}) \rightarrow \mathbf{C}_{n-1}(\mathbf{K},\mathcal{R})`
+such that :math:`\partial_n \sigma = \partial_n [v_0, \cdots , v_n] = \sum_{i=0}^n (-1)^{i}[v_0,\cdots ,\widehat{v_i}, \cdots,v_n]`,
+where :math:`\widehat{v_i}` means :math:`v_i` is omitted from the list. The chain groups form a sequence:
+
+.. math::
+
+ \cdots \ \ \mathbf{C}_n(\mathbf{K},\mathcal{R}) \xrightarrow{\ \partial_n\ }
+ \mathbf{C}_{n-1}(\mathbf{K},\mathcal{R}) \xrightarrow{\partial_{n-1}} \cdots \xrightarrow{\ \partial_2 \ }
+ \mathbf{C}_1(\mathbf{K},\mathcal{R}) \xrightarrow{\ \partial_1 \ } \mathbf{C}_0(\mathbf{K},\mathcal{R})
+
+of finitely many groups :math:`\mathbf{C}_n(\mathbf{K},\mathcal{R})` and homomorphisms :math:`\partial_n`, indexed by
+the dimension :math:`n \geq 0`. The boundary operators satisfy the property :math:`\partial_n \circ \partial_{n+1}=0`
+for every :math:`n > 0` and we define the homology groups:
+
+.. math::
+
+ \mathbf{H}_n(\mathbf{K},\mathcal{R}) = \ker \partial_n / \mathrm{im} \ \partial_{n+1}
+
+We refer to :cite:`Munkres-elementsalgtop1984` for an introduction to homology
+theory and to :cite:`DBLP:books/daglib/0025666` for an introduction to persistent homology.
+
+Indexing Scheme
+---------------
+
+"Changing" a simplicial complex consists in applying a simplicial map. An *indexing scheme* is a directed graph
+together with a traversal order, such that two consecutive nodes in the graph are connected by an arrow (either forward
+or backward).
+The nodes represent simplicial complexes and the directed edges simplicial maps.
+
+From the computational point of view, there are two types of indexing schemes of interest in persistent homology:
+
+* linear ones
+ :math:`\bullet \longrightarrow \bullet \longrightarrow \cdots \longrightarrow \bullet \longrightarrow \bullet`
+ in persistent homology :cite:`DBLP:journals/dcg/ZomorodianC05`,
+* zigzag ones
+ :math:`\bullet \longrightarrow \bullet \longleftarrow \cdots \longrightarrow \bullet \longleftarrow \bullet`
+ in zigzag persistent homology :cite:`DBLP:journals/focm/CarlssonS10`.
+
+These indexing schemes have a natural left-to-right traversal order, and we describe them with ranges and iterators.
+In the current release of the Gudhi library, only the linear case is implemented.
+
+In the following, we consider the case where the indexing scheme is induced by a filtration.
+
+Ordering the simplices by increasing filtration values (breaking ties so that a simplex appears after its subsimplices
+of the same filtration value) provides an indexing scheme.
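+
+As an illustration, the following minimal sketch (with an arbitrary toy point cloud and arbitrary parameters) builds a
+small Rips filtration and computes its persistence over this indexing scheme; the downloadable examples below show more
+complete pipelines.
+
+.. code-block:: python
+
+    import gudhi
+
+    # Arbitrary toy point cloud; any filtered complex exposing a simplex tree works the same way.
+    rips_complex = gudhi.RipsComplex(points=[[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]],
+                                     max_edge_length=2.0)
+    simplex_tree = rips_complex.create_simplex_tree(max_dimension=2)
+
+    # persistence() pairs births and deaths over the filtration-induced indexing scheme and
+    # returns a list of (dimension, (birth, death)) tuples.
+    for interval in simplex_tree.persistence(homology_coeff_field=2, min_persistence=0):
+        print(interval)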
+
+Examples
+--------
+
+We provide several example files: run these examples with -h for details on their use.
+
+.. only:: builder_html
+
+ * :download:`alpha_complex_diagram_persistence_from_off_file_example.py <../example/alpha_complex_diagram_persistence_from_off_file_example.py>`
+ * :download:`periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py <../example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py>`
+ * :download:`rips_complex_diagram_persistence_from_off_file_example.py <../example/rips_complex_diagram_persistence_from_off_file_example.py>`
+ * :download:`rips_persistence_diagram.py <../example/rips_persistence_diagram.py>`
+ * :download:`rips_complex_diagram_persistence_from_distance_matrix_file_example.py <../example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py>`
+ * :download:`random_cubical_complex_persistence_example.py <../example/random_cubical_complex_persistence_example.py>`
+ * :download:`tangential_complex_plain_homology_from_off_file_example.py <../example/tangential_complex_plain_homology_from_off_file_example.py>`
+
+Bibliography
+============
+
+.. bibliography:: ../../biblio/bibliography.bib
+ :filter: docnames
+ :style: unsrt
diff --git a/src/python/doc/python3-sphinx-build.py b/src/python/doc/python3-sphinx-build.py
new file mode 100755
index 00000000..84d158cf
--- /dev/null
+++ b/src/python/doc/python3-sphinx-build.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python3
+
+"""
+Emulate sphinx-build for python3
+"""
+
+from sys import exit, argv
+from sphinx import main
+
+if __name__ == '__main__':
+ exit(main(argv))
diff --git a/src/python/doc/reader_utils_ref.rst b/src/python/doc/reader_utils_ref.rst
new file mode 100644
index 00000000..f3ecebad
--- /dev/null
+++ b/src/python/doc/reader_utils_ref.rst
@@ -0,0 +1,15 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
+=============================
+Reader utils reference manual
+=============================
+
+.. autofunction:: gudhi.read_off
+
+.. autofunction:: gudhi.read_lower_triangular_matrix_from_csv_file
+
+.. autofunction:: gudhi.read_persistence_intervals_grouped_by_dimension
+
+.. autofunction:: gudhi.read_persistence_intervals_in_dimension
diff --git a/src/python/doc/rips_complex_ref.rst b/src/python/doc/rips_complex_ref.rst
new file mode 100644
index 00000000..22b5616c
--- /dev/null
+++ b/src/python/doc/rips_complex_ref.rst
@@ -0,0 +1,14 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
+=============================
+Rips complex reference manual
+=============================
+
+.. autoclass:: gudhi.RipsComplex
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+ .. automethod:: gudhi.RipsComplex.__init__
diff --git a/src/python/doc/rips_complex_sum.inc b/src/python/doc/rips_complex_sum.inc
new file mode 100644
index 00000000..857c6893
--- /dev/null
+++ b/src/python/doc/rips_complex_sum.inc
@@ -0,0 +1,16 @@
+.. table::
+ :widths: 30 50 20
+
+ +----------------------------------------------------------------+------------------------------------------------------------------------+----------------------------------------------------------------------+
+ | .. figure:: | Rips complex is a simplicial complex constructed from a one skeleton | :Authors: Clément Maria, Pawel Dlotko, Vincent Rouvreau, Marc Glisse |
+ | ../../doc/Rips_complex/rips_complex_representation.png | graph. | |
+ | :figclass: align-center | | :Introduced in: GUDHI 2.0.0 |
+ | | The filtration value of each edge is computed from a user-given | |
+ | | distance function and is inserted until a user-given threshold | :Copyright: MIT |
+ | | value. | |
+ | | | |
+ | | This complex can be built from a point cloud and a distance function, | |
+ | | or from a distance matrix. | |
+ +----------------------------------------------------------------+------------------------------------------------------------------------+----------------------------------------------------------------------+
+ | * :doc:`rips_complex_user` | * :doc:`rips_complex_ref` |
+ +----------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------+
diff --git a/src/python/doc/rips_complex_user.rst b/src/python/doc/rips_complex_user.rst
new file mode 100644
index 00000000..3f6b960d
--- /dev/null
+++ b/src/python/doc/rips_complex_user.rst
@@ -0,0 +1,347 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
+Rips complex user manual
+=========================
+Definition
+----------
+
+==================================================================== ================================ ======================
+:Authors: Clément Maria, Pawel Dlotko, Vincent Rouvreau, Marc Glisse :Introduced in: GUDHI 2.0.0 :Copyright: GPL v3
+==================================================================== ================================ ======================
+
++-------------------------------------------+----------------------------------------------------------------------+
+| :doc:`rips_complex_user` | :doc:`rips_complex_ref` |
++-------------------------------------------+----------------------------------------------------------------------+
+
+The `Rips complex <https://en.wikipedia.org/wiki/Vietoris%E2%80%93Rips_complex>`_ is a simplicial complex that
+generalizes proximity (:math:`\varepsilon`-ball) graphs to higher dimensions. The vertices correspond to the input
+points, and a simplex is present if and only if its diameter is smaller than some parameter α. Considering all
+parameters α defines a filtered simplicial complex, where the filtration value of a simplex is its diameter.
+The filtration can be restricted to values α smaller than some threshold, to reduce its size. Beware that some
+people define the Rips complex using a bound of 2α instead of α, particularly when comparing it to an ambient
+Čech complex. They end up with the same combinatorial object, but filtration values which are half of ours.
+
+The input discrete metric space can be provided as a point cloud plus a distance function, or as a distance matrix.
+
+When creating a simplicial complex from the graph, :doc:`RipsComplex <rips_complex_ref>` first builds the graph and
+inserts it into the data structure. It then expands the simplicial complex (adds the simplices corresponding to cliques)
+when required. The expansion can be stopped at dimension `max_dimension`, by default 1.
+
+A vertex name corresponds to the index of the point in the given range (i.e. the point cloud).
+
+.. figure::
+ ../../doc/Rips_complex/rips_complex_representation.png
+ :align: center
+
+ Rips-complex one skeleton graph representation
+
+In this example, since edges (4,5), (4,6) and (5,6) are in the complex, the simplex (4,5,6) is added with its filtration
+value set to :math:`max(filtration(4,5), filtration(4,6), filtration(5,6))`. The same rule applies to simplex (0,1,2,3).
+
+If the `RipsComplex` interfaces are not detailed enough for your needs, please refer to the rips_persistence_step_by_step.cpp
+C++ example, where the graph construction over the Simplex_tree is more detailed.
+
+A Rips complex can easily become huge, even if we limit the length of the edges
+and the dimension of the simplices. One easy trick, before building a Rips
+complex on a point cloud, is to call `sparsify_point_set` which removes points
+that are too close to each other. This does not change its persistence diagram
+by more than the length used to define "too close".
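+
+A minimal sketch of this trick is given below. It assumes that the subsampling functions are exposed at the top level of
+the module (as `gudhi.pick_n_random_points` is in the examples of this documentation) and that `sparsify_point_set`
+takes the point set and a squared minimal distance; it also requires Eigen and CGAL. The point cloud and the radius are
+arbitrary.
+
+.. code-block:: python
+
+    import gudhi
+
+    points = [[0.0, 0.0], [0.05, 0.0], [1.0, 0.0], [1.0, 1.0], [0.0, 1.0]]
+    # Keep a subset of points that are pairwise farther apart than sqrt(min_squared_dist).
+    sparse_points = gudhi.sparsify_point_set(points=points, min_squared_dist=0.01)
+    rips_complex = gudhi.RipsComplex(points=sparse_points, max_edge_length=2.0)
+    simplex_tree = rips_complex.create_simplex_tree(max_dimension=1)
+    print(simplex_tree.num_vertices())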
+
+A more general technique is to use a sparse approximation of the Rips
+introduced by Don Sheehy :cite:`sheehy13linear`. We are using the version
+described in :cite:`buchet16efficient` (except that we multiply all filtration
+values by 2, to match the usual Rips complex). :cite:`cavanna15geometric` proves
+a :math:`\frac{1}{1-\varepsilon}`-interleaving, although in practice the
+error is usually smaller. A more intuitive presentation of the idea is
+available in :cite:`cavanna15geometric`, and in a video
+:cite:`cavanna15visualizing`. Passing an extra argument `sparse=0.3` at the
+construction of a `RipsComplex` object asks it to build a sparse Rips with
+parameter :math:`\varepsilon=0.3`, while the default `sparse=None` builds the
+regular Rips complex.
+
+
+Point cloud
+-----------
+
+Example from a point cloud
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+This example builds the neighborhood graph from the given points, up to max_edge_length.
+Then it creates a :doc:`Simplex_tree <simplex_tree_ref>` with it.
+
+Finally, it is asked to display information about the simplicial complex.
+
+.. testcode::
+
+ import gudhi
+ rips_complex = gudhi.RipsComplex(points=[[1, 1], [7, 0], [4, 6], [9, 6], [0, 14], [2, 19], [9, 17]],
+ max_edge_length=12.0)
+
+ simplex_tree = rips_complex.create_simplex_tree(max_dimension=1)
+ result_str = 'Rips complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \
+ repr(simplex_tree.num_simplices()) + ' simplices - ' + \
+ repr(simplex_tree.num_vertices()) + ' vertices.'
+ print(result_str)
+ fmt = '%s -> %.2f'
+ for filtered_value in simplex_tree.get_filtration():
+ print(fmt % tuple(filtered_value))
+
+When launching this example (the maximal Rips distance between two points is 12.0, and the complex is expanded up to
+dimension 1, i.e. its one-skeleton graph), the output is:
+
+.. testoutput::
+
+ Rips complex is of dimension 1 - 18 simplices - 7 vertices.
+ [0] -> 0.00
+ [1] -> 0.00
+ [2] -> 0.00
+ [3] -> 0.00
+ [4] -> 0.00
+ [5] -> 0.00
+ [6] -> 0.00
+ [2, 3] -> 5.00
+ [4, 5] -> 5.39
+ [0, 2] -> 5.83
+ [0, 1] -> 6.08
+ [1, 3] -> 6.32
+ [1, 2] -> 6.71
+ [5, 6] -> 7.28
+ [2, 4] -> 8.94
+ [0, 3] -> 9.43
+ [4, 6] -> 9.49
+ [3, 6] -> 11.00
+
+Notice that if we use
+
+.. code-block:: python
+
+ rips_complex = gudhi.RipsComplex(points=[[1, 1], [7, 0], [4, 6], [9, 6], [0, 14], [2, 19], [9, 17]],
+ max_edge_length=12.0, sparse=2)
+
+and ask for a very sparse version (the theory only gives guarantees on the meaning of the output when `sparse<1`),
+then 2 to 5 edges disappear, depending on the random vertex used to start the sparsification.
+
+Example from OFF file
+^^^^^^^^^^^^^^^^^^^^^
+
+This example builds the :doc:`RipsComplex <rips_complex_ref>` from the points
+given in an OFF file and a max_edge_length value.
+Then it creates a :doc:`Simplex_tree <simplex_tree_ref>` with it.
+
+Finally, it is asked to display information about the Rips complex.
+
+
+.. testcode::
+
+ import gudhi
+ point_cloud = gudhi.read_off(off_file=gudhi.__root_source_dir__ + '/data/points/alphacomplexdoc.off')
+ rips_complex = gudhi.RipsComplex(points=point_cloud, max_edge_length=12.0)
+ simplex_tree = rips_complex.create_simplex_tree(max_dimension=1)
+ result_str = 'Rips complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \
+ repr(simplex_tree.num_simplices()) + ' simplices - ' + \
+ repr(simplex_tree.num_vertices()) + ' vertices.'
+ print(result_str)
+ fmt = '%s -> %.2f'
+ for filtered_value in simplex_tree.get_filtration():
+ print(fmt % tuple(filtered_value))
+
+The program output is:
+
+.. testoutput::
+
+ Rips complex is of dimension 1 - 18 simplices - 7 vertices.
+ [0] -> 0.00
+ [1] -> 0.00
+ [2] -> 0.00
+ [3] -> 0.00
+ [4] -> 0.00
+ [5] -> 0.00
+ [6] -> 0.00
+ [2, 3] -> 5.00
+ [4, 5] -> 5.39
+ [0, 2] -> 5.83
+ [0, 1] -> 6.08
+ [1, 3] -> 6.32
+ [1, 2] -> 6.71
+ [5, 6] -> 7.28
+ [2, 4] -> 8.94
+ [0, 3] -> 9.43
+ [4, 6] -> 9.49
+ [3, 6] -> 11.00
+
+Distance matrix
+---------------
+
+Example from a distance matrix
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+This example builds the one-skeleton graph from the given distance matrix and a max_edge_length value.
+Then it creates a :doc:`Simplex_tree <simplex_tree_ref>` with it.
+
+Finally, it is asked to display information about the simplicial complex.
+
+.. testcode::
+
+ import gudhi
+ rips_complex = gudhi.RipsComplex(distance_matrix=[[],
+ [6.0827625303],
+ [5.8309518948, 6.7082039325],
+ [9.4339811321, 6.3245553203, 5],
+ [13.0384048104, 15.6524758425, 8.94427191, 12.0415945788],
+ [18.0277563773, 19.6468827044, 13.152946438, 14.7648230602, 5.3851648071],
+ [17.88854382, 17.1172427686, 12.0830459736, 11, 9.4868329805, 7.2801098893]],
+ max_edge_length=12.0)
+
+ simplex_tree = rips_complex.create_simplex_tree(max_dimension=1)
+ result_str = 'Rips complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \
+ repr(simplex_tree.num_simplices()) + ' simplices - ' + \
+ repr(simplex_tree.num_vertices()) + ' vertices.'
+ print(result_str)
+ fmt = '%s -> %.2f'
+ for filtered_value in simplex_tree.get_filtration():
+ print(fmt % tuple(filtered_value))
+
+When launching this example (the maximal Rips distance between two points is 12.0, and the complex is expanded up to
+dimension 1, i.e. its one-skeleton graph), the output is:
+
+.. testoutput::
+
+ Rips complex is of dimension 1 - 18 simplices - 7 vertices.
+ [0] -> 0.00
+ [1] -> 0.00
+ [2] -> 0.00
+ [3] -> 0.00
+ [4] -> 0.00
+ [5] -> 0.00
+ [6] -> 0.00
+ [2, 3] -> 5.00
+ [4, 5] -> 5.39
+ [0, 2] -> 5.83
+ [0, 1] -> 6.08
+ [1, 3] -> 6.32
+ [1, 2] -> 6.71
+ [5, 6] -> 7.28
+ [2, 4] -> 8.94
+ [0, 3] -> 9.43
+ [4, 6] -> 9.49
+ [3, 6] -> 11.00
+
+Example from csv file
+^^^^^^^^^^^^^^^^^^^^^
+
+This example builds the :doc:`RipsComplex <rips_complex_ref>` from the
+distance matrix given in a csv file and a max_edge_length value.
+Then it creates a :doc:`Simplex_tree <simplex_tree_ref>` with it.
+
+Finally, it is asked to display information about the Rips complex.
+
+
+.. testcode::
+
+ import gudhi
+ distance_matrix = gudhi.read_lower_triangular_matrix_from_csv_file(csv_file=gudhi.__root_source_dir__ + \
+ '/data/distance_matrix/full_square_distance_matrix.csv')
+ rips_complex = gudhi.RipsComplex(distance_matrix=distance_matrix, max_edge_length=12.0)
+ simplex_tree = rips_complex.create_simplex_tree(max_dimension=1)
+ result_str = 'Rips complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \
+ repr(simplex_tree.num_simplices()) + ' simplices - ' + \
+ repr(simplex_tree.num_vertices()) + ' vertices.'
+ print(result_str)
+ fmt = '%s -> %.2f'
+ for filtered_value in simplex_tree.get_filtration():
+ print(fmt % tuple(filtered_value))
+
+The program output is:
+
+.. testoutput::
+
+ Rips complex is of dimension 1 - 18 simplices - 7 vertices.
+ [0] -> 0.00
+ [1] -> 0.00
+ [2] -> 0.00
+ [3] -> 0.00
+ [4] -> 0.00
+ [5] -> 0.00
+ [6] -> 0.00
+ [2, 3] -> 5.00
+ [4, 5] -> 5.39
+ [0, 2] -> 5.83
+ [0, 1] -> 6.08
+ [1, 3] -> 6.32
+ [1, 2] -> 6.71
+ [5, 6] -> 7.28
+ [2, 4] -> 8.94
+ [0, 3] -> 9.43
+ [4, 6] -> 9.49
+ [3, 6] -> 11.00
+
+Correlation matrix
+------------------
+
+Example from a correlation matrix
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Analogously to the case of distance matrices, Rips complexes can also be constructed from a correlation matrix.
+Given a correlation matrix M, the matrix 1-M, taken entry-wise, can be used as a distance matrix.
+This example builds the one-skeleton graph from the given correlation matrix and a threshold value.
+Then it creates a :doc:`Simplex_tree <simplex_tree_ref>` with it.
+
+Finally, it is asked to display information about the simplicial complex.
+
+.. testcode::
+
+ import gudhi
+ import numpy as np
+
+ # User defined correlation matrix is:
+ # |1 0.06 0.23 0.01 0.89|
+ # |0.06 1 0.74 0.01 0.61|
+ # |0.23 0.74 1 0.72 0.03|
+ # |0.01 0.01 0.72 1 0.7 |
+ # |0.89 0.61 0.03 0.7 1 |
+ correlation_matrix=np.array([[1., 0.06, 0.23, 0.01, 0.89],
+ [0.06, 1., 0.74, 0.01, 0.61],
+ [0.23, 0.74, 1., 0.72, 0.03],
+ [0.01, 0.01, 0.72, 1., 0.7],
+ [0.89, 0.61, 0.03, 0.7, 1.]], float)
+
+ distance_matrix = np.ones((correlation_matrix.shape),float) - correlation_matrix
+ rips_complex = gudhi.RipsComplex(distance_matrix=distance_matrix, max_edge_length=1.0)
+
+ simplex_tree = rips_complex.create_simplex_tree(max_dimension=1)
+ result_str = 'Rips complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \
+ repr(simplex_tree.num_simplices()) + ' simplices - ' + \
+ repr(simplex_tree.num_vertices()) + ' vertices.'
+ print(result_str)
+ fmt = '%s -> %.2f'
+ for filtered_value in simplex_tree.get_filtration():
+ print(fmt % tuple(filtered_value))
+
+When launching this example (the maximal Rips distance between two points is 1.0, and the complex is expanded up to
+dimension 1, i.e. its one-skeleton graph), the output is:
+
+.. testoutput::
+
+ Rips complex is of dimension 1 - 15 simplices - 5 vertices.
+ [0] -> 0.00
+ [1] -> 0.00
+ [2] -> 0.00
+ [3] -> 0.00
+ [4] -> 0.00
+ [0, 4] -> 0.11
+ [1, 2] -> 0.26
+ [2, 3] -> 0.28
+ [3, 4] -> 0.30
+ [1, 4] -> 0.39
+ [0, 2] -> 0.77
+ [0, 1] -> 0.94
+ [2, 4] -> 0.97
+ [0, 3] -> 0.99
+ [1, 3] -> 0.99
+
+.. note::
+    As the persistence diagram points will be under the diagonal,
+    the bottleneck distance and the persistence graphical tools will not work properly;
+    this is a known issue.
diff --git a/src/python/doc/simplex_tree_ref.rst b/src/python/doc/simplex_tree_ref.rst
new file mode 100644
index 00000000..9eb8c199
--- /dev/null
+++ b/src/python/doc/simplex_tree_ref.rst
@@ -0,0 +1,14 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
+=============================
+Simplex tree reference manual
+=============================
+
+.. autoclass:: gudhi.SimplexTree
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+ .. automethod:: gudhi.SimplexTree.__init__
diff --git a/src/python/doc/simplex_tree_sum.inc b/src/python/doc/simplex_tree_sum.inc
new file mode 100644
index 00000000..5ba58d2b
--- /dev/null
+++ b/src/python/doc/simplex_tree_sum.inc
@@ -0,0 +1,13 @@
+.. table::
+ :widths: 30 50 20
+
+ +----------------------------------------------------------------+------------------------------------------------------------------------+-----------------------------+
+ | .. figure:: | The simplex tree is an efficient and flexible data structure for | :Author: Clément Maria |
+ | ../../doc/Simplex_tree/Simplex_tree_representation.png | representing general (filtered) simplicial complexes. | |
+ | :alt: Simplex tree representation | | :Introduced in: GUDHI 2.0.0 |
+ | :figclass: align-center | The data structure is described in | |
+ | | :cite:`boissonnatmariasimplextreealgorithmica` | :Copyright: MIT |
+ | | | |
+ +----------------------------------------------------------------+------------------------------------------------------------------------+-----------------------------+
+ | * :doc:`simplex_tree_user` | * :doc:`simplex_tree_ref` |
+ +----------------------------------------------------------------+------------------------------------------------------------------------------------------------------+
diff --git a/src/python/doc/simplex_tree_user.rst b/src/python/doc/simplex_tree_user.rst
new file mode 100644
index 00000000..aebeb29f
--- /dev/null
+++ b/src/python/doc/simplex_tree_user.rst
@@ -0,0 +1,72 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
+Simplex tree user manual
+========================
+Definition
+----------
+
+.. include:: simplex_tree_sum.inc
+
+A simplicial complex :math:`\mathbf{K}` on a set of vertices :math:`V = \{1, \cdots ,|V|\}` is a collection of
+simplices :math:`\{\sigma\}`, :math:`\sigma \subseteq V` such that
+:math:`\tau \subseteq \sigma \in \mathbf{K} \rightarrow \tau \in \mathbf{K}`. The dimension :math:`n=|\sigma|-1` of
+:math:`\sigma` is its number of elements minus `1`.
+
+A filtration of a simplicial complex is a function :math:`f:\mathbf{K} \rightarrow \mathbb{R}` satisfying
+:math:`f(\tau)\leq f(\sigma)` whenever :math:`\tau \subseteq \sigma`. Ordering the simplices by increasing filtration
+values (breaking ties so as a simplex appears after its subsimplices of same filtration value) provides an indexing
+scheme.
+
+
+Implementation
+--------------
+
+There are two implementations of complexes. The first one is the Simplex_tree data structure.
+The simplex tree is an efficient and flexible data structure for representing general (filtered) simplicial complexes.
+The data structure is described in :cite:`boissonnatmariasimplextreealgorithmica`.
+
+The second one is the Hasse_complex. The Hasse complex is a data structure representing explicitly all co-dimension 1
+incidence relations in a complex. It is consequently faster when accessing the boundary of a simplex, but is less
+compact and harder to construct from scratch.
+
+Example
+-------
+
+.. testcode::
+
+ import gudhi
+ st = gudhi.SimplexTree()
+ if st.insert([0, 1]):
+ print("[0, 1] inserted")
+ if st.insert([0, 1, 2], filtration=4.0):
+ print("[0, 1, 2] inserted")
+ if st.find([0, 1]):
+ print("[0, 1] found")
+ result_str = 'num_vertices=' + repr(st.num_vertices())
+ print(result_str)
+ result_str = 'num_simplices=' + repr(st.num_simplices())
+ print(result_str)
+ print("skeleton(2) =")
+ for sk_value in st.get_skeleton(2):
+ print(sk_value)
+
+
+The output is:
+
+.. testoutput::
+
+ [0, 1] inserted
+ [0, 1, 2] inserted
+ [0, 1] found
+ num_vertices=3
+ num_simplices=7
+ skeleton(2) =
+ ([0, 1, 2], 4.0)
+ ([0, 1], 0.0)
+ ([0, 2], 4.0)
+ ([0], 0.0)
+ ([1, 2], 4.0)
+ ([1], 0.0)
+ ([2], 4.0)
diff --git a/src/python/doc/strong_witness_complex_ref.rst b/src/python/doc/strong_witness_complex_ref.rst
new file mode 100644
index 00000000..d624d711
--- /dev/null
+++ b/src/python/doc/strong_witness_complex_ref.rst
@@ -0,0 +1,14 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
+=======================================
+Strong witness complex reference manual
+=======================================
+
+.. autoclass:: gudhi.StrongWitnessComplex
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+ .. automethod:: gudhi.StrongWitnessComplex.__init__
diff --git a/src/python/doc/tangential_complex_ref.rst b/src/python/doc/tangential_complex_ref.rst
new file mode 100644
index 00000000..cdfda082
--- /dev/null
+++ b/src/python/doc/tangential_complex_ref.rst
@@ -0,0 +1,14 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
+===================================
+Tangential complex reference manual
+===================================
+
+.. autoclass:: gudhi.TangentialComplex
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+ .. automethod:: gudhi.TangentialComplex.__init__
diff --git a/src/python/doc/tangential_complex_sum.inc b/src/python/doc/tangential_complex_sum.inc
new file mode 100644
index 00000000..d84aa433
--- /dev/null
+++ b/src/python/doc/tangential_complex_sum.inc
@@ -0,0 +1,14 @@
+.. table::
+ :widths: 30 50 20
+
+ +----------------------------------------------------------------+------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------+
+ | .. figure:: | A Tangential Delaunay complex is a simplicial complex designed to | :Author: Clément Jamin |
+ | ../../doc/Tangential_complex/tc_examples.png | reconstruct a :math:`k`-dimensional manifold embedded in :math:`d`- | |
+ | :figclass: align-center | dimensional Euclidean space. The input is a point sample coming from | :Introduced in: GUDHI 2.0.0 |
+ | | an unknown manifold. The running time depends only linearly on the | |
+ | | extrinsic dimension :math:`d` and exponentially on the intrinsic | :Copyright: MIT (`GPL v3 </licensing/>`_) |
+ | | dimension :math:`k`. | |
+ | | | :Requires: `Eigen <installation.html#eigen>`__ :math:`\geq` 3.1.0 and `CGAL <installation.html#cgal>`__ :math:`\geq` 4.11.0 |
+ +----------------------------------------------------------------+------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------+
+ | * :doc:`tangential_complex_user` | * :doc:`tangential_complex_ref` |
+ +----------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
diff --git a/src/python/doc/tangential_complex_user.rst b/src/python/doc/tangential_complex_user.rst
new file mode 100644
index 00000000..ebfe1e29
--- /dev/null
+++ b/src/python/doc/tangential_complex_user.rst
@@ -0,0 +1,204 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
+Tangential complex user manual
+==============================
+.. include:: tangential_complex_sum.inc
+
+Definition
+----------
+
+A Tangential Delaunay complex is a simplicial complex designed to reconstruct a
+:math:`k`-dimensional smooth manifold embedded in :math:`d`-dimensional
+Euclidean space. The input is a point sample coming from an unknown manifold,
+which means that the points lie close to a structure of "small" intrinsic
+dimension. The running time depends only linearly on the extrinsic dimension
+:math:`d` and exponentially on the intrinsic dimension :math:`k`.
+
+An extensive description of the Tangential complex can be found in
+:cite:`tangentialcomplex2014`.
+
+What is a Tangential Complex?
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Let us start with the description of the Tangential complex of a simple
+example, with :math:`k = 1` and :math:`d = 2`. The point set
+:math:`\mathscr P` is located on a closed curve embedded in 2D.
+Only 4 points will be displayed (more are required for PCA) to simplify the
+figures.
+
+.. figure:: ../../doc/Tangential_complex/tc_example_01.png
+ :alt: The input
+ :figclass: align-center
+
+ The input
+
+For each point :math:`P`, estimate its tangent subspace :math:`T_P` using PCA.
+
+.. figure:: ../../doc/Tangential_complex/tc_example_02.png
+ :alt: The estimated normals
+ :figclass: align-center
+
+ The estimated normals
+
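+As a side illustration of this estimation step (this is not the estimator used internally by the library), the tangent
+direction at a sample point can be obtained by running PCA on a few of its nearest neighbours. A minimal numpy sketch on
+hypothetical samples of a circle:
+
+.. code-block:: python
+
+    import numpy as np
+
+    # Hypothetical samples of a closed curve (a circle), as in the figures.
+    theta = np.linspace(0.0, 2.0 * np.pi, 20, endpoint=False)
+    points = np.stack([np.cos(theta), np.sin(theta)], axis=1)
+
+    def tangent_at(points, index, k=4):
+        """Estimate the tangent direction at points[index] by PCA on its k nearest neighbours."""
+        dists = np.linalg.norm(points - points[index], axis=1)
+        neighbours = points[np.argsort(dists)[:k + 1]]   # the point itself and its k nearest neighbours
+        centred = neighbours - neighbours.mean(axis=0)
+        _, _, vt = np.linalg.svd(centred, full_matrices=False)
+        return vt[0]                                     # direction of largest variance
+
+    print(tangent_at(points, 0))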
+
+Let us add the Voronoi diagram of the points in orange. For each point
+:math:`P`, construct its star in the Delaunay triangulation of
+:math:`\mathscr P` restricted to :math:`T_P`.
+
+.. figure:: ../../doc/Tangential_complex/tc_example_03.png
+ :alt: The Voronoi diagram
+ :figclass: align-center
+
+ The Voronoi diagram
+
+The Tangential Delaunay complex is the union of those stars.
+
+In practice, neither the ambient Voronoi diagram nor the ambient Delaunay
+triangulation is computed. Instead, local :math:`k`-dimensional regular
+triangulations are computed with a limited number of points as we only need the
+star of each point. More details can be found in :cite:`tangentialcomplex2014`.
+
+Inconsistencies
+^^^^^^^^^^^^^^^
+Inconsistencies between the stars can occur. An inconsistency occurs when a
+simplex is not in the star of all its vertices.
+
+Let us take the same example.
+
+.. figure:: ../../doc/Tangential_complex/tc_example_07_before.png
+ :alt: Before
+ :figclass: align-center
+
+ Before
+
+Let us slightly move the tangent subspace :math:`T_Q`
+
+.. figure:: ../../doc/Tangential_complex/tc_example_07_after.png
+ :alt: After
+ :figclass: align-center
+
+ After
+
+Now, the star of :math:`Q` contains :math:`QP`, but the star of :math:`P` does
+not contain :math:`QP`. We have an inconsistency.
+
+.. figure:: ../../doc/Tangential_complex/tc_example_08.png
+ :alt: After
+ :figclass: align-center
+
+ After
+
+One way to solve inconsistencies is to randomly perturb the positions of the
+points involved in an inconsistency. In the current implementation, this
+perturbation is done in the tangent subspace of each point. The maximum
+perturbation radius is given as a parameter to the constructor.
+
+In most cases, we recommend providing a point set where the minimum distance
+between any two points is not too small. This can be achieved using the
+functions provided by the Subsampling module. Then, a good value to start with
+for the maximum perturbation radius would be around half the minimum distance
+between any two points. The "Example with perturbation" below illustrates such
+a process.
+
+In most cases, this process is able to dramatically reduce the number of
+inconsistencies, but is not guaranteed to succeed.
+
+Output
+^^^^^^
+The result of the computation is exported as a Simplex_tree. It is the union of
+the stars of all the input points. A vertex in the simplex tree is the index of
+the point in the range provided by the user. The point corresponding to a
+vertex can also be obtained through the TangentialComplex.get_point method.
+Note that even if the positions of the points are perturbed, their original
+positions are kept (i.e. TangentialComplex.get_point returns the original
+position of the point).
+
+The result can be obtained after the computation of the Tangential complex
+itself and/or after the perturbation process.
+
+
+Simple example
+--------------
+
+This example builds the Tangential complex of a point set read in an OFF file.
+
+.. testcode::
+
+ import gudhi
+ tc = gudhi.TangentialComplex(intrisic_dim = 1,
+ off_file=gudhi.__root_source_dir__ + '/data/points/alphacomplexdoc.off')
+ tc.compute_tangential_complex()
+ result_str = 'Tangential contains ' + repr(tc.num_simplices()) + \
+ ' simplices - ' + repr(tc.num_vertices()) + ' vertices.'
+ print(result_str)
+
+ st = tc.create_simplex_tree()
+ result_str = 'Simplex tree is of dimension ' + repr(st.dimension()) + \
+ ' - ' + repr(st.num_simplices()) + ' simplices - ' + \
+ repr(st.num_vertices()) + ' vertices.'
+ print(result_str)
+ for filtered_value in st.get_filtration():
+ print(filtered_value[0])
+
+The output is:
+
+.. testoutput::
+
+ Tangential contains 12 simplices - 7 vertices.
+ Simplex tree is of dimension 1 - 15 simplices - 7 vertices.
+ [0]
+ [1]
+ [0, 1]
+ [2]
+ [0, 2]
+ [1, 2]
+ [3]
+ [1, 3]
+ [4]
+ [2, 4]
+ [5]
+ [4, 5]
+ [6]
+ [3, 6]
+ [5, 6]
+
+
+Example with perturbation
+-------------------------
+
+This example builds the Tangential complex of a point set, then tries to solve
+inconsistencies by perturbing the positions of points involved in inconsistent
+simplices.
+
+.. testcode::
+
+ import gudhi
+ tc = gudhi.TangentialComplex(intrisic_dim = 1,
+ points=[[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]])
+ tc.compute_tangential_complex()
+ result_str = 'Tangential contains ' + repr(tc.num_vertices()) + ' vertices.'
+ print(result_str)
+
+ if tc.num_inconsistent_simplices() > 0:
+ print('Tangential contains inconsistencies.')
+
+ tc.fix_inconsistencies_using_perturbation(10, 60)
+ if tc.num_inconsistent_simplices() == 0:
+        print('Inconsistencies have been fixed.')
+
+The output is:
+
+.. testoutput::
+
+ Tangential contains 4 vertices.
+    Inconsistencies have been fixed.
+
+
+Bibliography
+============
+
+.. bibliography:: ../../biblio/bibliography.bib
+ :filter: docnames
+ :style: unsrt
diff --git a/src/python/doc/todos.rst b/src/python/doc/todos.rst
new file mode 100644
index 00000000..ca274ced
--- /dev/null
+++ b/src/python/doc/todos.rst
@@ -0,0 +1,9 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
+==========
+To be done
+==========
+
+.. todolist::
diff --git a/src/python/doc/witness_complex_ref.rst b/src/python/doc/witness_complex_ref.rst
new file mode 100644
index 00000000..9987d3fd
--- /dev/null
+++ b/src/python/doc/witness_complex_ref.rst
@@ -0,0 +1,14 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
+================================
+Witness complex reference manual
+================================
+
+.. autoclass:: gudhi.WitnessComplex
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+ .. automethod:: gudhi.WitnessComplex.__init__
diff --git a/src/python/doc/witness_complex_sum.inc b/src/python/doc/witness_complex_sum.inc
new file mode 100644
index 00000000..71b65a71
--- /dev/null
+++ b/src/python/doc/witness_complex_sum.inc
@@ -0,0 +1,18 @@
+.. table::
+ :widths: 30 50 20
+
+ +-------------------------------------------------------------------+----------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------+
+ | .. figure:: | Witness complex :math:`Wit(W,L)` is a simplicial complex defined on | :Author: Siargey Kachanovich |
+ | ../../doc/Witness_complex/Witness_complex_representation.png | two sets of points in :math:`\mathbb{R}^D`. | |
+ | :alt: Witness complex representation | | :Introduced in: GUDHI 2.0.0 |
+ | :figclass: align-center | The data structure is described in | |
+ | | :cite:`boissonnatmariasimplextreealgorithmica`. | :Copyright: MIT (`GPL v3 </licensing/>`_ for Euclidean versions only) |
+ | | | |
+ | | | :Requires: `Eigen <installation.html#eigen>`__ :math:`\geq` 3.1.0 and `CGAL <installation.html#cgal>`__ :math:`\geq` 4.11.0 for Euclidean versions only |
+ +-------------------------------------------------------------------+----------------------------------------------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------+
+ | * :doc:`witness_complex_user` | * :doc:`witness_complex_ref` |
+ | | * :doc:`strong_witness_complex_ref` |
+ | | * :doc:`euclidean_witness_complex_ref` |
+ | | * :doc:`euclidean_strong_witness_complex_ref` |
+ +-------------------------------------------------------------------+---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+
diff --git a/src/python/doc/witness_complex_user.rst b/src/python/doc/witness_complex_user.rst
new file mode 100644
index 00000000..40e94134
--- /dev/null
+++ b/src/python/doc/witness_complex_user.rst
@@ -0,0 +1,135 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
+Witness complex user manual
+===========================
+
+.. include:: witness_complex_sum.inc
+
+Definitions
+-----------
+
+Witness complex is a simplicial complex defined on two sets of points in :math:`\mathbb{R}^D`:
+
+- :math:`W` set of **witnesses** and
+- :math:`L` set of **landmarks**.
+
+Even though often the set of landmarks :math:`L` is a subset of the set of witnesses :math:`W`, it is not a requirement
+for the current implementation.
+
+Landmarks are the vertices of the simplicial complex, and witnesses help to decide which simplices are inserted, via the
+predicate "is witnessed".
+
+In their paper :cite:`de2004topological`, de Silva and Carlsson distinguish **weak witnessing** from
+**strong witnessing**:
+
+- *weak*: :math:`\sigma \subset L` is witnessed by :math:`w \in W` if :math:`\forall l \in \sigma,\ \forall l' \in \mathbf{L \setminus \sigma},\ d(w,l) \leq d(w,l')`
+- *strong*: :math:`\sigma \subset L` is witnessed by :math:`w \in W` if :math:`\forall l \in \sigma,\ \forall l' \in \mathbf{L},\ d(w,l) \leq d(w,l')`
+
+where :math:`d(.,.)` is a distance function.
+
+Both definitions can be relaxed by a real value :math:`\alpha`:
+
+- *weak*: :math:`\sigma \subset L` is :math:`\alpha`-witnessed by :math:`w \in W` if :math:`\forall l \in \sigma,\ \forall l' \in \mathbf{L \setminus \sigma},\ d(w,l)^2 \leq d(w,l')^2 + \alpha^2`
+- *strong*: :math:`\sigma \subset L` is :math:`\alpha`-witnessed by :math:`w \in W` if :math:`\forall l \in \sigma,\ \forall l' \in \mathbf{L},\ d(w,l)^2 \leq d(w,l')^2 + \alpha^2`
+
+which leads to definitions of **weak relaxed witness complex** (or just relaxed witness complex for short) and
+**strong relaxed witness complex** respectively.
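+
+These predicates are straightforward to evaluate directly. The small numpy sketch below is a toy check of the relaxed
+definitions above, not part of the gudhi API; the landmarks, the witness, the candidate simplex and :math:`\alpha` are
+arbitrary.
+
+.. code-block:: python
+
+    import numpy as np
+
+    landmarks = np.array([[0.0, 0.0], [1.0, 0.0], [0.0, 1.0]])   # the set L
+    witness = np.array([0.4, 0.3])                               # one w in W
+    sigma = [0, 1]                                               # candidate simplex, as landmark indices
+    alpha = 0.2
+
+    sq_dists = np.sum((landmarks - witness) ** 2, axis=1)
+    others = [l for l in range(len(landmarks)) if l not in sigma]
+    weak = all(sq_dists[l] <= sq_dists[lp] + alpha ** 2 for l in sigma for lp in others)
+    strong = all(sq_dists[l] <= sq_dists[lp] + alpha ** 2 for l in sigma for lp in range(len(landmarks)))
+    print("alpha-weak witnessed:", weak, "- alpha-strong witnessed:", strong)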
+
+.. figure:: ../../doc/Witness_complex/swit.svg
+ :alt: Strongly witnessed simplex
+ :figclass: align-center
+
+ Strongly witnessed simplex
+
+
+In the particular case of 0-relaxation, the weak complex corresponds to the **witness complex** introduced in
+:cite:`de2004topological`, whereas the 0-relaxed strong witness complex consists of just the vertices and is not very
+interesting. Hence, for small relaxation values, the weak version is preferable.
+However, to capture the homotopy type (for example when computing persistent cohomology) it is
+often necessary to work with higher filtration values. In this case, the strong relaxed witness complex is faster to
+compute and offers similar results.
+
+Implementation
+--------------
+
+The two complexes described above are implemented in the corresponding classes
+
+- :doc:`witness_complex_ref`
+- :doc:`strong_witness_complex_ref`
+- :doc:`euclidean_witness_complex_ref`
+- :doc:`euclidean_strong_witness_complex_ref`
+
+The construction of the Euclidean versions of the complexes follows the same scheme:
+
+1. Construct a search tree on landmarks.
+2. Construct lists of nearest landmarks for each witness.
+3. Construct the witness complex for nearest landmark lists.
+
+In the non-Euclidean classes, the lists of nearest landmarks are supposed to be given as input.
+
+The constructors take care of steps 1 and 2, while the method `create_simplex_tree` executes step 3.
+
+Constructing a weak relaxed witness complex from an OFF file
+--------------------------------------------------------------
+
+Let's start with a simple example, which reads an OFF point file and computes a weak witness complex.
+
+.. code-block:: python
+
+ import gudhi
+ import argparse
+
+ parser = argparse.ArgumentParser(description='EuclideanWitnessComplex creation from '
+ 'points read in a OFF file.',
+ epilog='Example: '
+ 'example/witness_complex_diagram_persistence_from_off_file_example.py '
+ '-f ../data/points/tore3D_300.off -a 1.0 -n 20 -d 2'
+                                     '- Constructs a weak witness complex with the '
+ 'points from the given OFF file.')
+ parser.add_argument("-f", "--file", type=str, required=True)
+ parser.add_argument("-a", "--max_alpha_square", type=float, required=True)
+ parser.add_argument("-n", "--number_of_landmarks", type=int, required=True)
+ parser.add_argument("-d", "--limit_dimension", type=int, required=True)
+
+ args = parser.parse_args()
+
+ with open(args.file, 'r') as f:
+ first_line = f.readline()
+ if (first_line == 'OFF\n') or (first_line == 'nOFF\n'):
+ print("#####################################################################")
+ print("EuclideanWitnessComplex creation from points read in a OFF file")
+
+ witnesses = gudhi.read_off(off_file=args.file)
+ landmarks = gudhi.pick_n_random_points(points=witnesses, nb_points=args.number_of_landmarks)
+
+            message = "EuclideanWitnessComplex with max_alpha_square=" + repr(args.max_alpha_square) + \
+ " - Number of landmarks=" + repr(args.number_of_landmarks)
+ print(message)
+
+ witness_complex = gudhi.EuclideanWitnessComplex(witnesses=witnesses, landmarks=landmarks)
+ simplex_tree = witness_complex.create_simplex_tree(max_alpha_square=args.max_alpha_square,
+ limit_dimension=args.limit_dimension)
+
+ message = "Number of simplices=" + repr(simplex_tree.num_simplices())
+ print(message)
+ else:
+ print(args.file, "is not a valid OFF file")
+
+ f.close()
+
+
+Example 2: Computing persistence using a strong relaxed witness complex
+--------------------------------------------------------------------------
+
+Here is an example of constructing a strong witness complex filtration and computing persistence on it:
+
+* :download:`euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py>`
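+
+For reference, a condensed sketch of the same pipeline on an inline point set (the points, the number of landmarks and
+the parameters below are arbitrary) could look like this:
+
+.. code-block:: python
+
+    import gudhi
+
+    witnesses = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0], [0.5, 0.5]]
+    landmarks = gudhi.pick_n_random_points(points=witnesses, nb_points=3)
+
+    witness_complex = gudhi.EuclideanStrongWitnessComplex(witnesses=witnesses, landmarks=landmarks)
+    simplex_tree = witness_complex.create_simplex_tree(max_alpha_square=1.0, limit_dimension=2)
+
+    # persistence() returns a list of (dimension, (birth, death)) tuples.
+    print(simplex_tree.persistence())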
+
+Bibliography
+============
+
+.. bibliography:: ../../biblio/bibliography.bib
+ :filter: docnames
+ :style: unsrt
diff --git a/src/python/example/alpha_complex_diagram_persistence_from_off_file_example.py b/src/python/example/alpha_complex_diagram_persistence_from_off_file_example.py
new file mode 100755
index 00000000..b8f283b3
--- /dev/null
+++ b/src/python/example/alpha_complex_diagram_persistence_from_off_file_example.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python
+
+import gudhi
+import argparse
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "MIT"
+
+parser = argparse.ArgumentParser(
+ description="AlphaComplex creation from " "points read in a OFF file.",
+ epilog="Example: "
+ "example/alpha_complex_diagram_persistence_from_off_file_example.py "
+ "-f ../data/points/tore3D_300.off -a 0.6"
+ "- Constructs a alpha complex with the "
+ "points from the given OFF file.",
+)
+parser.add_argument("-f", "--file", type=str, required=True)
+parser.add_argument("-a", "--max_alpha_square", type=float, default=0.5)
+parser.add_argument("-b", "--band", type=float, default=0.0)
+parser.add_argument(
+ "--no-diagram",
+ default=False,
+ action="store_true",
+ help="Flag for not to display the diagrams",
+)
+
+args = parser.parse_args()
+
+with open(args.file, "r") as f:
+ first_line = f.readline()
+ if (first_line == "OFF\n") or (first_line == "nOFF\n"):
+ print("#####################################################################")
+ print("AlphaComplex creation from points read in a OFF file")
+
+ message = "AlphaComplex with max_edge_length=" + repr(args.max_alpha_square)
+ print(message)
+
+ alpha_complex = gudhi.AlphaComplex(off_file=args.file)
+ simplex_tree = alpha_complex.create_simplex_tree(
+ max_alpha_square=args.max_alpha_square
+ )
+
+ message = "Number of simplices=" + repr(simplex_tree.num_simplices())
+ print(message)
+
+ diag = simplex_tree.persistence()
+
+ print("betti_numbers()=")
+ print(simplex_tree.betti_numbers())
+
+        if not args.no_diagram:
+ pplot = gudhi.plot_persistence_diagram(diag, band=args.band)
+ pplot.show()
+ else:
+ print(args.file, "is not a valid OFF file")
+
+ f.close()
diff --git a/src/python/example/alpha_complex_from_points_example.py b/src/python/example/alpha_complex_from_points_example.py
new file mode 100755
index 00000000..a746998c
--- /dev/null
+++ b/src/python/example/alpha_complex_from_points_example.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python
+
+from gudhi import AlphaComplex, SimplexTree
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "MIT"
+
+print("#####################################################################")
+print("AlphaComplex creation from points")
+alpha_complex = AlphaComplex(points=[[0, 0], [1, 0], [0, 1], [1, 1]])
+simplex_tree = alpha_complex.create_simplex_tree(max_alpha_square=60.0)
+
+if simplex_tree.find([0, 1]):
+ print("[0, 1] Found !!")
+else:
+ print("[0, 1] Not found...")
+
+if simplex_tree.find([4]):
+ print("[4] Found !!")
+else:
+ print("[4] Not found...")
+
+if simplex_tree.insert([0, 1, 2], filtration=4.0):
+ print("[0, 1, 2] Inserted !!")
+else:
+ print("[0, 1, 2] Not inserted...")
+
+if simplex_tree.insert([0, 1, 4], filtration=4.0):
+ print("[0, 1, 4] Inserted !!")
+else:
+ print("[0, 1, 4] Not inserted...")
+
+if simplex_tree.find([4]):
+ print("[4] Found !!")
+else:
+ print("[4] Not found...")
+
+print("dimension=", simplex_tree.dimension())
+print("filtrations=", simplex_tree.get_filtration())
+print("star([0])=", simplex_tree.get_star([0]))
+print("coface([0], 1)=", simplex_tree.get_cofaces([0], 1))
+
+print("point[0]=", alpha_complex.get_point(0))
+print("point[5]=", alpha_complex.get_point(5))
diff --git a/src/python/example/alpha_rips_persistence_bottleneck_distance.py b/src/python/example/alpha_rips_persistence_bottleneck_distance.py
new file mode 100755
index 00000000..086307ee
--- /dev/null
+++ b/src/python/example/alpha_rips_persistence_bottleneck_distance.py
@@ -0,0 +1,105 @@
+#!/usr/bin/env python
+
+import gudhi
+import argparse
+import math
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "MIT"
+
+parser = argparse.ArgumentParser(
+ description="AlphaComplex and RipsComplex "
+ "persistence creation from points read in "
+ "a OFF file. Bottleneck distance computation"
+ " on each dimension",
+ epilog="Example: "
+ "example/alpha_rips_persistence_bottleneck_distance.py "
+ "-f ../data/points/tore3D_1307.off -t 0.15 -d 3",
+)
+parser.add_argument("-f", "--file", type=str, required=True)
+parser.add_argument("-t", "--threshold", type=float, default=0.5)
+parser.add_argument("-d", "--max_dimension", type=int, default=1)
+
+args = parser.parse_args()
+with open(args.file, "r") as f:
+ first_line = f.readline()
+ if (first_line == "OFF\n") or (first_line == "nOFF\n"):
+ point_cloud = gudhi.read_off(off_file=args.file)
+ print("#####################################################################")
+ print("RipsComplex creation from points read in a OFF file")
+
+ message = "RipsComplex with max_edge_length=" + repr(args.threshold)
+ print(message)
+
+ rips_complex = gudhi.RipsComplex(
+ points=point_cloud, max_edge_length=args.threshold
+ )
+
+ rips_stree = rips_complex.create_simplex_tree(max_dimension=args.max_dimension)
+
+ message = "Number of simplices=" + repr(rips_stree.num_simplices())
+ print(message)
+
+ rips_diag = rips_stree.persistence()
+
+ print("#####################################################################")
+ print("AlphaComplex creation from points read in a OFF file")
+
+ message = "AlphaComplex with max_edge_length=" + repr(args.threshold)
+ print(message)
+
+ alpha_complex = gudhi.AlphaComplex(points=point_cloud)
+ alpha_stree = alpha_complex.create_simplex_tree(
+ max_alpha_square=(args.threshold * args.threshold)
+ )
+
+ message = "Number of simplices=" + repr(alpha_stree.num_simplices())
+ print(message)
+
+ alpha_diag = alpha_stree.persistence()
+
+ max_b_distance = 0.0
+ for dim in range(args.max_dimension):
+            # Alpha persistence values need to be transformed because filtration
+ # values are alpha square values
+ funcs = [math.sqrt, math.sqrt]
+ alpha_intervals = []
+ for interval in alpha_stree.persistence_intervals_in_dimension(dim):
+ alpha_intervals.append(
+ map(lambda func, value: func(value), funcs, interval)
+ )
+
+ rips_intervals = rips_stree.persistence_intervals_in_dimension(dim)
+ bottleneck_distance = gudhi.bottleneck_distance(
+ rips_intervals, alpha_intervals
+ )
+ message = (
+ "In dimension "
+ + repr(dim)
+ + ", bottleneck distance = "
+ + repr(bottleneck_distance)
+ )
+ print(message)
+ max_b_distance = max(bottleneck_distance, max_b_distance)
+
+ print(
+ "================================================================================"
+ )
+ message = "Bottleneck distance is " + repr(max_b_distance)
+ print(message)
+
+ else:
+ print(args.file, "is not a valid OFF file")
+
+ f.close()
diff --git a/src/python/example/bottleneck_basic_example.py b/src/python/example/bottleneck_basic_example.py
new file mode 100755
index 00000000..392d2a6e
--- /dev/null
+++ b/src/python/example/bottleneck_basic_example.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python
+
+import gudhi
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Francois Godi, Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "MIT"
+
+diag1 = [[2.7, 3.7], [9.6, 14.0], [34.2, 34.974], [3.0, float("Inf")]]
+
+diag2 = [[2.8, 4.45], [9.5, 14.1], [3.2, float("Inf")]]
+
+message = "diag1=" + repr(diag1)
+print(message)
+
+message = "diag2=" + repr(diag2)
+print(message)
+
+message = "Bottleneck distance approximation=" + repr(
+ gudhi.bottleneck_distance(diag1, diag2, 0.1)
+)
+print(message)
+
+message = "Bottleneck distance exact value=" + repr(
+ gudhi.bottleneck_distance(diag1, diag2)
+)
+print(message)
diff --git a/src/python/example/coordinate_graph_induced_complex.py b/src/python/example/coordinate_graph_induced_complex.py
new file mode 100755
index 00000000..e32141b4
--- /dev/null
+++ b/src/python/example/coordinate_graph_induced_complex.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python
+
+import gudhi
+import argparse
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2018 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2018 Inria"
+__license__ = "MIT"
+
+parser = argparse.ArgumentParser(
+ description="Coordinate GIC " "from points read in a OFF file.",
+ epilog="Example: "
+ "example/coordinate_graph_induced_complex.py "
+ "-f ../data/points/KleinBottle5D.off -c 0 -v"
+ "- Constructs the coordinate GIC with the "
+ "points from the given OFF file.",
+)
+parser.add_argument("-f", "--file", type=str, required=True)
+parser.add_argument("-c", "--coordinate", type=int, default=0)
+parser.add_argument(
+ "-v",
+ "--verbose",
+ default=False,
+ action="store_true",
+ help="Flag for program verbosity",
+)
+
+args = parser.parse_args()
+
+nerve_complex = gudhi.CoverComplex()
+nerve_complex.set_verbose(args.verbose)
+
+if nerve_complex.read_point_cloud(args.file):
+ nerve_complex.set_type("GIC")
+ nerve_complex.set_color_from_coordinate(args.coordinate)
+ nerve_complex.set_function_from_coordinate(args.coordinate)
+ nerve_complex.set_graph_from_automatic_rips()
+ nerve_complex.set_automatic_resolution()
+ nerve_complex.set_gain()
+ nerve_complex.set_cover_from_function()
+ nerve_complex.find_simplices()
+ nerve_complex.plot_dot()
+ simplex_tree = nerve_complex.create_simplex_tree()
+ nerve_complex.compute_PD()
+ if args.verbose:
+ print("Iterator on coordinate GIC simplices")
+ result_str = (
+ "Coordinate GIC is of dimension "
+ + repr(simplex_tree.dimension())
+ + " - "
+ + repr(simplex_tree.num_simplices())
+ + " simplices - "
+ + repr(simplex_tree.num_vertices())
+ + " vertices."
+ )
+ print(result_str)
+ for filtered_value in simplex_tree.get_filtration():
+ print(filtered_value[0])
diff --git a/src/python/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py b/src/python/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py
new file mode 100755
index 00000000..610ba44f
--- /dev/null
+++ b/src/python/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py
@@ -0,0 +1,83 @@
+#!/usr/bin/env python
+
+import gudhi
+import argparse
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "MIT"
+
+parser = argparse.ArgumentParser(
+ description="EuclideanStrongWitnessComplex creation from "
+ "points read in a OFF file.",
+ epilog="Example: "
+ "example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py "
+ "-f ../data/points/tore3D_300.off -a 1.0 -n 20 -d 2"
+ "- Constructs a strong witness complex with the "
+ "points from the given OFF file.",
+)
+parser.add_argument("-f", "--file", type=str, required=True)
+parser.add_argument("-a", "--max_alpha_square", type=float, required=True)
+parser.add_argument("-n", "--number_of_landmarks", type=int, required=True)
+parser.add_argument("-d", "--limit_dimension", type=int, required=True)
+parser.add_argument("-b", "--band", type=float, default=0.0)
+parser.add_argument(
+ "--no-diagram",
+ default=False,
+ action="store_true",
+ help="Flag for not to display the diagrams",
+)
+
+args = parser.parse_args()
+
+with open(args.file, "r") as f:
+ first_line = f.readline()
+ if (first_line == "OFF\n") or (first_line == "nOFF\n"):
+ print("#####################################################################")
+ print("EuclideanStrongWitnessComplex creation from points read in a OFF file")
+
+ witnesses = gudhi.read_off(off_file=args.file)
+ landmarks = gudhi.pick_n_random_points(
+ points=witnesses, nb_points=args.number_of_landmarks
+ )
+
+ message = (
+ "EuclideanStrongWitnessComplex with max_edge_length="
+ + repr(args.max_alpha_square)
+ + " - Number of landmarks="
+ + repr(args.number_of_landmarks)
+ )
+ print(message)
+
+ witness_complex = gudhi.EuclideanStrongWitnessComplex(
+ witnesses=witnesses, landmarks=landmarks
+ )
+ simplex_tree = witness_complex.create_simplex_tree(
+ max_alpha_square=args.max_alpha_square, limit_dimension=args.limit_dimension
+ )
+
+ message = "Number of simplices=" + repr(simplex_tree.num_simplices())
+ print(message)
+
+ diag = simplex_tree.persistence()
+
+ print("betti_numbers()=")
+ print(simplex_tree.betti_numbers())
+
+        if not args.no_diagram:
+ pplot = gudhi.plot_persistence_diagram(diag, band=args.band)
+ pplot.show()
+ else:
+ print(args.file, "is not a valid OFF file")
+
+ f.close()
diff --git a/src/python/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py b/src/python/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py
new file mode 100755
index 00000000..7587b732
--- /dev/null
+++ b/src/python/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py
@@ -0,0 +1,82 @@
+#!/usr/bin/env python
+
+import gudhi
+import argparse
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "MIT"
+
+parser = argparse.ArgumentParser(
+ description="EuclideanWitnessComplex creation from " "points read in a OFF file.",
+ epilog="Example: "
+ "example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py "
+ "-f ../data/points/tore3D_300.off -a 1.0 -n 20 -d 2"
+ "- Constructs a weak witness complex with the "
+ "points from the given OFF file.",
+)
+parser.add_argument("-f", "--file", type=str, required=True)
+parser.add_argument("-a", "--max_alpha_square", type=float, required=True)
+parser.add_argument("-n", "--number_of_landmarks", type=int, required=True)
+parser.add_argument("-d", "--limit_dimension", type=int, required=True)
+parser.add_argument("-b", "--band", type=float, default=0.0)
+parser.add_argument(
+ "--no-diagram",
+ default=False,
+ action="store_true",
+ help="Flag for not to display the diagrams",
+)
+
+args = parser.parse_args()
+
+with open(args.file, "r") as f:
+ first_line = f.readline()
+ if (first_line == "OFF\n") or (first_line == "nOFF\n"):
+ print("#####################################################################")
+        print("EuclideanWitnessComplex creation from points read in an OFF file")
+
+ witnesses = gudhi.read_off(off_file=args.file)
+ landmarks = gudhi.pick_n_random_points(
+ points=witnesses, nb_points=args.number_of_landmarks
+ )
+
+ message = (
+ "EuclideanWitnessComplex with max_edge_length="
+ + repr(args.max_alpha_square)
+ + " - Number of landmarks="
+ + repr(args.number_of_landmarks)
+ )
+ print(message)
+
+ witness_complex = gudhi.EuclideanWitnessComplex(
+ witnesses=witnesses, landmarks=landmarks
+ )
+ simplex_tree = witness_complex.create_simplex_tree(
+ max_alpha_square=args.max_alpha_square, limit_dimension=args.limit_dimension
+ )
+
+ message = "Number of simplices=" + repr(simplex_tree.num_simplices())
+ print(message)
+
+ diag = simplex_tree.persistence()
+
+ print("betti_numbers()=")
+ print(simplex_tree.betti_numbers())
+
+        if not args.no_diagram:
+ pplot = gudhi.plot_persistence_diagram(diag, band=args.band)
+ pplot.show()
+ else:
+ print(args.file, "is not a valid OFF file")
+
diff --git a/src/python/example/functional_graph_induced_complex.py b/src/python/example/functional_graph_induced_complex.py
new file mode 100755
index 00000000..8b645040
--- /dev/null
+++ b/src/python/example/functional_graph_induced_complex.py
@@ -0,0 +1,69 @@
+#!/usr/bin/env python
+
+import gudhi
+import argparse
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2018 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2018 Inria"
+__license__ = "MIT"
+
+parser = argparse.ArgumentParser(
+    description="Functional GIC " "from points read in an OFF file.",
+ epilog="Example: "
+ "example/functional_graph_induced_complex.py "
+ "-o ../data/points/COIL_database/lucky_cat.off "
+    "-f ../data/points/COIL_database/lucky_cat_PCA1 "
+ "- Constructs the functional GIC with the "
+ "points from the given OFF and function files.",
+)
+parser.add_argument("-o", "--off-file", type=str, required=True)
+parser.add_argument("-f", "--function-file", type=str, required=True)
+parser.add_argument(
+ "-v",
+ "--verbose",
+ default=False,
+ action="store_true",
+ help="Flag for program verbosity",
+)
+
+args = parser.parse_args()
+
+nerve_complex = gudhi.CoverComplex()
+nerve_complex.set_verbose(args.verbose)
+
+if nerve_complex.read_point_cloud(args.off_file):
+ nerve_complex.set_type("GIC")
+ nerve_complex.set_color_from_file(args.function_file)
+ nerve_complex.set_function_from_file(args.function_file)
+ nerve_complex.set_graph_from_automatic_rips()
+ nerve_complex.set_automatic_resolution()
+ nerve_complex.set_gain()
+ nerve_complex.set_cover_from_function()
+ nerve_complex.find_simplices()
+ nerve_complex.plot_dot()
+ simplex_tree = nerve_complex.create_simplex_tree()
+ nerve_complex.compute_PD()
+ if args.verbose:
+ print("Iterator on functional GIC simplices")
+ result_str = (
+ "Functional GIC is of dimension "
+ + repr(simplex_tree.dimension())
+ + " - "
+ + repr(simplex_tree.num_simplices())
+ + " simplices - "
+ + repr(simplex_tree.num_vertices())
+ + " vertices."
+ )
+ print(result_str)
+ for filtered_value in simplex_tree.get_filtration():
+ print(filtered_value[0])
diff --git a/src/python/example/gudhi_graphical_tools_example.py b/src/python/example/gudhi_graphical_tools_example.py
new file mode 100755
index 00000000..3b0ca54d
--- /dev/null
+++ b/src/python/example/gudhi_graphical_tools_example.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+
+import gudhi
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "MIT"
+
+print("#####################################################################")
+print("Show barcode persistence example")
+
+persistence = [
+ (2, (1.0, float("inf"))),
+ (1, (1.4142135623730951, float("inf"))),
+ (1, (1.4142135623730951, float("inf"))),
+ (0, (0.0, float("inf"))),
+ (0, (0.0, 1.0)),
+ (0, (0.0, 1.0)),
+ (0, (0.0, 1.0)),
+]
+gudhi.plot_persistence_barcode(persistence)
+
+print("#####################################################################")
+print("Show diagram persistence example")
+
+pplot = gudhi.plot_persistence_diagram(persistence)
+pplot.show()
+
+print("#####################################################################")
+print("Show diagram persistence example with a confidence band")
+
+pplot = gudhi.plot_persistence_diagram(persistence, band=0.2)
+pplot.show()
diff --git a/src/python/example/nerve_of_a_covering.py b/src/python/example/nerve_of_a_covering.py
new file mode 100755
index 00000000..3c8e0f90
--- /dev/null
+++ b/src/python/example/nerve_of_a_covering.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+
+import gudhi
+import argparse
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2018 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2018 Inria"
+__license__ = "MIT"
+
+parser = argparse.ArgumentParser(
+    description="Nerve of a covering creation " "from points read in an OFF file.",
+ epilog="Example: "
+ "example/nerve_of_a_covering.py "
+    "-f ../data/points/human.off -c 2 -r 10 -g 0.3 "
+ "- Constructs Nerve of a covering with the "
+ "points from the given OFF file.",
+)
+parser.add_argument("-f", "--file", type=str, required=True)
+parser.add_argument("-c", "--coordinate", type=int, default=0)
+parser.add_argument("-r", "--resolution", type=int, default=10)
+parser.add_argument("-g", "--gain", type=float, default=0.3)
+parser.add_argument(
+ "-v",
+ "--verbose",
+ default=False,
+ action="store_true",
+ help="Flag for program verbosity",
+)
+
+args = parser.parse_args()
+
+nerve_complex = gudhi.CoverComplex()
+nerve_complex.set_verbose(args.verbose)
+
+if nerve_complex.read_point_cloud(args.file):
+ nerve_complex.set_type("Nerve")
+ nerve_complex.set_color_from_coordinate(args.coordinate)
+ nerve_complex.set_function_from_coordinate(args.coordinate)
+ nerve_complex.set_graph_from_OFF()
+ nerve_complex.set_resolution_with_interval_number(args.resolution)
+ nerve_complex.set_gain(args.gain)
+ nerve_complex.set_cover_from_function()
+ nerve_complex.find_simplices()
+ nerve_complex.write_info()
+ simplex_tree = nerve_complex.create_simplex_tree()
+ nerve_complex.compute_PD()
+ if args.verbose:
+ print("Iterator on graph induced complex simplices")
+ result_str = (
+ "Nerve is of dimension "
+ + repr(simplex_tree.dimension())
+ + " - "
+ + repr(simplex_tree.num_simplices())
+ + " simplices - "
+ + repr(simplex_tree.num_vertices())
+ + " vertices."
+ )
+ print(result_str)
+ for filtered_value in simplex_tree.get_filtration():
+ print(filtered_value[0])
diff --git a/src/python/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py b/src/python/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py
new file mode 100755
index 00000000..9cb855cd
--- /dev/null
+++ b/src/python/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py
@@ -0,0 +1,74 @@
+#!/usr/bin/env python
+
+import gudhi
+import argparse
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "MIT"
+
+
+def is_file_perseus(file):
+    # Check the line count expected for a Perseus-style bitmap (see the layout
+    # sketch after this function). Files are opened via `with` so they get closed.
+    with open(file) as f:
+        num_lines = f.read().count("\n")
+    try:
+        with open(file) as f:
+            num_dim = int(f.readline())
+            coeff = 1
+            for dim in range(0, num_dim):
+                try:
+                    line = int(f.readline())
+                    coeff *= abs(line)
+                except ValueError:
+                    return False
+        return num_lines == (1 + num_dim + coeff)
+    except ValueError:
+        return False
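+# Layout sketch (illustration only, not part of the original script): the check
+# above expects 1 + num_dim + prod(|size_i|) lines, e.g. for a 2x3 bitmap:
+#   2    <- number of dimensions
+#   2    <- size in dimension 0
+#   3    <- size in dimension 1
+# followed by 2*3 = 6 lines, one filtration value per top-dimensional cell,
+# i.e. 9 lines in total.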
+
+
+parser = argparse.ArgumentParser(
+ description="Periodic cubical complex from a " "Perseus-style file name.",
+ epilog="Example: "
+ "./periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py"
+ " -f ../data/bitmap/CubicalTwoSphere.txt",
+)
+
+parser.add_argument("-f", "--file", type=str, required=True)
+parser.add_argument(
+ "--no-barcode",
+ default=False,
+ action="store_true",
+    help="Flag to not display the barcodes",
+)
+
+args = parser.parse_args()
+
+if is_file_perseus(args.file):
+ print("#####################################################################")
+ print("PeriodicCubicalComplex creation")
+ periodic_cubical_complex = gudhi.PeriodicCubicalComplex(perseus_file=args.file)
+
+ print("persistence(homology_coeff_field=3, min_persistence=0)=")
+ diag = periodic_cubical_complex.persistence(
+ homology_coeff_field=3, min_persistence=0
+ )
+ print(diag)
+
+ print("betti_numbers()=")
+ print(periodic_cubical_complex.betti_numbers())
+    if not args.no_barcode:
+ gudhi.plot_persistence_barcode(diag)
+else:
+ print(args.file, "is not a valid perseus style file")
diff --git a/src/python/example/random_cubical_complex_persistence_example.py b/src/python/example/random_cubical_complex_persistence_example.py
new file mode 100755
index 00000000..da0eb177
--- /dev/null
+++ b/src/python/example/random_cubical_complex_persistence_example.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python
+
+import gudhi
+import numpy
+from functools import reduce
+import argparse
+import operator
+
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "MIT"
+
+parser = argparse.ArgumentParser(
+ description="Random cubical complex.",
+ epilog="Example: "
+ "./random_cubical_complex_persistence_example.py"
+ " 10 10 10 - Constructs a random cubical "
+    "complex of dimensions [10, 10, 10] (i.e. "
+    "1000 random top-dimensional cells).",
+)
+parser.add_argument("dimension", type=int, nargs="*", help="Cubical complex dimensions")
+
+args = parser.parse_args()
+dimension_multiplication = reduce(operator.mul, args.dimension, 1)
+
+if dimension_multiplication > 1:
+ print("#####################################################################")
+ print("CubicalComplex creation")
+ cubical_complex = gudhi.CubicalComplex(
+ dimensions=args.dimension,
+ top_dimensional_cells=numpy.random.rand(dimension_multiplication),
+ )
+
+ print("persistence(homology_coeff_field=2, min_persistence=0)=")
+ print(cubical_complex.persistence(homology_coeff_field=2, min_persistence=0))
+
+ print("betti_numbers()=")
+ print(cubical_complex.betti_numbers())
diff --git a/src/python/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py b/src/python/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py
new file mode 100755
index 00000000..3571580b
--- /dev/null
+++ b/src/python/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python
+
+import gudhi
+import sys
+import argparse
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2017 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2017 Inria"
+__license__ = "MIT"
+
+parser = argparse.ArgumentParser(
+ description="RipsComplex creation from " "a correlation matrix read in a csv file.",
+ epilog="Example: "
+ "example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py "
+    "-f ../data/correlation_matrix/lower_triangular_correlation_matrix.csv -c 0.5 -d 3 "
+ "- Constructs a Rips complex with the "
+ "correlation matrix from the given csv file.",
+)
+parser.add_argument("-f", "--file", type=str, required=True)
+parser.add_argument("-c", "--min_edge_correlation", type=float, default=0.5)
+parser.add_argument("-d", "--max_dimension", type=int, default=1)
+parser.add_argument("-b", "--band", type=float, default=0.0)
+parser.add_argument(
+ "--no-diagram",
+ default=False,
+ action="store_true",
+    help="Flag to not display the diagrams",
+)
+
+args = parser.parse_args()
+
+if not (-1.0 < args.min_edge_correlation < 1.0):
+    print("Wrong value of the correlation threshold (should be strictly between -1 and 1).")
+ sys.exit(1)
+
+print("#####################################################################")
+print("Caution: as persistence diagrams points will be under the diagonal,")
+print("bottleneck distance and persistence graphical tool will not work")
+print("properly, this is a known issue.")
+
+print("#####################################################################")
+print("RipsComplex creation from correlation matrix read in a csv file")
+
+message = "RipsComplex with min_edge_correlation=" + repr(args.min_edge_correlation)
+print(message)
+
+correlation_matrix = gudhi.read_lower_triangular_matrix_from_csv_file(
+ csv_file=args.file
+)
+# Given a correlation matrix M, we compute component-wise M'[i,j] = 1-M[i,j] to get a distance matrix:
+distance_matrix = [
+ [1.0 - correlation_matrix[i][j] for j in range(len(correlation_matrix[i]))]
+ for i in range(len(correlation_matrix))
+]
+
+rips_complex = gudhi.RipsComplex(
+ distance_matrix=distance_matrix, max_edge_length=1.0 - args.min_edge_correlation
+)
+simplex_tree = rips_complex.create_simplex_tree(max_dimension=args.max_dimension)
+
+message = "Number of simplices=" + repr(simplex_tree.num_simplices())
+print(message)
+
+diag = simplex_tree.persistence()
+
+print("betti_numbers()=")
+print(simplex_tree.betti_numbers())
+
+# invert the persistence diagram
+invert_diag = [
+ (diag[pers][0], (1.0 - diag[pers][1][0], 1.0 - diag[pers][1][1]))
+ for pers in range(len(diag))
+]
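+# For instance (comment added for clarity): with the 1 - correlation rescaling
+# above, a pair born at distance 0.1 and dying at distance 0.3 maps back to
+# correlations (0.9, 0.7) in the inverted diagram.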
+
+if not args.no_diagram:
+ pplot = gudhi.plot_persistence_diagram(invert_diag, band=args.band)
+ pplot.show()
diff --git a/src/python/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py b/src/python/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py
new file mode 100755
index 00000000..0b9a9ba9
--- /dev/null
+++ b/src/python/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+
+import gudhi
+import argparse
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "MIT"
+
+parser = argparse.ArgumentParser(
+ description="RipsComplex creation from " "a distance matrix read in a csv file.",
+ epilog="Example: "
+ "example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py "
+    "-f ../data/distance_matrix/lower_triangular_distance_matrix.csv -e 12.0 -d 3 "
+ "- Constructs a Rips complex with the "
+ "distance matrix from the given csv file.",
+)
+parser.add_argument("-f", "--file", type=str, required=True)
+parser.add_argument("-e", "--max_edge_length", type=float, default=0.5)
+parser.add_argument("-d", "--max_dimension", type=int, default=1)
+parser.add_argument("-b", "--band", type=float, default=0.0)
+parser.add_argument(
+ "--no-diagram",
+ default=False,
+ action="store_true",
+    help="Flag to not display the diagrams",
+)
+
+args = parser.parse_args()
+
+print("#####################################################################")
+print("RipsComplex creation from distance matrix read in a csv file")
+
+message = "RipsComplex with max_edge_length=" + repr(args.max_edge_length)
+print(message)
+
+distance_matrix = gudhi.read_lower_triangular_matrix_from_csv_file(csv_file=args.file)
+rips_complex = gudhi.RipsComplex(
+ distance_matrix=distance_matrix, max_edge_length=args.max_edge_length
+)
+simplex_tree = rips_complex.create_simplex_tree(max_dimension=args.max_dimension)
+
+message = "Number of simplices=" + repr(simplex_tree.num_simplices())
+print(message)
+
+diag = simplex_tree.persistence()
+
+print("betti_numbers()=")
+print(simplex_tree.betti_numbers())
+
+if not args.no_diagram:
+ pplot = gudhi.plot_persistence_diagram(diag, band=args.band)
+ pplot.show()
diff --git a/src/python/example/rips_complex_diagram_persistence_from_off_file_example.py b/src/python/example/rips_complex_diagram_persistence_from_off_file_example.py
new file mode 100755
index 00000000..2b335bba
--- /dev/null
+++ b/src/python/example/rips_complex_diagram_persistence_from_off_file_example.py
@@ -0,0 +1,72 @@
+#!/usr/bin/env python
+
+import gudhi
+import argparse
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "MIT"
+
+parser = argparse.ArgumentParser(
+    description="RipsComplex creation from " "points read in an OFF file.",
+ epilog="Example: "
+ "example/rips_complex_diagram_persistence_from_off_file_example.py "
+    "-f ../data/points/tore3D_300.off -e 0.6 "
+ "- Constructs a Rips complex with the "
+ "points from the given OFF file.",
+)
+parser.add_argument("-f", "--file", type=str, required=True)
+parser.add_argument("-e", "--max_edge_length", type=float, default=0.5)
+parser.add_argument("-d", "--max_dimension", type=int, default=1)
+parser.add_argument("-b", "--band", type=float, default=0.0)
+parser.add_argument(
+ "--no-diagram",
+ default=False,
+ action="store_true",
+    help="Flag to not display the diagrams",
+)
+
+args = parser.parse_args()
+
+with open(args.file, "r") as f:
+ first_line = f.readline()
+ if (first_line == "OFF\n") or (first_line == "nOFF\n"):
+ print("#####################################################################")
+        print("RipsComplex creation from points read in an OFF file")
+
+ message = "RipsComplex with max_edge_length=" + repr(args.max_edge_length)
+ print(message)
+
+ point_cloud = gudhi.read_off(off_file=args.file)
+ rips_complex = gudhi.RipsComplex(
+ points=point_cloud, max_edge_length=args.max_edge_length
+ )
+ simplex_tree = rips_complex.create_simplex_tree(
+ max_dimension=args.max_dimension
+ )
+
+ message = "Number of simplices=" + repr(simplex_tree.num_simplices())
+ print(message)
+
+ diag = simplex_tree.persistence()
+
+ print("betti_numbers()=")
+ print(simplex_tree.betti_numbers())
+
+        if not args.no_diagram:
+ pplot = gudhi.plot_persistence_diagram(diag, band=args.band)
+ pplot.show()
+ else:
+ print(args.file, "is not a valid OFF file")
+
diff --git a/src/python/example/rips_complex_from_points_example.py b/src/python/example/rips_complex_from_points_example.py
new file mode 100755
index 00000000..59d8a261
--- /dev/null
+++ b/src/python/example/rips_complex_from_points_example.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python
+
+import gudhi
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "MIT"
+
+print("#####################################################################")
+print("RipsComplex creation from points")
+rips = gudhi.RipsComplex(points=[[0, 0], [1, 0], [0, 1], [1, 1]], max_edge_length=42)
+
+simplex_tree = rips.create_simplex_tree(max_dimension=1)
+
+print("filtrations=", simplex_tree.get_filtration())
+print("star([0])=", simplex_tree.get_star([0]))
+print("coface([0], 1)=", simplex_tree.get_cofaces([0], 1))
diff --git a/src/python/example/rips_persistence_diagram.py b/src/python/example/rips_persistence_diagram.py
new file mode 100755
index 00000000..f5897d7b
--- /dev/null
+++ b/src/python/example/rips_persistence_diagram.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+
+import gudhi
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Marc Glisse"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "MIT"
+
+print("#####################################################################")
+print("RipsComplex creation from points")
+rips = gudhi.RipsComplex(points=[[0, 0], [1, 0], [0, 1], [1, 1]], max_edge_length=42)
+
+simplex_tree = rips.create_simplex_tree(max_dimension=1)
+
+
+diag = simplex_tree.persistence(homology_coeff_field=2, min_persistence=0)
+print("diag=", diag)
+
+pplot = gudhi.plot_persistence_diagram(diag)
+pplot.show()
diff --git a/src/python/example/simplex_tree_example.py b/src/python/example/simplex_tree_example.py
new file mode 100755
index 00000000..30de00da
--- /dev/null
+++ b/src/python/example/simplex_tree_example.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python
+
+import gudhi
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "MIT"
+
+print("#####################################################################")
+print("SimplexTree creation from insertion")
+
+st = gudhi.SimplexTree()
+
+if st.insert([0, 1]):
+ print("Inserted !!")
+else:
+ print("Not inserted...")
+
+if st.find([0, 1]):
+ print("Found !!")
+else:
+ print("Not found...")
+
+if st.insert([0, 1, 2], filtration=4.0):
+ print("Inserted !!")
+else:
+ print("Not inserted...")
+
+print("dimension=", st.dimension())
+
+st.initialize_filtration()
+print("filtration=", st.get_filtration())
+print("filtration[1, 2]=", st.filtration([1, 2]))
+print("filtration[4, 2]=", st.filtration([4, 2]))
+
+print("num_simplices=", st.num_simplices())
+print("num_vertices=", st.num_vertices())
+
+print("skeleton[2]=", st.get_skeleton(2))
+print("skeleton[1]=", st.get_skeleton(1))
+print("skeleton[0]=", st.get_skeleton(0))
diff --git a/src/python/example/sparse_rips_persistence_diagram.py b/src/python/example/sparse_rips_persistence_diagram.py
new file mode 100755
index 00000000..671d5e34
--- /dev/null
+++ b/src/python/example/sparse_rips_persistence_diagram.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python
+
+import gudhi
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2018 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Marc Glisse"
+__copyright__ = "Copyright (C) 2018 Inria"
+__license__ = "MIT"
+
+print("#####################################################################")
+print("Sparse RipsComplex creation from points")
+rips = gudhi.RipsComplex(
+ points=[[0, 0], [0, 0.1], [1, 0], [0, 1], [1, 1]], max_edge_length=42, sparse=0.5
+)
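+# Comment added for clarity: `sparse=0.5` requests the sparse (approximate) Rips
+# construction, the value being the approximation parameter (smaller values give
+# a tighter approximation of the full Rips filtration).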
+
+simplex_tree = rips.create_simplex_tree(max_dimension=2)
+
+
+diag = simplex_tree.persistence(homology_coeff_field=2, min_persistence=0)
+print("diag=", diag)
+
+pplot = gudhi.plot_persistence_diagram(diag)
+pplot.show()
diff --git a/src/python/example/tangential_complex_plain_homology_from_off_file_example.py b/src/python/example/tangential_complex_plain_homology_from_off_file_example.py
new file mode 100755
index 00000000..456bc9eb
--- /dev/null
+++ b/src/python/example/tangential_complex_plain_homology_from_off_file_example.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python
+
+import gudhi
+import argparse
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "MIT"
+
+parser = argparse.ArgumentParser(
+    description="TangentialComplex creation from " "points read in an OFF file.",
+ epilog="Example: "
+ "example/tangential_complex_plain_homology_from_off_file_example.py "
+    "-f ../data/points/tore3D_300.off -i 3 "
+ "- Constructs a tangential complex with the "
+ "points from the given OFF file",
+)
+parser.add_argument("-f", "--file", type=str, required=True)
+parser.add_argument("-i", "--intrisic_dim", type=int, required=True)
+parser.add_argument("-b", "--band", type=float, default=0.0)
+parser.add_argument(
+ "--no-diagram",
+ default=False,
+ action="store_true",
+    help="Flag to not display the diagrams",
+)
+
+args = parser.parse_args()
+
+with open(args.file, "r") as f:
+ first_line = f.readline()
+ if (first_line == "OFF\n") or (first_line == "nOFF\n"):
+ print("#####################################################################")
+        print("TangentialComplex creation from points read in an OFF file")
+
+ tc = gudhi.TangentialComplex(intrisic_dim=args.intrisic_dim, off_file=args.file)
+ tc.compute_tangential_complex()
+ st = tc.create_simplex_tree()
+
+ message = "Number of simplices=" + repr(st.num_simplices())
+ print(message)
+
+ diag = st.persistence(persistence_dim_max=True)
+
+ print("betti_numbers()=")
+ print(st.betti_numbers())
+
+        if not args.no_diagram:
+ pplot = gudhi.plot_persistence_diagram(diag, band=args.band)
+ pplot.show()
+ else:
+ print(args.file, "is not a valid OFF file")
+
diff --git a/src/python/example/voronoi_graph_induced_complex.py b/src/python/example/voronoi_graph_induced_complex.py
new file mode 100755
index 00000000..38be6c92
--- /dev/null
+++ b/src/python/example/voronoi_graph_induced_complex.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+
+import gudhi
+import argparse
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2018 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2018 Inria"
+__license__ = "MIT"
+
+parser = argparse.ArgumentParser(
+    description="Voronoi GIC " "from points read in an OFF file.",
+ epilog="Example: "
+ "example/voronoi_graph_induced_complex.py "
+    "-f ../data/points/human.off -n 700 -v "
+ "- Constructs the Voronoi GIC with the "
+ "points from the given OFF file.",
+)
+parser.add_argument("-f", "--file", type=str, required=True)
+parser.add_argument("-n", "--subsample-nb-points", type=int, default=100)
+parser.add_argument(
+ "-v",
+ "--verbose",
+ default=False,
+ action="store_true",
+ help="Flag for program verbosity",
+)
+
+args = parser.parse_args()
+
+nerve_complex = gudhi.CoverComplex()
+nerve_complex.set_verbose(args.verbose)
+
+if nerve_complex.read_point_cloud(args.file):
+ nerve_complex.set_type("GIC")
+ nerve_complex.set_color_from_coordinate()
+ nerve_complex.set_graph_from_OFF()
+ nerve_complex.set_cover_from_Voronoi(args.subsample_nb_points)
+ nerve_complex.find_simplices()
+ nerve_complex.plot_off()
+ simplex_tree = nerve_complex.create_simplex_tree()
+ nerve_complex.compute_PD()
+ if args.verbose:
+ print("Iterator on graph induced complex simplices")
+ result_str = (
+ "Graph induced complex is of dimension "
+ + repr(simplex_tree.dimension())
+ + " - "
+ + repr(simplex_tree.num_simplices())
+ + " simplices - "
+ + repr(simplex_tree.num_vertices())
+ + " vertices."
+ )
+ print(result_str)
+ for filtered_value in simplex_tree.get_filtration():
+ print(filtered_value[0])
diff --git a/src/python/example/witness_complex_from_nearest_landmark_table.py b/src/python/example/witness_complex_from_nearest_landmark_table.py
new file mode 100755
index 00000000..c04a82b2
--- /dev/null
+++ b/src/python/example/witness_complex_from_nearest_landmark_table.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+
+from gudhi import StrongWitnessComplex, SimplexTree
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "MIT"
+
+print("#####################################################################")
+print("WitnessComplex creation from nearest landmark table")
+nearest_landmark_table = [
+ [[0, 0.0], [1, 0.1], [2, 0.2], [3, 0.3], [4, 0.4]],
+ [[1, 0.0], [2, 0.1], [3, 0.2], [4, 0.3], [0, 0.4]],
+ [[2, 0.0], [3, 0.1], [4, 0.2], [0, 0.3], [1, 0.4]],
+ [[3, 0.0], [4, 0.1], [0, 0.2], [1, 0.3], [2, 0.4]],
+ [[4, 0.0], [0, 0.1], [1, 0.2], [2, 0.3], [3, 0.4]],
+]
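+# Comment added for clarity: each row above corresponds to one witness and lists
+# [landmark id, distance] pairs sorted by increasing distance to that witness.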
+
+witness_complex = StrongWitnessComplex(nearest_landmark_table=nearest_landmark_table)
+simplex_tree = witness_complex.create_simplex_tree(max_alpha_square=0.41)
+
+message = "Number of simplices: " + repr(simplex_tree.num_simplices())
+print(message)
+
+diag = simplex_tree.persistence(min_persistence=-0.1, homology_coeff_field=11)
+print(diag)
diff --git a/src/python/gudhi/__init__.py b/src/python/gudhi/__init__.py
new file mode 100644
index 00000000..fde749eb
--- /dev/null
+++ b/src/python/gudhi/__init__.py
@@ -0,0 +1 @@
+# Fake empty __init__.py for cython to accept this directory as a Python package
diff --git a/src/python/gudhi/__init__.py.in b/src/python/gudhi/__init__.py.in
new file mode 100644
index 00000000..947aa3c9
--- /dev/null
+++ b/src/python/gudhi/__init__.py.in
@@ -0,0 +1,40 @@
+from importlib import import_module
+
+"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "GUDHI Editorial Board"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "https://gudhi.inria.fr/licensing/"
+__version__ = "@GUDHI_VERSION@"
+# This variable is used by doctest to find files
+__root_source_dir__ = "@CMAKE_SOURCE_DIR@"
+__debug_info__ = @GUDHI_PYTHON_DEBUG_INFO@
+
+from sys import exc_info
+
+__all__ = [@GUDHI_PYTHON_MODULES@]
+
+__available_modules__ = ''
+__missing_modules__ = ''
+
+# try to import * from gudhi.__module_name__
+for __module_name__ in __all__:
+ try:
+ __module__ = import_module('gudhi.' + __module_name__)
+ try:
+ __to_import__ = __module__.__all__
+ except AttributeError:
+ __to_import__ = [name for name in __module__.__dict__ if not name.startswith('_')]
+ globals().update({name: __module__.__dict__[name] for name in __to_import__})
+ __available_modules__ += __module_name__ + ";"
+ except:
+ __missing_modules__ += __module_name__ + ";"
diff --git a/src/python/gudhi/alpha_complex.pyx b/src/python/gudhi/alpha_complex.pyx
new file mode 100644
index 00000000..6d6309db
--- /dev/null
+++ b/src/python/gudhi/alpha_complex.pyx
@@ -0,0 +1,116 @@
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.utility cimport pair
+from libcpp.string cimport string
+from libcpp cimport bool
+from libc.stdint cimport intptr_t
+import os
+
+from gudhi.simplex_tree cimport *
+from gudhi.simplex_tree import SimplexTree
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "GPL v3"
+
+cdef extern from "Alpha_complex_interface.h" namespace "Gudhi":
+ cdef cppclass Alpha_complex_interface "Gudhi::alpha_complex::Alpha_complex_interface":
+ Alpha_complex_interface(vector[vector[double]] points)
+ # bool from_file is a workaround for cython to find the correct signature
+ Alpha_complex_interface(string off_file, bool from_file)
+ vector[double] get_point(int vertex)
+ void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square)
+
+# AlphaComplex python interface
+cdef class AlphaComplex:
+ """AlphaComplex is a simplicial complex constructed from the finite cells
+ of a Delaunay Triangulation.
+
+ The filtration value of each simplex is computed as the square of the
+ circumradius of the simplex if the circumsphere is empty (the simplex is
+ then said to be Gabriel), and as the minimum of the filtration values of
+ the codimension 1 cofaces that make it not Gabriel otherwise.
+
+ All simplices that have a filtration value strictly greater than a given
+ alpha squared value are not inserted into the complex.
+
+ .. note::
+
+ When Alpha_complex is constructed with an infinite value of alpha, the
+ complex is a Delaunay complex.
+
+ """
+
+ cdef Alpha_complex_interface * thisptr
+
+ # Fake constructor that does nothing but documenting the constructor
+ def __init__(self, points=None, off_file=''):
+ """AlphaComplex constructor.
+
+ :param points: A list of points in d-Dimension.
+ :type points: list of list of double
+
+ Or
+
+ :param off_file: An OFF file style name.
+ :type off_file: string
+ """
+
+ # The real cython constructor
+ def __cinit__(self, points=None, off_file=''):
+        if off_file != '':
+ if os.path.isfile(off_file):
+ self.thisptr = new Alpha_complex_interface(str.encode(off_file), True)
+ else:
+ print("file " + off_file + " not found.")
+ else:
+ if points is None:
+ # Empty Alpha construction
+ points=[]
+ self.thisptr = new Alpha_complex_interface(points)
+
+
+ def __dealloc__(self):
+ if self.thisptr != NULL:
+ del self.thisptr
+
+ def __is_defined(self):
+ """Returns true if AlphaComplex pointer is not NULL.
+ """
+ return self.thisptr != NULL
+
+ def get_point(self, vertex):
+ """This function returns the point corresponding to a given vertex.
+
+ :param vertex: The vertex.
+ :type vertex: int
+ :rtype: list of float
+ :returns: the point.
+ """
+ cdef vector[double] point = self.thisptr.get_point(vertex)
+ return point
+
+ def create_simplex_tree(self, max_alpha_square=float('inf')):
+ """
+ :param max_alpha_square: The maximum alpha square threshold the
+ simplices shall not exceed. Default is set to infinity, and
+ there is very little point using anything else since it does
+ not save time.
+ :type max_alpha_square: float
+ :returns: A simplex tree created from the Delaunay Triangulation.
+ :rtype: SimplexTree
+ """
+ stree = SimplexTree()
+ cdef intptr_t stree_int_ptr=stree.thisptr
+ self.thisptr.create_simplex_tree(<Simplex_tree_interface_full_featured*>stree_int_ptr, max_alpha_square)
+ return stree
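+
+# Usage sketch (comment only, based on the docstrings above):
+#   alpha = AlphaComplex(points=[[0, 0], [1, 0], [0, 1]])
+#   stree = alpha.create_simplex_tree()
+# For this right triangle the circumcenter is (0.5, 0.5), so the triangle gets
+# filtration value 0.5, its squared circumradius.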
diff --git a/src/python/gudhi/bottleneck.pyx b/src/python/gudhi/bottleneck.pyx
new file mode 100644
index 00000000..4b378cbc
--- /dev/null
+++ b/src/python/gudhi/bottleneck.pyx
@@ -0,0 +1,49 @@
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.utility cimport pair
+import os
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "GPL v3"
+
+cdef extern from "Bottleneck_distance_interface.h" namespace "Gudhi::persistence_diagram":
+ double bottleneck(vector[pair[double, double]], vector[pair[double, double]], double)
+ double bottleneck(vector[pair[double, double]], vector[pair[double, double]])
+
+def bottleneck_distance(diagram_1, diagram_2, e=None):
+    """This function computes the bottleneck distance between two diagrams.
+
+ :param diagram_1: The first diagram.
+ :type diagram_1: vector[pair[double, double]]
+ :param diagram_2: The second diagram.
+ :type diagram_2: vector[pair[double, double]]
+ :param e: If `e` is 0, this uses an expensive algorithm to compute the
+ exact distance.
+ If `e` is not 0, it asks for an additive `e`-approximation, and
+ currently also allows a small multiplicative error (the last 2 or 3
+ bits of the mantissa may be wrong). This version of the algorithm takes
+ advantage of the limited precision of `double` and is usually a lot
+ faster to compute, whatever the value of `e`.
+
+ Thus, by default, `e` is the smallest positive double.
+ :type e: float
+ :rtype: float
+ :returns: the bottleneck distance.
+ """
+ if e is None:
+ # Default value is the smallest double value (not 0, 0 is for exact version)
+ return bottleneck(diagram_1, diagram_2)
+ else:
+ # Can be 0 for exact version
+ return bottleneck(diagram_1, diagram_2, e)
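+
+# Usage sketch (comment only): diagrams are lists of (birth, death) pairs, e.g.
+#   bottleneck_distance([(2.7, 3.7), (9.6, 14.0)], [(2.8, 4.45), (9.5, 14.1)])
+# uses the fast approximate algorithm by default; pass e=0 for the exact
+# (slower) computation.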
diff --git a/src/python/gudhi/cubical_complex.pyx b/src/python/gudhi/cubical_complex.pyx
new file mode 100644
index 00000000..0dc133d1
--- /dev/null
+++ b/src/python/gudhi/cubical_complex.pyx
@@ -0,0 +1,188 @@
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.utility cimport pair
+from libcpp.string cimport string
+from libcpp cimport bool
+import os
+
+from numpy import array as np_array
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "MIT"
+
+cdef extern from "Cubical_complex_interface.h" namespace "Gudhi":
+ cdef cppclass Bitmap_cubical_complex_base_interface "Gudhi::Cubical_complex::Cubical_complex_interface<>":
+ Bitmap_cubical_complex_base_interface(vector[unsigned] dimensions, vector[double] top_dimensional_cells)
+ Bitmap_cubical_complex_base_interface(string perseus_file)
+ int num_simplices()
+ int dimension()
+
+cdef extern from "Persistent_cohomology_interface.h" namespace "Gudhi":
+ cdef cppclass Cubical_complex_persistence_interface "Gudhi::Persistent_cohomology_interface<Gudhi::Cubical_complex::Cubical_complex_interface<>>":
+ Cubical_complex_persistence_interface(Bitmap_cubical_complex_base_interface * st, bool persistence_dim_max)
+ vector[pair[int, pair[double, double]]] get_persistence(int homology_coeff_field, double min_persistence)
+ vector[int] betti_numbers()
+ vector[int] persistent_betti_numbers(double from_value, double to_value)
+ vector[pair[double,double]] intervals_in_dimension(int dimension)
+
+# CubicalComplex python interface
+cdef class CubicalComplex:
+ """The CubicalComplex is an example of a structured complex useful in
+ computational mathematics (specially rigorous numerics) and image
+    computational mathematics (especially rigorous numerics) and image
+ """
+ cdef Bitmap_cubical_complex_base_interface * thisptr
+
+ cdef Cubical_complex_persistence_interface * pcohptr
+
+ # Fake constructor that does nothing but documenting the constructor
+ def __init__(self, dimensions=None, top_dimensional_cells=None,
+ perseus_file=''):
+ """CubicalComplex constructor from dimensions and
+ top_dimensional_cells or from a Perseus-style file name.
+
+ :param dimensions: A list of number of top dimensional cells.
+ :type dimensions: list of int
+ :param top_dimensional_cells: A list of cells filtration values.
+ :type top_dimensional_cells: list of double
+
+ Or
+
+ :param perseus_file: A Perseus-style file name.
+ :type perseus_file: string
+ """
+
+ # The real cython constructor
+ def __cinit__(self, dimensions=None, top_dimensional_cells=None,
+ perseus_file=''):
+        if (dimensions is not None) and (top_dimensional_cells is not None) and (perseus_file == ''):
+ self.thisptr = new Bitmap_cubical_complex_base_interface(dimensions, top_dimensional_cells)
+        elif (dimensions is None) and (top_dimensional_cells is None) and (perseus_file != ''):
+ if os.path.isfile(perseus_file):
+ self.thisptr = new Bitmap_cubical_complex_base_interface(str.encode(perseus_file))
+ else:
+ print("file " + perseus_file + " not found.")
+ else:
+ print("CubicalComplex can be constructed from dimensions and "
+ "top_dimensional_cells or from a Perseus-style file name.")
+
+ def __dealloc__(self):
+ if self.thisptr != NULL:
+ del self.thisptr
+ if self.pcohptr != NULL:
+ del self.pcohptr
+
+ def __is_defined(self):
+ """Returns true if CubicalComplex pointer is not NULL.
+ """
+ return self.thisptr != NULL
+
+ def __is_persistence_defined(self):
+ """Returns true if Persistence pointer is not NULL.
+ """
+ return self.pcohptr != NULL
+
+ def num_simplices(self):
+ """This function returns the number of all cubes in the complex.
+
+ :returns: int -- the number of all cubes in the complex.
+ """
+ return self.thisptr.num_simplices()
+
+ def dimension(self):
+ """This function returns the dimension of the complex.
+
+ :returns: int -- the complex dimension.
+ """
+ return self.thisptr.dimension()
+
+ def persistence(self, homology_coeff_field=11, min_persistence=0):
+ """This function returns the persistence of the complex.
+
+ :param homology_coeff_field: The homology coefficient field. Must be a
+ prime number
+ :type homology_coeff_field: int.
+ :param min_persistence: The minimum persistence value to take into
+ account (strictly greater than min_persistence). Default value is
+ 0.0.
+            Set min_persistence to -1.0 to see all values.
+ :type min_persistence: float.
+ :returns: list of pairs(dimension, pair(birth, death)) -- the
+ persistence of the complex.
+ """
+ if self.pcohptr != NULL:
+ del self.pcohptr
+ if self.thisptr != NULL:
+ self.pcohptr = new Cubical_complex_persistence_interface(self.thisptr, True)
+ cdef vector[pair[int, pair[double, double]]] persistence_result
+ if self.pcohptr != NULL:
+ persistence_result = self.pcohptr.get_persistence(homology_coeff_field, min_persistence)
+ return persistence_result
+
+ def betti_numbers(self):
+ """This function returns the Betti numbers of the complex.
+
+ :returns: list of int -- The Betti numbers ([B0, B1, ..., Bn]).
+
+ :note: betti_numbers function requires persistence function to be
+ launched first.
+
+ :note: betti_numbers function always returns [1, 0, 0, ...] as infinity
+ filtration cubes are not removed from the complex.
+ """
+ cdef vector[int] bn_result
+ if self.pcohptr != NULL:
+ bn_result = self.pcohptr.betti_numbers()
+ return bn_result
+
+ def persistent_betti_numbers(self, from_value, to_value):
+ """This function returns the persistent Betti numbers of the complex.
+
+ :param from_value: The persistence birth limit to be added in the
+ numbers (persistent birth <= from_value).
+ :type from_value: float.
+ :param to_value: The persistence death limit to be added in the
+ numbers (persistent death > to_value).
+ :type to_value: float.
+
+ :returns: list of int -- The persistent Betti numbers ([B0, B1, ...,
+ Bn]).
+
+ :note: persistent_betti_numbers function requires persistence
+ function to be launched first.
+ """
+ cdef vector[int] pbn_result
+ if self.pcohptr != NULL:
+ pbn_result = self.pcohptr.persistent_betti_numbers(<double>from_value, <double>to_value)
+ return pbn_result
+
+ def persistence_intervals_in_dimension(self, dimension):
+ """This function returns the persistence intervals of the complex in a
+ specific dimension.
+
+ :param dimension: The specific dimension.
+ :type dimension: int.
+ :returns: The persistence intervals.
+ :rtype: numpy array of dimension 2
+
+ :note: intervals_in_dim function requires persistence function to be
+ launched first.
+ """
+ cdef vector[pair[double,double]] intervals_result
+ if self.pcohptr != NULL:
+ intervals_result = self.pcohptr.intervals_in_dimension(dimension)
+ else:
+ print("intervals_in_dim function requires persistence function"
+ " to be launched first.")
+ return np_array(intervals_result)
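+
+# Usage sketch (comment only, mirroring the random cubical complex example):
+#   cc = CubicalComplex(dimensions=[2, 3], top_dimensional_cells=[1, 2, 3, 4, 5, 6])
+#   diag = cc.persistence()      # persistence() must be called before betti_numbers()
+#   print(cc.betti_numbers())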
diff --git a/src/python/gudhi/euclidean_strong_witness_complex.pyx b/src/python/gudhi/euclidean_strong_witness_complex.pyx
new file mode 100644
index 00000000..5d6e4fb9
--- /dev/null
+++ b/src/python/gudhi/euclidean_strong_witness_complex.pyx
@@ -0,0 +1,92 @@
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.utility cimport pair
+from libc.stdint cimport intptr_t
+
+from gudhi.simplex_tree cimport *
+from gudhi.simplex_tree import SimplexTree
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "GPL v3"
+
+cdef extern from "Euclidean_strong_witness_complex_interface.h" namespace "Gudhi":
+ cdef cppclass Euclidean_strong_witness_complex_interface "Gudhi::witness_complex::Euclidean_strong_witness_complex_interface":
+ Euclidean_strong_witness_complex_interface(vector[vector[double]] landmarks, vector[vector[double]] witnesses)
+ void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square)
+ void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square,
+ unsigned limit_dimension)
+ vector[double] get_point(unsigned vertex)
+
+# EuclideanStrongWitnessComplex python interface
+cdef class EuclideanStrongWitnessComplex:
+ """Constructs strong witness complex for given sets of witnesses and
+ landmarks in Euclidean space.
+ """
+
+ cdef Euclidean_strong_witness_complex_interface * thisptr
+
+ # Fake constructor that does nothing but documenting the constructor
+ def __init__(self, landmarks=None, witnesses=None):
+        """EuclideanStrongWitnessComplex constructor.
+
+ :param landmarks: A list of landmarks (in the point cloud).
+ :type landmarks: list of list of double
+
+ :param witnesses: The point cloud.
+ :type witnesses: list of list of double
+ """
+
+ # The real cython constructor
+ def __cinit__(self, landmarks=None, witnesses=None):
+ if landmarks is not None and witnesses is not None:
+ self.thisptr = new Euclidean_strong_witness_complex_interface(landmarks, witnesses)
+
+ def __dealloc__(self):
+ if self.thisptr != NULL:
+ del self.thisptr
+
+ def __is_defined(self):
+ """Returns true if WitnessComplex pointer is not NULL.
+ """
+ return self.thisptr != NULL
+
+ def create_simplex_tree(self, max_alpha_square, limit_dimension = -1):
+ """
+ :param max_alpha_square: The maximum alpha square threshold the
+            simplices shall not exceed.
+        :type max_alpha_square: float
+        :returns: A simplex tree created from the strong witness complex.
+ :rtype: SimplexTree
+ """
+ stree = SimplexTree()
+ cdef intptr_t stree_int_ptr=stree.thisptr
+        if limit_dimension != -1:
+ self.thisptr.create_simplex_tree(<Simplex_tree_interface_full_featured*>stree_int_ptr,
+ max_alpha_square, limit_dimension)
+ else:
+ self.thisptr.create_simplex_tree(<Simplex_tree_interface_full_featured*>stree_int_ptr,
+ max_alpha_square)
+ return stree
+
+ def get_point(self, vertex):
+ """This function returns the point corresponding to a given vertex.
+
+ :param vertex: The vertex.
+ :type vertex: int.
+ :returns: The point.
+ :rtype: list of float
+ """
+ cdef vector[double] point = self.thisptr.get_point(vertex)
+ return point
+
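+# Usage sketch (comment only, following the OFF-file example script above):
+#   landmarks = gudhi.pick_n_random_points(points=witnesses, nb_points=20)
+#   wc = EuclideanStrongWitnessComplex(witnesses=witnesses, landmarks=landmarks)
+#   stree = wc.create_simplex_tree(max_alpha_square=1.0, limit_dimension=2)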
diff --git a/src/python/gudhi/euclidean_witness_complex.pyx b/src/python/gudhi/euclidean_witness_complex.pyx
new file mode 100644
index 00000000..2531919b
--- /dev/null
+++ b/src/python/gudhi/euclidean_witness_complex.pyx
@@ -0,0 +1,92 @@
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.utility cimport pair
+from libc.stdint cimport intptr_t
+
+from gudhi.simplex_tree cimport *
+from gudhi.simplex_tree import SimplexTree
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "GPL v3"
+
+cdef extern from "Euclidean_witness_complex_interface.h" namespace "Gudhi":
+ cdef cppclass Euclidean_witness_complex_interface "Gudhi::witness_complex::Euclidean_witness_complex_interface":
+ Euclidean_witness_complex_interface(vector[vector[double]] landmarks, vector[vector[double]] witnesses)
+ void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square)
+ void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square,
+ unsigned limit_dimension)
+ vector[double] get_point(unsigned vertex)
+
+# EuclideanWitnessComplex python interface
+cdef class EuclideanWitnessComplex:
+ """Constructs (weak) witness complex for given sets of witnesses and
+ landmarks in Euclidean space.
+ """
+
+ cdef Euclidean_witness_complex_interface * thisptr
+
+ # Fake constructor that does nothing but documenting the constructor
+ def __init__(self, landmarks=None, witnesses=None):
+        """EuclideanWitnessComplex constructor.
+
+ :param landmarks: A list of landmarks (in the point cloud).
+ :type landmarks: list of list of double
+
+ :param witnesses: The point cloud.
+ :type witnesses: list of list of double
+ """
+
+ # The real cython constructor
+ def __cinit__(self, landmarks=None, witnesses=None):
+ if landmarks is not None and witnesses is not None:
+ self.thisptr = new Euclidean_witness_complex_interface(landmarks, witnesses)
+
+ def __dealloc__(self):
+ if self.thisptr != NULL:
+ del self.thisptr
+
+ def __is_defined(self):
+ """Returns true if WitnessComplex pointer is not NULL.
+ """
+ return self.thisptr != NULL
+
+ def create_simplex_tree(self, max_alpha_square, limit_dimension = -1):
+ """
+ :param max_alpha_square: The maximum alpha square threshold the
+            simplices shall not exceed.
+        :type max_alpha_square: float
+        :returns: A simplex tree created from the witness complex.
+ :rtype: SimplexTree
+ """
+ stree = SimplexTree()
+ cdef intptr_t stree_int_ptr=stree.thisptr
+        if limit_dimension != -1:
+ self.thisptr.create_simplex_tree(<Simplex_tree_interface_full_featured*>stree_int_ptr,
+ max_alpha_square, limit_dimension)
+ else:
+ self.thisptr.create_simplex_tree(<Simplex_tree_interface_full_featured*>stree_int_ptr,
+ max_alpha_square)
+ return stree
+
+ def get_point(self, vertex):
+ """This function returns the point corresponding to a given vertex.
+
+ :param vertex: The vertex.
+ :type vertex: int.
+ :returns: The point.
+ :rtype: list of float
+ """
+ cdef vector[double] point = self.thisptr.get_point(vertex)
+ return point
+
diff --git a/src/python/gudhi/nerve_gic.pyx b/src/python/gudhi/nerve_gic.pyx
new file mode 100644
index 00000000..2b230b8c
--- /dev/null
+++ b/src/python/gudhi/nerve_gic.pyx
@@ -0,0 +1,412 @@
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.utility cimport pair
+from libcpp.string cimport string
+from libcpp cimport bool
+import os
+from libc.stdint cimport intptr_t
+
+from gudhi.simplex_tree cimport *
+from gudhi.simplex_tree import SimplexTree
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2018 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2018 Inria"
+__license__ = "GPL v3"
+
+cdef extern from "Nerve_gic_interface.h" namespace "Gudhi":
+ cdef cppclass Nerve_gic_interface "Gudhi::cover_complex::Nerve_gic_interface":
+ Nerve_gic_interface()
+ double compute_confidence_level_from_distance(double distance)
+ double compute_distance_from_confidence_level(double alpha)
+ void compute_distribution(int N)
+ double compute_p_value()
+ vector[pair[double, double]] compute_PD()
+ void find_simplices()
+ void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree)
+ bool read_point_cloud(string off_file_name)
+ double set_automatic_resolution()
+ void set_color_from_coordinate(int k)
+ void set_color_from_file(string color_file_name)
+ void set_color_from_range(vector[double] color)
+ void set_cover_from_file(string cover_file_name)
+ void set_cover_from_function()
+ void set_cover_from_Euclidean_Voronoi(int m)
+ void set_function_from_coordinate(int k)
+ void set_function_from_file(string func_file_name)
+ void set_function_from_range(vector[double] function)
+ void set_gain(double g)
+ double set_graph_from_automatic_euclidean_rips(int N)
+ void set_graph_from_file(string graph_file_name)
+ void set_graph_from_OFF()
+ void set_graph_from_euclidean_rips(double threshold)
+ void set_mask(int nodemask)
+ void set_resolution_with_interval_length(double resolution)
+ void set_resolution_with_interval_number(int resolution)
+ void set_subsampling(double constant, double power)
+ void set_type(string type)
+ void set_verbose(bool verbose)
+ vector[int] subpopulation(int c)
+ void write_info()
+ void plot_DOT()
+ void plot_OFF()
+ void set_point_cloud_from_range(vector[vector[double]] cloud)
+ void set_distances_from_range(vector[vector[double]] distance_matrix)
+
+# CoverComplex python interface
+cdef class CoverComplex:
+ """Cover complex data structure.
+
+ The data structure is a simplicial complex, representing a Graph Induced
+ simplicial Complex (GIC) or a Nerve, and whose simplices are computed with
+ a cover C of a point cloud P, which often comes from the preimages of
+ intervals covering the image of a function f defined on P. These intervals
+ are parameterized by their resolution (either their length or their number)
+ and their gain (percentage of overlap). To compute a GIC, one also needs a
+ graph G built on top of P, whose cliques with vertices belonging to
+ different elements of C correspond to the simplices of the GIC.
+ """
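+    # Typical Nerve construction (comment only, following nerve_of_a_covering.py):
+    #   read_point_cloud(off_file), set_type("Nerve"),
+    #   set_function_from_coordinate(k), set_graph_from_OFF(),
+    #   set_resolution_with_interval_number(r), set_gain(g),
+    #   set_cover_from_function(), find_simplices(), create_simplex_tree().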
+
+ cdef Nerve_gic_interface * thisptr
+
+ # Fake constructor that does nothing but documenting the constructor
+ def __init__(self):
+ """CoverComplex constructor.
+ """
+
+ # The real cython constructor
+ def __cinit__(self):
+ self.thisptr = new Nerve_gic_interface()
+
+ def __dealloc__(self):
+ if self.thisptr != NULL:
+ del self.thisptr
+
+ def __is_defined(self):
+ """Returns true if CoverComplex pointer is not NULL.
+ """
+ return self.thisptr != NULL
+
+ def set_point_cloud_from_range(self, cloud):
+ """ Reads and stores the input point cloud from a vector stored in memory.
+
+ :param cloud: Input vector containing the point cloud.
+ :type cloud: vector[vector[double]]
+ """
+ return self.thisptr.set_point_cloud_from_range(cloud)
+
+ def set_distances_from_range(self, distance_matrix):
+ """ Reads and stores the input distance matrix from a vector stored in memory.
+
+ :param distance_matrix: Input vector containing the distance matrix.
+ :type distance_matrix: vector[vector[double]]
+ """
+ return self.thisptr.set_distances_from_range(distance_matrix)
+
+ def compute_confidence_level_from_distance(self, distance):
+ """Computes the confidence level of a specific bottleneck distance
+ threshold.
+
+ :param distance: Bottleneck distance.
+ :type distance: double
+ :rtype: double
+ :returns: Confidence level.
+ """
+ return self.thisptr.compute_confidence_level_from_distance(distance)
+
+ def compute_distance_from_confidence_level(self, alpha):
+ """Computes the bottleneck distance threshold corresponding to a
+ specific confidence level.
+
+ :param alpha: Confidence level.
+ :type alpha: double
+ :rtype: double
+ :returns: Bottleneck distance.
+ """
+ return self.thisptr.compute_distance_from_confidence_level(alpha)
+
+ def compute_distribution(self, N=100):
+ """Computes bootstrapped distances distribution.
+
+ :param N: Number of bootstrap iterations (default value is 100).
+ :type N: int
+ """
+ self.thisptr.compute_distribution(N)
+
+ def compute_p_value(self):
+ """Computes the p-value, i.e. the opposite of the confidence level of
+ the largest bottleneck distance preserving the points in the
+ persistence diagram of the output simplicial complex.
+
+ :rtype: double
+ :returns: p-value.
+ """
+ return self.thisptr.compute_p_value()
+
+ def compute_PD(self):
+ """Computes the extended persistence diagram of the complex.
+ """
+ return self.thisptr.compute_PD()
+
+ def create_simplex_tree(self):
+ """
+ :returns: A simplex tree created from the Cover complex.
+ :rtype: SimplexTree
+ """
+ stree = SimplexTree()
+ cdef intptr_t stree_int_ptr=stree.thisptr
+ self.thisptr.create_simplex_tree(<Simplex_tree_interface_full_featured*>stree_int_ptr)
+ return stree
+
+ def find_simplices(self):
+ """Computes the simplices of the simplicial complex.
+ """
+ self.thisptr.find_simplices()
+
+ def read_point_cloud(self, off_file):
+ """Reads and stores the input point cloud from .(n)OFF file.
+
+ :param off_file: Name of the input .OFF or .nOFF file.
+ :type off_file: string
+ :rtype: bool
+ :returns: Read file status.
+ """
+ if os.path.isfile(off_file):
+ return self.thisptr.read_point_cloud(str.encode(off_file))
+ else:
+ print("file " + off_file + " not found.")
+ return False
+
+ def set_automatic_resolution(self):
+ """Computes the optimal length of intervals (i.e. the smallest interval
+ length avoiding discretization artifacts—see :cite:`Carriere17c`) for a
+ functional cover.
+
+ :rtype: double
+ :returns: reso interval length used to compute the cover.
+ """
+ return self.thisptr.set_automatic_resolution()
+
+ def set_color_from_coordinate(self, k=0):
+ """Computes the function used to color the nodes of the simplicial
+ complex from the k-th coordinate.
+
+ :param k: Coordinate to use (start at 0). Default value is 0.
+ :type k: int
+ """
+ return self.thisptr.set_color_from_coordinate(k)
+
+ def set_color_from_file(self, color_file_name):
+ """Computes the function used to color the nodes of the simplicial
+ complex from a file containing the function values.
+
+ :param color_file_name: Name of the input color file.
+ :type color_file_name: string
+ """
+ if os.path.isfile(color_file_name):
+ self.thisptr.set_color_from_file(str.encode(color_file_name))
+ else:
+ print("file " + color_file_name + " not found.")
+
+ def set_color_from_range(self, color):
+ """Computes the function used to color the nodes of the simplicial
+ complex from a vector stored in memory.
+
+ :param color: Input vector of values.
+ :type color: vector[double]
+ """
+ self.thisptr.set_color_from_range(color)
+
+ def set_cover_from_file(self, cover_file_name):
+ """Creates the cover C from a file containing the cover elements of
+ each point (the order has to be the same as in the input file!).
+
+ :param cover_file_name: Name of the input cover file.
+ :type cover_file_name: string
+ """
+ if os.path.isfile(cover_file_name):
+ self.thisptr.set_cover_from_file(str.encode(cover_file_name))
+ else:
+ print("file " + cover_file_name + " not found.")
+
+ def set_cover_from_function(self):
+ """Creates a cover C from the preimages of the function f.
+ """
+ self.thisptr.set_cover_from_function()
+
+ def set_cover_from_Voronoi(self, m=100):
+ """Creates the cover C from the Voronoï cells of a subsampling of the
+ point cloud.
+
+ :param m: Number of points in the subsample. Default value is 100.
+ :type m: int
+ """
+ self.thisptr.set_cover_from_Euclidean_Voronoi(m)
+
+ def set_function_from_coordinate(self, k):
+ """Creates the function f from the k-th coordinate of the point cloud.
+
+ :param k: Coordinate to use (start at 0).
+ :type k: int
+ """
+ self.thisptr.set_function_from_coordinate(k)
+
+ def set_function_from_file(self, func_file_name):
+ """Creates the function f from a file containing the function values.
+
+ :param func_file_name: Name of the input function file.
+ :type func_file_name: string
+ """
+ if os.path.isfile(func_file_name):
+ self.thisptr.set_function_from_file(str.encode(func_file_name))
+ else:
+ print("file " + func_file_name + " not found.")
+
+ def set_function_from_range(self, function):
+ """Creates the function f from a vector stored in memory.
+
+ :param function: Input vector of values.
+ :type function: vector[double]
+ """
+ self.thisptr.set_function_from_range(function)
+
+ def set_gain(self, g = 0.3):
+ """Sets a gain from a value stored in memory.
+
+ :param g: Gain (default value is 0.3).
+ :type g: double
+ """
+ self.thisptr.set_gain(g)
+
+ def set_graph_from_automatic_rips(self, N=100):
+ """Creates a graph G from a Rips complex whose threshold value is
+ automatically tuned with subsampling—see.
+
+ :param N: Number of subsampling iterations (the default value of 100 is
+ reasonable, but there is no guarantee on how to choose it).
+ :type N: int
+ :rtype: double
+ :returns: Delta threshold used for computing the Rips complex.
+ """
+ return self.thisptr.set_graph_from_automatic_euclidean_rips(N)
+
+ def set_graph_from_file(self, graph_file_name):
+ """Creates a graph G from a file containing the edges.
+
+ :param graph_file_name: Name of the input graph file. The graph file
+ contains one edge per line, each edge being represented by the IDs of
+ its two nodes.
+ :type graph_file_name: string
+ """
+ if os.path.isfile(graph_file_name):
+ self.thisptr.set_graph_from_file(str.encode(graph_file_name))
+ else:
+ print("file " + graph_file_name + " not found.")
+
+ def set_graph_from_OFF(self):
+ """Creates a graph G from the triangulation given by the input OFF
+ file.
+ """
+ self.thisptr.set_graph_from_OFF()
+
+ def set_graph_from_rips(self, threshold):
+ """Creates a graph G from a Rips complex.
+
+ :param threshold: Threshold value for the Rips complex.
+ :type threshold: double
+ """
+ self.thisptr.set_graph_from_euclidean_rips(threshold)
+
+ def set_mask(self, nodemask):
+ """Sets the mask, which is a threshold integer such that nodes in the
+ complex that contain a number of data points which is less than or
+ equal to this threshold are not displayed.
+
+ :param nodemask: Threshold.
+ :type nodemask: int
+ """
+ self.thisptr.set_mask(nodemask)
+
+ def set_resolution_with_interval_length(self, resolution):
+ """Sets a length of intervals from a value stored in memory.
+
+ :param resolution: Length of intervals.
+ :type resolution: double
+ """
+ self.thisptr.set_resolution_with_interval_length(resolution)
+
+ def set_resolution_with_interval_number(self, resolution):
+ """Sets a number of intervals from a value stored in memory.
+
+ :param resolution: Number of intervals.
+ :type resolution: int
+ """
+ self.thisptr.set_resolution_with_interval_number(resolution)
+
+ def set_subsampling(self, constant, power):
+ """Sets the constants used to subsample the data set. These constants
+ are explained in :cite:`Carriere17c`.
+
+ :param constant: Constant.
+ :type constant: double
+ :param power: Power.
+ :type power: double
+ """
+ self.thisptr.set_subsampling(constant, power)
+
+ def set_type(self, type):
+ """Specifies whether the type of the output simplicial complex.
+
+ :param type: either "GIC" or "Nerve".
+ :type type: string
+ """
+ self.thisptr.set_type(str.encode(type))
+
+ def set_verbose(self, verbose):
+ """Specifies whether the program should display information or not.
+
+ :param verbose: true = display info, false = do not display info.
+ :type verbose: boolean
+ """
+ self.thisptr.set_verbose(verbose)
+
+ def subpopulation(self, c):
+ """Returns the data subset corresponding to a specific node of the
+ created complex.
+
+ :param c: ID of the node.
+ :type c: int
+ :rtype: vector[int]
+ :returns: Vector of IDs of data points.
+ """
+ return self.thisptr.subpopulation(c)
+
+ def write_info(self):
+ """Creates a .txt file called SC.txt describing the 1-skeleton, which can
+ then be plotted with e.g. KeplerMapper.
+ """
+ return self.thisptr.write_info()
+
+ def plot_dot(self):
+ """Creates a .dot file called SC.dot for neato (part of the graphviz
+ package) once the simplicial complex is computed to get a visualization of
+ its 1-skeleton in a .pdf file.
+ """
+ return self.thisptr.plot_DOT()
+
+ def plot_off(self):
+ """Creates a .off file called SC.off for 3D visualization, which contains
+ the 2-skeleton of the GIC. This function assumes that the cover has been
+ computed with Voronoi. If data points are in 1D or 2D, the remaining
+ coordinates of the points embedded in 3D are set to 0.
+ """
+ return self.thisptr.plot_OFF()
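As a quick illustration of the CoverComplex API declared in this file, here is a minimal usage sketch for building a Nerve on a functional cover. It assumes the gudhi package is built from these sources and exposes CoverComplex at top level; the input file name 'points.off' is hypothetical.

    import gudhi

    cc = gudhi.CoverComplex()
    cc.set_type("Nerve")
    if cc.read_point_cloud("points.off"):        # hypothetical .OFF input
        cc.set_graph_from_OFF()                  # 1-skeleton from the OFF triangulation
        cc.set_function_from_coordinate(0)       # filter = first coordinate
        cc.set_resolution_with_interval_number(10)
        cc.set_gain(0.3)
        cc.set_cover_from_function()             # cover from preimages of the filter
        cc.find_simplices()
        stree = cc.create_simplex_tree()         # export as a gudhi.SimplexTree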
diff --git a/src/python/gudhi/off_reader.pyx b/src/python/gudhi/off_reader.pyx
new file mode 100644
index 00000000..9efd97ff
--- /dev/null
+++ b/src/python/gudhi/off_reader.pyx
@@ -0,0 +1,38 @@
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.string cimport string
+import os
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "MIT"
+
+cdef extern from "Off_reader_interface.h" namespace "Gudhi":
+ vector[vector[double]] read_points_from_OFF_file(string off_file)
+
+def read_off(off_file=''):
+ """Read points from OFF file.
+
+ :param off_file: An OFF file style name.
+ :type off_file: string
+
+ :returns: The point set.
+ :rtype: vector[vector[double]]
+ """
+ if off_file != '':
+ if os.path.isfile(off_file):
+ return read_points_from_OFF_file(str.encode(off_file))
+ else:
+ print("file " + off_file + " not found.")
+ return []
+
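A minimal sketch of the reader above, assuming the gudhi package is built from these sources and that a hypothetical 'points.off' file exists:

    import gudhi

    points = gudhi.read_off(off_file='points.off')
    print(points)   # list of coordinate lists, or [] if the file is missing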
diff --git a/src/python/gudhi/periodic_cubical_complex.pyx b/src/python/gudhi/periodic_cubical_complex.pyx
new file mode 100644
index 00000000..724fadd4
--- /dev/null
+++ b/src/python/gudhi/periodic_cubical_complex.pyx
@@ -0,0 +1,190 @@
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.utility cimport pair
+from libcpp.string cimport string
+from libcpp cimport bool
+import os
+
+from numpy import array as np_array
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "MIT"
+
+cdef extern from "Cubical_complex_interface.h" namespace "Gudhi":
+ cdef cppclass Periodic_cubical_complex_base_interface "Gudhi::Cubical_complex::Cubical_complex_interface<Gudhi::cubical_complex::Bitmap_cubical_complex_periodic_boundary_conditions_base<double>>":
+ Periodic_cubical_complex_base_interface(vector[unsigned] dimensions, vector[double] top_dimensional_cells, vector[bool] periodic_dimensions)
+ Periodic_cubical_complex_base_interface(string perseus_file)
+ int num_simplices()
+ int dimension()
+
+cdef extern from "Persistent_cohomology_interface.h" namespace "Gudhi":
+ cdef cppclass Periodic_cubical_complex_persistence_interface "Gudhi::Persistent_cohomology_interface<Gudhi::Cubical_complex::Cubical_complex_interface<Gudhi::cubical_complex::Bitmap_cubical_complex_periodic_boundary_conditions_base<double>>>":
+ Periodic_cubical_complex_persistence_interface(Periodic_cubical_complex_base_interface * st, bool persistence_dim_max)
+ vector[pair[int, pair[double, double]]] get_persistence(int homology_coeff_field, double min_persistence)
+ vector[int] betti_numbers()
+ vector[int] persistent_betti_numbers(double from_value, double to_value)
+ vector[pair[double,double]] intervals_in_dimension(int dimension)
+
+# PeriodicCubicalComplex python interface
+cdef class PeriodicCubicalComplex:
+ """The PeriodicCubicalComplex is an example of a structured complex useful
+ in computational mathematics (especially rigorous numerics) and image
+ analysis.
+ """
+ cdef Periodic_cubical_complex_base_interface * thisptr
+
+ cdef Periodic_cubical_complex_persistence_interface * pcohptr
+
+ # Fake constructor that does nothing but document the constructor
+ def __init__(self, dimensions=None, top_dimensional_cells=None,
+ periodic_dimensions=None, perseus_file=''):
+ """PeriodicCubicalComplex constructor from dimensions and
+ top_dimensional_cells or from a Perseus-style file name.
+
+ :param dimensions: A list containing the number of top dimensional cells in each dimension.
+ :type dimensions: list of int
+ :param top_dimensional_cells: A list of cell filtration values.
+ :type top_dimensional_cells: list of double
+ :param periodic_dimensions: A list of booleans indicating, for each dimension, whether it is periodic.
+ :type periodic_dimensions: list of boolean
+
+ Or
+
+ :param perseus_file: A Perseus-style file name.
+ :type perseus_file: string
+ """
+
+ # The real cython constructor
+ def __cinit__(self, dimensions=None, top_dimensional_cells=None,
+ periodic_dimensions=None, perseus_file=''):
+ if (dimensions is not None) and (top_dimensional_cells is not None) and (periodic_dimensions is not None) and (perseus_file == ''):
+ self.thisptr = new Periodic_cubical_complex_base_interface(dimensions, top_dimensional_cells, periodic_dimensions)
+ elif (dimensions is None) and (top_dimensional_cells is None) and (periodic_dimensions is None) and (perseus_file != ''):
+ if os.path.isfile(perseus_file):
+ self.thisptr = new Periodic_cubical_complex_base_interface(str.encode(perseus_file))
+ else:
+ print("file " + perseus_file + " not found.")
+ else:
+ print("CubicalComplex can be constructed from dimensions and "
+ "top_dimensional_cells or from a Perseus-style file name.")
+
+ def __dealloc__(self):
+ if self.thisptr != NULL:
+ del self.thisptr
+ if self.pcohptr != NULL:
+ del self.pcohptr
+
+ def __is_defined(self):
+ """Returns true if PeriodicCubicalComplex pointer is not NULL.
+ """
+ return self.thisptr != NULL
+
+ def __is_persistence_defined(self):
+ """Returns true if Persistence pointer is not NULL.
+ """
+ return self.pcohptr != NULL
+
+ def num_simplices(self):
+ """This function returns the number of all cubes in the complex.
+
+ :returns: int -- the number of all cubes in the complex.
+ """
+ return self.thisptr.num_simplices()
+
+ def dimension(self):
+ """This function returns the dimension of the complex.
+
+ :returns: int -- the complex dimension.
+ """
+ return self.thisptr.dimension()
+
+ def persistence(self, homology_coeff_field=11, min_persistence=0):
+ """This function returns the persistence of the complex.
+
+ :param homology_coeff_field: The homology coefficient field. Must be a
+ prime number. Default value is 11.
+ :type homology_coeff_field: int.
+ :param min_persistence: The minimum persistence value to take into
+ account (strictly greater than min_persistence). Default value is
+ 0.0.
+ Set min_persistence to -1.0 to see all values.
+ :type min_persistence: float.
+ :returns: list of pairs(dimension, pair(birth, death)) -- the
+ persistence of the complex.
+ """
+ if self.pcohptr != NULL:
+ del self.pcohptr
+ if self.thisptr != NULL:
+ self.pcohptr = new Periodic_cubical_complex_persistence_interface(self.thisptr, True)
+ cdef vector[pair[int, pair[double, double]]] persistence_result
+ if self.pcohptr != NULL:
+ persistence_result = self.pcohptr.get_persistence(homology_coeff_field, min_persistence)
+ return persistence_result
+
+ def betti_numbers(self):
+ """This function returns the Betti numbers of the complex.
+
+ :returns: list of int -- The Betti numbers ([B0, B1, ..., Bn]).
+
+ :note: betti_numbers function requires persistence function to be
+ launched first.
+
+ :note: betti_numbers function always returns [1, 0, 0, ...] as infinity
+ filtration cubes are not removed from the complex.
+ """
+ cdef vector[int] bn_result
+ if self.pcohptr != NULL:
+ bn_result = self.pcohptr.betti_numbers()
+ return bn_result
+
+ def persistent_betti_numbers(self, from_value, to_value):
+ """This function returns the persistent Betti numbers of the complex.
+
+ :param from_value: The persistence birth limit to be added in the
+ numbers (persistent birth <= from_value).
+ :type from_value: float.
+ :param to_value: The persistence death limit to be added in the
+ numbers (persistent death > to_value).
+ :type to_value: float.
+
+ :returns: list of int -- The persistent Betti numbers ([B0, B1, ...,
+ Bn]).
+
+ :note: persistent_betti_numbers function requires persistence
+ function to be launched first.
+ """
+ cdef vector[int] pbn_result
+ if self.pcohptr != NULL:
+ pbn_result = self.pcohptr.persistent_betti_numbers(<double>from_value, <double>to_value)
+ return pbn_result
+
+ def persistence_intervals_in_dimension(self, dimension):
+ """This function returns the persistence intervals of the complex in a
+ specific dimension.
+
+ :param dimension: The specific dimension.
+ :type dimension: int.
+ :returns: The persistence intervals.
+ :rtype: numpy array of dimension 2
+
+ :note: persistence_intervals_in_dimension function requires persistence function to be
+ launched first.
+ """
+ cdef vector[pair[double,double]] intervals_result
+ if self.pcohptr != NULL:
+ intervals_result = self.pcohptr.intervals_in_dimension(dimension)
+ else:
+ print("intervals_in_dim function requires persistence function"
+ " to be launched first.")
+ return np_array(intervals_result)
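A minimal sketch of the in-memory constructor above; the 3x3 filtration values are arbitrary, the first dimension is periodic and the second is not, and the gudhi package is assumed to be built from these sources.

    import gudhi

    pcc = gudhi.PeriodicCubicalComplex(
        dimensions=[3, 3],
        top_dimensional_cells=[0, 0, 0, 0, 1, 0, 0, 0, 0],
        periodic_dimensions=[True, False])
    diag = pcc.persistence(homology_coeff_field=2, min_persistence=0)
    print(pcc.betti_numbers())   # requires persistence() to be called first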
diff --git a/src/python/gudhi/persistence_graphical_tools.py b/src/python/gudhi/persistence_graphical_tools.py
new file mode 100644
index 00000000..181bc8ea
--- /dev/null
+++ b/src/python/gudhi/persistence_graphical_tools.py
@@ -0,0 +1,423 @@
+from os import path
+from math import isfinite
+import numpy as np
+
+from gudhi.reader_utils import read_persistence_intervals_in_dimension
+from gudhi.reader_utils import read_persistence_intervals_grouped_by_dimension
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau, Bertrand Michel
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau, Bertrand Michel"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "MIT"
+
+
+def __min_birth_max_death(persistence, band=0.0):
+ """This function returns (min_birth, max_death) from the persistence.
+
+ :param persistence: The persistence to plot.
+ :type persistence: list of tuples(dimension, tuple(birth, death)).
+ :param band: band
+ :type band: float.
+ :returns: (float, float) -- (min_birth, max_death).
+ """
+ # Look for minimum birth date and maximum death date for plot optimisation
+ max_death = 0
+ min_birth = persistence[0][1][0]
+ for interval in reversed(persistence):
+ if float(interval[1][1]) != float("inf"):
+ if float(interval[1][1]) > max_death:
+ max_death = float(interval[1][1])
+ if float(interval[1][0]) > max_death:
+ max_death = float(interval[1][0])
+ if float(interval[1][0]) < min_birth:
+ min_birth = float(interval[1][0])
+ if band > 0.0:
+ max_death += band
+ return (min_birth, max_death)
+
+
+"""
+Only 13 colors for the palette
+"""
+palette = [
+ "#ff0000",
+ "#00ff00",
+ "#0000ff",
+ "#00ffff",
+ "#ff00ff",
+ "#ffff00",
+ "#000000",
+ "#880000",
+ "#008800",
+ "#000088",
+ "#888800",
+ "#880088",
+ "#008888",
+]
+
+
+def plot_persistence_barcode(
+ persistence=[],
+ persistence_file="",
+ alpha=0.6,
+ max_intervals=1000,
+ max_barcodes=1000,
+ inf_delta=0.1,
+ legend=False,
+):
+ """This function plots the persistence bar code from persistence values list
+ or from a :doc:`persistence file <fileformats>`.
+
+ :param persistence: Persistence intervals values list grouped by dimension.
+ :type persistence: list of tuples(dimension, tuple(birth, death)).
+ :param persistence_file: A :doc:`persistence file <fileformats>` style name
+ (reset persistence if both are set).
+ :type persistence_file: string
+ :param alpha: barcode transparency value (0.0 transparent through 1.0
+ opaque - default is 0.6).
+ :type alpha: float.
+ :param max_intervals: maximal number of intervals to display.
+ Selected intervals are those with the longest life time. Set it
+ to 0 to see all. Default value is 1000.
+ :type max_intervals: int.
+ :param inf_delta: Infinity is placed at :code:`((max_death - min_birth) x
+ inf_delta)` above :code:`max_death` value. A reasonable value is
+ between 0.05 and 0.5 - default is 0.1.
+ :type inf_delta: float.
+ :param legend: Display the dimension color legend (default is False).
+ :type legend: boolean.
+ :returns: A matplotlib object containing a horizontal bar plot of persistence
+ (call its `show()` method to display it).
+ """
+ try:
+ import matplotlib.pyplot as plt
+ import matplotlib.patches as mpatches
+
+ if persistence_file != "":
+ if path.isfile(persistence_file):
+ # Reset persistence
+ persistence = []
+ diag = read_persistence_intervals_grouped_by_dimension(
+ persistence_file=persistence_file
+ )
+ for key in diag.keys():
+ for persistence_interval in diag[key]:
+ persistence.append((key, persistence_interval))
+ else:
+ print("file " + persistence_file + " not found.")
+ return None
+
+ if max_barcodes != 1000:
+ print("Deprecated parameter. It has been replaced by max_intervals")
+ max_intervals = max_barcodes
+
+ if max_intervals > 0 and max_intervals < len(persistence):
+ # Sort by life time, then takes only the max_intervals elements
+ persistence = sorted(
+ persistence,
+ key=lambda life_time: life_time[1][1] - life_time[1][0],
+ reverse=True,
+ )[:max_intervals]
+
+ persistence = sorted(persistence, key=lambda birth: birth[1][0])
+
+ (min_birth, max_death) = __min_birth_max_death(persistence)
+ ind = 0
+ delta = (max_death - min_birth) * inf_delta
+ # Replace infinity values with max_death + delta for bar code to be more
+ # readable
+ infinity = max_death + delta
+ axis_start = min_birth - delta
+ # Draw horizontal bars in loop
+ for interval in reversed(persistence):
+ if float(interval[1][1]) != float("inf"):
+ # Finite death case
+ plt.barh(
+ ind,
+ (interval[1][1] - interval[1][0]),
+ height=0.8,
+ left=interval[1][0],
+ alpha=alpha,
+ color=palette[interval[0]],
+ linewidth=0,
+ )
+ else:
+ # Infinite death case for diagram to be nicer
+ plt.barh(
+ ind,
+ (infinity - interval[1][0]),
+ height=0.8,
+ left=interval[1][0],
+ alpha=alpha,
+ color=palette[interval[0]],
+ linewidth=0,
+ )
+ ind = ind + 1
+
+ if legend:
+ dimensions = list(set(item[0] for item in persistence))
+ plt.legend(
+ handles=[
+ mpatches.Patch(color=palette[dim], label=str(dim))
+ for dim in dimensions
+ ],
+ loc="lower right",
+ )
+ plt.title("Persistence barcode")
+ # Ends plot on infinity value and starts a little bit before min_birth
+ plt.axis([axis_start, infinity, 0, ind])
+ return plt
+
+ except ImportError:
+ print("This function is not available, you may be missing matplotlib.")
+
+
+def plot_persistence_diagram(
+ persistence=[],
+ persistence_file="",
+ alpha=0.6,
+ band=0.0,
+ max_intervals=1000,
+ max_plots=1000,
+ inf_delta=0.1,
+ legend=False,
+):
+ """This function plots the persistence diagram from persistence values
+ list or from a :doc:`persistence file <fileformats>`.
+
+ :param persistence: Persistence intervals values list grouped by dimension.
+ :type persistence: list of tuples(dimension, tuple(birth, death)).
+ :param persistence_file: A :doc:`persistence file <fileformats>` style name
+ (reset persistence if both are set).
+ :type persistence_file: string
+ :param alpha: plot transparency value (0.0 transparent through 1.0
+ opaque - default is 0.6).
+ :type alpha: float.
+ :param band: band (not displayed if :math:`\leq` 0. - default is 0.)
+ :type band: float.
+ :param max_intervals: maximal number of intervals to display.
+ Selected intervals are those with the longest life time. Set it
+ to 0 to see all. Default value is 1000.
+ :type max_intervals: int.
+ :param inf_delta: Infinity is placed at :code:`((max_death - min_birth) x
+ inf_delta)` above :code:`max_death` value. A reasonable value is
+ between 0.05 and 0.5 - default is 0.1.
+ :type inf_delta: float.
+ :param legend: Display the dimension color legend (default is False).
+ :type legend: boolean.
+ :returns: A matplotlib object containing a diagram plot of persistence
+ (call its `show()` method to display it).
+ """
+ try:
+ import matplotlib.pyplot as plt
+ import matplotlib.patches as mpatches
+
+ if persistence_file != "":
+ if path.isfile(persistence_file):
+ # Reset persistence
+ persistence = []
+ diag = read_persistence_intervals_grouped_by_dimension(
+ persistence_file=persistence_file
+ )
+ for key in diag.keys():
+ for persistence_interval in diag[key]:
+ persistence.append((key, persistence_interval))
+ else:
+ print("file " + persistence_file + " not found.")
+ return None
+
+ if max_plots != 1000:
+ print("Deprecated parameter. It has been replaced by max_intervals")
+ max_intervals = max_plots
+
+ if max_intervals > 0 and max_intervals < len(persistence):
+ # Sort by life time, then takes only the max_intervals elements
+ persistence = sorted(
+ persistence,
+ key=lambda life_time: life_time[1][1] - life_time[1][0],
+ reverse=True,
+ )[:max_intervals]
+
+ (min_birth, max_death) = __min_birth_max_death(persistence, band)
+ delta = (max_death - min_birth) * inf_delta
+ # Replace infinity values with max_death + delta for diagram to be more
+ # readable
+ infinity = max_death + delta
+ axis_start = min_birth - delta
+
+ # line display of equation : birth = death
+ x = np.linspace(axis_start, infinity, 1000)
+ # infinity line and text
+ plt.plot(x, x, color="k", linewidth=1.0)
+ plt.plot(x, [infinity] * len(x), linewidth=1.0, color="k", alpha=alpha)
+ plt.text(axis_start, infinity, r"$\infty$", color="k", alpha=alpha)
+ # bootstrap band
+ if band > 0.0:
+ plt.fill_between(x, x, x + band, alpha=alpha, facecolor="red")
+
+ # Draw points in loop
+ for interval in reversed(persistence):
+ if float(interval[1][1]) != float("inf"):
+ # Finite death case
+ plt.scatter(
+ interval[1][0],
+ interval[1][1],
+ alpha=alpha,
+ color=palette[interval[0]],
+ )
+ else:
+ # Infinite death case for diagram to be nicer
+ plt.scatter(
+ interval[1][0], infinity, alpha=alpha, color=palette[interval[0]]
+ )
+
+ if legend:
+ dimensions = list(set(item[0] for item in persistence))
+ plt.legend(
+ handles=[
+ mpatches.Patch(color=palette[dim], label=str(dim))
+ for dim in dimensions
+ ]
+ )
+
+ plt.title("Persistence diagram")
+ plt.xlabel("Birth")
+ plt.ylabel("Death")
+ # Ends plot on infinity value and starts a little bit before min_birth
+ plt.axis([axis_start, infinity, axis_start, infinity + delta])
+ return plt
+
+ except ImportError:
+ print("This function is not available, you may be missing matplotlib.")
+
+
+def plot_persistence_density(
+ persistence=[],
+ persistence_file="",
+ nbins=300,
+ bw_method=None,
+ max_intervals=1000,
+ dimension=None,
+ cmap=None,
+ legend=False,
+):
+ """This function plots the persistence density from persistence
+ values list or from a :doc:`persistence file <fileformats>`. Be
+ aware that this function does not distinguish the dimension, it is
+ up to you to select the required one. This function also does not handle
+ degenerate data sets (scipy correlation matrix inversion can fail).
+
+ :param persistence: Persistence intervals values list grouped by dimension.
+ :type persistence: list of tuples(dimension, tuple(birth, death)).
+ :param persistence_file: A :doc:`persistence file <fileformats>`
+ style name (reset persistence if both are set).
+ :type persistence_file: string
+ :param nbins: Evaluate a gaussian kde on a regular grid of nbins x
+ nbins over data extents (default is 300)
+ :type nbins: int.
+ :param bw_method: The method used to calculate the estimator
+ bandwidth. This can be 'scott', 'silverman', a scalar constant
+ or a callable. If a scalar, this will be used directly as
+ kde.factor. If a callable, it should take a gaussian_kde
+ instance as only parameter and return a scalar. If None
+ (default), 'scott' is used. See
+ `scipy.stats.gaussian_kde documentation
+ <http://scipy.github.io/devdocs/generated/scipy.stats.gaussian_kde.html>`_
+ for more details.
+ :type bw_method: str, scalar or callable, optional.
+ :param max_intervals: maximal number of points used in the density
+ estimation.
+ Selected intervals are those with the longest life time. Set it
+ to 0 to see all. Default value is 1000.
+ :type max_intervals: int.
+ :param dimension: the dimension to be selected in the intervals
+ (default is None to mix all dimensions).
+ :type dimension: int.
+ :param cmap: A matplotlib colormap (default is
+ matplotlib.pyplot.cm.hot_r).
+ :type cmap: cf. matplotlib colormap.
+ :param legend: Display the color bar values (default is False).
+ :type legend: boolean.
+ :returns: A matplotlib object containing a diagram plot of persistence
+ (call its `show()` method to display it).
+ """
+ try:
+ import matplotlib.pyplot as plt
+ from scipy.stats import kde
+
+ if persistence_file != "":
+ if dimension is None:
+ # All dimension case
+ dimension = -1
+ if path.isfile(persistence_file):
+ persistence_dim = read_persistence_intervals_in_dimension(
+ persistence_file=persistence_file, only_this_dim=dimension
+ )
+ print(persistence_dim)
+ else:
+ print("file " + persistence_file + " not found.")
+ return None
+
+ if len(persistence) > 0:
+ persistence_dim = np.array(
+ [
+ (dim_interval[1][0], dim_interval[1][1])
+ for dim_interval in persistence
+ if (dim_interval[0] == dimension) or (dimension is None)
+ ]
+ )
+
+ persistence_dim = persistence_dim[np.isfinite(persistence_dim[:, 1])]
+ if max_intervals > 0 and max_intervals < len(persistence_dim):
+ # Sort by life time, then takes only the max_intervals elements
+ persistence_dim = np.array(
+ sorted(
+ persistence_dim,
+ key=lambda life_time: life_time[1] - life_time[0],
+ reverse=True,
+ )[:max_intervals]
+ )
+
+ # Set as numpy array birth and death (remove undefined values - inf and NaN)
+ birth = persistence_dim[:, 0]
+ death = persistence_dim[:, 1]
+
+ # line display of equation : birth = death
+ x = np.linspace(birth.min(), death.max(), 1000)
+ plt.plot(x, x, color="k", linewidth=1.0)
+
+ # Evaluate a gaussian kde on a regular grid of nbins x nbins over data extents
+ k = kde.gaussian_kde([birth, death], bw_method=bw_method)
+ xi, yi = np.mgrid[
+ birth.min() : birth.max() : nbins * 1j,
+ death.min() : death.max() : nbins * 1j,
+ ]
+ zi = k(np.vstack([xi.flatten(), yi.flatten()]))
+
+ # default cmap value cannot be done at argument definition level as matplotlib is not yet defined.
+ if cmap is None:
+ cmap = plt.cm.hot_r
+ # Make the plot
+ plt.pcolormesh(xi, yi, zi.reshape(xi.shape), cmap=cmap)
+
+ if legend:
+ plt.colorbar()
+
+ plt.title("Persistence density")
+ plt.xlabel("Birth")
+ plt.ylabel("Death")
+ return plt
+
+ except ImportError:
+ print(
+ "This function is not available, you may be missing matplotlib and/or scipy."
+ )
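A minimal plotting sketch for the helpers above, assuming matplotlib is installed and that these functions are re-exported at the gudhi package level; the persistence list below is hand-made and arbitrary.

    import gudhi

    # list of (dimension, (birth, death)) pairs, as returned by persistence()
    diag = [(0, (0.0, float('inf'))), (0, (0.5, 2.0)), (1, (1.0, 1.5))]
    plt = gudhi.plot_persistence_diagram(diag, legend=True)
    plt.show()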
diff --git a/src/python/gudhi/reader_utils.pyx b/src/python/gudhi/reader_utils.pyx
new file mode 100644
index 00000000..147fae71
--- /dev/null
+++ b/src/python/gudhi/reader_utils.pyx
@@ -0,0 +1,87 @@
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.string cimport string
+from libcpp.map cimport map
+from libcpp.pair cimport pair
+
+from os import path
+from numpy import array as np_array
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2017 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2017 Inria"
+__license__ = "MIT"
+
+cdef extern from "Reader_utils_interface.h" namespace "Gudhi":
+ vector[vector[double]] read_matrix_from_csv_file(string off_file, char separator)
+ map[int, vector[pair[double, double]]] read_pers_intervals_grouped_by_dimension(string filename)
+ vector[pair[double, double]] read_pers_intervals_in_dimension(string filename, int only_this_dim)
+
+def read_lower_triangular_matrix_from_csv_file(csv_file='', separator=';'):
+ """Read lower triangular matrix from a CSV style file.
+
+ :param csv_file: A CSV file style name.
+ :type csv_file: string
+ :param separator: The value separator in the CSV file. Default value is ';'
+ :type separator: char
+
+ :returns: The lower triangular matrix.
+ :rtype: vector[vector[double]]
+ """
+ if csv_file != '':
+ if path.isfile(csv_file):
+ return read_matrix_from_csv_file(str.encode(csv_file), ord(separator[0]))
+ print("file " + csv_file + " not set or not found.")
+ return []
+
+def read_persistence_intervals_grouped_by_dimension(persistence_file=''):
+ """Reads a file containing persistence intervals.
+ Each line might contain 2, 3 or 4 values: [[field] dimension] birth death
+ The return value is a `map[dim, vector[pair[birth, death]]]`
+ where `dim` is an `int`, `birth` a `double`, and `death` a `double`.
+ Note: the function does not check that birth <= death.
+
+ :param persistence_file: A persistence file style name.
+ :type persistence_file: string
+
+ :returns: The persistence pairs grouped by dimension.
+ :rtype: map[int, vector[pair[double, double]]]
+ """
+ if persistence_file != '':
+ if path.isfile(persistence_file):
+ return read_pers_intervals_grouped_by_dimension(str.encode(persistence_file))
+ print("file " + persistence_file + " not set or not found.")
+ return []
+
+def read_persistence_intervals_in_dimension(persistence_file='', only_this_dim=-1):
+ """Reads a file containing persistence intervals.
+ Each line of persistence_file might contain 2, 3 or 4 values:
+ [[field] dimension] birth death
+ Note: the function does not check that birth <= death.
+
+ :param persistence_file: A persistence file style name.
+ :type persistence_file: string
+ :param only_this_dim: The specific dimension. Default value is -1.
+ If `only_this_dim` = -1, dimension is ignored and all lines are returned.
+ If `only_this_dim` is >= 0, only the lines where dimension =
+ `only_this_dim` (or where dimension is not specified) are returned.
+ :type only_this_dim: int.
+
+ :returns: The persistence intervals.
+ :rtype: numpy array of dimension 2
+ """
+ if persistence_file != '':
+ if path.isfile(persistence_file):
+ return np_array(read_pers_intervals_in_dimension(str.encode(
+ persistence_file), only_this_dim))
+ print("file " + persistence_file + " not set or not found.")
+ return []
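A minimal sketch of the readers above, assuming the gudhi package is built from these sources and that a hypothetical 'diagram.pers' persistence file exists in the documented persistence file format:

    import gudhi

    # All intervals of dimension 1, as a 2-column numpy array (birth, death):
    dim1 = gudhi.read_persistence_intervals_in_dimension(
        persistence_file='diagram.pers', only_this_dim=1)

    # All intervals, grouped as {dimension: [(birth, death), ...]}:
    grouped = gudhi.read_persistence_intervals_grouped_by_dimension(
        persistence_file='diagram.pers')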
diff --git a/src/python/gudhi/rips_complex.pyx b/src/python/gudhi/rips_complex.pyx
new file mode 100644
index 00000000..f2cd6a8d
--- /dev/null
+++ b/src/python/gudhi/rips_complex.pyx
@@ -0,0 +1,103 @@
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.utility cimport pair
+from libcpp.string cimport string
+from libcpp cimport bool
+from libc.stdint cimport intptr_t
+
+from gudhi.simplex_tree cimport *
+from gudhi.simplex_tree import SimplexTree
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "MIT"
+
+cdef extern from "Rips_complex_interface.h" namespace "Gudhi":
+ cdef cppclass Rips_complex_interface "Gudhi::rips_complex::Rips_complex_interface":
+ Rips_complex_interface()
+ void init_points(vector[vector[double]] values, double threshold)
+ void init_matrix(vector[vector[double]] values, double threshold)
+ void init_points_sparse(vector[vector[double]] values, double threshold, double sparse)
+ void init_matrix_sparse(vector[vector[double]] values, double threshold, double sparse)
+ void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, int dim_max)
+
+# RipsComplex python interface
+cdef class RipsComplex:
+ """The data structure is a one skeleton graph, or Rips graph, containing
+ edges when the edge length is less or equal to a given threshold. Edge
+ length is computed from a user given point cloud with a given distance
+ function, or a distance matrix.
+ """
+
+ cdef Rips_complex_interface thisref
+
+ # Fake constructor that does nothing but document the constructor
+ def __init__(self, points=None, distance_matrix=None,
+ max_edge_length=float('inf'), sparse=None):
+ """RipsComplex constructor.
+
+ :param max_edge_length: Maximal edge length of the Rips graph.
+ :type max_edge_length: float
+
+ :param points: A list of points in d-Dimension.
+ :type points: list of list of double
+
+ Or
+
+ :param distance_matrix: A distance matrix (full square or lower
+ triangular).
+ :type distance_matrix: list of list of double
+
+ And in both cases
+
+ :param sparse: If this is not None, it switches to building a sparse
+ Rips and represents the approximation parameter epsilon.
+ :type sparse: float
+ """
+
+ # The real cython constructor
+ def __cinit__(self, points=None, distance_matrix=None,
+ max_edge_length=float('inf'), sparse=None):
+ if sparse is not None:
+ if distance_matrix is not None:
+ self.thisref.init_matrix_sparse(distance_matrix,
+ max_edge_length,
+ sparse)
+ else:
+ if points is None:
+ # Empty Rips construction
+ points=[]
+ self.thisref.init_points_sparse(points, max_edge_length, sparse)
+ else:
+ if distance_matrix is not None:
+ self.thisref.init_matrix(distance_matrix, max_edge_length)
+ else:
+ if points is None:
+ # Empty Rips construction
+ points=[]
+ self.thisref.init_points(points, max_edge_length)
+
+
+ def create_simplex_tree(self, max_dimension=1):
+ """
+ :param max_dimension: Expand the Rips graph until this given maximal
+ dimension.
+ :type max_dimension: int
+ :returns: A simplex tree created from the Rips complex.
+ :rtype: SimplexTree
+ """
+ stree = SimplexTree()
+ cdef intptr_t stree_int_ptr=stree.thisptr
+ self.thisref.create_simplex_tree(<Simplex_tree_interface_full_featured*>stree_int_ptr,
+ max_dimension)
+ return stree
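A minimal sketch of the two-step Rips construction above (the points are arbitrary; the gudhi package is assumed to be built from these sources):

    import gudhi

    points = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]]
    rips = gudhi.RipsComplex(points=points, max_edge_length=2.0)
    st = rips.create_simplex_tree(max_dimension=2)   # expand the graph up to triangles
    print(st.num_simplices())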
diff --git a/src/python/gudhi/simplex_tree.pxd b/src/python/gudhi/simplex_tree.pxd
new file mode 100644
index 00000000..5f86cfe2
--- /dev/null
+++ b/src/python/gudhi/simplex_tree.pxd
@@ -0,0 +1,56 @@
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.utility cimport pair
+from libcpp cimport bool
+from libcpp.string cimport string
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "MIT"
+
+cdef extern from "Simplex_tree_interface.h" namespace "Gudhi":
+ cdef cppclass Simplex_tree_options_full_featured:
+ pass
+
+ cdef cppclass Simplex_tree_interface_full_featured "Gudhi::Simplex_tree_interface<Gudhi::Simplex_tree_options_full_featured>":
+ Simplex_tree()
+ double simplex_filtration(vector[int] simplex)
+ void assign_simplex_filtration(vector[int] simplex, double filtration)
+ void initialize_filtration()
+ int num_vertices()
+ int num_simplices()
+ void set_dimension(int dimension)
+ int dimension()
+ int upper_bound_dimension()
+ bool find_simplex(vector[int] simplex)
+ bool insert_simplex_and_subfaces(vector[int] simplex,
+ double filtration)
+ vector[pair[vector[int], double]] get_filtration()
+ vector[pair[vector[int], double]] get_skeleton(int dimension)
+ vector[pair[vector[int], double]] get_star(vector[int] simplex)
+ vector[pair[vector[int], double]] get_cofaces(vector[int] simplex,
+ int dimension)
+ void expansion(int max_dim)
+ void remove_maximal_simplex(vector[int] simplex)
+ bool prune_above_filtration(double filtration)
+ bool make_filtration_non_decreasing()
+
+cdef extern from "Persistent_cohomology_interface.h" namespace "Gudhi":
+ cdef cppclass Simplex_tree_persistence_interface "Gudhi::Persistent_cohomology_interface<Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_full_featured>>":
+ Simplex_tree_persistence_interface(Simplex_tree_interface_full_featured * st, bool persistence_dim_max)
+ vector[pair[int, pair[double, double]]] get_persistence(int homology_coeff_field, double min_persistence)
+ vector[int] betti_numbers()
+ vector[int] persistent_betti_numbers(double from_value, double to_value)
+ vector[pair[double,double]] intervals_in_dimension(int dimension)
+ void write_output_diagram(string diagram_file_name)
+ vector[pair[vector[int], vector[int]]] persistence_pairs()
diff --git a/src/python/gudhi/simplex_tree.pyx b/src/python/gudhi/simplex_tree.pyx
new file mode 100644
index 00000000..9f490271
--- /dev/null
+++ b/src/python/gudhi/simplex_tree.pyx
@@ -0,0 +1,508 @@
+from libc.stdint cimport intptr_t
+from numpy import array as np_array
+cimport simplex_tree
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "MIT"
+
+# SimplexTree python interface
+cdef class SimplexTree:
+ """The simplex tree is an efficient and flexible data structure for
+ representing general (filtered) simplicial complexes. The data structure
+ is described in Jean-Daniel Boissonnat and Clément Maria. The Simplex
+ Tree: An Efficient Data Structure for General Simplicial Complexes.
+ Algorithmica, pages 1–22, 2014.
+
+ This class is a filtered version of the simplex tree, with keys and
+ non-contiguous vertices.
+ """
+ # unfortunately 'cdef public Simplex_tree_interface_full_featured* thisptr' is not possible
+ # Use intptr_t instead to cast the pointer
+ cdef public intptr_t thisptr
+
+ # Get the pointer casted as it should be
+ cdef Simplex_tree_interface_full_featured* get_ptr(self):
+ return <Simplex_tree_interface_full_featured*>(self.thisptr)
+
+ cdef Simplex_tree_persistence_interface * pcohptr
+
+ # Fake constructor that does nothing but document the constructor
+ def __init__(self):
+ """SimplexTree constructor.
+ """
+
+ # The real cython constructor
+ def __cinit__(self):
+ self.thisptr = <intptr_t>(new Simplex_tree_interface_full_featured())
+
+ def __dealloc__(self):
+ cdef Simplex_tree_interface_full_featured* ptr = self.get_ptr()
+ if ptr != NULL:
+ del ptr
+ if self.pcohptr != NULL:
+ del self.pcohptr
+
+ def __is_defined(self):
+ """Returns true if SimplexTree pointer is not NULL.
+ """
+ return self.get_ptr() != NULL
+
+ def __is_persistence_defined(self):
+ """Returns true if Persistence pointer is not NULL.
+ """
+ return self.pcohptr != NULL
+
+ def filtration(self, simplex):
+ """This function returns the filtration value for a given N-simplex in
+ this simplicial complex, or +infinity if it is not in the complex.
+
+ :param simplex: The N-simplex, represented by a list of vertices.
+ :type simplex: list of int.
+ :returns: The simplicial complex filtration value.
+ :rtype: float
+ """
+ return self.get_ptr().simplex_filtration(simplex)
+
+ def assign_filtration(self, simplex, filtration):
+ """This function assigns the simplicial complex filtration value for a
+ given N-simplex.
+
+ :param simplex: The N-simplex, represented by a list of vertices.
+ :type simplex: list of int.
+ :param filtration: The simplicial complex filtration value.
+ :type filtration: float
+ """
+ self.get_ptr().assign_simplex_filtration(simplex, filtration)
+
+ def initialize_filtration(self):
+ """This function initializes and sorts the simplicial complex
+ filtration vector.
+
+ .. note::
+
+ This function must be launched before
+ :func:`persistence()<gudhi.SimplexTree.persistence>`,
+ :func:`betti_numbers()<gudhi.SimplexTree.betti_numbers>`,
+ :func:`persistent_betti_numbers()<gudhi.SimplexTree.persistent_betti_numbers>`,
+ or :func:`get_filtration()<gudhi.SimplexTree.get_filtration>`
+ after :func:`inserting<gudhi.SimplexTree.insert>` or
+ :func:`removing<gudhi.SimplexTree.remove_maximal_simplex>`
+ simplices.
+ """
+ self.get_ptr().initialize_filtration()
+
+ def num_vertices(self):
+ """This function returns the number of vertices of the simplicial
+ complex.
+
+ :returns: The simplicial complex number of vertices.
+ :rtype: int
+ """
+ return self.get_ptr().num_vertices()
+
+ def num_simplices(self):
+ """This function returns the number of simplices of the simplicial
+ complex.
+
+ :returns: the simplicial complex number of simplices.
+ :rtype: int
+ """
+ return self.get_ptr().num_simplices()
+
+ def dimension(self):
+ """This function returns the dimension of the simplicial complex.
+
+ :returns: the simplicial complex dimension.
+ :rtype: int
+
+ .. note::
+
+ This function is not constant time because it can recompute
+ dimension if required (can be triggered by
+ :func:`remove_maximal_simplex()<gudhi.SimplexTree.remove_maximal_simplex>`
+ or
+ :func:`prune_above_filtration()<gudhi.SimplexTree.prune_above_filtration>`
+ methods).
+ """
+ return self.get_ptr().dimension()
+
+ def upper_bound_dimension(self):
+ """This function returns a valid dimension upper bound of the
+ simplicial complex.
+
+ :returns: an upper bound on the dimension of the simplicial complex.
+ :rtype: int
+ """
+ return self.get_ptr().upper_bound_dimension()
+
+ def set_dimension(self, dimension):
+ """This function sets the dimension of the simplicial complex.
+
+ :param dimension: The new dimension value.
+ :type dimension: int.
+
+ .. note::
+
+ This function must be used with caution because it disables
+ dimension recomputation when required
+ (this recomputation can be triggered by
+ :func:`remove_maximal_simplex()<gudhi.SimplexTree.remove_maximal_simplex>`
+ or
+ :func:`prune_above_filtration()<gudhi.SimplexTree.prune_above_filtration>`
+ ).
+ """
+ self.get_ptr().set_dimension(<int>dimension)
+
+ def find(self, simplex):
+ """This function returns if the N-simplex was found in the simplicial
+ complex or not.
+
+ :param simplex: The N-simplex to find, represented by a list of vertices.
+ :type simplex: list of int.
+ :returns: true if the simplex was found, false otherwise.
+ :rtype: bool
+ """
+ cdef vector[int] csimplex
+ for i in simplex:
+ csimplex.push_back(i)
+ return self.get_ptr().find_simplex(csimplex)
+
+ def insert(self, simplex, filtration=0.0):
+ """This function inserts the given N-simplex and its subfaces with the
+ given filtration value (default value is '0.0'). If some of those
+ simplices are already present with a higher filtration value, their
+ filtration value is lowered.
+
+ :param simplex: The N-simplex to insert, represented by a list of
+ vertices.
+ :type simplex: list of int.
+ :param filtration: The filtration value of the simplex.
+ :type filtration: float.
+ :returns: true if the simplex was not yet in the complex, false
+ otherwise (whatever its original filtration value).
+ :rtype: bool
+ """
+ cdef vector[int] csimplex
+ for i in simplex:
+ csimplex.push_back(i)
+ return self.get_ptr().insert_simplex_and_subfaces(csimplex,
+ <double>filtration)
+
+ def get_filtration(self):
+ """This function returns a list of all simplices with their given
+ filtration values.
+
+ :returns: The simplices sorted by increasing filtration values.
+ :rtype: list of tuples(simplex, filtration)
+ """
+ cdef vector[pair[vector[int], double]] filtration \
+ = self.get_ptr().get_filtration()
+ ct = []
+ for filtered_complex in filtration:
+ v = []
+ for vertex in filtered_complex.first:
+ v.append(vertex)
+ ct.append((v, filtered_complex.second))
+ return ct
+
+ def get_skeleton(self, dimension):
+ """This function returns the (simplices of the) skeleton of a maximum
+ given dimension.
+
+ :param dimension: The skeleton dimension value.
+ :type dimension: int.
+ :returns: The (simplices of the) skeleton of a maximum dimension.
+ :rtype: list of tuples(simplex, filtration)
+ """
+ cdef vector[pair[vector[int], double]] skeleton \
+ = self.get_ptr().get_skeleton(<int>dimension)
+ ct = []
+ for filtered_simplex in skeleton:
+ v = []
+ for vertex in filtered_simplex.first:
+ v.append(vertex)
+ ct.append((v, filtered_simplex.second))
+ return ct
+
+ def get_star(self, simplex):
+ """This function returns the star of a given N-simplex.
+
+ :param simplex: The N-simplex, represented by a list of vertices.
+ :type simplex: list of int.
+ :returns: The (simplices of the) star of a simplex.
+ :rtype: list of tuples(simplex, filtration)
+ """
+ cdef vector[int] csimplex
+ for i in simplex:
+ csimplex.push_back(i)
+ cdef vector[pair[vector[int], double]] star \
+ = self.get_ptr().get_star(csimplex)
+ ct = []
+ for filtered_simplex in star:
+ v = []
+ for vertex in filtered_simplex.first:
+ v.append(vertex)
+ ct.append((v, filtered_simplex.second))
+ return ct
+
+ def get_cofaces(self, simplex, codimension):
+ """This function returns the cofaces of a given N-simplex with a
+ given codimension.
+
+ :param simplex: The N-simplex, represented by a list of vertices.
+ :type simplex: list of int.
+ :param codimension: The codimension. If codimension = 0, all cofaces
+ are returned (equivalent of get_star function)
+ :type codimension: int.
+ :returns: The (simplices of the) cofaces of a simplex
+ :rtype: list of tuples(simplex, filtration)
+ """
+ cdef vector[int] csimplex
+ for i in simplex:
+ csimplex.push_back(i)
+ cdef vector[pair[vector[int], double]] cofaces \
+ = self.get_ptr().get_cofaces(csimplex, <int>codimension)
+ ct = []
+ for filtered_simplex in cofaces:
+ v = []
+ for vertex in filtered_simplex.first:
+ v.append(vertex)
+ ct.append((v, filtered_simplex.second))
+ return ct
+
+ def remove_maximal_simplex(self, simplex):
+ """This function removes a given maximal N-simplex from the simplicial
+ complex.
+
+ :param simplex: The N-simplex, represented by a list of vertices.
+ :type simplex: list of int.
+
+ .. note::
+
+ Be aware that removing a simplex shifts data in a flat_map
+ (:func:`initialize_filtration()<gudhi.SimplexTree.initialize_filtration>` has to be called afterwards).
+
+ .. note::
+
+ The dimension of the simplicial complex may be lower after calling
+ remove_maximal_simplex than it was before. However,
+ :func:`upper_bound_dimension()<gudhi.SimplexTree.upper_bound_dimension>`
+ method will return the old value, which
+ remains a valid upper bound. If you care, you can call
+ :func:`dimension()<gudhi.SimplexTree.dimension>`
+ to recompute the exact dimension.
+ """
+ self.get_ptr().remove_maximal_simplex(simplex)
+
+ def prune_above_filtration(self, filtration):
+ """Prune above filtration value given as parameter.
+
+ :param filtration: Maximum threshold value.
+ :type filtration: float.
+ :returns: The filtration modification information.
+ :rtype: bool
+
+
+ .. note::
+
+ Some simplex tree functions require the filtration to be valid.
+ prune_above_filtration function does not call
+ :func:`initialize_filtration()<gudhi.SimplexTree.initialize_filtration>`
+ but returns the filtration modification
+ information. If the complex has changed, please call
+ :func:`initialize_filtration()<gudhi.SimplexTree.initialize_filtration>`
+ to recompute it.
+
+ .. note::
+
+ Note that the dimension of the simplicial complex may be lower
+ after calling
+ :func:`prune_above_filtration()<gudhi.SimplexTree.prune_above_filtration>`
+ than it was before. However,
+ :func:`upper_bound_dimension()<gudhi.SimplexTree.upper_bound_dimension>`
+ will return the old value, which remains a
+ valid upper bound. If you care, you can call
+ :func:`dimension()<gudhi.SimplexTree.dimension>`
+ method to recompute the exact dimension.
+ """
+ return self.get_ptr().prune_above_filtration(filtration)
+
+ def expansion(self, max_dim):
+ """Expands the Simplex_tree containing only its one skeleton
+ until dimension max_dim.
+
+ The expanded simplicial complex until dimension :math:`d`
+ attached to a graph :math:`G` is the maximal simplicial complex of
+ dimension at most :math:`d` admitting the graph :math:`G` as
+ :math:`1`-skeleton.
+ The filtration value assigned to a simplex is the maximal filtration
+ value of one of its edges.
+
+ The Simplex_tree must contain no simplex of dimension bigger than
+ 1 when calling the method.
+
+ :param max_dim: The maximal dimension.
+ :type max_dim: int.
+ """
+ self.get_ptr().expansion(max_dim)
+
+ def make_filtration_non_decreasing(self):
+ """This function ensures that each simplex has a higher filtration
+ value than its faces by increasing the filtration values.
+
+ :returns: True if any filtration value was modified,
+ False if the filtration was already non-decreasing.
+ :rtype: bool
+
+
+ .. note::
+
+ Some simplex tree functions require the filtration to be valid.
+ make_filtration_non_decreasing function does not call
+ :func:`initialize_filtration()<gudhi.SimplexTree.initialize_filtration>`
+ but returns the filtration modification
+ information. If the complex has changed, please call
+ :func:`initialize_filtration()<gudhi.SimplexTree.initialize_filtration>`
+ to recompute it.
+ """
+ return self.get_ptr().make_filtration_non_decreasing()
+
+ def persistence(self, homology_coeff_field=11, min_persistence=0, persistence_dim_max = False):
+ """This function returns the persistence of the simplicial complex.
+
+ :param homology_coeff_field: The homology coefficient field. Must be a
+ prime number. Default value is 11.
+ :type homology_coeff_field: int.
+ :param min_persistence: The minimum persistence value to take into
+ account (strictly greater than min_persistence). Default value is
+ 0.0.
+ Set min_persistence to -1.0 to see all values.
+ :type min_persistence: float.
+ :param persistence_dim_max: If true, the persistent homology for the
+ maximal dimension in the complex is computed. If false, it is
+ ignored. Default is false.
+ :type persistence_dim_max: bool
+ :returns: The persistence of the simplicial complex.
+ :rtype: list of pairs(dimension, pair(birth, death))
+ """
+ if self.pcohptr != NULL:
+ del self.pcohptr
+ self.pcohptr = new Simplex_tree_persistence_interface(self.get_ptr(), persistence_dim_max)
+ cdef vector[pair[int, pair[double, double]]] persistence_result
+ if self.pcohptr != NULL:
+ persistence_result = self.pcohptr.get_persistence(homology_coeff_field, min_persistence)
+ return persistence_result
+
+ def betti_numbers(self):
+ """This function returns the Betti numbers of the simplicial complex.
+
+ :returns: The Betti numbers ([B0, B1, ..., Bn]).
+ :rtype: list of int
+
+ :note: betti_numbers function requires
+ :func:`persistence()<gudhi.SimplexTree.persistence>`
+ function to be launched first.
+ """
+ cdef vector[int] bn_result
+ if self.pcohptr != NULL:
+ bn_result = self.pcohptr.betti_numbers()
+ else:
+ print("betti_numbers function requires persistence function"
+ " to be launched first.")
+ return bn_result
+
+ def persistent_betti_numbers(self, from_value, to_value):
+ """This function returns the persistent Betti numbers of the
+ simplicial complex.
+
+ :param from_value: The persistence birth limit: only intervals with
+ birth <= from_value are counted.
+ :type from_value: float.
+ :param to_value: The persistence death limit: only intervals with
+ death > to_value are counted.
+ :type to_value: float.
+
+ :returns: The persistent Betti numbers ([B0, B1, ..., Bn]).
+ :rtype: list of int
+
+ :note: persistent_betti_numbers function requires
+ :func:`persistence()<gudhi.SimplexTree.persistence>`
+ function to be launched first.
+ """
+ cdef vector[int] pbn_result
+ if self.pcohptr != NULL:
+ pbn_result = self.pcohptr.persistent_betti_numbers(<double>from_value, <double>to_value)
+ else:
+ print("persistent_betti_numbers function requires persistence function"
+ " to be launched first.")
+ return pbn_result
+
+ def persistence_intervals_in_dimension(self, dimension):
+ """This function returns the persistence intervals of the simplicial
+ complex in a specific dimension.
+
+ :param dimension: The specific dimension.
+ :type dimension: int.
+ :returns: The persistence intervals.
+ :rtype: numpy array of dimension 2
+
+ :note: intervals_in_dim function requires
+ :func:`persistence()<gudhi.SimplexTree.persistence>`
+ function to be launched first.
+ """
+ cdef vector[pair[double,double]] intervals_result
+ if self.pcohptr != NULL:
+ intervals_result = self.pcohptr.intervals_in_dimension(dimension)
+ else:
+ print("intervals_in_dim function requires persistence function"
+ " to be launched first.")
+ return np_array(intervals_result)
+
+ def persistence_pairs(self):
+ """This function returns a list of persistence birth and death simplices pairs.
+
+ :returns: A list of persistence simplices intervals.
+ :rtype: list of pair of list of int
+
+ :note: persistence_pairs function requires
+ :func:`persistence()<gudhi.SimplexTree.persistence>`
+ function to be launched first.
+ """
+ cdef vector[pair[vector[int],vector[int]]] persistence_pairs_result
+ if self.pcohptr != NULL:
+ persistence_pairs_result = self.pcohptr.persistence_pairs()
+ else:
+ print("persistence_pairs function requires persistence function"
+ " to be launched first.")
+ return persistence_pairs_result
+
+ def write_persistence_diagram(self, persistence_file=''):
+ """This function writes the persistence intervals of the simplicial
+ complex in a user given file name.
+
+ :param persistence_file: The specific dimension.
+ :type persistence_file: string.
+
+ :note: intervals_in_dim function requires
+ :func:`persistence()<gudhi.SimplexTree.persistence>`
+ function to be launched first.
+ """
+ if self.pcohptr != NULL:
+ if persistence_file != '':
+ self.pcohptr.write_output_diagram(str.encode(persistence_file))
+ else:
+ print("persistence_file must be specified")
+ else:
+ print("intervals_in_dim function requires persistence function"
+ " to be launched first.")
diff --git a/src/python/gudhi/strong_witness_complex.pyx b/src/python/gudhi/strong_witness_complex.pyx
new file mode 100644
index 00000000..e757abea
--- /dev/null
+++ b/src/python/gudhi/strong_witness_complex.pyx
@@ -0,0 +1,78 @@
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.utility cimport pair
+from libc.stdint cimport intptr_t
+
+from gudhi.simplex_tree cimport *
+from gudhi.simplex_tree import SimplexTree
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "MIT"
+
+cdef extern from "Strong_witness_complex_interface.h" namespace "Gudhi":
+ cdef cppclass Strong_witness_complex_interface "Gudhi::witness_complex::Strong_witness_complex_interface":
+ Strong_witness_complex_interface(vector[vector[pair[size_t, double]]] nearest_landmark_table)
+ void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square)
+ void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square,
+ unsigned limit_dimension)
+
+# StrongWitnessComplex python interface
+cdef class StrongWitnessComplex:
+ """Constructs (strong) witness complex for a given table of nearest
+ landmarks with respect to witnesses.
+ """
+
+ cdef Strong_witness_complex_interface * thisptr
+
+ # Fake constructor that does nothing but documenting the constructor
+ def __init__(self, nearest_landmark_table=None):
+ """StrongWitnessComplex constructor.
+
+ :param nearest_landmark_table: A list of lists of nearest landmarks and their distances.
+ `nearest_landmark_table[w][k]==(l,d)` means that l is the k-th nearest landmark to
+ witness w, and d is the (squared) distance between l and w.
+ :type nearest_landmark_table: list of list of pair of int and float
+ """
+
+ # The real cython constructor
+ def __cinit__(self, nearest_landmark_table=None):
+ if nearest_landmark_table is not None:
+ self.thisptr = new Strong_witness_complex_interface(nearest_landmark_table)
+
+ def __dealloc__(self):
+ if self.thisptr != NULL:
+ del self.thisptr
+
+ def __is_defined(self):
+ """Returns true if StrongWitnessComplex pointer is not NULL.
+ """
+ return self.thisptr != NULL
+
+ def create_simplex_tree(self, max_alpha_square = float('inf'), limit_dimension = -1):
+ """
+ :param max_alpha_square: The maximum relaxation parameter.
+ Default is set to infinity.
+ :type max_alpha_square: float
+ :returns: A simplex tree created from the Delaunay Triangulation.
+ :rtype: SimplexTree
+ """
+ stree = SimplexTree()
+ cdef intptr_t stree_int_ptr=stree.thisptr
+ if limit_dimension != -1:
+ self.thisptr.create_simplex_tree(<Simplex_tree_interface_full_featured*>stree_int_ptr,
+ max_alpha_square, limit_dimension)
+ else:
+ self.thisptr.create_simplex_tree(<Simplex_tree_interface_full_featured*>stree_int_ptr,
+ max_alpha_square)
+ return stree
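A quick illustration of the constructor contract documented above (the nearest-landmark table is a made-up toy example; assumes gudhi is importable):

    import gudhi

    # nearest_landmark_table[w][k] == (l, d): l is the k-th nearest landmark
    # of witness w, at (squared) distance d.
    nearest_landmark_table = [
        [(0, 0.0), (1, 0.1), (2, 0.9)],
        [(1, 0.0), (2, 0.2), (0, 0.8)],
        [(2, 0.0), (0, 0.3), (1, 0.7)],
    ]
    swc = gudhi.StrongWitnessComplex(nearest_landmark_table=nearest_landmark_table)
    st = swc.create_simplex_tree(max_alpha_square=1.0, limit_dimension=2)
    print(st.num_vertices(), st.num_simplices())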
diff --git a/src/python/gudhi/subsampling.pyx b/src/python/gudhi/subsampling.pyx
new file mode 100644
index 00000000..1135c1fb
--- /dev/null
+++ b/src/python/gudhi/subsampling.pyx
@@ -0,0 +1,130 @@
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.string cimport string
+from libcpp cimport bool
+import os
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "GPL v3"
+
+cdef extern from "Subsampling_interface.h" namespace "Gudhi::subsampling":
+ vector[vector[double]] subsampling_n_farthest_points(vector[vector[double]] points, unsigned nb_points)
+ vector[vector[double]] subsampling_n_farthest_points(vector[vector[double]] points, unsigned nb_points, unsigned starting_point)
+ vector[vector[double]] subsampling_n_farthest_points_from_file(string off_file, unsigned nb_points)
+ vector[vector[double]] subsampling_n_farthest_points_from_file(string off_file, unsigned nb_points, unsigned starting_point)
+ vector[vector[double]] subsampling_n_random_points(vector[vector[double]] points, unsigned nb_points)
+ vector[vector[double]] subsampling_n_random_points_from_file(string off_file, unsigned nb_points)
+ vector[vector[double]] subsampling_sparsify_points(vector[vector[double]] points, double min_squared_dist)
+ vector[vector[double]] subsampling_sparsify_points_from_file(string off_file, double min_squared_dist)
+
+def choose_n_farthest_points(points=None, off_file='', nb_points=0, starting_point = ''):
+ """Subsample by a greedy strategy of iteratively adding the farthest point
+ from the current chosen point set to the subsampling.
+ The iteration starts with the landmark `starting_point`.
+
+ :param points: The input point set.
+ :type points: vector[vector[double]].
+
+ Or
+
+ :param off_file: Name of an OFF file.
+ :type off_file: string
+
+ :param nb_points: Number of points of the subsample.
+ :type nb_points: unsigned.
+ :param starting_point: The index of the point the iteration starts \
+ with (the first landmark). If not set, this index is chosen \
+ randomly.
+ :type starting_point: unsigned.
+ :returns: The subsample point set.
+ :rtype: vector[vector[double]]
+ """
+ if off_file != '':
+ if os.path.isfile(off_file):
+ if starting_point == '':
+ return subsampling_n_farthest_points_from_file(str.encode(off_file),
+ nb_points)
+ else:
+ return subsampling_n_farthest_points_from_file(str.encode(off_file),
+ nb_points,
+ starting_point)
+ else:
+ print("file " + off_file + " not found.")
+ else:
+ if points is None:
+ # Empty points
+ points=[]
+ if starting_point == '':
+ return subsampling_n_farthest_points(points, nb_points)
+ else:
+ return subsampling_n_farthest_points(points, nb_points,
+ starting_point)
+
+def pick_n_random_points(points=None, off_file='', nb_points=0):
+ """Subsample a point set by picking random vertices.
+
+ :param points: The input point set.
+ :type points: vector[vector[double]].
+
+ Or
+
+ :param off_file: Name of an OFF file.
+ :type off_file: string
+
+ :param nb_points: Number of points of the subsample.
+ :type nb_points: unsigned.
+ :returns: The subsample point set.
+ :rtype: vector[vector[double]]
+ """
+ if off_file != '':
+ if os.path.isfile(off_file):
+ return subsampling_n_random_points_from_file(str.encode(off_file),
+ nb_points)
+ else:
+ print("file " + off_file + " not found.")
+ else:
+ if points is None:
+ # Empty points
+ points=[]
+ return subsampling_n_random_points(points, nb_points)
+
+def sparsify_point_set(points=None, off_file='', min_squared_dist=0.0):
+ """Outputs a subset of the input points so that the squared distance
+ between any two points is greater than or equal to min_squared_dist.
+
+ :param points: The input point set.
+ :type points: vector[vector[double]].
+
+ Or
+
+ :param off_file: Name of an OFF file.
+ :type off_file: string
+
+ :param min_squared_dist: Minimum squared distance separating the output \
+ points.
+ :type min_squared_dist: float.
+ :returns: The subsample point set.
+ :rtype: vector[vector[double]]
+ """
+ if off_file != '':
+ if os.path.isfile(off_file):
+ return subsampling_sparsify_points_from_file(str.encode(off_file),
+ min_squared_dist)
+ else:
+ print("file " + off_file + " not found.")
+ else:
+ if points is None:
+ # Empty points
+ points=[]
+ return subsampling_sparsify_points(points, min_squared_dist)
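A minimal sketch of the three helpers defined above (assuming they are re-exported at the top level of the gudhi package, as in the released bindings; the point set is a toy example):

    import gudhi

    points = [[0.0, 0.0], [0.0, 1.0], [1.0, 0.0], [1.0, 1.0], [0.5, 0.5]]

    # Greedy farthest-point subsample, starting from the point of index 0.
    landmarks = gudhi.choose_n_farthest_points(points=points, nb_points=3, starting_point=0)
    # Uniformly random subsample.
    random_pts = gudhi.pick_n_random_points(points=points, nb_points=3)
    # Keep a subset whose pairwise squared distances are all >= 0.5.
    sparse_pts = gudhi.sparsify_point_set(points=points, min_squared_dist=0.5)
    print(landmarks, random_pts, sparse_pts)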
diff --git a/src/python/gudhi/tangential_complex.pyx b/src/python/gudhi/tangential_complex.pyx
new file mode 100644
index 00000000..3a945fe2
--- /dev/null
+++ b/src/python/gudhi/tangential_complex.pyx
@@ -0,0 +1,173 @@
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.utility cimport pair
+from libcpp.string cimport string
+from libcpp cimport bool
+from libc.stdint cimport intptr_t
+import os
+
+from gudhi.simplex_tree cimport *
+from gudhi.simplex_tree import SimplexTree
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "GPL v3"
+
+cdef extern from "Tangential_complex_interface.h" namespace "Gudhi":
+ cdef cppclass Tangential_complex_interface "Gudhi::tangential_complex::Tangential_complex_interface":
+ Tangential_complex_interface(int intrisic_dim, vector[vector[double]] points)
+ # bool from_file is a workaround for cython to find the correct signature
+ Tangential_complex_interface(int intrisic_dim, string off_file, bool from_file)
+ void compute_tangential_complex() except +
+ vector[double] get_point(unsigned vertex)
+ unsigned number_of_vertices()
+ unsigned number_of_simplices()
+ unsigned number_of_inconsistent_simplices()
+ unsigned number_of_inconsistent_stars()
+ void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree)
+ void fix_inconsistencies_using_perturbation(double max_perturb, double time_limit)
+ void set_max_squared_edge_length(double max_squared_edge_length)
+
+# TangentialComplex python interface
+cdef class TangentialComplex:
+ """The class Tangential_complex represents a tangential complex. After the
+ computation of the complex, an optional post-processing called perturbation
+ can be run to attempt to remove inconsistencies.
+ """
+
+ cdef Tangential_complex_interface * thisptr
+
+ # Fake constructor that does nothing but documenting the constructor
+ def __init__(self, intrisic_dim, points=None, off_file=''):
+ """TangentialComplex constructor.
+
+ :param intrisic_dim: Intrinsic dimension of the manifold.
+ :type intrisic_dim: integer
+
+ :param points: A list of points in d-Dimension.
+ :type points: list of list of double
+
+ Or
+
+ :param off_file: Name of an OFF file.
+ :type off_file: string
+ """
+
+ # The real cython constructor
+ def __cinit__(self, intrisic_dim, points=None, off_file=''):
+ if off_file != '':
+ if os.path.isfile(off_file):
+ self.thisptr = new Tangential_complex_interface(intrisic_dim, str.encode(off_file), True)
+ else:
+ print("file " + off_file + " not found.")
+ else:
+ if points is None:
+ # Empty tangential construction
+ points=[]
+ self.thisptr = new Tangential_complex_interface(intrisic_dim, points)
+
+
+ def __dealloc__(self):
+ if self.thisptr != NULL:
+ del self.thisptr
+
+ def __is_defined(self):
+ """Returns true if TangentialComplex pointer is not NULL.
+ """
+ return self.thisptr != NULL
+
+ def compute_tangential_complex(self):
+ """This function computes the tangential complex.
+
+ Raises:
+ ValueError: In debug mode, if the computed star dimension is too
+ low. Try to set a bigger maximal edge length value with
+ :func:`~gudhi.TangentialComplex.set_max_squared_edge_length`
+ if this happens.
+ """
+ self.thisptr.compute_tangential_complex()
+
+ def get_point(self, vertex):
+ """This function returns the point corresponding to a given vertex.
+
+ :param vertex: The vertex.
+ :type vertex: int.
+ :returns: The point.
+ :rtype: list of float
+ """
+ cdef vector[double] point = self.thisptr.get_point(vertex)
+ return point
+
+ def num_vertices(self):
+ """
+ :returns: The number of vertices.
+ :rtype: unsigned
+ """
+ return self.thisptr.number_of_vertices()
+
+ def num_simplices(self):
+ """
+ :returns: Total number of simplices in stars (including duplicates that appear in several stars).
+ :rtype: unsigned
+ """
+ return self.thisptr.number_of_simplices()
+
+ def num_inconsistent_simplices(self):
+ """
+ :returns: The number of inconsistent simplices.
+ :rtype: unsigned
+ """
+ return self.thisptr.number_of_inconsistent_simplices()
+
+ def num_inconsistent_stars(self):
+ """
+ :returns: The number of stars containing at least one inconsistent simplex.
+ :rtype: unsigned
+ """
+ return self.thisptr.number_of_inconsistent_stars()
+
+ def create_simplex_tree(self):
+ """Exports the complex into a simplex tree.
+
+ :returns: A simplex tree created from the complex.
+ :rtype: SimplexTree
+ """
+ stree = SimplexTree()
+ cdef intptr_t stree_int_ptr=stree.thisptr
+ self.thisptr.create_simplex_tree(<Simplex_tree_interface_full_featured*>stree_int_ptr)
+ return stree
+
+ def fix_inconsistencies_using_perturbation(self, max_perturb, time_limit=-1.0):
+ """Attempts to fix inconsistencies by perturbing the point positions.
+
+ :param max_perturb: Maximum length of the translations used by the
+ perturbation.
+ :type max_perturb: double
+ :param time_limit: Time limit in seconds. If -1, no time limit is set.
+ :type time_limit: double
+ """
+ self.thisptr.fix_inconsistencies_using_perturbation(max_perturb,
+ time_limit)
+
+ def set_max_squared_edge_length(self, max_squared_edge_length):
+ """Sets the maximal possible squared edge length for the edges in the
+ triangulations.
+
+ :param max_squared_edge_length: Maximal possible squared edge length.
+ :type max_squared_edge_length: double
+
+ If the maximal edge length value is too low
+ :func:`~gudhi.TangentialComplex.compute_tangential_complex`
+ will throw an exception in debug mode.
+ """
+ self.thisptr.set_max_squared_edge_length(max_squared_edge_length)
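A minimal sketch of the workflow described above, on a toy sample of a 1-dimensional manifold (assumes gudhi is importable; the perturbation parameters are illustrative):

    import gudhi

    # Four points sampled on a circle, a 1-dimensional manifold in the plane.
    points = [[1.0, 0.0], [0.0, 1.0], [-1.0, 0.0], [0.0, -1.0]]
    tc = gudhi.TangentialComplex(1, points=points)
    tc.compute_tangential_complex()
    print(tc.num_vertices(), tc.num_simplices(), tc.num_inconsistent_simplices())
    # Optional post-processing if inconsistencies remain.
    if tc.num_inconsistent_simplices() > 0:
        tc.fix_inconsistencies_using_perturbation(max_perturb=0.05, time_limit=10.0)
    st = tc.create_simplex_tree()
    print(st.num_simplices())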
diff --git a/src/python/gudhi/witness_complex.pyx b/src/python/gudhi/witness_complex.pyx
new file mode 100644
index 00000000..baa70b7a
--- /dev/null
+++ b/src/python/gudhi/witness_complex.pyx
@@ -0,0 +1,78 @@
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.utility cimport pair
+from libc.stdint cimport intptr_t
+
+from gudhi.simplex_tree cimport *
+from gudhi.simplex_tree import SimplexTree
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "MIT"
+
+cdef extern from "Witness_complex_interface.h" namespace "Gudhi":
+ cdef cppclass Witness_complex_interface "Gudhi::witness_complex::Witness_complex_interface":
+ Witness_complex_interface(vector[vector[pair[size_t, double]]] nearest_landmark_table)
+ void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square)
+ void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square,
+ unsigned limit_dimension)
+
+# WitnessComplex python interface
+cdef class WitnessComplex:
+ """Constructs (weak) witness complex for a given table of nearest landmarks
+ with respect to witnesses.
+ """
+
+ cdef Witness_complex_interface * thisptr
+
+ # Fake constructor that does nothing but documenting the constructor
+ def __init__(self, nearest_landmark_table=None):
+ """WitnessComplex constructor.
+
+ :param nearest_landmark_table: A list of lists of nearest landmarks and their distances.
+ `nearest_landmark_table[w][k]==(l,d)` means that l is the k-th nearest landmark to
+ witness w, and d is the (squared) distance between l and w.
+ :type nearest_landmark_table: list of list of pair of int and float
+ """
+
+ # The real cython constructor
+ def __cinit__(self, nearest_landmark_table=None):
+ if nearest_landmark_table is not None:
+ self.thisptr = new Witness_complex_interface(nearest_landmark_table)
+
+ def __dealloc__(self):
+ if self.thisptr != NULL:
+ del self.thisptr
+
+ def __is_defined(self):
+ """Returns true if WitnessComplex pointer is not NULL.
+ """
+ return self.thisptr != NULL
+
+ def create_simplex_tree(self, max_alpha_square = float('inf'), limit_dimension = -1):
+ """
+ :param max_alpha_square: The maximum relaxation parameter.
+ Default is set to infinity.
+ :type max_alpha_square: float
+ :returns: A simplex tree created from the Delaunay Triangulation.
+ :rtype: SimplexTree
+ """
+ stree = SimplexTree()
+ cdef intptr_t stree_int_ptr=stree.thisptr
+ if limit_dimension != -1:
+ self.thisptr.create_simplex_tree(<Simplex_tree_interface_full_featured*>stree_int_ptr,
+ max_alpha_square, limit_dimension)
+ else:
+ self.thisptr.create_simplex_tree(<Simplex_tree_interface_full_featured*>stree_int_ptr,
+ max_alpha_square)
+ return stree
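Usage mirrors StrongWitnessComplex above; a minimal sketch with a made-up two-witness table (assumes gudhi is importable):

    import gudhi

    nearest_landmark_table = [
        [(0, 0.0), (1, 0.1)],
        [(1, 0.0), (0, 0.2)],
    ]
    wc = gudhi.WitnessComplex(nearest_landmark_table=nearest_landmark_table)
    st = wc.create_simplex_tree(max_alpha_square=0.5)
    print(st.get_filtration())  # list of (simplex, filtration) pairs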
diff --git a/src/python/include/Alpha_complex_interface.h b/src/python/include/Alpha_complex_interface.h
new file mode 100644
index 00000000..b3553d32
--- /dev/null
+++ b/src/python/include/Alpha_complex_interface.h
@@ -0,0 +1,70 @@
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2016 Inria
+ *
+ * Modification(s):
+ * - YYYY/MM Author: Description of the modification
+ */
+
+#ifndef INCLUDE_ALPHA_COMPLEX_INTERFACE_H_
+#define INCLUDE_ALPHA_COMPLEX_INTERFACE_H_
+
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Alpha_complex.h>
+#include <CGAL/Epick_d.h>
+
+#include "Simplex_tree_interface.h"
+
+#include <iostream>
+#include <vector>
+#include <string>
+
+namespace Gudhi {
+
+namespace alpha_complex {
+
+class Alpha_complex_interface {
+ using Dynamic_kernel = CGAL::Epick_d< CGAL::Dynamic_dimension_tag >;
+ using Point_d = Dynamic_kernel::Point_d;
+
+ public:
+ Alpha_complex_interface(const std::vector<std::vector<double>>& points) {
+ alpha_complex_ = new Alpha_complex<Dynamic_kernel>(points);
+ }
+
+ Alpha_complex_interface(const std::string& off_file_name, bool from_file = true) {
+ alpha_complex_ = new Alpha_complex<Dynamic_kernel>(off_file_name);
+ }
+
+ ~Alpha_complex_interface() {
+ delete alpha_complex_;
+ }
+
+ std::vector<double> get_point(int vh) {
+ std::vector<double> vd;
+ try {
+ Point_d ph = alpha_complex_->get_point(vh);
+ for (auto coord = ph.cartesian_begin(); coord < ph.cartesian_end(); coord++)
+ vd.push_back(*coord);
+ } catch (std::out_of_range const&) {
+ // std::out_of_range is thrown in case not found. Other exceptions must be re-thrown
+ }
+ return vd;
+ }
+
+ void create_simplex_tree(Simplex_tree_interface<>* simplex_tree, double max_alpha_square) {
+ alpha_complex_->create_complex(*simplex_tree, max_alpha_square);
+ simplex_tree->initialize_filtration();
+ }
+
+ private:
+ Alpha_complex<Dynamic_kernel>* alpha_complex_;
+};
+
+} // namespace alpha_complex
+
+} // namespace Gudhi
+
+#endif // INCLUDE_ALPHA_COMPLEX_INTERFACE_H_
diff --git a/src/python/include/Bottleneck_distance_interface.h b/src/python/include/Bottleneck_distance_interface.h
new file mode 100644
index 00000000..a4f3eaf1
--- /dev/null
+++ b/src/python/include/Bottleneck_distance_interface.h
@@ -0,0 +1,41 @@
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2016 Inria
+ *
+ * Modification(s):
+ * - YYYY/MM Author: Description of the modification
+ */
+
+#ifndef INCLUDE_BOTTLENECK_DISTANCE_INTERFACE_H_
+#define INCLUDE_BOTTLENECK_DISTANCE_INTERFACE_H_
+
+#include <gudhi/Bottleneck.h>
+
+#include <iostream>
+#include <vector>
+#include <utility> // for std::pair
+
+namespace Gudhi {
+
+namespace persistence_diagram {
+
+ // bottleneck_distance is renamed so that the Python function can be called bottleneck_distance
+ double bottleneck(const std::vector<std::pair<double, double>>& diag1,
+ const std::vector<std::pair<double, double>>& diag2,
+ double e) {
+ return bottleneck_distance(diag1, diag2, e);
+ }
+
+ double bottleneck(const std::vector<std::pair<double, double>>& diag1,
+ const std::vector<std::pair<double, double>>& diag2) {
+ return bottleneck_distance(diag1, diag2);
+ }
+
+} // namespace persistence_diagram
+
+} // namespace Gudhi
+
+
+#endif // INCLUDE_BOTTLENECK_DISTANCE_INTERFACE_H_
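A minimal sketch of the Python call this wrapper ultimately backs (assumes gudhi is importable; the diagrams are toy data):

    import gudhi

    diag1 = [(2.7, 3.7), (9.6, 14.0), (34.2, 34.974)]
    diag2 = [(2.8, 4.45), (9.5, 14.1)]

    print(gudhi.bottleneck_distance(diag1, diag2))       # exact distance
    print(gudhi.bottleneck_distance(diag1, diag2, 0.1))  # additive approximation error 0.1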
diff --git a/src/python/include/Cubical_complex_interface.h b/src/python/include/Cubical_complex_interface.h
new file mode 100644
index 00000000..648598e1
--- /dev/null
+++ b/src/python/include/Cubical_complex_interface.h
@@ -0,0 +1,50 @@
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2016 Inria
+ *
+ * Modification(s):
+ * - YYYY/MM Author: Description of the modification
+ */
+
+#ifndef INCLUDE_CUBICAL_COMPLEX_INTERFACE_H_
+#define INCLUDE_CUBICAL_COMPLEX_INTERFACE_H_
+
+#include <gudhi/Bitmap_cubical_complex.h>
+#include <gudhi/Bitmap_cubical_complex_base.h>
+#include <gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h>
+
+#include <iostream>
+#include <vector>
+#include <string>
+
+namespace Gudhi {
+
+namespace cubical_complex {
+
+template<typename CubicalComplexOptions = Bitmap_cubical_complex_base<double>>
+class Cubical_complex_interface : public Bitmap_cubical_complex<CubicalComplexOptions> {
+ public:
+ Cubical_complex_interface(const std::vector<unsigned>& dimensions,
+ const std::vector<double>& top_dimensional_cells)
+ : Bitmap_cubical_complex<CubicalComplexOptions>(dimensions, top_dimensional_cells) {
+ }
+
+ Cubical_complex_interface(const std::vector<unsigned>& dimensions,
+ const std::vector<double>& top_dimensional_cells,
+ const std::vector<bool>& periodic_dimensions)
+ : Bitmap_cubical_complex<CubicalComplexOptions>(dimensions, top_dimensional_cells, periodic_dimensions) {
+ }
+
+ Cubical_complex_interface(const std::string& perseus_file)
+ : Bitmap_cubical_complex<CubicalComplexOptions>(perseus_file.c_str()) {
+ }
+};
+
+} // namespace cubical_complex
+
+} // namespace Gudhi
+
+#endif // INCLUDE_CUBICAL_COMPLEX_INTERFACE_H_
+
diff --git a/src/python/include/Euclidean_strong_witness_complex_interface.h b/src/python/include/Euclidean_strong_witness_complex_interface.h
new file mode 100644
index 00000000..c1c72737
--- /dev/null
+++ b/src/python/include/Euclidean_strong_witness_complex_interface.h
@@ -0,0 +1,81 @@
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2016 Inria
+ *
+ * Modification(s):
+ * - YYYY/MM Author: Description of the modification
+ */
+
+#ifndef INCLUDE_EUCLIDEAN_STRONG_WITNESS_COMPLEX_INTERFACE_H_
+#define INCLUDE_EUCLIDEAN_STRONG_WITNESS_COMPLEX_INTERFACE_H_
+
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Euclidean_strong_witness_complex.h>
+
+#include "Simplex_tree_interface.h"
+
+#include <CGAL/Epick_d.h>
+
+#include <vector>
+#include <utility> // std::pair
+#include <iostream>
+#include <cstddef>
+
+namespace Gudhi {
+
+namespace witness_complex {
+
+
+class Euclidean_strong_witness_complex_interface {
+ using Dynamic_kernel = CGAL::Epick_d< CGAL::Dynamic_dimension_tag >;
+ using Point_d = Dynamic_kernel::Point_d;
+
+ typedef typename Simplex_tree<>::Simplex_key Simplex_key;
+
+ public:
+ Euclidean_strong_witness_complex_interface(const std::vector<std::vector<double>>& landmarks,
+ const std::vector<std::vector<double>>& witnesses) {
+ landmarks_.reserve(landmarks.size());
+ for (auto& landmark : landmarks)
+ landmarks_.emplace_back(landmark.begin(), landmark.end());
+ witness_complex_ = new Euclidean_strong_witness_complex<Dynamic_kernel>(landmarks_, witnesses);
+ }
+
+ ~Euclidean_strong_witness_complex_interface() {
+ delete witness_complex_;
+ }
+
+ void create_simplex_tree(Gudhi::Simplex_tree<>* simplex_tree, double max_alpha_square,
+ std::size_t limit_dimension) {
+ witness_complex_->create_complex(*simplex_tree, max_alpha_square, limit_dimension);
+ simplex_tree->initialize_filtration();
+ }
+
+ void create_simplex_tree(Gudhi::Simplex_tree<>* simplex_tree, double max_alpha_square) {
+ witness_complex_->create_complex(*simplex_tree, max_alpha_square);
+ simplex_tree->initialize_filtration();
+ }
+
+ std::vector<double> get_point(unsigned vh) {
+ std::vector<double> vd;
+ if (vh < landmarks_.size()) {
+ Point_d ph = witness_complex_->get_point(vh);
+ for (auto coord = ph.cartesian_begin(); coord < ph.cartesian_end(); coord++)
+ vd.push_back(*coord);
+ }
+ return vd;
+ }
+
+ private:
+ std::vector<Point_d> landmarks_;
+ Euclidean_strong_witness_complex<Dynamic_kernel>* witness_complex_;
+};
+
+} // namespace witness_complex
+
+} // namespace Gudhi
+
+#endif // INCLUDE_EUCLIDEAN_STRONG_WITNESS_COMPLEX_INTERFACE_H_
+
diff --git a/src/python/include/Euclidean_witness_complex_interface.h b/src/python/include/Euclidean_witness_complex_interface.h
new file mode 100644
index 00000000..5d7dbdc2
--- /dev/null
+++ b/src/python/include/Euclidean_witness_complex_interface.h
@@ -0,0 +1,80 @@
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2016 Inria
+ *
+ * Modification(s):
+ * - YYYY/MM Author: Description of the modification
+ */
+
+#ifndef INCLUDE_EUCLIDEAN_WITNESS_COMPLEX_INTERFACE_H_
+#define INCLUDE_EUCLIDEAN_WITNESS_COMPLEX_INTERFACE_H_
+
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Euclidean_witness_complex.h>
+
+#include "Simplex_tree_interface.h"
+
+#include <CGAL/Epick_d.h>
+
+#include <vector>
+#include <utility> // std::pair
+#include <iostream>
+#include <cstddef>
+
+namespace Gudhi {
+
+namespace witness_complex {
+
+
+class Euclidean_witness_complex_interface {
+ using Dynamic_kernel = CGAL::Epick_d< CGAL::Dynamic_dimension_tag >;
+ using Point_d = Dynamic_kernel::Point_d;
+
+ typedef typename Simplex_tree<>::Simplex_key Simplex_key;
+
+ public:
+ Euclidean_witness_complex_interface(const std::vector<std::vector<double>>& landmarks,
+ const std::vector<std::vector<double>>& witnesses) {
+ landmarks_.reserve(landmarks.size());
+ for (auto& landmark : landmarks)
+ landmarks_.emplace_back(landmark.begin(), landmark.end());
+ witness_complex_ = new Euclidean_witness_complex<Dynamic_kernel>(landmarks_, witnesses);
+ }
+
+ ~Euclidean_witness_complex_interface() {
+ delete witness_complex_;
+ }
+
+ void create_simplex_tree(Gudhi::Simplex_tree<>* simplex_tree, double max_alpha_square, std::size_t limit_dimension) {
+ witness_complex_->create_complex(*simplex_tree, max_alpha_square, limit_dimension);
+ simplex_tree->initialize_filtration();
+ }
+
+ void create_simplex_tree(Gudhi::Simplex_tree<>* simplex_tree, double max_alpha_square) {
+ witness_complex_->create_complex(*simplex_tree, max_alpha_square);
+ simplex_tree->initialize_filtration();
+ }
+
+ std::vector<double> get_point(unsigned vh) {
+ std::vector<double> vd;
+ if (vh < landmarks_.size()) {
+ Point_d ph = witness_complex_->get_point(vh);
+ for (auto coord = ph.cartesian_begin(); coord < ph.cartesian_end(); coord++)
+ vd.push_back(*coord);
+ }
+ return vd;
+ }
+
+ private:
+ std::vector<Point_d> landmarks_;
+ Euclidean_witness_complex<Dynamic_kernel>* witness_complex_;
+};
+
+} // namespace witness_complex
+
+} // namespace Gudhi
+
+#endif // INCLUDE_EUCLIDEAN_WITNESS_COMPLEX_INTERFACE_H_
+
diff --git a/src/python/include/Nerve_gic_interface.h b/src/python/include/Nerve_gic_interface.h
new file mode 100644
index 00000000..5e7f8ae6
--- /dev/null
+++ b/src/python/include/Nerve_gic_interface.h
@@ -0,0 +1,49 @@
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2018 Inria
+ *
+ * Modification(s):
+ * - YYYY/MM Author: Description of the modification
+ */
+
+#ifndef INCLUDE_NERVE_GIC_INTERFACE_H_
+#define INCLUDE_NERVE_GIC_INTERFACE_H_
+
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/distance_functions.h>
+#include <gudhi/GIC.h>
+
+#include "Simplex_tree_interface.h"
+
+#include <iostream>
+#include <vector>
+#include <string>
+
+namespace Gudhi {
+
+namespace cover_complex {
+
+class Nerve_gic_interface : public Cover_complex<std::vector<double>> {
+ public:
+ void create_simplex_tree(Simplex_tree_interface<>* simplex_tree) {
+ create_complex(*simplex_tree);
+ simplex_tree->initialize_filtration();
+ }
+ void set_cover_from_Euclidean_Voronoi(int m) {
+ set_cover_from_Voronoi(Gudhi::Euclidean_distance(), m);
+ }
+ double set_graph_from_automatic_euclidean_rips(int N) {
+ return set_graph_from_automatic_rips(Gudhi::Euclidean_distance(), N);
+ }
+ void set_graph_from_euclidean_rips(double threshold) {
+ set_graph_from_rips(threshold, Gudhi::Euclidean_distance());
+ }
+};
+
+} // namespace cover_complex
+
+} // namespace Gudhi
+
+#endif // INCLUDE_NERVE_GIC_INTERFACE_H_
diff --git a/src/python/include/Off_reader_interface.h b/src/python/include/Off_reader_interface.h
new file mode 100644
index 00000000..e6e1f931
--- /dev/null
+++ b/src/python/include/Off_reader_interface.h
@@ -0,0 +1,30 @@
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2016 Inria
+ *
+ * Modification(s):
+ * - YYYY/MM Author: Description of the modification
+ */
+
+#ifndef INCLUDE_OFF_READER_INTERFACE_H_
+#define INCLUDE_OFF_READER_INTERFACE_H_
+
+#include <gudhi/Points_off_io.h>
+
+#include <iostream>
+#include <vector>
+#include <string>
+
+namespace Gudhi {
+
+std::vector<std::vector<double>> read_points_from_OFF_file(const std::string& off_file) {
+ Gudhi::Points_off_reader<std::vector<double>> off_reader(off_file);
+ return off_reader.get_point_cloud();
+}
+
+} // namespace Gudhi
+
+#endif // INCLUDE_OFF_READER_INTERFACE_H_
+
diff --git a/src/python/include/Persistent_cohomology_interface.h b/src/python/include/Persistent_cohomology_interface.h
new file mode 100644
index 00000000..8c79e6f3
--- /dev/null
+++ b/src/python/include/Persistent_cohomology_interface.h
@@ -0,0 +1,109 @@
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2016 Inria
+ *
+ * Modification(s):
+ * - YYYY/MM Author: Description of the modification
+ */
+
+#ifndef INCLUDE_PERSISTENT_COHOMOLOGY_INTERFACE_H_
+#define INCLUDE_PERSISTENT_COHOMOLOGY_INTERFACE_H_
+
+#include <gudhi/Persistent_cohomology.h>
+
+#include <vector>
+#include <utility> // for std::pair
+#include <algorithm> // for sort
+
+namespace Gudhi {
+
+template<class FilteredComplex>
+class Persistent_cohomology_interface : public
+persistent_cohomology::Persistent_cohomology<FilteredComplex, persistent_cohomology::Field_Zp> {
+ private:
+ /*
+ * Compare two intervals by dimension, then by length.
+ */
+ struct cmp_intervals_by_dim_then_length {
+ explicit cmp_intervals_by_dim_then_length(FilteredComplex * sc)
+ : sc_(sc) { }
+
+ template<typename Persistent_interval>
+ bool operator()(const Persistent_interval & p1, const Persistent_interval & p2) {
+ if (sc_->dimension(get < 0 > (p1)) == sc_->dimension(get < 0 > (p2)))
+ return (sc_->filtration(get < 1 > (p1)) - sc_->filtration(get < 0 > (p1))
+ > sc_->filtration(get < 1 > (p2)) - sc_->filtration(get < 0 > (p2)));
+ else
+ return (sc_->dimension(get < 0 > (p1)) > sc_->dimension(get < 0 > (p2)));
+ }
+ FilteredComplex* sc_;
+ };
+
+ public:
+ Persistent_cohomology_interface(FilteredComplex* stptr)
+ : persistent_cohomology::Persistent_cohomology<FilteredComplex, persistent_cohomology::Field_Zp>(*stptr),
+ stptr_(stptr) { }
+
+ Persistent_cohomology_interface(FilteredComplex* stptr, bool persistence_dim_max)
+ : persistent_cohomology::Persistent_cohomology<FilteredComplex,
+ persistent_cohomology::Field_Zp>(*stptr, persistence_dim_max),
+ stptr_(stptr) { }
+
+ std::vector<std::pair<int, std::pair<double, double>>> get_persistence(int homology_coeff_field,
+ double min_persistence) {
+ persistent_cohomology::Persistent_cohomology<FilteredComplex,
+ persistent_cohomology::Field_Zp>::init_coefficients(homology_coeff_field);
+ persistent_cohomology::Persistent_cohomology<FilteredComplex,
+ persistent_cohomology::Field_Zp>::compute_persistent_cohomology(min_persistence);
+
+ // Custom sort and output persistence
+ cmp_intervals_by_dim_then_length cmp(stptr_);
+ auto persistent_pairs = persistent_cohomology::Persistent_cohomology<FilteredComplex,
+ persistent_cohomology::Field_Zp>::get_persistent_pairs();
+ std::sort(std::begin(persistent_pairs), std::end(persistent_pairs), cmp);
+
+ std::vector<std::pair<int, std::pair<double, double>>> persistence;
+ for (auto pair : persistent_pairs) {
+ persistence.push_back(std::make_pair(stptr_->dimension(get<0>(pair)),
+ std::make_pair(stptr_->filtration(get<0>(pair)),
+ stptr_->filtration(get<1>(pair)))));
+ }
+ return persistence;
+ }
+
+ std::vector<std::pair<std::vector<int>, std::vector<int>>> persistence_pairs() {
+ auto pairs = persistent_cohomology::Persistent_cohomology<FilteredComplex,
+ persistent_cohomology::Field_Zp>::get_persistent_pairs();
+
+ std::vector<std::pair<std::vector<int>, std::vector<int>>> persistence_pairs;
+ persistence_pairs.reserve(pairs.size());
+ for (auto pair : pairs) {
+ std::vector<int> birth;
+ if (get<0>(pair) != stptr_->null_simplex()) {
+ for (auto vertex : stptr_->simplex_vertex_range(get<0>(pair))) {
+ birth.push_back(vertex);
+ }
+ }
+
+ std::vector<int> death;
+ if (get<1>(pair) != stptr_->null_simplex()) {
+ for (auto vertex : stptr_->simplex_vertex_range(get<1>(pair))) {
+ death.push_back(vertex);
+ }
+ }
+
+ persistence_pairs.push_back(std::make_pair(birth, death));
+ }
+ return persistence_pairs;
+ }
+
+ private:
+ // Pointer to the underlying filtered complex (not owned by this interface)
+ FilteredComplex* stptr_;
+};
+
+} // namespace Gudhi
+
+#endif // INCLUDE_PERSISTENT_COHOMOLOGY_INTERFACE_H_
diff --git a/src/python/include/Reader_utils_interface.h b/src/python/include/Reader_utils_interface.h
new file mode 100644
index 00000000..5f0deb87
--- /dev/null
+++ b/src/python/include/Reader_utils_interface.h
@@ -0,0 +1,44 @@
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2017 Inria
+ *
+ * Modification(s):
+ * - YYYY/MM Author: Description of the modification
+ */
+
+#ifndef INCLUDE_READER_UTILS_INTERFACE_H_
+#define INCLUDE_READER_UTILS_INTERFACE_H_
+
+#include <gudhi/reader_utils.h>
+
+#include <iostream>
+#include <vector>
+#include <string>
+#include <map>
+#include <utility> // for pair<>
+
+namespace Gudhi {
+
+// Redefine functions under a different name so that the original name can be used in the Python version.
+std::vector<std::vector<double>> read_matrix_from_csv_file(const std::string& filename,
+ const char separator = ';') {
+ return read_lower_triangular_matrix_from_csv_file<double>(filename, separator);
+}
+
+inline std::map<int, std::vector<std::pair<double, double>>>
+ read_pers_intervals_grouped_by_dimension(std::string const& filename) {
+ return read_persistence_intervals_grouped_by_dimension(filename);
+}
+
+inline std::vector<std::pair<double, double>>
+ read_pers_intervals_in_dimension(std::string const& filename, int only_this_dim = -1) {
+ return read_persistence_intervals_in_dimension(filename, only_this_dim);
+}
+
+
+} // namespace Gudhi
+
+
+#endif // INCLUDE_READER_UTILS_INTERFACE_H_
diff --git a/src/python/include/Rips_complex_interface.h b/src/python/include/Rips_complex_interface.h
new file mode 100644
index 00000000..a66b0e5b
--- /dev/null
+++ b/src/python/include/Rips_complex_interface.h
@@ -0,0 +1,70 @@
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2016 Inria
+ *
+ * Modification(s):
+ * - YYYY/MM Author: Description of the modification
+ */
+
+#ifndef INCLUDE_RIPS_COMPLEX_INTERFACE_H_
+#define INCLUDE_RIPS_COMPLEX_INTERFACE_H_
+
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Rips_complex.h>
+#include <gudhi/Sparse_rips_complex.h>
+#include <gudhi/distance_functions.h>
+
+#include <boost/optional.hpp>
+
+#include "Simplex_tree_interface.h"
+
+#include <iostream>
+#include <vector>
+#include <utility> // std::pair
+#include <string>
+
+namespace Gudhi {
+
+namespace rips_complex {
+
+class Rips_complex_interface {
+ using Point_d = std::vector<double>;
+ using Distance_matrix = std::vector<std::vector<Simplex_tree_interface<>::Filtration_value>>;
+
+ public:
+ void init_points(const std::vector<std::vector<double>>& points, double threshold) {
+ rips_complex_.emplace(points, threshold, Gudhi::Euclidean_distance());
+ }
+ void init_matrix(const std::vector<std::vector<double>>& matrix, double threshold) {
+ rips_complex_.emplace(matrix, threshold);
+ }
+
+ void init_points_sparse(const std::vector<std::vector<double>>& points, double threshold, double epsilon) {
+ sparse_rips_complex_.emplace(points, Gudhi::Euclidean_distance(), epsilon, -std::numeric_limits<double>::infinity(), threshold);
+ }
+ void init_matrix_sparse(const std::vector<std::vector<double>>& matrix, double threshold, double epsilon) {
+ sparse_rips_complex_.emplace(matrix, epsilon, -std::numeric_limits<double>::infinity(), threshold);
+ }
+
+ void create_simplex_tree(Simplex_tree_interface<>* simplex_tree, int dim_max) {
+ if (rips_complex_)
+ rips_complex_->create_complex(*simplex_tree, dim_max);
+ else
+ sparse_rips_complex_->create_complex(*simplex_tree, dim_max);
+ simplex_tree->initialize_filtration();
+ }
+
+ private:
+ // std::variant would work, but we don't require C++17 yet, and boost::variant is not super convenient.
+ // Anyway, storing a graph would make more sense. Or changing the interface completely so there is no such storage.
+ boost::optional<Rips_complex<Simplex_tree_interface<>::Filtration_value>> rips_complex_;
+ boost::optional<Sparse_rips_complex<Simplex_tree_interface<>::Filtration_value>> sparse_rips_complex_;
+};
+
+} // namespace rips_complex
+
+} // namespace Gudhi
+
+#endif // INCLUDE_RIPS_COMPLEX_INTERFACE_H_
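A minimal sketch of the Python class backed by this interface (assumes gudhi is importable; the `sparse` keyword is an assumption matching the sparse code path above, and the point set is a toy example):

    import gudhi

    points = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]]

    # Exact Rips complex up to edge length 2, expanded to dimension 2.
    rips = gudhi.RipsComplex(points=points, max_edge_length=2.0)
    st = rips.create_simplex_tree(max_dimension=2)
    print(st.persistence())

    # Sparse (approximate) Rips complex with relative error 0.5.
    sparse_rips = gudhi.RipsComplex(points=points, max_edge_length=2.0, sparse=0.5)
    st2 = sparse_rips.create_simplex_tree(max_dimension=2)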
diff --git a/src/python/include/Simplex_tree_interface.h b/src/python/include/Simplex_tree_interface.h
new file mode 100644
index 00000000..06f31341
--- /dev/null
+++ b/src/python/include/Simplex_tree_interface.h
@@ -0,0 +1,142 @@
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2016 Inria
+ *
+ * Modification(s):
+ * - YYYY/MM Author: Description of the modification
+ */
+
+#ifndef INCLUDE_SIMPLEX_TREE_INTERFACE_H_
+#define INCLUDE_SIMPLEX_TREE_INTERFACE_H_
+
+#include <gudhi/graph_simplicial_complex.h>
+#include <gudhi/distance_functions.h>
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Points_off_io.h>
+
+#include "Persistent_cohomology_interface.h"
+
+#include <iostream>
+#include <vector>
+#include <utility> // std::pair
+
+namespace Gudhi {
+
+template<typename SimplexTreeOptions = Simplex_tree_options_full_featured>
+class Simplex_tree_interface : public Simplex_tree<SimplexTreeOptions> {
+ public:
+ using Base = Simplex_tree<SimplexTreeOptions>;
+ using Filtration_value = typename Base::Filtration_value;
+ using Vertex_handle = typename Base::Vertex_handle;
+ using Simplex_handle = typename Base::Simplex_handle;
+ using Insertion_result = typename std::pair<Simplex_handle, bool>;
+ using Simplex = std::vector<Vertex_handle>;
+ using Filtered_simplices = std::vector<std::pair<Simplex, Filtration_value>>;
+
+ public:
+ bool find_simplex(const Simplex& vh) {
+ return (Base::find(vh) != Base::null_simplex());
+ }
+
+ void assign_simplex_filtration(const Simplex& vh, Filtration_value filtration) {
+ Base::assign_filtration(Base::find(vh), filtration);
+ }
+
+ bool insert(const Simplex& simplex, Filtration_value filtration = 0) {
+ Insertion_result result = Base::insert_simplex_and_subfaces(simplex, filtration);
+ return (result.second);
+ }
+
+ // Do not interface this function, only used in alpha complex interface for complex creation
+ bool insert_simplex(const Simplex& simplex, Filtration_value filtration = 0) {
+ Insertion_result result = Base::insert_simplex(simplex, filtration);
+ return (result.second);
+ }
+
+ // Do not interface this function, only used in interface for complex creation
+ bool insert_simplex_and_subfaces(const Simplex& simplex, Filtration_value filtration = 0) {
+ Insertion_result result = Base::insert_simplex_and_subfaces(simplex, filtration);
+ return (result.second);
+ }
+
+ // Do not interface this function, only used in strong witness interface for complex creation
+ bool insert_simplex(const std::vector<std::size_t>& simplex, Filtration_value filtration = 0) {
+ Insertion_result result = Base::insert_simplex(simplex, filtration);
+ return (result.second);
+ }
+
+ // Do not interface this function, only used in strong witness interface for complex creation
+ bool insert_simplex_and_subfaces(const std::vector<std::size_t>& simplex, Filtration_value filtration = 0) {
+ Insertion_result result = Base::insert_simplex_and_subfaces(simplex, filtration);
+ return (result.second);
+ }
+
+ Filtration_value simplex_filtration(const Simplex& simplex) {
+ return Base::filtration(Base::find(simplex));
+ }
+
+ void remove_maximal_simplex(const Simplex& simplex) {
+ Base::remove_maximal_simplex(Base::find(simplex));
+ Base::initialize_filtration();
+ }
+
+ Filtered_simplices get_filtration() {
+ Base::initialize_filtration();
+ Filtered_simplices filtrations;
+ for (auto f_simplex : Base::filtration_simplex_range()) {
+ Simplex simplex;
+ for (auto vertex : Base::simplex_vertex_range(f_simplex)) {
+ simplex.insert(simplex.begin(), vertex);
+ }
+ filtrations.push_back(std::make_pair(simplex, Base::filtration(f_simplex)));
+ }
+ return filtrations;
+ }
+
+ Filtered_simplices get_skeleton(int dimension) {
+ Filtered_simplices skeletons;
+ for (auto f_simplex : Base::skeleton_simplex_range(dimension)) {
+ Simplex simplex;
+ for (auto vertex : Base::simplex_vertex_range(f_simplex)) {
+ simplex.insert(simplex.begin(), vertex);
+ }
+ skeletons.push_back(std::make_pair(simplex, Base::filtration(f_simplex)));
+ }
+ return skeletons;
+ }
+
+ Filtered_simplices get_star(const Simplex& simplex) {
+ Filtered_simplices star;
+ for (auto f_simplex : Base::star_simplex_range(Base::find(simplex))) {
+ Simplex simplex_star;
+ for (auto vertex : Base::simplex_vertex_range(f_simplex)) {
+ simplex_star.insert(simplex_star.begin(), vertex);
+ }
+ star.push_back(std::make_pair(simplex_star, Base::filtration(f_simplex)));
+ }
+ return star;
+ }
+
+ Filtered_simplices get_cofaces(const Simplex& simplex, int dimension) {
+ Filtered_simplices cofaces;
+ for (auto f_simplex : Base::cofaces_simplex_range(Base::find(simplex), dimension)) {
+ Simplex simplex_coface;
+ for (auto vertex : Base::simplex_vertex_range(f_simplex)) {
+ simplex_coface.insert(simplex_coface.begin(), vertex);
+ }
+ cofaces.push_back(std::make_pair(simplex_coface, Base::filtration(f_simplex)));
+ }
+ return cofaces;
+ }
+
+ void create_persistence(Gudhi::Persistent_cohomology_interface<Base>* pcoh) {
+ Base::initialize_filtration();
+ pcoh = new Gudhi::Persistent_cohomology_interface<Base>(*this);
+ }
+};
+
+} // namespace Gudhi
+
+#endif // INCLUDE_SIMPLEX_TREE_INTERFACE_H_
diff --git a/src/python/include/Strong_witness_complex_interface.h b/src/python/include/Strong_witness_complex_interface.h
new file mode 100644
index 00000000..cda5b514
--- /dev/null
+++ b/src/python/include/Strong_witness_complex_interface.h
@@ -0,0 +1,61 @@
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2016 Inria
+ *
+ * Modification(s):
+ * - YYYY/MM Author: Description of the modification
+ */
+
+#ifndef INCLUDE_STRONG_WITNESS_COMPLEX_INTERFACE_H_
+#define INCLUDE_STRONG_WITNESS_COMPLEX_INTERFACE_H_
+
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Strong_witness_complex.h>
+
+#include "Simplex_tree_interface.h"
+
+#include <vector>
+#include <utility> // std::pair
+#include <iostream>
+#include <cstddef>
+
+namespace Gudhi {
+
+namespace witness_complex {
+
+class Strong_witness_complex_interface {
+ using Nearest_landmark_range = std::vector<std::pair<std::size_t, double>>;
+ using Nearest_landmark_table = std::vector<Nearest_landmark_range>;
+
+ public:
+ Strong_witness_complex_interface(const Nearest_landmark_table& nlt) {
+ witness_complex_ = new Strong_witness_complex<Nearest_landmark_table>(nlt);
+ }
+
+ ~Strong_witness_complex_interface() {
+ delete witness_complex_;
+ }
+
+ void create_simplex_tree(Simplex_tree_interface<>* simplex_tree, double max_alpha_square,
+ std::size_t limit_dimension) {
+ witness_complex_->create_complex(*simplex_tree, max_alpha_square, limit_dimension);
+ simplex_tree->initialize_filtration();
+ }
+
+ void create_simplex_tree(Simplex_tree_interface<>* simplex_tree,
+ double max_alpha_square) {
+ witness_complex_->create_complex(*simplex_tree, max_alpha_square);
+ simplex_tree->initialize_filtration();
+ }
+
+ private:
+ Strong_witness_complex<Nearest_landmark_table>* witness_complex_;
+};
+
+} // namespace witness_complex
+
+} // namespace Gudhi
+
+#endif // INCLUDE_STRONG_WITNESS_COMPLEX_INTERFACE_H_
diff --git a/src/python/include/Subsampling_interface.h b/src/python/include/Subsampling_interface.h
new file mode 100644
index 00000000..cdda851f
--- /dev/null
+++ b/src/python/include/Subsampling_interface.h
@@ -0,0 +1,107 @@
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2016 Inria
+ *
+ * Modification(s):
+ * - YYYY/MM Author: Description of the modification
+ */
+
+#ifndef INCLUDE_SUBSAMPLING_INTERFACE_H_
+#define INCLUDE_SUBSAMPLING_INTERFACE_H_
+
+#include <gudhi/choose_n_farthest_points.h>
+#include <gudhi/pick_n_random_points.h>
+#include <gudhi/sparsify_point_set.h>
+#include <gudhi/Points_off_io.h>
+#include <CGAL/Epick_d.h>
+
+#include <iostream>
+#include <vector>
+#include <string>
+
+namespace Gudhi {
+
+namespace subsampling {
+
+using Subsampling_dynamic_kernel = CGAL::Epick_d< CGAL::Dynamic_dimension_tag >;
+using Subsampling_point_d = Subsampling_dynamic_kernel::Point_d;
+using Subsampling_ft = Subsampling_dynamic_kernel::FT;
+
+// ------ choose_n_farthest_points ------
+std::vector<std::vector<double>> subsampling_n_farthest_points(const std::vector<std::vector<double>>& points,
+ unsigned nb_points) {
+ std::vector<std::vector<double>> landmarks;
+ Subsampling_dynamic_kernel k;
+ choose_n_farthest_points(k, points, nb_points, random_starting_point, std::back_inserter(landmarks));
+
+ return landmarks;
+}
+
+std::vector<std::vector<double>> subsampling_n_farthest_points(const std::vector<std::vector<double>>& points,
+ unsigned nb_points, unsigned starting_point) {
+ std::vector<std::vector<double>> landmarks;
+ Subsampling_dynamic_kernel k;
+ choose_n_farthest_points(k, points, nb_points, starting_point, std::back_inserter(landmarks));
+
+ return landmarks;
+}
+
+std::vector<std::vector<double>> subsampling_n_farthest_points_from_file(const std::string& off_file,
+ unsigned nb_points) {
+ Gudhi::Points_off_reader<std::vector<double>> off_reader(off_file);
+ std::vector<std::vector<double>> points = off_reader.get_point_cloud();
+ return subsampling_n_farthest_points(points, nb_points);
+}
+
+std::vector<std::vector<double>> subsampling_n_farthest_points_from_file(const std::string& off_file,
+ unsigned nb_points, unsigned starting_point) {
+ Gudhi::Points_off_reader<std::vector<double>> off_reader(off_file);
+ std::vector<std::vector<double>> points = off_reader.get_point_cloud();
+ return subsampling_n_farthest_points(points, nb_points, starting_point);
+}
+
+// ------ pick_n_random_points ------
+std::vector<std::vector<double>> subsampling_n_random_points(const std::vector<std::vector<double>>& points,
+ unsigned nb_points) {
+ std::vector<std::vector<double>> landmarks;
+ pick_n_random_points(points, nb_points, std::back_inserter(landmarks));
+
+ return landmarks;
+}
+
+std::vector<std::vector<double>> subsampling_n_random_points_from_file(const std::string& off_file,
+ unsigned nb_points) {
+ Gudhi::Points_off_reader<std::vector<double>> off_reader(off_file);
+ std::vector<std::vector<double>> points = off_reader.get_point_cloud();
+ return subsampling_n_random_points(points, nb_points);
+}
+
+// ------ sparsify_point_set ------
+std::vector<std::vector<double>> subsampling_sparsify_points(const std::vector<std::vector<double>>& points,
+ double min_squared_dist) {
+ std::vector<Subsampling_point_d> input, output;
+ for (auto point : points)
+ input.push_back(Subsampling_point_d(point.size(), point.begin(), point.end()));
+ Subsampling_dynamic_kernel k;
+ sparsify_point_set(k, input, min_squared_dist, std::back_inserter(output));
+
+ std::vector<std::vector<double>> landmarks;
+ for (auto point : output)
+ landmarks.push_back(std::vector<double>(point.cartesian_begin(), point.cartesian_end()));
+ return landmarks;
+}
+
+std::vector<std::vector<double>> subsampling_sparsify_points_from_file(const std::string& off_file,
+ double min_squared_dist) {
+ Gudhi::Points_off_reader<std::vector<double>> off_reader(off_file);
+ std::vector<std::vector<double>> points = off_reader.get_point_cloud();
+ return subsampling_sparsify_points(points, min_squared_dist);
+}
+
+} // namespace subsampling
+
+} // namespace Gudhi
+
+#endif // INCLUDE_SUBSAMPLING_INTERFACE_H_
diff --git a/src/python/include/Tangential_complex_interface.h b/src/python/include/Tangential_complex_interface.h
new file mode 100644
index 00000000..698226cc
--- /dev/null
+++ b/src/python/include/Tangential_complex_interface.h
@@ -0,0 +1,109 @@
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2016 Inria
+ *
+ * Modification(s):
+ * - YYYY/MM Author: Description of the modification
+ */
+
+#ifndef INCLUDE_TANGENTIAL_COMPLEX_INTERFACE_H_
+#define INCLUDE_TANGENTIAL_COMPLEX_INTERFACE_H_
+
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Tangential_complex.h>
+#include <gudhi/Points_off_io.h>
+#include <CGAL/Epick_d.h>
+
+#include "Simplex_tree_interface.h"
+
+#include <vector>
+#include <utility> // std::pair
+#include <iostream>
+#include <string>
+
+namespace Gudhi {
+
+namespace tangential_complex {
+
+class Tangential_complex_interface {
+ using Dynamic_kernel = CGAL::Epick_d< CGAL::Dynamic_dimension_tag >;
+ using Point_d = Dynamic_kernel::Point_d;
+ using TC = Tangential_complex<Dynamic_kernel, CGAL::Dynamic_dimension_tag, CGAL::Parallel_tag>;
+
+ public:
+ Tangential_complex_interface(int intrisic_dim, const std::vector<std::vector<double>>& points) {
+ Dynamic_kernel k;
+
+ tangential_complex_ = new TC(points, intrisic_dim, k);
+ }
+
+ Tangential_complex_interface(int intrisic_dim, const std::string& off_file_name, bool from_file = true) {
+ Dynamic_kernel k;
+
+ Gudhi::Points_off_reader<Point_d> off_reader(off_file_name);
+ std::vector<Point_d> points = off_reader.get_point_cloud();
+
+ tangential_complex_ = new TC(points, intrisic_dim, k);
+ }
+
+ ~Tangential_complex_interface() {
+ delete tangential_complex_;
+ }
+
+ void compute_tangential_complex() {
+ tangential_complex_->compute_tangential_complex();
+ num_inconsistencies_ = tangential_complex_->number_of_inconsistent_simplices();
+ }
+
+ std::vector<double> get_point(unsigned vh) {
+ std::vector<double> vd;
+ if (vh < tangential_complex_->number_of_vertices()) {
+ Point_d ph = tangential_complex_->get_point(vh);
+ for (auto coord = ph.cartesian_begin(); coord < ph.cartesian_end(); coord++)
+ vd.push_back(*coord);
+ }
+ return vd;
+ }
+
+ unsigned number_of_vertices() {
+ return tangential_complex_->number_of_vertices();
+ }
+
+ unsigned number_of_simplices() {
+ return num_inconsistencies_.num_simplices;
+ }
+
+ unsigned number_of_inconsistent_simplices() {
+ return num_inconsistencies_.num_inconsistent_simplices;
+ }
+
+ unsigned number_of_inconsistent_stars() {
+ return num_inconsistencies_.num_inconsistent_stars;
+ }
+
+ void fix_inconsistencies_using_perturbation(double max_perturb, double time_limit) {
+ tangential_complex_->fix_inconsistencies_using_perturbation(max_perturb, time_limit);
+ num_inconsistencies_ = tangential_complex_->number_of_inconsistent_simplices();
+ }
+
+ void create_simplex_tree(Simplex_tree<>* simplex_tree) {
+ tangential_complex_->create_complex<Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_full_featured>>(*simplex_tree);
+ simplex_tree->initialize_filtration();
+ }
+
+ void set_max_squared_edge_length(double max_squared_edge_length) {
+ tangential_complex_->set_max_squared_edge_length(max_squared_edge_length);
+ }
+
+ private:
+ TC* tangential_complex_;
+ TC::Num_inconsistencies num_inconsistencies_;
+};
+
+} // namespace tangential_complex
+
+} // namespace Gudhi
+
+#endif // INCLUDE_TANGENTIAL_COMPLEX_INTERFACE_H_
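For orientation, a hedged sketch of the Python-side workflow this interface backs; the method names come from the wrapper above and from test_tangential_complex.py below, and the perturbation call in particular is an assumption about the binding name:

    from gudhi import TangentialComplex

    tc = TangentialComplex(intrisic_dim=1,
                           points=[[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]])
    tc.compute_tangential_complex()

    # If inconsistencies remain, the interface above exposes a perturbation-based
    # repair step; an assumed Python-level call could look like this:
    if tc.num_inconsistent_simplices() > 0:
        tc.fix_inconsistencies_using_perturbation(0.01, 30.0)

    st = tc.create_simplex_tree()  # SimplexTree holding the reconstructed complex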
diff --git a/src/python/include/Witness_complex_interface.h b/src/python/include/Witness_complex_interface.h
new file mode 100644
index 00000000..45e14253
--- /dev/null
+++ b/src/python/include/Witness_complex_interface.h
@@ -0,0 +1,62 @@
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2016 Inria
+ *
+ * Modification(s):
+ * - YYYY/MM Author: Description of the modification
+ */
+
+#ifndef INCLUDE_WITNESS_COMPLEX_INTERFACE_H_
+#define INCLUDE_WITNESS_COMPLEX_INTERFACE_H_
+
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Witness_complex.h>
+
+#include "Simplex_tree_interface.h"
+
+#include <vector>
+#include <utility> // std::pair
+#include <iostream>
+#include <cstddef>
+
+namespace Gudhi {
+
+namespace witness_complex {
+
+class Witness_complex_interface {
+ using Nearest_landmark_range = std::vector<std::pair<std::size_t, double>>;
+ using Nearest_landmark_table = std::vector<Nearest_landmark_range>;
+
+ public:
+ Witness_complex_interface(const Nearest_landmark_table& nlt) {
+ witness_complex_ = new Witness_complex<Nearest_landmark_table>(nlt);
+ }
+
+ ~Witness_complex_interface() {
+ delete witness_complex_;
+ }
+
+ void create_simplex_tree(Simplex_tree_interface<>* simplex_tree, double max_alpha_square,
+ std::size_t limit_dimension) {
+ witness_complex_->create_complex(*simplex_tree, max_alpha_square, limit_dimension);
+ simplex_tree->initialize_filtration();
+ }
+
+ void create_simplex_tree(Simplex_tree_interface<>* simplex_tree,
+ double max_alpha_square) {
+ witness_complex_->create_complex(*simplex_tree, max_alpha_square);
+ simplex_tree->initialize_filtration();
+ }
+
+ private:
+ Witness_complex<Nearest_landmark_table>* witness_complex_;
+};
+
+} // namespace witness_complex
+
+} // namespace Gudhi
+
+#endif // INCLUDE_WITNESS_COMPLEX_INTERFACE_H_
+
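The Nearest_landmark_table above is one row per witness, each row a list of (landmark index, squared distance) pairs, typically ordered from the closest landmark to the farthest; the tests at the end of this diff build it as nested Python lists. A small hedged sketch:

    from gudhi import WitnessComplex

    # One row per witness; each entry is [landmark index, squared distance to it].
    nearest_landmark_table = [
        [[0, 0.0], [1, 1.0], [2, 4.0]],
        [[1, 0.0], [0, 1.0], [2, 2.0]],
        [[2, 0.0], [1, 2.0], [0, 4.0]],
    ]
    witness = WitnessComplex(nearest_landmark_table=nearest_landmark_table)
    st = witness.create_simplex_tree(max_alpha_square=4.1, limit_dimension=2)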
diff --git a/src/python/setup.py.in b/src/python/setup.py.in
new file mode 100644
index 00000000..3f1d4424
--- /dev/null
+++ b/src/python/setup.py.in
@@ -0,0 +1,53 @@
+from setuptools import setup, Extension
+from Cython.Build import cythonize
+from numpy import get_include as numpy_get_include
+
+"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2019 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "MIT"
+
+modules = [@GUDHI_PYTHON_MODULES_TO_COMPILE@]
+
+source_dir='@CMAKE_CURRENT_SOURCE_DIR@/gudhi/'
+extra_compile_args=[@GUDHI_PYTHON_EXTRA_COMPILE_ARGS@]
+extra_link_args=[@GUDHI_PYTHON_EXTRA_LINK_ARGS@]
+libraries=[@GUDHI_PYTHON_LIBRARIES@]
+library_dirs=[@GUDHI_PYTHON_LIBRARY_DIRS@]
+include_dirs = [numpy_get_include(), '@CMAKE_CURRENT_SOURCE_DIR@/gudhi/', @GUDHI_PYTHON_INCLUDE_DIRS@]
+runtime_library_dirs=[@GUDHI_PYTHON_RUNTIME_LIBRARY_DIRS@]
+
+# Create ext_modules list from module list
+ext_modules = []
+for module in modules:
+ ext_modules.append(Extension(
+ 'gudhi.' + module,
+ sources = [source_dir + module + '.pyx',],
+ language = 'c++',
+ extra_compile_args=extra_compile_args,
+ extra_link_args=extra_link_args,
+ libraries=libraries,
+ library_dirs=library_dirs,
+ include_dirs=include_dirs,
+ runtime_library_dirs=runtime_library_dirs,))
+
+setup(
+ name = 'gudhi',
+ packages=["gudhi",],
+ author='GUDHI Editorial Board',
+ author_email='gudhi-contact@lists.gforge.inria.fr',
+ version='@GUDHI_VERSION@',
+ url='http://gudhi.gforge.inria.fr/',
+ ext_modules = cythonize(ext_modules),
+ install_requires = ['cython','numpy >= 1.9',],
+ setup_requires = ['numpy >= 1.9',],
+)
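The @...@ tokens are CMake placeholders filled in at configure time; the generated setup.py is then a standard setuptools/Cython script. A hedged usage note (standard setuptools invocation, nothing GUDHI-specific):

    # From the configured python build directory (path is illustrative):
    #   python setup.py build_ext --inplace
    # builds one Cython extension per entry of GUDHI_PYTHON_MODULES_TO_COMPILE and
    # drops the resulting gudhi extension modules next to the package sources.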
diff --git a/src/python/test/test_alpha_complex.py b/src/python/test/test_alpha_complex.py
new file mode 100755
index 00000000..24f8bf53
--- /dev/null
+++ b/src/python/test/test_alpha_complex.py
@@ -0,0 +1,90 @@
+from gudhi import AlphaComplex, SimplexTree
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "MIT"
+
+
+def test_empty_alpha():
+ alpha_complex = AlphaComplex(points=[[0, 0]])
+ assert alpha_complex.__is_defined() == True
+
+
+def test_infinite_alpha():
+ point_list = [[0, 0], [1, 0], [0, 1], [1, 1]]
+ alpha_complex = AlphaComplex(points=point_list)
+ assert alpha_complex.__is_defined() == True
+
+ simplex_tree = alpha_complex.create_simplex_tree()
+ assert simplex_tree.__is_persistence_defined() == False
+
+ assert simplex_tree.num_simplices() == 11
+ assert simplex_tree.num_vertices() == 4
+
+ assert simplex_tree.get_filtration() == [
+ ([0], 0.0),
+ ([1], 0.0),
+ ([2], 0.0),
+ ([3], 0.0),
+ ([0, 1], 0.25),
+ ([0, 2], 0.25),
+ ([1, 3], 0.25),
+ ([2, 3], 0.25),
+ ([1, 2], 0.5),
+ ([0, 1, 2], 0.5),
+ ([1, 2, 3], 0.5),
+ ]
+ assert simplex_tree.get_star([0]) == [
+ ([0], 0.0),
+ ([0, 1], 0.25),
+ ([0, 1, 2], 0.5),
+ ([0, 2], 0.25),
+ ]
+ assert simplex_tree.get_cofaces([0], 1) == [([0, 1], 0.25), ([0, 2], 0.25)]
+
+ assert point_list[0] == alpha_complex.get_point(0)
+ assert point_list[1] == alpha_complex.get_point(1)
+ assert point_list[2] == alpha_complex.get_point(2)
+ assert point_list[3] == alpha_complex.get_point(3)
+ assert alpha_complex.get_point(4) == []
+ assert alpha_complex.get_point(125) == []
+
+
+def test_filtered_alpha():
+ point_list = [[0, 0], [1, 0], [0, 1], [1, 1]]
+ filtered_alpha = AlphaComplex(points=point_list)
+
+ simplex_tree = filtered_alpha.create_simplex_tree(max_alpha_square=0.25)
+
+ assert simplex_tree.num_simplices() == 8
+ assert simplex_tree.num_vertices() == 4
+
+ assert point_list[0] == filtered_alpha.get_point(0)
+ assert point_list[1] == filtered_alpha.get_point(1)
+ assert point_list[2] == filtered_alpha.get_point(2)
+ assert point_list[3] == filtered_alpha.get_point(3)
+ assert filtered_alpha.get_point(4) == []
+ assert filtered_alpha.get_point(125) == []
+
+ assert simplex_tree.get_filtration() == [
+ ([0], 0.0),
+ ([1], 0.0),
+ ([2], 0.0),
+ ([3], 0.0),
+ ([0, 1], 0.25),
+ ([0, 2], 0.25),
+ ([1, 3], 0.25),
+ ([2, 3], 0.25),
+ ]
+ assert simplex_tree.get_star([0]) == [([0], 0.0), ([0, 1], 0.25), ([0, 2], 0.25)]
+ assert simplex_tree.get_cofaces([0], 1) == [([0, 1], 0.25), ([0, 2], 0.25)]
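The expected filtration values above follow from the alpha complex convention that each simplex is assigned the squared radius of its smallest (empty) circumscribing ball; a short arithmetic check under that assumption (0.25 for a unit edge, 0.5 for the right triangle whose hypotenuse is the unit-square diagonal):

    from math import sqrt

    unit_edge_radius = 1.0 / 2.0                 # smallest ball containing an edge of length 1
    assert unit_edge_radius ** 2 == 0.25         # matches the edge filtration values above

    hypotenuse = sqrt(2.0)                       # diagonal of the unit square
    triangle_circumradius = hypotenuse / 2.0     # right triangle: circumcenter at the hypotenuse midpoint
    assert abs(triangle_circumradius ** 2 - 0.5) < 1e-12   # matches the triangle values above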
diff --git a/src/python/test/test_bottleneck_distance.py b/src/python/test/test_bottleneck_distance.py
new file mode 100755
index 00000000..f5f019b9
--- /dev/null
+++ b/src/python/test/test_bottleneck_distance.py
@@ -0,0 +1,23 @@
+import gudhi
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "MIT"
+
+
+def test_basic_bottleneck():
+ diag1 = [[2.7, 3.7], [9.6, 14.0], [34.2, 34.974], [3.0, float("Inf")]]
+ diag2 = [[2.8, 4.45], [9.5, 14.1], [3.2, float("Inf")]]
+
+ assert gudhi.bottleneck_distance(diag1, diag2, 0.1) == 0.8081763781405569
+ assert gudhi.bottleneck_distance(diag1, diag2) == 0.75
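Two cheap sanity properties of the bottleneck distance that hold regardless of the particular diagrams, written as a hedged sketch against the same gudhi.bottleneck_distance entry point used above:

    import gudhi

    diag = [[2.7, 3.7], [9.6, 14.0]]

    # A diagram is at distance 0 from itself, and the distance is symmetric.
    assert gudhi.bottleneck_distance(diag, diag) <= 1e-12
    d12 = gudhi.bottleneck_distance(diag, [[2.7, 3.8]])
    d21 = gudhi.bottleneck_distance([[2.7, 3.8]], diag)
    assert abs(d12 - d21) <= 1e-12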
diff --git a/src/python/test/test_cover_complex.py b/src/python/test/test_cover_complex.py
new file mode 100755
index 00000000..8cd12272
--- /dev/null
+++ b/src/python/test/test_cover_complex.py
@@ -0,0 +1,85 @@
+from gudhi import CoverComplex
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2018 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2018 Inria"
+__license__ = "MIT"
+
+
+def test_empty_constructor():
+ # Try to create an empty CoverComplex
+ cover = CoverComplex()
+ assert cover.__is_defined() == True
+
+
+def test_non_existing_file_read():
+ # Try to open a non-existing file
+ cover = CoverComplex()
+ assert cover.read_point_cloud("pouetpouettralala.toubiloubabdou") == False
+
+
+def test_files_creation():
+ # Create test file
+ cloud_file = open("cloud", "w")
+ cloud_file.write("nOFF\n3\n3 0 0\n0 0 0\n2 1 0\n4 0 0")
+ cloud_file.close()
+ cover_file = open("cover", "w")
+ cover_file.write("1\n2\n3")
+ cover_file.close()
+ graph_file = open("graph", "w")
+ graph_file.write("0 1\n0 2\n1 2")
+ graph_file.close()
+
+
+def test_nerve():
+ nerve = CoverComplex()
+ nerve.set_type("Nerve")
+ assert nerve.read_point_cloud("cloud") == True
+ nerve.set_color_from_coordinate()
+ nerve.set_graph_from_file("graph")
+ nerve.set_cover_from_file("cover")
+ nerve.find_simplices()
+ stree = nerve.create_simplex_tree()
+
+ assert stree.num_vertices() == 3
+ assert (stree.num_simplices() - stree.num_vertices()) == 0
+ assert stree.dimension() == 0
+
+
+def test_graph_induced_complex():
+ gic = CoverComplex()
+ gic.set_type("GIC")
+ assert gic.read_point_cloud("cloud") == True
+ gic.set_color_from_coordinate()
+ gic.set_graph_from_file("graph")
+ gic.set_cover_from_file("cover")
+ gic.find_simplices()
+ stree = gic.create_simplex_tree()
+
+ assert stree.num_vertices() == 3
+ assert (stree.num_simplices() - stree.num_vertices()) == 4
+ assert stree.dimension() == 2
+
+
+def test_voronoi_graph_induced_complex():
+ gic = CoverComplex()
+ gic.set_type("GIC")
+ assert gic.read_point_cloud("cloud") == True
+ gic.set_color_from_coordinate()
+ gic.set_graph_from_file("graph")
+ gic.set_cover_from_Voronoi(2)
+ gic.find_simplices()
+ stree = gic.create_simplex_tree()
+
+ assert stree.num_vertices() == 2
+ assert (stree.num_simplices() - stree.num_vertices()) == 1
+ assert stree.dimension() == 1
diff --git a/src/python/test/test_cubical_complex.py b/src/python/test/test_cubical_complex.py
new file mode 100755
index 00000000..68f54fbe
--- /dev/null
+++ b/src/python/test/test_cubical_complex.py
@@ -0,0 +1,98 @@
+from gudhi import CubicalComplex
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "MIT"
+
+
+def test_empty_constructor():
+ # Try to create an empty CubicalComplex
+ cub = CubicalComplex()
+ assert cub.__is_defined() == False
+ assert cub.__is_persistence_defined() == False
+
+
+def test_non_existing_perseus_file_constructor():
+ # Try to open a non-existing file
+ cub = CubicalComplex(perseus_file="pouetpouettralala.toubiloubabdou")
+ assert cub.__is_defined() == False
+ assert cub.__is_persistence_defined() == False
+
+
+def test_dimension_or_perseus_file_constructor():
+ # Create test file
+ test_file = open("CubicalOneSphere.txt", "w")
+ test_file.write("2\n3\n3\n0\n0\n0\n0\n100\n0\n0\n0\n0\n")
+ test_file.close()
+ # CubicalComplex must be constructed either from dimensions and
+ # top_dimensional_cells OR from a Perseus-style file name, not from both.
+ cub = CubicalComplex(
+ dimensions=[3, 3],
+ top_dimensional_cells=[1, 2, 3, 4, 5, 6, 7, 8, 9],
+ perseus_file="CubicalOneSphere.txt",
+ )
+ assert cub.__is_defined() == False
+ assert cub.__is_persistence_defined() == False
+
+ cub = CubicalComplex(
+ top_dimensional_cells=[1, 2, 3, 4, 5, 6, 7, 8, 9],
+ perseus_file="CubicalOneSphere.txt",
+ )
+ assert cub.__is_defined() == False
+ assert cub.__is_persistence_defined() == False
+
+ cub = CubicalComplex(dimensions=[3, 3], perseus_file="CubicalOneSphere.txt")
+ assert cub.__is_defined() == False
+ assert cub.__is_persistence_defined() == False
+
+
+def test_dimension_simple_constructor():
+ cub = CubicalComplex(
+ dimensions=[3, 3], top_dimensional_cells=[1, 2, 3, 4, 5, 6, 7, 8, 9]
+ )
+ assert cub.__is_defined() == True
+ assert cub.__is_persistence_defined() == False
+ assert cub.persistence() == [(0, (1.0, float("inf")))]
+ assert cub.__is_persistence_defined() == True
+ assert cub.betti_numbers() == [1, 0, 0]
+ assert cub.persistent_betti_numbers(0, 1000) == [0, 0, 0]
+
+
+def test_user_case_simple_constructor():
+ cub = CubicalComplex(
+ dimensions=[3, 3],
+ top_dimensional_cells=[float("inf"), 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0],
+ )
+ assert cub.__is_defined() == True
+ assert cub.__is_persistence_defined() == False
+ assert cub.persistence() == [(1, (0.0, 1.0)), (0, (0.0, float("inf")))]
+ assert cub.__is_persistence_defined() == True
+ other_cub = CubicalComplex(
+ dimensions=[3, 3],
+ top_dimensional_cells=[1000.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0],
+ )
+ assert other_cub.persistence() == [(1, (0.0, 1.0)), (0, (0.0, float("inf")))]
+
+
+def test_dimension_file_constructor():
+ # Create test file
+ test_file = open("CubicalOneSphere.txt", "w")
+ test_file.write("2\n3\n3\n0\n0\n0\n0\n100\n0\n0\n0\n0\n")
+ test_file.close()
+ cub = CubicalComplex(perseus_file="CubicalOneSphere.txt")
+ assert cub.__is_defined() == True
+ assert cub.__is_persistence_defined() == False
+ assert cub.persistence() == [(1, (0.0, 100.0)), (0, (0.0, float("inf")))]
+ assert cub.__is_persistence_defined() == True
+ assert cub.betti_numbers() == [1, 0, 0]
+ assert cub.persistent_betti_numbers(0, 1000) == [1, 0, 0]
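The Perseus file written in these tests encodes dimension 2, sizes 3 x 3, then nine top-dimensional cell values (a single 100 surrounded by zeros). Under that reading, feeding the same data through the array constructor should presumably give the same persistence; a hedged cross-check:

    from gudhi import CubicalComplex

    from_cells = CubicalComplex(
        dimensions=[3, 3],
        top_dimensional_cells=[0, 0, 0, 0, 100, 0, 0, 0, 0],
    )
    # Expected to match the Perseus-file result above: one H1 class born at 0 and
    # killed at 100, plus the essential H0 class.
    assert from_cells.persistence() == [(1, (0.0, 100.0)), (0, (0.0, float("inf")))]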
diff --git a/src/python/test/test_euclidean_witness_complex.py b/src/python/test/test_euclidean_witness_complex.py
new file mode 100755
index 00000000..f5eae5fa
--- /dev/null
+++ b/src/python/test/test_euclidean_witness_complex.py
@@ -0,0 +1,95 @@
+import gudhi
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "MIT"
+
+
+def test_empty_euclidean_witness_complex():
+ euclidean_witness = gudhi.EuclideanWitnessComplex()
+ assert euclidean_witness.__is_defined() == False
+
+
+def test_witness_complex():
+ point_cloud = [
+ [1.0, 1.0],
+ [7.0, 0.0],
+ [4.0, 6.0],
+ [9.0, 6.0],
+ [0.0, 14.0],
+ [2.0, 19.0],
+ [9.0, 17.0],
+ ]
+ landmarks = [[1.0, 1.0], [7.0, 0.0], [4.0, 6.0]]
+ euclidean_witness_complex = gudhi.EuclideanWitnessComplex(
+ landmarks=landmarks, witnesses=point_cloud
+ )
+ simplex_tree = euclidean_witness_complex.create_simplex_tree(max_alpha_square=4.1)
+
+ assert landmarks[0] == euclidean_witness_complex.get_point(0)
+ assert landmarks[1] == euclidean_witness_complex.get_point(1)
+ assert landmarks[2] == euclidean_witness_complex.get_point(2)
+
+ assert simplex_tree.get_filtration() == [
+ ([0], 0.0),
+ ([1], 0.0),
+ ([0, 1], 0.0),
+ ([2], 0.0),
+ ([0, 2], 0.0),
+ ([1, 2], 0.0),
+ ([0, 1, 2], 0.0),
+ ]
+
+
+def test_empty_euclidean_strong_witness_complex():
+ euclidean_strong_witness = gudhi.EuclideanStrongWitnessComplex()
+ assert euclidean_strong_witness.__is_defined() == False
+
+
+def test_strong_witness_complex():
+ point_cloud = [
+ [1.0, 1.0],
+ [7.0, 0.0],
+ [4.0, 6.0],
+ [9.0, 6.0],
+ [0.0, 14.0],
+ [2.0, 19.0],
+ [9.0, 17.0],
+ ]
+ landmarks = [[1.0, 1.0], [7.0, 0.0], [4.0, 6.0]]
+ euclidean_strong_witness_complex = gudhi.EuclideanStrongWitnessComplex(
+ landmarks=landmarks, witnesses=point_cloud
+ )
+ simplex_tree = euclidean_strong_witness_complex.create_simplex_tree(
+ max_alpha_square=14.9
+ )
+
+ assert landmarks[0] == euclidean_strong_witness_complex.get_point(0)
+ assert landmarks[1] == euclidean_strong_witness_complex.get_point(1)
+ assert landmarks[2] == euclidean_strong_witness_complex.get_point(2)
+
+ assert simplex_tree.get_filtration() == [([0], 0.0), ([1], 0.0), ([2], 0.0)]
+
+ simplex_tree = euclidean_strong_witness_complex.create_simplex_tree(
+ max_alpha_square=100.0
+ )
+
+ assert simplex_tree.get_filtration() == [
+ ([0], 0.0),
+ ([1], 0.0),
+ ([2], 0.0),
+ ([1, 2], 15.0),
+ ([0, 2], 34.0),
+ ([0, 1], 37.0),
+ ([0, 1, 2], 37.0),
+ ]
diff --git a/src/python/test/test_reader_utils.py b/src/python/test/test_reader_utils.py
new file mode 100755
index 00000000..4c7b32c2
--- /dev/null
+++ b/src/python/test/test_reader_utils.py
@@ -0,0 +1,126 @@
+import gudhi
+import numpy as np
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2017 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2017 Inria"
+__license__ = "MIT"
+
+
+def test_non_existing_csv_file():
+ # Try to open a non-existing file
+ matrix = gudhi.read_lower_triangular_matrix_from_csv_file(
+ csv_file="pouetpouettralala.toubiloubabdou"
+ )
+ assert matrix == []
+
+
+def test_full_square_distance_matrix_csv_file():
+ # Create test file
+ test_file = open("full_square_distance_matrix.csv", "w")
+ test_file.write("0;1;2;3;\n1;0;4;5;\n2;4;0;6;\n3;5;6;0;")
+ test_file.close()
+ matrix = gudhi.read_lower_triangular_matrix_from_csv_file(
+ csv_file="full_square_distance_matrix.csv"
+ )
+ assert matrix == [[], [1.0], [2.0, 4.0], [3.0, 5.0, 6.0]]
+
+
+def test_lower_triangular_distance_matrix_csv_file():
+ # Create test file
+ test_file = open("lower_triangular_distance_matrix.csv", "w")
+ test_file.write("\n1,\n2,3,\n4,5,6,\n7,8,9,10,")
+ test_file.close()
+ matrix = gudhi.read_lower_triangular_matrix_from_csv_file(
+ csv_file="lower_triangular_distance_matrix.csv", separator=","
+ )
+ assert matrix == [[], [1.0], [2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9.0, 10.0]]
+
+
+def test_non_existing_persistence_file():
+ # Try to open a non-existing file
+ persistence = gudhi.read_persistence_intervals_grouped_by_dimension(
+ persistence_file="pouetpouettralala.toubiloubabdou"
+ )
+ assert persistence == []
+ persistence = gudhi.read_persistence_intervals_in_dimension(
+ persistence_file="pouetpouettralala.toubiloubabdou", only_this_dim=1
+ )
+ np.testing.assert_array_equal(persistence, [])
+
+
+def test_read_persistence_intervals_without_dimension():
+ # Create test file
+ test_file = open("persistence_intervals_without_dimension.pers", "w")
+ test_file.write(
+ "# Simple persistence diagram without dimension\n2.7 3.7\n9.6 14.\n34.2 34.974\n3. inf"
+ )
+ test_file.close()
+ persistence = gudhi.read_persistence_intervals_in_dimension(
+ persistence_file="persistence_intervals_without_dimension.pers"
+ )
+ np.testing.assert_array_equal(
+ persistence, [(2.7, 3.7), (9.6, 14.0), (34.2, 34.974), (3.0, float("Inf"))]
+ )
+ persistence = gudhi.read_persistence_intervals_in_dimension(
+ persistence_file="persistence_intervals_without_dimension.pers", only_this_dim=0
+ )
+ np.testing.assert_array_equal(persistence, [])
+ persistence = gudhi.read_persistence_intervals_in_dimension(
+ persistence_file="persistence_intervals_without_dimension.pers", only_this_dim=1
+ )
+ np.testing.assert_array_equal(persistence, [])
+ persistence = gudhi.read_persistence_intervals_grouped_by_dimension(
+ persistence_file="persistence_intervals_without_dimension.pers"
+ )
+ assert persistence == {
+ -1: [(2.7, 3.7), (9.6, 14.0), (34.2, 34.974), (3.0, float("Inf"))]
+ }
+
+
+def test_read_persistence_intervals_with_dimension():
+ # Create test file
+ test_file = open("persistence_intervals_with_dimension.pers", "w")
+ test_file.write(
+ "# Simple persistence diagram with dimension\n0 2.7 3.7\n1 9.6 14.\n3 34.2 34.974\n1 3. inf"
+ )
+ test_file.close()
+ persistence = gudhi.read_persistence_intervals_in_dimension(
+ persistence_file="persistence_intervals_with_dimension.pers"
+ )
+ np.testing.assert_array_equal(
+ persistence, [(2.7, 3.7), (9.6, 14.0), (34.2, 34.974), (3.0, float("Inf"))]
+ )
+ persistence = gudhi.read_persistence_intervals_in_dimension(
+ persistence_file="persistence_intervals_with_dimension.pers", only_this_dim=0
+ )
+ np.testing.assert_array_equal(persistence, [(2.7, 3.7)])
+ persistence = gudhi.read_persistence_intervals_in_dimension(
+ persistence_file="persistence_intervals_with_dimension.pers", only_this_dim=1
+ )
+ np.testing.assert_array_equal(persistence, [(9.6, 14.0), (3.0, float("Inf"))])
+ persistence = gudhi.read_persistence_intervals_in_dimension(
+ persistence_file="persistence_intervals_with_dimension.pers", only_this_dim=2
+ )
+ np.testing.assert_array_equal(persistence, [])
+ persistence = gudhi.read_persistence_intervals_in_dimension(
+ persistence_file="persistence_intervals_with_dimension.pers", only_this_dim=3
+ )
+ np.testing.assert_array_equal(persistence, [(34.2, 34.974)])
+ persistence = gudhi.read_persistence_intervals_grouped_by_dimension(
+ persistence_file="persistence_intervals_with_dimension.pers"
+ )
+ assert persistence == {
+ 0: [(2.7, 3.7)],
+ 1: [(9.6, 14.0), (3.0, float("Inf"))],
+ 3: [(34.2, 34.974)],
+ }
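The file format exercised here is one interval per line, optionally prefixed by its dimension; untagged lines end up under key -1, as the first grouped test shows. A minimal hedged round-trip:

    import gudhi

    with open("tiny_diagram.pers", "w") as f:
        f.write("0 1.0 2.0\n1 3.0 inf\n")

    by_dim = gudhi.read_persistence_intervals_grouped_by_dimension(
        persistence_file="tiny_diagram.pers"
    )
    # Expected grouping under the convention above.
    assert by_dim == {0: [(1.0, 2.0)], 1: [(3.0, float("inf"))]}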
diff --git a/src/python/test/test_rips_complex.py b/src/python/test/test_rips_complex.py
new file mode 100755
index 00000000..d55ae22f
--- /dev/null
+++ b/src/python/test/test_rips_complex.py
@@ -0,0 +1,133 @@
+from gudhi import RipsComplex
+from math import sqrt
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "MIT"
+
+
+def test_empty_rips():
+ rips_complex = RipsComplex()
+
+
+def test_rips_from_points():
+ point_list = [[0, 0], [1, 0], [0, 1], [1, 1]]
+ rips_complex = RipsComplex(points=point_list, max_edge_length=42)
+
+ simplex_tree = rips_complex.create_simplex_tree(max_dimension=1)
+
+ assert simplex_tree.__is_defined() == True
+ assert simplex_tree.__is_persistence_defined() == False
+
+ assert simplex_tree.num_simplices() == 10
+ assert simplex_tree.num_vertices() == 4
+
+ assert simplex_tree.get_filtration() == [
+ ([0], 0.0),
+ ([1], 0.0),
+ ([2], 0.0),
+ ([3], 0.0),
+ ([0, 1], 1.0),
+ ([0, 2], 1.0),
+ ([1, 3], 1.0),
+ ([2, 3], 1.0),
+ ([1, 2], 1.4142135623730951),
+ ([0, 3], 1.4142135623730951),
+ ]
+ assert simplex_tree.get_star([0]) == [
+ ([0], 0.0),
+ ([0, 1], 1.0),
+ ([0, 2], 1.0),
+ ([0, 3], 1.4142135623730951),
+ ]
+ assert simplex_tree.get_cofaces([0], 1) == [
+ ([0, 1], 1.0),
+ ([0, 2], 1.0),
+ ([0, 3], 1.4142135623730951),
+ ]
+
+
+def test_filtered_rips_from_points():
+ point_list = [[0, 0], [1, 0], [0, 1], [1, 1]]
+ filtered_rips = RipsComplex(points=point_list, max_edge_length=1.0)
+
+ simplex_tree = filtered_rips.create_simplex_tree(max_dimension=1)
+
+ assert simplex_tree.__is_defined() == True
+ assert simplex_tree.__is_persistence_defined() == False
+
+ assert simplex_tree.num_simplices() == 8
+ assert simplex_tree.num_vertices() == 4
+
+
+def test_sparse_filtered_rips_from_points():
+ point_list = [[0, 0], [1, 0], [0, 1], [1, 1]]
+ filtered_rips = RipsComplex(points=point_list, max_edge_length=1.0, sparse=0.001)
+
+ simplex_tree = filtered_rips.create_simplex_tree(max_dimension=1)
+
+ assert simplex_tree.__is_defined() == True
+ assert simplex_tree.__is_persistence_defined() == False
+
+ assert simplex_tree.num_simplices() == 8
+ assert simplex_tree.num_vertices() == 4
+
+
+def test_rips_from_distance_matrix():
+ distance_matrix = [[0], [1, 0], [1, sqrt(2), 0], [sqrt(2), 1, 1, 0]]
+ rips_complex = RipsComplex(distance_matrix=distance_matrix, max_edge_length=42)
+
+ simplex_tree = rips_complex.create_simplex_tree(max_dimension=1)
+
+ assert simplex_tree.__is_defined() == True
+ assert simplex_tree.__is_persistence_defined() == False
+
+ assert simplex_tree.num_simplices() == 10
+ assert simplex_tree.num_vertices() == 4
+
+ assert simplex_tree.get_filtration() == [
+ ([0], 0.0),
+ ([1], 0.0),
+ ([2], 0.0),
+ ([3], 0.0),
+ ([0, 1], 1.0),
+ ([0, 2], 1.0),
+ ([1, 3], 1.0),
+ ([2, 3], 1.0),
+ ([1, 2], 1.4142135623730951),
+ ([0, 3], 1.4142135623730951),
+ ]
+ assert simplex_tree.get_star([0]) == [
+ ([0], 0.0),
+ ([0, 1], 1.0),
+ ([0, 2], 1.0),
+ ([0, 3], 1.4142135623730951),
+ ]
+ assert simplex_tree.get_cofaces([0], 1) == [
+ ([0, 1], 1.0),
+ ([0, 2], 1.0),
+ ([0, 3], 1.4142135623730951),
+ ]
+
+
+def test_filtered_rips_from_distance_matrix():
+ distance_matrix = [[0], [1, 0], [1, sqrt(2), 0], [sqrt(2), 1, 1, 0]]
+ filtered_rips = RipsComplex(distance_matrix=distance_matrix, max_edge_length=1.0)
+
+ simplex_tree = filtered_rips.create_simplex_tree(max_dimension=1)
+
+ assert simplex_tree.__is_defined() == True
+ assert simplex_tree.__is_persistence_defined() == False
+
+ assert simplex_tree.num_simplices() == 8
+ assert simplex_tree.num_vertices() == 4
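The 1.4142... filtration values above are just the unit-square diagonal sqrt(2). Once the complex is built, persistence can be computed on the resulting simplex tree; a hedged follow-on sketch reusing only calls already shown in this diff:

    from gudhi import RipsComplex

    points = [[0, 0], [1, 0], [0, 1], [1, 1]]
    rips = RipsComplex(points=points, max_edge_length=42)

    # max_dimension=2 also fills in the triangles of the square.
    st = rips.create_simplex_tree(max_dimension=2)
    diag = st.persistence()

    # Expect a single H1 class: the square's boundary cycle, born when the four
    # unit edges appear (1.0) and killed when the triangles arrive (around sqrt(2)).
    h1 = [pair for (dim, pair) in diag if dim == 1]
    assert len(h1) == 1 and h1[0][0] == 1.0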
diff --git a/src/python/test/test_simplex_tree.py b/src/python/test/test_simplex_tree.py
new file mode 100755
index 00000000..8d8971c1
--- /dev/null
+++ b/src/python/test/test_simplex_tree.py
@@ -0,0 +1,250 @@
+from gudhi import SimplexTree
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "MIT"
+
+
+def test_insertion():
+ st = SimplexTree()
+ assert st.__is_defined() == True
+ assert st.__is_persistence_defined() == False
+
+ # insert test
+ assert st.insert([0, 1]) == True
+
+ assert st.dimension() == 1
+
+ assert st.insert([0, 1, 2], filtration=4.0) == True
+
+ assert st.dimension() == 2
+
+ assert st.num_simplices() == 7
+ assert st.num_vertices() == 3
+
+ # find test
+ assert st.find([0, 1, 2]) == True
+ assert st.find([0, 1]) == True
+ assert st.find([0, 2]) == True
+ assert st.find([0]) == True
+ assert st.find([1]) == True
+ assert st.find([2]) == True
+ assert st.find([3]) == False
+ assert st.find([0, 3]) == False
+ assert st.find([1, 3]) == False
+ assert st.find([2, 3]) == False
+
+ # filtration test
+ st.initialize_filtration()
+ assert st.filtration([0, 1, 2]) == 4.0
+ assert st.filtration([0, 2]) == 4.0
+ assert st.filtration([1, 2]) == 4.0
+ assert st.filtration([2]) == 4.0
+ assert st.filtration([0, 1]) == 0.0
+ assert st.filtration([0]) == 0.0
+ assert st.filtration([1]) == 0.0
+
+ # skeleton test
+ assert st.get_skeleton(2) == [
+ ([0, 1, 2], 4.0),
+ ([0, 1], 0.0),
+ ([0, 2], 4.0),
+ ([0], 0.0),
+ ([1, 2], 4.0),
+ ([1], 0.0),
+ ([2], 4.0),
+ ]
+ assert st.get_skeleton(1) == [
+ ([0, 1], 0.0),
+ ([0, 2], 4.0),
+ ([0], 0.0),
+ ([1, 2], 4.0),
+ ([1], 0.0),
+ ([2], 4.0),
+ ]
+ assert st.get_skeleton(0) == [([0], 0.0), ([1], 0.0), ([2], 4.0)]
+
+ # remove_maximal_simplex test
+ assert st.get_cofaces([0, 1, 2], 1) == []
+ st.remove_maximal_simplex([0, 1, 2])
+ assert st.get_skeleton(2) == [
+ ([0, 1], 0.0),
+ ([0, 2], 4.0),
+ ([0], 0.0),
+ ([1, 2], 4.0),
+ ([1], 0.0),
+ ([2], 4.0),
+ ]
+ assert st.find([0, 1, 2]) == False
+ assert st.find([0, 1]) == True
+ assert st.find([0, 2]) == True
+ assert st.find([0]) == True
+ assert st.find([1]) == True
+ assert st.find([2]) == True
+
+ st.initialize_filtration()
+ assert st.persistence(persistence_dim_max=True) == [
+ (1, (4.0, float("inf"))),
+ (0, (0.0, float("inf"))),
+ ]
+ assert st.__is_persistence_defined() == True
+
+ assert st.betti_numbers() == [1, 1]
+ assert st.persistent_betti_numbers(-0.1, 10000.0) == [0, 0]
+ assert st.persistent_betti_numbers(0.0, 10000.0) == [1, 0]
+ assert st.persistent_betti_numbers(3.9, 10000.0) == [1, 0]
+ assert st.persistent_betti_numbers(4.0, 10000.0) == [1, 1]
+ assert st.persistent_betti_numbers(9999.0, 10000.0) == [1, 1]
+
+
+def test_expansion():
+ st = SimplexTree()
+ assert st.__is_defined() == True
+ assert st.__is_persistence_defined() == False
+
+ # insert test
+ assert st.insert([3, 2], 0.1) == True
+ assert st.insert([2, 0], 0.2) == True
+ assert st.insert([1, 0], 0.3) == True
+ assert st.insert([3, 1], 0.4) == True
+ assert st.insert([2, 1], 0.5) == True
+ assert st.insert([6, 5], 0.6) == True
+ assert st.insert([4, 2], 0.7) == True
+ assert st.insert([3, 0], 0.8) == True
+ assert st.insert([6, 4], 0.9) == True
+ assert st.insert([6, 3], 1.0) == True
+
+ assert st.num_vertices() == 7
+ assert st.num_simplices() == 17
+ assert st.get_filtration() == [
+ ([2], 0.1),
+ ([3], 0.1),
+ ([2, 3], 0.1),
+ ([0], 0.2),
+ ([0, 2], 0.2),
+ ([1], 0.3),
+ ([0, 1], 0.3),
+ ([1, 3], 0.4),
+ ([1, 2], 0.5),
+ ([5], 0.6),
+ ([6], 0.6),
+ ([5, 6], 0.6),
+ ([4], 0.7),
+ ([2, 4], 0.7),
+ ([0, 3], 0.8),
+ ([4, 6], 0.9),
+ ([3, 6], 1.0),
+ ]
+
+ st.expansion(3)
+ assert st.num_vertices() == 7
+ assert st.num_simplices() == 22
+ st.initialize_filtration()
+
+ assert st.get_filtration() == [
+ ([2], 0.1),
+ ([3], 0.1),
+ ([2, 3], 0.1),
+ ([0], 0.2),
+ ([0, 2], 0.2),
+ ([1], 0.3),
+ ([0, 1], 0.3),
+ ([1, 3], 0.4),
+ ([1, 2], 0.5),
+ ([0, 1, 2], 0.5),
+ ([1, 2, 3], 0.5),
+ ([5], 0.6),
+ ([6], 0.6),
+ ([5, 6], 0.6),
+ ([4], 0.7),
+ ([2, 4], 0.7),
+ ([0, 3], 0.8),
+ ([0, 1, 3], 0.8),
+ ([0, 2, 3], 0.8),
+ ([0, 1, 2, 3], 0.8),
+ ([4, 6], 0.9),
+ ([3, 6], 1.0),
+ ]
+
+
+def test_automatic_dimension():
+ st = SimplexTree()
+ assert st.__is_defined() == True
+ assert st.__is_persistence_defined() == False
+
+ # insert test
+ assert st.insert([0, 1, 3], filtration=0.5) == True
+ assert st.insert([0, 1, 2], filtration=1.0) == True
+
+ assert st.num_vertices() == 4
+ assert st.num_simplices() == 11
+
+ assert st.dimension() == 2
+ assert st.upper_bound_dimension() == 2
+
+ assert st.prune_above_filtration(0.6) == True
+ assert st.dimension() == 2
+ assert st.upper_bound_dimension() == 2
+
+ st.assign_filtration([0, 1, 3], 0.7)
+ assert st.filtration([0, 1, 3]) == 0.7
+
+ st.remove_maximal_simplex([0, 1, 3])
+ assert st.upper_bound_dimension() == 2
+ assert st.dimension() == 1
+ assert st.upper_bound_dimension() == 1
+
+
+def test_make_filtration_non_decreasing():
+ st = SimplexTree()
+ assert st.__is_defined() == True
+ assert st.__is_persistence_defined() == False
+
+ # Inserted simplex:
+ # 1
+ # o
+ # /X\
+ # o---o---o---o
+ # 2 0 3\X/4
+ # o
+ # 5
+ assert st.insert([2, 1, 0], filtration=2.0) == True
+ assert st.insert([3, 0], filtration=2.0) == True
+ assert st.insert([3, 4, 5], filtration=2.0) == True
+
+ assert st.make_filtration_non_decreasing() == False
+
+ # Because of the non-decreasing property of the simplex tree, { 0 }, { 1 } and
+ # { 0, 1 } will have their filtration value lowered from 2.0 to 1.0
+ st.insert([0, 1, 6, 7], filtration=1.0)
+
+ assert st.make_filtration_non_decreasing() == False
+
+ # Modify specific values to test make_filtration_non_decreasing
+ st.assign_filtration([0, 1, 6, 7], 0.8)
+ st.assign_filtration([0, 1, 6], 0.9)
+ st.assign_filtration([0, 6], 0.6)
+ st.assign_filtration([3, 4, 5], 1.2)
+ st.assign_filtration([3, 4], 1.1)
+ st.assign_filtration([4, 5], 1.99)
+
+ assert st.make_filtration_non_decreasing() == True
+
+ assert st.filtration([0, 1, 6, 7]) == 1.0
+ assert st.filtration([0, 1, 6]) == 1.0
+ assert st.filtration([0, 1]) == 1.0
+ assert st.filtration([0]) == 1.0
+ assert st.filtration([1]) == 1.0
+ assert st.filtration([3, 4, 5]) == 2.0
+ assert st.filtration([3, 4]) == 2.0
+ assert st.filtration([4, 5]) == 2.0
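expansion(k) above only creates higher-dimensional simplices whose complete 1-skeleton is already present, and gives each new simplex the maximum filtration value of its faces. A compact hedged illustration on a single triangle:

    from gudhi import SimplexTree

    st = SimplexTree()
    st.insert([0, 1], filtration=0.1)
    st.insert([1, 2], filtration=0.2)
    st.insert([0, 2], filtration=0.3)

    st.expansion(2)  # the triangle [0, 1, 2] is created from its three edges
    assert st.find([0, 1, 2]) == True
    assert st.filtration([0, 1, 2]) == 0.3  # max of the edge filtration values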
diff --git a/src/python/test/test_subsampling.py b/src/python/test/test_subsampling.py
new file mode 100755
index 00000000..c816e203
--- /dev/null
+++ b/src/python/test/test_subsampling.py
@@ -0,0 +1,179 @@
+import gudhi
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "MIT"
+
+
+def test_write_off_file_for_tests():
+ file = open("subsample.off", "w")
+ file.write("nOFF\n")
+ file.write("2 7 0 0\n")
+ file.write("1.0 1.0\n")
+ file.write("7.0 0.0\n")
+ file.write("4.0 6.0\n")
+ file.write("9.0 6.0\n")
+ file.write("0.0 14.0\n")
+ file.write("2.0 19.0\n")
+ file.write("9.0 17.0\n")
+ file.close()
+
+
+def test_simple_choose_n_farthest_points_with_a_starting_point():
+ point_set = [[0, 1], [0, 0], [1, 0], [1, 1]]
+ i = 0
+ for point in point_set:
+ # The iteration starts with the given starting point
+ sub_set = gudhi.choose_n_farthest_points(
+ points=point_set, nb_points=1, starting_point=i
+ )
+ assert sub_set[0] == point_set[i]
+ i = i + 1
+
+ # The iteration then finds the farthest point
+ sub_set = gudhi.choose_n_farthest_points(
+ points=point_set, nb_points=2, starting_point=1
+ )
+ assert sub_set[1] == point_set[3]
+ sub_set = gudhi.choose_n_farthest_points(
+ points=point_set, nb_points=2, starting_point=3
+ )
+ assert sub_set[1] == point_set[1]
+ sub_set = gudhi.choose_n_farthest_points(
+ points=point_set, nb_points=2, starting_point=0
+ )
+ assert sub_set[1] == point_set[2]
+ sub_set = gudhi.choose_n_farthest_points(
+ points=point_set, nb_points=2, starting_point=2
+ )
+ assert sub_set[1] == point_set[0]
+
+ # Test the limits
+ assert (
+ gudhi.choose_n_farthest_points(points=[], nb_points=0, starting_point=0) == []
+ )
+ assert (
+ gudhi.choose_n_farthest_points(points=[], nb_points=1, starting_point=0) == []
+ )
+ assert (
+ gudhi.choose_n_farthest_points(points=[], nb_points=0, starting_point=1) == []
+ )
+ assert (
+ gudhi.choose_n_farthest_points(points=[], nb_points=1, starting_point=1) == []
+ )
+
+ # From off file test
+ for i in range(0, 7):
+ assert (
+ len(
+ gudhi.choose_n_farthest_points(
+ off_file="subsample.off", nb_points=i, starting_point=i
+ )
+ )
+ == i
+ )
+
+
+def test_simple_choose_n_farthest_points_randomed():
+ point_set = [[0, 1], [0, 0], [1, 0], [1, 1]]
+ # Test the limits
+ assert gudhi.choose_n_farthest_points(points=[], nb_points=0) == []
+ assert gudhi.choose_n_farthest_points(points=[], nb_points=1) == []
+ assert gudhi.choose_n_farthest_points(points=point_set, nb_points=0) == []
+
+ # Go further than the point set size on purpose
+ for iter in range(1, 10):
+ sub_set = gudhi.choose_n_farthest_points(points=point_set, nb_points=iter)
+ for sub in sub_set:
+ found = False
+ for point in point_set:
+ if point == sub:
+ found = True
+ # Check that each point of the subset belongs to the point set
+ assert found == True
+
+ # From off file test
+ for i in range(0, 7):
+ assert (
+ len(gudhi.choose_n_farthest_points(off_file="subsample.off", nb_points=i))
+ == i
+ )
+
+
+def test_simple_pick_n_random_points():
+ point_set = [[0, 1], [0, 0], [1, 0], [1, 1]]
+ # Test the limits
+ assert gudhi.pick_n_random_points(points=[], nb_points=0) == []
+ assert gudhi.pick_n_random_points(points=[], nb_points=1) == []
+ assert gudhi.pick_n_random_points(points=point_set, nb_points=0) == []
+
+ # Go further than the point set size on purpose
+ for iter in range(1, 10):
+ sub_set = gudhi.pick_n_random_points(points=point_set, nb_points=iter)
+ for sub in sub_set:
+ found = False
+ for point in point_set:
+ if point == sub:
+ found = True
+ # Check that each point of the subset belongs to the point set
+ assert found == True
+
+ # From off file test
+ for i in range(0, 7):
+ assert (
+ len(gudhi.pick_n_random_points(off_file="subsample.off", nb_points=i)) == i
+ )
+
+
+def test_simple_sparsify_points():
+ point_set = [[0, 1], [0, 0], [1, 0], [1, 1]]
+ # Test the limits
+ # assert gudhi.sparsify_point_set(points = [], min_squared_dist = 0.0) == []
+ # assert gudhi.sparsify_point_set(points = [], min_squared_dist = 10.0) == []
+ assert gudhi.sparsify_point_set(points=point_set, min_squared_dist=0.0) == point_set
+ assert gudhi.sparsify_point_set(points=point_set, min_squared_dist=1.0) == point_set
+ assert gudhi.sparsify_point_set(points=point_set, min_squared_dist=2.0) == [
+ [0, 1],
+ [1, 0],
+ ]
+ assert gudhi.sparsify_point_set(points=point_set, min_squared_dist=2.01) == [[0, 1]]
+
+ assert (
+ len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=0.0))
+ == 7
+ )
+ assert (
+ len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=30.0))
+ == 5
+ )
+ assert (
+ len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=40.0))
+ == 4
+ )
+ assert (
+ len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=90.0))
+ == 3
+ )
+ assert (
+ len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=100.0))
+ == 2
+ )
+ assert (
+ len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=325.0))
+ == 2
+ )
+ assert (
+ len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=325.01))
+ == 1
+ )
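A common pipeline, sketched here under the assumption that the calls exercised above compose as expected: subsample first, then build a complex on the reduced point set.

    import gudhi

    # Keep 3 farthest-point landmarks from the OFF file written above, then
    # build a Rips complex on just those landmarks.
    landmarks = gudhi.choose_n_farthest_points(
        off_file="subsample.off", nb_points=3, starting_point=0
    )
    rips = gudhi.RipsComplex(points=landmarks, max_edge_length=1e12)
    st = rips.create_simplex_tree(max_dimension=2)
    assert st.num_vertices() == 3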
diff --git a/src/python/test/test_tangential_complex.py b/src/python/test/test_tangential_complex.py
new file mode 100755
index 00000000..0f828d8e
--- /dev/null
+++ b/src/python/test/test_tangential_complex.py
@@ -0,0 +1,55 @@
+from gudhi import TangentialComplex, SimplexTree
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "MIT"
+
+
+def test_tangential():
+ point_list = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]]
+ tc = TangentialComplex(intrisic_dim=1, points=point_list)
+ assert tc.__is_defined() == True
+ assert tc.num_vertices() == 4
+ assert tc.num_simplices() == 0
+ assert tc.num_inconsistent_simplices() == 0
+ assert tc.num_inconsistent_stars() == 0
+
+ tc.compute_tangential_complex()
+ assert tc.num_vertices() == 4
+ assert tc.num_simplices() == 4
+ assert tc.num_inconsistent_simplices() == 0
+ assert tc.num_inconsistent_stars() == 0
+
+ st = tc.create_simplex_tree()
+ assert st.__is_defined() == True
+ assert st.__is_persistence_defined() == False
+
+ assert st.num_simplices() == 6
+ assert st.num_vertices() == 4
+
+ assert st.get_filtration() == [
+ ([0], 0.0),
+ ([1], 0.0),
+ ([2], 0.0),
+ ([0, 2], 0.0),
+ ([3], 0.0),
+ ([1, 3], 0.0),
+ ]
+ assert st.get_cofaces([0], 1) == [([0, 2], 0.0)]
+
+ assert point_list[0] == tc.get_point(0)
+ assert point_list[1] == tc.get_point(1)
+ assert point_list[2] == tc.get_point(2)
+ assert point_list[3] == tc.get_point(3)
+ assert tc.get_point(4) == []
+ assert tc.get_point(125) == []
diff --git a/src/python/test/test_witness_complex.py b/src/python/test/test_witness_complex.py
new file mode 100755
index 00000000..36ced635
--- /dev/null
+++ b/src/python/test/test_witness_complex.py
@@ -0,0 +1,62 @@
+from gudhi import WitnessComplex, StrongWitnessComplex, SimplexTree
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 Inria"
+__license__ = "MIT"
+
+
+def test_empty_witness_complex():
+ witness = WitnessComplex()
+ assert witness.__is_defined() == False
+
+
+def test_witness_complex():
+ nearest_landmark_table = [
+ [[0, 0], [1, 1], [2, 2], [3, 3], [4, 4]],
+ [[1, 0], [2, 1], [3, 2], [4, 3], [0, 4]],
+ [[2, 0], [3, 1], [4, 2], [0, 3], [1, 4]],
+ [[3, 0], [4, 1], [0, 2], [1, 3], [2, 4]],
+ [[4, 0], [0, 1], [1, 2], [2, 3], [3, 4]],
+ ]
+
+ witness_complex = WitnessComplex(nearest_landmark_table=nearest_landmark_table)
+ simplex_tree = witness_complex.create_simplex_tree(max_alpha_square=4.1)
+ assert simplex_tree.num_vertices() == 5
+ assert simplex_tree.num_simplices() == 31
+ simplex_tree = witness_complex.create_simplex_tree(
+ max_alpha_square=4.1, limit_dimension=2
+ )
+ assert simplex_tree.num_vertices() == 5
+ assert simplex_tree.num_simplices() == 25
+
+
+def test_strong_witness_complex():
+ nearest_landmark_table = [
+ [[0, 0], [1, 1], [2, 2], [3, 3], [4, 4]],
+ [[1, 0], [2, 1], [3, 2], [4, 3], [0, 4]],
+ [[2, 0], [3, 1], [4, 2], [0, 3], [1, 4]],
+ [[3, 0], [4, 1], [0, 2], [1, 3], [2, 4]],
+ [[4, 0], [0, 1], [1, 2], [2, 3], [3, 4]],
+ ]
+
+ strong_witness_complex = StrongWitnessComplex(
+ nearest_landmark_table=nearest_landmark_table
+ )
+ simplex_tree = strong_witness_complex.create_simplex_tree(max_alpha_square=4.1)
+ assert simplex_tree.num_vertices() == 5
+ assert simplex_tree.num_simplices() == 31
+ simplex_tree = strong_witness_complex.create_simplex_tree(
+ max_alpha_square=4.1, limit_dimension=2
+ )
+ assert simplex_tree.num_vertices() == 5
+ assert simplex_tree.num_simplices() == 25
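The simplex counts in these witness tests are plain binomial arithmetic: with 5 landmarks and a generous max_alpha_square the full 4-simplex appears, 2^5 - 1 = 31 faces in total, while limit_dimension=2 keeps only C(5,1) + C(5,2) + C(5,3) = 5 + 10 + 10 = 25 of them. A one-line hedged check:

    from math import comb  # Python >= 3.8

    assert 2 ** 5 - 1 == 31                            # all faces of the 4-simplex
    assert comb(5, 1) + comb(5, 2) + comb(5, 3) == 25  # faces of dimension <= 2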