summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorGard Spreemann <gspreemann@gmail.com>2018-09-05 12:52:26 +0200
committerGard Spreemann <gspreemann@gmail.com>2018-09-05 12:52:26 +0200
commitef5c01b599c6a6b23b1f3e92736ec67a6e62b55f (patch)
tree593f9ca6e7b661645f27243619652953b11e8a4f
parentc524232f734de875d69e2f190f01a6c976024368 (diff)
GUDHI 2.3.0 as released by upstream in a tarball.upstream/2.3.0upstream/latest
-rw-r--r--CMakeGUDHIVersion.txt2
-rw-r--r--CMakeLists.txt2
-rw-r--r--Doxyfile54
-rw-r--r--GUDHIConfig.cmake.in7
-rw-r--r--cmake/modules/FindCython.cmake44
-rw-r--r--cmake/modules/GUDHI_doxygen_target.cmake7
-rw-r--r--cmake/modules/GUDHI_third_party_libraries.cmake32
-rw-r--r--cmake/modules/GUDHI_user_version_target.cmake171
-rw-r--r--cython/CMakeLists.txt228
-rw-r--r--cython/cython/nerve_gic.pyx401
-rwxr-xr-xcython/cython/persistence_graphical_tools.py377
-rw-r--r--cython/cython/rips_complex.pyx2
-rw-r--r--cython/cython/subsampling.pyx8
-rw-r--r--cython/doc/_templates/layout.html2
-rw-r--r--cython/doc/examples.rst4
-rw-r--r--cython/doc/index.rst5
-rw-r--r--cython/doc/installation.rst54
-rw-r--r--cython/doc/nerve_gic_complex_ref.rst10
-rw-r--r--cython/doc/nerve_gic_complex_sum.rst15
-rw-r--r--cython/doc/nerve_gic_complex_user.rst312
-rwxr-xr-xcython/example/coordinate_graph_induced_complex.py68
-rwxr-xr-xcython/example/functional_graph_induced_complex.py69
-rwxr-xr-xcython/example/nerve_of_a_covering.py70
-rwxr-xr-xcython/example/voronoi_graph_induced_complex.py65
-rw-r--r--cython/gudhi.pyx.in2
-rw-r--r--cython/include/Nerve_gic_interface.h61
-rw-r--r--cython/setup.py.in6
-rwxr-xr-xcython/test/test_cover_complex.py92
-rw-r--r--data/points/human.COPYRIGHT77
-rw-r--r--doc/common/header.html2
-rw-r--r--doc/common/installation.h24
-rw-r--r--example/Bottleneck_distance/alpha_rips_persistence_bottleneck_distance.cpp2
-rw-r--r--example/Cech_complex/cech_complex_step_by_step.cpp2
-rw-r--r--example/Nerve_GIC/CoordGIC.cpp4
-rw-r--r--example/Persistent_cohomology/persistence_from_file.cpp2
-rw-r--r--example/Persistent_cohomology/rips_multifield_persistence.cpp2
-rw-r--r--example/Persistent_cohomology/rips_persistence_step_by_step.cpp2
-rw-r--r--example/Persistent_cohomology/rips_persistence_via_boundary_matrix.cpp2
-rw-r--r--example/Simplex_tree/cech_complex_cgal_mini_sphere_3d.cpp2
-rw-r--r--include/gudhi/Bottleneck.h4
-rw-r--r--include/gudhi/GIC.h12
-rw-r--r--include/gudhi/Persistent_cohomology.h5
-rw-r--r--include/gudhi/Simplex_tree.h5
-rw-r--r--include/gudhi/Tangential_complex.h1090
-rw-r--r--utilities/Alpha_complex/alpha_complex_3d_persistence.cpp2
-rw-r--r--utilities/Alpha_complex/alpha_complex_persistence.cpp2
-rw-r--r--utilities/Alpha_complex/exact_alpha_complex_3d_persistence.cpp2
-rw-r--r--utilities/Alpha_complex/periodic_alpha_complex_3d_persistence.cpp2
-rw-r--r--utilities/Alpha_complex/weighted_alpha_complex_3d_persistence.cpp2
-rw-r--r--utilities/Cech_complex/cech_persistence.cpp2
-rw-r--r--utilities/Rips_complex/rips_correlation_matrix_persistence.cpp2
-rw-r--r--utilities/Rips_complex/rips_distance_matrix_persistence.cpp2
-rw-r--r--utilities/Rips_complex/rips_persistence.cpp2
-rw-r--r--utilities/Rips_complex/sparse_rips_persistence.cpp2
-rw-r--r--utilities/Witness_complex/strong_witness_persistence.cpp2
-rw-r--r--utilities/Witness_complex/weak_witness_persistence.cpp2
56 files changed, 2301 insertions, 1130 deletions
diff --git a/CMakeGUDHIVersion.txt b/CMakeGUDHIVersion.txt
index 6811d7e1..ebaddd47 100644
--- a/CMakeGUDHIVersion.txt
+++ b/CMakeGUDHIVersion.txt
@@ -1,5 +1,5 @@
set (GUDHI_MAJOR_VERSION 2)
-set (GUDHI_MINOR_VERSION 2)
+set (GUDHI_MINOR_VERSION 3)
set (GUDHI_PATCH_VERSION 0)
set(GUDHI_VERSION ${GUDHI_MAJOR_VERSION}.${GUDHI_MINOR_VERSION}.${GUDHI_PATCH_VERSION})
diff --git a/CMakeLists.txt b/CMakeLists.txt
index c60346d5..6c446104 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -71,7 +71,7 @@ export(PACKAGE GUDHI)
message("++ make install will install ${PROJECT_NAME} in the following directory : ${CMAKE_INSTALL_PREFIX}")
# Create the GUDHIConfig.cmake and GUDHIConfigVersion files
-set(CONF_INCLUDE_DIRS "${CMAKE_INSTALL_PREFIX}/include")
+set(CONF_INCLUDE_DIRS "${CMAKE_SOURCE_DIR}/include;${CMAKE_INSTALL_PREFIX}/include")
configure_file(GUDHIConfig.cmake.in "${PROJECT_BINARY_DIR}/GUDHIConfig.cmake" @ONLY)
configure_file(GUDHIConfigVersion.cmake.in "${PROJECT_BINARY_DIR}/GUDHIConfigVersion.cmake" @ONLY)
diff --git a/Doxyfile b/Doxyfile
index 020667e9..a16431ad 100644
--- a/Doxyfile
+++ b/Doxyfile
@@ -38,7 +38,7 @@ PROJECT_NAME = "GUDHI"
# could be handy for archiving the generated documentation or if some version
# control system is used.
-PROJECT_NUMBER = "2.2.0"
+PROJECT_NUMBER = "2.3.0"
# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a
@@ -780,12 +780,12 @@ RECURSIVE = YES
# Note that relative paths are relative to the directory from which doxygen is
# run.
-EXCLUDE = data/ \
- example/ \
- GudhUI/ \
- cmake/ \
- src/cython/ \
- include/gudhi_patches/
+EXCLUDE = data/ \
+ example/ \
+ GudhUI/ \
+ cmake/ \
+ src/cython/ \
+ include/gudhi_patches/
# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or
# directories that are symbolic links (a Unix file system feature) are excluded
@@ -818,9 +818,9 @@ EXCLUDE_SYMBOLS =
# that contain example code fragments that are included (see the \include
# command).
-EXAMPLE_PATH = biblio/ \
- example/ \
- utilities/
+EXAMPLE_PATH = biblio/ \
+ example/ \
+ utilities/
# If the value of the EXAMPLE_PATH tag contains directories, you can use the
# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and
@@ -840,22 +840,24 @@ EXAMPLE_RECURSIVE = NO
# that contain images that are to be included in the documentation (see the
# \image command).
-IMAGE_PATH = doc/Skeleton_blocker/ \
- doc/Alpha_complex/ \
- doc/common/ \
- doc/Cech_complex/ \
- doc/Contraction/ \
- doc/Simplex_tree/ \
- doc/Persistent_cohomology/ \
- doc/Witness_complex/ \
- doc/Bitmap_cubical_complex/ \
- doc/Rips_complex/ \
- doc/Subsampling/ \
- doc/Spatial_searching/ \
- doc/Tangential_complex/ \
- doc/Bottleneck_distance/ \
- doc/Nerve_GIC/ \
- doc/Persistence_representations/
+IMAGE_PATH = doc/common/ \
+ doc/Alpha_complex/ \
+ doc/Bitmap_cubical_complex/ \
+ doc/Bottleneck_distance/ \
+ doc/Contraction/ \
+ doc/Cech_complex/ \
+ doc/Hasse_complex/ \
+ doc/Persistence_representations/ \
+ doc/Persistent_cohomology/ \
+ doc/Rips_complex/ \
+ doc/Simplex_tree/ \
+ doc/Skeleton_blocker/ \
+ doc/Spatial_searching/ \
+ doc/Subsampling/ \
+ doc/Tangential_complex/ \
+ doc/Witness_complex/ \
+ doc/Nerve_GIC/ \
+
# The INPUT_FILTER tag can be used to specify a program that doxygen should
# invoke to filter for each input file. Doxygen will invoke the filter program
diff --git a/GUDHIConfig.cmake.in b/GUDHIConfig.cmake.in
index 02b540dc..8d82f235 100644
--- a/GUDHIConfig.cmake.in
+++ b/GUDHIConfig.cmake.in
@@ -1,7 +1,12 @@
# - Config file for the GUDHI package
# It defines the following variables
# GUDHI_INCLUDE_DIRS - include directories for GUDHI
+#
+# Order is :
+# 1. user defined GUDHI_INCLUDE_DIRS
+# 2. ${CMAKE_SOURCE_DIR}/include => Where the 'cmake' has been done
+# 3. ${CMAKE_INSTALL_PREFIX}/include => Where the 'make install' has been performed
# Compute paths
-set(GUDHI_INCLUDE_DIRS "@CONF_INCLUDE_DIRS@")
+set(GUDHI_INCLUDE_DIRS "${GUDHI_INCLUDE_DIRS};@CONF_INCLUDE_DIRS@")
diff --git a/cmake/modules/FindCython.cmake b/cmake/modules/FindCython.cmake
deleted file mode 100644
index 04aed1f8..00000000
--- a/cmake/modules/FindCython.cmake
+++ /dev/null
@@ -1,44 +0,0 @@
-# Find the Cython compiler.
-#
-# This code sets the following variables:
-#
-# CYTHON_EXECUTABLE
-#
-# See also UseCython.cmake
-
-#=============================================================================
-# Copyright 2011 Kitware, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#=============================================================================
-
-# Use the Cython executable that lives next to the Python executable
-# if it is a local installation.
-find_package( PythonInterp )
-if( PYTHONINTERP_FOUND )
- get_filename_component( _python_path ${PYTHON_EXECUTABLE} PATH )
- find_program( CYTHON_EXECUTABLE
- NAMES cython cython.bat cython3
- HINTS ${_python_path}
- )
-else()
- find_program( CYTHON_EXECUTABLE
- NAMES cython cython.bat cython3
- )
-endif()
-
-
-include( FindPackageHandleStandardArgs )
-FIND_PACKAGE_HANDLE_STANDARD_ARGS( Cython REQUIRED_VARS CYTHON_EXECUTABLE )
-
-mark_as_advanced( CYTHON_EXECUTABLE )
diff --git a/cmake/modules/GUDHI_doxygen_target.cmake b/cmake/modules/GUDHI_doxygen_target.cmake
index f3e2d9f5..9e10e566 100644
--- a/cmake/modules/GUDHI_doxygen_target.cmake
+++ b/cmake/modules/GUDHI_doxygen_target.cmake
@@ -3,14 +3,17 @@ find_package(Doxygen)
if(DOXYGEN_FOUND)
# configure_file(${CMAKE_CURRENT_SOURCE_DIR}/Doxyfile.in ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile @ONLY)
- #starting from cmake 3.9 the usage of DOXYGEN_EXECUTABLE is deprecated
+ # starting from cmake 3.9 the usage of DOXYGEN_EXECUTABLE is deprecated
if(TARGET Doxygen::doxygen)
get_property(DOXYGEN_EXECUTABLE TARGET Doxygen::doxygen PROPERTY IMPORTED_LOCATION)
endif()
add_custom_target(doxygen ${DOXYGEN_EXECUTABLE} ${GUDHI_USER_VERSION_DIR}/Doxyfile
WORKING_DIRECTORY ${GUDHI_USER_VERSION_DIR}
- DEPENDS ${GUDHI_USER_VERSION_DIR}/Doxyfile ${GUDHI_DOXYGEN_DEPENDENCY}
COMMENT "Generating API documentation with Doxygen in ${GUDHI_USER_VERSION_DIR}/doc/html/" VERBATIM)
+ if(TARGET user_version)
+ # In dev version, doxygen target depends on user_version target. Not existing in user version
+ add_dependencies(doxygen user_version)
+ endif()
endif(DOXYGEN_FOUND)
diff --git a/cmake/modules/GUDHI_third_party_libraries.cmake b/cmake/modules/GUDHI_third_party_libraries.cmake
index 7433f2f3..f03c2177 100644
--- a/cmake/modules/GUDHI_third_party_libraries.cmake
+++ b/cmake/modules/GUDHI_third_party_libraries.cmake
@@ -8,11 +8,9 @@ endif(NOT Boost_FOUND)
find_package(GMP)
if(GMP_FOUND)
- message(STATUS "GMP_LIBRARIES = ${GMP_LIBRARIES}")
INCLUDE_DIRECTORIES(${GMP_INCLUDE_DIR})
find_package(GMPXX)
if(GMPXX_FOUND)
- message(STATUS "GMPXX_LIBRARIES = ${GMPXX_LIBRARIES}")
INCLUDE_DIRECTORIES(${GMPXX_INCLUDE_DIR})
endif()
endif()
@@ -79,7 +77,6 @@ endif(WITH_GUDHI_USE_TBB)
set(CGAL_WITH_EIGEN3_VERSION 0.0.0)
find_package(Eigen3 3.1.0)
if (EIGEN3_FOUND)
- message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.")
include( ${EIGEN3_USE_FILE} )
set(CGAL_WITH_EIGEN3_VERSION ${CGAL_VERSION})
endif (EIGEN3_FOUND)
@@ -119,7 +116,34 @@ message(STATUS "boost library dirs:" ${Boost_LIBRARY_DIRS})
# Find the correct Python interpreter.
# Can be set with -DPYTHON_EXECUTABLE=/usr/bin/python3 or -DPython_ADDITIONAL_VERSIONS=3 for instance.
-find_package(Cython)
+find_package( PythonInterp )
+
+# find_python_module tries to import module in Python interpreter and to retrieve its version number
+# returns ${PYTHON_MODULE_NAME_UP}_VERSION and ${PYTHON_MODULE_NAME_UP}_FOUND
+function( find_python_module PYTHON_MODULE_NAME )
+ string(TOUPPER ${PYTHON_MODULE_NAME} PYTHON_MODULE_NAME_UP)
+ execute_process(
+ COMMAND ${PYTHON_EXECUTABLE} -c "import ${PYTHON_MODULE_NAME}; print(${PYTHON_MODULE_NAME}.__version__)"
+ RESULT_VARIABLE PYTHON_MODULE_RESULT
+ OUTPUT_VARIABLE PYTHON_MODULE_VERSION
+ ERROR_VARIABLE PYTHON_MODULE_ERROR)
+ if(PYTHON_MODULE_RESULT EQUAL 0)
+ # Remove carriage return
+ string(STRIP ${PYTHON_MODULE_VERSION} PYTHON_MODULE_VERSION)
+ set(${PYTHON_MODULE_NAME_UP}_VERSION ${PYTHON_MODULE_VERSION} PARENT_SCOPE)
+ set(${PYTHON_MODULE_NAME_UP}_FOUND TRUE PARENT_SCOPE)
+ else()
+ unset(${PYTHON_MODULE_NAME_UP}_VERSION PARENT_SCOPE)
+ set(${PYTHON_MODULE_NAME_UP}_FOUND FALSE PARENT_SCOPE)
+ endif()
+endfunction( find_python_module )
+
+if( PYTHONINTERP_FOUND )
+ find_python_module("cython")
+ find_python_module("pytest")
+ find_python_module("matplotlib")
+ find_python_module("numpy")
+endif()
if(NOT GUDHI_CYTHON_PATH)
message(FATAL_ERROR "ERROR: GUDHI_CYTHON_PATH is not valid.")
diff --git a/cmake/modules/GUDHI_user_version_target.cmake b/cmake/modules/GUDHI_user_version_target.cmake
index 1205966a..d43a6fa6 100644
--- a/cmake/modules/GUDHI_user_version_target.cmake
+++ b/cmake/modules/GUDHI_user_version_target.cmake
@@ -1,94 +1,95 @@
-# Some functionnalities requires CMake 2.8.11 minimum
-if (NOT CMAKE_VERSION VERSION_LESS 2.8.11)
+# Definition of the custom target user_version
+add_custom_target(user_version)
- # Definition of the custom target user_version
- add_custom_target(user_version)
-
- if(DEFINED USER_VERSION_DIR)
- # set the GUDHI_USER_VERSION_DIR with USER_VERSION_DIR defined by the user
- set(GUDHI_USER_VERSION_DIR ${CMAKE_CURRENT_BINARY_DIR}/${USER_VERSION_DIR})
- else()
- # set the GUDHI_USER_VERSION_DIR with timestamp and Gudhi version number
- string(TIMESTAMP DATE_AND_TIME "%Y-%m-%d-%H-%M-%S")
- set(GUDHI_USER_VERSION_DIR ${CMAKE_CURRENT_BINARY_DIR}/${DATE_AND_TIME}_GUDHI_${GUDHI_VERSION})
- endif()
+if(DEFINED USER_VERSION_DIR)
+ # set the GUDHI_USER_VERSION_DIR with USER_VERSION_DIR defined by the user
+ set(GUDHI_USER_VERSION_DIR ${CMAKE_CURRENT_BINARY_DIR}/${USER_VERSION_DIR})
+else()
+ # set the GUDHI_USER_VERSION_DIR with timestamp and Gudhi version number
+ string(TIMESTAMP DATE_AND_TIME "%Y-%m-%d-%H-%M-%S")
+ set(GUDHI_USER_VERSION_DIR ${CMAKE_CURRENT_BINARY_DIR}/${DATE_AND_TIME}_GUDHI_${GUDHI_VERSION})
+endif()
- set(GUDHI_DOXYGEN_DEPENDENCY user_version)
+add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ make_directory ${GUDHI_USER_VERSION_DIR}
+ COMMENT "user_version creation in ${GUDHI_USER_VERSION_DIR}")
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- make_directory ${GUDHI_USER_VERSION_DIR}
- COMMENT "user_version creation in ${GUDHI_USER_VERSION_DIR}")
-
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy ${CMAKE_SOURCE_DIR}/Conventions.txt ${GUDHI_USER_VERSION_DIR}/Conventions.txt)
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy ${CMAKE_SOURCE_DIR}/README ${GUDHI_USER_VERSION_DIR}/README)
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy ${CMAKE_SOURCE_DIR}/COPYING ${GUDHI_USER_VERSION_DIR}/COPYING)
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy ${CMAKE_SOURCE_DIR}/src/CMakeLists.txt ${GUDHI_USER_VERSION_DIR}/CMakeLists.txt)
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy ${CMAKE_SOURCE_DIR}/src/Doxyfile ${GUDHI_USER_VERSION_DIR}/Doxyfile)
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy ${CMAKE_SOURCE_DIR}/src/GUDHIConfigVersion.cmake.in ${GUDHI_USER_VERSION_DIR}/GUDHIConfigVersion.cmake.in)
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy ${CMAKE_SOURCE_DIR}/src/GUDHIConfig.cmake.in ${GUDHI_USER_VERSION_DIR}/GUDHIConfig.cmake.in)
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy ${CMAKE_SOURCE_DIR}/CMakeGUDHIVersion.txt ${GUDHI_USER_VERSION_DIR}/CMakeGUDHIVersion.txt)
-
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy_directory ${CMAKE_SOURCE_DIR}/biblio ${GUDHI_USER_VERSION_DIR}/biblio)
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy_directory ${CMAKE_SOURCE_DIR}/src/cython ${GUDHI_USER_VERSION_DIR}/cython)
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy_directory ${CMAKE_SOURCE_DIR}/data ${GUDHI_USER_VERSION_DIR}/data)
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy_directory ${CMAKE_SOURCE_DIR}/src/cmake ${GUDHI_USER_VERSION_DIR}/cmake)
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy_directory ${CMAKE_SOURCE_DIR}/src/GudhUI ${GUDHI_USER_VERSION_DIR}/GudhUI)
-
- set(GUDHI_DIRECTORIES "doc;example;concept;utilities")
- if (CGAL_VERSION VERSION_LESS 4.11.0)
- set(GUDHI_INCLUDE_DIRECTORIES "include/gudhi;include/Miniball;include/gudhi_patches")
- else ()
- set(GUDHI_INCLUDE_DIRECTORIES "include/gudhi;include/Miniball")
- endif ()
+foreach(GUDHI_MODULE ${GUDHI_MODULES_FULL_LIST})
+ set(GUDHI_DOXYGEN_IMAGE_PATH "${GUDHI_DOXYGEN_IMAGE_PATH} doc/${GUDHI_MODULE}/ \\ \n")
+endforeach(GUDHI_MODULE ${GUDHI_MODULES_FULL_LIST})
- foreach(GUDHI_MODULE ${GUDHI_MODULES_FULL_LIST})
- foreach(GUDHI_DIRECTORY ${GUDHI_DIRECTORIES})
- # Find files
- file(GLOB GUDHI_FILES ${CMAKE_SOURCE_DIR}/src/${GUDHI_MODULE}/${GUDHI_DIRECTORY}/*)
+# Generate setup.py file to cythonize Gudhi - This file must be named setup.py by convention
+configure_file(${CMAKE_SOURCE_DIR}/src/Doxyfile.in "${CMAKE_CURRENT_BINARY_DIR}/src/Doxyfile" @ONLY)
- foreach(GUDHI_FILE ${GUDHI_FILES})
- get_filename_component(GUDHI_FILE_NAME ${GUDHI_FILE} NAME)
- # GUDHI_FILE can be a file or a directory
- if(IS_DIRECTORY ${GUDHI_FILE})
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy_directory ${GUDHI_FILE} ${GUDHI_USER_VERSION_DIR}/${GUDHI_DIRECTORY}/${GUDHI_MODULE}/${GUDHI_FILE_NAME})
- else()
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy ${GUDHI_FILE} ${GUDHI_USER_VERSION_DIR}/${GUDHI_DIRECTORY}/${GUDHI_MODULE}/${GUDHI_FILE_NAME})
- endif()
- endforeach()
- endforeach(GUDHI_DIRECTORY ${GUDHI_DIRECTORIES})
+add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy ${CMAKE_CURRENT_BINARY_DIR}/src/Doxyfile ${GUDHI_USER_VERSION_DIR}/Doxyfile)
- foreach(GUDHI_INCLUDE_DIRECTORY ${GUDHI_INCLUDE_DIRECTORIES})
- # include files
- file(GLOB GUDHI_INCLUDE_FILES ${CMAKE_SOURCE_DIR}/src/${GUDHI_MODULE}/${GUDHI_INCLUDE_DIRECTORY}/*)
+add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy ${CMAKE_SOURCE_DIR}/Conventions.txt ${GUDHI_USER_VERSION_DIR}/Conventions.txt)
+add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy ${CMAKE_SOURCE_DIR}/README ${GUDHI_USER_VERSION_DIR}/README)
+add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy ${CMAKE_SOURCE_DIR}/COPYING ${GUDHI_USER_VERSION_DIR}/COPYING)
+add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy ${CMAKE_SOURCE_DIR}/src/CMakeLists.txt ${GUDHI_USER_VERSION_DIR}/CMakeLists.txt)
+add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy ${CMAKE_SOURCE_DIR}/src/GUDHIConfigVersion.cmake.in ${GUDHI_USER_VERSION_DIR}/GUDHIConfigVersion.cmake.in)
+add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy ${CMAKE_SOURCE_DIR}/src/GUDHIConfig.cmake.in ${GUDHI_USER_VERSION_DIR}/GUDHIConfig.cmake.in)
+add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy ${CMAKE_SOURCE_DIR}/CMakeGUDHIVersion.txt ${GUDHI_USER_VERSION_DIR}/CMakeGUDHIVersion.txt)
- foreach(GUDHI_INCLUDE_FILE ${GUDHI_INCLUDE_FILES})
- get_filename_component(GUDHI_INCLUDE_FILE_NAME ${GUDHI_INCLUDE_FILE} NAME)
- # GUDHI_INCLUDE_FILE can be a file or a directory
- if(IS_DIRECTORY ${GUDHI_INCLUDE_FILE})
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy_directory ${GUDHI_INCLUDE_FILE} ${GUDHI_USER_VERSION_DIR}/${GUDHI_INCLUDE_DIRECTORY}/${GUDHI_INCLUDE_FILE_NAME})
- else()
- add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy ${GUDHI_INCLUDE_FILE} ${GUDHI_USER_VERSION_DIR}/${GUDHI_INCLUDE_DIRECTORY}/${GUDHI_INCLUDE_FILE_NAME})
- endif()
- endforeach()
- endforeach(GUDHI_INCLUDE_DIRECTORY ${GUDHI_INCLUDE_DIRECTORIES})
+add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy_directory ${CMAKE_SOURCE_DIR}/biblio ${GUDHI_USER_VERSION_DIR}/biblio)
+add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy_directory ${CMAKE_SOURCE_DIR}/src/cython ${GUDHI_USER_VERSION_DIR}/cython)
+add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy_directory ${CMAKE_SOURCE_DIR}/data ${GUDHI_USER_VERSION_DIR}/data)
+add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy_directory ${CMAKE_SOURCE_DIR}/src/cmake ${GUDHI_USER_VERSION_DIR}/cmake)
+add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy_directory ${CMAKE_SOURCE_DIR}/src/GudhUI ${GUDHI_USER_VERSION_DIR}/GudhUI)
- endforeach(GUDHI_MODULE ${GUDHI_MODULES_FULL_LIST})
+set(GUDHI_DIRECTORIES "doc;example;concept;utilities")
+if (CGAL_VERSION VERSION_LESS 4.11.0)
+ set(GUDHI_INCLUDE_DIRECTORIES "include/gudhi;include/gudhi_patches")
+else ()
+ set(GUDHI_INCLUDE_DIRECTORIES "include/gudhi")
+endif ()
-endif()
+foreach(GUDHI_MODULE ${GUDHI_MODULES_FULL_LIST})
+ foreach(GUDHI_DIRECTORY ${GUDHI_DIRECTORIES})
+ # Find files
+ file(GLOB GUDHI_FILES ${CMAKE_SOURCE_DIR}/src/${GUDHI_MODULE}/${GUDHI_DIRECTORY}/*)
+
+ foreach(GUDHI_FILE ${GUDHI_FILES})
+ get_filename_component(GUDHI_FILE_NAME ${GUDHI_FILE} NAME)
+ # GUDHI_FILE can be a file or a directory
+ if(IS_DIRECTORY ${GUDHI_FILE})
+ add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy_directory ${GUDHI_FILE} ${GUDHI_USER_VERSION_DIR}/${GUDHI_DIRECTORY}/${GUDHI_MODULE}/${GUDHI_FILE_NAME})
+ else()
+ add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy ${GUDHI_FILE} ${GUDHI_USER_VERSION_DIR}/${GUDHI_DIRECTORY}/${GUDHI_MODULE}/${GUDHI_FILE_NAME})
+ endif()
+ endforeach()
+ endforeach(GUDHI_DIRECTORY ${GUDHI_DIRECTORIES})
+
+ foreach(GUDHI_INCLUDE_DIRECTORY ${GUDHI_INCLUDE_DIRECTORIES})
+ # include files
+ file(GLOB GUDHI_INCLUDE_FILES ${CMAKE_SOURCE_DIR}/src/${GUDHI_MODULE}/${GUDHI_INCLUDE_DIRECTORY}/*)
+
+ foreach(GUDHI_INCLUDE_FILE ${GUDHI_INCLUDE_FILES})
+ get_filename_component(GUDHI_INCLUDE_FILE_NAME ${GUDHI_INCLUDE_FILE} NAME)
+ # GUDHI_INCLUDE_FILE can be a file or a directory
+ if(IS_DIRECTORY ${GUDHI_INCLUDE_FILE})
+ add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy_directory ${GUDHI_INCLUDE_FILE} ${GUDHI_USER_VERSION_DIR}/${GUDHI_INCLUDE_DIRECTORY}/${GUDHI_INCLUDE_FILE_NAME})
+ else()
+ add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy ${GUDHI_INCLUDE_FILE} ${GUDHI_USER_VERSION_DIR}/${GUDHI_INCLUDE_DIRECTORY}/${GUDHI_INCLUDE_FILE_NAME})
+ endif()
+ endforeach()
+ endforeach(GUDHI_INCLUDE_DIRECTORY ${GUDHI_INCLUDE_DIRECTORIES})
+
+endforeach(GUDHI_MODULE ${GUDHI_MODULES_FULL_LIST}) \ No newline at end of file
diff --git a/cython/CMakeLists.txt b/cython/CMakeLists.txt
index 17d440ee..09ea28f1 100644
--- a/cython/CMakeLists.txt
+++ b/cython/CMakeLists.txt
@@ -16,17 +16,51 @@ endfunction( add_gudhi_cython_lib )
# THE_TEST is the python test file name (without .py extension) containing tests functions
function( add_gudhi_py_test THE_TEST )
- # use ${PYTHON_EXECUTABLE} -B, otherwise a __pycache__ directory is created in sources by python
- # use py.test no cache provider, otherwise a .cache file is created in sources by py.test
- add_test(NAME ${THE_TEST}_py_test
- WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${PYTHON_EXECUTABLE} -B -m pytest -p no:cacheprovider ${CMAKE_CURRENT_SOURCE_DIR}/test/${THE_TEST}.py)
+ if(PYTEST_FOUND)
+ # use ${PYTHON_EXECUTABLE} -B, otherwise a __pycache__ directory is created in sources by python
+ # use py.test no cache provider, otherwise a .cache file is created in sources by py.test
+ add_test(NAME ${THE_TEST}_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${PYTHON_EXECUTABLE} -B -m pytest -p no:cacheprovider ${CMAKE_CURRENT_SOURCE_DIR}/test/${THE_TEST}.py)
+ endif()
endfunction( add_gudhi_py_test )
+# Set gudhi.__debug_info__
+# WARNING : to be done before gudhi.pyx.in configure_file
+function( add_gudhi_debug_info DEBUG_INFO )
+ set(GUDHI_CYTHON_DEBUG_INFO "${GUDHI_CYTHON_DEBUG_INFO} \"${DEBUG_INFO}\\n\" \\\n" PARENT_SCOPE)
+endfunction( add_gudhi_debug_info )
+
if(CYTHON_FOUND)
- message("++ ${PYTHON_EXECUTABLE} v.${PYTHON_VERSION_STRING} - Cython is ${CYTHON_EXECUTABLE} - Sphinx is ${SPHINX_PATH}")
+ set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}off_reader;")
+ set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}simplex_tree;")
+ set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}rips_complex;")
+ set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}cubical_complex;")
+ set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}periodic_cubical_complex;")
+ set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}persistence_graphical_tools;")
+ set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}reader_utils;")
+ set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}witness_complex;")
+ set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}strong_witness_complex;")
+
+ add_gudhi_debug_info("Python version ${PYTHON_VERSION_STRING}")
+ add_gudhi_debug_info("Cython version ${CYTHON_VERSION}")
+ if(PYTEST_FOUND)
+ add_gudhi_debug_info("Pytest version ${PYTEST_VERSION}")
+ endif()
+ if(MATPLOTLIB_FOUND)
+ add_gudhi_debug_info("Matplotlib version ${MATPLOTLIB_VERSION}")
+ endif()
+ if(NUMPY_FOUND)
+ add_gudhi_debug_info("Numpy version ${NUMPY_VERSION}")
+ endif()
+ if(MATPLOTLIB_FOUND AND NUMPY_FOUND)
+ set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}persistence_graphical_tools;")
+ else()
+ set(GUDHI_CYTHON_MISSING_MODULES "${GUDHI_CYTHON_MODULES}persistence_graphical_tools;")
+ endif()
+ message("++ ${PYTHON_EXECUTABLE} v.${PYTHON_VERSION_STRING} - Cython is ${CYTHON_VERSION} - Sphinx is ${SPHINX_PATH}")
set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DBOOST_RESULT_OF_USE_DECLTYPE', ")
set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DBOOST_ALL_NO_LIB', ")
set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DBOOST_SYSTEM_NO_DEPRECATED', ")
@@ -49,49 +83,86 @@ if(CYTHON_FOUND)
endif()
if (EIGEN3_FOUND)
+ add_gudhi_debug_info("Eigen3 version ${EIGEN3_VERSION}")
# No problem, even if no CGAL found
set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_EIGEN3_ENABLED', ")
endif (EIGEN3_FOUND)
if (NOT CGAL_VERSION VERSION_LESS 4.8.1)
set(GUDHI_CYTHON_BOTTLENECK_DISTANCE "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/bottleneck_distance.pyx'")
- endif (NOT CGAL_VERSION VERSION_LESS 4.8.1)
+ set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}bottleneck_distance;")
+ set(GUDHI_CYTHON_NERVE_GIC "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/nerve_gic.pyx'")
+ set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}nerve_gic;")
+ else()
+ set(GUDHI_CYTHON_MISSING_MODULES "${GUDHI_CYTHON_MISSING_MODULES}bottleneck_distance;")
+ set(GUDHI_CYTHON_MISSING_MODULES "${GUDHI_CYTHON_MISSING_MODULES}nerve_gic;")
+ endif ()
if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
set(GUDHI_CYTHON_SUBSAMPLING "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/subsampling.pyx'")
+ set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}subsampling;")
set(GUDHI_CYTHON_TANGENTIAL_COMPLEX "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/tangential_complex.pyx'")
- endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
+ set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}tangential_complex;")
+ else()
+ set(GUDHI_CYTHON_MISSING_MODULES "${GUDHI_CYTHON_MISSING_MODULES}subsampling;")
+ set(GUDHI_CYTHON_MISSING_MODULES "${GUDHI_CYTHON_MISSING_MODULES}tangential_complex;")
+ endif ()
if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
set(GUDHI_CYTHON_ALPHA_COMPLEX "include '${CMAKE_CURRENT_SOURCE_DIR}/cython/alpha_complex.pyx'")
- endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
+ set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}alpha_complex;")
+ else()
+ set(GUDHI_CYTHON_MISSING_MODULES "${GUDHI_CYTHON_MISSING_MODULES}alpha_complex;")
+ endif ()
if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0)
set(GUDHI_CYTHON_EUCLIDEAN_WITNESS_COMPLEX
"include '${CMAKE_CURRENT_SOURCE_DIR}/cython/euclidean_witness_complex.pyx'\ninclude '${CMAKE_CURRENT_SOURCE_DIR}/cython/euclidean_strong_witness_complex.pyx'\n")
- endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0)
+ set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}euclidean_witness_complex;")
+ set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}euclidean_strong_witness_complex;")
+ else()
+ set(GUDHI_CYTHON_MISSING_MODULES "${GUDHI_CYTHON_MISSING_MODULES}euclidean_witness_complex;")
+ set(GUDHI_CYTHON_MISSING_MODULES "${GUDHI_CYTHON_MISSING_MODULES}euclidean_strong_witness_complex;")
+ endif ()
+
+ add_gudhi_debug_info("Installed modules are: ${GUDHI_CYTHON_MODULES}")
+ if(GUDHI_CYTHON_MISSING_MODULES)
+ add_gudhi_debug_info("Missing modules are: ${GUDHI_CYTHON_MISSING_MODULES}")
+ endif()
if(CGAL_FOUND)
can_cgal_use_cxx11_thread_local()
if (NOT CGAL_CAN_USE_CXX11_THREAD_LOCAL_RESULT)
- add_gudhi_cython_lib(${Boost_THREAD_LIBRARY})
+ if(CMAKE_BUILD_TYPE MATCHES Debug)
+ add_gudhi_cython_lib("${Boost_THREAD_LIBRARY_DEBUG}")
+ else()
+ add_gudhi_cython_lib("${Boost_THREAD_LIBRARY_RELEASE}")
+ endif()
set(GUDHI_CYTHON_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}'${Boost_LIBRARY_DIRS}', ")
endif()
# Add CGAL compilation args
if(CGAL_HEADER_ONLY)
+ add_gudhi_debug_info("CGAL header only version ${CGAL_VERSION}")
set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_HEADER_ONLY', ")
else(CGAL_HEADER_ONLY)
- add_gudhi_cython_lib(${CGAL_LIBRARY})
+ add_gudhi_debug_info("CGAL version ${CGAL_VERSION}")
+ add_gudhi_cython_lib("${CGAL_LIBRARY}")
set(GUDHI_CYTHON_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}'${CGAL_LIBRARIES_DIR}', ")
# If CGAL is not header only, CGAL library may link with boost system,
- add_gudhi_cython_lib(${Boost_SYSTEM_LIBRARY})
+ if(CMAKE_BUILD_TYPE MATCHES Debug)
+ add_gudhi_cython_lib("${Boost_SYSTEM_LIBRARY_DEBUG}")
+ else()
+ add_gudhi_cython_lib("${Boost_SYSTEM_LIBRARY_RELEASE}")
+ endif()
set(GUDHI_CYTHON_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}'${Boost_LIBRARY_DIRS}', ")
endif(CGAL_HEADER_ONLY)
# GMP and GMPXX are not required, but if present, CGAL will link with them.
if(GMP_FOUND)
+ add_gudhi_debug_info("GMP_LIBRARIES = ${GMP_LIBRARIES}")
set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_USE_GMP', ")
- add_gudhi_cython_lib(${GMP_LIBRARIES})
+ add_gudhi_cython_lib("${GMP_LIBRARIES}")
set(GUDHI_CYTHON_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}'${GMP_LIBRARIES_DIR}', ")
if(GMPXX_FOUND)
+ add_gudhi_debug_info("GMPXX_LIBRARIES = ${GMPXX_LIBRARIES}")
set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_USE_GMPXX', ")
- add_gudhi_cython_lib(${GMPXX_LIBRARIES})
+ add_gudhi_cython_lib("${GMPXX_LIBRARIES}")
set(GUDHI_CYTHON_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}'${GMPXX_LIBRARIES_DIR}', ")
endif(GMPXX_FOUND)
endif(GMP_FOUND)
@@ -111,9 +182,15 @@ if(CYTHON_FOUND)
set(GUDHI_CYTHON_INCLUDE_DIRS "${GUDHI_CYTHON_INCLUDE_DIRS}'${CMAKE_SOURCE_DIR}/${GUDHI_CYTHON_PATH}/include', ")
if (TBB_FOUND AND WITH_GUDHI_USE_TBB)
+ add_gudhi_debug_info("TBB version ${TBB_INTERFACE_VERSION} found and used")
set(GUDHI_CYTHON_EXTRA_COMPILE_ARGS "${GUDHI_CYTHON_EXTRA_COMPILE_ARGS}'-DGUDHI_USE_TBB', ")
- add_gudhi_cython_lib(${TBB_RELEASE_LIBRARY})
- add_gudhi_cython_lib(${TBB_MALLOC_RELEASE_LIBRARY})
+ if(CMAKE_BUILD_TYPE MATCHES Debug)
+ add_gudhi_cython_lib("${TBB_DEBUG_LIBRARY}")
+ add_gudhi_cython_lib("${TBB_MALLOC_DEBUG_LIBRARY}")
+ else()
+ add_gudhi_cython_lib("${TBB_RELEASE_LIBRARY}")
+ add_gudhi_cython_lib("${TBB_MALLOC_RELEASE_LIBRARY}")
+ endif()
set(GUDHI_CYTHON_LIBRARY_DIRS "${GUDHI_CYTHON_LIBRARY_DIRS}'${TBB_LIBRARY_DIRS}', ")
set(GUDHI_CYTHON_INCLUDE_DIRS "${GUDHI_CYTHON_INCLUDE_DIRS}'${TBB_INCLUDE_DIRS}', ")
endif()
@@ -151,40 +228,73 @@ if(CYTHON_FOUND)
${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/alpha_rips_persistence_bottleneck_distance.py"
-f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -t 0.15 -d 3)
- # Tangential
- add_test(NAME tangential_complex_plain_homology_from_off_file_example_py_test
+ if(MATPLOTLIB_FOUND AND NUMPY_FOUND)
+ # Tangential
+ add_test(NAME tangential_complex_plain_homology_from_off_file_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/tangential_complex_plain_homology_from_off_file_example.py"
+ --no-diagram -i 2 -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off)
+
+ add_gudhi_py_test(test_tangential_complex)
+
+ # Witness complex AND Subsampling
+ add_test(NAME euclidean_strong_witness_complex_diagram_persistence_from_off_file_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py"
+ --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -a 1.0 -n 20 -d 2)
+
+ add_test(NAME euclidean_witness_complex_diagram_persistence_from_off_file_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py"
+ --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -a 1.0 -n 20 -d 2)
+ endif()
+
+ # Subsampling
+ add_gudhi_py_test(test_subsampling)
+
+ endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
+ if (NOT CGAL_VERSION VERSION_LESS 4.8.1)
+ # Bottleneck
+ add_test(NAME bottleneck_basic_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
- ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/tangential_complex_plain_homology_from_off_file_example.py"
- --no-diagram -i 2 -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off)
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/bottleneck_basic_example.py")
- add_gudhi_py_test(test_tangential_complex)
+ add_gudhi_py_test(test_bottleneck_distance)
- # Witness complex AND Subsampling
- add_test(NAME euclidean_strong_witness_complex_diagram_persistence_from_off_file_example_py_test
+ # Cover complex
+ file(COPY ${CMAKE_SOURCE_DIR}/data/points/human.off DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+ file(COPY ${CMAKE_SOURCE_DIR}/data/points/COIL_database/lucky_cat.off DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+ file(COPY ${CMAKE_SOURCE_DIR}/data/points/COIL_database/lucky_cat_PCA1 DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+ add_test(NAME cover_complex_nerve_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
- ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py"
- --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -a 1.0 -n 20 -d 2)
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/nerve_of_a_covering.py"
+ -f human.off -c 2 -r 10 -g 0.3)
- add_test(NAME euclidean_witness_complex_diagram_persistence_from_off_file_example_py_test
+ add_test(NAME cover_complex_coordinate_gic_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
- ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py"
- --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -a 1.0 -n 20 -d 2)
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/coordinate_graph_induced_complex.py"
+ -f human.off -c 0 -v)
- # Subsampling
- add_gudhi_py_test(test_subsampling)
+ add_test(NAME cover_complex_functional_gic_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/functional_graph_induced_complex.py"
+ -o lucky_cat.off
+ -f lucky_cat_PCA1 -v)
- endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
- if (NOT CGAL_VERSION VERSION_LESS 4.8.1)
- # Bottleneck
- add_test(NAME bottleneck_basic_example_py_test
+ add_test(NAME cover_complex_voronoi_gic_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
- ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/bottleneck_basic_example.py")
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/voronoi_graph_induced_complex.py"
+ -f human.off -n 700 -v)
- add_gudhi_py_test(test_bottleneck_distance)
+ add_gudhi_py_test(test_cover_complex)
endif (NOT CGAL_VERSION VERSION_LESS 4.8.1)
if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
@@ -194,11 +304,13 @@ if(CYTHON_FOUND)
COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/alpha_complex_from_points_example.py")
- add_test(NAME alpha_complex_diagram_persistence_from_off_file_example_py_test
+ if(MATPLOTLIB_FOUND AND NUMPY_FOUND)
+ add_test(NAME alpha_complex_diagram_persistence_from_off_file_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/alpha_complex_diagram_persistence_from_off_file_example.py"
--no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -a 0.6)
+ endif()
add_gudhi_py_test(test_alpha_complex)
@@ -217,26 +329,30 @@ if(CYTHON_FOUND)
${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py"
--no-barcode -f ${CMAKE_SOURCE_DIR}/data/bitmap/CubicalTwoSphere.txt)
- add_test(NAME random_cubical_complex_persistence_example_py_test
- WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
- ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/random_cubical_complex_persistence_example.py"
- 10 10 10)
+ if(NUMPY_FOUND)
+ add_test(NAME random_cubical_complex_persistence_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/random_cubical_complex_persistence_example.py"
+ 10 10 10)
+ endif()
add_gudhi_py_test(test_cubical_complex)
# Rips
- add_test(NAME rips_complex_diagram_persistence_from_distance_matrix_file_example_py_test
- WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
- ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py"
- --no-diagram -f ${CMAKE_SOURCE_DIR}/data/distance_matrix/lower_triangular_distance_matrix.csv -e 12.0 -d 3)
-
- add_test(NAME rips_complex_diagram_persistence_from_off_file_example_py_test
- WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
- ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/example/rips_complex_diagram_persistence_from_off_file_example.py
- --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -e 0.25 -d 3)
+ if(MATPLOTLIB_FOUND AND NUMPY_FOUND)
+ add_test(NAME rips_complex_diagram_persistence_from_distance_matrix_file_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py"
+ --no-diagram -f ${CMAKE_SOURCE_DIR}/data/distance_matrix/lower_triangular_distance_matrix.csv -e 12.0 -d 3)
+
+ add_test(NAME rips_complex_diagram_persistence_from_off_file_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/example/rips_complex_diagram_persistence_from_off_file_example.py
+ --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -e 0.25 -d 3)
+ endif()
add_test(NAME rips_complex_from_points_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
@@ -265,7 +381,7 @@ if(CYTHON_FOUND)
add_gudhi_py_test(test_reader_utils)
# Documentation generation is available through sphinx - requires all modules
- if(SPHINX_PATH AND NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
+ if(SPHINX_PATH AND MATPLOTLIB_FOUND AND NUMPY_FOUND AND NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
set (GUDHI_SPHINX_MESSAGE "Generating API documentation with Sphinx in ${CMAKE_CURRENT_BINARY_DIR}/sphinx/")
# User warning - Sphinx is a static pages generator, and configured to work fine with user_version
# Images and biblio warnings because not found on developper version
@@ -285,5 +401,5 @@ if(CYTHON_FOUND)
COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
${SPHINX_PATH} -b doctest ${CMAKE_CURRENT_SOURCE_DIR}/doc ${CMAKE_CURRENT_BINARY_DIR}/doctest)
- endif(SPHINX_PATH AND NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
+ endif()
endif(CYTHON_FOUND)
diff --git a/cython/cython/nerve_gic.pyx b/cython/cython/nerve_gic.pyx
new file mode 100644
index 00000000..30a14d3b
--- /dev/null
+++ b/cython/cython/nerve_gic.pyx
@@ -0,0 +1,401 @@
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.utility cimport pair
+from libcpp.string cimport string
+from libcpp cimport bool
+import os
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2018 Inria
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2018 Inria"
+__license__ = "GPL v3"
+
+cdef extern from "Nerve_gic_interface.h" namespace "Gudhi":
+ cdef cppclass Nerve_gic_interface "Gudhi::cover_complex::Nerve_gic_interface":
+ Nerve_gic_interface()
+ double compute_confidence_level_from_distance(double distance)
+ double compute_distance_from_confidence_level(double alpha)
+ void compute_distribution(int N)
+ double compute_p_value()
+ void compute_PD()
+ void find_simplices()
+ void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree)
+ bool read_point_cloud(string off_file_name)
+ double set_automatic_resolution()
+ void set_color_from_coordinate(int k)
+ void set_color_from_file(string color_file_name)
+ void set_color_from_vector(vector[double] color)
+ void set_cover_from_file(string cover_file_name)
+ void set_cover_from_function()
+ void set_cover_from_Euclidean_Voronoi(int m)
+ void set_function_from_coordinate(int k)
+ void set_function_from_file(string func_file_name)
+ void set_function_from_range(vector[double] function)
+ void set_gain(double g)
+ double set_graph_from_automatic_euclidean_rips(int N)
+ void set_graph_from_file(string graph_file_name)
+ void set_graph_from_OFF()
+ void set_graph_from_euclidean_rips(double threshold)
+ void set_mask(int nodemask)
+ void set_resolution_with_interval_length(double resolution)
+ void set_resolution_with_interval_number(int resolution)
+ void set_subsampling(double constant, double power)
+ void set_type(string type)
+ void set_verbose(bool verbose)
+ vector[int] subpopulation(int c)
+ void write_info()
+ void plot_DOT()
+ void plot_OFF()
+
+# CoverComplex python interface
+cdef class CoverComplex:
+ """Cover complex data structure.
+
+ The data structure is a simplicial complex, representing a Graph Induced
+ simplicial Complex (GIC) or a Nerve, and whose simplices are computed with
+ a cover C of a point cloud P, which often comes from the preimages of
+ intervals covering the image of a function f defined on P. These intervals
+ are parameterized by their resolution (either their length or their number)
+ and their gain (percentage of overlap). To compute a GIC, one also needs a
+ graph G built on top of P, whose cliques with vertices belonging to
+ different elements of C correspond to the simplices of the GIC.
+ """
+
+ cdef Nerve_gic_interface * thisptr
+
+ # Fake constructor that does nothing but documenting the constructor
+ def __init__(self):
+ """CoverComplex constructor.
+ """
+
+ # The real cython constructor
+ def __cinit__(self):
+ self.thisptr = new Nerve_gic_interface()
+
+ def __dealloc__(self):
+ if self.thisptr != NULL:
+ del self.thisptr
+
+ def __is_defined(self):
+ """Returns true if CoverComplex pointer is not NULL.
+ """
+ return self.thisptr != NULL
+
+ def compute_confidence_level_from_distance(self, distance):
+ """Computes the confidence level of a specific bottleneck distance
+ threshold.
+
+ :param distance: Bottleneck distance.
+ :type distance: double
+ :rtype: double
+ :returns: Confidence level.
+ """
+ return self.thisptr.compute_confidence_level_from_distance(distance)
+
+ def compute_distance_from_confidence_level(self, alpha):
+ """Computes the bottleneck distance threshold corresponding to a
+ specific confidence level.
+
+ :param alpha: Confidence level.
+ :type alpha: double
+ :rtype: double
+ :returns: Bottleneck distance.
+ """
+ return self.thisptr.compute_distance_from_confidence_level(alpha)
+
+ def compute_distribution(self, N=100):
+ """Computes bootstrapped distances distribution.
+
+ :param N: Loop number (default value is 100).
+ :type N: int
+ """
+ self.thisptr.compute_distribution(N)
+
+ def compute_p_value(self):
+ """Computes the p-value, i.e. the opposite of the confidence level of
+ the largest bottleneck distance preserving the points in the
+ persistence diagram of the output simplicial complex.
+
+ :rtype: double
+ :returns: p-value.
+ """
+ return self.thisptr.compute_p_value()
+
+ def compute_PD(self):
+ """Computes the extended persistence diagram of the complex.
+ """
+ self.thisptr.compute_PD()
+
+ def create_simplex_tree(self):
+ """
+ :returns: A simplex tree created from the Cover complex.
+ :rtype: SimplexTree
+ """
+ simplex_tree = SimplexTree()
+ self.thisptr.create_simplex_tree(simplex_tree.thisptr)
+ return simplex_tree
+
+ def find_simplices(self):
+ """Computes the simplices of the simplicial complex.
+ """
+ self.thisptr.find_simplices()
+
+ def read_point_cloud(self, off_file):
+ """Reads and stores the input point cloud.
+
+ :param off_file: Name of the input .OFF or .nOFF file.
+ :type off_file: string
+ :rtype: bool
+ :returns: Read file status.
+ """
+ if os.path.isfile(off_file):
+ return self.thisptr.read_point_cloud(str.encode(off_file))
+ else:
+ print("file " + off_file + " not found.")
+ return False
+
+ def set_automatic_resolution(self):
+ """Computes the optimal length of intervals (i.e. the smallest interval
+ length avoiding discretization artifacts—see :cite:`Carriere17c`) for a
+ functional cover.
+
+ :rtype: double
+ :returns: reso interval length used to compute the cover.
+ """
+ return self.thisptr.set_automatic_resolution()
+
+ def set_color_from_coordinate(self, k=0):
+ """Computes the function used to color the nodes of the simplicial
+ complex from the k-th coordinate.
+
+ :param k: Coordinate to use (start at 0). Default value is 0.
+ :type k: int
+ """
+ return self.thisptr.set_color_from_coordinate(k)
+
+ def set_color_from_file(self, color_file_name):
+ """Computes the function used to color the nodes of the simplicial
+ complex from a file containing the function values.
+
+ :param color_file_name: Name of the input color file.
+ :type color_file_name: string
+ """
+ if os.path.isfile(color_file_name):
+ self.thisptr.set_color_from_file(str.encode(color_file_name))
+ else:
+ print("file " + color_file_name + " not found.")
+
+ def set_color_from_vector(self, color):
+ """Computes the function used to color the nodes of the simplicial
+ complex from a vector stored in memory.
+
+ :param color: Input vector of values.
+ :type color: vector[double]
+ """
+ self.thisptr.set_color_from_vector(color)
+
+ def set_cover_from_file(self, cover_file_name):
+ """Creates the cover C from a file containing the cover elements of
+ each point (the order has to be the same as in the input file!).
+
+ :param cover_file_name: Name of the input cover file.
+ :type cover_file_name: string
+ """
+ if os.path.isfile(cover_file_name):
+ self.thisptr.set_cover_from_file(str.encode(cover_file_name))
+ else:
+ print("file " + cover_file_name + " not found.")
+
+ def set_cover_from_function(self):
+ """Creates a cover C from the preimages of the function f.
+ """
+ self.thisptr.set_cover_from_function()
+
+ def set_cover_from_Voronoi(self, m=100):
+ """Creates the cover C from the Voronoï cells of a subsampling of the
+ point cloud.
+
+ :param m: Number of points in the subsample. Default value is 100.
+ :type m: int
+ """
+ self.thisptr.set_cover_from_Euclidean_Voronoi(m)
+
+ def set_function_from_coordinate(self, k):
+ """Creates the function f from the k-th coordinate of the point cloud.
+
+ :param k: Coordinate to use (start at 0).
+ :type k: int
+ """
+ self.thisptr.set_function_from_coordinate(k)
+
+ def set_function_from_file(self, func_file_name):
+ """Creates the function f from a file containing the function values.
+
+ :param func_file_name: Name of the input function file.
+ :type func_file_name: string
+ """
+ if os.path.isfile(func_file_name):
+ self.thisptr.set_function_from_file(str.encode(func_file_name))
+ else:
+ print("file " + func_file_name + " not found.")
+
+ def set_function_from_range(self, function):
+ """Creates the function f from a vector stored in memory.
+
+ :param function: Input vector of values.
+ :type function: vector[double]
+ """
+ self.thisptr.set_function_from_range(function)
+
+ def set_gain(self, g = 0.3):
+ """Sets a gain from a value stored in memory.
+
+ :param g: Gain (default value is 0.3).
+ :type g: double
+ """
+ self.thisptr.set_gain(g)
+
+ def set_graph_from_automatic_rips(self, N=100):
+ """Creates a graph G from a Rips complex whose threshold value is
+ automatically tuned with subsampling—see :cite:`Carriere17c`.
+
+ :param N: Number of subsampling iteration (the default reasonable value
+ is 100, but there is no guarantee on how to choose it).
+ :type N: int
+ :rtype: double
+ :returns: Delta threshold used for computing the Rips complex.
+ """
+ return self.thisptr.set_graph_from_automatic_euclidean_rips(N)
+
+ def set_graph_from_file(self, graph_file_name):
+ """Creates a graph G from a file containing the edges.
+
+ :param graph_file_name: Name of the input graph file. The graph file
+ contains one edge per line, each edge being represented by the IDs of
+ its two nodes.
+ :type graph_file_name: string
+ """
+ if os.path.isfile(graph_file_name):
+ self.thisptr.set_graph_from_file(str.encode(graph_file_name))
+ else:
+ print("file " + graph_file_name + " not found.")
+
+ def set_graph_from_OFF(self):
+ """Creates a graph G from the triangulation given by the input OFF
+ file.
+ """
+ self.thisptr.set_graph_from_OFF()
+
+ def set_graph_from_rips(self, threshold):
+ """Creates a graph G from a Rips complex.
+
+ :param threshold: Threshold value for the Rips complex.
+ :type threshold: double
+ """
+ self.thisptr.set_graph_from_euclidean_rips(threshold)
+
+ def set_mask(self, nodemask):
+ """Sets the mask, which is a threshold integer such that nodes in the
+ complex that contain a number of data points which is less than or
+ equal to this threshold are not displayed.
+
+ :param nodemask: Threshold.
+ :type nodemask: int
+ """
+ self.thisptr.set_mask(nodemask)
+
+ def set_resolution_with_interval_length(self, resolution):
+ """Sets a length of intervals from a value stored in memory.
+
+ :param resolution: Length of intervals.
+ :type resolution: double
+ """
+ self.thisptr.set_resolution_with_interval_length(resolution)
+
+ def set_resolution_with_interval_number(self, resolution):
+ """Sets a number of intervals from a value stored in memory.
+
+ :param resolution: Number of intervals.
+ :type resolution: int
+ """
+ self.thisptr.set_resolution_with_interval_number(resolution)
+
+ def set_subsampling(self, constant, power):
+ """Sets the constants used to subsample the data set. These constants
+ are explained in :cite:`Carriere17c`.
+
+ :param constant: Constant.
+ :type constant: double
+ :param power: Power.
+ :type power: double
+ """
+ self.thisptr.set_subsampling(constant, power)
+
+ def set_type(self, type):
+ """Specifies the type of the output simplicial complex.
+
+ :param type: either "GIC" or "Nerve".
+ :type type: string
+ """
+ self.thisptr.set_type(str.encode(type))
+
+ def set_verbose(self, verbose):
+ """Specifies whether the program should display information or not.
+
+ :param verbose: true = display info, false = do not display info.
+ :type verbose: boolean
+ """
+ self.thisptr.set_verbose(verbose)
+
+ def subpopulation(self, c):
+ """Returns the data subset corresponding to a specific node of the
+ created complex.
+
+ :param c: ID of the node.
+ :type c: int
+ :rtype: vector[int]
+ :returns: Vector of IDs of data points.
+ """
+ return self.thisptr.subpopulation(c)
+
+ def write_info(self):
+ """Creates a .txt file called SC.txt describing the 1-skeleton, which can
+ then be plotted with e.g. KeplerMapper.
+ """
+ return self.thisptr.write_info()
+
+ def plot_dot(self):
+ """Creates a .dot file called SC.dot for neato (part of the graphviz
+ package) once the simplicial complex is computed to get a visualization of
+ its 1-skeleton in a .pdf file.
+ """
+ return self.thisptr.plot_DOT()
+
+ def plot_off(self):
+ """Creates a .off file called SC.off for 3D visualization, which contains
+ the 2-skeleton of the GIC. This function assumes that the cover has been
+ computed with Voronoi. If data points are in 1D or 2D, the remaining
+ coordinates of the points embedded in 3D are set to 0.
+ """
+ return self.thisptr.plot_OFF()
diff --git a/cython/cython/persistence_graphical_tools.py b/cython/cython/persistence_graphical_tools.py
index 216ab8d6..314bd6db 100755
--- a/cython/cython/persistence_graphical_tools.py
+++ b/cython/cython/persistence_graphical_tools.py
@@ -1,8 +1,3 @@
-import matplotlib.pyplot as plt
-import matplotlib.patches as mpatches
-import numpy as np
-import os
-
"""This file is part of the Gudhi Library. The Gudhi library
(Geometric Understanding in Higher Dimensions) is a generic C++
library for computational topology.
@@ -29,187 +24,197 @@ __author__ = "Vincent Rouvreau, Bertrand Michel"
__copyright__ = "Copyright (C) 2016 Inria"
__license__ = "GPL v3"
-def __min_birth_max_death(persistence, band=0.):
- """This function returns (min_birth, max_death) from the persistence.
-
- :param persistence: The persistence to plot.
- :type persistence: list of tuples(dimension, tuple(birth, death)).
- :param band: band
- :type band: float.
- :returns: (float, float) -- (min_birth, max_death).
- """
- # Look for minimum birth date and maximum death date for plot optimisation
- max_death = 0
- min_birth = persistence[0][1][0]
- for interval in reversed(persistence):
- if float(interval[1][1]) != float('inf'):
- if float(interval[1][1]) > max_death:
- max_death = float(interval[1][1])
- if float(interval[1][0]) > max_death:
- max_death = float(interval[1][0])
- if float(interval[1][0]) < min_birth:
- min_birth = float(interval[1][0])
- if band > 0.:
- max_death += band
- return (min_birth, max_death)
+try:
+ import matplotlib.pyplot as plt
+ import matplotlib.patches as mpatches
+ import numpy as np
+ import os
+
+ def __min_birth_max_death(persistence, band=0.):
+ """This function returns (min_birth, max_death) from the persistence.
+
+ :param persistence: The persistence to plot.
+ :type persistence: list of tuples(dimension, tuple(birth, death)).
+ :param band: band
+ :type band: float.
+ :returns: (float, float) -- (min_birth, max_death).
+ """
+ # Look for minimum birth date and maximum death date for plot optimisation
+ max_death = 0
+ min_birth = persistence[0][1][0]
+ for interval in reversed(persistence):
+ if float(interval[1][1]) != float('inf'):
+ if float(interval[1][1]) > max_death:
+ max_death = float(interval[1][1])
+ if float(interval[1][0]) > max_death:
+ max_death = float(interval[1][0])
+ if float(interval[1][0]) < min_birth:
+ min_birth = float(interval[1][0])
+ if band > 0.:
+ max_death += band
+ return (min_birth, max_death)
-"""
-Only 13 colors for the palette
-"""
-palette = ['#ff0000', '#00ff00', '#0000ff', '#00ffff', '#ff00ff', '#ffff00',
- '#000000', '#880000', '#008800', '#000088', '#888800', '#880088',
- '#008888']
-
-def plot_persistence_barcode(persistence=[], persistence_file='', alpha=0.6,
- max_barcodes=1000, inf_delta=0.1, legend=False):
- """This function plots the persistence bar code from persistence values list
- or from a :doc:`persistence file <fileformats>`.
-
- :param persistence: Persistence values list.
- :type persistence: list of tuples(dimension, tuple(birth, death)).
- :param persistence_file: A :doc:`persistence file <fileformats>` style name
- (reset persistence if both are set).
- :type persistence_file: string
- :param alpha: barcode transparency value (0.0 transparent through 1.0 opaque - default is 0.6).
- :type alpha: float.
- :param max_barcodes: number of maximal barcodes to be displayed.
- Set it to 0 to see all, Default value is 1000.
- (persistence will be sorted by life time if max_barcodes is set)
- :type max_barcodes: int.
- :param inf_delta: Infinity is placed at ((max_death - min_birth) x inf_delta).
- A reasonable value is between 0.05 and 0.5 - default is 0.1.
- :type inf_delta: float.
- :returns: A matplotlib object containing horizontal bar plot of persistence
- (launch `show()` method on it to display it).
"""
- if persistence_file is not '':
- if os.path.isfile(persistence_file):
- # Reset persistence
- persistence = []
- diag = read_persistence_intervals_grouped_by_dimension(persistence_file=persistence_file)
- for key in diag.keys():
- for persistence_interval in diag[key]:
- persistence.append((key, persistence_interval))
- else:
- print("file " + persistence_file + " not found.")
- return None
-
- if max_barcodes > 0 and max_barcodes < len(persistence):
- # Sort by life time, then takes only the max_plots elements
- persistence = sorted(persistence, key=lambda life_time: life_time[1][1]-life_time[1][0], reverse=True)[:max_barcodes]
-
- persistence = sorted(persistence, key=lambda birth: birth[1][0])
-
- (min_birth, max_death) = __min_birth_max_death(persistence)
- ind = 0
- delta = ((max_death - min_birth) * inf_delta)
- # Replace infinity values with max_death + delta for bar code to be more
- # readable
- infinity = max_death + delta
- axis_start = min_birth - delta
- # Draw horizontal bars in loop
- for interval in reversed(persistence):
- if float(interval[1][1]) != float('inf'):
- # Finite death case
- plt.barh(ind, (interval[1][1] - interval[1][0]), height=0.8,
- left = interval[1][0], alpha=alpha,
- color = palette[interval[0]],
- linewidth=0)
- else:
- # Infinite death case for diagram to be nicer
- plt.barh(ind, (infinity - interval[1][0]), height=0.8,
- left = interval[1][0], alpha=alpha,
- color = palette[interval[0]],
- linewidth=0)
- ind = ind + 1
-
- if legend:
- dimensions = list(set(item[0] for item in persistence))
- plt.legend(handles=[mpatches.Patch(color=palette[dim],
- label=str(dim)) for dim in dimensions],
- loc='lower right')
- plt.title('Persistence barcode')
- # Ends plot on infinity value and starts a little bit before min_birth
- plt.axis([axis_start, infinity, 0, ind])
- return plt
-
-def plot_persistence_diagram(persistence=[], persistence_file='', alpha=0.6,
- band=0., max_plots=1000, inf_delta=0.1, legend=False):
- """This function plots the persistence diagram from persistence values list
- or from a :doc:`persistence file <fileformats>`.
-
- :param persistence: Persistence values list.
- :type persistence: list of tuples(dimension, tuple(birth, death)).
- :param persistence_file: A :doc:`persistence file <fileformats>` style name
- (reset persistence if both are set).
- :type persistence_file: string
- :param alpha: plot transparency value (0.0 transparent through 1.0 opaque - default is 0.6).
- :type alpha: float.
- :param band: band (not displayed if :math:`\leq` 0. - default is 0.)
- :type band: float.
- :param max_plots: number of maximal plots to be displayed
- Set it to 0 to see all, Default value is 1000.
- (persistence will be sorted by life time if max_plots is set)
- :type max_plots: int.
- :param inf_delta: Infinity is placed at ((max_death - min_birth) x inf_delta).
- A reasonable value is between 0.05 and 0.5 - default is 0.1.
- :type inf_delta: float.
- :returns: A matplotlib object containing diagram plot of persistence
- (launch `show()` method on it to display it).
+ Only 13 colors for the palette
"""
- if persistence_file is not '':
- if os.path.isfile(persistence_file):
- # Reset persistence
- persistence = []
- diag = read_persistence_intervals_grouped_by_dimension(persistence_file=persistence_file)
- for key in diag.keys():
- for persistence_interval in diag[key]:
- persistence.append((key, persistence_interval))
- else:
- print("file " + persistence_file + " not found.")
- return None
-
- if max_plots > 0 and max_plots < len(persistence):
- # Sort by life time, then takes only the max_plots elements
- persistence = sorted(persistence, key=lambda life_time: life_time[1][1]-life_time[1][0], reverse=True)[:max_plots]
-
- (min_birth, max_death) = __min_birth_max_death(persistence, band)
- ind = 0
- delta = ((max_death - min_birth) * inf_delta)
- # Replace infinity values with max_death + delta for diagram to be more
- # readable
- infinity = max_death + delta
- axis_start = min_birth - delta
-
- # line display of equation : birth = death
- x = np.linspace(axis_start, infinity, 1000)
- # infinity line and text
- plt.plot(x, x, color='k', linewidth=1.0)
- plt.plot(x, [infinity] * len(x), linewidth=1.0, color='k', alpha=alpha)
- plt.text(axis_start, infinity, r'$\infty$', color='k', alpha=alpha)
- # bootstrap band
- if band > 0.:
- plt.fill_between(x, x, x+band, alpha=alpha, facecolor='red')
-
- # Draw points in loop
- for interval in reversed(persistence):
- if float(interval[1][1]) != float('inf'):
- # Finite death case
- plt.scatter(interval[1][0], interval[1][1], alpha=alpha,
- color = palette[interval[0]])
- else:
- # Infinite death case for diagram to be nicer
- plt.scatter(interval[1][0], infinity, alpha=alpha,
- color = palette[interval[0]])
- ind = ind + 1
-
- if legend:
- dimensions = list(set(item[0] for item in persistence))
- plt.legend(handles=[mpatches.Patch(color=palette[dim], label=str(dim)) for dim in dimensions])
-
- plt.title('Persistence diagram')
- plt.xlabel('Birth')
- plt.ylabel('Death')
- # Ends plot on infinity value and starts a little bit before min_birth
- plt.axis([axis_start, infinity, axis_start, infinity + delta])
- return plt
+ palette = ['#ff0000', '#00ff00', '#0000ff', '#00ffff', '#ff00ff', '#ffff00',
+ '#000000', '#880000', '#008800', '#000088', '#888800', '#880088',
+ '#008888']
+
+ def plot_persistence_barcode(persistence=[], persistence_file='', alpha=0.6,
+ max_barcodes=1000, inf_delta=0.1, legend=False):
+ """This function plots the persistence bar code from persistence values list
+ or from a :doc:`persistence file <fileformats>`.
+
+ :param persistence: Persistence values list.
+ :type persistence: list of tuples(dimension, tuple(birth, death)).
+ :param persistence_file: A :doc:`persistence file <fileformats>` style name
+ (reset persistence if both are set).
+ :type persistence_file: string
+ :param alpha: barcode transparency value (0.0 transparent through 1.0 opaque - default is 0.6).
+ :type alpha: float.
+ :param max_barcodes: number of maximal barcodes to be displayed.
+ Set it to 0 to see all, Default value is 1000.
+ (persistence will be sorted by life time if max_barcodes is set)
+ :type max_barcodes: int.
+ :param inf_delta: Infinity is placed at ((max_death - min_birth) x inf_delta).
+ A reasonable value is between 0.05 and 0.5 - default is 0.1.
+ :type inf_delta: float.
+ :returns: A matplotlib object containing horizontal bar plot of persistence
+ (launch `show()` method on it to display it).
+ """
+ if persistence_file is not '':
+ if os.path.isfile(persistence_file):
+ # Reset persistence
+ persistence = []
+ diag = read_persistence_intervals_grouped_by_dimension(persistence_file=persistence_file)
+ for key in diag.keys():
+ for persistence_interval in diag[key]:
+ persistence.append((key, persistence_interval))
+ else:
+ print("file " + persistence_file + " not found.")
+ return None
+
+ if max_barcodes > 0 and max_barcodes < len(persistence):
+ # Sort by life time, then takes only the max_plots elements
+ persistence = sorted(persistence, key=lambda life_time: life_time[1][1]-life_time[1][0], reverse=True)[:max_barcodes]
+
+ persistence = sorted(persistence, key=lambda birth: birth[1][0])
+
+ (min_birth, max_death) = __min_birth_max_death(persistence)
+ ind = 0
+ delta = ((max_death - min_birth) * inf_delta)
+ # Replace infinity values with max_death + delta for bar code to be more
+ # readable
+ infinity = max_death + delta
+ axis_start = min_birth - delta
+ # Draw horizontal bars in loop
+ for interval in reversed(persistence):
+ if float(interval[1][1]) != float('inf'):
+ # Finite death case
+ plt.barh(ind, (interval[1][1] - interval[1][0]), height=0.8,
+ left = interval[1][0], alpha=alpha,
+ color = palette[interval[0]],
+ linewidth=0)
+ else:
+ # Infinite death case for diagram to be nicer
+ plt.barh(ind, (infinity - interval[1][0]), height=0.8,
+ left = interval[1][0], alpha=alpha,
+ color = palette[interval[0]],
+ linewidth=0)
+ ind = ind + 1
+
+ if legend:
+ dimensions = list(set(item[0] for item in persistence))
+ plt.legend(handles=[mpatches.Patch(color=palette[dim],
+ label=str(dim)) for dim in dimensions],
+ loc='lower right')
+ plt.title('Persistence barcode')
+ # Ends plot on infinity value and starts a little bit before min_birth
+ plt.axis([axis_start, infinity, 0, ind])
+ return plt
+
+ def plot_persistence_diagram(persistence=[], persistence_file='', alpha=0.6,
+ band=0., max_plots=1000, inf_delta=0.1, legend=False):
+ """This function plots the persistence diagram from persistence values list
+ or from a :doc:`persistence file <fileformats>`.
+
+ :param persistence: Persistence values list.
+ :type persistence: list of tuples(dimension, tuple(birth, death)).
+ :param persistence_file: A :doc:`persistence file <fileformats>` style name
+ (reset persistence if both are set).
+ :type persistence_file: string
+ :param alpha: plot transparency value (0.0 transparent through 1.0 opaque - default is 0.6).
+ :type alpha: float.
+ :param band: band (not displayed if :math:`\leq` 0. - default is 0.)
+ :type band: float.
+ :param max_plots: number of maximal plots to be displayed
+ Set it to 0 to see all, Default value is 1000.
+ (persistence will be sorted by life time if max_plots is set)
+ :type max_plots: int.
+ :param inf_delta: Infinity is placed at ((max_death - min_birth) x inf_delta).
+ A reasonable value is between 0.05 and 0.5 - default is 0.1.
+ :type inf_delta: float.
+ :returns: A matplotlib object containing diagram plot of persistence
+ (launch `show()` method on it to display it).
+ """
+ if persistence_file is not '':
+ if os.path.isfile(persistence_file):
+ # Reset persistence
+ persistence = []
+ diag = read_persistence_intervals_grouped_by_dimension(persistence_file=persistence_file)
+ for key in diag.keys():
+ for persistence_interval in diag[key]:
+ persistence.append((key, persistence_interval))
+ else:
+ print("file " + persistence_file + " not found.")
+ return None
+
+ if max_plots > 0 and max_plots < len(persistence):
+ # Sort by life time, then takes only the max_plots elements
+ persistence = sorted(persistence, key=lambda life_time: life_time[1][1]-life_time[1][0], reverse=True)[:max_plots]
+
+ (min_birth, max_death) = __min_birth_max_death(persistence, band)
+ ind = 0
+ delta = ((max_death - min_birth) * inf_delta)
+ # Replace infinity values with max_death + delta for diagram to be more
+ # readable
+ infinity = max_death + delta
+ axis_start = min_birth - delta
+
+ # line display of equation : birth = death
+ x = np.linspace(axis_start, infinity, 1000)
+ # infinity line and text
+ plt.plot(x, x, color='k', linewidth=1.0)
+ plt.plot(x, [infinity] * len(x), linewidth=1.0, color='k', alpha=alpha)
+ plt.text(axis_start, infinity, r'$\infty$', color='k', alpha=alpha)
+ # bootstrap band
+ if band > 0.:
+ plt.fill_between(x, x, x+band, alpha=alpha, facecolor='red')
+
+ # Draw points in loop
+ for interval in reversed(persistence):
+ if float(interval[1][1]) != float('inf'):
+ # Finite death case
+ plt.scatter(interval[1][0], interval[1][1], alpha=alpha,
+ color = palette[interval[0]])
+ else:
+ # Infinite death case for diagram to be nicer
+ plt.scatter(interval[1][0], infinity, alpha=alpha,
+ color = palette[interval[0]])
+ ind = ind + 1
+
+ if legend:
+ dimensions = list(set(item[0] for item in persistence))
+ plt.legend(handles=[mpatches.Patch(color=palette[dim], label=str(dim)) for dim in dimensions])
+
+ plt.title('Persistence diagram')
+ plt.xlabel('Birth')
+ plt.ylabel('Death')
+ # Ends plot on infinity value and starts a little bit before min_birth
+ plt.axis([axis_start, infinity, axis_start, infinity + delta])
+ return plt
+
+except ImportError:
+ # Continue in case of import error, functions won't be available
+ pass
diff --git a/cython/cython/rips_complex.pyx b/cython/cython/rips_complex.pyx
index 59c16bff..30ca4443 100644
--- a/cython/cython/rips_complex.pyx
+++ b/cython/cython/rips_complex.pyx
@@ -51,7 +51,7 @@ cdef class RipsComplex:
"""RipsComplex constructor.
:param max_edge_length: Rips value.
- :type max_edge_length: int
+ :type max_edge_length: float
:param points: A list of points in d-Dimension.
:type points: list of list of double
diff --git a/cython/cython/subsampling.pyx b/cython/cython/subsampling.pyx
index ac09b7a3..e9d61a37 100644
--- a/cython/cython/subsampling.pyx
+++ b/cython/cython/subsampling.pyx
@@ -112,7 +112,8 @@ def pick_n_random_points(points=None, off_file='', nb_points=0):
return subsampling_n_random_points(points, nb_points)
def sparsify_point_set(points=None, off_file='', min_squared_dist=0.0):
- """Subsample a point set by picking random vertices.
+ """Outputs a subset of the input points so that the squared distance
+ between any two points is greater than or equal to min_squared_dist.
:param points: The input point set.
:type points: vector[vector[double]].
@@ -122,8 +123,9 @@ def sparsify_point_set(points=None, off_file='', min_squared_dist=0.0):
:param off_file: An OFF file style name.
:type off_file: string
- :param min_squared_dist: Number of points of the subsample.
- :type min_squared_dist: unsigned.
+ :param min_squared_dist: Minimum squared distance separating the output \
+ points.
+ :type min_squared_dist: float.
:returns: The subsample point set.
:rtype: vector[vector[double]]
"""
diff --git a/cython/doc/_templates/layout.html b/cython/doc/_templates/layout.html
index 1161ed8e..bc0e9658 100644
--- a/cython/doc/_templates/layout.html
+++ b/cython/doc/_templates/layout.html
@@ -107,7 +107,7 @@
{%- macro css() %}
<!-- GUDHI website css for header BEGIN -->
-<link rel="stylesheet" type="text/css" href="http://gudhi.gforge.inria.fr/assets/css/styles_feeling_responsive.css" />
+<link rel="stylesheet" type="text/css" href="https://gudhi.inria.fr/assets/css/styles_feeling_responsive.css" />
<!-- GUDHI website css for header END -->
<link rel="stylesheet" href="{{ pathto('_static/' + style, 1) }}" type="text/css" />
<link rel="stylesheet" href="{{ pathto('_static/pygments.css', 1) }}" type="text/css" />
diff --git a/cython/doc/examples.rst b/cython/doc/examples.rst
index d42f5a92..1f02f8a2 100644
--- a/cython/doc/examples.rst
+++ b/cython/doc/examples.rst
@@ -23,3 +23,7 @@ Examples
* :download:`rips_complex_diagram_persistence_from_distance_matrix_file_example.py <../example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py>`
* :download:`rips_persistence_diagram.py <../example/rips_persistence_diagram.py>`
* :download:`random_cubical_complex_persistence_example.py <../example/random_cubical_complex_persistence_example.py>`
+ * :download:`coordinate_graph_induced_complex.py <../example/coordinate_graph_induced_complex.py>`
+ * :download:`functional_graph_induced_complex.py <../example/functional_graph_induced_complex.py>`
+ * :download:`voronoi_graph_induced_complex.py <../example/voronoi_graph_induced_complex.py>`
+ * :download:`nerve_of_a_covering.py <../example/nerve_of_a_covering.py>`
diff --git a/cython/doc/index.rst b/cython/doc/index.rst
index 4e444fb0..15cbe267 100644
--- a/cython/doc/index.rst
+++ b/cython/doc/index.rst
@@ -36,6 +36,11 @@ Alpha complex
.. include:: alpha_complex_sum.inc
+Cover complexes
+===============
+
+.. include:: nerve_gic_complex_sum.rst
+
Cubical complex
===============
diff --git a/cython/doc/installation.rst b/cython/doc/installation.rst
index 43ff85c5..43576ec9 100644
--- a/cython/doc/installation.rst
+++ b/cython/doc/installation.rst
@@ -47,9 +47,61 @@ following command in a terminal:
export PYTHONPATH='$PYTHONPATH:/path-to-gudhi/build/cython'
ctest -R py_test
-If tests fail, please try to :code:`import gudhi` and check the errors.
+Debugging issues
+================
+
+If tests fail, please check your PYTHONPATH and try to :code:`import gudhi`
+and check the errors.
The problem can come from a third-party library bad link or installation.
+If :code:`import gudhi` succeeds, please have a look at the debug information:
+
+.. code-block:: python
+
+ import gudhi
+ print(gudhi.__debug_info__)
+
+You should see something like:
+
+.. code-block:: none
+
+ Python version 2.7.15
+ Cython version 0.26.1
+ Eigen3 version 3.1.1
+ Installed modules are: off_reader;simplex_tree;rips_complex;cubical_complex;periodic_cubical_complex;
+ persistence_graphical_tools;reader_utils;witness_complex;strong_witness_complex;alpha_complex;
+ euclidean_witness_complex;euclidean_strong_witness_complex;
+ Missing modules are: bottleneck_distance;nerve_gic;subsampling;tangential_complex;persistence_graphical_tools;
+ CGAL version 4.7.1000
+ GMP_LIBRARIES = /usr/lib/x86_64-linux-gnu/libgmp.so
+ GMPXX_LIBRARIES = /usr/lib/x86_64-linux-gnu/libgmpxx.so
+ TBB version 9107 found and used
+
+Here, you can see that bottleneck_distance, nerve_gic, subsampling and
+tangential_complex are missing because of the CGAL version.
+persistence_graphical_tools is not available as numpy and matplotlib are not
+available.
+Unitary tests cannot be run as pytest is missing.
+
+A complete configuration would be:
+
+.. code-block:: none
+
+ Python version 3.6.5
+ Cython version 0.28.2
+ Pytest version 3.3.2
+ Matplotlib version 2.2.2
+ Numpy version 1.14.5
+ Eigen3 version 3.3.4
+ Installed modules are: off_reader;simplex_tree;rips_complex;cubical_complex;periodic_cubical_complex;
+ persistence_graphical_tools;reader_utils;witness_complex;strong_witness_complex;persistence_graphical_tools;
+ bottleneck_distance;nerve_gic;subsampling;tangential_complex;alpha_complex;euclidean_witness_complex;
+ euclidean_strong_witness_complex;
+ CGAL header only version 4.11.0
+ GMP_LIBRARIES = /usr/lib/x86_64-linux-gnu/libgmp.so
+ GMPXX_LIBRARIES = /usr/lib/x86_64-linux-gnu/libgmpxx.so
+ TBB version 9107 found and used
+
Documentation
=============
diff --git a/cython/doc/nerve_gic_complex_ref.rst b/cython/doc/nerve_gic_complex_ref.rst
new file mode 100644
index 00000000..e24e01fc
--- /dev/null
+++ b/cython/doc/nerve_gic_complex_ref.rst
@@ -0,0 +1,10 @@
+================================
+Cover complexes reference manual
+================================
+
+.. autoclass:: gudhi.CoverComplex
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+ .. automethod:: gudhi.CoverComplex.__init__
diff --git a/cython/doc/nerve_gic_complex_sum.rst b/cython/doc/nerve_gic_complex_sum.rst
new file mode 100644
index 00000000..72782c7a
--- /dev/null
+++ b/cython/doc/nerve_gic_complex_sum.rst
@@ -0,0 +1,15 @@
+================================================================= =================================== ===================================
+:Author: Mathieu Carrière :Introduced in: GUDHI 2.1.0 :Copyright: GPL v3
+:Requires: CGAL :math:`\geq` 4.8.1
+================================================================= =================================== ===================================
+
++----------------------------------------------------------------+------------------------------------------------------------------------+
+| .. figure:: | Nerves and Graph Induced Complexes are cover complexes, i.e. |
+| ../../doc/Nerve_GIC/gicvisu.jpg | simplicial complexes that provably contain topological information |
+| :alt: Graph Induced Complex of a point cloud. | about the input data. They can be computed with a cover of the data, |
+| :figclass: align-center | that comes i.e. from the preimage of a family of intervals covering |
+| | the image of a scalar-valued function defined on the data. |
+| Graph Induced Complex of a point cloud. | |
++----------------------------------------------------------------+------------------------------------------------------------------------+
+| :doc:`nerve_gic_complex_user` | :doc:`nerve_gic_complex_ref` |
++----------------------------------------------------------------+------------------------------------------------------------------------+
diff --git a/cython/doc/nerve_gic_complex_user.rst b/cython/doc/nerve_gic_complex_user.rst
new file mode 100644
index 00000000..d774827e
--- /dev/null
+++ b/cython/doc/nerve_gic_complex_user.rst
@@ -0,0 +1,312 @@
+Cover complexes user manual
+===========================
+Definition
+----------
+
+.. include:: nerve_gic_complex_sum.rst
+
+Visualizations of the simplicial complexes can be done with
+neato (from `graphviz <http://www.graphviz.org/>`_),
+`geomview <http://www.geomview.org/>`_, or
+`KeplerMapper <https://github.com/MLWave/kepler-mapper>`_.
+Input point clouds are assumed to be
+`OFF files <http://www.geomview.org/docs/html/OFF.html>`_.
+
+Covers
+------
+
+Nerves and Graph Induced Complexes require a cover C of the input point cloud P,
+that is a set of subsets of P whose union is P itself.
+Very often, this cover is obtained from the preimage of a family of intervals covering
+the image of some scalar-valued function f defined on P. This family is parameterized
+by its resolution, which can be either the number or the length of the intervals,
+and its gain, which is the overlap percentage between consecutive intervals (ordered by their first values).
+
+Nerves
+------
+
+Nerve definition
+^^^^^^^^^^^^^^^^
+
+Assume you are given a cover C of your point cloud P. Then, the Nerve of this cover
+is the simplicial complex that has one k-simplex per k-fold intersection of cover elements.
+See also `Wikipedia <https://en.wikipedia.org/wiki/Nerve_of_a_covering>`_.
+
+.. figure::
+ ../../doc/Nerve_GIC/nerve.png
+ :figclass: align-center
+ :alt: Nerve of a double torus
+
+ Nerve of a double torus
+
+Example
+^^^^^^^
+
+This example builds the Nerve of a point cloud sampled on a 3D human shape (human.off).
+The cover C comes from the preimages of intervals (10 intervals with gain 0.3)
+covering the height function (coordinate 2),
+which are then refined into their connected components using the triangulation of the .OFF file.
+
+.. testcode::
+
+ import gudhi
+ nerve_complex = gudhi.CoverComplex()
+ nerve_complex.set_verbose(True)
+
+ if (nerve_complex.read_point_cloud(gudhi.__root_source_dir__ + \
+ '/data/points/human.off')):
+ nerve_complex.set_type('Nerve')
+ nerve_complex.set_color_from_coordinate(2)
+ nerve_complex.set_function_from_coordinate(2)
+ nerve_complex.set_graph_from_OFF()
+ nerve_complex.set_resolution_with_interval_number(10)
+ nerve_complex.set_gain(0.3)
+ nerve_complex.set_cover_from_function()
+ nerve_complex.find_simplices()
+ nerve_complex.write_info()
+ simplex_tree = nerve_complex.create_simplex_tree()
+ nerve_complex.compute_PD()
+ result_str = 'Nerve is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \
+ repr(simplex_tree.num_simplices()) + ' simplices - ' + \
+ repr(simplex_tree.num_vertices()) + ' vertices.'
+ print(result_str)
+ for filtered_value in simplex_tree.get_filtration():
+ print(filtered_value[0])
+
+the program output is:
+
+.. code-block:: none
+
+ Min function value = -0.979672 and Max function value = 0.816414
+ Interval 0 = [-0.979672, -0.761576]
+ Interval 1 = [-0.838551, -0.581967]
+ Interval 2 = [-0.658942, -0.402359]
+ Interval 3 = [-0.479334, -0.22275]
+ Interval 4 = [-0.299725, -0.0431414]
+ Interval 5 = [-0.120117, 0.136467]
+ Interval 6 = [0.059492, 0.316076]
+ Interval 7 = [0.239101, 0.495684]
+ Interval 8 = [0.418709, 0.675293]
+ Interval 9 = [0.598318, 0.816414]
+ Computing preimages...
+ Computing connected components...
+ 5 interval(s) in dimension 0:
+ [-0.909111, 0.0081753]
+ [-0.171433, 0.367393]
+ [-0.171433, 0.367393]
+ [-0.909111, 0.745853]
+ 0 interval(s) in dimension 1:
+
+.. testoutput::
+
+ Nerve is of dimension 1 - 41 simplices - 21 vertices.
+ [0]
+ [1]
+ [4]
+ [1, 4]
+ [2]
+ [0, 2]
+ [8]
+ [2, 8]
+ [5]
+ [4, 5]
+ [9]
+ [8, 9]
+ [13]
+ [5, 13]
+ [14]
+ [9, 14]
+ [19]
+ [13, 19]
+ [25]
+ [32]
+ [20]
+ [20, 32]
+ [33]
+ [25, 33]
+ [26]
+ [14, 26]
+ [19, 26]
+ [42]
+ [26, 42]
+ [34]
+ [33, 34]
+ [27]
+ [20, 27]
+ [35]
+ [27, 35]
+ [34, 35]
+ [35, 42]
+ [44]
+ [35, 44]
+ [54]
+ [44, 54]
+
+
+The program also writes a file ../../data/points/human.off_sc.txt. The first
+three lines in this file are the location of the input point cloud and the
+function used to compute the cover.
+The fourth line contains the number of vertices nv and edges ne of the Nerve.
+The next nv lines represent the vertices. Each line contains the vertex ID,
+the number of data points it contains, and their average color function value.
+Finally, the next ne lines represent the edges, characterized by the ID of
+their vertices.
+
+Using KeplerMapper, one can obtain the following visualization:
+
+.. figure::
+ ../../doc/Nerve_GIC/nervevisu.jpg
+ :figclass: align-center
+ :alt: Visualization with KeplerMapper
+
+ Visualization with KeplerMapper
+
+Graph Induced Complexes (GIC)
+-----------------------------
+
+GIC definition
+^^^^^^^^^^^^^^
+
+Again, assume you are given a cover C of your point cloud P. Moreover, assume
+you are also given a graph G built on top of P. Then, for any clique in G
+whose nodes all belong to different elements of C, the GIC includes a
+corresponding simplex, whose dimension is the number of nodes in the clique
+minus one.
+See :cite:`Dey13` for more details.
+
+.. figure::
+ ../../doc/Nerve_GIC/GIC.jpg
+ :figclass: align-center
+ :alt: GIC of a point cloud
+
+ GIC of a point cloud
+
+Example with cover from Voronoï
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+This example builds the GIC of a point cloud sampled on a 3D human shape
+(human.off).
+We randomly subsampled 100 points in the point cloud, which act as seeds of
+a geodesic Voronoï diagram. Each cell of the diagram is then an element of C.
+The graph G (used to compute both the geodesics for Voronoï and the GIC)
+comes from the triangulation of the human shape. Note that the resulting
+simplicial complex is in dimension 3 in this example.
+
+.. testcode::
+
+ import gudhi
+ nerve_complex = gudhi.CoverComplex()
+
+ if (nerve_complex.read_point_cloud(gudhi.__root_source_dir__ + \
+ '/data/points/human.off')):
+ nerve_complex.set_type('GIC')
+ nerve_complex.set_color_from_coordinate()
+ nerve_complex.set_graph_from_OFF()
+ nerve_complex.set_cover_from_Voronoi(700)
+ nerve_complex.find_simplices()
+ nerve_complex.plot_off()
+
+the program outputs SC.off. Using e.g.
+
+.. code-block:: none
+
+ geomview ../../data/points/human.off_sc.off
+
+one can obtain the following visualization:
+
+.. figure::
+ ../../doc/Nerve_GIC/gicvoronoivisu.jpg
+ :figclass: align-center
+ :alt: Visualization with Geomview
+
+ Visualization with Geomview
+
+Functional GIC
+^^^^^^^^^^^^^^
+
+If one restricts to the cliques in G whose nodes all belong to preimages of
+consecutive intervals (assuming the cover of the height function is minimal,
+i.e. no more than two intervals can intersect at a time), the GIC is of
+dimension one, i.e. a graph.
+We call this graph the functional GIC. See :cite:`Carriere16` for more details.
+
+Example
+^^^^^^^
+
+Functional GIC comes with automatic selection of the Rips threshold,
+the resolution and the gain of the function cover. See :cite:`Carriere17c` for
+more details. In this example, we compute the functional GIC of a Klein bottle
+embedded in R^5, where the graph G comes from a Rips complex with automatic
+threshold, and the cover C comes from the preimages of intervals covering the
+first coordinate, with automatic resolution and gain. Note that automatic
+threshold, resolution and gain can be computed as well for the Nerve.
+
+.. testcode::
+
+ import gudhi
+ nerve_complex = gudhi.CoverComplex()
+
+ if (nerve_complex.read_point_cloud(gudhi.__root_source_dir__ + \
+ '/data/points/KleinBottle5D.off')):
+ nerve_complex.set_type('GIC')
+ nerve_complex.set_color_from_coordinate(0)
+ nerve_complex.set_function_from_coordinate(0)
+ nerve_complex.set_graph_from_automatic_rips()
+ nerve_complex.set_automatic_resolution()
+ nerve_complex.set_gain()
+ nerve_complex.set_cover_from_function()
+ nerve_complex.find_simplices()
+ nerve_complex.plot_dot()
+
+the program outputs SC.dot. Using e.g.
+
+.. code-block:: none
+
+ neato ../../data/points/KleinBottle5D.off_sc.dot -Tpdf -o ../../data/points/KleinBottle5D.off_sc.pdf
+
+one can obtain the following visualization:
+
+.. figure::
+ ../../doc/Nerve_GIC/coordGICvisu2.jpg
+ :figclass: align-center
+ :alt: Visualization with neato
+
+ Visualization with neato
+
+where nodes are colored by the filter function values and, for each node, the
+first number is its ID and the second is the number of data points that it
+contains.
+
+We also provide an example on a set of 72 pictures taken around the same object
+(lucky_cat.off).
+The function is now the first eigenfunction given by PCA, whose values are
+written in a file (lucky_cat_PCA1). Threshold, resolution and gain are
+automatically selected as before.
+
+.. testcode::
+
+ import gudhi
+ nerve_complex = gudhi.CoverComplex()
+
+ if (nerve_complex.read_point_cloud(gudhi.__root_source_dir__ + \
+ '/data/points/COIL_database/lucky_cat.off')):
+ nerve_complex.set_type('GIC')
+ pca_file = gudhi.__root_source_dir__ + \
+ '/data/points/COIL_database/lucky_cat_PCA1'
+ nerve_complex.set_color_from_file(pca_file)
+ nerve_complex.set_function_from_file(pca_file)
+ nerve_complex.set_graph_from_automatic_rips()
+ nerve_complex.set_automatic_resolution()
+ nerve_complex.set_gain()
+ nerve_complex.set_cover_from_function()
+ nerve_complex.find_simplices()
+ nerve_complex.plot_dot()
+
+the program outputs again SC.dot which gives the following visualization after using neato:
+
+.. figure::
+ ../../doc/Nerve_GIC/funcGICvisu.jpg
+ :figclass: align-center
+ :alt: Visualization with neato
+
+ Visualization with neato
diff --git a/cython/example/coordinate_graph_induced_complex.py b/cython/example/coordinate_graph_induced_complex.py
new file mode 100755
index 00000000..9e93109a
--- /dev/null
+++ b/cython/example/coordinate_graph_induced_complex.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python
+
+import gudhi
+import argparse
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2018 Inria
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2018 Inria"
+__license__ = "GPL v3"
+
+parser = argparse.ArgumentParser(description='Coordinate GIC '
+ 'from points read in a OFF file.',
+ epilog='Example: '
+ 'example/coordinate_graph_induced_complex.py '
+ '-f ../data/points/KleinBottle5D.off -c 0 -v'
+ '- Constructs the coordinate GIC with the '
+ 'points from the given OFF file.')
+parser.add_argument("-f", "--file", type=str, required=True)
+parser.add_argument("-c", "--coordinate", type=int, default=0)
+parser.add_argument("-v", "--verbose", default=False, action='store_true' , help='Flag for program verbosity')
+
+args = parser.parse_args()
+
+nerve_complex = gudhi.CoverComplex()
+nerve_complex.set_verbose(args.verbose)
+
+if (nerve_complex.read_point_cloud(args.file)):
+ nerve_complex.set_type('GIC')
+ nerve_complex.set_color_from_coordinate(args.coordinate)
+ nerve_complex.set_function_from_coordinate(args.coordinate)
+ nerve_complex.set_graph_from_automatic_rips()
+ nerve_complex.set_automatic_resolution()
+ nerve_complex.set_gain()
+ nerve_complex.set_cover_from_function()
+ nerve_complex.find_simplices()
+ nerve_complex.plot_dot()
+ simplex_tree = nerve_complex.create_simplex_tree()
+ nerve_complex.compute_PD()
+ if (args.verbose):
+ print('Iterator on coordinate GIC simplices')
+ result_str = 'Coordinate GIC is of dimension ' + \
+ repr(simplex_tree.dimension()) + ' - ' + \
+ repr(simplex_tree.num_simplices()) + ' simplices - ' + \
+ repr(simplex_tree.num_vertices()) + ' vertices.'
+ print(result_str)
+ for filtered_value in simplex_tree.get_filtration():
+ print(filtered_value[0])
diff --git a/cython/example/functional_graph_induced_complex.py b/cython/example/functional_graph_induced_complex.py
new file mode 100755
index 00000000..6ad7c2ec
--- /dev/null
+++ b/cython/example/functional_graph_induced_complex.py
@@ -0,0 +1,69 @@
+#!/usr/bin/env python
+
+import gudhi
+import argparse
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2018 Inria
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2018 Inria"
+__license__ = "GPL v3"
+
+parser = argparse.ArgumentParser(description='Functional GIC '
+ 'from points read in a OFF file.',
+ epilog='Example: '
+ 'example/functional_graph_induced_complex.py '
+ '-o ../data/points/COIL_database/lucky_cat.off '
+ '-f ../data/points/COIL_database/lucky_cat_PCA1'
+ '- Constructs the functional GIC with the '
+ 'points from the given OFF and function files.')
+parser.add_argument("-o", "--off-file", type=str, required=True)
+parser.add_argument("-f", "--function-file", type=str, required=True)
+parser.add_argument("-v", "--verbose", default=False, action='store_true' , help='Flag for program verbosity')
+
+args = parser.parse_args()
+
+nerve_complex = gudhi.CoverComplex()
+nerve_complex.set_verbose(args.verbose)
+
+if (nerve_complex.read_point_cloud(args.off_file)):
+ nerve_complex.set_type('GIC')
+ nerve_complex.set_color_from_file(args.function_file)
+ nerve_complex.set_function_from_file(args.function_file)
+ nerve_complex.set_graph_from_automatic_rips()
+ nerve_complex.set_automatic_resolution()
+ nerve_complex.set_gain()
+ nerve_complex.set_cover_from_function()
+ nerve_complex.find_simplices()
+ nerve_complex.plot_dot()
+ simplex_tree = nerve_complex.create_simplex_tree()
+ nerve_complex.compute_PD()
+ if (args.verbose):
+ print('Iterator on functional GIC simplices')
+ result_str = 'Functional GIC is of dimension ' + \
+ repr(simplex_tree.dimension()) + ' - ' + \
+ repr(simplex_tree.num_simplices()) + ' simplices - ' + \
+ repr(simplex_tree.num_vertices()) + ' vertices.'
+ print(result_str)
+ for filtered_value in simplex_tree.get_filtration():
+ print(filtered_value[0])
diff --git a/cython/example/nerve_of_a_covering.py b/cython/example/nerve_of_a_covering.py
new file mode 100755
index 00000000..c5577cb1
--- /dev/null
+++ b/cython/example/nerve_of_a_covering.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+
+import gudhi
+import argparse
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2018 Inria
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2018 Inria"
+__license__ = "GPL v3"
+
+parser = argparse.ArgumentParser(description='Nerve of a covering creation '
+ 'from points read in a OFF file.',
+ epilog='Example: '
+ 'example/nerve_of_a_covering.py '
+ '-f ../data/points/human.off -c 2 -r 10 -g 0.3'
+ '- Constructs Nerve of a covering with the '
+ 'points from the given OFF file.')
+parser.add_argument("-f", "--file", type=str, required=True)
+parser.add_argument("-c", "--coordinate", type=int, default=0)
+parser.add_argument("-r", "--resolution", type=int, default=10)
+parser.add_argument("-g", "--gain", type=float, default=0.3)
+parser.add_argument("-v", "--verbose", default=False, action='store_true' , help='Flag for program verbosity')
+
+args = parser.parse_args()
+
+nerve_complex = gudhi.CoverComplex()
+nerve_complex.set_verbose(args.verbose)
+
+if (nerve_complex.read_point_cloud(args.file)):
+ nerve_complex.set_type('Nerve')
+ nerve_complex.set_color_from_coordinate(args.coordinate)
+ nerve_complex.set_function_from_coordinate(args.coordinate)
+ nerve_complex.set_graph_from_OFF()
+ nerve_complex.set_resolution_with_interval_number(args.resolution)
+ nerve_complex.set_gain(args.gain)
+ nerve_complex.set_cover_from_function()
+ nerve_complex.find_simplices()
+ nerve_complex.write_info()
+ simplex_tree = nerve_complex.create_simplex_tree()
+ nerve_complex.compute_PD()
+ if (args.verbose):
+ print('Iterator on graph induced complex simplices')
+ result_str = 'Nerve is of dimension ' + \
+ repr(simplex_tree.dimension()) + ' - ' + \
+ repr(simplex_tree.num_simplices()) + ' simplices - ' + \
+ repr(simplex_tree.num_vertices()) + ' vertices.'
+ print(result_str)
+ for filtered_value in simplex_tree.get_filtration():
+ print(filtered_value[0])
diff --git a/cython/example/voronoi_graph_induced_complex.py b/cython/example/voronoi_graph_induced_complex.py
new file mode 100755
index 00000000..8266a0e4
--- /dev/null
+++ b/cython/example/voronoi_graph_induced_complex.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+
+import gudhi
+import argparse
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2018 Inria
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2018 Inria"
+__license__ = "GPL v3"
+
+parser = argparse.ArgumentParser(description='Voronoi GIC '
+ 'from points read in a OFF file.',
+ epilog='Example: '
+ 'example/voronoi_graph_induced_complex.py '
+ '-f ../data/points/human.off -n 700 -v '
+ '- Constructs the Voronoi GIC with the '
+ 'points from the given OFF file.')
+parser.add_argument("-f", "--file", type=str, required=True)
+parser.add_argument("-n", "--subsample-nb-points", type=int, default=100)
+parser.add_argument("-v", "--verbose", default=False, action='store_true' , help='Flag for program verbosity')
+
+args = parser.parse_args()
+
+nerve_complex = gudhi.CoverComplex()
+nerve_complex.set_verbose(args.verbose)
+
+if (nerve_complex.read_point_cloud(args.file)):
+ nerve_complex.set_type('GIC')
+ nerve_complex.set_color_from_coordinate()
+ nerve_complex.set_graph_from_OFF()
+ nerve_complex.set_cover_from_Voronoi(args.subsample_nb_points)
+ nerve_complex.find_simplices()
+ nerve_complex.plot_off()
+ simplex_tree = nerve_complex.create_simplex_tree()
+ nerve_complex.compute_PD()
+ if (args.verbose):
+ print('Iterator on graph induced complex simplices')
+ result_str = 'Graph induced complex is of dimension ' + \
+ repr(simplex_tree.dimension()) + ' - ' + \
+ repr(simplex_tree.num_simplices()) + ' simplices - ' + \
+ repr(simplex_tree.num_vertices()) + ' vertices.'
+ print(result_str)
+ for filtered_value in simplex_tree.get_filtration():
+ print(filtered_value[0])
diff --git a/cython/gudhi.pyx.in b/cython/gudhi.pyx.in
index b94f2251..0d4b966b 100644
--- a/cython/gudhi.pyx.in
+++ b/cython/gudhi.pyx.in
@@ -26,6 +26,7 @@ __license__ = "GPL v3"
__version__ = "@GUDHI_VERSION@"
# This variable is used by doctest to find files
__root_source_dir__ = "@CMAKE_SOURCE_DIR@"
+__debug_info__ = @GUDHI_CYTHON_DEBUG_INFO@
include '@CMAKE_CURRENT_SOURCE_DIR@/cython/off_reader.pyx'
include '@CMAKE_CURRENT_SOURCE_DIR@/cython/simplex_tree.pyx'
@@ -41,3 +42,4 @@ include '@CMAKE_CURRENT_SOURCE_DIR@/cython/strong_witness_complex.pyx'
@GUDHI_CYTHON_SUBSAMPLING@
@GUDHI_CYTHON_TANGENTIAL_COMPLEX@
@GUDHI_CYTHON_BOTTLENECK_DISTANCE@
+@GUDHI_CYTHON_NERVE_GIC@
diff --git a/cython/include/Nerve_gic_interface.h b/cython/include/Nerve_gic_interface.h
new file mode 100644
index 00000000..aa71e2a6
--- /dev/null
+++ b/cython/include/Nerve_gic_interface.h
@@ -0,0 +1,61 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2018 Inria
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef INCLUDE_NERVE_GIC_INTERFACE_H_
+#define INCLUDE_NERVE_GIC_INTERFACE_H_
+
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/distance_functions.h>
+#include <gudhi/GIC.h>
+
+#include "Simplex_tree_interface.h"
+
+#include <iostream>
+#include <vector>
+#include <string>
+
+namespace Gudhi {
+
+namespace cover_complex {
+
+class Nerve_gic_interface : public Cover_complex<std::vector<double>> {
+ public:
+ void create_simplex_tree(Simplex_tree_interface<>* simplex_tree) {
+ create_complex(*simplex_tree);
+ simplex_tree->initialize_filtration();
+ }
+ void set_cover_from_Euclidean_Voronoi(int m) {
+ set_cover_from_Voronoi(Gudhi::Euclidean_distance(), m);
+ }
+ double set_graph_from_automatic_euclidean_rips(int N) {
+ return set_graph_from_automatic_rips(Gudhi::Euclidean_distance(), N);
+ }
+ void set_graph_from_euclidean_rips(double threshold) {
+ set_graph_from_rips(threshold, Gudhi::Euclidean_distance());
+ }
+};
+
+} // namespace cover_complex
+
+} // namespace Gudhi
+
+#endif // INCLUDE_NERVE_GIC_INTERFACE_H_
diff --git a/cython/setup.py.in b/cython/setup.py.in
index ee381a1b..4037aab6 100644
--- a/cython/setup.py.in
+++ b/cython/setup.py.in
@@ -46,9 +46,5 @@ setup(
version='@GUDHI_VERSION@',
url='http://gudhi.gforge.inria.fr/',
ext_modules = cythonize(gudhi),
- install_requires = [
- "matplotlib",
- "numpy",
- "cython",
- ],
+ install_requires = ["cython",],
)
diff --git a/cython/test/test_cover_complex.py b/cython/test/test_cover_complex.py
new file mode 100755
index 00000000..58935264
--- /dev/null
+++ b/cython/test/test_cover_complex.py
@@ -0,0 +1,92 @@
+from gudhi import CoverComplex
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2018 Inria
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2018 Inria"
+__license__ = "GPL v3"
+
+
+def test_empty_constructor():
+ # Try to create an empty CoverComplex
+ cover = CoverComplex()
+ assert cover.__is_defined() == True
+
+def test_non_existing_file_read():
+ # Try to open a non existing file
+ cover = CoverComplex()
+ assert (cover.read_point_cloud('pouetpouettralala.toubiloubabdou') == False)
+
+def test_files_creation():
+ # Create test file
+ cloud_file = open('cloud', 'w')
+ cloud_file.write('nOFF\n3\n3 0 0\n0 0 0\n2 1 0\n4 0 0')
+ cloud_file.close()
+ cover_file = open('cover', 'w')
+ cover_file.write('1\n2\n3')
+ cover_file.close()
+ graph_file = open('graph', 'w')
+ graph_file.write('0 1\n0 2\n1 2')
+ graph_file.close()
+
+def test_nerve():
+ nerve = CoverComplex()
+ nerve.set_type('Nerve')
+ assert (nerve.read_point_cloud('cloud') == True)
+ nerve.set_color_from_coordinate()
+ nerve.set_graph_from_file('graph')
+ nerve.set_cover_from_file('cover')
+ nerve.find_simplices()
+ stree = nerve.create_simplex_tree()
+
+ assert (stree.num_vertices() == 3)
+ assert ((stree.num_simplices() - stree.num_vertices()) == 0)
+ assert (stree.dimension() == 0)
+
+def test_graph_induced_complex():
+ gic = CoverComplex()
+ gic.set_type('GIC')
+ assert (gic.read_point_cloud('cloud') == True)
+ gic.set_color_from_coordinate()
+ gic.set_graph_from_file('graph')
+ gic.set_cover_from_file('cover')
+ gic.find_simplices()
+ stree = gic.create_simplex_tree()
+
+ assert (stree.num_vertices() == 3)
+ assert ((stree.num_simplices() - stree.num_vertices()) == 4)
+ assert (stree.dimension() == 2)
+
+def test_voronoi_graph_induced_complex():
+ gic = CoverComplex()
+ gic.set_type('GIC')
+ assert (gic.read_point_cloud('cloud') == True)
+ gic.set_color_from_coordinate()
+ gic.set_graph_from_file('graph')
+ gic.set_cover_from_Voronoi(2)
+ gic.find_simplices()
+ stree = gic.create_simplex_tree()
+
+ assert (stree.num_vertices() == 2)
+ assert ((stree.num_simplices() - stree.num_vertices()) == 1)
+ assert (stree.dimension() == 1)
diff --git a/data/points/human.COPYRIGHT b/data/points/human.COPYRIGHT
new file mode 100644
index 00000000..cb9bdb59
--- /dev/null
+++ b/data/points/human.COPYRIGHT
@@ -0,0 +1,77 @@
+The human.off point cloud is available at this webpage:
+http://segeval.cs.princeton.edu/
+
+LICENSE
+=======
+
+AIM@SHAPE General License for Shapes
+Applicable terms
+----------------
+
+This is the general AIM@SHAPE license applicable to models in the
+Shape Repository. It should be noted that each model is a
+representation of, and is distinct from, a shape, whether physical or
+imaginary. While the shape may be subject to its own terms, the terms
+governing the model you are about to download are described herein.
+
+For some models, the owners have defined specific licenses. The terms
+and conditions laid down in these licenses are in addition to the
+terms prescribed here, and are to be adhered to strictly when using
+such models.
+
+Acknowledgements
+----------------
+
+When including models from the Shape Repository in your website or
+research work, or when using them for other purposes allowed by the
+terms described herein, the AIM@SHAPE project and the model owner must
+be acknowledged as the sources of the models, for example with the
+phrase, "... model is provided courtesy of <model_owner> by the
+AIM@SHAPE Shape Repository."
+
+Information on <model_owner> is present in the accompanying metadata
+files and, where present, owner licenses.
+
+Metadata
+--------
+
+Each model is accompanied by its metadata file. Please keep this file
+with the model as it contains important information about the
+model. Please let us know if you find any errors in the metadata.
+
+(Im)proper use
+--------------
+
+Some models in the Shape Repository represent artifacts of religious,
+cultural and/or historical significance, e.g. the Max Planck
+model. Such models have been entrusted to the Shape Repository under
+the hope that they will be used respectfully and
+conscientiously. Please refrain from conducting experiments on them
+that may be rash or insensitive to people's feelings. Such experiments
+include, but are not limited to, morphing, animation, boolean
+operations, simulations of burning, breaking, exploding and melting.
+
+Models in the Shape Repository are made freely available for research
+and non-commercial purposes only. Use of these models for commercial
+purposes is permitted only after the express approval of the Shape
+Repository and the onwner has been obtained. Please contact us using
+the webform on our site in this regard.
+
+
+CITATION
+========
+
+If you use any part of this benchmark, please cite:
+Xiaobai Chen, Aleksey Golovinskiy, and Thomas Funkhouser,
+A Benchmark for 3D Mesh Segmentation
+ACM Transactions on Graphics (Proc. SIGGRAPH), 28(3), 2009.
+
+@article{Chen:2009:ABF,
+ author = "Xiaobai Chen and Aleksey Golovinskiy and Thomas Funkhouser",
+ title = "A Benchmark for {3D} Mesh Segmentation",
+ journal = "ACM Transactions on Graphics (Proc. SIGGRAPH)",
+ year = "2009",
+ month = aug,
+ volume = "28",
+ number = "3"
+} \ No newline at end of file
diff --git a/doc/common/header.html b/doc/common/header.html
index f8b13ec4..c12d2816 100644
--- a/doc/common/header.html
+++ b/doc/common/header.html
@@ -9,7 +9,7 @@
<!--BEGIN PROJECT_NAME--><title>$projectname: $title</title><!--END PROJECT_NAME-->
<!--BEGIN !PROJECT_NAME--><title>$title</title><!--END !PROJECT_NAME-->
<!-- GUDHI website css for header BEGIN -->
-<link rel="stylesheet" type="text/css" href="http://gudhi.gforge.inria.fr/assets/css/styles_feeling_responsive.css" />
+<link rel="stylesheet" type="text/css" href="https://gudhi.inria.fr/assets/css/styles_feeling_responsive.css" />
<!-- GUDHI website css for header END -->
<link href="$relpath^tabs.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="$relpath^jquery.js"></script>
diff --git a/doc/common/installation.h b/doc/common/installation.h
index 12407c18..c27e4f56 100644
--- a/doc/common/installation.h
+++ b/doc/common/installation.h
@@ -2,22 +2,34 @@
* \tableofcontents
* As GUDHI is a header only library, there is no need to install the library.
*
- * Examples of GUDHI headers inclusion can be found in \ref demos.
+ * Examples of GUDHI headers inclusion can be found in \ref utilities.
*
* \section compiling Compiling
* The library uses c++11 and requires <a target="_blank" href="http://www.boost.org/">Boost</a> &ge; 1.48.0
* and <a target="_blank" href="https://www.cmake.org/">CMake</a> &ge; 3.1.
* It is a multi-platform library and compiles on Linux, Mac OSX and Visual Studio 2015.
*
- * \subsection demos Demos and examples
- * To build the demos and examples, run the following commands in a terminal:
+ * \subsection utilities Utilities and examples
+ * To build the utilities, run the following commands in a terminal:
\verbatim cd /path-to-gudhi/
mkdir build
cd build/
cmake ..
make \endverbatim
- * A list of examples is available <a href="examples.html">here</a>.
+ * By default, examples are disabled. You can activate their compilation with
+ * <a href="https://cmake.org/cmake/help/v3.0/manual/ccmake.1.html">ccmake</a> (on Linux and Mac OSX),
+ * <a href="https://cmake.org/cmake/help/v3.0/manual/cmake-gui.1.html">cmake-gui</a> (on Windows) or by modifying the
+ * cmake command as follows :
+\verbatim cmake -DWITH_GUDHI_EXAMPLE=ON ..
+make \endverbatim
+ * A list of utilities and examples is available <a href="examples.html">here</a>.
*
+ * \subsection libraryinstallation Installation
+ * To install the library (headers and activated utilities), run the following command in a terminal:
+ * \verbatim make install \endverbatim
+ * This action may require sudoer or administrator privileges, depending on the operating system and
+ * on <a href="https://cmake.org/cmake/help/v3.0/variable/CMAKE_INSTALL_PREFIX.html">CMAKE_INSTALL_PREFIX</a>.
+ *
* \subsection testsuites Test suites
* To test your build, run the following command in a terminal:
* \verbatim make test \endverbatim
@@ -31,6 +43,10 @@ make doxygen
# You can customize the directory name by calling `cmake -DUSER_VERSION_DIR=/my/custom/folder`
\endverbatim
*
+ * \subsection helloworld Hello world !
+ * The <a target="_blank" href="https://gitlab.inria.fr/GUDHI/hello-gudhi-world">Hello world for GUDHI</a>
+ * project is an example to help developers to make their own C++ project on top of the GUDHI library.
+ *
* \section optionallibrary Optional third-party library
* \subsection gmp GMP
* The multi-field persistent homology algorithm requires GMP which is a free library for arbitrary-precision
diff --git a/example/Bottleneck_distance/alpha_rips_persistence_bottleneck_distance.cpp b/example/Bottleneck_distance/alpha_rips_persistence_bottleneck_distance.cpp
index 1e27887c..2db1ef80 100644
--- a/example/Bottleneck_distance/alpha_rips_persistence_bottleneck_distance.cpp
+++ b/example/Bottleneck_distance/alpha_rips_persistence_bottleneck_distance.cpp
@@ -185,6 +185,6 @@ void program_options(int argc, char * argv[]
std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
std::cout << visible << std::endl;
- std::abort();
+ exit(-1);
}
}
diff --git a/example/Cech_complex/cech_complex_step_by_step.cpp b/example/Cech_complex/cech_complex_step_by_step.cpp
index d2dc8b65..6fbbde5b 100644
--- a/example/Cech_complex/cech_complex_step_by_step.cpp
+++ b/example/Cech_complex/cech_complex_step_by_step.cpp
@@ -161,6 +161,6 @@ void program_options(int argc, char* argv[], std::string& off_file_points, Filtr
std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
std::cout << visible << std::endl;
- std::abort();
+ exit(-1);
}
}
diff --git a/example/Nerve_GIC/CoordGIC.cpp b/example/Nerve_GIC/CoordGIC.cpp
index 73edae18..9889b198 100644
--- a/example/Nerve_GIC/CoordGIC.cpp
+++ b/example/Nerve_GIC/CoordGIC.cpp
@@ -79,10 +79,10 @@ int main(int argc, char **argv) {
// --------------------------------------------
if (verb) {
- std::cout << "Functional GIC is of dimension " << stree.dimension() << " - " << stree.num_simplices()
+ std::cout << "Coordinate GIC is of dimension " << stree.dimension() << " - " << stree.num_simplices()
<< " simplices - " << stree.num_vertices() << " vertices." << std::endl;
- std::cout << "Iterator on functional GIC simplices" << std::endl;
+ std::cout << "Iterator on coordinate GIC simplices" << std::endl;
for (auto f_simplex : stree.filtration_simplex_range()) {
for (auto vertex : stree.simplex_vertex_range(f_simplex)) {
std::cout << vertex << " ";
diff --git a/example/Persistent_cohomology/persistence_from_file.cpp b/example/Persistent_cohomology/persistence_from_file.cpp
index c40434a4..53456919 100644
--- a/example/Persistent_cohomology/persistence_from_file.cpp
+++ b/example/Persistent_cohomology/persistence_from_file.cpp
@@ -138,6 +138,6 @@ void program_options(int argc, char * argv[]
std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
std::cout << visible << std::endl;
- std::abort();
+ exit(-1);
}
}
diff --git a/example/Persistent_cohomology/rips_multifield_persistence.cpp b/example/Persistent_cohomology/rips_multifield_persistence.cpp
index 626ec2ef..d6a5bdad 100644
--- a/example/Persistent_cohomology/rips_multifield_persistence.cpp
+++ b/example/Persistent_cohomology/rips_multifield_persistence.cpp
@@ -149,6 +149,6 @@ void program_options(int argc, char * argv[]
std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
std::cout << visible << std::endl;
- std::abort();
+ exit(-1);
}
}
diff --git a/example/Persistent_cohomology/rips_persistence_step_by_step.cpp b/example/Persistent_cohomology/rips_persistence_step_by_step.cpp
index 7c81fcfb..796cfa3a 100644
--- a/example/Persistent_cohomology/rips_persistence_step_by_step.cpp
+++ b/example/Persistent_cohomology/rips_persistence_step_by_step.cpp
@@ -161,6 +161,6 @@ void program_options(int argc, char * argv[]
std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
std::cout << visible << std::endl;
- std::abort();
+ exit(-1);
}
}
diff --git a/example/Persistent_cohomology/rips_persistence_via_boundary_matrix.cpp b/example/Persistent_cohomology/rips_persistence_via_boundary_matrix.cpp
index c7607dce..71fc0802 100644
--- a/example/Persistent_cohomology/rips_persistence_via_boundary_matrix.cpp
+++ b/example/Persistent_cohomology/rips_persistence_via_boundary_matrix.cpp
@@ -167,6 +167,6 @@ void program_options(int argc, char * argv[]
std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
std::cout << visible << std::endl;
- std::abort();
+ exit(-1);
}
}
diff --git a/example/Simplex_tree/cech_complex_cgal_mini_sphere_3d.cpp b/example/Simplex_tree/cech_complex_cgal_mini_sphere_3d.cpp
index 08ed74bb..34092ef6 100644
--- a/example/Simplex_tree/cech_complex_cgal_mini_sphere_3d.cpp
+++ b/example/Simplex_tree/cech_complex_cgal_mini_sphere_3d.cpp
@@ -171,7 +171,7 @@ void program_options(int argc, char* argv[], std::string& off_file_points, Filtr
std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
std::cout << visible << std::endl;
- std::abort();
+ exit(-1);
}
}
diff --git a/include/gudhi/Bottleneck.h b/include/gudhi/Bottleneck.h
index b0fc3949..7a553006 100644
--- a/include/gudhi/Bottleneck.h
+++ b/include/gudhi/Bottleneck.h
@@ -36,7 +36,7 @@ namespace Gudhi {
namespace persistence_diagram {
-double bottleneck_distance_approx(Persistence_graph& g, double e) {
+inline double bottleneck_distance_approx(Persistence_graph& g, double e) {
double b_lower_bound = 0.;
double b_upper_bound = g.diameter_bound();
const double alpha = std::pow(g.size(), 1. / 5.);
@@ -66,7 +66,7 @@ double bottleneck_distance_approx(Persistence_graph& g, double e) {
return (b_lower_bound + b_upper_bound) / 2.;
}
-double bottleneck_distance_exact(Persistence_graph& g) {
+inline double bottleneck_distance_exact(Persistence_graph& g) {
std::vector<double> sd = g.sorted_distances();
long lower_bound_i = 0;
long upper_bound_i = sd.size() - 1;
diff --git a/include/gudhi/GIC.h b/include/gudhi/GIC.h
index 7aa95210..fea0b861 100644
--- a/include/gudhi/GIC.h
+++ b/include/gudhi/GIC.h
@@ -1193,8 +1193,8 @@ class Cover_complex {
}
Cboot.set_graph_from_automatic_rips(Gudhi::Euclidean_distance());
- Cboot.set_automatic_resolution();
Cboot.set_gain();
+ Cboot.set_automatic_resolution();
Cboot.set_cover_from_function();
Cboot.find_simplices();
Cboot.compute_PD();
@@ -1215,7 +1215,9 @@ class Cover_complex {
*/
double compute_distance_from_confidence_level(double alpha) {
unsigned int N = distribution.size();
- return distribution[std::floor(alpha * N)];
+ double d = distribution[std::floor(alpha * N)];
+ if (verbose) std::cout << "Distance corresponding to confidence " << alpha << " is " << d << std::endl;
+ return d;
}
public:
@@ -1229,6 +1231,7 @@ class Cover_complex {
double level = 1;
for (unsigned int i = 0; i < N; i++)
if (distribution[i] > d){ level = i * 1.0 / N; break; }
+ if (verbose) std::cout << "Confidence level of distance " << d << " is " << level << std::endl;
return level;
}
@@ -1238,9 +1241,8 @@ class Cover_complex {
*
*/
double compute_p_value() {
- double distancemin = -std::numeric_limits<double>::lowest();
- int N = PD.size();
- for (int i = 0; i < N; i++) distancemin = std::min(distancemin, 0.5 * (PD[i].second - PD[i].first));
+ double distancemin = std::numeric_limits<double>::max(); int N = PD.size();
+ for (int i = 0; i < N; i++) distancemin = std::min(distancemin, 0.5 * std::abs(PD[i].second - PD[i].first));
double p_value = 1 - compute_confidence_level_from_distance(distancemin);
if (verbose) std::cout << "p value = " << p_value << std::endl;
return p_value;
diff --git a/include/gudhi/Persistent_cohomology.h b/include/gudhi/Persistent_cohomology.h
index c68b5c0b..c51e47a5 100644
--- a/include/gudhi/Persistent_cohomology.h
+++ b/include/gudhi/Persistent_cohomology.h
@@ -300,7 +300,10 @@ class Persistent_cohomology {
// with multiplicity. We used to sum the coefficients directly in
// annotations_in_boundary by using a map, we now do it later.
typedef std::pair<Column *, int> annotation_t;
- thread_local std::vector<annotation_t> annotations_in_boundary;
+#ifdef GUDHI_CAN_USE_CXX11_THREAD_LOCAL
+ thread_local
+#endif // GUDHI_CAN_USE_CXX11_THREAD_LOCAL
+ std::vector<annotation_t> annotations_in_boundary;
annotations_in_boundary.clear();
int sign = 1 - 2 * (dim_sigma % 2); // \in {-1,1} provides the sign in the
// alternate sum in the boundary.
diff --git a/include/gudhi/Simplex_tree.h b/include/gudhi/Simplex_tree.h
index ee96d5a2..3ab23c12 100644
--- a/include/gudhi/Simplex_tree.h
+++ b/include/gudhi/Simplex_tree.h
@@ -1057,7 +1057,10 @@ class Simplex_tree {
Dictionary_it next = siblings->members().begin();
++next;
- thread_local std::vector<std::pair<Vertex_handle, Node> > inter;
+#ifdef GUDHI_CAN_USE_CXX11_THREAD_LOCAL
+ thread_local
+#endif // GUDHI_CAN_USE_CXX11_THREAD_LOCAL
+ std::vector<std::pair<Vertex_handle, Node> > inter;
for (Dictionary_it s_h = siblings->members().begin();
s_h != siblings->members().end(); ++s_h, ++next) {
Simplex_handle root_sh = find_vertex(s_h->first);
diff --git a/include/gudhi/Tangential_complex.h b/include/gudhi/Tangential_complex.h
index 9d8fdcd3..d1c846cf 100644
--- a/include/gudhi/Tangential_complex.h
+++ b/include/gudhi/Tangential_complex.h
@@ -83,16 +83,11 @@ using namespace internal;
class Vertex_data {
public:
- Vertex_data(std::size_t data = (std::numeric_limits<std::size_t>::max)())
- : m_data(data) { }
+ Vertex_data(std::size_t data = (std::numeric_limits<std::size_t>::max)()) : m_data(data) {}
- operator std::size_t() {
- return m_data;
- }
+ operator std::size_t() { return m_data; }
- operator std::size_t() const {
- return m_data;
- }
+ operator std::size_t() const { return m_data; }
private:
std::size_t m_data;
@@ -101,9 +96,9 @@ class Vertex_data {
/**
* \class Tangential_complex Tangential_complex.h gudhi/Tangential_complex.h
* \brief Tangential complex data structure.
- *
+ *
* \ingroup tangential_complex
- *
+ *
* \details
* The class Tangential_complex represents a tangential complex.
* After the computation of the complex, an optional post-processing called perturbation can
@@ -118,17 +113,14 @@ class Vertex_data {
* or <a target="_blank"
* href="http://doc.cgal.org/latest/Kernel_23/classCGAL_1_1Dynamic__dimension__tag.html">CGAL::Dynamic_dimension_tag</a>
* if you don't.
- * \tparam Concurrency_tag enables sequential versus parallel computation. Possible values are `CGAL::Parallel_tag` (the default) and `CGAL::Sequential_tag`.
- * \tparam Triangulation_ is the type used for storing the local regular triangulations. We highly recommend to use the default value (`CGAL::Regular_triangulation`).
+ * \tparam Concurrency_tag enables sequential versus parallel computation. Possible values are `CGAL::Parallel_tag` (the
+ * default) and `CGAL::Sequential_tag`. \tparam Triangulation_ is the type used for storing the local regular
+ * triangulations. We highly recommend to use the default value (`CGAL::Regular_triangulation`).
*
*/
-template
-<
- typename Kernel_, // ambiant kernel
- typename DimensionTag, // intrinsic dimension
- typename Concurrency_tag = CGAL::Parallel_tag,
- typename Triangulation_ = CGAL::Default
->
+template <typename Kernel_, // ambient kernel
+ typename DimensionTag, // intrinsic dimension
+ typename Concurrency_tag = CGAL::Parallel_tag, typename Triangulation_ = CGAL::Default>
class Tangential_complex {
typedef Kernel_ K;
typedef typename K::FT FT;
@@ -136,23 +128,16 @@ class Tangential_complex {
typedef typename K::Weighted_point_d Weighted_point;
typedef typename K::Vector_d Vector;
- typedef typename CGAL::Default::Get
- <
- Triangulation_,
- CGAL::Regular_triangulation
- <
- CGAL::Epick_d<DimensionTag>,
- CGAL::Triangulation_data_structure
- <
- typename CGAL::Epick_d<DimensionTag>::Dimension,
- CGAL::Triangulation_vertex
- <
- CGAL::Regular_triangulation_traits_adapter< CGAL::Epick_d<DimensionTag> >, Vertex_data
- >,
- CGAL::Triangulation_full_cell<CGAL::Regular_triangulation_traits_adapter< CGAL::Epick_d<DimensionTag> > >
- >
- >
- >::type Triangulation;
+ typedef typename CGAL::Default::Get<
+ Triangulation_,
+ CGAL::Regular_triangulation<
+ CGAL::Epick_d<DimensionTag>,
+ CGAL::Triangulation_data_structure<
+ typename CGAL::Epick_d<DimensionTag>::Dimension,
+ CGAL::Triangulation_vertex<CGAL::Regular_triangulation_traits_adapter<CGAL::Epick_d<DimensionTag> >,
+ Vertex_data>,
+ CGAL::Triangulation_full_cell<
+ CGAL::Regular_triangulation_traits_adapter<CGAL::Epick_d<DimensionTag> > > > > >::type Triangulation;
typedef typename Triangulation::Geom_traits Tr_traits;
typedef typename Triangulation::Weighted_point Tr_point;
typedef typename Tr_traits::Base::Point_d Tr_bare_point;
@@ -174,17 +159,13 @@ class Tangential_complex {
struct Tr_and_VH {
public:
- Tr_and_VH()
- : m_tr(NULL) { }
+ Tr_and_VH() : m_tr(NULL) {}
- Tr_and_VH(int dim)
- : m_tr(new Triangulation(dim)) { }
+ Tr_and_VH(int dim) : m_tr(new Triangulation(dim)) {}
- ~Tr_and_VH() {
- destroy_triangulation();
- }
+ ~Tr_and_VH() { destroy_triangulation(); }
- Triangulation & construct_triangulation(int dim) {
+ Triangulation &construct_triangulation(int dim) {
delete m_tr;
m_tr = new Triangulation(dim);
return tr();
@@ -195,24 +176,16 @@ class Tangential_complex {
m_tr = NULL;
}
- Triangulation & tr() {
- return *m_tr;
- }
+ Triangulation &tr() { return *m_tr; }
- Triangulation const& tr() const {
- return *m_tr;
- }
+ Triangulation const &tr() const { return *m_tr; }
- Tr_vertex_handle const& center_vertex() const {
- return m_center_vertex;
- }
+ Tr_vertex_handle const &center_vertex() const { return m_center_vertex; }
- Tr_vertex_handle & center_vertex() {
- return m_center_vertex;
- }
+ Tr_vertex_handle &center_vertex() { return m_center_vertex; }
private:
- Triangulation* m_tr;
+ Triangulation *m_tr;
Tr_vertex_handle m_center_vertex;
};
@@ -243,9 +216,7 @@ class Tangential_complex {
// For transform_iterator
- static const Tr_point &vertex_handle_to_point(Tr_vertex_handle vh) {
- return vh->point();
- }
+ static const Tr_point &vertex_handle_to_point(Tr_vertex_handle vh) { return vh->point(); }
template <typename P, typename VH>
static const P &vertex_handle_to_point(VH vh) {
@@ -265,111 +236,97 @@ class Tangential_complex {
* @param[in] k Kernel instance.
*/
template <typename Point_range>
- Tangential_complex(Point_range points,
- int intrinsic_dimension,
+ Tangential_complex(Point_range points, int intrinsic_dimension,
#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM
- InputIterator first_for_tse, InputIterator last_for_tse,
+ InputIterator first_for_tse, InputIterator last_for_tse,
#endif
- const K &k = K()
- )
+ const K &k = K())
: m_k(k),
- m_intrinsic_dim(intrinsic_dimension),
- m_ambient_dim(points.empty() ? 0 : k.point_dimension_d_object()(*points.begin())),
- m_points(points.begin(), points.end()),
- m_weights(m_points.size(), FT(0))
+ m_intrinsic_dim(intrinsic_dimension),
+ m_ambient_dim(points.empty() ? 0 : k.point_dimension_d_object()(*points.begin())),
+ m_points(points.begin(), points.end()),
+ m_weights(m_points.size(), FT(0))
#if defined(GUDHI_USE_TBB) && defined(GUDHI_TC_PERTURB_POSITION)
- , m_p_perturb_mutexes(NULL)
-#endif
- , m_points_ds(m_points)
- , m_last_max_perturb(0.)
- , m_are_tangent_spaces_computed(m_points.size(), false)
- , m_tangent_spaces(m_points.size(), Tangent_space_basis())
+ ,
+ m_p_perturb_mutexes(NULL)
+#endif
+ ,
+ m_points_ds(m_points),
+ m_last_max_perturb(0.),
+ m_are_tangent_spaces_computed(m_points.size(), false),
+ m_tangent_spaces(m_points.size(), Tangent_space_basis())
#ifdef GUDHI_TC_EXPORT_NORMALS
- , m_orth_spaces(m_points.size(), Orthogonal_space_basis())
+ ,
+ m_orth_spaces(m_points.size(), Orthogonal_space_basis())
#endif
#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM
- , m_points_for_tse(first_for_tse, last_for_tse)
- , m_points_ds_for_tse(m_points_for_tse)
+ ,
+ m_points_for_tse(first_for_tse, last_for_tse),
+ m_points_ds_for_tse(m_points_for_tse)
#endif
- { }
+ {
+ }
/// Destructor
~Tangential_complex() {
#if defined(GUDHI_USE_TBB) && defined(GUDHI_TC_PERTURB_POSITION)
- delete [] m_p_perturb_mutexes;
+ delete[] m_p_perturb_mutexes;
#endif
}
/// Returns the intrinsic dimension of the manifold.
- int intrinsic_dimension() const {
- return m_intrinsic_dim;
- }
+ int intrinsic_dimension() const { return m_intrinsic_dim; }
/// Returns the ambient dimension.
- int ambient_dimension() const {
- return m_ambient_dim;
- }
+ int ambient_dimension() const { return m_ambient_dim; }
- Points const& points() const {
- return m_points;
- }
+ Points const &points() const { return m_points; }
/** \brief Returns the point corresponding to the vertex given as parameter.
*
* @param[in] vertex Vertex handle of the point to retrieve.
* @return The point found.
*/
- Point get_point(std::size_t vertex) const {
- return m_points[vertex];
- }
+ Point get_point(std::size_t vertex) const { return m_points[vertex]; }
/** \brief Returns the perturbed position of the point corresponding to the vertex given as parameter.
*
* @param[in] vertex Vertex handle of the point to retrieve.
* @return The perturbed position of the point found.
*/
- Point get_perturbed_point(std::size_t vertex) const {
- return compute_perturbed_point(vertex);
- }
+ Point get_perturbed_point(std::size_t vertex) const { return compute_perturbed_point(vertex); }
/// Returns the number of vertices.
- std::size_t number_of_vertices() const {
- return m_points.size();
- }
+ std::size_t number_of_vertices() const { return m_points.size(); }
- void set_weights(const Weights& weights) {
- m_weights = weights;
- }
+ void set_weights(const Weights &weights) { m_weights = weights; }
- void set_tangent_planes(const TS_container& tangent_spaces
+ void set_tangent_planes(const TS_container &tangent_spaces
#ifdef GUDHI_TC_EXPORT_NORMALS
- , const OS_container& orthogonal_spaces
+ ,
+ const OS_container &orthogonal_spaces
#endif
- ) {
+ ) {
#ifdef GUDHI_TC_EXPORT_NORMALS
- GUDHI_CHECK(
- m_points.size() == tangent_spaces.size()
- && m_points.size() == orthogonal_spaces.size(),
+ GUDHI_CHECK(m_points.size() == tangent_spaces.size() && m_points.size() == orthogonal_spaces.size(),
std::logic_error("Wrong sizes"));
#else
- GUDHI_CHECK(
- m_points.size() == tangent_spaces.size(),
- std::logic_error("Wrong sizes"));
+ GUDHI_CHECK(m_points.size() == tangent_spaces.size(), std::logic_error("Wrong sizes"));
#endif
m_tangent_spaces = tangent_spaces;
#ifdef GUDHI_TC_EXPORT_NORMALS
m_orth_spaces = orthogonal_spaces;
#endif
- for (std::size_t i = 0; i < m_points.size(); ++i)
- m_are_tangent_spaces_computed[i] = true;
+ for (std::size_t i = 0; i < m_points.size(); ++i) m_are_tangent_spaces_computed[i] = true;
}
/// Computes the tangential complex.
void compute_tangential_complex() {
#ifdef GUDHI_TC_PERFORM_EXTRA_CHECKS
std::cerr << red << "WARNING: GUDHI_TC_PERFORM_EXTRA_CHECKS is defined. "
- << "Computation might be slower than usual.\n" << white;
+ << "Computation might be slower than usual.\n"
+ << white;
#endif
#if defined(GUDHI_TC_PROFILING) && defined(GUDHI_USE_TBB)
@@ -386,10 +343,9 @@ class Tangential_complex {
if (m_points.empty())
m_translations.clear();
else
- m_translations.resize(m_points.size(),
- m_k.construct_vector_d_object()(m_ambient_dim));
+ m_translations.resize(m_points.size(), m_k.construct_vector_d_object()(m_ambient_dim));
#if defined(GUDHI_USE_TBB)
- delete [] m_p_perturb_mutexes;
+ delete[] m_p_perturb_mutexes;
m_p_perturb_mutexes = new Mutex_for_perturb[m_points.size()];
#endif
#endif
@@ -397,21 +353,18 @@ class Tangential_complex {
#ifdef GUDHI_USE_TBB
// Parallel
if (boost::is_convertible<Concurrency_tag, CGAL::Parallel_tag>::value) {
- tbb::parallel_for(tbb::blocked_range<size_t>(0, m_points.size()),
- Compute_tangent_triangulation(*this));
+ tbb::parallel_for(tbb::blocked_range<size_t>(0, m_points.size()), Compute_tangent_triangulation(*this));
} else {
#endif // GUDHI_USE_TBB
// Sequential
- for (std::size_t i = 0; i < m_points.size(); ++i)
- compute_tangent_triangulation(i);
+ for (std::size_t i = 0; i < m_points.size(); ++i) compute_tangent_triangulation(i);
#ifdef GUDHI_USE_TBB
}
#endif // GUDHI_USE_TBB
#if defined(GUDHI_TC_PROFILING) && defined(GUDHI_USE_TBB)
t.end();
- std::cerr << "Tangential complex computed in " << t.num_seconds()
- << " seconds.\n";
+ std::cerr << "Tangential complex computed in " << t.num_seconds() << " seconds.\n";
#endif
}
@@ -437,14 +390,12 @@ class Tangential_complex {
Fix_inconsistencies_info fix_inconsistencies_using_perturbation(double max_perturb, double time_limit = -1.) {
Fix_inconsistencies_info info;
- if (time_limit == 0.)
- return info;
+ if (time_limit == 0.) return info;
Gudhi::Clock t;
#ifdef GUDHI_TC_SHOW_DETAILED_STATS_FOR_INCONSISTENCIES
- std::tuple<std::size_t, std::size_t, std::size_t> stats_before =
- number_of_inconsistent_simplices(false);
+ std::tuple<std::size_t, std::size_t, std::size_t> stats_before = number_of_inconsistent_simplices(false);
if (std::get<1>(stats_before) == 0) {
#ifdef DEBUG_TRACES
@@ -462,22 +413,17 @@ class Tangential_complex {
info.num_steps = 0;
while (!done) {
#ifdef GUDHI_TC_SHOW_DETAILED_STATS_FOR_INCONSISTENCIES
- std::cerr
- << "\nBefore fix step:\n"
- << " * Total number of simplices in stars (incl. duplicates): "
- << std::get<0>(stats_before) << "\n"
- << " * Num inconsistent simplices in stars (incl. duplicates): "
- << red << std::get<1>(stats_before) << white << " ("
- << 100. * std::get<1>(stats_before) / std::get<0>(stats_before) << "%)\n"
- << " * Number of stars containing inconsistent simplices: "
- << red << std::get<2>(stats_before) << white << " ("
- << 100. * std::get<2>(stats_before) / m_points.size() << "%)\n";
+ std::cerr << "\nBefore fix step:\n"
+ << " * Total number of simplices in stars (incl. duplicates): " << std::get<0>(stats_before) << "\n"
+ << " * Num inconsistent simplices in stars (incl. duplicates): " << red << std::get<1>(stats_before)
+ << white << " (" << 100. * std::get<1>(stats_before) / std::get<0>(stats_before) << "%)\n"
+ << " * Number of stars containing inconsistent simplices: " << red << std::get<2>(stats_before)
+ << white << " (" << 100. * std::get<2>(stats_before) / m_points.size() << "%)\n";
#endif
#if defined(DEBUG_TRACES) || defined(GUDHI_TC_PROFILING)
- std::cerr << yellow
- << "\nAttempt to fix inconsistencies using perturbations - step #"
- << info.num_steps + 1 << "... " << white;
+ std::cerr << yellow << "\nAttempt to fix inconsistencies using perturbations - step #" << info.num_steps + 1
+ << "... " << white;
#endif
std::size_t num_inconsistent_stars = 0;
@@ -492,29 +438,24 @@ class Tangential_complex {
if (boost::is_convertible<Concurrency_tag, CGAL::Parallel_tag>::value) {
tbb::combinable<std::size_t> num_inconsistencies;
tbb::combinable<std::vector<std::size_t> > tls_updated_points;
- tbb::parallel_for(
- tbb::blocked_range<size_t>(0, m_triangulations.size()),
- Try_to_solve_inconsistencies_in_a_local_triangulation(*this, max_perturb,
- num_inconsistencies,
+ tbb::parallel_for(tbb::blocked_range<size_t>(0, m_triangulations.size()),
+ Try_to_solve_inconsistencies_in_a_local_triangulation(*this, max_perturb, num_inconsistencies,
tls_updated_points));
- num_inconsistent_stars =
- num_inconsistencies.combine(std::plus<std::size_t>());
- updated_points = tls_updated_points.combine(
- [](std::vector<std::size_t> const& x,
- std::vector<std::size_t> const& y) {
- std::vector<std::size_t> res;
- res.reserve(x.size() + y.size());
- res.insert(res.end(), x.begin(), x.end());
- res.insert(res.end(), y.begin(), y.end());
- return res;
- });
+ num_inconsistent_stars = num_inconsistencies.combine(std::plus<std::size_t>());
+ updated_points =
+ tls_updated_points.combine([](std::vector<std::size_t> const &x, std::vector<std::size_t> const &y) {
+ std::vector<std::size_t> res;
+ res.reserve(x.size() + y.size());
+ res.insert(res.end(), x.begin(), x.end());
+ res.insert(res.end(), y.begin(), y.end());
+ return res;
+ });
} else {
#endif // GUDHI_USE_TBB
// Sequential
for (std::size_t i = 0; i < m_triangulations.size(); ++i) {
num_inconsistent_stars +=
- try_to_solve_inconsistencies_in_a_local_triangulation(i, max_perturb,
- std::back_inserter(updated_points));
+ try_to_solve_inconsistencies_in_a_local_triangulation(i, max_perturb, std::back_inserter(updated_points));
}
#if defined(GUDHI_USE_TBB)
}
@@ -525,57 +466,44 @@ class Tangential_complex {
#endif
#if defined(GUDHI_TC_SHOW_DETAILED_STATS_FOR_INCONSISTENCIES) || defined(DEBUG_TRACES)
- std::cerr
- << "\nEncountered during fix:\n"
- << " * Num stars containing inconsistent simplices: "
- << red << num_inconsistent_stars << white
- << " (" << 100. * num_inconsistent_stars / m_points.size() << "%)\n";
+ std::cerr << "\nEncountered during fix:\n"
+ << " * Num stars containing inconsistent simplices: " << red << num_inconsistent_stars << white << " ("
+ << 100. * num_inconsistent_stars / m_points.size() << "%)\n";
#endif
#ifdef GUDHI_TC_PROFILING
- std::cerr << yellow << "done in " << t_fix_step.num_seconds()
- << " seconds.\n" << white;
+ std::cerr << yellow << "done in " << t_fix_step.num_seconds() << " seconds.\n" << white;
#elif defined(DEBUG_TRACES)
std::cerr << yellow << "done.\n" << white;
#endif
- if (num_inconsistent_stars > 0)
- refresh_tangential_complex(updated_points);
+ if (num_inconsistent_stars > 0) refresh_tangential_complex(updated_points);
#ifdef GUDHI_TC_PERFORM_EXTRA_CHECKS
// Confirm that all stars were actually refreshed
- std::size_t num_inc_1 =
- std::get<1>(number_of_inconsistent_simplices(false));
+ std::size_t num_inc_1 = std::get<1>(number_of_inconsistent_simplices(false));
refresh_tangential_complex();
- std::size_t num_inc_2 =
- std::get<1>(number_of_inconsistent_simplices(false));
+ std::size_t num_inc_2 = std::get<1>(number_of_inconsistent_simplices(false));
if (num_inc_1 != num_inc_2)
- std::cerr << red << "REFRESHMENT CHECK: FAILED. ("
- << num_inc_1 << " vs " << num_inc_2 << ")\n" << white;
+ std::cerr << red << "REFRESHMENT CHECK: FAILED. (" << num_inc_1 << " vs " << num_inc_2 << ")\n" << white;
else
std::cerr << green << "REFRESHMENT CHECK: PASSED.\n" << white;
#endif
#ifdef GUDHI_TC_SHOW_DETAILED_STATS_FOR_INCONSISTENCIES
- std::tuple<std::size_t, std::size_t, std::size_t> stats_after =
- number_of_inconsistent_simplices(false);
-
- std::cerr
- << "\nAfter fix:\n"
- << " * Total number of simplices in stars (incl. duplicates): "
- << std::get<0>(stats_after) << "\n"
- << " * Num inconsistent simplices in stars (incl. duplicates): "
- << red << std::get<1>(stats_after) << white << " ("
- << 100. * std::get<1>(stats_after) / std::get<0>(stats_after) << "%)\n"
- << " * Number of stars containing inconsistent simplices: "
- << red << std::get<2>(stats_after) << white << " ("
- << 100. * std::get<2>(stats_after) / m_points.size() << "%)\n";
+ std::tuple<std::size_t, std::size_t, std::size_t> stats_after = number_of_inconsistent_simplices(false);
+
+ std::cerr << "\nAfter fix:\n"
+ << " * Total number of simplices in stars (incl. duplicates): " << std::get<0>(stats_after) << "\n"
+ << " * Num inconsistent simplices in stars (incl. duplicates): " << red << std::get<1>(stats_after)
+ << white << " (" << 100. * std::get<1>(stats_after) / std::get<0>(stats_after) << "%)\n"
+ << " * Number of stars containing inconsistent simplices: " << red << std::get<2>(stats_after) << white
+ << " (" << 100. * std::get<2>(stats_after) / m_points.size() << "%)\n";
stats_before = stats_after;
#endif
- if (info.num_steps == 0)
- info.initial_num_inconsistent_stars = num_inconsistent_stars;
+ if (info.num_steps == 0) info.initial_num_inconsistent_stars = num_inconsistent_stars;
if (num_inconsistent_stars < info.best_num_inconsistent_stars)
info.best_num_inconsistent_stars = num_inconsistent_stars;
@@ -615,8 +543,7 @@ class Tangential_complex {
/// Returns the number of inconsistencies
/// @param[in] verbose If true, outputs a message into `std::cerr`.
- Num_inconsistencies
- number_of_inconsistent_simplices(
+ Num_inconsistencies number_of_inconsistent_simplices(
#ifdef DEBUG_TRACES
bool verbose = true
#else
@@ -634,8 +561,7 @@ class Tangential_complex {
Star::const_iterator it_inc_simplex_end = m_stars[idx].end();
for (; it_inc_simplex != it_inc_simplex_end; ++it_inc_simplex) {
// Don't check infinite cells
- if (is_infinite(*it_inc_simplex))
- continue;
+ if (is_infinite(*it_inc_simplex)) continue;
Simplex c = *it_inc_simplex;
c.insert(idx); // Add the missing index
@@ -651,18 +577,15 @@ class Tangential_complex {
}
if (verbose) {
- std::cerr
- << "\n==========================================================\n"
- << "Inconsistencies:\n"
- << " * Total number of simplices in stars (incl. duplicates): "
- << stats.num_simplices << "\n"
- << " * Number of inconsistent simplices in stars (incl. duplicates): "
- << stats.num_inconsistent_simplices << " ("
- << 100. * stats.num_inconsistent_simplices / stats.num_simplices << "%)\n"
- << " * Number of stars containing inconsistent simplices: "
- << stats.num_inconsistent_stars << " ("
- << 100. * stats.num_inconsistent_stars / m_points.size() << "%)\n"
- << "==========================================================\n";
+ std::cerr << "\n==========================================================\n"
+ << "Inconsistencies:\n"
+ << " * Total number of simplices in stars (incl. duplicates): " << stats.num_simplices << "\n"
+ << " * Number of inconsistent simplices in stars (incl. duplicates): "
+ << stats.num_inconsistent_simplices << " ("
+ << 100. * stats.num_inconsistent_simplices / stats.num_simplices << "%)\n"
+ << " * Number of stars containing inconsistent simplices: " << stats.num_inconsistent_stars << " ("
+ << 100. * stats.num_inconsistent_stars / m_points.size() << "%)\n"
+ << "==========================================================\n";
}
return stats;
@@ -672,23 +595,22 @@ class Tangential_complex {
*
* \tparam Simplex_tree_ must be a `Simplex_tree`.
*
- * @param[out] tree The result, where each `Vertex_handle` is the index of the
+ * @param[out] tree The result, where each `Vertex_handle` is the index of the
* corresponding point in the range provided to the constructor (it can also be
* retrieved through the `Tangential_complex::get_point` function.
* @param[in] export_inconsistent_simplices Also export inconsistent simplices or not?
* @return The maximal dimension of the simplices.
*/
template <typename Simplex_tree_>
- int create_complex(Simplex_tree_ &tree
- , bool export_inconsistent_simplices = true
+ int create_complex(Simplex_tree_ &tree,
+ bool export_inconsistent_simplices = true
/// \cond ADVANCED_PARAMETERS
- , bool export_infinite_simplices = false
- , Simplex_set *p_inconsistent_simplices = NULL
+ ,
+ bool export_infinite_simplices = false, Simplex_set *p_inconsistent_simplices = NULL
/// \endcond
) const {
#if defined(DEBUG_TRACES) || defined(GUDHI_TC_PROFILING)
- std::cerr << yellow
- << "\nExporting the TC as a Simplex_tree... " << white;
+ std::cerr << yellow << "\nExporting the TC as a Simplex_tree... " << white;
#endif
#ifdef GUDHI_TC_PROFILING
Gudhi::Clock t;
@@ -705,14 +627,11 @@ class Tangential_complex {
Simplex c = *it_inc_simplex;
// Don't export infinite cells
- if (!export_infinite_simplices && is_infinite(c))
- continue;
+ if (!export_infinite_simplices && is_infinite(c)) continue;
- if (!export_inconsistent_simplices && !is_simplex_consistent(c))
- continue;
+ if (!export_inconsistent_simplices && !is_simplex_consistent(c)) continue;
- if (static_cast<int> (c.size()) > max_dim)
- max_dim = static_cast<int> (c.size());
+ if (static_cast<int>(c.size()) > max_dim) max_dim = static_cast<int>(c.size());
// Add the missing center vertex
c.insert(idx);
@@ -728,8 +647,7 @@ class Tangential_complex {
#ifdef GUDHI_TC_PROFILING
t.end();
- std::cerr << yellow << "done in " << t.num_seconds()
- << " seconds.\n" << white;
+ std::cerr << yellow << "done in " << t.num_seconds() << " seconds.\n" << white;
#elif defined(DEBUG_TRACES)
std::cerr << yellow << "done.\n" << white;
#endif
@@ -747,14 +665,11 @@ class Tangential_complex {
// simplex whose dimension is different from the previous ones.
// N.B.: The check is quite expensive.
- int create_complex(Simplicial_complex &complex,
- bool export_inconsistent_simplices = true,
- bool export_infinite_simplices = false,
- int check_lower_and_higher_dim_simplices = 2,
+ int create_complex(Simplicial_complex &complex, bool export_inconsistent_simplices = true,
+ bool export_infinite_simplices = false, int check_lower_and_higher_dim_simplices = 2,
Simplex_set *p_inconsistent_simplices = NULL) const {
#if defined(DEBUG_TRACES) || defined(GUDHI_TC_PROFILING)
- std::cerr << yellow
- << "\nExporting the TC as a Simplicial_complex... " << white;
+ std::cerr << yellow << "\nExporting the TC as a Simplicial_complex... " << white;
#endif
#ifdef GUDHI_TC_PROFILING
Gudhi::Clock t;
@@ -772,31 +687,26 @@ class Tangential_complex {
Simplex c = *it_inc_simplex;
// Don't export infinite cells
- if (!export_infinite_simplices && is_infinite(c))
- continue;
+ if (!export_infinite_simplices && is_infinite(c)) continue;
- if (!export_inconsistent_simplices && !is_simplex_consistent(c))
- continue;
+ if (!export_inconsistent_simplices && !is_simplex_consistent(c)) continue;
// Unusual simplex dim?
- if (check_lower_and_higher_dim_simplices == 2
- && max_dim != -1
- && static_cast<int> (c.size()) != max_dim) {
+ if (check_lower_and_higher_dim_simplices == 2 && max_dim != -1 && static_cast<int>(c.size()) != max_dim) {
// Let's activate the check
- std::cerr << red <<
- "Info: check_lower_and_higher_dim_simplices ACTIVATED. "
- "Export might be take some time...\n" << white;
+ std::cerr << red
+ << "Info: check_lower_and_higher_dim_simplices ACTIVATED. "
+ "Export might be take some time...\n"
+ << white;
check_lower_and_higher_dim_simplices = 1;
}
- if (static_cast<int> (c.size()) > max_dim)
- max_dim = static_cast<int> (c.size());
+ if (static_cast<int>(c.size()) > max_dim) max_dim = static_cast<int>(c.size());
// Add the missing center vertex
c.insert(idx);
// Try to insert the simplex
- bool added =
- complex.add_simplex(c, check_lower_and_higher_dim_simplices == 1);
+ bool added = complex.add_simplex(c, check_lower_and_higher_dim_simplices == 1);
// Inconsistent?
if (p_inconsistent_simplices && added && !is_simplex_consistent(c)) {
@@ -807,8 +717,7 @@ class Tangential_complex {
#ifdef GUDHI_TC_PROFILING
t.end();
- std::cerr << yellow << "done in " << t.num_seconds()
- << " seconds.\n" << white;
+ std::cerr << yellow << "done in " << t.num_seconds() << " seconds.\n" << white;
#elif defined(DEBUG_TRACES)
std::cerr << yellow << "done.\n" << white;
#endif
@@ -816,29 +725,24 @@ class Tangential_complex {
return max_dim;
}
- template<typename ProjectionFunctor = CGAL::Identity<Point> >
- std::ostream &export_to_off(
- const Simplicial_complex &complex, std::ostream & os,
+ template <typename ProjectionFunctor = CGAL::Identity<Point> >
+ std::ostream &export_to_off(const Simplicial_complex &complex, std::ostream &os,
Simplex_set const *p_simpl_to_color_in_red = NULL,
Simplex_set const *p_simpl_to_color_in_green = NULL,
Simplex_set const *p_simpl_to_color_in_blue = NULL,
- ProjectionFunctor const& point_projection = ProjectionFunctor())
- const {
- return export_to_off(
- os, false, p_simpl_to_color_in_red, p_simpl_to_color_in_green,
- p_simpl_to_color_in_blue, &complex, point_projection);
+ ProjectionFunctor const &point_projection = ProjectionFunctor()) const {
+ return export_to_off(os, false, p_simpl_to_color_in_red, p_simpl_to_color_in_green, p_simpl_to_color_in_blue,
+ &complex, point_projection);
}
- template<typename ProjectionFunctor = CGAL::Identity<Point> >
- std::ostream &export_to_off(
- std::ostream & os, bool color_inconsistencies = false,
+ template <typename ProjectionFunctor = CGAL::Identity<Point> >
+ std::ostream &export_to_off(std::ostream &os, bool color_inconsistencies = false,
Simplex_set const *p_simpl_to_color_in_red = NULL,
Simplex_set const *p_simpl_to_color_in_green = NULL,
Simplex_set const *p_simpl_to_color_in_blue = NULL,
const Simplicial_complex *p_complex = NULL,
- ProjectionFunctor const& point_projection = ProjectionFunctor()) const {
- if (m_points.empty())
- return os;
+ ProjectionFunctor const &point_projection = ProjectionFunctor()) const {
+ if (m_points.empty()) return os;
if (m_ambient_dim < 2) {
std::cerr << "Error: export_to_off => ambient dimension should be >= 2.\n";
@@ -847,14 +751,14 @@ class Tangential_complex {
}
if (m_ambient_dim > 3) {
std::cerr << "Warning: export_to_off => ambient dimension should be "
- "<= 3. Only the first 3 coordinates will be exported.\n";
+ "<= 3. Only the first 3 coordinates will be exported.\n";
}
if (m_intrinsic_dim < 1 || m_intrinsic_dim > 3) {
std::cerr << "Error: export_to_off => intrinsic dimension should be "
- "between 1 and 3.\n";
+ "between 1 and 3.\n";
os << "Error: export_to_off => intrinsic dimension should be "
- "between 1 and 3.\n";
+ "between 1 and 3.\n";
return os;
}
@@ -862,12 +766,10 @@ class Tangential_complex {
std::size_t num_simplices, num_vertices;
export_vertices_to_off(output, num_vertices, false, point_projection);
if (p_complex) {
- export_simplices_to_off(
- *p_complex, output, num_simplices, p_simpl_to_color_in_red,
- p_simpl_to_color_in_green, p_simpl_to_color_in_blue);
+ export_simplices_to_off(*p_complex, output, num_simplices, p_simpl_to_color_in_red, p_simpl_to_color_in_green,
+ p_simpl_to_color_in_blue);
} else {
- export_simplices_to_off(
- output, num_simplices, color_inconsistencies, p_simpl_to_color_in_red,
+ export_simplices_to_off(output, num_simplices, color_inconsistencies, p_simpl_to_color_in_red,
p_simpl_to_color_in_green, p_simpl_to_color_in_blue);
}
@@ -876,10 +778,9 @@ class Tangential_complex {
#endif
os << "OFF \n"
- << num_vertices << " "
- << num_simplices << " "
- << "0 \n"
- << output.str();
+ << num_vertices << " " << num_simplices << " "
+ << "0 \n"
+ << output.str();
return os;
}
@@ -896,21 +797,18 @@ class Tangential_complex {
#ifdef GUDHI_USE_TBB
// Parallel
if (boost::is_convertible<Concurrency_tag, CGAL::Parallel_tag>::value) {
- tbb::parallel_for(tbb::blocked_range<size_t>(0, m_points.size()),
- Compute_tangent_triangulation(*this));
+ tbb::parallel_for(tbb::blocked_range<size_t>(0, m_points.size()), Compute_tangent_triangulation(*this));
} else {
#endif // GUDHI_USE_TBB
// Sequential
- for (std::size_t i = 0; i < m_points.size(); ++i)
- compute_tangent_triangulation(i);
+ for (std::size_t i = 0; i < m_points.size(); ++i) compute_tangent_triangulation(i);
#ifdef GUDHI_USE_TBB
}
#endif // GUDHI_USE_TBB
#ifdef GUDHI_TC_PROFILING
t.end();
- std::cerr << yellow << "done in " << t.num_seconds()
- << " seconds.\n" << white;
+ std::cerr << yellow << "done in " << t.num_seconds() << " seconds.\n" << white;
#elif defined(DEBUG_TRACES)
std::cerr << yellow << "done.\n" << white;
#endif
@@ -918,8 +816,7 @@ class Tangential_complex {
// If the list of perturbed points is provided, it is much faster
template <typename Point_indices_range>
- void refresh_tangential_complex(
- Point_indices_range const& perturbed_points_indices) {
+ void refresh_tangential_complex(Point_indices_range const &perturbed_points_indices) {
#if defined(DEBUG_TRACES) || defined(GUDHI_TC_PROFILING)
std::cerr << yellow << "\nRefreshing TC... " << white;
#endif
@@ -939,22 +836,20 @@ class Tangential_complex {
} else {
#endif // GUDHI_USE_TBB
// Sequential
- for (std::size_t i = 0; i < m_points.size(); ++i)
- refresh_tangent_triangulation(i, updated_pts_ds);
+ for (std::size_t i = 0; i < m_points.size(); ++i) refresh_tangent_triangulation(i, updated_pts_ds);
#ifdef GUDHI_USE_TBB
}
#endif // GUDHI_USE_TBB
#ifdef GUDHI_TC_PROFILING
t.end();
- std::cerr << yellow << "done in " << t.num_seconds()
- << " seconds.\n" << white;
+ std::cerr << yellow << "done in " << t.num_seconds() << " seconds.\n" << white;
#elif defined(DEBUG_TRACES)
std::cerr << yellow << "done.\n" << white;
#endif
}
- void export_inconsistent_stars_to_OFF_files(std::string const& filename_base) const {
+ void export_inconsistent_stars_to_OFF_files(std::string const &filename_base) const {
// For each triangulation
for (std::size_t idx = 0; idx < m_points.size(); ++idx) {
// We build a SC along the way in case it's inconsistent
@@ -963,11 +858,9 @@ class Tangential_complex {
bool is_inconsistent = false;
Star::const_iterator it_inc_simplex = m_stars[idx].begin();
Star::const_iterator it_inc_simplex_end = m_stars[idx].end();
- for (; it_inc_simplex != it_inc_simplex_end;
- ++it_inc_simplex) {
+ for (; it_inc_simplex != it_inc_simplex_end; ++it_inc_simplex) {
// Skip infinite cells
- if (is_infinite(*it_inc_simplex))
- continue;
+ if (is_infinite(*it_inc_simplex)) continue;
Simplex c = *it_inc_simplex;
c.insert(idx); // Add the missing index
@@ -975,8 +868,7 @@ class Tangential_complex {
sc.add_simplex(c);
// If we do not already know this star is inconsistent, test it
- if (!is_inconsistent && !is_simplex_consistent(c))
- is_inconsistent = true;
+ if (!is_inconsistent && !is_simplex_consistent(c)) is_inconsistent = true;
}
if (is_inconsistent) {
@@ -991,66 +883,58 @@ class Tangential_complex {
class Compare_distance_to_ref_point {
public:
- Compare_distance_to_ref_point(Point const& ref, K const& k)
- : m_ref(ref), m_k(k) { }
+ Compare_distance_to_ref_point(Point const &ref, K const &k) : m_ref(ref), m_k(k) {}
- bool operator()(Point const& p1, Point const& p2) {
- typename K::Squared_distance_d sqdist =
- m_k.squared_distance_d_object();
+ bool operator()(Point const &p1, Point const &p2) {
+ typename K::Squared_distance_d sqdist = m_k.squared_distance_d_object();
return sqdist(p1, m_ref) < sqdist(p2, m_ref);
}
private:
- Point const& m_ref;
- K const& m_k;
+ Point const &m_ref;
+ K const &m_k;
};
#ifdef GUDHI_USE_TBB
// Functor for compute_tangential_complex function
class Compute_tangent_triangulation {
- Tangential_complex & m_tc;
+ Tangential_complex &m_tc;
public:
// Constructor
- Compute_tangent_triangulation(Tangential_complex &tc)
- : m_tc(tc) { }
+ Compute_tangent_triangulation(Tangential_complex &tc) : m_tc(tc) {}
// Constructor
- Compute_tangent_triangulation(const Compute_tangent_triangulation &ctt)
- : m_tc(ctt.m_tc) { }
+ Compute_tangent_triangulation(const Compute_tangent_triangulation &ctt) : m_tc(ctt.m_tc) {}
// operator()
- void operator()(const tbb::blocked_range<size_t>& r) const {
- for (size_t i = r.begin(); i != r.end(); ++i)
- m_tc.compute_tangent_triangulation(i);
+ void operator()(const tbb::blocked_range<size_t> &r) const {
+ for (size_t i = r.begin(); i != r.end(); ++i) m_tc.compute_tangent_triangulation(i);
}
};
// Functor for refresh_tangential_complex function
class Refresh_tangent_triangulation {
- Tangential_complex & m_tc;
- Points_ds const& m_updated_pts_ds;
+ Tangential_complex &m_tc;
+ Points_ds const &m_updated_pts_ds;
public:
// Constructor
- Refresh_tangent_triangulation(Tangential_complex &tc, Points_ds const& updated_pts_ds)
- : m_tc(tc), m_updated_pts_ds(updated_pts_ds) { }
+ Refresh_tangent_triangulation(Tangential_complex &tc, Points_ds const &updated_pts_ds)
+ : m_tc(tc), m_updated_pts_ds(updated_pts_ds) {}
// Constructor
Refresh_tangent_triangulation(const Refresh_tangent_triangulation &ctt)
- : m_tc(ctt.m_tc), m_updated_pts_ds(ctt.m_updated_pts_ds) { }
+ : m_tc(ctt.m_tc), m_updated_pts_ds(ctt.m_updated_pts_ds) {}
// operator()
- void operator()(const tbb::blocked_range<size_t>& r) const {
- for (size_t i = r.begin(); i != r.end(); ++i)
- m_tc.refresh_tangent_triangulation(i, m_updated_pts_ds);
+ void operator()(const tbb::blocked_range<size_t> &r) const {
+ for (size_t i = r.begin(); i != r.end(); ++i) m_tc.refresh_tangent_triangulation(i, m_updated_pts_ds);
}
};
#endif // GUDHI_USE_TBB
- bool is_infinite(Simplex const& s) const {
- return *s.rbegin() == (std::numeric_limits<std::size_t>::max)();
- }
+ bool is_infinite(Simplex const &s) const { return *s.rbegin() == (std::numeric_limits<std::size_t>::max)(); }
// Output: "triangulation" is a Regular Triangulation containing at least the
// star of "center_pt"
@@ -1076,17 +960,16 @@ class Tangential_complex {
Tr_point proj_wp;
if (i == tsb.origin()) {
// Insert {(0, 0, 0...), m_weights[i]}
- proj_wp = local_tr_traits.construct_weighted_point_d_object()(local_tr_traits.construct_point_d_object()(tangent_space_dim, CGAL::ORIGIN),
- m_weights[i]);
+ proj_wp = local_tr_traits.construct_weighted_point_d_object()(
+ local_tr_traits.construct_point_d_object()(tangent_space_dim, CGAL::ORIGIN), m_weights[i]);
} else {
- const Weighted_point& wp = compute_perturbed_weighted_point(i);
+ const Weighted_point &wp = compute_perturbed_weighted_point(i);
proj_wp = project_point_and_compute_weight(wp, tsb, local_tr_traits);
}
Tr_vertex_handle center_vertex = triangulation.insert(proj_wp);
center_vertex->data() = i;
- if (verbose)
- std::cerr << "* Inserted point #" << i << "\n";
+ if (verbose) std::cerr << "* Inserted point #" << i << "\n";
#ifdef GUDHI_TC_VERY_VERBOSE
std::size_t num_attempts_to_insert_points = 1;
@@ -1106,9 +989,7 @@ class Tangential_complex {
// boost::optional<FT> squared_star_sphere_radius_plus_margin;
// Insert points until we find a point which is outside "star sphere"
- for (auto nn_it = ins_range.begin();
- nn_it != ins_range.end();
- ++nn_it) {
+ for (auto nn_it = ins_range.begin(); nn_it != ins_range.end(); ++nn_it) {
std::size_t neighbor_point_idx = nn_it->first;
// ith point = p, which is already inserted
@@ -1123,22 +1004,19 @@ class Tangential_complex {
k_sqdist(center_pt, neighbor_pt) > *squared_star_sphere_radius_plus_margin)
break;
- Tr_point proj_pt = project_point_and_compute_weight(neighbor_pt, neighbor_weight, tsb,
- local_tr_traits);
+ Tr_point proj_pt = project_point_and_compute_weight(neighbor_pt, neighbor_weight, tsb, local_tr_traits);
#ifdef GUDHI_TC_VERY_VERBOSE
++num_attempts_to_insert_points;
#endif
-
Tr_vertex_handle vh = triangulation.insert_if_in_star(proj_pt, center_vertex);
// Tr_vertex_handle vh = triangulation.insert(proj_pt);
if (vh != Tr_vertex_handle() && vh->data() == (std::numeric_limits<std::size_t>::max)()) {
#ifdef GUDHI_TC_VERY_VERBOSE
++num_inserted_points;
#endif
- if (verbose)
- std::cerr << "* Inserted point #" << neighbor_point_idx << "\n";
+ if (verbose) std::cerr << "* Inserted point #" << neighbor_point_idx << "\n";
vh->data() = neighbor_point_idx;
@@ -1147,11 +1025,9 @@ class Tangential_complex {
squared_star_sphere_radius_plus_margin = boost::none;
// Get the incident cells and look for the biggest circumsphere
std::vector<Tr_full_cell_handle> incident_cells;
- triangulation.incident_full_cells(
- center_vertex,
- std::back_inserter(incident_cells));
- for (typename std::vector<Tr_full_cell_handle>::iterator cit =
- incident_cells.begin(); cit != incident_cells.end(); ++cit) {
+ triangulation.incident_full_cells(center_vertex, std::back_inserter(incident_cells));
+ for (typename std::vector<Tr_full_cell_handle>::iterator cit = incident_cells.begin();
+ cit != incident_cells.end(); ++cit) {
Tr_full_cell_handle cell = *cit;
if (triangulation.is_infinite(cell)) {
squared_star_sphere_radius_plus_margin = boost::none;
@@ -1159,12 +1035,11 @@ class Tangential_complex {
} else {
// Note that this uses the perturbed point since it uses
// the points of the local triangulation
- Tr_point c = power_center(boost::make_transform_iterator(cell->vertices_begin(),
- vertex_handle_to_point<Tr_point,
- Tr_vertex_handle>),
- boost::make_transform_iterator(cell->vertices_end(),
- vertex_handle_to_point<Tr_point,
- Tr_vertex_handle>));
+ Tr_point c =
+ power_center(boost::make_transform_iterator(cell->vertices_begin(),
+ vertex_handle_to_point<Tr_point, Tr_vertex_handle>),
+ boost::make_transform_iterator(cell->vertices_end(),
+ vertex_handle_to_point<Tr_point, Tr_vertex_handle>));
FT sq_power_sphere_diam = 4 * point_weight(c);
@@ -1179,12 +1054,11 @@ class Tangential_complex {
// The value depends on whether we perturb weight or position
if (squared_star_sphere_radius_plus_margin) {
// "2*m_last_max_perturb" because both points can be perturbed
- squared_star_sphere_radius_plus_margin = CGAL::square(std::sqrt(*squared_star_sphere_radius_plus_margin)
- + 2 * m_last_max_perturb);
+ squared_star_sphere_radius_plus_margin =
+ CGAL::square(std::sqrt(*squared_star_sphere_radius_plus_margin) + 2 * m_last_max_perturb);
// Save it in `m_squared_star_spheres_radii_incl_margin`
- m_squared_star_spheres_radii_incl_margin[i] =
- *squared_star_sphere_radius_plus_margin;
+ m_squared_star_spheres_radii_incl_margin[i] = *squared_star_sphere_radius_plus_margin;
} else {
m_squared_star_spheres_radii_incl_margin[i] = FT(-1);
}
@@ -1196,36 +1070,28 @@ class Tangential_complex {
return center_vertex;
}
- void refresh_tangent_triangulation(std::size_t i, Points_ds const& updated_pts_ds, bool verbose = false) {
- if (verbose)
- std::cerr << "** Refreshing tangent tri #" << i << " **\n";
+ void refresh_tangent_triangulation(std::size_t i, Points_ds const &updated_pts_ds, bool verbose = false) {
+ if (verbose) std::cerr << "** Refreshing tangent tri #" << i << " **\n";
- if (m_squared_star_spheres_radii_incl_margin[i] == FT(-1))
- return compute_tangent_triangulation(i, verbose);
+ if (m_squared_star_spheres_radii_incl_margin[i] == FT(-1)) return compute_tangent_triangulation(i, verbose);
Point center_point = compute_perturbed_point(i);
// Among updated point, what is the closer from our center point?
- std::size_t closest_pt_index =
- updated_pts_ds.k_nearest_neighbors(center_point, 1, false).begin()->first;
+ std::size_t closest_pt_index = updated_pts_ds.k_nearest_neighbors(center_point, 1, false).begin()->first;
- typename K::Construct_weighted_point_d k_constr_wp =
- m_k.construct_weighted_point_d_object();
+ typename K::Construct_weighted_point_d k_constr_wp = m_k.construct_weighted_point_d_object();
typename K::Power_distance_d k_power_dist = m_k.power_distance_d_object();
// Construct a weighted point equivalent to the star sphere
- Weighted_point star_sphere = k_constr_wp(compute_perturbed_point(i),
- m_squared_star_spheres_radii_incl_margin[i]);
- Weighted_point closest_updated_point =
- compute_perturbed_weighted_point(closest_pt_index);
+ Weighted_point star_sphere = k_constr_wp(compute_perturbed_point(i), m_squared_star_spheres_radii_incl_margin[i]);
+ Weighted_point closest_updated_point = compute_perturbed_weighted_point(closest_pt_index);
// Is the "closest point" inside our star sphere?
- if (k_power_dist(star_sphere, closest_updated_point) <= FT(0))
- compute_tangent_triangulation(i, verbose);
+ if (k_power_dist(star_sphere, closest_updated_point) <= FT(0)) compute_tangent_triangulation(i, verbose);
}
void compute_tangent_triangulation(std::size_t i, bool verbose = false) {
- if (verbose)
- std::cerr << "** Computing tangent tri #" << i << " **\n";
+ if (verbose) std::cerr << "** Computing tangent tri #" << i << " **\n";
// std::cerr << "***********************************************\n";
// No need to lock the mutex here since this will not be called while
@@ -1236,7 +1102,7 @@ class Tangential_complex {
// Estimate the tangent space
if (!m_are_tangent_spaces_computed[i]) {
#ifdef GUDHI_TC_EXPORT_NORMALS
- tsb = compute_tangent_space(center_pt, i, true /*normalize*/, &m_orth_spaces[i]);
+ tsb = compute_tangent_space(center_pt, i, true /*normalize*/, &m_orth_spaces[i]);
#else
tsb = compute_tangent_space(center_pt, i);
#endif
@@ -1246,11 +1112,9 @@ class Tangential_complex {
Gudhi::Clock t;
#endif
int tangent_space_dim = tangent_basis_dim(i);
- Triangulation &local_tr =
- m_triangulations[i].construct_triangulation(tangent_space_dim);
+ Triangulation &local_tr = m_triangulations[i].construct_triangulation(tangent_space_dim);
- m_triangulations[i].center_vertex() =
- compute_star(i, center_pt, tsb, local_tr, verbose);
+ m_triangulations[i].center_vertex() = compute_star(i, center_pt, tsb, local_tr, verbose);
#if defined(GUDHI_TC_PROFILING) && defined(GUDHI_TC_VERY_VERBOSE)
t.end();
@@ -1259,8 +1123,8 @@ class Tangential_complex {
#endif
#ifdef GUDHI_TC_VERY_VERBOSE
- std::cerr << "Inserted " << num_inserted_points << " points / "
- << num_attempts_to_insert_points << " attemps to compute the star\n";
+ std::cerr << "Inserted " << num_inserted_points << " points / " << num_attempts_to_insert_points
+ << " attemps to compute the star\n";
#endif
update_star(i);
@@ -1281,8 +1145,7 @@ class Tangential_complex {
int cur_dim_plus_1 = local_tr.current_dimension() + 1;
std::vector<Tr_full_cell_handle> incident_cells;
- local_tr.incident_full_cells(
- center_vertex, std::back_inserter(incident_cells));
+ local_tr.incident_full_cells(center_vertex, std::back_inserter(incident_cells));
typename std::vector<Tr_full_cell_handle>::const_iterator it_c = incident_cells.begin();
typename std::vector<Tr_full_cell_handle>::const_iterator it_c_end = incident_cells.end();
@@ -1292,30 +1155,25 @@ class Tangential_complex {
Incident_simplex incident_simplex;
for (int j = 0; j < cur_dim_plus_1; ++j) {
std::size_t index = (*it_c)->vertex(j)->data();
- if (index != i)
- incident_simplex.insert(index);
+ if (index != i) incident_simplex.insert(index);
}
GUDHI_CHECK(incident_simplex.size() == cur_dim_plus_1 - 1,
- std::logic_error("update_star: wrong size of incident simplex"));
+ std::logic_error("update_star: wrong size of incident simplex"));
star.push_back(incident_simplex);
}
}
// Estimates tangent subspaces using PCA
- Tangent_space_basis compute_tangent_space(const Point &p
- , const std::size_t i
- , bool normalize_basis = true
- , Orthogonal_space_basis *p_orth_space_basis = NULL
- ) {
- unsigned int num_pts_for_pca = (std::min)(static_cast<unsigned int> (std::pow(GUDHI_TC_BASE_VALUE_FOR_PCA, m_intrinsic_dim)),
- static_cast<unsigned int> (m_points.size()));
+ Tangent_space_basis compute_tangent_space(const Point &p, const std::size_t i, bool normalize_basis = true,
+ Orthogonal_space_basis *p_orth_space_basis = NULL) {
+ unsigned int num_pts_for_pca =
+ (std::min)(static_cast<unsigned int>(std::pow(GUDHI_TC_BASE_VALUE_FOR_PCA, m_intrinsic_dim)),
+ static_cast<unsigned int>(m_points.size()));
// Kernel functors
- typename K::Construct_vector_d constr_vec =
- m_k.construct_vector_d_object();
- typename K::Compute_coordinate_d coord =
- m_k.compute_coordinate_d_object();
+ typename K::Construct_vector_d constr_vec = m_k.construct_vector_d_object();
+ typename K::Compute_coordinate_d coord = m_k.compute_coordinate_d_object();
#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM
KNS_range kns_range = m_points_ds_for_tse.k_nearest_neighbors(p, num_pts_for_pca, false);
@@ -1328,9 +1186,7 @@ class Tangential_complex {
// One row = one point
Eigen::MatrixXd mat_points(num_pts_for_pca, m_ambient_dim);
auto nn_it = kns_range.begin();
- for (unsigned int j = 0;
- j < num_pts_for_pca && nn_it != kns_range.end();
- ++j, ++nn_it) {
+ for (unsigned int j = 0; j < num_pts_for_pca && nn_it != kns_range.end(); ++j, ++nn_it) {
for (int i = 0; i < m_ambient_dim; ++i) {
mat_points(j, i) = CGAL::to_double(coord(points_for_pca[nn_it->first], i));
}
@@ -1343,36 +1199,26 @@ class Tangential_complex {
// The eigenvectors are sorted in increasing order of their corresponding
// eigenvalues
- for (int j = m_ambient_dim - 1;
- j >= m_ambient_dim - m_intrinsic_dim;
- --j) {
+ for (int j = m_ambient_dim - 1; j >= m_ambient_dim - m_intrinsic_dim; --j) {
if (normalize_basis) {
- Vector v = constr_vec(m_ambient_dim,
- eig.eigenvectors().col(j).data(),
+ Vector v = constr_vec(m_ambient_dim, eig.eigenvectors().col(j).data(),
eig.eigenvectors().col(j).data() + m_ambient_dim);
tsb.push_back(normalize_vector(v, m_k));
} else {
- tsb.push_back(constr_vec(
- m_ambient_dim,
- eig.eigenvectors().col(j).data(),
+ tsb.push_back(constr_vec(m_ambient_dim, eig.eigenvectors().col(j).data(),
eig.eigenvectors().col(j).data() + m_ambient_dim));
}
}
if (p_orth_space_basis) {
p_orth_space_basis->set_origin(i);
- for (int j = m_ambient_dim - m_intrinsic_dim - 1;
- j >= 0;
- --j) {
+ for (int j = m_ambient_dim - m_intrinsic_dim - 1; j >= 0; --j) {
if (normalize_basis) {
- Vector v = constr_vec(m_ambient_dim,
- eig.eigenvectors().col(j).data(),
+ Vector v = constr_vec(m_ambient_dim, eig.eigenvectors().col(j).data(),
eig.eigenvectors().col(j).data() + m_ambient_dim);
p_orth_space_basis->push_back(normalize_vector(v, m_k));
} else {
- p_orth_space_basis->push_back(constr_vec(
- m_ambient_dim,
- eig.eigenvectors().col(j).data(),
+ p_orth_space_basis->push_back(constr_vec(m_ambient_dim, eig.eigenvectors().col(j).data(),
eig.eigenvectors().col(j).data() + m_ambient_dim));
}
}
@@ -1389,29 +1235,23 @@ class Tangential_complex {
// on it. Note that most points are duplicated.
Tangent_space_basis compute_tangent_space(const Simplex &s, bool normalize_basis = true) {
- unsigned int num_pts_for_pca = (std::min)(static_cast<unsigned int> (std::pow(GUDHI_TC_BASE_VALUE_FOR_PCA, m_intrinsic_dim)),
- static_cast<unsigned int> (m_points.size()));
+ unsigned int num_pts_for_pca =
+ (std::min)(static_cast<unsigned int>(std::pow(GUDHI_TC_BASE_VALUE_FOR_PCA, m_intrinsic_dim)),
+ static_cast<unsigned int>(m_points.size()));
// Kernel functors
- typename K::Construct_vector_d constr_vec =
- m_k.construct_vector_d_object();
- typename K::Compute_coordinate_d coord =
- m_k.compute_coordinate_d_object();
- typename K::Squared_length_d sqlen =
- m_k.squared_length_d_object();
- typename K::Scaled_vector_d scaled_vec =
- m_k.scaled_vector_d_object();
- typename K::Scalar_product_d scalar_pdct =
- m_k.scalar_product_d_object();
- typename K::Difference_of_vectors_d diff_vec =
- m_k.difference_of_vectors_d_object();
+ typename K::Construct_vector_d constr_vec = m_k.construct_vector_d_object();
+ typename K::Compute_coordinate_d coord = m_k.compute_coordinate_d_object();
+ typename K::Squared_length_d sqlen = m_k.squared_length_d_object();
+ typename K::Scaled_vector_d scaled_vec = m_k.scaled_vector_d_object();
+ typename K::Scalar_product_d scalar_pdct = m_k.scalar_product_d_object();
+ typename K::Difference_of_vectors_d diff_vec = m_k.difference_of_vectors_d_object();
// One row = one point
Eigen::MatrixXd mat_points(s.size() * num_pts_for_pca, m_ambient_dim);
unsigned int current_row = 0;
- for (Simplex::const_iterator it_index = s.begin();
- it_index != s.end(); ++it_index) {
+ for (Simplex::const_iterator it_index = s.begin(); it_index != s.end(); ++it_index) {
const Point &p = m_points[*it_index];
#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM
@@ -1423,12 +1263,9 @@ class Tangential_complex {
#endif
auto nn_it = kns_range.begin();
- for (;
- current_row < num_pts_for_pca && nn_it != kns_range.end();
- ++current_row, ++nn_it) {
+ for (; current_row < num_pts_for_pca && nn_it != kns_range.end(); ++current_row, ++nn_it) {
for (int i = 0; i < m_ambient_dim; ++i) {
- mat_points(current_row, i) =
- CGAL::to_double(coord(points_for_pca[nn_it->first], i));
+ mat_points(current_row, i) = CGAL::to_double(coord(points_for_pca[nn_it->first], i));
}
}
}
@@ -1440,18 +1277,13 @@ class Tangential_complex {
// The eigenvectors are sorted in increasing order of their corresponding
// eigenvalues
- for (int j = m_ambient_dim - 1;
- j >= m_ambient_dim - m_intrinsic_dim;
- --j) {
+ for (int j = m_ambient_dim - 1; j >= m_ambient_dim - m_intrinsic_dim; --j) {
if (normalize_basis) {
- Vector v = constr_vec(m_ambient_dim,
- eig.eigenvectors().col(j).data(),
+ Vector v = constr_vec(m_ambient_dim, eig.eigenvectors().col(j).data(),
eig.eigenvectors().col(j).data() + m_ambient_dim);
tsb.push_back(normalize_vector(v, m_k));
} else {
- tsb.push_back(constr_vec(
- m_ambient_dim,
- eig.eigenvectors().col(j).data(),
+ tsb.push_back(constr_vec(m_ambient_dim, eig.eigenvectors().col(j).data(),
eig.eigenvectors().col(j).data() + m_ambient_dim));
}
}
@@ -1461,14 +1293,11 @@ class Tangential_complex {
// Returns the dimension of the ith local triangulation
- int tangent_basis_dim(std::size_t i) const {
- return m_tangent_spaces[i].dimension();
- }
+ int tangent_basis_dim(std::size_t i) const { return m_tangent_spaces[i].dimension(); }
Point compute_perturbed_point(std::size_t pt_idx) const {
#ifdef GUDHI_TC_PERTURB_POSITION
- return m_k.translated_point_d_object()(
- m_points[pt_idx], m_translations[pt_idx]);
+ return m_k.translated_point_d_object()(m_points[pt_idx], m_translations[pt_idx]);
#else
return m_points[pt_idx];
#endif
@@ -1476,8 +1305,7 @@ class Tangential_complex {
void compute_perturbed_weighted_point(std::size_t pt_idx, Point &p, FT &w) const {
#ifdef GUDHI_TC_PERTURB_POSITION
- p = m_k.translated_point_d_object()(
- m_points[pt_idx], m_translations[pt_idx]);
+ p = m_k.translated_point_d_object()(m_points[pt_idx], m_translations[pt_idx]);
#else
p = m_points[pt_idx];
#endif
@@ -1485,8 +1313,7 @@ class Tangential_complex {
}
Weighted_point compute_perturbed_weighted_point(std::size_t pt_idx) const {
- typename K::Construct_weighted_point_d k_constr_wp =
- m_k.construct_weighted_point_d_object();
+ typename K::Construct_weighted_point_d k_constr_wp = m_k.construct_weighted_point_d_object();
Weighted_point wp = k_constr_wp(
#ifdef GUDHI_TC_PERTURB_POSITION
@@ -1499,33 +1326,22 @@ class Tangential_complex {
return wp;
}
- Point unproject_point(const Tr_point &p,
- const Tangent_space_basis &tsb,
- const Tr_traits &tr_traits) const {
- typename K::Translated_point_d k_transl =
- m_k.translated_point_d_object();
- typename K::Scaled_vector_d k_scaled_vec =
- m_k.scaled_vector_d_object();
- typename Tr_traits::Compute_coordinate_d coord =
- tr_traits.compute_coordinate_d_object();
+ Point unproject_point(const Tr_point &p, const Tangent_space_basis &tsb, const Tr_traits &tr_traits) const {
+ typename K::Translated_point_d k_transl = m_k.translated_point_d_object();
+ typename K::Scaled_vector_d k_scaled_vec = m_k.scaled_vector_d_object();
+ typename Tr_traits::Compute_coordinate_d coord = tr_traits.compute_coordinate_d_object();
Point global_point = compute_perturbed_point(tsb.origin());
- for (int i = 0; i < m_intrinsic_dim; ++i)
- global_point = k_transl(global_point,
- k_scaled_vec(tsb[i], coord(p, i)));
+ for (int i = 0; i < m_intrinsic_dim; ++i) global_point = k_transl(global_point, k_scaled_vec(tsb[i], coord(p, i)));
return global_point;
}
// Project the point in the tangent space
// Resulting point coords are expressed in tsb's space
- Tr_bare_point project_point(const Point &p,
- const Tangent_space_basis &tsb,
- const Tr_traits &tr_traits) const {
- typename K::Scalar_product_d scalar_pdct =
- m_k.scalar_product_d_object();
- typename K::Difference_of_points_d diff_points =
- m_k.difference_of_points_d_object();
+ Tr_bare_point project_point(const Point &p, const Tangent_space_basis &tsb, const Tr_traits &tr_traits) const {
+ typename K::Scalar_product_d scalar_pdct = m_k.scalar_product_d_object();
+ typename K::Difference_of_points_d diff_points = m_k.difference_of_points_d_object();
Vector v = diff_points(p, compute_perturbed_point(tsb.origin()));
@@ -1538,41 +1354,30 @@ class Tangential_complex {
coords.push_back(coord);
}
- return tr_traits.construct_point_d_object()(
- static_cast<int> (coords.size()), coords.begin(), coords.end());
+ return tr_traits.construct_point_d_object()(static_cast<int>(coords.size()), coords.begin(), coords.end());
}
// Project the point in the tangent space
// The weight will be the squared distance between p and the projection of p
// Resulting point coords are expressed in tsb's space
- Tr_point project_point_and_compute_weight(const Weighted_point &wp,
- const Tangent_space_basis &tsb,
+ Tr_point project_point_and_compute_weight(const Weighted_point &wp, const Tangent_space_basis &tsb,
const Tr_traits &tr_traits) const {
- typename K::Point_drop_weight_d k_drop_w =
- m_k.point_drop_weight_d_object();
- typename K::Compute_weight_d k_point_weight =
- m_k.compute_weight_d_object();
- return project_point_and_compute_weight(
- k_drop_w(wp), k_point_weight(wp), tsb, tr_traits);
+ typename K::Point_drop_weight_d k_drop_w = m_k.point_drop_weight_d_object();
+ typename K::Compute_weight_d k_point_weight = m_k.compute_weight_d_object();
+ return project_point_and_compute_weight(k_drop_w(wp), k_point_weight(wp), tsb, tr_traits);
}
// Same as above, with slightly different parameters
- Tr_point project_point_and_compute_weight(const Point &p, const FT w,
- const Tangent_space_basis &tsb,
+ Tr_point project_point_and_compute_weight(const Point &p, const FT w, const Tangent_space_basis &tsb,
const Tr_traits &tr_traits) const {
const int point_dim = m_k.point_dimension_d_object()(p);
- typename K::Construct_point_d constr_pt =
- m_k.construct_point_d_object();
- typename K::Scalar_product_d scalar_pdct =
- m_k.scalar_product_d_object();
- typename K::Difference_of_points_d diff_points =
- m_k.difference_of_points_d_object();
- typename K::Compute_coordinate_d coord =
- m_k.compute_coordinate_d_object();
- typename K::Construct_cartesian_const_iterator_d ccci =
- m_k.construct_cartesian_const_iterator_d_object();
+ typename K::Construct_point_d constr_pt = m_k.construct_point_d_object();
+ typename K::Scalar_product_d scalar_pdct = m_k.scalar_product_d_object();
+ typename K::Difference_of_points_d diff_points = m_k.difference_of_points_d_object();
+ typename K::Compute_coordinate_d coord = m_k.compute_coordinate_d_object();
+ typename K::Construct_cartesian_const_iterator_d ccci = m_k.construct_cartesian_const_iterator_d_object();
Point origin = compute_perturbed_point(tsb.origin());
Vector v = diff_points(p, origin);
@@ -1591,8 +1396,7 @@ class Tangential_complex {
// p_proj += c * tsb[i]
if (!same_dim) {
- for (int j = 0; j < point_dim; ++j)
- p_proj[j] += c * coord(tsb[i], j);
+ for (int j = 0; j < point_dim; ++j) p_proj[j] += c * coord(tsb[i], j);
}
}
@@ -1603,24 +1407,21 @@ class Tangential_complex {
sq_dist_to_proj_pt = m_k.squared_distance_d_object()(p, projected_pt);
}
- return tr_traits.construct_weighted_point_d_object()
- (tr_traits.construct_point_d_object()(static_cast<int> (coords.size()), coords.begin(), coords.end()),
- w - sq_dist_to_proj_pt);
+ return tr_traits.construct_weighted_point_d_object()(
+ tr_traits.construct_point_d_object()(static_cast<int>(coords.size()), coords.begin(), coords.end()),
+ w - sq_dist_to_proj_pt);
}
// Project all the points in the tangent space
template <typename Indexed_point_range>
- std::vector<Tr_point> project_points_and_compute_weights(
- const Indexed_point_range &point_indices,
+ std::vector<Tr_point> project_points_and_compute_weights(const Indexed_point_range &point_indices,
const Tangent_space_basis &tsb,
const Tr_traits &tr_traits) const {
std::vector<Tr_point> ret;
- for (typename Indexed_point_range::const_iterator
- it = point_indices.begin(), it_end = point_indices.end();
+ for (typename Indexed_point_range::const_iterator it = point_indices.begin(), it_end = point_indices.end();
it != it_end; ++it) {
- ret.push_back(project_point_and_compute_weight(
- compute_perturbed_weighted_point(*it), tsb, tr_traits));
+ ret.push_back(project_point_and_compute_weight(compute_perturbed_weighted_point(*it), tsb, tr_traits));
}
return ret;
}
@@ -1639,7 +1440,7 @@ class Tangential_complex {
// A simplex here is a list of point indices
// TODO(CJ): improve it like the other "is_simplex_consistent" below
- bool is_simplex_consistent(Simplex const& simplex) const {
+ bool is_simplex_consistent(Simplex const &simplex) const {
// Check if the simplex is in the stars of all its vertices
Simplex::const_iterator it_point_idx = simplex.begin();
// For each point p of the simplex, we parse the incidents cells of p
@@ -1647,18 +1448,16 @@ class Tangential_complex {
for (; it_point_idx != simplex.end(); ++it_point_idx) {
std::size_t point_idx = *it_point_idx;
// Don't check infinite simplices
- if (point_idx == (std::numeric_limits<std::size_t>::max)())
- continue;
+ if (point_idx == (std::numeric_limits<std::size_t>::max)()) continue;
- Star const& star = m_stars[point_idx];
+ Star const &star = m_stars[point_idx];
// What we're looking for is "simplex" \ point_idx
Incident_simplex is_to_find = simplex;
is_to_find.erase(point_idx);
// For each cell
- if (std::find(star.begin(), star.end(), is_to_find) == star.end())
- return false;
+ if (std::find(star.begin(), star.end(), is_to_find) == star.end()) return false;
}
return true;
@@ -1671,9 +1470,8 @@ class Tangential_complex {
// star(center_point)
template <typename OutputIterator> // value_type = std::size_t
- bool is_simplex_consistent(
- std::size_t center_point,
- Incident_simplex const& s, // without "center_point"
+ bool is_simplex_consistent(std::size_t center_point,
+ Incident_simplex const &s, // without "center_point"
OutputIterator points_whose_star_does_not_contain_s,
bool check_also_in_non_maximal_faces = false) const {
Simplex full_simplex = s;
@@ -1686,10 +1484,9 @@ class Tangential_complex {
for (; it_point_idx != s.end(); ++it_point_idx) {
std::size_t point_idx = *it_point_idx;
// Don't check infinite simplices
- if (point_idx == (std::numeric_limits<std::size_t>::max)())
- continue;
+ if (point_idx == (std::numeric_limits<std::size_t>::max)()) continue;
- Star const& star = m_stars[point_idx];
+ Star const &star = m_stars[point_idx];
// What we're looking for is full_simplex \ point_idx
Incident_simplex is_to_find = full_simplex;
@@ -1699,15 +1496,11 @@ class Tangential_complex {
// For each simplex "is" of the star, check if ic_to_simplex is
// included in "is"
bool found = false;
- for (Star::const_iterator is = star.begin(), is_end = star.end();
- !found && is != is_end; ++is) {
- if (std::includes(is->begin(), is->end(),
- is_to_find.begin(), is_to_find.end()))
- found = true;
+ for (Star::const_iterator is = star.begin(), is_end = star.end(); !found && is != is_end; ++is) {
+ if (std::includes(is->begin(), is->end(), is_to_find.begin(), is_to_find.end())) found = true;
}
- if (!found)
- *points_whose_star_does_not_contain_s++ = point_idx;
+ if (!found) *points_whose_star_does_not_contain_s++ = point_idx;
} else {
// Does the star contain is_to_find?
if (std::find(star.begin(), star.end(), is_to_find) == star.end())
@@ -1721,19 +1514,15 @@ class Tangential_complex {
// A simplex here is a list of point indices
// It looks for s in star(p).
// "s" contains all the points of the simplex except p.
- bool is_simplex_in_star(std::size_t p,
- Incident_simplex const& s,
- bool check_also_in_non_maximal_faces = true) const {
- Star const& star = m_stars[p];
+ bool is_simplex_in_star(std::size_t p, Incident_simplex const &s, bool check_also_in_non_maximal_faces = true) const {
+ Star const &star = m_stars[p];
if (check_also_in_non_maximal_faces) {
// For each simplex "is" of the star, check if ic_to_simplex is
// included in "is"
bool found = false;
- for (Star::const_iterator is = star.begin(), is_end = star.end();
- !found && is != is_end; ++is) {
- if (std::includes(is->begin(), is->end(), s.begin(), s.end()))
- found = true;
+ for (Star::const_iterator is = star.begin(), is_end = star.end(); !found && is != is_end; ++is) {
+ if (std::includes(is->begin(), is->end(), s.begin(), s.end())) found = true;
}
return found;
@@ -1745,64 +1534,55 @@ class Tangential_complex {
#ifdef GUDHI_USE_TBB
// Functor for try_to_solve_inconsistencies_in_a_local_triangulation function
class Try_to_solve_inconsistencies_in_a_local_triangulation {
- Tangential_complex & m_tc;
+ Tangential_complex &m_tc;
double m_max_perturb;
tbb::combinable<std::size_t> &m_num_inconsistencies;
tbb::combinable<std::vector<std::size_t> > &m_updated_points;
public:
// Constructor
- Try_to_solve_inconsistencies_in_a_local_triangulation(Tangential_complex &tc,
- double max_perturb,
+ Try_to_solve_inconsistencies_in_a_local_triangulation(Tangential_complex &tc, double max_perturb,
tbb::combinable<std::size_t> &num_inconsistencies,
tbb::combinable<std::vector<std::size_t> > &updated_points)
: m_tc(tc),
- m_max_perturb(max_perturb),
- m_num_inconsistencies(num_inconsistencies),
- m_updated_points(updated_points) { }
+ m_max_perturb(max_perturb),
+ m_num_inconsistencies(num_inconsistencies),
+ m_updated_points(updated_points) {}
// Constructor
- Try_to_solve_inconsistencies_in_a_local_triangulation(const Try_to_solve_inconsistencies_in_a_local_triangulation&
- tsilt)
+ Try_to_solve_inconsistencies_in_a_local_triangulation(
+ const Try_to_solve_inconsistencies_in_a_local_triangulation &tsilt)
: m_tc(tsilt.m_tc),
- m_max_perturb(tsilt.m_max_perturb),
- m_num_inconsistencies(tsilt.m_num_inconsistencies),
- m_updated_points(tsilt.m_updated_points) { }
+ m_max_perturb(tsilt.m_max_perturb),
+ m_num_inconsistencies(tsilt.m_num_inconsistencies),
+ m_updated_points(tsilt.m_updated_points) {}
// operator()
- void operator()(const tbb::blocked_range<size_t>& r) const {
+ void operator()(const tbb::blocked_range<size_t> &r) const {
for (size_t i = r.begin(); i != r.end(); ++i) {
- m_num_inconsistencies.local() +=
- m_tc.try_to_solve_inconsistencies_in_a_local_triangulation(i, m_max_perturb,
- std::back_inserter(m_updated_points.local()));
+ m_num_inconsistencies.local() += m_tc.try_to_solve_inconsistencies_in_a_local_triangulation(
+ i, m_max_perturb, std::back_inserter(m_updated_points.local()));
}
}
};
#endif // GUDHI_USE_TBB
void perturb(std::size_t point_idx, double max_perturb) {
- const Tr_traits &local_tr_traits =
- m_triangulations[point_idx].tr().geom_traits();
- typename Tr_traits::Compute_coordinate_d coord =
- local_tr_traits.compute_coordinate_d_object();
- typename K::Translated_point_d k_transl =
- m_k.translated_point_d_object();
- typename K::Construct_vector_d k_constr_vec =
- m_k.construct_vector_d_object();
- typename K::Scaled_vector_d k_scaled_vec =
- m_k.scaled_vector_d_object();
-
- CGAL::Random_points_in_ball_d<Tr_bare_point>
- tr_point_in_ball_generator(m_intrinsic_dim,
- m_random_generator.get_double(0., max_perturb));
+ const Tr_traits &local_tr_traits = m_triangulations[point_idx].tr().geom_traits();
+ typename Tr_traits::Compute_coordinate_d coord = local_tr_traits.compute_coordinate_d_object();
+ typename K::Translated_point_d k_transl = m_k.translated_point_d_object();
+ typename K::Construct_vector_d k_constr_vec = m_k.construct_vector_d_object();
+ typename K::Scaled_vector_d k_scaled_vec = m_k.scaled_vector_d_object();
+
+ CGAL::Random_points_in_ball_d<Tr_bare_point> tr_point_in_ball_generator(
+ m_intrinsic_dim, m_random_generator.get_double(0., max_perturb));
Tr_point local_random_transl =
local_tr_traits.construct_weighted_point_d_object()(*tr_point_in_ball_generator++, 0);
Translation_for_perturb global_transl = k_constr_vec(m_ambient_dim);
const Tangent_space_basis &tsb = m_tangent_spaces[point_idx];
for (int i = 0; i < m_intrinsic_dim; ++i) {
- global_transl = k_transl(global_transl,
- k_scaled_vec(tsb[i], coord(local_random_transl, i)));
+ global_transl = k_transl(global_transl, k_scaled_vec(tsb[i], coord(local_random_transl, i)));
}
// Parallel
#if defined(GUDHI_USE_TBB)
@@ -1817,12 +1597,11 @@ class Tangential_complex {
// Return true if inconsistencies were found
template <typename OutputIt>
- bool try_to_solve_inconsistencies_in_a_local_triangulation(std::size_t tr_index,
- double max_perturb,
- OutputIt perturbed_pts_indices = CGAL::Emptyset_iterator()) {
+ bool try_to_solve_inconsistencies_in_a_local_triangulation(
+ std::size_t tr_index, double max_perturb, OutputIt perturbed_pts_indices = CGAL::Emptyset_iterator()) {
bool is_inconsistent = false;
- Star const& star = m_stars[tr_index];
+ Star const &star = m_stars[tr_index];
// For each incident simplex
Star::const_iterator it_inc_simplex = star.begin();
@@ -1831,8 +1610,7 @@ class Tangential_complex {
const Incident_simplex &incident_simplex = *it_inc_simplex;
// Don't check infinite cells
- if (is_infinite(incident_simplex))
- continue;
+ if (is_infinite(incident_simplex)) continue;
Simplex c = incident_simplex;
c.insert(tr_index); // Add the missing index
@@ -1854,31 +1632,24 @@ class Tangential_complex {
return is_inconsistent;
}
-
// 1st line: number of points
// Then one point per line
- std::ostream &export_point_set(std::ostream & os,
- bool use_perturbed_points = false,
+ std::ostream &export_point_set(std::ostream &os, bool use_perturbed_points = false,
const char *coord_separator = " ") const {
if (use_perturbed_points) {
std::vector<Point> perturbed_points;
perturbed_points.reserve(m_points.size());
- for (std::size_t i = 0; i < m_points.size(); ++i)
- perturbed_points.push_back(compute_perturbed_point(i));
+ for (std::size_t i = 0; i < m_points.size(); ++i) perturbed_points.push_back(compute_perturbed_point(i));
- return export_point_set(
- m_k, perturbed_points, os, coord_separator);
+ return export_point_set(m_k, perturbed_points, os, coord_separator);
} else {
- return export_point_set(
- m_k, m_points, os, coord_separator);
+ return export_point_set(m_k, m_points, os, coord_separator);
}
}
- template<typename ProjectionFunctor = CGAL::Identity<Point> >
- std::ostream &export_vertices_to_off(
- std::ostream & os, std::size_t &num_vertices,
- bool use_perturbed_points = false,
- ProjectionFunctor const& point_projection = ProjectionFunctor()) const {
+ template <typename ProjectionFunctor = CGAL::Identity<Point> >
+ std::ostream &export_vertices_to_off(std::ostream &os, std::size_t &num_vertices, bool use_perturbed_points = false,
+ ProjectionFunctor const &point_projection = ProjectionFunctor()) const {
if (m_points.empty()) {
num_vertices = 0;
return os;
@@ -1890,8 +1661,7 @@ class Tangential_complex {
const int N = (m_intrinsic_dim == 1 ? 2 : 1);
// Kernel functors
- typename K::Compute_coordinate_d coord =
- m_k.compute_coordinate_d_object();
+ typename K::Compute_coordinate_d coord = m_k.compute_coordinate_d_object();
#ifdef GUDHI_TC_EXPORT_ALL_COORDS_IN_OFF
int num_coords = m_ambient_dim;
@@ -1906,18 +1676,14 @@ class Tangential_complex {
typename Points::const_iterator it_p_end = m_points.end();
// For each point p
for (std::size_t i = 0; it_p != it_p_end; ++it_p, ++i) {
- Point p = point_projection(
- use_perturbed_points ? compute_perturbed_point(i) : *it_p);
+ Point p = point_projection(use_perturbed_points ? compute_perturbed_point(i) : *it_p);
for (int ii = 0; ii < N; ++ii) {
int j = 0;
- for (; j < num_coords; ++j)
- os << CGAL::to_double(coord(p, j)) << " ";
- if (j == 2)
- os << "0";
+ for (; j < num_coords; ++j) os << CGAL::to_double(coord(p, j)) << " ";
+ if (j == 2) os << "0";
#ifdef GUDHI_TC_EXPORT_NORMALS
- for (j = 0; j < num_coords; ++j)
- os << " " << CGAL::to_double(coord(*it_os->begin(), j));
+ for (j = 0; j < num_coords; ++j) os << " " << CGAL::to_double(coord(*it_os->begin(), j));
#endif
os << "\n";
}
@@ -1930,12 +1696,11 @@ class Tangential_complex {
return os;
}
- std::ostream &export_simplices_to_off(std::ostream & os, std::size_t &num_OFF_simplices,
+ std::ostream &export_simplices_to_off(std::ostream &os, std::size_t &num_OFF_simplices,
bool color_inconsistencies = false,
Simplex_set const *p_simpl_to_color_in_red = NULL,
Simplex_set const *p_simpl_to_color_in_green = NULL,
- Simplex_set const *p_simpl_to_color_in_blue = NULL)
- const {
+ Simplex_set const *p_simpl_to_color_in_blue = NULL) const {
// If m_intrinsic_dim = 1, each point is output two times
// (see export_vertices_to_off)
num_OFF_simplices = 0;
@@ -1948,10 +1713,9 @@ class Tangential_complex {
for (std::size_t idx = 0; it_tr != it_tr_end; ++it_tr, ++idx) {
bool is_star_inconsistent = false;
- Triangulation const& tr = it_tr->tr();
+ Triangulation const &tr = it_tr->tr();
- if (tr.current_dimension() < m_intrinsic_dim)
- continue;
+ if (tr.current_dimension() < m_intrinsic_dim) continue;
// Color for this star
std::stringstream color;
@@ -1977,23 +1741,16 @@ class Tangential_complex {
color_simplex = 0;
is_star_inconsistent = true;
} else {
- if (p_simpl_to_color_in_red &&
- std::find(
- p_simpl_to_color_in_red->begin(),
- p_simpl_to_color_in_red->end(),
- c) != p_simpl_to_color_in_red->end()) {
+ if (p_simpl_to_color_in_red && std::find(p_simpl_to_color_in_red->begin(), p_simpl_to_color_in_red->end(),
+ c) != p_simpl_to_color_in_red->end()) {
color_simplex = 1;
} else if (p_simpl_to_color_in_green &&
- std::find(
- p_simpl_to_color_in_green->begin(),
- p_simpl_to_color_in_green->end(),
- c) != p_simpl_to_color_in_green->end()) {
+ std::find(p_simpl_to_color_in_green->begin(), p_simpl_to_color_in_green->end(), c) !=
+ p_simpl_to_color_in_green->end()) {
color_simplex = 2;
} else if (p_simpl_to_color_in_blue &&
- std::find(
- p_simpl_to_color_in_blue->begin(),
- p_simpl_to_color_in_blue->end(),
- c) != p_simpl_to_color_in_blue->end()) {
+ std::find(p_simpl_to_color_in_blue->begin(), p_simpl_to_color_in_blue->end(), c) !=
+ p_simpl_to_color_in_blue->end()) {
color_simplex = 3;
}
}
@@ -2005,10 +1762,8 @@ class Tangential_complex {
if (m_intrinsic_dim == 1) {
Simplex tmp_c;
Simplex::iterator it = c.begin();
- for (; it != c.end(); ++it)
- tmp_c.insert(*it * 2);
- if (num_vertices == 2)
- tmp_c.insert(*tmp_c.rbegin() + 1);
+ for (; it != c.end(); ++it) tmp_c.insert(*it * 2);
+ if (num_vertices == 2) tmp_c.insert(*tmp_c.rbegin() + 1);
c = tmp_c;
}
@@ -2023,26 +1778,21 @@ class Tangential_complex {
Simplex triangle;
Simplex::iterator it = c.begin();
for (int i = 0; it != c.end(); ++i, ++it) {
- if (booleans[i])
- triangle.insert(*it);
+ if (booleans[i]) triangle.insert(*it);
}
- star_using_triangles.push_back(
- std::make_pair(triangle, color_simplex));
+ star_using_triangles.push_back(std::make_pair(triangle, color_simplex));
} while (std::next_permutation(booleans.begin(), booleans.end()));
}
}
// For each cell
- Star_using_triangles::const_iterator it_simplex =
- star_using_triangles.begin();
- Star_using_triangles::const_iterator it_simplex_end =
- star_using_triangles.end();
+ Star_using_triangles::const_iterator it_simplex = star_using_triangles.begin();
+ Star_using_triangles::const_iterator it_simplex_end = star_using_triangles.end();
for (; it_simplex != it_simplex_end; ++it_simplex) {
const Simplex &c = it_simplex->first;
// Don't export infinite cells
- if (is_infinite(c))
- continue;
+ if (is_infinite(c)) continue;
int color_simplex = it_simplex->second;
@@ -2054,46 +1804,42 @@ class Tangential_complex {
}
os << 3 << " " << sstr_c.str();
- if (color_inconsistencies || p_simpl_to_color_in_red
- || p_simpl_to_color_in_green || p_simpl_to_color_in_blue) {
+ if (color_inconsistencies || p_simpl_to_color_in_red || p_simpl_to_color_in_green || p_simpl_to_color_in_blue) {
switch (color_simplex) {
- case 0: os << " 255 255 0";
+ case 0:
+ os << " 255 255 0";
break;
- case 1: os << " 255 0 0";
+ case 1:
+ os << " 255 0 0";
break;
- case 2: os << " 0 255 0";
+ case 2:
+ os << " 0 255 0";
break;
- case 3: os << " 0 0 255";
+ case 3:
+ os << " 0 0 255";
break;
- default: os << " " << color.str();
+ default:
+ os << " " << color.str();
break;
}
}
++num_OFF_simplices;
os << "\n";
}
- if (is_star_inconsistent)
- ++num_inconsistent_stars;
+ if (is_star_inconsistent) ++num_inconsistent_stars;
}
#ifdef DEBUG_TRACES
- std::cerr
- << "\n==========================================================\n"
- << "Export from list of stars to OFF:\n"
- << " * Number of vertices: " << m_points.size() << "\n"
- << " * Total number of maximal simplices: " << num_maximal_simplices
- << "\n";
+ std::cerr << "\n==========================================================\n"
+ << "Export from list of stars to OFF:\n"
+ << " * Number of vertices: " << m_points.size() << "\n"
+ << " * Total number of maximal simplices: " << num_maximal_simplices << "\n";
if (color_inconsistencies) {
- std::cerr
- << " * Number of inconsistent stars: "
- << num_inconsistent_stars << " ("
- << (m_points.size() > 0 ?
- 100. * num_inconsistent_stars / m_points.size() : 0.) << "%)\n"
- << " * Number of inconsistent maximal simplices: "
- << num_inconsistent_maximal_simplices << " ("
- << (num_maximal_simplices > 0 ?
- 100. * num_inconsistent_maximal_simplices / num_maximal_simplices
- : 0.) << "%)\n";
+ std::cerr << " * Number of inconsistent stars: " << num_inconsistent_stars << " ("
+ << (m_points.size() > 0 ? 100. * num_inconsistent_stars / m_points.size() : 0.) << "%)\n"
+ << " * Number of inconsistent maximal simplices: " << num_inconsistent_maximal_simplices << " ("
+ << (num_maximal_simplices > 0 ? 100. * num_inconsistent_maximal_simplices / num_maximal_simplices : 0.)
+ << "%)\n";
}
std::cerr << "==========================================================\n";
#endif
@@ -2102,13 +1848,11 @@ class Tangential_complex {
}
public:
- std::ostream &export_simplices_to_off(
- const Simplicial_complex &complex,
- std::ostream & os, std::size_t &num_OFF_simplices,
+ std::ostream &export_simplices_to_off(const Simplicial_complex &complex, std::ostream &os,
+ std::size_t &num_OFF_simplices,
Simplex_set const *p_simpl_to_color_in_red = NULL,
Simplex_set const *p_simpl_to_color_in_green = NULL,
- Simplex_set const *p_simpl_to_color_in_blue = NULL)
- const {
+ Simplex_set const *p_simpl_to_color_in_blue = NULL) const {
typedef Simplicial_complex::Simplex Simplex;
typedef Simplicial_complex::Simplex_set Simplex_set;
@@ -2117,31 +1861,24 @@ class Tangential_complex {
num_OFF_simplices = 0;
std::size_t num_maximal_simplices = 0;
- typename Simplex_set::const_iterator it_s =
- complex.simplex_range().begin();
- typename Simplex_set::const_iterator it_s_end =
- complex.simplex_range().end();
+ typename Simplex_set::const_iterator it_s = complex.simplex_range().begin();
+ typename Simplex_set::const_iterator it_s_end = complex.simplex_range().end();
// For each simplex
for (; it_s != it_s_end; ++it_s) {
Simplex c = *it_s;
++num_maximal_simplices;
int color_simplex = -1; // -1=no color, 0=yellow, 1=red, 2=green, 3=blue
- if (p_simpl_to_color_in_red &&
- std::find(
- p_simpl_to_color_in_red->begin(),
- p_simpl_to_color_in_red->end(),
- c) != p_simpl_to_color_in_red->end()) {
+ if (p_simpl_to_color_in_red && std::find(p_simpl_to_color_in_red->begin(), p_simpl_to_color_in_red->end(), c) !=
+ p_simpl_to_color_in_red->end()) {
color_simplex = 1;
} else if (p_simpl_to_color_in_green &&
- std::find(p_simpl_to_color_in_green->begin(),
- p_simpl_to_color_in_green->end(),
- c) != p_simpl_to_color_in_green->end()) {
+ std::find(p_simpl_to_color_in_green->begin(), p_simpl_to_color_in_green->end(), c) !=
+ p_simpl_to_color_in_green->end()) {
color_simplex = 2;
} else if (p_simpl_to_color_in_blue &&
- std::find(p_simpl_to_color_in_blue->begin(),
- p_simpl_to_color_in_blue->end(),
- c) != p_simpl_to_color_in_blue->end()) {
+ std::find(p_simpl_to_color_in_blue->begin(), p_simpl_to_color_in_blue->end(), c) !=
+ p_simpl_to_color_in_blue->end()) {
color_simplex = 3;
}
@@ -2151,8 +1888,7 @@ class Tangential_complex {
int num_vertices = static_cast<int>(c.size());
// Do not export smaller dimension simplices
- if (num_vertices < m_intrinsic_dim + 1)
- continue;
+ if (num_vertices < m_intrinsic_dim + 1) continue;
// If m_intrinsic_dim = 1, each point is output two times,
// so we need to multiply each index by 2
@@ -2161,10 +1897,8 @@ class Tangential_complex {
if (m_intrinsic_dim == 1) {
Simplex tmp_c;
Simplex::iterator it = c.begin();
- for (; it != c.end(); ++it)
- tmp_c.insert(*it * 2);
- if (num_vertices == 2)
- tmp_c.insert(*tmp_c.rbegin() + 1);
+ for (; it != c.end(); ++it) tmp_c.insert(*it * 2);
+ if (num_vertices == 2) tmp_c.insert(*tmp_c.rbegin() + 1);
c = tmp_c;
}
@@ -2179,11 +1913,10 @@ class Tangential_complex {
Simplex triangle;
Simplex::iterator it = c.begin();
for (int i = 0; it != c.end(); ++i, ++it) {
- if (booleans[i])
- triangle.insert(*it);
+ if (booleans[i]) triangle.insert(*it);
}
triangles.push_back(triangle);
- } while (std::next_permutation(booleans.begin(), booleans.end()));
+ } while (std::next_permutation(booleans.begin(), booleans.end()));
}
// For each cell
@@ -2191,8 +1924,7 @@ class Tangential_complex {
Triangles::const_iterator it_tri_end = triangles.end();
for (; it_tri != it_tri_end; ++it_tri) {
// Don't export infinite cells
- if (is_infinite(*it_tri))
- continue;
+ if (is_infinite(*it_tri)) continue;
os << 3 << " ";
Simplex::const_iterator it_point_idx = it_tri->begin();
@@ -2200,18 +1932,22 @@ class Tangential_complex {
os << *it_point_idx << " ";
}
- if (p_simpl_to_color_in_red || p_simpl_to_color_in_green
- || p_simpl_to_color_in_blue) {
+ if (p_simpl_to_color_in_red || p_simpl_to_color_in_green || p_simpl_to_color_in_blue) {
switch (color_simplex) {
- case 0: os << " 255 255 0";
+ case 0:
+ os << " 255 255 0";
break;
- case 1: os << " 255 0 0";
+ case 1:
+ os << " 255 0 0";
break;
- case 2: os << " 0 255 0";
+ case 2:
+ os << " 0 255 0";
break;
- case 3: os << " 0 0 255";
+ case 3:
+ os << " 0 0 255";
break;
- default: os << " 128 128 128";
+ default:
+ os << " 128 128 128";
break;
}
}
@@ -2222,13 +1958,11 @@ class Tangential_complex {
}
#ifdef DEBUG_TRACES
- std::cerr
- << "\n==========================================================\n"
- << "Export from complex to OFF:\n"
- << " * Number of vertices: " << m_points.size() << "\n"
- << " * Total number of maximal simplices: " << num_maximal_simplices
- << "\n"
- << "==========================================================\n";
+ std::cerr << "\n==========================================================\n"
+ << "Export from complex to OFF:\n"
+ << " * Number of vertices: " << m_points.size() << "\n"
+ << " * Total number of maximal simplices: " << num_maximal_simplices << "\n"
+ << "==========================================================\n";
#endif
return os;
diff --git a/utilities/Alpha_complex/alpha_complex_3d_persistence.cpp b/utilities/Alpha_complex/alpha_complex_3d_persistence.cpp
index 8cda0b70..6e603155 100644
--- a/utilities/Alpha_complex/alpha_complex_3d_persistence.cpp
+++ b/utilities/Alpha_complex/alpha_complex_3d_persistence.cpp
@@ -266,6 +266,6 @@ void program_options(int argc, char *argv[], std::string &off_file_points, std::
std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
std::cout << visible << std::endl;
- std::abort();
+ exit(-1);
}
}
diff --git a/utilities/Alpha_complex/alpha_complex_persistence.cpp b/utilities/Alpha_complex/alpha_complex_persistence.cpp
index 42390b0e..8e6c40b7 100644
--- a/utilities/Alpha_complex/alpha_complex_persistence.cpp
+++ b/utilities/Alpha_complex/alpha_complex_persistence.cpp
@@ -133,6 +133,6 @@ void program_options(int argc, char *argv[], std::string &off_file_points, std::
std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
std::cout << visible << std::endl;
- std::abort();
+ exit(-1);
}
}
diff --git a/utilities/Alpha_complex/exact_alpha_complex_3d_persistence.cpp b/utilities/Alpha_complex/exact_alpha_complex_3d_persistence.cpp
index cbe003ff..61f49bb1 100644
--- a/utilities/Alpha_complex/exact_alpha_complex_3d_persistence.cpp
+++ b/utilities/Alpha_complex/exact_alpha_complex_3d_persistence.cpp
@@ -260,6 +260,6 @@ void program_options(int argc, char *argv[], std::string &off_file_points, std::
std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
std::cout << visible << std::endl;
- std::abort();
+ exit(-1);
}
}
diff --git a/utilities/Alpha_complex/periodic_alpha_complex_3d_persistence.cpp b/utilities/Alpha_complex/periodic_alpha_complex_3d_persistence.cpp
index 11010701..a261c5a3 100644
--- a/utilities/Alpha_complex/periodic_alpha_complex_3d_persistence.cpp
+++ b/utilities/Alpha_complex/periodic_alpha_complex_3d_persistence.cpp
@@ -297,6 +297,6 @@ void program_options(int argc, char *argv[], std::string &off_file_points, std::
std::cout << "Usage: " << argv[0] << " [options] input-file cuboid-file" << std::endl << std::endl;
std::cout << visible << std::endl;
- std::abort();
+ exit(-1);
}
}
diff --git a/utilities/Alpha_complex/weighted_alpha_complex_3d_persistence.cpp b/utilities/Alpha_complex/weighted_alpha_complex_3d_persistence.cpp
index cdeeabfc..aa7ddee2 100644
--- a/utilities/Alpha_complex/weighted_alpha_complex_3d_persistence.cpp
+++ b/utilities/Alpha_complex/weighted_alpha_complex_3d_persistence.cpp
@@ -311,6 +311,6 @@ void program_options(int argc, char *argv[], std::string &off_file_points, std::
std::cout << "Usage: " << argv[0] << " [options] input-file weight-file" << std::endl << std::endl;
std::cout << visible << std::endl;
- std::abort();
+ exit(-1);
}
}
diff --git a/utilities/Cech_complex/cech_persistence.cpp b/utilities/Cech_complex/cech_persistence.cpp
index abd9dbcd..93e92695 100644
--- a/utilities/Cech_complex/cech_persistence.cpp
+++ b/utilities/Cech_complex/cech_persistence.cpp
@@ -131,6 +131,6 @@ void program_options(int argc, char* argv[], std::string& off_file_points, std::
std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
std::cout << visible << std::endl;
- std::abort();
+ exit(-1);
}
}
diff --git a/utilities/Rips_complex/rips_correlation_matrix_persistence.cpp b/utilities/Rips_complex/rips_correlation_matrix_persistence.cpp
index c78677d2..287e8915 100644
--- a/utilities/Rips_complex/rips_correlation_matrix_persistence.cpp
+++ b/utilities/Rips_complex/rips_correlation_matrix_persistence.cpp
@@ -166,6 +166,6 @@ void program_options(int argc, char* argv[], std::string& csv_matrix_file, std::
std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
std::cout << visible << std::endl;
- std::abort();
+ exit(-1);
}
}
diff --git a/utilities/Rips_complex/rips_distance_matrix_persistence.cpp b/utilities/Rips_complex/rips_distance_matrix_persistence.cpp
index 53191ca7..c73152cf 100644
--- a/utilities/Rips_complex/rips_distance_matrix_persistence.cpp
+++ b/utilities/Rips_complex/rips_distance_matrix_persistence.cpp
@@ -128,6 +128,6 @@ void program_options(int argc, char* argv[], std::string& csv_matrix_file, std::
std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
std::cout << visible << std::endl;
- std::abort();
+ exit(-1);
}
}
diff --git a/utilities/Rips_complex/rips_persistence.cpp b/utilities/Rips_complex/rips_persistence.cpp
index 7cee927e..9410b9c2 100644
--- a/utilities/Rips_complex/rips_persistence.cpp
+++ b/utilities/Rips_complex/rips_persistence.cpp
@@ -130,6 +130,6 @@ void program_options(int argc, char* argv[], std::string& off_file_points, std::
std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
std::cout << visible << std::endl;
- std::abort();
+ exit(-1);
}
}
diff --git a/utilities/Rips_complex/sparse_rips_persistence.cpp b/utilities/Rips_complex/sparse_rips_persistence.cpp
index bcd5c2c5..6d4d86fd 100644
--- a/utilities/Rips_complex/sparse_rips_persistence.cpp
+++ b/utilities/Rips_complex/sparse_rips_persistence.cpp
@@ -128,6 +128,6 @@ void program_options(int argc, char* argv[], std::string& off_file_points, std::
std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
std::cout << visible << std::endl;
- std::abort();
+ exit(-1);
}
}
diff --git a/utilities/Witness_complex/strong_witness_persistence.cpp b/utilities/Witness_complex/strong_witness_persistence.cpp
index 9d23df74..f386e992 100644
--- a/utilities/Witness_complex/strong_witness_persistence.cpp
+++ b/utilities/Witness_complex/strong_witness_persistence.cpp
@@ -151,6 +151,6 @@ void program_options(int argc, char* argv[], int& nbL, std::string& file_name, s
std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
std::cout << visible << std::endl;
- std::abort();
+ exit(-1);
}
}
diff --git a/utilities/Witness_complex/weak_witness_persistence.cpp b/utilities/Witness_complex/weak_witness_persistence.cpp
index 1315d2ba..ea00cfe7 100644
--- a/utilities/Witness_complex/weak_witness_persistence.cpp
+++ b/utilities/Witness_complex/weak_witness_persistence.cpp
@@ -151,6 +151,6 @@ void program_options(int argc, char* argv[], int& nbL, std::string& file_name, s
std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
std::cout << visible << std::endl;
- std::abort();
+ exit(-1);
}
}