author    Vincent Rouvreau <vincent.rouvreau@inria.fr>  2022-02-01 16:21:10 +0100
committer Vincent Rouvreau <vincent.rouvreau@inria.fr>  2022-02-01 16:21:10 +0100
commit    15163959fd57ea5318e19a5613cc69bc3f0f6b9e (patch)
tree      183314a8b538bf6a39b81b20825e9416d66ae43e /src
parent    002487d3cd747d6ff979f33474d8bb0a7e61f44d (diff)
parent    7f1b8eb706c72921141b53e607d6e2aa28e2bf19 (diff)
Merge master and resolve conflicts
Diffstat (limited to 'src')
-rw-r--r--  src/Alpha_complex/include/gudhi/Alpha_complex.h | 12
-rw-r--r--  src/Alpha_complex/test/Alpha_complex_unit_test.cpp | 15
-rw-r--r--  src/CMakeLists.txt | 4
-rw-r--r--  src/Cech_complex/benchmark/cech_complex_benchmark.cpp | 2
-rw-r--r--  src/Persistent_cohomology/example/CMakeLists.txt | 4
-rw-r--r--  src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Field_Zp.h | 2
-rw-r--r--  src/Simplex_tree/example/CMakeLists.txt | 2
-rw-r--r--  src/Simplex_tree/example/README | 73
-rw-r--r--  src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h | 9
-rw-r--r--  src/Toplex_map/benchmark/CMakeLists.txt | 4
-rw-r--r--  src/cmake/modules/GUDHI_modules.cmake | 6
-rw-r--r--  src/cmake/modules/GUDHI_options.cmake | 5
-rw-r--r--  src/cmake/modules/GUDHI_third_party_libraries.cmake | 176
-rw-r--r--  src/common/benchmark/CMakeLists.txt | 4
-rw-r--r--  src/common/include/gudhi/reader_utils.h | 6
-rw-r--r--  src/python/CMakeLists.txt | 67
-rw-r--r--  src/python/doc/alpha_complex_ref.rst | 1
-rw-r--r--  src/python/doc/alpha_complex_sum.inc | 24
-rw-r--r--  src/python/doc/alpha_complex_user.rst | 104
-rw-r--r--  src/python/doc/datasets_generators.rst | 2
-rwxr-xr-x  src/python/example/alpha_complex_diagram_persistence_from_off_file_example.py | 55
-rwxr-xr-x  src/python/example/alpha_rips_persistence_bottleneck_distance.py | 110
-rwxr-xr-x  src/python/example/plot_alpha_complex.py | 5
-rw-r--r--  src/python/gudhi/alpha_complex.pyx | 62
-rw-r--r--  src/python/gudhi/cubical_complex.pyx | 6
-rw-r--r--  src/python/gudhi/datasets/generators/_points.cc | 11
-rw-r--r--  src/python/gudhi/datasets/generators/points.py | 19
-rw-r--r--  src/python/gudhi/periodic_cubical_complex.pyx | 6
-rw-r--r--  src/python/gudhi/representations/vector_methods.py | 237
-rw-r--r--  src/python/gudhi/simplex_tree.pyx | 25
-rw-r--r--  src/python/include/Alpha_complex_factory.h | 118
-rw-r--r--  src/python/include/Alpha_complex_interface.h | 52
-rwxr-xr-x  src/python/test/test_alpha_complex.py | 152
-rwxr-xr-x  src/python/test/test_betti_curve_representations.py | 59
-rwxr-xr-x  src/python/test/test_cubical_complex.py | 25
-rwxr-xr-x  src/python/test/test_dtm.py | 14
-rwxr-xr-x  src/python/test/test_reader_utils.py | 33
-rwxr-xr-x  src/python/test/test_representations.py | 72
-rwxr-xr-x  src/python/test/test_simplex_tree.py | 44
39 files changed, 1005 insertions, 622 deletions
diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex.h b/src/Alpha_complex/include/gudhi/Alpha_complex.h
index e03bb161..028ec9bb 100644
--- a/src/Alpha_complex/include/gudhi/Alpha_complex.h
+++ b/src/Alpha_complex/include/gudhi/Alpha_complex.h
@@ -20,6 +20,7 @@
#include <stdlib.h>
#include <math.h> // isnan, fmax
#include <memory> // for std::unique_ptr
+#include <cstddef> // for std::size_t
#include <CGAL/Delaunay_triangulation.h>
#include <CGAL/Regular_triangulation.h> // aka. Weighted Delaunay triangulation
@@ -213,6 +214,15 @@ class Alpha_complex {
Alpha_complex (Alpha_complex&& other) = delete;
Alpha_complex& operator= (Alpha_complex&& other) = delete;
+ /** \brief Returns the number of finite vertices in the triangulation.
+ */
+ std::size_t num_vertices() const {
+ if (triangulation_ == nullptr)
+ return 0;
+ else
+ return triangulation_->number_of_vertices();
+ }
+
/** \brief get_point returns the point corresponding to the vertex given as parameter.
*
* @param[in] vertex Vertex handle of the point to retrieve.
@@ -373,7 +383,7 @@ class Alpha_complex {
// --------------------------------------------------------------------------------------------
// Simplex_tree construction from loop on triangulation finite full cells list
- if (triangulation_->number_of_vertices() > 0) {
+ if (num_vertices() > 0) {
for (auto cit = triangulation_->finite_full_cells_begin();
cit != triangulation_->finite_full_cells_end();
++cit) {
diff --git a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp
index 4b37e4bd..f74ad217 100644
--- a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp
+++ b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp
@@ -56,6 +56,9 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(Alpha_complex_from_OFF_file, TestedKernel, list_of
Gudhi::Simplex_tree<> simplex_tree_60;
BOOST_CHECK(alpha_complex_from_file.create_complex(simplex_tree_60, max_alpha_square_value));
+ std::clog << "alpha_complex_from_file.num_vertices()=" << alpha_complex_from_file.num_vertices() << std::endl;
+ BOOST_CHECK(alpha_complex_from_file.num_vertices() == 7);
+
std::clog << "simplex_tree_60.dimension()=" << simplex_tree_60.dimension() << std::endl;
BOOST_CHECK(simplex_tree_60.dimension() == 2);
@@ -72,6 +75,9 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(Alpha_complex_from_OFF_file, TestedKernel, list_of
Gudhi::Simplex_tree<> simplex_tree_59;
BOOST_CHECK(alpha_complex_from_file.create_complex(simplex_tree_59, max_alpha_square_value));
+ std::clog << "alpha_complex_from_file.num_vertices()=" << alpha_complex_from_file.num_vertices() << std::endl;
+ BOOST_CHECK(alpha_complex_from_file.num_vertices() == 7);
+
std::clog << "simplex_tree_59.dimension()=" << simplex_tree_59.dimension() << std::endl;
BOOST_CHECK(simplex_tree_59.dimension() == 2);
@@ -120,6 +126,9 @@ BOOST_AUTO_TEST_CASE(Alpha_complex_from_points) {
Gudhi::Simplex_tree<> simplex_tree;
BOOST_CHECK(alpha_complex_from_points.create_complex(simplex_tree));
+ std::clog << "alpha_complex_from_points.num_vertices()=" << alpha_complex_from_points.num_vertices() << std::endl;
+ BOOST_CHECK(alpha_complex_from_points.num_vertices() == points.size());
+
// Another way to check num_simplices
std::clog << "Iterator on alpha complex simplices in the filtration order, with [filtration value]:" << std::endl;
int num_simplices = 0;
@@ -240,6 +249,9 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(Alpha_complex_from_empty_points, TestedKernel, lis
// ----------------------------------------------------------------------------
Gudhi::alpha_complex::Alpha_complex<TestedKernel> alpha_complex_from_points(points);
+ std::clog << "alpha_complex_from_points.num_vertices()=" << alpha_complex_from_points.num_vertices() << std::endl;
+ BOOST_CHECK(alpha_complex_from_points.num_vertices() == points.size());
+
// Test to the limit
BOOST_CHECK_THROW (alpha_complex_from_points.get_point(0), std::out_of_range);
@@ -291,6 +303,9 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(Alpha_complex_with_duplicated_points, TestedKernel
std::clog << "create_complex" << std::endl;
BOOST_CHECK(alpha_complex_from_points.create_complex(simplex_tree));
+ std::clog << "alpha_complex_from_points.num_vertices()=" << alpha_complex_from_points.num_vertices() << std::endl;
+ BOOST_CHECK(alpha_complex_from_points.num_vertices() < points.size());
+
std::clog << "simplex_tree.num_vertices()=" << simplex_tree.num_vertices()
<< std::endl;
BOOST_CHECK(simplex_tree.num_vertices() < points.size());
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index 8f6a1ccc..8023e04c 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -2,9 +2,9 @@ cmake_minimum_required(VERSION 3.5)
project(GUDHI)
-include(CMakeGUDHIVersion.txt)
-
list(APPEND CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake/modules/")
+include(CMakeGUDHIVersion.txt)
+include(GUDHI_options)
set(GUDHI_MODULES "" CACHE INTERNAL "GUDHI_MODULES")
set(GUDHI_MISSING_MODULES "" CACHE INTERNAL "GUDHI_MISSING_MODULES")
diff --git a/src/Cech_complex/benchmark/cech_complex_benchmark.cpp b/src/Cech_complex/benchmark/cech_complex_benchmark.cpp
index e489e8a4..2e4adce4 100644
--- a/src/Cech_complex/benchmark/cech_complex_benchmark.cpp
+++ b/src/Cech_complex/benchmark/cech_complex_benchmark.cpp
@@ -49,7 +49,7 @@ class Minimal_enclosing_ball_radius {
point_cloud.push_back(p1);
point_cloud.push_back(p2);
- GUDHI_CHECK((p1.end() - p1.begin()) != (p2.end() - p2.begin()), "inconsistent point dimensions");
+ GUDHI_CHECK((p1.end() - p1.begin()) == (p2.end() - p2.begin()), "inconsistent point dimensions");
Min_sphere min_sphere(p1.end() - p1.begin(), point_cloud.begin(), point_cloud.end());
return std::sqrt(min_sphere.squared_radius());
diff --git a/src/Persistent_cohomology/example/CMakeLists.txt b/src/Persistent_cohomology/example/CMakeLists.txt
index 3e7e9369..d66954d7 100644
--- a/src/Persistent_cohomology/example/CMakeLists.txt
+++ b/src/Persistent_cohomology/example/CMakeLists.txt
@@ -40,9 +40,9 @@ if(TARGET Boost::program_options)
target_link_libraries(persistence_from_file ${TBB_LIBRARIES})
endif()
add_test(NAME Persistent_cohomology_example_from_file_3_2_0 COMMAND $<TARGET_FILE:persistence_from_file>
- "${CMAKE_SOURCE_DIR}/data/filtered_simplicial_complex/bunny_5000_complex.fsc" "-p" "2" "-m" "0")
+ "${CMAKE_SOURCE_DIR}/data/filtered_simplicial_complex/Klein_bottle_complex.fsc" "-p" "2" "-m" "0")
add_test(NAME Persistent_cohomology_example_from_file_3_3_100 COMMAND $<TARGET_FILE:persistence_from_file>
- "${CMAKE_SOURCE_DIR}/data/filtered_simplicial_complex/bunny_5000_complex.fsc" "-p" "3" "-m" "100")
+ "${CMAKE_SOURCE_DIR}/data/filtered_simplicial_complex/Klein_bottle_complex.fsc" "-p" "3" "-m" "100")
endif()
if(GMP_FOUND)
diff --git a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Field_Zp.h b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Field_Zp.h
index 8ec89e41..f442b632 100644
--- a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Field_Zp.h
+++ b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Field_Zp.h
@@ -34,8 +34,6 @@ class Field_Zp {
}
void init(int charac) {
- assert(charac > 0); // division by zero + non negative values
-
Prime = charac;
// Check that the provided prime is less than the maximum allowed as int, calculation below, and 'plus_times_equal' function : 46337 ; i.e (max_prime-1)*max_prime <= INT_MAX
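The 46337 bound quoted in the comment above can be checked directly: it is the largest prime p with (p - 1) * p <= INT_MAX, which is what keeps the modular products in 'plus_times_equal' inside int range. A minimal Python sketch of that check, assuming a 32-bit signed int as the comment implies:

    # Verify the 46337 bound from the Field_Zp comment, assuming a 32-bit int.
    INT_MAX = 2**31 - 1
    assert 46336 * 46337 <= INT_MAX   # 2147071232, fits
    # 46338 and 46340 are even, 46339 = 149 * 311, 46341 = 3 * 15447,
    # and any larger prime p gives (p - 1) * p >= 46341 * 46342 > INT_MAX:
    assert 46341 * 46342 > INT_MAX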
diff --git a/src/Simplex_tree/example/CMakeLists.txt b/src/Simplex_tree/example/CMakeLists.txt
index 73b2c6f9..81d352fc 100644
--- a/src/Simplex_tree/example/CMakeLists.txt
+++ b/src/Simplex_tree/example/CMakeLists.txt
@@ -29,7 +29,7 @@ if(GMP_FOUND AND NOT CGAL_VERSION VERSION_LESS 4.11.0)
target_link_libraries(Simplex_tree_example_alpha_shapes_3_from_off ${TBB_LIBRARIES})
endif()
add_test(NAME Simplex_tree_example_alpha_shapes_3_from_off COMMAND $<TARGET_FILE:Simplex_tree_example_alpha_shapes_3_from_off>
- "${CMAKE_SOURCE_DIR}/data/points/bunny_5000.off")
+ "${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off")
endif()
diff --git a/src/Simplex_tree/example/README b/src/Simplex_tree/example/README
deleted file mode 100644
index a9498173..00000000
--- a/src/Simplex_tree/example/README
+++ /dev/null
@@ -1,73 +0,0 @@
-To build the example, run in a Terminal:
-
-cd /path-to-gudhi/
-cmake .
-cd /path-to-example/
-make
-
-
-Example of use :
-
-*** Simple simplex tree construction
-
-./Simplex_tree_example_simple_simplex_tree
-
-********************************************************************
-EXAMPLE OF SIMPLE INSERTION
- * INSERT 0
- + 0 INSERTED
- * INSERT 1
- + 1 INSERTED
- * INSERT (0,1)
- + (0,1) INSERTED
- * INSERT 2
- + 2 INSERTED
- * INSERT (2,0)
- + (2,0) INSERTED
- * INSERT (2,1)
- + (2,1) INSERTED
- * INSERT (2,1,0)
- + (2,1,0) INSERTED
- * INSERT 3
- + 3 INSERTED
- * INSERT (3,0)
- + (3,0) INSERTED
- * INSERT 0 (already inserted)
- - 0 NOT INSERTED
- * INSERT (2,1,0) (already inserted)
- - (2,1,0) NOT INSERTED
-********************************************************************
-* The complex contains 9 simplices
- - dimension 2 - filtration 0.4
-* Iterator on Simplices in the filtration, with [filtration value]:
- [0.1] 0
- [0.1] 1
- [0.1] 2
- [0.1] 3
- [0.2] 1 0
- [0.2] 2 0
- [0.2] 2 1
- [0.2] 3 0
- [0.3] 2 1 0
-
-*** Simplex tree construction with Z/2Z coefficients on weighted graph Klein bottle file:
-
-./Simplex_tree_example_from_cliques_of_graph ../../../data/points/Klein_bottle_complex.txt 2
-Insert the 1-skeleton in the simplex tree in 0 s.
-Expand the simplex tree in 0 s.
-Information of the Simplex Tree:
- Number of vertices = 10 Number of simplices = 82
-
-with Z/3Z coefficients:
-
-./Simplex_tree_example_from_cliques_of_graph ../../../data/points/Klein_bottle_complex.txt 3
-
-Insert the 1-skeleton in the simplex tree in 0 s.
-Expand the simplex tree in 0 s.
-Information of the Simplex Tree:
- Number of vertices = 10 Number of simplices = 106
-
-*** Simplex_tree computed and displayed from a 3D alpha complex:
- [ Requires CGAL, GMP and GMPXX to be installed]
-
-./Simplex_tree_example_alpha_shapes_3_from_off ../../../data/points/bunny_5000
diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h
index 653a63fd..0fd56c67 100644
--- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h
+++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h
@@ -52,8 +52,7 @@ when \f$ \tau \neq \sigma\f$ we say that \f$ \tau\f$ is a proper-face of \f$ \si
An abstract simplicial complex is a set of simplices that contains all the faces of its simplices.
The 1-skeleton of a simplicial complex (or its graph) consists of its elements of dimension lower than 2.
- *\image html "ds_representation.png" "Skeleton-blocker representation" width=20cm
-
+\image html "ds_representation.png" "Skeleton-blocker representation"
To encode, a simplicial complex, one can encodes all its simplices.
In case when this number gets too large,
@@ -73,11 +72,7 @@ For instance, the numbers of blockers is depicted for random 3-dimensional spher
in next figure. Storing the graph and blockers of such simplicial complexes is much compact in this case than storing
their simplices.
-
- *\image html "blockers_curve.png" "Number of blockers of random triangulations of 3-spheres" width=10cm
-
-
-
+\image html "blockers_curve.png" "Number of blockers of random triangulations of 3-spheres"
\section API
diff --git a/src/Toplex_map/benchmark/CMakeLists.txt b/src/Toplex_map/benchmark/CMakeLists.txt
index 2d58a156..6703d9d0 100644
--- a/src/Toplex_map/benchmark/CMakeLists.txt
+++ b/src/Toplex_map/benchmark/CMakeLists.txt
@@ -1,3 +1,7 @@
project(Toplex_map_benchmark)
add_executable(Toplex_map_benchmark benchmark_tm.cpp)
+
+if (TBB_FOUND)
+ target_link_libraries(Toplex_map_benchmark ${TBB_LIBRARIES})
+endif()
diff --git a/src/cmake/modules/GUDHI_modules.cmake b/src/cmake/modules/GUDHI_modules.cmake
index ccaf1ac5..13248f7e 100644
--- a/src/cmake/modules/GUDHI_modules.cmake
+++ b/src/cmake/modules/GUDHI_modules.cmake
@@ -17,12 +17,6 @@ function(add_gudhi_module file_path)
endfunction(add_gudhi_module)
-option(WITH_GUDHI_BENCHMARK "Activate/desactivate benchmark compilation" OFF)
-option(WITH_GUDHI_EXAMPLE "Activate/desactivate examples compilation and installation" OFF)
-option(WITH_GUDHI_PYTHON "Activate/desactivate python module compilation and installation" ON)
-option(WITH_GUDHI_TEST "Activate/desactivate examples compilation and installation" ON)
-option(WITH_GUDHI_UTILITIES "Activate/desactivate utilities compilation and installation" ON)
-
if (WITH_GUDHI_BENCHMARK)
set(GUDHI_SUB_DIRECTORIES "${GUDHI_SUB_DIRECTORIES};benchmark")
endif()
diff --git a/src/cmake/modules/GUDHI_options.cmake b/src/cmake/modules/GUDHI_options.cmake
new file mode 100644
index 00000000..3cd0a489
--- /dev/null
+++ b/src/cmake/modules/GUDHI_options.cmake
@@ -0,0 +1,5 @@
+option(WITH_GUDHI_BENCHMARK "Activate/desactivate benchmark compilation" OFF)
+option(WITH_GUDHI_EXAMPLE "Activate/desactivate examples compilation and installation" OFF)
+option(WITH_GUDHI_PYTHON "Activate/desactivate python module compilation and installation" ON)
+option(WITH_GUDHI_TEST "Activate/desactivate examples compilation and installation" ON)
+option(WITH_GUDHI_UTILITIES "Activate/desactivate utilities compilation and installation" ON)
diff --git a/src/cmake/modules/GUDHI_third_party_libraries.cmake b/src/cmake/modules/GUDHI_third_party_libraries.cmake
index 023061f1..b316740d 100644
--- a/src/cmake/modules/GUDHI_third_party_libraries.cmake
+++ b/src/cmake/modules/GUDHI_third_party_libraries.cmake
@@ -6,6 +6,7 @@ find_package(Boost 1.56.0 QUIET OPTIONAL_COMPONENTS filesystem unit_test_framewo
if(NOT Boost_VERSION)
message(FATAL_ERROR "NOTICE: This program requires Boost and will not be compiled.")
endif(NOT Boost_VERSION)
+include_directories(${Boost_INCLUDE_DIRS})
find_package(GMP)
if(GMP_FOUND)
@@ -92,91 +93,92 @@ add_definitions( -DBOOST_SYSTEM_NO_DEPRECATED )
message(STATUS "boost include dirs:" ${Boost_INCLUDE_DIRS})
message(STATUS "boost library dirs:" ${Boost_LIBRARY_DIRS})
-# Find the correct Python interpreter.
-# Can be set with -DPYTHON_EXECUTABLE=/usr/bin/python3 or -DPython_ADDITIONAL_VERSIONS=3 for instance.
-find_package( PythonInterp )
-
-# find_python_module tries to import module in Python interpreter and to retrieve its version number
-# returns ${PYTHON_MODULE_NAME_UP}_VERSION and ${PYTHON_MODULE_NAME_UP}_FOUND
-function( find_python_module PYTHON_MODULE_NAME )
- string(TOUPPER ${PYTHON_MODULE_NAME} PYTHON_MODULE_NAME_UP)
- execute_process(
- COMMAND ${PYTHON_EXECUTABLE} -c "import ${PYTHON_MODULE_NAME}; print(${PYTHON_MODULE_NAME}.__version__)"
- RESULT_VARIABLE PYTHON_MODULE_RESULT
- OUTPUT_VARIABLE PYTHON_MODULE_VERSION
- ERROR_VARIABLE PYTHON_MODULE_ERROR)
- if(PYTHON_MODULE_RESULT EQUAL 0)
- # Remove all carriage returns as it can be multiline
- string(REGEX REPLACE "\n" " " PYTHON_MODULE_VERSION "${PYTHON_MODULE_VERSION}")
- message ("++ Python module ${PYTHON_MODULE_NAME} - Version ${PYTHON_MODULE_VERSION} found")
-
- set(${PYTHON_MODULE_NAME_UP}_VERSION ${PYTHON_MODULE_VERSION} PARENT_SCOPE)
- set(${PYTHON_MODULE_NAME_UP}_FOUND TRUE PARENT_SCOPE)
- else()
- message ("PYTHON_MODULE_NAME = ${PYTHON_MODULE_NAME}
- - PYTHON_MODULE_RESULT = ${PYTHON_MODULE_RESULT}
- - PYTHON_MODULE_VERSION = ${PYTHON_MODULE_VERSION}
- - PYTHON_MODULE_ERROR = ${PYTHON_MODULE_ERROR}")
- unset(${PYTHON_MODULE_NAME_UP}_VERSION PARENT_SCOPE)
- set(${PYTHON_MODULE_NAME_UP}_FOUND FALSE PARENT_SCOPE)
- endif()
-endfunction( find_python_module )
-
-# For modules that do not define module.__version__
-function( find_python_module_no_version PYTHON_MODULE_NAME )
- string(TOUPPER ${PYTHON_MODULE_NAME} PYTHON_MODULE_NAME_UP)
- execute_process(
- COMMAND ${PYTHON_EXECUTABLE} -c "import ${PYTHON_MODULE_NAME}"
- RESULT_VARIABLE PYTHON_MODULE_RESULT
- ERROR_VARIABLE PYTHON_MODULE_ERROR)
- if(PYTHON_MODULE_RESULT EQUAL 0)
- # Remove carriage return
- message ("++ Python module ${PYTHON_MODULE_NAME} found")
- set(${PYTHON_MODULE_NAME_UP}_FOUND TRUE PARENT_SCOPE)
- else()
- message ("PYTHON_MODULE_NAME = ${PYTHON_MODULE_NAME}
- - PYTHON_MODULE_RESULT = ${PYTHON_MODULE_RESULT}
- - PYTHON_MODULE_ERROR = ${PYTHON_MODULE_ERROR}")
- set(${PYTHON_MODULE_NAME_UP}_FOUND FALSE PARENT_SCOPE)
+if (WITH_GUDHI_PYTHON)
+ # Find the correct Python interpreter.
+ # Can be set with -DPYTHON_EXECUTABLE=/usr/bin/python3 or -DPython_ADDITIONAL_VERSIONS=3 for instance.
+ find_package( PythonInterp )
+
+ # find_python_module tries to import module in Python interpreter and to retrieve its version number
+ # returns ${PYTHON_MODULE_NAME_UP}_VERSION and ${PYTHON_MODULE_NAME_UP}_FOUND
+ function( find_python_module PYTHON_MODULE_NAME )
+ string(TOUPPER ${PYTHON_MODULE_NAME} PYTHON_MODULE_NAME_UP)
+ execute_process(
+ COMMAND ${PYTHON_EXECUTABLE} -c "import ${PYTHON_MODULE_NAME}; print(${PYTHON_MODULE_NAME}.__version__)"
+ RESULT_VARIABLE PYTHON_MODULE_RESULT
+ OUTPUT_VARIABLE PYTHON_MODULE_VERSION
+ ERROR_VARIABLE PYTHON_MODULE_ERROR)
+ if(PYTHON_MODULE_RESULT EQUAL 0)
+ # Remove all carriage returns as it can be multiline
+ string(REGEX REPLACE "\n" " " PYTHON_MODULE_VERSION "${PYTHON_MODULE_VERSION}")
+ message ("++ Python module ${PYTHON_MODULE_NAME} - Version ${PYTHON_MODULE_VERSION} found")
+
+ set(${PYTHON_MODULE_NAME_UP}_VERSION ${PYTHON_MODULE_VERSION} PARENT_SCOPE)
+ set(${PYTHON_MODULE_NAME_UP}_FOUND TRUE PARENT_SCOPE)
+ else()
+ message ("PYTHON_MODULE_NAME = ${PYTHON_MODULE_NAME}
+ - PYTHON_MODULE_RESULT = ${PYTHON_MODULE_RESULT}
+ - PYTHON_MODULE_VERSION = ${PYTHON_MODULE_VERSION}
+ - PYTHON_MODULE_ERROR = ${PYTHON_MODULE_ERROR}")
+ unset(${PYTHON_MODULE_NAME_UP}_VERSION PARENT_SCOPE)
+ set(${PYTHON_MODULE_NAME_UP}_FOUND FALSE PARENT_SCOPE)
+ endif()
+ endfunction( find_python_module )
+
+ # For modules that do not define module.__version__
+ function( find_python_module_no_version PYTHON_MODULE_NAME )
+ string(TOUPPER ${PYTHON_MODULE_NAME} PYTHON_MODULE_NAME_UP)
+ execute_process(
+ COMMAND ${PYTHON_EXECUTABLE} -c "import ${PYTHON_MODULE_NAME}"
+ RESULT_VARIABLE PYTHON_MODULE_RESULT
+ ERROR_VARIABLE PYTHON_MODULE_ERROR)
+ if(PYTHON_MODULE_RESULT EQUAL 0)
+ # Remove carriage return
+ message ("++ Python module ${PYTHON_MODULE_NAME} found")
+ set(${PYTHON_MODULE_NAME_UP}_FOUND TRUE PARENT_SCOPE)
+ else()
+ message ("PYTHON_MODULE_NAME = ${PYTHON_MODULE_NAME}
+ - PYTHON_MODULE_RESULT = ${PYTHON_MODULE_RESULT}
+ - PYTHON_MODULE_ERROR = ${PYTHON_MODULE_ERROR}")
+ set(${PYTHON_MODULE_NAME_UP}_FOUND FALSE PARENT_SCOPE)
+ endif()
+ endfunction( find_python_module_no_version )
+
+ if( PYTHONINTERP_FOUND )
+ find_python_module("cython")
+ find_python_module("pytest")
+ find_python_module("matplotlib")
+ find_python_module("numpy")
+ find_python_module("scipy")
+ find_python_module("sphinx")
+ find_python_module("sklearn")
+ find_python_module("ot")
+ find_python_module("pybind11")
+ find_python_module("torch")
+ find_python_module("pykeops")
+ find_python_module("eagerpy")
+ find_python_module_no_version("hnswlib")
+ find_python_module("tensorflow")
+ find_python_module("sphinx_paramlinks")
+ find_python_module_no_version("python_docs_theme")
endif()
-endfunction( find_python_module_no_version )
-
-if( PYTHONINTERP_FOUND )
- find_python_module("cython")
- find_python_module("pytest")
- find_python_module("matplotlib")
- find_python_module("numpy")
- find_python_module("scipy")
- find_python_module("sphinx")
- find_python_module("sklearn")
- find_python_module("ot")
- find_python_module("pybind11")
- find_python_module("torch")
- find_python_module("pykeops")
- find_python_module("eagerpy")
- find_python_module_no_version("hnswlib")
- find_python_module("tensorflow")
- find_python_module("sphinx_paramlinks")
- find_python_module_no_version("python_docs_theme")
-endif()
-
-if(NOT GUDHI_PYTHON_PATH)
- message(FATAL_ERROR "ERROR: GUDHI_PYTHON_PATH is not valid.")
-endif(NOT GUDHI_PYTHON_PATH)
-
-option(WITH_GUDHI_PYTHON_RUNTIME_LIBRARY_DIRS "Build with setting runtime_library_dirs. Usefull when setting rpath is not allowed" ON)
-
-if(PYTHONINTERP_FOUND AND CYTHON_FOUND)
- if(SPHINX_FOUND)
- # Documentation generation is available through sphinx
- find_program( SPHINX_PATH sphinx-build )
-
- if(NOT SPHINX_PATH)
- if(PYTHON_VERSION_MAJOR EQUAL 3)
- # In Python3, just hack sphinx-build if it does not exist
- set(SPHINX_PATH "${PYTHON_EXECUTABLE}" "-m" "sphinx.cmd.build")
- endif(PYTHON_VERSION_MAJOR EQUAL 3)
- endif(NOT SPHINX_PATH)
- endif(SPHINX_FOUND)
-endif(PYTHONINTERP_FOUND AND CYTHON_FOUND)
-
+
+ if(NOT GUDHI_PYTHON_PATH)
+ message(FATAL_ERROR "ERROR: GUDHI_PYTHON_PATH is not valid.")
+ endif(NOT GUDHI_PYTHON_PATH)
+
+ option(WITH_GUDHI_PYTHON_RUNTIME_LIBRARY_DIRS "Build with setting runtime_library_dirs. Usefull when setting rpath is not allowed" ON)
+
+ if(PYTHONINTERP_FOUND AND CYTHON_FOUND)
+ if(SPHINX_FOUND)
+ # Documentation generation is available through sphinx
+ find_program( SPHINX_PATH sphinx-build )
+
+ if(NOT SPHINX_PATH)
+ if(PYTHON_VERSION_MAJOR EQUAL 3)
+ # In Python3, just hack sphinx-build if it does not exist
+ set(SPHINX_PATH "${PYTHON_EXECUTABLE}" "-m" "sphinx.cmd.build")
+ endif(PYTHON_VERSION_MAJOR EQUAL 3)
+ endif(NOT SPHINX_PATH)
+ endif(SPHINX_FOUND)
+ endif(PYTHONINTERP_FOUND AND CYTHON_FOUND)
+endif (WITH_GUDHI_PYTHON)
\ No newline at end of file
diff --git a/src/common/benchmark/CMakeLists.txt b/src/common/benchmark/CMakeLists.txt
index a3787d6e..26e4e6af 100644
--- a/src/common/benchmark/CMakeLists.txt
+++ b/src/common/benchmark/CMakeLists.txt
@@ -1,3 +1,7 @@
project(common_benchmark)
add_executable(Graph_simplicial_complex_benchmark Graph_simplicial_complex_benchmark.cpp)
+
+if (TBB_FOUND)
+ target_link_libraries(Graph_simplicial_complex_benchmark ${TBB_LIBRARIES})
+endif()
diff --git a/src/common/include/gudhi/reader_utils.h b/src/common/include/gudhi/reader_utils.h
index 0938f5c1..a1b104e2 100644
--- a/src/common/include/gudhi/reader_utils.h
+++ b/src/common/include/gudhi/reader_utils.h
@@ -14,7 +14,11 @@
#include <gudhi/graph_simplicial_complex.h>
#include <gudhi/Debug_utils.h>
-#include <boost/function_output_iterator.hpp>
+#if BOOST_VERSION < 106600
+# include <boost/function_output_iterator.hpp>
+#else
+# include <boost/iterator/function_output_iterator.hpp>
+#endif
#include <boost/graph/adjacency_list.hpp>
#include <iostream>
diff --git a/src/python/CMakeLists.txt b/src/python/CMakeLists.txt
index b2dd34ed..ab5bc56f 100644
--- a/src/python/CMakeLists.txt
+++ b/src/python/CMakeLists.txt
@@ -14,13 +14,16 @@ function( add_GUDHI_PYTHON_lib THE_LIB )
endif(EXISTS ${THE_LIB})
endfunction( add_GUDHI_PYTHON_lib )
-function( add_GUDHI_PYTHON_lib_dir THE_LIB_DIR )
- # deals when it is not set - error on windows
- if(EXISTS ${THE_LIB_DIR})
- set(GUDHI_PYTHON_LIBRARY_DIRS "${GUDHI_PYTHON_LIBRARY_DIRS}'${THE_LIB_DIR}', " PARENT_SCOPE)
- else()
- message("add_GUDHI_PYTHON_lib_dir - '${THE_LIB_DIR}' does not exist")
- endif()
+function( add_GUDHI_PYTHON_lib_dir)
+ # Argument may be a list (specifically on windows with release/debug paths)
+ foreach(THE_LIB_DIR IN LISTS ARGN)
+ # deals when it is not set - error on windows
+ if(EXISTS ${THE_LIB_DIR})
+ set(GUDHI_PYTHON_LIBRARY_DIRS "${GUDHI_PYTHON_LIBRARY_DIRS}'${THE_LIB_DIR}', " PARENT_SCOPE)
+ else()
+ message("add_GUDHI_PYTHON_lib_dir - '${THE_LIB_DIR}' does not exist")
+ endif()
+ endforeach()
endfunction( add_GUDHI_PYTHON_lib_dir )
# THE_TEST is the python test file name (without .py extension) containing tests functions
@@ -168,14 +171,20 @@ if(PYTHONINTERP_FOUND)
set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'nerve_gic', ")
endif ()
if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
- set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'alpha_complex', ")
set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'subsampling', ")
set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'tangential_complex', ")
set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'euclidean_witness_complex', ")
set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'euclidean_strong_witness_complex', ")
endif ()
+ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 5.1.0)
+ set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'alpha_complex', ")
+ endif ()
if(CGAL_FOUND)
+ if(NOT CGAL_VERSION VERSION_LESS 5.3.0)
+ # CGAL_HEADER_ONLY has been dropped for CGAL >= 5.3. Only the header-only version is supported.
+ set(CGAL_HEADER_ONLY True)
+ endif(NOT CGAL_VERSION VERSION_LESS 5.3.0)
# Add CGAL compilation args
if(CGAL_HEADER_ONLY)
add_gudhi_debug_info("CGAL header only version ${CGAL_VERSION}")
@@ -183,7 +192,7 @@ if(PYTHONINTERP_FOUND)
else(CGAL_HEADER_ONLY)
add_gudhi_debug_info("CGAL version ${CGAL_VERSION}")
add_GUDHI_PYTHON_lib("${CGAL_LIBRARY}")
- add_GUDHI_PYTHON_lib_dir("${CGAL_LIBRARIES_DIR}")
+ add_GUDHI_PYTHON_lib_dir(${CGAL_LIBRARIES_DIR})
message("** Add CGAL ${CGAL_LIBRARIES_DIR}")
# If CGAL is not header only, CGAL library may link with boost system,
if(CMAKE_BUILD_TYPE MATCHES Debug)
@@ -191,7 +200,7 @@ if(PYTHONINTERP_FOUND)
else()
add_GUDHI_PYTHON_lib("${Boost_SYSTEM_LIBRARY_RELEASE}")
endif()
- add_GUDHI_PYTHON_lib_dir("${Boost_LIBRARY_DIRS}")
+ add_GUDHI_PYTHON_lib_dir(${Boost_LIBRARY_DIRS})
message("** Add Boost ${Boost_LIBRARY_DIRS}")
endif(CGAL_HEADER_ONLY)
# GMP and GMPXX are not required, but if present, CGAL will link with them.
@@ -203,13 +212,13 @@ if(PYTHONINTERP_FOUND)
get_filename_component(GMP_LIBRARIES_DIR ${GMP_LIBRARIES} PATH)
message("GMP_LIBRARIES_DIR from GMP_LIBRARIES set to ${GMP_LIBRARIES_DIR}")
endif(NOT GMP_LIBRARIES_DIR)
- add_GUDHI_PYTHON_lib_dir("${GMP_LIBRARIES_DIR}")
+ add_GUDHI_PYTHON_lib_dir(${GMP_LIBRARIES_DIR})
message("** Add gmp ${GMP_LIBRARIES_DIR}")
if(GMPXX_FOUND)
add_gudhi_debug_info("GMPXX_LIBRARIES = ${GMPXX_LIBRARIES}")
set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_USE_GMPXX', ")
add_GUDHI_PYTHON_lib("${GMPXX_LIBRARIES}")
- add_GUDHI_PYTHON_lib_dir("${GMPXX_LIBRARIES_DIR}")
+ add_GUDHI_PYTHON_lib_dir(${GMPXX_LIBRARIES_DIR})
message("** Add gmpxx ${GMPXX_LIBRARIES_DIR}")
endif(GMPXX_FOUND)
endif(GMP_FOUND)
@@ -222,7 +231,7 @@ if(PYTHONINTERP_FOUND)
get_filename_component(MPFR_LIBRARIES_DIR ${MPFR_LIBRARIES} PATH)
message("MPFR_LIBRARIES_DIR from MPFR_LIBRARIES set to ${MPFR_LIBRARIES_DIR}")
endif(NOT MPFR_LIBRARIES_DIR)
- add_GUDHI_PYTHON_lib_dir("${MPFR_LIBRARIES_DIR}")
+ add_GUDHI_PYTHON_lib_dir(${MPFR_LIBRARIES_DIR})
message("** Add mpfr ${MPFR_LIBRARIES_DIR}")
endif(MPFR_FOUND)
endif(CGAL_FOUND)
@@ -243,14 +252,14 @@ if(PYTHONINTERP_FOUND)
if (TBB_FOUND AND WITH_GUDHI_USE_TBB)
add_gudhi_debug_info("TBB version ${TBB_INTERFACE_VERSION} found and used")
set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-DGUDHI_USE_TBB', ")
- if(CMAKE_BUILD_TYPE MATCHES Debug)
+ if((CMAKE_BUILD_TYPE MATCHES Debug) AND TBB_DEBUG_LIBRARY)
add_GUDHI_PYTHON_lib("${TBB_DEBUG_LIBRARY}")
add_GUDHI_PYTHON_lib("${TBB_MALLOC_DEBUG_LIBRARY}")
else()
add_GUDHI_PYTHON_lib("${TBB_RELEASE_LIBRARY}")
add_GUDHI_PYTHON_lib("${TBB_MALLOC_RELEASE_LIBRARY}")
endif()
- add_GUDHI_PYTHON_lib_dir("${TBB_LIBRARY_DIRS}")
+ add_GUDHI_PYTHON_lib_dir(${TBB_LIBRARY_DIRS})
message("** Add tbb ${TBB_LIBRARY_DIRS}")
set(GUDHI_PYTHON_INCLUDE_DIRS "${GUDHI_PYTHON_INCLUDE_DIRS}'${TBB_INCLUDE_DIRS}', ")
endif()
@@ -291,7 +300,12 @@ if(PYTHONINTERP_FOUND)
add_custom_target(python ALL DEPENDS gudhi.so
COMMENT "Do not forget to add ${CMAKE_CURRENT_BINARY_DIR}/ to your PYTHONPATH before using examples or tests")
- set(GUDHI_PYTHON_PATH_ENV "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}:$ENV{PYTHONPATH}")
+ # Path separator management for windows
+ if (WIN32)
+ set(GUDHI_PYTHON_PATH_ENV "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR};$ENV{PYTHONPATH}")
+ else(WIN32)
+ set(GUDHI_PYTHON_PATH_ENV "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}:$ENV{PYTHONPATH}")
+ endif(WIN32)
# Documentation generation is available through sphinx - requires all modules
# Make it first as sphinx test is by far the longest test which is nice when testing in parallel
if(SPHINX_PATH)
@@ -302,7 +316,7 @@ if(PYTHONINTERP_FOUND)
if(SCIPY_FOUND)
if(SKLEARN_FOUND)
if(OT_FOUND)
- if(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
+ if(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 5.1.0)
set (GUDHI_SPHINX_MESSAGE "Generating API documentation with Sphinx in ${CMAKE_CURRENT_BINARY_DIR}/sphinx/")
# User warning - Sphinx is a static pages generator, and configured to work fine with user_version
# Images and biblio warnings because not found on developper version
@@ -322,10 +336,10 @@ if(PYTHONINTERP_FOUND)
${SPHINX_PATH} -b doctest ${CMAKE_CURRENT_SOURCE_DIR}/doc ${CMAKE_CURRENT_BINARY_DIR}/doctest)
# Set missing or not modules
set(GUDHI_MODULES ${GUDHI_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MODULES")
- else(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
- message("++ Python documentation module will not be compiled because it requires a Eigen3 and CGAL version >= 4.11.0")
+ else(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 5.1.0)
+ message("++ Python documentation module will not be compiled because it requires a Eigen3 and CGAL version >= 5.1.0")
set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES")
- endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
+ endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 5.1.0)
else(OT_FOUND)
message("++ Python documentation module will not be compiled because POT was not found")
set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES")
@@ -361,13 +375,15 @@ if(PYTHONINTERP_FOUND)
# Test examples
- if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
+ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 5.1.0)
# Bottleneck and Alpha
add_test(NAME alpha_rips_persistence_bottleneck_distance_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
COMMAND ${CMAKE_COMMAND} -E env "${GUDHI_PYTHON_PATH_ENV}"
${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/alpha_rips_persistence_bottleneck_distance.py"
-f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -t 0.15 -d 3)
+ endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 5.1.0)
+ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
# Tangential
add_test(NAME tangential_complex_plain_homology_from_off_file_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
@@ -435,7 +451,7 @@ if(PYTHONINTERP_FOUND)
add_gudhi_py_test(test_cover_complex)
endif (NOT CGAL_VERSION VERSION_LESS 4.11.0)
- if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
+ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 5.1.0)
# Alpha
add_test(NAME alpha_complex_from_points_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
@@ -451,7 +467,7 @@ if(PYTHONINTERP_FOUND)
${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/alpha_complex_diagram_persistence_from_off_file_example.py"
--no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off)
add_gudhi_py_test(test_alpha_complex)
- endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
+ endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 5.1.0)
if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
# Euclidean witness
@@ -536,6 +552,11 @@ if(PYTHONINTERP_FOUND)
add_gudhi_py_test(test_representations)
endif()
+ # Betti curves
+ if(SKLEARN_FOUND AND SCIPY_FOUND)
+ add_gudhi_py_test(test_betti_curve_representations)
+ endif()
+
# Representations preprocessing
if(SKLEARN_FOUND)
add_gudhi_py_test(test_representations_preprocessing)
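The PYTHONPATH hunk above picks ';' as the entry separator on Windows and ':' elsewhere, which is exactly the value Python exposes as os.pathsep. A minimal sketch of the same rule on the Python side, with a hypothetical helper name that is not part of the build system:

    import os

    def gudhi_pythonpath_env(build_dir):
        # Hypothetical helper mirroring the CMake logic above: os.pathsep
        # evaluates to ';' on Windows and ':' on other platforms.
        return "PYTHONPATH=" + build_dir + os.pathsep + os.environ.get("PYTHONPATH", "")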
diff --git a/src/python/doc/alpha_complex_ref.rst b/src/python/doc/alpha_complex_ref.rst
index 7da79543..eaa72551 100644
--- a/src/python/doc/alpha_complex_ref.rst
+++ b/src/python/doc/alpha_complex_ref.rst
@@ -9,6 +9,5 @@ Alpha complex reference manual
.. autoclass:: gudhi.AlphaComplex
:members:
:undoc-members:
- :show-inheritance:
.. automethod:: gudhi.AlphaComplex.__init__
diff --git a/src/python/doc/alpha_complex_sum.inc b/src/python/doc/alpha_complex_sum.inc
index aeab493f..5c76fd54 100644
--- a/src/python/doc/alpha_complex_sum.inc
+++ b/src/python/doc/alpha_complex_sum.inc
@@ -1,15 +1,15 @@
.. table::
:widths: 30 40 30
- +----------------------------------------------------------------+-------------------------------------------------------------------------+---------------------------------------------------------------------------------------------------------------------------+
- | .. figure:: | Alpha complex is a simplicial complex constructed from the finite | :Author: Vincent Rouvreau |
- | ../../doc/Alpha_complex/alpha_complex_representation.png | cells of a Delaunay Triangulation. It has the same persistent homology | |
- | :alt: Alpha complex representation | as the Čech complex and is significantly smaller. | :Since: GUDHI 2.0.0 |
- | :figclass: align-center | | |
- | | | :License: MIT (`GPL v3 </licensing/>`_) |
- | | | |
- | | | :Requires: `Eigen <installation.html#eigen>`_ :math:`\geq` 3.1.0 and `CGAL <installation.html#cgal>`_ :math:`\geq` 4.11.0 |
- | | | |
- +----------------------------------------------------------------+-------------------------------------------------------------------------+---------------------------------------------------------------------------------------------------------------------------+
- | * :doc:`alpha_complex_user` | * :doc:`alpha_complex_ref` |
- +----------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+ +----------------------------------------------------------------+-------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------+
+ | .. figure:: | Alpha complex is a simplicial complex constructed from the finite | :Author: Vincent Rouvreau |
+ | ../../doc/Alpha_complex/alpha_complex_representation.png | cells of a Delaunay Triangulation. It has the same persistent homology | |
+ | :alt: Alpha complex representation | as the Čech complex and is significantly smaller. | :Since: GUDHI 2.0.0 |
+ | :figclass: align-center | | |
+ | | | :License: MIT (`GPL v3 </licensing/>`_) |
+ | | | |
+ | | | :Requires: `Eigen <installation.html#eigen>`_ :math:`\geq` 3.1.0 and `CGAL <installation.html#cgal>`_ :math:`\geq` 5.1 |
+ | | | |
+ +----------------------------------------------------------------+-------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------+
+ | * :doc:`alpha_complex_user` | * :doc:`alpha_complex_ref` |
+ +----------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
diff --git a/src/python/doc/alpha_complex_user.rst b/src/python/doc/alpha_complex_user.rst
index 96e267ef..cfd22742 100644
--- a/src/python/doc/alpha_complex_user.rst
+++ b/src/python/doc/alpha_complex_user.rst
@@ -9,7 +9,7 @@ Definition
.. include:: alpha_complex_sum.inc
-:doc:`AlphaComplex <alpha_complex_ref>` is constructing a :doc:`SimplexTree <simplex_tree_ref>` using
+:class:`~gudhi.AlphaComplex` is constructing a :doc:`SimplexTree <simplex_tree_ref>` using
`Delaunay Triangulation <http://doc.cgal.org/latest/Triangulation/index.html#Chapter_Triangulations>`_
:cite:`cgal:hdj-t-19b` from the `Computational Geometry Algorithms Library <http://www.cgal.org/>`_
:cite:`cgal:eb-19b`.
@@ -33,9 +33,6 @@ Remarks
Using :code:`precision = 'fast'` makes the computations slightly faster, and the combinatorics are still exact, but
the computation of filtration values can exceptionally be arbitrarily bad. In all cases, we still guarantee that the
output is a valid filtration (faces have a filtration value no larger than their cofaces).
-* For performances reasons, it is advised to use Alpha_complex with `CGAL <installation.html#cgal>`_ :math:`\geq` 5.0.0.
-* The vertices in the output simplex tree are not guaranteed to match the order of the input points. One can use
- :func:`~gudhi.AlphaComplex.get_point` to get the initial point back.
Example from points
-------------------
@@ -44,23 +41,22 @@ This example builds the alpha-complex from the given points:
.. testcode::
- import gudhi
- alpha_complex = gudhi.AlphaComplex(points=[[1, 1], [7, 0], [4, 6], [9, 6], [0, 14], [2, 19], [9, 17]])
+ from gudhi import AlphaComplex
+ ac = AlphaComplex(points=[[1, 1], [7, 0], [4, 6], [9, 6], [0, 14], [2, 19], [9, 17]])
+
+ stree = ac.create_simplex_tree()
+ print('Alpha complex is of dimension ', stree.dimension(), ' - ',
+ stree.num_simplices(), ' simplices - ', stree.num_vertices(), ' vertices.')
- simplex_tree = alpha_complex.create_simplex_tree()
- result_str = 'Alpha complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \
- repr(simplex_tree.num_simplices()) + ' simplices - ' + \
- repr(simplex_tree.num_vertices()) + ' vertices.'
- print(result_str)
fmt = '%s -> %.2f'
- for filtered_value in simplex_tree.get_filtration():
+ for filtered_value in stree.get_filtration():
print(fmt % tuple(filtered_value))
The output is:
.. testoutput::
- Alpha complex is of dimension 2 - 25 simplices - 7 vertices.
+ Alpha complex is of dimension 2 - 25 simplices - 7 vertices.
[0] -> 0.00
[1] -> 0.00
[2] -> 0.00
@@ -177,11 +173,75 @@ of speed-up, since we always first build the full filtered complex, so it is rec
:paramref:`~gudhi.AlphaComplex.create_simplex_tree.max_alpha_square`.
In the following example, a threshold of :math:`\alpha^2 = 32.0` is used.
+Weighted version
+^^^^^^^^^^^^^^^^
+
+A weighted version for Alpha complex is available. It is like a usual Alpha complex, but based on a
+`CGAL regular triangulation <https://doc.cgal.org/latest/Triangulation/index.html#title20>`_.
+
+This example builds the weighted alpha-complex of a small molecule, where atoms have different sizes.
+It is taken from
+`CGAL 3d weighted alpha shapes <https://doc.cgal.org/latest/Alpha_shapes_3/index.html#title13>`_.
+
+Then, it is asked to display information about the alpha complex.
+
+.. testcode::
+
+ from gudhi import AlphaComplex
+ wgt_ac = AlphaComplex(points=[[ 1., -1., -1.],
+ [-1., 1., -1.],
+ [-1., -1., 1.],
+ [ 1., 1., 1.],
+ [ 2., 2., 2.]],
+ weights = [4., 4., 4., 4., 1.])
+
+ stree = wgt_ac.create_simplex_tree()
+ print('Weighted alpha complex is of dimension ', stree.dimension(), ' - ',
+ stree.num_simplices(), ' simplices - ', stree.num_vertices(), ' vertices.')
+ fmt = '%s -> %.2f'
+ for simplex in stree.get_simplices():
+ print(fmt % tuple(simplex))
+
+The output is:
+
+.. testoutput::
+
+ Weighted alpha complex is of dimension 3 - 29 simplices - 5 vertices.
+ [0, 1, 2, 3] -> -1.00
+ [0, 1, 2] -> -1.33
+ [0, 1, 3, 4] -> 95.00
+ [0, 1, 3] -> -1.33
+ [0, 1, 4] -> 95.00
+ [0, 1] -> -2.00
+ [0, 2, 3, 4] -> 95.00
+ [0, 2, 3] -> -1.33
+ [0, 2, 4] -> 95.00
+ [0, 2] -> -2.00
+ [0, 3, 4] -> 23.00
+ [0, 3] -> -2.00
+ [0, 4] -> 23.00
+ [0] -> -4.00
+ [1, 2, 3, 4] -> 95.00
+ [1, 2, 3] -> -1.33
+ [1, 2, 4] -> 95.00
+ [1, 2] -> -2.00
+ [1, 3, 4] -> 23.00
+ [1, 3] -> -2.00
+ [1, 4] -> 23.00
+ [1] -> -4.00
+ [2, 3, 4] -> 23.00
+ [2, 3] -> -2.00
+ [2, 4] -> 23.00
+ [2] -> -4.00
+ [3, 4] -> -1.00
+ [3] -> -4.00
+ [4] -> -1.00
Example from OFF file
^^^^^^^^^^^^^^^^^^^^^
-This example builds the alpha complex from 300 random points on a 2-torus.
+This example builds the alpha complex from 300 random points on a 2-torus, given by an
+`OFF file <fileformats.html#off-file-format>`_.
Then, it computes the persistence diagram and displays it:
@@ -189,14 +249,10 @@ Then, it computes the persistence diagram and displays it:
:include-source:
import matplotlib.pyplot as plt
- import gudhi
- alpha_complex = gudhi.AlphaComplex(off_file=gudhi.__root_source_dir__ + \
- '/data/points/tore3D_300.off')
- simplex_tree = alpha_complex.create_simplex_tree()
- result_str = 'Alpha complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \
- repr(simplex_tree.num_simplices()) + ' simplices - ' + \
- repr(simplex_tree.num_vertices()) + ' vertices.'
- print(result_str)
- diag = simplex_tree.persistence()
- gudhi.plot_persistence_diagram(diag)
+ import gudhi as gd
+ off_file = gd.__root_source_dir__ + '/data/points/tore3D_300.off'
+ points = gd.read_points_from_off_file(off_file = off_file)
+ stree = gd.AlphaComplex(points = points).create_simplex_tree()
+ dgm = stree.persistence()
+ gd.plot_persistence_diagram(dgm, legend = True)
plt.show()
diff --git a/src/python/doc/datasets_generators.rst b/src/python/doc/datasets_generators.rst
index 6f36bce1..260c3882 100644
--- a/src/python/doc/datasets_generators.rst
+++ b/src/python/doc/datasets_generators.rst
@@ -42,7 +42,7 @@ Example
.. autofunction:: gudhi.datasets.generators.points.sphere
Points on a flat torus
-^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^^^^^
You can also generate points on a torus.
diff --git a/src/python/example/alpha_complex_diagram_persistence_from_off_file_example.py b/src/python/example/alpha_complex_diagram_persistence_from_off_file_example.py
index fe03be31..c96121a6 100755
--- a/src/python/example/alpha_complex_diagram_persistence_from_off_file_example.py
+++ b/src/python/example/alpha_complex_diagram_persistence_from_off_file_example.py
@@ -1,9 +1,7 @@
#!/usr/bin/env python
import argparse
-import errno
-import os
-import gudhi
+import gudhi as gd
""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ -
which is released under MIT.
@@ -41,33 +39,24 @@ parser.add_argument(
args = parser.parse_args()
-with open(args.file, "r") as f:
- first_line = f.readline()
- if (first_line == "OFF\n") or (first_line == "nOFF\n"):
- print("##############################################################")
- print("AlphaComplex creation from points read in a OFF file")
-
- alpha_complex = gudhi.AlphaComplex(off_file=args.file)
- if args.max_alpha_square is not None:
- print("with max_edge_length=", args.max_alpha_square)
- simplex_tree = alpha_complex.create_simplex_tree(
- max_alpha_square=args.max_alpha_square
- )
- else:
- simplex_tree = alpha_complex.create_simplex_tree()
-
- print("Number of simplices=", simplex_tree.num_simplices())
-
- diag = simplex_tree.persistence()
-
- print("betti_numbers()=", simplex_tree.betti_numbers())
-
- if args.no_diagram == False:
- import matplotlib.pyplot as plot
- gudhi.plot_persistence_diagram(diag, band=args.band)
- plot.show()
- else:
- raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT),
- args.file)
-
- f.close()
+print("##############################################################")
+print("AlphaComplex creation from points read in a OFF file")
+
+points = gd.read_points_from_off_file(off_file = args.file)
+alpha_complex = gd.AlphaComplex(points = points)
+if args.max_alpha_square is not None:
+ print("with max_edge_length=", args.max_alpha_square)
+ simplex_tree = alpha_complex.create_simplex_tree(
+ max_alpha_square=args.max_alpha_square
+ )
+else:
+ simplex_tree = alpha_complex.create_simplex_tree()
+
+print("Number of simplices=", simplex_tree.num_simplices())
+
+diag = simplex_tree.persistence()
+print("betti_numbers()=", simplex_tree.betti_numbers())
+if args.no_diagram == False:
+ import matplotlib.pyplot as plot
+ gd.plot_persistence_diagram(diag, band=args.band)
+ plot.show()
diff --git a/src/python/example/alpha_rips_persistence_bottleneck_distance.py b/src/python/example/alpha_rips_persistence_bottleneck_distance.py
index 3e12b0d5..6b97fb3b 100755
--- a/src/python/example/alpha_rips_persistence_bottleneck_distance.py
+++ b/src/python/example/alpha_rips_persistence_bottleneck_distance.py
@@ -1,10 +1,8 @@
#!/usr/bin/env python
-import gudhi
+import gudhi as gd
import argparse
import math
-import errno
-import os
import numpy as np
""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ -
@@ -37,70 +35,60 @@ parser.add_argument("-t", "--threshold", type=float, default=0.5)
parser.add_argument("-d", "--max_dimension", type=int, default=1)
args = parser.parse_args()
-with open(args.file, "r") as f:
- first_line = f.readline()
- if (first_line == "OFF\n") or (first_line == "nOFF\n"):
- point_cloud = gudhi.read_points_from_off_file(off_file=args.file)
- print("##############################################################")
- print("RipsComplex creation from points read in a OFF file")
+point_cloud = gd.read_points_from_off_file(off_file=args.file)
+print("##############################################################")
+print("RipsComplex creation from points read in a OFF file")
- message = "RipsComplex with max_edge_length=" + repr(args.threshold)
- print(message)
+message = "RipsComplex with max_edge_length=" + repr(args.threshold)
+print(message)
- rips_complex = gudhi.RipsComplex(
- points=point_cloud, max_edge_length=args.threshold
- )
-
- rips_stree = rips_complex.create_simplex_tree(
- max_dimension=args.max_dimension)
-
- message = "Number of simplices=" + repr(rips_stree.num_simplices())
- print(message)
-
- rips_stree.compute_persistence()
-
- print("##############################################################")
- print("AlphaComplex creation from points read in a OFF file")
-
- message = "AlphaComplex with max_edge_length=" + repr(args.threshold)
- print(message)
-
- alpha_complex = gudhi.AlphaComplex(points=point_cloud)
- alpha_stree = alpha_complex.create_simplex_tree(
- max_alpha_square=(args.threshold * args.threshold)
- )
-
- message = "Number of simplices=" + repr(alpha_stree.num_simplices())
- print(message)
+rips_complex = gd.RipsComplex(
+ points=point_cloud, max_edge_length=args.threshold
+)
- alpha_stree.compute_persistence()
+rips_stree = rips_complex.create_simplex_tree(
+ max_dimension=args.max_dimension)
- max_b_distance = 0.0
- for dim in range(args.max_dimension):
- # Alpha persistence values needs to be transform because filtration
- # values are alpha square values
- alpha_intervals = np.sqrt(alpha_stree.persistence_intervals_in_dimension(dim))
+message = "Number of simplices=" + repr(rips_stree.num_simplices())
+print(message)
- rips_intervals = rips_stree.persistence_intervals_in_dimension(dim)
- bottleneck_distance = gudhi.bottleneck_distance(
- rips_intervals, alpha_intervals
- )
- message = (
- "In dimension "
- + repr(dim)
- + ", bottleneck distance = "
- + repr(bottleneck_distance)
- )
- print(message)
- max_b_distance = max(bottleneck_distance, max_b_distance)
+rips_stree.compute_persistence()
- print("==============================================================")
- message = "Bottleneck distance is " + repr(max_b_distance)
- print(message)
+print("##############################################################")
+print("AlphaComplex creation from points read in a OFF file")
- else:
- raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT),
- args.file)
+message = "AlphaComplex with max_edge_length=" + repr(args.threshold)
+print(message)
+alpha_complex = gd.AlphaComplex(points=point_cloud)
+alpha_stree = alpha_complex.create_simplex_tree(
+ max_alpha_square=(args.threshold * args.threshold)
+)
- f.close()
+message = "Number of simplices=" + repr(alpha_stree.num_simplices())
+print(message)
+
+alpha_stree.compute_persistence()
+
+max_b_distance = 0.0
+for dim in range(args.max_dimension):
+ # Alpha persistence values needs to be transform because filtration
+ # values are alpha square values
+ alpha_intervals = np.sqrt(alpha_stree.persistence_intervals_in_dimension(dim))
+
+ rips_intervals = rips_stree.persistence_intervals_in_dimension(dim)
+ bottleneck_distance = gd.bottleneck_distance(
+ rips_intervals, alpha_intervals
+ )
+ message = (
+ "In dimension "
+ + repr(dim)
+ + ", bottleneck distance = "
+ + repr(bottleneck_distance)
+ )
+ print(message)
+ max_b_distance = max(bottleneck_distance, max_b_distance)
+
+print("==============================================================")
+message = "Bottleneck distance is " + repr(max_b_distance)
+print(message)
diff --git a/src/python/example/plot_alpha_complex.py b/src/python/example/plot_alpha_complex.py
index 99c18a7c..0924619b 100755
--- a/src/python/example/plot_alpha_complex.py
+++ b/src/python/example/plot_alpha_complex.py
@@ -1,8 +1,9 @@
#!/usr/bin/env python
import numpy as np
-import gudhi
-ac = gudhi.AlphaComplex(off_file='../../data/points/tore3D_1307.off')
+import gudhi as gd
+points = gd.read_points_from_off_file(off_file = '../../data/points/tore3D_1307.off')
+ac = gd.AlphaComplex(points = points)
st = ac.create_simplex_tree()
points = np.array([ac.get_point(i) for i in range(st.num_vertices())])
# We want to plot the alpha-complex with alpha=0.1.
diff --git a/src/python/gudhi/alpha_complex.pyx b/src/python/gudhi/alpha_complex.pyx
index ea128743..a4888914 100644
--- a/src/python/gudhi/alpha_complex.pyx
+++ b/src/python/gudhi/alpha_complex.pyx
@@ -16,7 +16,7 @@ from libcpp.utility cimport pair
from libcpp.string cimport string
from libcpp cimport bool
from libc.stdint cimport intptr_t
-import os
+import warnings
from gudhi.simplex_tree cimport *
from gudhi.simplex_tree import SimplexTree
@@ -28,66 +28,72 @@ __license__ = "GPL v3"
cdef extern from "Alpha_complex_interface.h" namespace "Gudhi":
cdef cppclass Alpha_complex_interface "Gudhi::alpha_complex::Alpha_complex_interface":
- Alpha_complex_interface(vector[vector[double]] points, bool fast_version, bool exact_version) nogil except +
+ Alpha_complex_interface(vector[vector[double]] points, vector[double] weights, bool fast_version, bool exact_version) nogil except +
vector[double] get_point(int vertex) nogil except +
void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square, bool default_filtration_value) nogil except +
# AlphaComplex python interface
cdef class AlphaComplex:
- """AlphaComplex is a simplicial complex constructed from the finite cells
- of a Delaunay Triangulation.
+ """AlphaComplex is a simplicial complex constructed from the finite cells of a Delaunay Triangulation.
- The filtration value of each simplex is computed as the square of the
- circumradius of the simplex if the circumsphere is empty (the simplex is
- then said to be Gabriel), and as the minimum of the filtration values of
- the codimension 1 cofaces that make it not Gabriel otherwise.
+ The filtration value of each simplex is computed as the square of the circumradius of the simplex if the
+ circumsphere is empty (the simplex is then said to be Gabriel), and as the minimum of the filtration values of the
+ codimension 1 cofaces that make it not Gabriel otherwise.
- All simplices that have a filtration value strictly greater than a given
- alpha squared value are not inserted into the complex.
+ All simplices that have a filtration value strictly greater than a given alpha squared value are not inserted into
+ the complex.
.. note::
- When Alpha_complex is constructed with an infinite value of alpha, the
- complex is a Delaunay complex.
-
+ When Alpha_complex is constructed with an infinite value of alpha, the complex is a Delaunay complex.
"""
cdef Alpha_complex_interface * this_ptr
# Fake constructor that does nothing but documenting the constructor
- def __init__(self, points=None, off_file='', precision='safe'):
+ def __init__(self, points=[], off_file='', weights=None, precision='safe'):
"""AlphaComplex constructor.
:param points: A list of points in d-Dimension.
- :type points: list of list of double
-
- Or
+ :type points: Iterable[Iterable[float]]
- :param off_file: An OFF file style name.
+ :param off_file: **[deprecated]** An `OFF file style <fileformats.html#off-file-format>`_ name.
+ If an `off_file` is given with `points` as arguments, only points from the file are taken into account.
:type off_file: string
+ :param weights: A list of weights. If set, the number of weights must correspond to the number of points.
+ :type weights: Iterable[float]
+
:param precision: Alpha complex precision can be 'fast', 'safe' or 'exact'. Default is 'safe'.
:type precision: string
+
+ :raises FileNotFoundError: **[deprecated]** If `off_file` is set but not found.
+ :raises ValueError: In case of inconsistency between the number of points and weights.
"""
# The real cython constructor
- def __cinit__(self, points = None, off_file = '', precision = 'safe'):
+ def __cinit__(self, points = [], off_file = '', weights=None, precision = 'safe'):
assert precision in ['fast', 'safe', 'exact'], "Alpha complex precision can only be 'fast', 'safe' or 'exact'"
cdef bool fast = precision == 'fast'
cdef bool exact = precision == 'exact'
- cdef vector[vector[double]] pts
if off_file:
- if os.path.isfile(off_file):
- points = read_points_from_off_file(off_file = off_file)
- else:
- print("file " + off_file + " not found.")
- if points is None:
- # Empty Alpha construction
- points=[]
+ warnings.warn("off_file is a deprecated parameter, please consider using gudhi.read_points_from_off_file",
+ DeprecationWarning)
+ points = read_points_from_off_file(off_file = off_file)
+
+ # weights are set but is inconsistent with the number of points
+ if weights != None and len(weights) != len(points):
+ raise ValueError("Inconsistency between the number of points and weights")
+
+ # need to copy the points to use them without the gil
+ cdef vector[vector[double]] pts
+ cdef vector[double] wgts
pts = points
+ if weights != None:
+ wgts = weights
with nogil:
- self.this_ptr = new Alpha_complex_interface(pts, fast, exact)
+ self.this_ptr = new Alpha_complex_interface(pts, wgts, fast, exact)
def __dealloc__(self):
if self.this_ptr != NULL:
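The constructor changes above (the weights parameter, the off_file deprecation, and the length check) can be exercised with a short sketch; the point and weight values are arbitrary and the OFF path is illustrative:

    from gudhi import AlphaComplex, read_points_from_off_file

    # Weighted construction: one weight per point is required.
    pts = [[1., -1., -1.], [-1., 1., -1.], [-1., -1., 1.], [1., 1., 1.]]
    stree = AlphaComplex(points=pts, weights=[4., 4., 4., 1.]).create_simplex_tree()

    try:
        AlphaComplex(points=pts, weights=[4., 4.])  # 4 points but only 2 weights
    except ValueError as err:
        print(err)  # Inconsistency between the number of points and weights

    # off_file= now emits a DeprecationWarning; read the points explicitly instead.
    points = read_points_from_off_file(off_file='../../data/points/tore3D_300.off')
    stree2 = AlphaComplex(points=points).create_simplex_tree()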
diff --git a/src/python/gudhi/cubical_complex.pyx b/src/python/gudhi/cubical_complex.pyx
index 97c69a2d..8e244bb8 100644
--- a/src/python/gudhi/cubical_complex.pyx
+++ b/src/python/gudhi/cubical_complex.pyx
@@ -281,4 +281,8 @@ cdef class CubicalComplex:
launched first.
"""
assert self.pcohptr != NULL, "compute_persistence() must be called before persistence_intervals_in_dimension()"
- return np.array(self.pcohptr.intervals_in_dimension(dimension))
+ piid = np.array(self.pcohptr.intervals_in_dimension(dimension))
+ # Workaround https://github.com/GUDHI/gudhi-devel/issues/507
+ if len(piid) == 0:
+ return np.empty(shape = [0, 2])
+ return piid
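
[Editor's note] The visible effect of the workaround: a dimension with no intervals now yields a (0, 2)-shaped array instead of a shapeless empty one, so downstream code can index columns unconditionally. A quick check mirroring the test added further down:

    import numpy as np
    from gudhi import CubicalComplex

    cc = CubicalComplex(dimensions=[3, 3],
                        top_dimensional_cells=[1, 2, 3, 4, 5, 6, 7, 8, 9])
    cc.compute_persistence()
    print(cc.persistence_intervals_in_dimension(1).shape)  # (0, 2), not (0,)
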
diff --git a/src/python/gudhi/datasets/generators/_points.cc b/src/python/gudhi/datasets/generators/_points.cc
index 70ce4925..82fea25b 100644
--- a/src/python/gudhi/datasets/generators/_points.cc
+++ b/src/python/gudhi/datasets/generators/_points.cc
@@ -96,7 +96,6 @@ PYBIND11_MODULE(_points, m) {
:type radius: float
:param sample: The sample type. Default and only available value is `"random"`.
:type sample: string
- :rtype: numpy array of float
:returns: the generated points on a sphere.
)pbdoc");
@@ -111,10 +110,12 @@ PYBIND11_MODULE(_points, m) {
:type dim: integer
:param sample: The sample type. Available values are: `"random"` and `"grid"`. Default value is `"random"`.
:type sample: string
- :rtype: numpy array of float.
- The shape of returned numpy array is :
- if sample is 'random' : (n_samples, 2*dim).
- if sample is 'grid' : (⌊n_samples**(1./dim)⌋**dim, 2*dim), where shape[0] is rounded down to the closest perfect 'dim'th power.
:returns: the generated points on a torus.
+
+ The shape of returned numpy array is:
+
+ If sample is 'random': (n_samples, 2*dim).
+
+ If sample is 'grid': (⌊n_samples**(1./dim)⌋**dim, 2*dim), where shape[0] is rounded down to the closest perfect 'dim'th power.
)pbdoc");
}
diff --git a/src/python/gudhi/datasets/generators/points.py b/src/python/gudhi/datasets/generators/points.py
index cf97777d..9bb2799d 100644
--- a/src/python/gudhi/datasets/generators/points.py
+++ b/src/python/gudhi/datasets/generators/points.py
@@ -19,15 +19,15 @@ def _generate_random_points_on_torus(n_samples, dim):
# Based on angles, construct points of size n_samples*dim on a circle and reshape the result in a n_samples*2*dim array
array_points = np.column_stack([np.cos(alpha), np.sin(alpha)]).reshape(-1, 2*dim)
-
+
return array_points
def _generate_grid_points_on_torus(n_samples, dim):
-
+
# Generate points on a dim-torus as a grid
n_samples_grid = int((n_samples+.5)**(1./dim)) # add .5 to avoid rounding down with numerical approximations
alpha = np.linspace(0, 2*np.pi, n_samples_grid, endpoint=False)
-
+
array_points = np.column_stack([np.cos(alpha), np.sin(alpha)])
array_points_idx = np.empty([n_samples_grid]*dim + [dim], dtype=int)
for i, x in enumerate(np.ix_(*([np.arange(n_samples_grid)]*dim))):
@@ -35,16 +35,19 @@ def _generate_grid_points_on_torus(n_samples, dim):
return array_points[array_points_idx].reshape(-1, 2*dim)
def torus(n_samples, dim, sample='random'):
- """
+ """
Generate points on a flat dim-torus in R^2dim either randomly or on a grid
-
+
:param n_samples: The number of points to be generated.
:param dim: The dimension of the torus on which points would be generated in R^2*dim.
:param sample: The sample type of the generated points. Can be 'random' or 'grid'.
:returns: numpy array containing the generated points on a torus.
- The shape of returned numpy array is:
- if sample is 'random' : (n_samples, 2*dim).
- if sample is 'grid' : (⌊n_samples**(1./dim)⌋**dim, 2*dim), where shape[0] is rounded down to the closest perfect 'dim'th power.
+
+ The shape of returned numpy array is:
+
+ If sample is 'random': (n_samples, 2*dim).
+
+ If sample is 'grid': (⌊n_samples**(1./dim)⌋**dim, 2*dim), where shape[0] is rounded down to the closest perfect 'dim'th power.
"""
if sample == 'random':
# Generate points randomly
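
[Editor's note] A quick sanity check of the shape rule documented above, using the public generator; n_samples is chosen so the grid size is exact:

    from gudhi.datasets.generators import points

    pts = points.torus(n_samples=64, dim=3, sample='grid')
    # floor(64**(1/3)) = 4, so the grid holds 4**3 = 64 points in R^(2*3)
    print(pts.shape)  # (64, 6)
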
diff --git a/src/python/gudhi/periodic_cubical_complex.pyx b/src/python/gudhi/periodic_cubical_complex.pyx
index ef1d3080..6c21e902 100644
--- a/src/python/gudhi/periodic_cubical_complex.pyx
+++ b/src/python/gudhi/periodic_cubical_complex.pyx
@@ -280,4 +280,8 @@ cdef class PeriodicCubicalComplex:
launched first.
"""
assert self.pcohptr != NULL, "compute_persistence() must be called before persistence_intervals_in_dimension()"
- return np.array(self.pcohptr.intervals_in_dimension(dimension))
+ piid = np.array(self.pcohptr.intervals_in_dimension(dimension))
+ # Workaround https://github.com/GUDHI/gudhi-devel/issues/507
+ if len(piid) == 0:
+ return np.empty(shape = [0, 2])
+ return piid
diff --git a/src/python/gudhi/representations/vector_methods.py b/src/python/gudhi/representations/vector_methods.py
index 84bc99a2..f8078d03 100644
--- a/src/python/gudhi/representations/vector_methods.py
+++ b/src/python/gudhi/representations/vector_methods.py
@@ -1,14 +1,17 @@
# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
-# Author(s): Mathieu Carrière, Martin Royer
+# Author(s): Mathieu Carrière, Martin Royer, Gard Spreemann
#
# Copyright (C) 2018-2020 Inria
#
# Modification(s):
# - 2020/06 Martin: ATOL integration
+# - 2020/12 Gard: A more flexible Betti curve class capable of computing exact curves.
+# - 2021/11 Vincent Rouvreau: factorize _automatic_sample_range
import numpy as np
from sklearn.base import BaseEstimator, TransformerMixin
+from sklearn.exceptions import NotFittedError
from sklearn.preprocessing import MinMaxScaler, MaxAbsScaler
from sklearn.neighbors import DistanceMetric
from sklearn.metrics import pairwise
@@ -45,10 +48,14 @@ class PersistenceImage(BaseEstimator, TransformerMixin):
y (n x 1 array): persistence diagram labels (unused).
"""
if np.isnan(np.array(self.im_range)).any():
- new_X = BirthPersistenceTransform().fit_transform(X)
- pre = DiagramScaler(use=True, scalers=[([0], MinMaxScaler()), ([1], MinMaxScaler())]).fit(new_X,y)
- [mx,my],[Mx,My] = [pre.scalers[0][1].data_min_[0], pre.scalers[1][1].data_min_[0]], [pre.scalers[0][1].data_max_[0], pre.scalers[1][1].data_max_[0]]
- self.im_range = np.where(np.isnan(np.array(self.im_range)), np.array([mx, Mx, my, My]), np.array(self.im_range))
+ try:
+ new_X = BirthPersistenceTransform().fit_transform(X)
+ pre = DiagramScaler(use=True, scalers=[([0], MinMaxScaler()), ([1], MinMaxScaler())]).fit(new_X,y)
+ [mx,my],[Mx,My] = [pre.scalers[0][1].data_min_[0], pre.scalers[1][1].data_min_[0]], [pre.scalers[0][1].data_max_[0], pre.scalers[1][1].data_max_[0]]
+ self.im_range = np.where(np.isnan(np.array(self.im_range)), np.array([mx, Mx, my, My]), np.array(self.im_range))
+ except ValueError:
+ # Empty persistence diagram case - https://github.com/GUDHI/gudhi-devel/issues/507
+ pass
return self
def transform(self, X):
@@ -94,6 +101,28 @@ class PersistenceImage(BaseEstimator, TransformerMixin):
"""
return self.fit_transform([diag])[0,:]
+def _automatic_sample_range(sample_range, X, y):
+ """
+ Compute and return the sample range from the persistence diagrams if one of the sample_range values is numpy.nan.
+
+ Parameters:
+ sample_range (a numpy array of 2 float): minimum and maximum of all piecewise-linear function domains, of
+ the form [x_min, x_max].
+ X (list of n x 2 numpy arrays): input persistence diagrams.
+ y (n x 1 array): persistence diagram labels (unused).
+ """
+ nan_in_range = np.isnan(sample_range)
+ if nan_in_range.any():
+ try:
+ pre = DiagramScaler(use=True, scalers=[([0], MinMaxScaler()), ([1], MinMaxScaler())]).fit(X,y)
+ [mx,my] = [pre.scalers[0][1].data_min_[0], pre.scalers[1][1].data_min_[0]]
+ [Mx,My] = [pre.scalers[0][1].data_max_[0], pre.scalers[1][1].data_max_[0]]
+ return np.where(nan_in_range, np.array([mx, My]), sample_range)
+ except ValueError:
+ # Empty persistence diagram case - https://github.com/GUDHI/gudhi-devel/issues/507
+ pass
+ return sample_range
+
class Landscape(BaseEstimator, TransformerMixin):
"""
This is a class for computing persistence landscapes from a list of persistence diagrams. A persistence landscape is a collection of 1D piecewise-linear functions computed from the rank function associated to the persistence diagram. These piecewise-linear functions are then sampled evenly on a given range and the corresponding vectors of samples are concatenated and returned. See http://jmlr.org/papers/v16/bubenik15a.html for more details.
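
[Editor's note] For intuition about the `_automatic_sample_range` helper factorized above: the NaN endpoints are replaced by the global minimum birth and maximum death over all diagrams. A plain-NumPy sketch of the effect (assuming scikit-learn's MinMaxScaler semantics, and ignoring the empty-diagram fallback):

    import numpy as np

    X = [np.array([[0., 2.], [1., 5.]]), np.array([[3., 4.]])]
    births = np.concatenate([d[:, 0] for d in X])
    deaths = np.concatenate([d[:, 1] for d in X])
    print([births.min(), deaths.max()])  # [0.0, 5.0] replaces [nan, nan]
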
@@ -119,10 +148,7 @@ class Landscape(BaseEstimator, TransformerMixin):
X (list of n x 2 numpy arrays): input persistence diagrams.
y (n x 1 array): persistence diagram labels (unused).
"""
- if self.nan_in_range.any():
- pre = DiagramScaler(use=True, scalers=[([0], MinMaxScaler()), ([1], MinMaxScaler())]).fit(X,y)
- [mx,my],[Mx,My] = [pre.scalers[0][1].data_min_[0], pre.scalers[1][1].data_min_[0]], [pre.scalers[0][1].data_max_[0], pre.scalers[1][1].data_max_[0]]
- self.sample_range = np.where(self.nan_in_range, np.array([mx, My]), np.array(self.sample_range))
+ self.sample_range = _automatic_sample_range(np.array(self.sample_range), X, y)
return self
def transform(self, X):
@@ -218,10 +244,7 @@ class Silhouette(BaseEstimator, TransformerMixin):
X (list of n x 2 numpy arrays): input persistence diagrams.
y (n x 1 array): persistence diagram labels (unused).
"""
- if np.isnan(np.array(self.sample_range)).any():
- pre = DiagramScaler(use=True, scalers=[([0], MinMaxScaler()), ([1], MinMaxScaler())]).fit(X,y)
- [mx,my],[Mx,My] = [pre.scalers[0][1].data_min_[0], pre.scalers[1][1].data_min_[0]], [pre.scalers[0][1].data_max_[0], pre.scalers[1][1].data_max_[0]]
- self.sample_range = np.where(np.isnan(np.array(self.sample_range)), np.array([mx, My]), np.array(self.sample_range))
+ self.sample_range = _automatic_sample_range(np.array(self.sample_range), X, y)
return self
def transform(self, X):
@@ -285,70 +308,162 @@ class Silhouette(BaseEstimator, TransformerMixin):
"""
return self.fit_transform([diag])[0,:]
+
class BettiCurve(BaseEstimator, TransformerMixin):
"""
- This is a class for computing Betti curves from a list of persistence diagrams. A Betti curve is a 1D piecewise-constant function obtained from the rank function. It is sampled evenly on a given range and the vector of samples is returned. See https://www.researchgate.net/publication/316604237_Time_Series_Classification_via_Topological_Data_Analysis for more details.
+ Compute Betti curves from persistence diagrams. There are several modes of operation: with a given resolution (with or without a sample_range), with a predefined grid, and with neither. With a predefined grid, the class computes the Betti numbers at those grid points. Without a predefined grid, if the resolution is set to None, the class can be fit to a list of persistence diagrams and produces a grid consisting of (at least) the filtration values at which at least one of those diagrams changes Betti numbers; the Betti numbers are then computed at those grid points, which yields the exact Betti curve on the entire real line. Otherwise, if a resolution is given, the Betti curve is sampled evenly, using either the given sample_range or a range computed from the persistence diagrams.
"""
- def __init__(self, resolution=100, sample_range=[np.nan, np.nan]):
+
+ def __init__(self, resolution=100, sample_range=[np.nan, np.nan], predefined_grid=None):
"""
Constructor for the BettiCurve class.
Parameters:
resolution (int): number of sample for the piecewise-constant function (default 100).
sample_range ([double, double]): minimum and maximum of the piecewise-constant function domain, of the form [x_min, x_max] (default [numpy.nan, numpy.nan]). It is the interval on which samples will be drawn evenly. If one of the values is numpy.nan, it can be computed from the persistence diagrams with the fit() method.
+ predefined_grid (1d array or None, default=None): Predefined filtration grid points at which to compute the Betti curves. Must be strictly ordered. Infinities are ok. If None (default), and resolution is given, the grid will be uniform from x_min to x_max in 'resolution' steps, otherwise a grid will be computed that captures all changes in Betti numbers in the provided data.
+
+ Attributes:
+ grid_ (1d array): The grid on which the Betti numbers are computed. If predefined_grid was specified, `grid_` will always be that grid, independently of data. If not, the grid is fitted to capture all filtration values at which the Betti numbers change.
+
+ Examples
+ --------
+ If pd is a persistence diagram and xs is a nonempty grid of finite values such that xs[0] >= pd.min(), then the results of:
+
+ >>> bc = BettiCurve(predefined_grid=xs) # doctest: +SKIP
+ >>> result = bc(pd) # doctest: +SKIP
+
+ and
+
+ >>> from scipy.interpolate import interp1d # doctest: +SKIP
+ >>> bc = BettiCurve(resolution=None, predefined_grid=None) # doctest: +SKIP
+ >>> bettis = bc.fit_transform([pd]) # doctest: +SKIP
+ >>> interp = interp1d(bc.grid_, bettis[0, :], kind="previous", fill_value="extrapolate") # doctest: +SKIP
+ >>> result = np.array(interp(xs), dtype=int) # doctest: +SKIP
+
+ are the same.
"""
- self.resolution, self.sample_range = resolution, sample_range
- def fit(self, X, y=None):
+ if (predefined_grid is not None) and (not isinstance(predefined_grid, np.ndarray)):
+ raise ValueError("Expected predefined_grid as array or None.")
+
+ self.predefined_grid = predefined_grid
+ self.resolution = resolution
+ self.sample_range = sample_range
+
+ def is_fitted(self):
+ return hasattr(self, "grid_")
+
+ def fit(self, X, y = None):
"""
- Fit the BettiCurve class on a list of persistence diagrams: if any of the values in **sample_range** is numpy.nan, replace it with the corresponding value computed on the given list of persistence diagrams.
+ Fit the BettiCurve class on a list of persistence diagrams: if any of the values in **sample_range** is numpy.nan, replace it with the corresponding value computed on the given list of persistence diagrams. When no predefined grid is provided and the resolution is set to None, compute a filtration grid that captures all changes in Betti numbers across the given persistence diagrams.
Parameters:
- X (list of n x 2 numpy arrays): input persistence diagrams.
- y (n x 1 array): persistence diagram labels (unused).
+ X (list of 2d arrays): Persistence diagrams.
+ y (None): Ignored.
"""
- if np.isnan(np.array(self.sample_range)).any():
- pre = DiagramScaler(use=True, scalers=[([0], MinMaxScaler()), ([1], MinMaxScaler())]).fit(X,y)
- [mx,my],[Mx,My] = [pre.scalers[0][1].data_min_[0], pre.scalers[1][1].data_min_[0]], [pre.scalers[0][1].data_max_[0], pre.scalers[1][1].data_max_[0]]
- self.sample_range = np.where(np.isnan(np.array(self.sample_range)), np.array([mx, My]), np.array(self.sample_range))
+
+ if self.predefined_grid is None:
+ if self.resolution is None: # Flexible/exact version
+ events = np.unique(np.concatenate([pd.flatten() for pd in X] + [[-np.inf]], axis=0))
+ self.grid_ = np.array(events)
+ else:
+ self.sample_range = _automatic_sample_range(np.array(self.sample_range), X, y)
+ self.grid_ = np.linspace(self.sample_range[0], self.sample_range[1], self.resolution)
+ else:
+ self.grid_ = self.predefined_grid # Get the predefined grid from user
+
return self
def transform(self, X):
"""
- Compute the Betti curve for each persistence diagram individually and concatenate the results.
+ Compute Betti curves.
Parameters:
- X (list of n x 2 numpy arrays): input persistence diagrams.
-
+ X (list of 2d arrays): Persistence diagrams.
+
Returns:
- numpy array with shape (number of diagrams) x (**resolution**): output Betti curves.
+ `len(X) x len(self.grid_)` array of ints: Betti numbers of the given persistence diagrams at the grid points given in `self.grid_`.
"""
- Xfit = []
- x_values = np.linspace(self.sample_range[0], self.sample_range[1], self.resolution)
- step_x = x_values[1] - x_values[0]
- for diagram in X:
- diagram_int = np.clip(np.ceil((diagram[:,:2] - self.sample_range[0]) / step_x), 0, self.resolution).astype(int)
- bc = np.zeros(self.resolution)
- for interval in diagram_int:
- bc[interval[0]:interval[1]] += 1
- Xfit.append(np.reshape(bc,[1,-1]))
+ if not self.is_fitted():
+ raise NotFittedError("Not fitted.")
- Xfit = np.concatenate(Xfit, 0)
+ if not X:
+ X = [np.zeros((0, 2))]
+
+ N = len(X)
- return Xfit
+ events = np.concatenate([pd.flatten(order="F") for pd in X], axis=0)
+ sorting = np.argsort(events)
+ offsets = np.zeros(1 + N, dtype=int)
+ for i in range(0, N):
+ offsets[i+1] = offsets[i] + 2*X[i].shape[0]
+ starts = offsets[0:N]
+ ends = offsets[1:N + 1] - 1
- def __call__(self, diag):
+ bettis = [[0] for i in range(0, N)]
+
+ i = 0
+ for x in self.grid_:
+ while i < len(sorting) and events[sorting[i]] <= x:
+ j = np.searchsorted(ends, sorting[i])
+ delta = 1 if sorting[i] - starts[j] < len(X[j]) else -1
+ bettis[j][-1] += delta
+ i += 1
+ for k in range(0, N):
+ bettis[k].append(bettis[k][-1])
+
+ return np.array(bettis, dtype=int)[:, 0:-1]
+
+ def fit_transform(self, X):
+ """
+ The result is the same as fit(X) followed by transform(X), but potentially faster.
"""
- Apply BettiCurve on a single persistence diagram and outputs the result.
- Parameters:
- diag (n x 2 numpy array): input persistence diagram.
+ if self.predefined_grid is None and self.resolution is None:
+ if not X:
+ X = [np.zeros((0, 2))]
- Returns:
- numpy array with shape (**resolution**): output Betti curve.
+ N = len(X)
+
+ events = np.concatenate([pd.flatten(order="F") for pd in X], axis=0)
+ sorting = np.argsort(events)
+ offsets = np.zeros(1 + N, dtype=int)
+ for i in range(0, N):
+ offsets[i+1] = offsets[i] + 2*X[i].shape[0]
+ starts = offsets[0:N]
+ ends = offsets[1:N + 1] - 1
+
+ xs = [-np.inf]
+ bettis = [[0] for i in range(0, N)]
+
+ for i in sorting:
+ j = np.searchsorted(ends, i)
+ delta = 1 if i - starts[j] < len(X[j]) else -1
+ if events[i] == xs[-1]:
+ bettis[j][-1] += delta
+ else:
+ xs.append(events[i])
+ for k in range(0, j):
+ bettis[k].append(bettis[k][-1])
+ bettis[j].append(bettis[j][-1] + delta)
+ for k in range(j+1, N):
+ bettis[k].append(bettis[k][-1])
+
+ self.grid_ = np.array(xs)
+ return np.array(bettis, dtype=int)
+
+ else:
+ return self.fit(X).transform(X)
+
+ def __call__(self, diag):
"""
- return self.fit_transform([diag])[0,:]
+ Shorthand for transform on a single persistence diagram.
+ """
+ return self.fit_transform([diag])[0, :]
+
+
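
[Editor's note] The sweep in transform() above is an optimized equivalent of the naive rank count: the Betti number at x is the number of intervals [birth, death) containing x. A minimal reference sketch (illustration only, not the library code) that the sweep must agree with:

    import numpy as np

    def betti_at(pd, xs):
        # Betti number at x = number of intervals with birth <= x < death
        pd = np.asarray(pd)
        return np.array([np.sum((pd[:, 0] <= x) & (x < pd[:, 1])) for x in xs],
                        dtype=int)

    pd = np.array([[0., 2.], [1., np.inf]])
    print(betti_at(pd, [-1., 0.5, 1.5, 3.]))  # [0 1 2 1]
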
class Entropy(BaseEstimator, TransformerMixin):
"""
@@ -374,10 +489,7 @@ class Entropy(BaseEstimator, TransformerMixin):
X (list of n x 2 numpy arrays): input persistence diagrams.
y (n x 1 array): persistence diagram labels (unused).
"""
- if np.isnan(np.array(self.sample_range)).any():
- pre = DiagramScaler(use=True, scalers=[([0], MinMaxScaler()), ([1], MinMaxScaler())]).fit(X,y)
- [mx,my],[Mx,My] = [pre.scalers[0][1].data_min_[0], pre.scalers[1][1].data_min_[0]], [pre.scalers[0][1].data_max_[0], pre.scalers[1][1].data_max_[0]]
- self.sample_range = np.where(np.isnan(np.array(self.sample_range)), np.array([mx, My]), np.array(self.sample_range))
+ self.sample_range = _automatic_sample_range(np.array(self.sample_range), X, y)
return self
def transform(self, X):
@@ -396,9 +508,13 @@ class Entropy(BaseEstimator, TransformerMixin):
new_X = BirthPersistenceTransform().fit_transform(X)
for i in range(num_diag):
-
orig_diagram, diagram, num_pts_in_diag = X[i], new_X[i], X[i].shape[0]
- new_diagram = DiagramScaler(use=True, scalers=[([1], MaxAbsScaler())]).fit_transform([diagram])[0]
+ try:
+ new_diagram = DiagramScaler(use=True, scalers=[([1], MaxAbsScaler())]).fit_transform([diagram])[0]
+ except ValueError:
+ # Empty persistence diagram case - https://github.com/GUDHI/gudhi-devel/issues/507
+ assert len(diagram) == 0
+ new_diagram = np.empty(shape = [0, 2])
if self.mode == "scalar":
ent = - np.sum( np.multiply(new_diagram[:,1], np.log(new_diagram[:,1])) )
@@ -412,12 +528,11 @@ class Entropy(BaseEstimator, TransformerMixin):
max_idx = np.clip(np.ceil((py - self.sample_range[0]) / step_x).astype(int), 0, self.resolution)
for k in range(min_idx, max_idx):
ent[k] += (-1) * new_diagram[j,1] * np.log(new_diagram[j,1])
- if self.normalized:
- ent = ent / np.linalg.norm(ent, ord=1)
- Xfit.append(np.reshape(ent,[1,-1]))
-
- Xfit = np.concatenate(Xfit, 0)
+ if self.normalized:
+ ent = ent / np.linalg.norm(ent, ord=1)
+ Xfit.append(np.reshape(ent,[1,-1]))
+ Xfit = np.concatenate(Xfit, axis=0)
return Xfit
def __call__(self, diag):
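
[Editor's note] For intuition, mode="scalar" is a variant of the textbook persistence entropy E = -sum(p_i * log(p_i)); note that the class normalizes persistences with MaxAbsScaler rather than by their sum, so its scalar value differs from the textbook one. A small worked example of the textbook quantity:

    import numpy as np

    pd = np.array([[0., 2.], [0., 2.], [1., 2.]])
    pers = pd[:, 1] - pd[:, 0]     # [2. 2. 1.]
    p = pers / pers.sum()          # [0.4 0.4 0.2]
    print(-np.sum(p * np.log(p)))  # ~1.0549
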
@@ -478,7 +593,13 @@ class TopologicalVector(BaseEstimator, TransformerMixin):
diagram, num_pts_in_diag = X[i], X[i].shape[0]
pers = 0.5 * (diagram[:,1]-diagram[:,0])
min_pers = np.minimum(pers,np.transpose(pers))
- distances = DistanceMetric.get_metric("chebyshev").pairwise(diagram)
+ # Works fine with sklearn 1.0, but a ValueError exception is thrown by older versions
+ try:
+ distances = DistanceMetric.get_metric("chebyshev").pairwise(diagram)
+ except ValueError:
+ # Empty persistence diagram case - https://github.com/GUDHI/gudhi-devel/issues/507
+ assert len(diagram) == 0
+ distances = np.empty(shape = [0, 0])
vect = np.flip(np.sort(np.triu(np.minimum(distances, min_pers)), axis=None), 0)
dim = min(len(vect), thresh)
Xfit[i, :dim] = vect[:dim]
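
[Editor's note] To see why the guard is needed: some scikit-learn versions raise a ValueError when pairwise() receives zero samples, and the except branch substitutes the (0, 0) matrix the rest of the loop expects. A small illustration, using the import path of this module (newer scikit-learn also exposes DistanceMetric under sklearn.metrics):

    import numpy as np
    from sklearn.neighbors import DistanceMetric

    empty_diag = np.empty(shape=[0, 2])
    try:
        distances = DistanceMetric.get_metric("chebyshev").pairwise(empty_diag)
    except ValueError:  # raised by older scikit-learn versions
        distances = np.empty(shape=[0, 0])
    print(distances.shape)  # (0, 0) either way
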
diff --git a/src/python/gudhi/simplex_tree.pyx b/src/python/gudhi/simplex_tree.pyx
index 9c51cb46..c3720936 100644
--- a/src/python/gudhi/simplex_tree.pyx
+++ b/src/python/gudhi/simplex_tree.pyx
@@ -9,8 +9,7 @@
from cython.operator import dereference, preincrement
from libc.stdint cimport intptr_t
-import numpy
-from numpy import array as np_array
+import numpy as np
cimport gudhi.simplex_tree
__author__ = "Vincent Rouvreau"
@@ -542,7 +541,11 @@ cdef class SimplexTree:
function to be launched first.
"""
assert self.pcohptr != NULL, "compute_persistence() must be called before persistence_intervals_in_dimension()"
- return np_array(self.pcohptr.intervals_in_dimension(dimension))
+ piid = np.array(self.pcohptr.intervals_in_dimension(dimension))
+ # Workaround https://github.com/GUDHI/gudhi-devel/issues/507
+ if len(piid) == 0:
+ return np.empty(shape = [0, 2])
+ return piid
def persistence_pairs(self):
"""This function returns a list of persistence birth and death simplices pairs.
@@ -583,8 +586,8 @@ cdef class SimplexTree:
"""
assert self.pcohptr != NULL, "lower_star_persistence_generators() requires that persistence() be called first."
gen = self.pcohptr.lower_star_generators()
- normal = [np_array(d).reshape(-1,2) for d in gen.first]
- infinite = [np_array(d) for d in gen.second]
+ normal = [np.array(d).reshape(-1,2) for d in gen.first]
+ infinite = [np.array(d) for d in gen.second]
return (normal, infinite)
def flag_persistence_generators(self):
@@ -602,19 +605,19 @@ cdef class SimplexTree:
assert self.pcohptr != NULL, "flag_persistence_generators() requires that persistence() be called first."
gen = self.pcohptr.flag_generators()
if len(gen.first) == 0:
- normal0 = numpy.empty((0,3))
+ normal0 = np.empty((0,3))
normals = []
else:
l = iter(gen.first)
- normal0 = np_array(next(l)).reshape(-1,3)
- normals = [np_array(d).reshape(-1,4) for d in l]
+ normal0 = np.array(next(l)).reshape(-1,3)
+ normals = [np.array(d).reshape(-1,4) for d in l]
if len(gen.second) == 0:
- infinite0 = numpy.empty(0)
+ infinite0 = np.empty(0)
infinites = []
else:
l = iter(gen.second)
- infinite0 = np_array(next(l))
- infinites = [np_array(d).reshape(-1,2) for d in l]
+ infinite0 = np.array(next(l))
+ infinites = [np.array(d).reshape(-1,2) for d in l]
return (normal0, normals, infinite0, infinites)
def collapse_edges(self, nb_iterations = 1):
diff --git a/src/python/include/Alpha_complex_factory.h b/src/python/include/Alpha_complex_factory.h
index 3405fdd6..3d20aa8f 100644
--- a/src/python/include/Alpha_complex_factory.h
+++ b/src/python/include/Alpha_complex_factory.h
@@ -31,15 +31,34 @@ namespace Gudhi {
namespace alpha_complex {
-template <typename CgalPointType>
-std::vector<double> pt_cgal_to_cython(CgalPointType const& point) {
- std::vector<double> vd;
- vd.reserve(point.dimension());
- for (auto coord = point.cartesian_begin(); coord != point.cartesian_end(); coord++)
- vd.push_back(CGAL::to_double(*coord));
- return vd;
-}
+// template Functor that transforms a CGAL point to a vector of double as expected by cython
+template<typename CgalPointType, bool Weighted>
+struct Point_cgal_to_cython;
+
+// Specialized Unweighted Functor
+template<typename CgalPointType>
+struct Point_cgal_to_cython<CgalPointType, false> {
+ std::vector<double> operator()(CgalPointType const& point) const
+ {
+ std::vector<double> vd;
+ vd.reserve(point.dimension());
+ for (auto coord = point.cartesian_begin(); coord != point.cartesian_end(); coord++)
+ vd.push_back(CGAL::to_double(*coord));
+ return vd;
+ }
+};
+// Specialized Weighted Functor
+template<typename CgalPointType>
+struct Point_cgal_to_cython<CgalPointType, true> {
+ std::vector<double> operator()(CgalPointType const& weighted_point) const
+ {
+ const auto& point = weighted_point.point();
+ return Point_cgal_to_cython<decltype(point), false>()(point);
+ }
+};
+
+// Function that transforms a cython point (aka. a vector of double) to a CGAL point
template <typename CgalPointType>
static CgalPointType pt_cython_to_cgal(std::vector<double> const& vec) {
return CgalPointType(vec.size(), vec.begin(), vec.end());
@@ -51,24 +70,35 @@ class Abstract_alpha_complex {
virtual bool create_simplex_tree(Simplex_tree_interface<>* simplex_tree, double max_alpha_square,
bool default_filtration_value) = 0;
+
+ virtual std::size_t num_vertices() const = 0;
virtual ~Abstract_alpha_complex() = default;
};
-class Exact_Alphacomplex_dD final : public Abstract_alpha_complex {
+template <bool Weighted = false>
+class Exact_alpha_complex_dD final : public Abstract_alpha_complex {
private:
using Kernel = CGAL::Epeck_d<CGAL::Dynamic_dimension_tag>;
- using Point = typename Kernel::Point_d;
+ using Bare_point = typename Kernel::Point_d;
+ using Point = std::conditional_t<Weighted, typename Kernel::Weighted_point_d,
+ typename Kernel::Point_d>;
public:
- Exact_Alphacomplex_dD(const std::vector<std::vector<double>>& points, bool exact_version)
+ Exact_alpha_complex_dD(const std::vector<std::vector<double>>& points, bool exact_version)
+ : exact_version_(exact_version),
+ alpha_complex_(boost::adaptors::transform(points, pt_cython_to_cgal<Bare_point>)) {
+ }
+
+ Exact_alpha_complex_dD(const std::vector<std::vector<double>>& points,
+ const std::vector<double>& weights, bool exact_version)
: exact_version_(exact_version),
- alpha_complex_(boost::adaptors::transform(points, pt_cython_to_cgal<Point>)) {
+ alpha_complex_(boost::adaptors::transform(points, pt_cython_to_cgal<Bare_point>), weights) {
}
virtual std::vector<double> get_point(int vh) override {
- Point const& point = alpha_complex_.get_point(vh);
- return pt_cgal_to_cython(point);
+ // Can be a Weighted or a Bare point, depending on Weighted
+ return Point_cgal_to_cython<Point, Weighted>()(alpha_complex_.get_point(vh));
}
virtual bool create_simplex_tree(Simplex_tree_interface<>* simplex_tree, double max_alpha_square,
@@ -76,65 +106,49 @@ class Exact_Alphacomplex_dD final : public Abstract_alpha_complex {
return alpha_complex_.create_complex(*simplex_tree, max_alpha_square, exact_version_, default_filtration_value);
}
+ virtual std::size_t num_vertices() const {
+ return alpha_complex_.num_vertices();
+ }
+
private:
bool exact_version_;
- Alpha_complex<Kernel> alpha_complex_;
+ Alpha_complex<Kernel, Weighted> alpha_complex_;
};
-class Inexact_Alphacomplex_dD final : public Abstract_alpha_complex {
+template <bool Weighted = false>
+class Inexact_alpha_complex_dD final : public Abstract_alpha_complex {
private:
using Kernel = CGAL::Epick_d<CGAL::Dynamic_dimension_tag>;
- using Point = typename Kernel::Point_d;
+ using Bare_point = typename Kernel::Point_d;
+ using Point = std::conditional_t<Weighted, typename Kernel::Weighted_point_d,
+ typename Kernel::Point_d>;
public:
- Inexact_Alphacomplex_dD(const std::vector<std::vector<double>>& points, bool exact_version)
- : exact_version_(exact_version),
- alpha_complex_(boost::adaptors::transform(points, pt_cython_to_cgal<Point>)) {
+ Inexact_alpha_complex_dD(const std::vector<std::vector<double>>& points)
+ : alpha_complex_(boost::adaptors::transform(points, pt_cython_to_cgal<Bare_point>)) {
+ }
+
+ Inexact_alpha_complex_dD(const std::vector<std::vector<double>>& points, const std::vector<double>& weights)
+ : alpha_complex_(boost::adaptors::transform(points, pt_cython_to_cgal<Bare_point>), weights) {
}
virtual std::vector<double> get_point(int vh) override {
- Point const& point = alpha_complex_.get_point(vh);
- return pt_cgal_to_cython(point);
+ // Can be a Weighted or a Bare point, depending on Weighted
+ return Point_cgal_to_cython<Point, Weighted>()(alpha_complex_.get_point(vh));
}
virtual bool create_simplex_tree(Simplex_tree_interface<>* simplex_tree, double max_alpha_square,
bool default_filtration_value) override {
- return alpha_complex_.create_complex(*simplex_tree, max_alpha_square, exact_version_, default_filtration_value);
+ return alpha_complex_.create_complex(*simplex_tree, max_alpha_square, false, default_filtration_value);
}
- private:
- bool exact_version_;
- Alpha_complex<Kernel> alpha_complex_;
-};
-
-template <complexity Complexity>
-class Alphacomplex_3D final : public Abstract_alpha_complex {
- private:
- using Point = typename Alpha_complex_3d<Complexity, false, false>::Bare_point_3;
-
- static Point pt_cython_to_cgal_3(std::vector<double> const& vec) {
- return Point(vec[0], vec[1], vec[2]);
- }
-
- public:
- Alphacomplex_3D(const std::vector<std::vector<double>>& points)
- : alpha_complex_(boost::adaptors::transform(points, pt_cython_to_cgal_3)) {
- }
-
- virtual std::vector<double> get_point(int vh) override {
- Point const& point = alpha_complex_.get_point(vh);
- return pt_cgal_to_cython(point);
- }
-
- virtual bool create_simplex_tree(Simplex_tree_interface<>* simplex_tree, double max_alpha_square,
- bool default_filtration_value) override {
- return alpha_complex_.create_complex(*simplex_tree, max_alpha_square);
+ virtual std::size_t num_vertices() const {
+ return alpha_complex_.num_vertices();
}
private:
- Alpha_complex_3d<Complexity, false, false> alpha_complex_;
+ Alpha_complex<Kernel, Weighted> alpha_complex_;
};
-
} // namespace alpha_complex
} // namespace Gudhi
diff --git a/src/python/include/Alpha_complex_interface.h b/src/python/include/Alpha_complex_interface.h
index 23be194d..671af4a4 100644
--- a/src/python/include/Alpha_complex_interface.h
+++ b/src/python/include/Alpha_complex_interface.h
@@ -27,10 +27,23 @@ namespace alpha_complex {
class Alpha_complex_interface {
public:
- Alpha_complex_interface(const std::vector<std::vector<double>>& points, bool fast_version, bool exact_version)
- : points_(points),
- fast_version_(fast_version),
- exact_version_(exact_version) {
+ Alpha_complex_interface(const std::vector<std::vector<double>>& points,
+ const std::vector<double>& weights,
+ bool fast_version, bool exact_version) {
+ const bool weighted = (weights.size() > 0);
+ if (fast_version) {
+ if (weighted) {
+ alpha_ptr_ = std::make_unique<Inexact_alpha_complex_dD<true>>(points, weights);
+ } else {
+ alpha_ptr_ = std::make_unique<Inexact_alpha_complex_dD<false>>(points);
+ }
+ } else {
+ if (weighted) {
+ alpha_ptr_ = std::make_unique<Exact_alpha_complex_dD<true>>(points, weights, exact_version);
+ } else {
+ alpha_ptr_ = std::make_unique<Exact_alpha_complex_dD<false>>(points, exact_version);
+ }
+ }
}
std::vector<double> get_point(int vh) {
@@ -39,38 +52,13 @@ class Alpha_complex_interface {
void create_simplex_tree(Simplex_tree_interface<>* simplex_tree, double max_alpha_square,
bool default_filtration_value) {
- if (points_.size() > 0) {
- std::size_t dimension = points_[0].size();
- if (dimension == 3 && !default_filtration_value) {
- if (fast_version_)
- alpha_ptr_ = std::make_unique<Alphacomplex_3D<Gudhi::alpha_complex::complexity::FAST>>(points_);
- else if (exact_version_)
- alpha_ptr_ = std::make_unique<Alphacomplex_3D<Gudhi::alpha_complex::complexity::EXACT>>(points_);
- else
- alpha_ptr_ = std::make_unique<Alphacomplex_3D<Gudhi::alpha_complex::complexity::SAFE>>(points_);
- if (!alpha_ptr_->create_simplex_tree(simplex_tree, max_alpha_square, default_filtration_value)) {
- // create_simplex_tree will fail if all points are on a plane - Retry with dD by setting dimension to 2
- dimension--;
- alpha_ptr_.reset();
- }
- }
- // Not ** else ** because we have to take into account if 3d fails
- if (dimension != 3 || default_filtration_value) {
- if (fast_version_) {
- alpha_ptr_ = std::make_unique<Inexact_Alphacomplex_dD>(points_, exact_version_);
- } else {
- alpha_ptr_ = std::make_unique<Exact_Alphacomplex_dD>(points_, exact_version_);
- }
- alpha_ptr_->create_simplex_tree(simplex_tree, max_alpha_square, default_filtration_value);
- }
- }
+ // Nothing to be done in case of an empty point set
+ if (alpha_ptr_->num_vertices() > 0)
+ alpha_ptr_->create_simplex_tree(simplex_tree, max_alpha_square, default_filtration_value);
}
private:
std::unique_ptr<Abstract_alpha_complex> alpha_ptr_;
- std::vector<std::vector<double>> points_;
- bool fast_version_;
- bool exact_version_;
};
} // namespace alpha_complex
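
[Editor's note] Summing up the new dispatch: the interface now picks one of four template instantiations from two booleans. An illustrative Python sketch of the selection table (backend names as in the factory above; this is not the actual binding code):

    def pick_backend(fast_version, weighted):
        # 'fast' selects the inexact (Epick_d) kernel, otherwise the exact
        # (Epeck_d) kernel; the Weighted template flag follows the weights.
        return {
            (True,  True):  "Inexact_alpha_complex_dD<true>",
            (True,  False): "Inexact_alpha_complex_dD<false>",
            (False, True):  "Exact_alpha_complex_dD<true>",
            (False, False): "Exact_alpha_complex_dD<false>",
        }[(fast_version, weighted)]

    print(pick_backend(fast_version=False, weighted=True))
    # Exact_alpha_complex_dD<true>
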
diff --git a/src/python/test/test_alpha_complex.py b/src/python/test/test_alpha_complex.py
index 814f8289..f15284f3 100755
--- a/src/python/test/test_alpha_complex.py
+++ b/src/python/test/test_alpha_complex.py
@@ -8,10 +8,12 @@
- YYYY/MM Author: Description of the modification
"""
-import gudhi as gd
+from gudhi import AlphaComplex
import math
import numpy as np
import pytest
+import warnings
+
try:
# python3
from itertools import zip_longest
@@ -19,22 +21,24 @@ except ImportError:
# python2
from itertools import izip_longest as zip_longest
-__author__ = "Vincent Rouvreau"
-__copyright__ = "Copyright (C) 2016 Inria"
-__license__ = "MIT"
def _empty_alpha(precision):
- alpha_complex = gd.AlphaComplex(points=[[0, 0]], precision = precision)
+ alpha_complex = AlphaComplex(precision = precision)
+ assert alpha_complex.__is_defined() == True
+
+def _one_2d_point_alpha(precision):
+ alpha_complex = AlphaComplex(points=[[0, 0]], precision = precision)
assert alpha_complex.__is_defined() == True
def test_empty_alpha():
for precision in ['fast', 'safe', 'exact']:
_empty_alpha(precision)
+ _one_2d_point_alpha(precision)
def _infinite_alpha(precision):
point_list = [[0, 0], [1, 0], [0, 1], [1, 1]]
- alpha_complex = gd.AlphaComplex(points=point_list, precision = precision)
+ alpha_complex = AlphaComplex(points=point_list, precision = precision)
assert alpha_complex.__is_defined() == True
simplex_tree = alpha_complex.create_simplex_tree()
@@ -69,18 +73,9 @@ def _infinite_alpha(precision):
assert point_list[1] == alpha_complex.get_point(1)
assert point_list[2] == alpha_complex.get_point(2)
assert point_list[3] == alpha_complex.get_point(3)
- try:
- alpha_complex.get_point(4) == []
- except IndexError:
- pass
- else:
- assert False
- try:
- alpha_complex.get_point(125) == []
- except IndexError:
- pass
- else:
- assert False
+
+ with pytest.raises(IndexError):
+ alpha_complex.get_point(len(point_list))
def test_infinite_alpha():
for precision in ['fast', 'safe', 'exact']:
@@ -88,7 +83,7 @@ def test_infinite_alpha():
def _filtered_alpha(precision):
point_list = [[0, 0], [1, 0], [0, 1], [1, 1]]
- filtered_alpha = gd.AlphaComplex(points=point_list, precision = precision)
+ filtered_alpha = AlphaComplex(points=point_list, precision = precision)
simplex_tree = filtered_alpha.create_simplex_tree(max_alpha_square=0.25)
@@ -99,18 +94,9 @@ def _filtered_alpha(precision):
assert point_list[1] == filtered_alpha.get_point(1)
assert point_list[2] == filtered_alpha.get_point(2)
assert point_list[3] == filtered_alpha.get_point(3)
- try:
- filtered_alpha.get_point(4) == []
- except IndexError:
- pass
- else:
- assert False
- try:
- filtered_alpha.get_point(125) == []
- except IndexError:
- pass
- else:
- assert False
+
+ with pytest.raises(IndexError):
+ filtered_alpha.get_point(len(point_list))
assert list(simplex_tree.get_filtration()) == [
([0], 0.0),
@@ -141,10 +127,10 @@ def _safe_alpha_persistence_comparison(precision):
embedding2 = [[signal[i], delayed[i]] for i in range(len(time))]
#build alpha complex and simplex tree
- alpha_complex1 = gd.AlphaComplex(points=embedding1, precision = precision)
+ alpha_complex1 = AlphaComplex(points=embedding1, precision = precision)
simplex_tree1 = alpha_complex1.create_simplex_tree()
- alpha_complex2 = gd.AlphaComplex(points=embedding2, precision = precision)
+ alpha_complex2 = AlphaComplex(points=embedding2, precision = precision)
simplex_tree2 = alpha_complex2.create_simplex_tree()
diag1 = simplex_tree1.persistence()
@@ -162,7 +148,7 @@ def test_safe_alpha_persistence_comparison():
def _delaunay_complex(precision):
point_list = [[0, 0], [1, 0], [0, 1], [1, 1]]
- filtered_alpha = gd.AlphaComplex(points=point_list, precision = precision)
+ filtered_alpha = AlphaComplex(points=point_list, precision = precision)
simplex_tree = filtered_alpha.create_simplex_tree(default_filtration_value = True)
@@ -173,18 +159,11 @@ def _delaunay_complex(precision):
assert point_list[1] == filtered_alpha.get_point(1)
assert point_list[2] == filtered_alpha.get_point(2)
assert point_list[3] == filtered_alpha.get_point(3)
- try:
- filtered_alpha.get_point(4) == []
- except IndexError:
- pass
- else:
- assert False
- try:
- filtered_alpha.get_point(125) == []
- except IndexError:
- pass
- else:
- assert False
+
+ with pytest.raises(IndexError):
+ filtered_alpha.get_point(4)
+ with pytest.raises(IndexError):
+ filtered_alpha.get_point(125)
for filtered_value in simplex_tree.get_filtration():
assert math.isnan(filtered_value[1])
@@ -198,7 +177,13 @@ def test_delaunay_complex():
_delaunay_complex(precision)
def _3d_points_on_a_plane(precision, default_filtration_value):
- alpha = gd.AlphaComplex(off_file='alphacomplexdoc.off', precision = precision)
+ alpha = AlphaComplex(points = [[1.0, 1.0 , 0.0],
+ [7.0, 0.0 , 0.0],
+ [4.0, 6.0 , 0.0],
+ [9.0, 6.0 , 0.0],
+ [0.0, 14.0, 0.0],
+ [2.0, 19.0, 0.0],
+ [9.0, 17.0, 0.0]], precision = precision)
simplex_tree = alpha.create_simplex_tree(default_filtration_value = default_filtration_value)
assert simplex_tree.dimension() == 2
@@ -206,28 +191,16 @@ def _3d_points_on_a_plane(precision, default_filtration_value):
assert simplex_tree.num_simplices() == 25
def test_3d_points_on_a_plane():
- off_file = open("alphacomplexdoc.off", "w")
- off_file.write("OFF \n" \
- "7 0 0 \n" \
- "1.0 1.0 0.0\n" \
- "7.0 0.0 0.0\n" \
- "4.0 6.0 0.0\n" \
- "9.0 6.0 0.0\n" \
- "0.0 14.0 0.0\n" \
- "2.0 19.0 0.0\n" \
- "9.0 17.0 0.0\n" )
- off_file.close()
-
for default_filtration_value in [True, False]:
for precision in ['fast', 'safe', 'exact']:
_3d_points_on_a_plane(precision, default_filtration_value)
def _3d_tetrahedrons(precision):
points = 10*np.random.rand(10, 3)
- alpha = gd.AlphaComplex(points=points, precision = precision)
+ alpha = AlphaComplex(points = points, precision = precision)
st_alpha = alpha.create_simplex_tree(default_filtration_value = False)
# New AlphaComplex for get_point to work
- delaunay = gd.AlphaComplex(points=points, precision = precision)
+ delaunay = AlphaComplex(points = points, precision = precision)
st_delaunay = delaunay.create_simplex_tree(default_filtration_value = True)
delaunay_tetra = []
@@ -256,3 +229,60 @@ def _3d_tetrahedrons(precision):
def test_3d_tetrahedrons():
for precision in ['fast', 'safe', 'exact']:
_3d_tetrahedrons(precision)
+
+def test_off_file_deprecation_warning():
+ off_file = open("alphacomplexdoc.off", "w")
+ off_file.write("OFF \n" \
+ "7 0 0 \n" \
+ "1.0 1.0 0.0\n" \
+ "7.0 0.0 0.0\n" \
+ "4.0 6.0 0.0\n" \
+ "9.0 6.0 0.0\n" \
+ "0.0 14.0 0.0\n" \
+ "2.0 19.0 0.0\n" \
+ "9.0 17.0 0.0\n" )
+ off_file.close()
+
+ with pytest.warns(DeprecationWarning):
+ alpha = AlphaComplex(off_file="alphacomplexdoc.off")
+
+def test_non_existing_off_file():
+ with pytest.warns(DeprecationWarning):
+ with pytest.raises(FileNotFoundError):
+ alpha = AlphaComplex(off_file="pouetpouettralala.toubiloubabdou")
+
+def test_inconsistency_points_and_weights():
+ points = [[1.0, 1.0 , 0.0],
+ [7.0, 0.0 , 0.0],
+ [4.0, 6.0 , 0.0],
+ [9.0, 6.0 , 0.0],
+ [0.0, 14.0, 0.0],
+ [2.0, 19.0, 0.0],
+ [9.0, 17.0, 0.0]]
+ with pytest.raises(ValueError):
+ # 7 points, 8 weights, on purpose
+ alpha = AlphaComplex(points = points,
+ weights = [1., 2., 3., 4., 5., 6., 7., 8.])
+
+ with pytest.raises(ValueError):
+ # 7 points, 6 weights, on purpose
+ alpha = AlphaComplex(points = points,
+ weights = [1., 2., 3., 4., 5., 6.])
+
+def _weighted_doc_example(precision):
+ stree = AlphaComplex(points=[[ 1., -1., -1.],
+ [-1., 1., -1.],
+ [-1., -1., 1.],
+ [ 1., 1., 1.],
+ [ 2., 2., 2.]],
+ weights = [4., 4., 4., 4., 1.],
+ precision = precision).create_simplex_tree()
+
+ assert stree.filtration([0, 1, 2, 3]) == pytest.approx(-1.)
+ assert stree.filtration([0, 1, 3, 4]) == pytest.approx(95.)
+ assert stree.filtration([0, 2, 3, 4]) == pytest.approx(95.)
+ assert stree.filtration([1, 2, 3, 4]) == pytest.approx(95.)
+
+def test_weighted_doc_example():
+ for precision in ['fast', 'safe', 'exact']:
+ _weighted_doc_example(precision)
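
[Editor's note] The first assertion can be checked by hand: by symmetry, the ball orthogonal to the four weighted points of the tetrahedron [0, 1, 2, 3] is centered at the origin, and each of those points has squared norm 3 and weight 4, so the filtration value (the power of that ball) is 3 - 4 = -1. A NumPy sketch of the computation (illustration only, not the library code):

    import numpy as np

    pts = np.array([[ 1., -1., -1.],
                    [-1.,  1., -1.],
                    [-1., -1.,  1.],
                    [ 1.,  1.,  1.]])
    power = (pts ** 2).sum(axis=1) - 4.  # squared distance to origin minus weight
    print(power)  # [-1. -1. -1. -1.], matching stree.filtration([0, 1, 2, 3])
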
diff --git a/src/python/test/test_betti_curve_representations.py b/src/python/test/test_betti_curve_representations.py
new file mode 100755
index 00000000..6a45da4d
--- /dev/null
+++ b/src/python/test/test_betti_curve_representations.py
@@ -0,0 +1,59 @@
+import numpy as np
+import scipy.interpolate
+import pytest
+
+from gudhi.representations.vector_methods import BettiCurve
+
+def test_betti_curve_is_irregular_betti_curve_followed_by_interpolation():
+ m = 10
+ n = 1000
+ pinf = 0.05
+ pzero = 0.05
+ res = 100
+
+ pds = []
+ for i in range(0, m):
+ pd = np.zeros((n, 2))
+ pd[:, 0] = np.random.uniform(0, 10, n)
+ pd[:, 1] = np.random.uniform(pd[:, 0], 10, n)
+ pd[np.random.uniform(0, 1, n) < pzero, 0] = 0
+ pd[np.random.uniform(0, 1, n) < pinf, 1] = np.inf
+ pds.append(pd)
+
+ bc = BettiCurve(resolution=None, predefined_grid=None)
+ bc.fit(pds)
+ bettis = bc.transform(pds)
+
+ bc2 = BettiCurve(resolution=None, predefined_grid=None)
+ bettis2 = bc2.fit_transform(pds)
+ assert((bc2.grid_ == bc.grid_).all())
+ assert((bettis2 == bettis).all())
+
+ for i in range(0, m):
+ grid = np.linspace(pds[i][np.isfinite(pds[i])].min(), pds[i][np.isfinite(pds[i])].max() + 1, res)
+ bc_gridded = BettiCurve(predefined_grid=grid)
+ bc_gridded.fit([])
+ bettis_gridded = bc_gridded(pds[i])
+
+ interp = scipy.interpolate.interp1d(bc.grid_, bettis[i, :], kind="previous", fill_value="extrapolate")
+ bettis_interp = np.array(interp(grid), dtype=int)
+ assert((bettis_interp == bettis_gridded).all())
+
+
+def test_empty_with_predefined_grid():
+ random_grid = np.sort(np.random.uniform(0, 1, 100))
+ bc = BettiCurve(predefined_grid=random_grid)
+ bettis = bc.fit_transform([])
+ assert((bc.grid_ == random_grid).all())
+ assert((bettis == 0).all())
+
+
+def test_empty():
+ bc = BettiCurve(resolution=None, predefined_grid=None)
+ bettis = bc.fit_transform([])
+ assert(bc.grid_ == [-np.inf])
+ assert((bettis == 0).all())
+
+def test_wrong_value_of_predefined_grid():
+ with pytest.raises(ValueError):
+ BettiCurve(predefined_grid=[1, 2, 3])
diff --git a/src/python/test/test_cubical_complex.py b/src/python/test/test_cubical_complex.py
index d0e4e9e8..29d559b3 100755
--- a/src/python/test/test_cubical_complex.py
+++ b/src/python/test/test_cubical_complex.py
@@ -174,3 +174,28 @@ def test_periodic_cofaces_of_persistence_pairs_when_pd_has_no_paired_birth_and_d
assert np.array_equal(pairs[1][0], np.array([0]))
assert np.array_equal(pairs[1][1], np.array([0, 1]))
assert np.array_equal(pairs[1][2], np.array([1]))
+
+def test_cubical_persistence_intervals_in_dimension():
+ cub = CubicalComplex(
+ dimensions=[3, 3],
+ top_dimensional_cells=[1, 2, 3, 4, 5, 6, 7, 8, 9],
+ )
+ cub.compute_persistence()
+ H0 = cub.persistence_intervals_in_dimension(0)
+ assert np.array_equal(H0, np.array([[ 1., float("inf")]]))
+ assert cub.persistence_intervals_in_dimension(1).shape == (0, 2)
+
+def test_periodic_cubical_persistence_intervals_in_dimension():
+ cub = PeriodicCubicalComplex(
+ dimensions=[3, 3],
+ top_dimensional_cells=[1, 2, 3, 4, 5, 6, 7, 8, 9],
+ periodic_dimensions = [True, True]
+ )
+ cub.compute_persistence()
+ H0 = cub.persistence_intervals_in_dimension(0)
+ assert np.array_equal(H0, np.array([[ 1., float("inf")]]))
+ H1 = cub.persistence_intervals_in_dimension(1)
+ assert np.array_equal(H1, np.array([[ 3., float("inf")], [ 7., float("inf")]]))
+ H2 = cub.persistence_intervals_in_dimension(2)
+ assert np.array_equal(H2, np.array([[ 9., float("inf")]]))
+ assert cub.persistence_intervals_in_dimension(3).shape == (0, 2)
diff --git a/src/python/test/test_dtm.py b/src/python/test/test_dtm.py
index c29471cf..e46d616c 100755
--- a/src/python/test/test_dtm.py
+++ b/src/python/test/test_dtm.py
@@ -91,13 +91,11 @@ def test_density():
def test_dtm_overflow_warnings():
pts = numpy.array([[10., 100000000000000000000000000000.], [1000., 100000000000000000000000000.]])
- impl_warn = ["keops", "hnsw"]
with warnings.catch_warnings(record=True) as w:
- for impl in impl_warn:
- dtm = DistanceToMeasure(2, q=10000, implementation=impl)
- r = dtm.fit_transform(pts)
- assert len(w) == 2
- for i in range(len(w)):
- assert issubclass(w[i].category, RuntimeWarning)
- assert "Overflow" in str(w[i].message)
+ # TODO Test "keops" implementation as well when next version of pykeops (current is 1.5) is released (should fix the problem (cf. issue #543))
+ dtm = DistanceToMeasure(2, implementation="hnsw")
+ r = dtm.fit_transform(pts)
+ assert len(w) == 1
+ assert issubclass(w[0].category, RuntimeWarning)
+ assert "Overflow" in str(w[0].message)
diff --git a/src/python/test/test_reader_utils.py b/src/python/test/test_reader_utils.py
index e96e0569..fdfddc4b 100755
--- a/src/python/test/test_reader_utils.py
+++ b/src/python/test/test_reader_utils.py
@@ -8,8 +8,9 @@
- YYYY/MM Author: Description of the modification
"""
-import gudhi
+import gudhi as gd
import numpy as np
+from pytest import raises
__author__ = "Vincent Rouvreau"
__copyright__ = "Copyright (C) 2017 Inria"
@@ -18,7 +19,7 @@ __license__ = "MIT"
def test_non_existing_csv_file():
# Try to open a non existing file
- matrix = gudhi.read_lower_triangular_matrix_from_csv_file(
+ matrix = gd.read_lower_triangular_matrix_from_csv_file(
csv_file="pouetpouettralala.toubiloubabdou"
)
assert matrix == []
@@ -29,7 +30,7 @@ def test_full_square_distance_matrix_csv_file():
test_file = open("full_square_distance_matrix.csv", "w")
test_file.write("0;1;2;3;\n1;0;4;5;\n2;4;0;6;\n3;5;6;0;")
test_file.close()
- matrix = gudhi.read_lower_triangular_matrix_from_csv_file(
+ matrix = gd.read_lower_triangular_matrix_from_csv_file(
csv_file="full_square_distance_matrix.csv", separator=";"
)
assert matrix == [[], [1.0], [2.0, 4.0], [3.0, 5.0, 6.0]]
@@ -40,7 +41,7 @@ def test_lower_triangular_distance_matrix_csv_file():
test_file = open("lower_triangular_distance_matrix.csv", "w")
test_file.write("\n1,\n2,3,\n4,5,6,\n7,8,9,10,")
test_file.close()
- matrix = gudhi.read_lower_triangular_matrix_from_csv_file(
+ matrix = gd.read_lower_triangular_matrix_from_csv_file(
csv_file="lower_triangular_distance_matrix.csv", separator=","
)
assert matrix == [[], [1.0], [2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9.0, 10.0]]
@@ -48,11 +49,11 @@ def test_lower_triangular_distance_matrix_csv_file():
def test_non_existing_persistence_file():
# Try to open a non existing file
- persistence = gudhi.read_persistence_intervals_grouped_by_dimension(
+ persistence = gd.read_persistence_intervals_grouped_by_dimension(
persistence_file="pouetpouettralala.toubiloubabdou"
)
assert persistence == []
- persistence = gudhi.read_persistence_intervals_in_dimension(
+ persistence = gd.read_persistence_intervals_in_dimension(
persistence_file="pouetpouettralala.toubiloubabdou", only_this_dim=1
)
np.testing.assert_array_equal(persistence, [])
@@ -65,21 +66,21 @@ def test_read_persistence_intervals_without_dimension():
"# Simple persistence diagram without dimension\n2.7 3.7\n9.6 14.\n34.2 34.974\n3. inf"
)
test_file.close()
- persistence = gudhi.read_persistence_intervals_in_dimension(
+ persistence = gd.read_persistence_intervals_in_dimension(
persistence_file="persistence_intervals_without_dimension.pers"
)
np.testing.assert_array_equal(
persistence, [(2.7, 3.7), (9.6, 14.0), (34.2, 34.974), (3.0, float("Inf"))]
)
- persistence = gudhi.read_persistence_intervals_in_dimension(
+ persistence = gd.read_persistence_intervals_in_dimension(
persistence_file="persistence_intervals_without_dimension.pers", only_this_dim=0
)
np.testing.assert_array_equal(persistence, [])
- persistence = gudhi.read_persistence_intervals_in_dimension(
+ persistence = gd.read_persistence_intervals_in_dimension(
persistence_file="persistence_intervals_without_dimension.pers", only_this_dim=1
)
np.testing.assert_array_equal(persistence, [])
- persistence = gudhi.read_persistence_intervals_grouped_by_dimension(
+ persistence = gd.read_persistence_intervals_grouped_by_dimension(
persistence_file="persistence_intervals_without_dimension.pers"
)
assert persistence == {
@@ -94,29 +95,29 @@ def test_read_persistence_intervals_with_dimension():
"# Simple persistence diagram with dimension\n0 2.7 3.7\n1 9.6 14.\n3 34.2 34.974\n1 3. inf"
)
test_file.close()
- persistence = gudhi.read_persistence_intervals_in_dimension(
+ persistence = gd.read_persistence_intervals_in_dimension(
persistence_file="persistence_intervals_with_dimension.pers"
)
np.testing.assert_array_equal(
persistence, [(2.7, 3.7), (9.6, 14.0), (34.2, 34.974), (3.0, float("Inf"))]
)
- persistence = gudhi.read_persistence_intervals_in_dimension(
+ persistence = gd.read_persistence_intervals_in_dimension(
persistence_file="persistence_intervals_with_dimension.pers", only_this_dim=0
)
np.testing.assert_array_equal(persistence, [(2.7, 3.7)])
- persistence = gudhi.read_persistence_intervals_in_dimension(
+ persistence = gd.read_persistence_intervals_in_dimension(
persistence_file="persistence_intervals_with_dimension.pers", only_this_dim=1
)
np.testing.assert_array_equal(persistence, [(9.6, 14.0), (3.0, float("Inf"))])
- persistence = gudhi.read_persistence_intervals_in_dimension(
+ persistence = gd.read_persistence_intervals_in_dimension(
persistence_file="persistence_intervals_with_dimension.pers", only_this_dim=2
)
np.testing.assert_array_equal(persistence, [])
- persistence = gudhi.read_persistence_intervals_in_dimension(
+ persistence = gd.read_persistence_intervals_in_dimension(
persistence_file="persistence_intervals_with_dimension.pers", only_this_dim=3
)
np.testing.assert_array_equal(persistence, [(34.2, 34.974)])
- persistence = gudhi.read_persistence_intervals_grouped_by_dimension(
+ persistence = gd.read_persistence_intervals_grouped_by_dimension(
persistence_file="persistence_intervals_with_dimension.pers"
)
assert persistence == {
diff --git a/src/python/test/test_representations.py b/src/python/test/test_representations.py
index cda1a15b..d219ce7a 100755
--- a/src/python/test/test_representations.py
+++ b/src/python/test/test_representations.py
@@ -3,9 +3,23 @@ import sys
import matplotlib.pyplot as plt
import numpy as np
import pytest
+import random
from sklearn.cluster import KMeans
+# Vectorization
+from gudhi.representations import (Landscape, Silhouette, BettiCurve, ComplexPolynomial,\
+ TopologicalVector, PersistenceImage, Entropy)
+
+# Preprocessing
+from gudhi.representations import (BirthPersistenceTransform, Clamping, DiagramScaler, Padding, ProminentPoints, \
+ DiagramSelector)
+
+# Kernel
+from gudhi.representations import (PersistenceWeightedGaussianKernel, \
+ PersistenceScaleSpaceKernel, SlicedWassersteinDistance,\
+ SlicedWassersteinKernel, PersistenceFisherKernel, WassersteinDistance)
+
def test_representations_examples():
# Disable graphics for testing purposes
@@ -91,10 +105,66 @@ def test_dummy_atol():
from gudhi.representations.vector_methods import BettiCurve
-
def test_infinity():
a = np.array([[1.0, 8.0], [2.0, np.inf], [3.0, 4.0]])
c = BettiCurve(20, [0.0, 10.0])(a)
assert c[1] == 0
assert c[7] == 3
assert c[9] == 2
+
+def test_preprocessing_empty_diagrams():
+ empty_diag = np.empty(shape = [0, 2])
+ assert not np.any(BirthPersistenceTransform()(empty_diag))
+ assert not np.any(Clamping().fit_transform(empty_diag))
+ assert not np.any(DiagramScaler()(empty_diag))
+ assert not np.any(Padding()(empty_diag))
+ assert not np.any(ProminentPoints()(empty_diag))
+ assert not np.any(DiagramSelector()(empty_diag))
+
+def pow(n):
+ return lambda x: np.power(x[1]-x[0],n)
+
+def test_vectorization_empty_diagrams():
+ empty_diag = np.empty(shape = [0, 2])
+ random_resolution = random.randint(50,100)*10 # between 500 and 1000
+ print("resolution = ", random_resolution)
+ lsc = Landscape(resolution=random_resolution)(empty_diag)
+ assert not np.any(lsc)
+ assert lsc.shape[0]%random_resolution == 0
+ slt = Silhouette(resolution=random_resolution, weight=pow(2))(empty_diag)
+ assert not np.any(slt)
+ assert slt.shape[0] == random_resolution
+ btc = BettiCurve(resolution=random_resolution)(empty_diag)
+ assert not np.any(btc)
+ assert btc.shape[0] == random_resolution
+ cpp = ComplexPolynomial(threshold=random_resolution, polynomial_type="T")(empty_diag)
+ assert not np.any(cpp)
+ assert cpp.shape[0] == random_resolution
+ tpv = TopologicalVector(threshold=random_resolution)(empty_diag)
+ assert tpv.shape[0] == random_resolution
+ assert not np.any(tpv)
+ prmg = PersistenceImage(resolution=[random_resolution,random_resolution])(empty_diag)
+ assert not np.any(prmg)
+ assert prmg.shape[0] == random_resolution * random_resolution
+ sce = Entropy(mode="scalar", resolution=random_resolution)(empty_diag)
+ assert not np.any(sce)
+ assert sce.shape[0] == 1
+ scv = Entropy(mode="vector", normalized=False, resolution=random_resolution)(empty_diag)
+ assert not np.any(scv)
+ assert scv.shape[0] == random_resolution
+
+def test_kernel_empty_diagrams():
+ empty_diag = np.empty(shape = [0, 2])
+ assert SlicedWassersteinDistance(num_directions=100)(empty_diag, empty_diag) == 0.
+ assert SlicedWassersteinKernel(num_directions=100, bandwidth=1.)(empty_diag, empty_diag) == 1.
+ assert WassersteinDistance(mode="hera", delta=0.0001)(empty_diag, empty_diag) == 0.
+ assert WassersteinDistance(mode="pot")(empty_diag, empty_diag) == 0.
+ assert BottleneckDistance(epsilon=.001)(empty_diag, empty_diag) == 0.
+ assert BottleneckDistance()(empty_diag, empty_diag) == 0.
+# PersistenceWeightedGaussianKernel(bandwidth=1., kernel_approx=None, weight=arctan(1.,1.))(empty_diag, empty_diag)
+# PersistenceWeightedGaussianKernel(kernel_approx=RBFSampler(gamma=1./2, n_components=100000).fit(np.ones([1,2])), weight=arctan(1.,1.))(empty_diag, empty_diag)
+# PersistenceScaleSpaceKernel(bandwidth=1.)(empty_diag, empty_diag)
+# PersistenceScaleSpaceKernel(kernel_approx=RBFSampler(gamma=1./2, n_components=100000).fit(np.ones([1,2])))(empty_diag, empty_diag)
+# PersistenceFisherKernel(bandwidth_fisher=1., bandwidth=1.)(empty_diag, empty_diag)
+# PersistenceFisherKernel(bandwidth_fisher=1., bandwidth=1., kernel_approx=RBFSampler(gamma=1./2, n_components=100000).fit(np.ones([1,2])))(empty_diag, empty_diag)
+
diff --git a/src/python/test/test_simplex_tree.py b/src/python/test/test_simplex_tree.py
index a3eacaa9..31c46213 100755
--- a/src/python/test/test_simplex_tree.py
+++ b/src/python/test/test_simplex_tree.py
@@ -9,6 +9,7 @@
"""
from gudhi import SimplexTree, __GUDHI_USE_EIGEN3
+import numpy as np
import pytest
__author__ = "Vincent Rouvreau"
@@ -404,3 +405,46 @@ def test_boundaries_iterator():
with pytest.raises(RuntimeError):
list(st.get_boundaries([6])) # (6) does not exist
+
+def test_persistence_intervals_in_dimension():
+ # Here is our triangulation of a 2-torus - taken from https://dioscuri-tda.org/Paris_TDA_Tutorial_2021.html
+ # 0-----3-----4-----0
+ # | \ | \ | \ | \ |
+ # | \ | \ | \| \ |
+ # 1-----8-----7-----1
+ # | \ | \ | \ | \ |
+ # | \ | \ | \ | \ |
+ # 2-----5-----6-----2
+ # | \ | \ | \ | \ |
+ # | \ | \ | \ | \ |
+ # 0-----3-----4-----0
+ st = SimplexTree()
+ st.insert([0,1,8])
+ st.insert([0,3,8])
+ st.insert([3,7,8])
+ st.insert([3,4,7])
+ st.insert([1,4,7])
+ st.insert([0,1,4])
+ st.insert([1,2,5])
+ st.insert([1,5,8])
+ st.insert([5,6,8])
+ st.insert([6,7,8])
+ st.insert([2,6,7])
+ st.insert([1,2,7])
+ st.insert([0,2,3])
+ st.insert([2,3,5])
+ st.insert([3,4,5])
+ st.insert([4,5,6])
+ st.insert([0,4,6])
+ st.insert([0,2,6])
+ st.compute_persistence(persistence_dim_max=True)
+
+ H0 = st.persistence_intervals_in_dimension(0)
+ assert np.array_equal(H0, np.array([[ 0., float("inf")]]))
+ H1 = st.persistence_intervals_in_dimension(1)
+ assert np.array_equal(H1, np.array([[ 0., float("inf")], [ 0., float("inf")]]))
+ H2 = st.persistence_intervals_in_dimension(2)
+ assert np.array_equal(H2, np.array([[ 0., float("inf")]]))
+ # Test empty case
+ assert st.persistence_intervals_in_dimension(3).shape == (0, 2)
+
\ No newline at end of file
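
[Editor's note] As a cross-check of the asserted intervals: the triangulation above has 9 vertices, 27 edges and 18 triangles, so its Euler characteristic matches b0 - b1 + b2 = 1 - 2 + 1 for the 2-torus:

    V, E, F = 9, 27, 18
    print(V - E + F)  # 0
    print(1 - 2 + 1)  # 0, consistent with the H0, H1, H2 asserted above
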