author     tlacombe <lacombe1993@gmail.com>    2021-04-12 10:37:27 +0200
committer  tlacombe <lacombe1993@gmail.com>    2021-04-12 10:37:27 +0200
commit     69341c88c7c7819656c9a9b935fecc3bea50e4af (patch)
tree       7fa0646180c04fb32854ca0aaf29d192d5e4118f /src/python
parent     e94892f972357283e70c7534f84662dfaa21cc3e (diff)
parent     7e05e915adc1be285e04eb00d3ab7ba1b797f38d (diff)
merge upstream/master into essential parts
Diffstat (limited to 'src/python')
-rw-r--r--  src/python/CMakeLists.txt | 134
-rw-r--r--  src/python/doc/_templates/layout.html | 53
-rw-r--r--  src/python/doc/bottleneck_distance_user.rst | 4
-rwxr-xr-x  src/python/doc/conf.py | 2
-rw-r--r--  src/python/doc/cubical_complex_user.rst | 22
-rw-r--r--  src/python/doc/examples.rst | 33
-rw-r--r--  src/python/doc/installation.rst | 38
-rw-r--r--  src/python/doc/persistence_graphical_tools_user.rst | 11
-rw-r--r--  src/python/doc/representations.rst | 64
-rw-r--r--  src/python/doc/representations_sum.inc | 2
-rw-r--r--  src/python/doc/rips_complex_sum.inc | 22
-rw-r--r--  src/python/doc/rips_complex_user.rst | 6
-rw-r--r--  src/python/doc/wasserstein_distance_user.rst | 7
-rwxr-xr-x  src/python/example/alpha_complex_diagram_persistence_from_off_file_example.py | 25
-rwxr-xr-x  src/python/example/alpha_complex_from_points_example.py | 2
-rwxr-xr-x  src/python/example/diagram_vectorizations_distances_kernels.py | 19
-rwxr-xr-x  src/python/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py | 2
-rwxr-xr-x  src/python/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py | 2
-rwxr-xr-x  src/python/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py | 2
-rwxr-xr-x  src/python/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py | 2
-rwxr-xr-x  src/python/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py | 2
-rwxr-xr-x  src/python/example/rips_complex_diagram_persistence_from_off_file_example.py | 2
-rwxr-xr-x  src/python/example/rips_complex_edge_collapse_example.py | 62
-rwxr-xr-x  src/python/example/tangential_complex_plain_homology_from_off_file_example.py | 2
-rw-r--r--  src/python/gudhi/__init__.py.in | 4
-rw-r--r--  src/python/gudhi/persistence_graphical_tools.py | 22
-rw-r--r--  src/python/gudhi/point_cloud/knn.py | 2
-rw-r--r--  src/python/gudhi/representations/vector_methods.py | 162
-rw-r--r--  src/python/gudhi/rips_complex.pyx | 8
-rw-r--r--  src/python/gudhi/simplex_tree.pxd | 9
-rw-r--r--  src/python/gudhi/simplex_tree.pyx | 146
-rw-r--r--  src/python/gudhi/subsampling.pyx | 23
-rw-r--r--  src/python/gudhi/wasserstein/wasserstein.py | 8
-rw-r--r--  src/python/include/Alpha_complex_factory.h | 9
-rw-r--r--  src/python/include/Simplex_tree_interface.h | 47
-rw-r--r--  src/python/include/Subsampling_interface.h | 10
-rwxr-xr-x  src/python/test/test_alpha_complex.py | 15
-rwxr-xr-x  src/python/test/test_bottleneck_distance.py | 12
-rwxr-xr-x  src/python/test/test_representations.py | 61
-rwxr-xr-x  src/python/test/test_simplex_tree.py | 66
-rwxr-xr-x  src/python/test/test_subsampling.py | 16
-rwxr-xr-x  src/python/test/test_wasserstein_with_tensors.py | 47
42 files changed, 897 insertions, 290 deletions
diff --git a/src/python/CMakeLists.txt b/src/python/CMakeLists.txt
index 4f26481e..73303a24 100644
--- a/src/python/CMakeLists.txt
+++ b/src/python/CMakeLists.txt
@@ -103,6 +103,9 @@ if(PYTHONINTERP_FOUND)
if(EAGERPY_FOUND)
add_gudhi_debug_info("EagerPy version ${EAGERPY_VERSION}")
endif()
+ if(TENSORFLOW_FOUND)
+ add_gudhi_debug_info("TensorFlow version ${TENSORFLOW_VERSION}")
+ endif()
set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-DBOOST_RESULT_OF_USE_DECLTYPE', ")
set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-DBOOST_ALL_NO_LIB', ")
@@ -130,6 +133,10 @@ if(PYTHONINTERP_FOUND)
add_gudhi_debug_info("Eigen3 version ${EIGEN3_VERSION}")
# No problem, even if no CGAL found
set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_EIGEN3_ENABLED', ")
+ set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-DGUDHI_USE_EIGEN3', ")
+ set(GUDHI_USE_EIGEN3 "True")
+ else (EIGEN3_FOUND)
+ set(GUDHI_USE_EIGEN3 "False")
endif (EIGEN3_FOUND)
set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'off_reader', ")
@@ -269,6 +276,7 @@ if(PYTHONINTERP_FOUND)
install(CODE "execute_process(COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_BINARY_DIR}/setup.py install)")
+ set(GUDHI_PYTHON_PATH_ENV "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}:$ENV{PYTHONPATH}")
# Documentation generation is available through sphinx - requires all modules
# Make it first as sphinx test is by far the longest test which is nice when testing in parallel
if(SPHINX_PATH)
@@ -288,14 +296,14 @@ if(PYTHONINTERP_FOUND)
# sphinx target requires gudhi.so, because conf.py reads gudhi version from it
add_custom_target(sphinx
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/doc
- COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ COMMAND ${CMAKE_COMMAND} -E env "${GUDHI_PYTHON_PATH_ENV}"
${SPHINX_PATH} -b html ${CMAKE_CURRENT_SOURCE_DIR}/doc ${CMAKE_CURRENT_BINARY_DIR}/sphinx
DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/gudhi.so"
COMMENT "${GUDHI_SPHINX_MESSAGE}" VERBATIM)
add_test(NAME sphinx_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ COMMAND ${CMAKE_COMMAND} -E env "${GUDHI_PYTHON_PATH_ENV}"
${SPHINX_PATH} -b doctest ${CMAKE_CURRENT_SOURCE_DIR}/doc ${CMAKE_CURRENT_BINARY_DIR}/doctest)
# Set missing or not modules
@@ -339,32 +347,30 @@ if(PYTHONINTERP_FOUND)
# Bottleneck and Alpha
add_test(NAME alpha_rips_persistence_bottleneck_distance_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ COMMAND ${CMAKE_COMMAND} -E env "${GUDHI_PYTHON_PATH_ENV}"
${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/alpha_rips_persistence_bottleneck_distance.py"
-f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -t 0.15 -d 3)
- if(MATPLOTLIB_FOUND AND NUMPY_FOUND)
- # Tangential
- add_test(NAME tangential_complex_plain_homology_from_off_file_example_py_test
- WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
- ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/tangential_complex_plain_homology_from_off_file_example.py"
- --no-diagram -i 2 -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off)
-
- add_gudhi_py_test(test_tangential_complex)
-
- # Witness complex AND Subsampling
- add_test(NAME euclidean_strong_witness_complex_diagram_persistence_from_off_file_example_py_test
- WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
- ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py"
- --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -a 1.0 -n 20 -d 2)
-
- add_test(NAME euclidean_witness_complex_diagram_persistence_from_off_file_example_py_test
- WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
- ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py"
- --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -a 1.0 -n 20 -d 2)
- endif()
+ # Tangential
+ add_test(NAME tangential_complex_plain_homology_from_off_file_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "${GUDHI_PYTHON_PATH_ENV}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/tangential_complex_plain_homology_from_off_file_example.py"
+ --no-diagram -i 2 -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off)
+
+ add_gudhi_py_test(test_tangential_complex)
+
+ # Witness complex
+ add_test(NAME euclidean_strong_witness_complex_diagram_persistence_from_off_file_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "${GUDHI_PYTHON_PATH_ENV}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py"
+ --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -a 1.0 -n 20 -d 2)
+
+ add_test(NAME euclidean_witness_complex_diagram_persistence_from_off_file_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "${GUDHI_PYTHON_PATH_ENV}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py"
+ --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -a 1.0 -n 20 -d 2)
# Subsampling
add_gudhi_py_test(test_subsampling)
@@ -374,7 +380,7 @@ if(PYTHONINTERP_FOUND)
# Bottleneck
add_test(NAME bottleneck_basic_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ COMMAND ${CMAKE_COMMAND} -E env "${GUDHI_PYTHON_PATH_ENV}"
${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/bottleneck_basic_example.py")
if (PYBIND11_FOUND)
@@ -387,26 +393,26 @@ if(PYTHONINTERP_FOUND)
file(COPY ${CMAKE_SOURCE_DIR}/data/points/COIL_database/lucky_cat_PCA1 DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
add_test(NAME cover_complex_nerve_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ COMMAND ${CMAKE_COMMAND} -E env "${GUDHI_PYTHON_PATH_ENV}"
${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/nerve_of_a_covering.py"
-f human.off -c 2 -r 10 -g 0.3)
add_test(NAME cover_complex_coordinate_gic_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ COMMAND ${CMAKE_COMMAND} -E env "${GUDHI_PYTHON_PATH_ENV}"
${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/coordinate_graph_induced_complex.py"
-f human.off -c 0 -v)
add_test(NAME cover_complex_functional_gic_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ COMMAND ${CMAKE_COMMAND} -E env "${GUDHI_PYTHON_PATH_ENV}"
${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/functional_graph_induced_complex.py"
-o lucky_cat.off
-f lucky_cat_PCA1 -v)
add_test(NAME cover_complex_voronoi_gic_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ COMMAND ${CMAKE_COMMAND} -E env "${GUDHI_PYTHON_PATH_ENV}"
${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/voronoi_graph_induced_complex.py"
-f human.off -n 700 -v)
@@ -417,15 +423,13 @@ if(PYTHONINTERP_FOUND)
# Alpha
add_test(NAME alpha_complex_from_points_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ COMMAND ${CMAKE_COMMAND} -E env "${GUDHI_PYTHON_PATH_ENV}"
${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/alpha_complex_from_points_example.py")
- if(MATPLOTLIB_FOUND AND NUMPY_FOUND)
- add_test(NAME alpha_complex_diagram_persistence_from_off_file_example_py_test
- WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
- ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/alpha_complex_diagram_persistence_from_off_file_example.py"
- --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -a 0.6)
- endif()
+ add_test(NAME alpha_complex_diagram_persistence_from_off_file_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "${GUDHI_PYTHON_PATH_ENV}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/alpha_complex_diagram_persistence_from_off_file_example.py"
+ --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off)
add_gudhi_py_test(test_alpha_complex)
endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
@@ -438,38 +442,34 @@ if(PYTHONINTERP_FOUND)
# Cubical
add_test(NAME periodic_cubical_complex_barcode_persistence_from_perseus_file_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ COMMAND ${CMAKE_COMMAND} -E env "${GUDHI_PYTHON_PATH_ENV}"
${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py"
--no-barcode -f ${CMAKE_SOURCE_DIR}/data/bitmap/CubicalTwoSphere.txt)
- if(NUMPY_FOUND)
- add_test(NAME random_cubical_complex_persistence_example_py_test
- WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
- ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/random_cubical_complex_persistence_example.py"
- 10 10 10)
- endif()
+ add_test(NAME random_cubical_complex_persistence_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "${GUDHI_PYTHON_PATH_ENV}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/random_cubical_complex_persistence_example.py"
+ 10 10 10)
add_gudhi_py_test(test_cubical_complex)
# Rips
- if(MATPLOTLIB_FOUND AND NUMPY_FOUND)
- add_test(NAME rips_complex_diagram_persistence_from_distance_matrix_file_example_py_test
- WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
- ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py"
- --no-diagram -f ${CMAKE_SOURCE_DIR}/data/distance_matrix/lower_triangular_distance_matrix.csv -e 12.0 -d 3)
+ add_test(NAME rips_complex_diagram_persistence_from_distance_matrix_file_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "${GUDHI_PYTHON_PATH_ENV}"
+ ${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py"
+ --no-diagram -f ${CMAKE_SOURCE_DIR}/data/distance_matrix/lower_triangular_distance_matrix.csv -e 12.0 -d 3)
- add_test(NAME rips_complex_diagram_persistence_from_off_file_example_py_test
- WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
- ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/example/rips_complex_diagram_persistence_from_off_file_example.py
- --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -e 0.25 -d 3)
- endif()
+ add_test(NAME rips_complex_diagram_persistence_from_off_file_example_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "${GUDHI_PYTHON_PATH_ENV}"
+ ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/example/rips_complex_diagram_persistence_from_off_file_example.py
+ --no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -e 0.25 -d 3)
add_test(NAME rips_complex_from_points_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ COMMAND ${CMAKE_COMMAND} -E env "${GUDHI_PYTHON_PATH_ENV}"
${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/example/rips_complex_from_points_example.py)
add_gudhi_py_test(test_rips_complex)
@@ -477,7 +477,7 @@ if(PYTHONINTERP_FOUND)
# Simplex tree
add_test(NAME simplex_tree_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ COMMAND ${CMAKE_COMMAND} -E env "${GUDHI_PYTHON_PATH_ENV}"
${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/example/simplex_tree_example.py)
add_gudhi_py_test(test_simplex_tree)
@@ -486,7 +486,7 @@ if(PYTHONINTERP_FOUND)
# Witness
add_test(NAME witness_complex_from_nearest_landmark_table_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ COMMAND ${CMAKE_COMMAND} -E env "${GUDHI_PYTHON_PATH_ENV}"
${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/example/witness_complex_from_nearest_landmark_table.py)
add_gudhi_py_test(test_witness_complex)
@@ -496,14 +496,20 @@ if(PYTHONINTERP_FOUND)
# Wasserstein
if(OT_FOUND AND PYBIND11_FOUND)
- if(TORCH_FOUND AND EAGERPY_FOUND)
+ # EagerPy dependency because of enable_autodiff=True
+ if(EAGERPY_FOUND)
add_gudhi_py_test(test_wasserstein_distance)
endif()
add_gudhi_py_test(test_wasserstein_barycenter)
endif()
+ if(OT_FOUND)
+ if(TORCH_FOUND AND TENSORFLOW_FOUND AND EAGERPY_FOUND)
+ add_gudhi_py_test(test_wasserstein_with_tensors)
+ endif()
+ endif()
# Representations
- if(SKLEARN_FOUND AND MATPLOTLIB_FOUND)
+ if(SKLEARN_FOUND AND MATPLOTLIB_FOUND AND OT_FOUND AND NOT CGAL_VERSION VERSION_LESS 4.11.0)
add_gudhi_py_test(test_representations)
endif()
diff --git a/src/python/doc/_templates/layout.html b/src/python/doc/_templates/layout.html
index a672a281..cd40a51b 100644
--- a/src/python/doc/_templates/layout.html
+++ b/src/python/doc/_templates/layout.html
@@ -175,58 +175,59 @@
<h1 class="show-for-small-only"><a href="" class="icon-tree"> GUDHI library</a></h1>
</li>
<!-- Remove the class "menu-icon" to get rid of menu icon. Take out "Menu" to just have icon alone -->
- <li class="toggle-topbar menu-icon"><a href="#"><span>Navigation</span></a></li>
+ <li class="toggle-topbar menu-icon"><a href="#"><span>Nav</span></a></li>
</ul>
<section class="top-bar-section">
<ul class="right">
<li class="divider"></li>
- <li><a href="/contact/">Contact</a></li>
+ <li><a href="/contact/">Contact</a></li>
</ul>
<ul class="left">
- <li><a href="/"> <img src="/assets/img/home.png" alt="&nbsp;&nbsp;GUDHI">&nbsp;&nbsp;GUDHI </a></li>
+ <li><a href="/"> <img src="/assets/img/home.png" alt=" GUDHI"> GUDHI </a></li>
<li class="divider"></li>
<li class="has-dropdown">
- <a href="#">Project</a>
+ <a href="#">Project</a>
<ul class="dropdown">
- <li><a href="/people/">People</a></li>
- <li><a href="/keepintouch/">Keep in touch</a></li>
- <li><a href="/partners/">Partners and Funding</a></li>
- <li><a href="/relatedprojects/">Related projects</a></li>
- <li><a href="/theyaretalkingaboutus/">They are talking about us</a></li>
- <li><a href="/inaction/">GUDHI in action</a></li>
+ <li><a href="/people/">People</a></li>
+ <li><a href="/keepintouch/">Keep in touch</a></li>
+ <li><a href="/partners/">Partners and Funding</a></li>
+ <li><a href="/relatedprojects/">Related projects</a></li>
+ <li><a href="/theyaretalkingaboutus/">They are talking about us</a></li>
+ <li><a href="/inaction/">GUDHI in action</a></li>
</ul>
</li>
<li class="divider"></li>
<li class="has-dropdown">
- <a href="#">Download</a>
+ <a href="#">Download</a>
<ul class="dropdown">
- <li><a href="/licensing/">Licensing</a></li>
- <li><a href="https://github.com/GUDHI/gudhi-devel/releases/latest" target="_blank">Get the latest sources</a></li>
- <li><a href="/conda/">Conda package</a></li>
- <li><a href="/dockerfile/">Dockerfile</a></li>
+ <li><a href="/licensing/">Licensing</a></li>
+ <li><a href="https://github.com/GUDHI/gudhi-devel/releases/latest" target="_blank">Get the latest sources</a></li>
+ <li><a href="/conda/">Conda package</a></li>
+ <li><a href="https://pypi.org/project/gudhi/" target="_blank">Pip package</a></li>
+ <li><a href="/dockerfile/">Dockerfile</a></li>
</ul>
</li>
<li class="divider"></li>
<li class="has-dropdown">
- <a href="#">Documentation</a>
+ <a href="#">Documentation</a>
<ul class="dropdown">
- <li><a href="/introduction/">Introduction</a></li>
- <li><a href="https://gudhi.inria.fr/doc/latest/installation.html">C++ installation manual</a></li>
- <li><a href="https://gudhi.inria.fr/doc/latest/">C++ documentation</a></li>
- <li><a href="https://gudhi.inria.fr/python/latest/installation.html">Python installation manual</a></li>
- <li><a href="https://gudhi.inria.fr/python/latest/">Python documentation</a></li>
- <li><a href="/utils/">Utilities</a></li>
- <li><a href="/tutorials/">Tutorials</a></li>
+ <li><a href="/introduction/">Introduction</a></li>
+ <li><a href="/doc/latest/installation.html">C++ installation manual</a></li>
+ <li><a href="/doc/latest/">C++ documentation</a></li>
+ <li><a href="/python/latest/installation.html">Python installation manual</a></li>
+ <li><a href="/python/latest/">Python documentation</a></li>
+ <li><a href="/utils/">Utilities</a></li>
+ <li><a href="/tutorials/">Tutorials</a></li>
</ul>
</li>
<li class="divider"></li>
- <li><a href="/interfaces/">Interfaces</a></li>
+ <li><a href="/interfaces/">Interfaces</a></li>
<li class="divider"></li>
</ul>
</section>
</nav>
- </div><!-- /#navigation -->
- <!-- GUDHI website header BEGIN -->
+ </div><!-- /#navigation -->
+ <!-- GUDHI website header END -->
{%- block header %}{% endblock %}
diff --git a/src/python/doc/bottleneck_distance_user.rst b/src/python/doc/bottleneck_distance_user.rst
index 6c6e08d9..7baa76cc 100644
--- a/src/python/doc/bottleneck_distance_user.rst
+++ b/src/python/doc/bottleneck_distance_user.rst
@@ -47,7 +47,7 @@ The following example explains how the distance is computed:
:figclass: align-center
The point (0, 13) is at distance 6.5 from the diagonal and more
- specifically from the point (6.5, 6.5)
+ specifically from the point (6.5, 6.5).
Basic example
@@ -72,6 +72,6 @@ The output is:
.. testoutput::
- Bottleneck distance approximation = 0.81
+ Bottleneck distance approximation = 0.72
Bottleneck distance value = 0.75
diff --git a/src/python/doc/conf.py b/src/python/doc/conf.py
index 3cc5d1d6..b06baf9c 100755
--- a/src/python/doc/conf.py
+++ b/src/python/doc/conf.py
@@ -44,6 +44,8 @@ extensions = [
'sphinx_paramlinks',
]
+bibtex_bibfiles = ['../../biblio/bibliography.bib']
+
todo_include_todos = True
# plot option : do not show hyperlinks (Source code, png, hires.png, pdf)
plot_html_show_source_link = False
diff --git a/src/python/doc/cubical_complex_user.rst b/src/python/doc/cubical_complex_user.rst
index 3fd4e27a..6a211347 100644
--- a/src/python/doc/cubical_complex_user.rst
+++ b/src/python/doc/cubical_complex_user.rst
@@ -47,8 +47,8 @@ be a set of two elements).
For further details and theory of cubical complexes, please consult :cite:`kaczynski2004computational` as well as the
following paper :cite:`peikert2012topological`.
-Data structure.
----------------
+Data structure
+--------------
The implementation of Cubical complex provides a representation of complexes that occupy a rectangular region in
:math:`\mathbb{R}^n`. This extra assumption allows for a memory efficient way of storing cubical complexes in a form
@@ -77,8 +77,8 @@ Knowing the sizes of the bitmap, by a series of modulo operation, we can determi
present in the product that gives the cube :math:`C`. In a similar way, we can compute boundary and the coboundary of
each cube. Further details can be found in the literature.
-Input Format.
--------------
+Input Format
+------------
In the current implantation, filtration is given at the maximal cubes, and it is then extended by the lower star
filtration to all cubes. There are a number of constructors that can be used to construct cubical complex by users
@@ -108,8 +108,8 @@ the program output is:
Cubical complex is of dimension 2 - 49 simplices.
-Periodic boundary conditions.
------------------------------
+Periodic boundary conditions
+----------------------------
Often one would like to impose periodic boundary conditions to the cubical complex (cf.
:doc:`periodic_cubical_complex_ref`).
@@ -154,7 +154,13 @@ the program output is:
Periodic cubical complex is of dimension 2 - 42 simplices.
-Examples.
----------
+Examples
+--------
End user programs are available in python/example/ folder.
+
+Tutorial
+--------
+
+This `notebook <https://github.com/GUDHI/TDA-tutorial/blob/master/Tuto-GUDHI-cubical-complexes.ipynb>`_
+explains how to represent sublevels sets of functions using cubical complexes.
\ No newline at end of file
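
For reference, a minimal standalone sketch of the cubical-complex workflow this manual section documents; it is not part of the patch, and the 3x3 filtration values below are arbitrary:

    import gudhi

    # Arbitrary 3x3 grid of filtration values given on the top-dimensional cells.
    cubical = gudhi.CubicalComplex(dimensions=[3, 3],
                                   top_dimensional_cells=[0., 1., 2.,
                                                          3., 4., 5.,
                                                          6., 7., 8.])
    print("Cubical complex is of dimension", cubical.dimension(),
          "-", cubical.num_simplices(), "simplices.")
    # Lower-star filtration persistence of the bitmap.
    print(cubical.persistence())
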
diff --git a/src/python/doc/examples.rst b/src/python/doc/examples.rst
index a42227e3..76e5d4c7 100644
--- a/src/python/doc/examples.rst
+++ b/src/python/doc/examples.rst
@@ -7,27 +7,30 @@ Examples
.. only:: builder_html
- * :download:`rips_complex_from_points_example.py <../example/rips_complex_from_points_example.py>`
+ * :download:`alpha_complex_diagram_persistence_from_off_file_example.py <../example/alpha_complex_diagram_persistence_from_off_file_example.py>`
* :download:`alpha_complex_from_points_example.py <../example/alpha_complex_from_points_example.py>`
- * :download:`simplex_tree_example.py <../example/simplex_tree_example.py>`
* :download:`alpha_rips_persistence_bottleneck_distance.py <../example/alpha_rips_persistence_bottleneck_distance.py>`
- * :download:`tangential_complex_plain_homology_from_off_file_example.py <../example/tangential_complex_plain_homology_from_off_file_example.py>`
- * :download:`alpha_complex_diagram_persistence_from_off_file_example.py <../example/alpha_complex_diagram_persistence_from_off_file_example.py>`
- * :download:`periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py <../example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py>`
* :download:`bottleneck_basic_example.py <../example/bottleneck_basic_example.py>`
- * :download:`gudhi_graphical_tools_example.py <../example/gudhi_graphical_tools_example.py>`
- * :download:`plot_simplex_tree_dim012.py <../example/plot_simplex_tree_dim012.py>`
- * :download:`plot_rips_complex.py <../example/plot_rips_complex.py>`
- * :download:`plot_alpha_complex.py <../example/plot_alpha_complex.py>`
- * :download:`witness_complex_from_nearest_landmark_table.py <../example/witness_complex_from_nearest_landmark_table.py>`
+ * :download:`coordinate_graph_induced_complex.py <../example/coordinate_graph_induced_complex.py>`
+ * :download:`diagram_vectorizations_distances_kernels.py <../example/diagram_vectorizations_distances_kernels.py>`
* :download:`euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py>`
* :download:`euclidean_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py>`
- * :download:`rips_complex_diagram_persistence_from_off_file_example.py <../example/rips_complex_diagram_persistence_from_off_file_example.py>`
+ * :download:`functional_graph_induced_complex.py <../example/functional_graph_induced_complex.py>`
+ * :download:`gudhi_graphical_tools_example.py <../example/gudhi_graphical_tools_example.py>`
+ * :download:`nerve_of_a_covering.py <../example/nerve_of_a_covering.py>`
+ * :download:`periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py <../example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py>`
+ * :download:`plot_alpha_complex.py <../example/plot_alpha_complex.py>`
+ * :download:`plot_rips_complex.py <../example/plot_rips_complex.py>`
+ * :download:`plot_simplex_tree_dim012.py <../example/plot_simplex_tree_dim012.py>`
+ * :download:`random_cubical_complex_persistence_example.py <../example/random_cubical_complex_persistence_example.py>`
+ * :download:`rips_complex_diagram_persistence_from_correlation_matrix_file_example.py <../example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py>`
* :download:`rips_complex_diagram_persistence_from_distance_matrix_file_example.py <../example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py>`
+ * :download:`rips_complex_diagram_persistence_from_off_file_example.py <../example/rips_complex_diagram_persistence_from_off_file_example.py>`
+ * :download:`rips_complex_edge_collapse_example.py <../example/rips_complex_edge_collapse_example.py>`
+ * :download:`rips_complex_from_points_example.py <../example/rips_complex_from_points_example.py>`
* :download:`rips_persistence_diagram.py <../example/rips_persistence_diagram.py>`
+ * :download:`simplex_tree_example.py <../example/simplex_tree_example.py>`
* :download:`sparse_rips_persistence_diagram.py <../example/sparse_rips_persistence_diagram.py>`
- * :download:`random_cubical_complex_persistence_example.py <../example/random_cubical_complex_persistence_example.py>`
- * :download:`coordinate_graph_induced_complex.py <../example/coordinate_graph_induced_complex.py>`
- * :download:`functional_graph_induced_complex.py <../example/functional_graph_induced_complex.py>`
+ * :download:`tangential_complex_plain_homology_from_off_file_example.py <../example/tangential_complex_plain_homology_from_off_file_example.py>`
* :download:`voronoi_graph_induced_complex.py <../example/voronoi_graph_induced_complex.py>`
- * :download:`nerve_of_a_covering.py <../example/nerve_of_a_covering.py>`
+ * :download:`witness_complex_from_nearest_landmark_table.py <../example/witness_complex_from_nearest_landmark_table.py>`
diff --git a/src/python/doc/installation.rst b/src/python/doc/installation.rst
index 525ca84e..66efe45a 100644
--- a/src/python/doc/installation.rst
+++ b/src/python/doc/installation.rst
@@ -40,7 +40,7 @@ different, and in particular the `python/` subdirectory is actually `src/python/
there.
The library uses c++14 and requires `Boost <https://www.boost.org/>`_ :math:`\geq` 1.56.0,
-`CMake <https://www.cmake.org/>`_ :math:`\geq` 3.1 to generate makefiles,
+`CMake <https://www.cmake.org/>`_ :math:`\geq` 3.5 to generate makefiles,
`NumPy <http://numpy.org>`_, `Cython <https://www.cython.org/>`_ and
`pybind11 <https://github.com/pybind/pybind11>`_ to compile
the GUDHI Python module.
@@ -65,7 +65,7 @@ one can build the GUDHI Python module, by running the following commands in a te
cd /path-to-gudhi/
mkdir build
cd build/
- cmake ..
+ cmake -DCMAKE_BUILD_TYPE=Release ..
cd python
make
@@ -323,6 +323,35 @@ The following examples require the `Matplotlib <http://matplotlib.org>`_:
* :download:`euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py>`
* :download:`euclidean_witness_complex_diagram_persistence_from_off_file_example.py <../example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py>`
+LaTeX
+~~~~~
+
+If a sufficiently complete LaTeX toolchain is available (including dvipng and ghostscript), the LaTeX option of
+matplotlib is enabled for prettier captions (cf.
+`matplotlib text rendering with LaTeX <https://matplotlib.org/3.3.0/tutorials/text/usetex.html>`_).
+It also requires `type1cm` LaTeX package (not detected by matplotlib).
+
+If you are facing issues with LaTeX rendering, like this one:
+
+.. code-block:: none
+
+ Traceback (most recent call last):
+ File "/usr/lib/python3/dist-packages/matplotlib/texmanager.py", line 302, in _run_checked_subprocess
+ report = subprocess.check_output(command,
+ ...
+ ! LaTeX Error: File `type1cm.sty' not found.
+ ...
+
+This is because the LaTeX package is not installed on your system. On Ubuntu systems you can install texlive-full
+(for all LaTeX packages), or more specific packages like texlive-latex-extra, cm-super.
+
+You can still deactivate LaTeX rendering by saying:
+
+.. code-block:: python
+
+ import gudhi
+ gudhi.persistence_graphical_tools._gudhi_matplotlib_use_tex=False
+
PyKeOps
-------
@@ -365,6 +394,11 @@ mathematics, science, and engineering.
:class:`~gudhi.point_cloud.knn.KNearestNeighbors` can use the Python package
`SciPy <http://scipy.org>`_ as a backend if explicitly requested.
+TensorFlow
+----------
+
+`TensorFlow <https://www.tensorflow.org>`_ is currently only used in some automatic differentiation tests.
+
Bug reports and contributions
*****************************
diff --git a/src/python/doc/persistence_graphical_tools_user.rst b/src/python/doc/persistence_graphical_tools_user.rst
index b5a38eb1..d95b9d2b 100644
--- a/src/python/doc/persistence_graphical_tools_user.rst
+++ b/src/python/doc/persistence_graphical_tools_user.rst
@@ -90,3 +90,14 @@ If you want more information on a specific dimension, for instance:
gudhi.plot_persistence_density(persistence=pers_diag,
dimension=1, legend=True, axes=axes[1])
plt.show()
+
+LaTeX support
+-------------
+
+If you are facing issues with `LaTeX <installation.html#latex>`_ rendering, you can still deactivate LaTeX rendering by
+saying:
+
+.. code-block:: python
+
+ import gudhi
+ gudhi.persistence_graphical_tools._gudhi_matplotlib_use_tex=False
diff --git a/src/python/doc/representations.rst b/src/python/doc/representations.rst
index 041e3247..b0477197 100644
--- a/src/python/doc/representations.rst
+++ b/src/python/doc/representations.rst
@@ -12,11 +12,45 @@ This module, originally available at https://github.com/MathieuCarriere/sklearn-
A diagram is represented as a numpy array of shape (n,2), as can be obtained from :func:`~gudhi.SimplexTree.persistence_intervals_in_dimension` for instance. Points at infinity are represented as a numpy array of shape (n,1), storing only the birth time. The classes in this module can handle several persistence diagrams at once. In that case, the diagrams are provided as a list of numpy arrays. Note that it is not necessary for the diagrams to have the same number of points, i.e., for the corresponding arrays to have the same number of rows: all classes can handle arrays with different shapes.
-A small example is provided
+Examples
+--------
-.. only:: builder_html
+Landscapes
+^^^^^^^^^^
- * :download:`diagram_vectorizations_distances_kernels.py <../example/diagram_vectorizations_distances_kernels.py>`
+This example computes the first two Landscapes associated to a persistence diagram with four points. The landscapes are evaluated on ten samples, leading to two vectors with ten coordinates each, that are eventually concatenated in order to produce a single vector representation.
+
+.. testcode::
+
+ import numpy as np
+ from gudhi.representations import Landscape
+ # A single diagram with 4 points
+ D = np.array([[0.,4.],[1.,2.],[3.,8.],[6.,8.]])
+ diags = [D]
+ l=Landscape(num_landscapes=2,resolution=10).fit_transform(diags)
+ print(l)
+
+The output is:
+
+.. testoutput::
+
+ [[1.02851895 2.05703791 2.57129739 1.54277843 0.89995409 1.92847304
+ 2.95699199 3.08555686 2.05703791 1.02851895 0. 0.64282435
+ 0. 0. 0.51425948 0. 0. 0.
+ 0.77138922 1.02851895]]
+
+Various kernels
+^^^^^^^^^^^^^^^
+
+This small example is also provided
+:download:`diagram_vectorizations_distances_kernels.py <../example/diagram_vectorizations_distances_kernels.py>`
+
+Machine Learning and Topological Data Analysis
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+This `notebook <https://github.com/GUDHI/TDA-tutorial/blob/master/Tuto-GUDHI-representations.ipynb>`_ explains how to
+efficiently combine machine learning and topological data analysis with the
+:doc:`representations module<representations>`.
Preprocessing
@@ -46,27 +80,3 @@ Metrics
:members:
:special-members:
:show-inheritance:
-
-Basic example
--------------
-
-This example computes the first two Landscapes associated to a persistence diagram with four points. The landscapes are evaluated on ten samples, leading to two vectors with ten coordinates each, that are eventually concatenated in order to produce a single vector representation.
-
-.. testcode::
-
- import numpy as np
- from gudhi.representations import Landscape
- # A single diagram with 4 points
- D = np.array([[0.,4.],[1.,2.],[3.,8.],[6.,8.]])
- diags = [D]
- l=Landscape(num_landscapes=2,resolution=10).fit_transform(diags)
- print(l)
-
-The output is:
-
-.. testoutput::
-
- [[1.02851895 2.05703791 2.57129739 1.54277843 0.89995409 1.92847304
- 2.95699199 3.08555686 2.05703791 1.02851895 0. 0.64282435
- 0. 0. 0.51425948 0. 0. 0.
- 0.77138922 1.02851895]]
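
For reference, a minimal standalone sketch (not part of the patch) showing how these scikit-learn-compatible classes can be chained in a Pipeline; the diagram and parameter values below are arbitrary:

    import numpy as np
    from sklearn.pipeline import Pipeline
    from gudhi.representations import DiagramSelector, Landscape

    # One arbitrary diagram: four finite points plus one essential (infinite) point.
    D = np.array([[0., 4.], [1., 2.], [3., 8.], [6., 8.], [0., np.inf]])
    diags = [D]

    pipe = Pipeline([
        ("finite", DiagramSelector(use=True, point_type="finite")),  # drop points at infinity
        ("landscape", Landscape(num_landscapes=2, resolution=10)),   # vectorize each diagram
    ])
    print(pipe.fit_transform(diags))
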
diff --git a/src/python/doc/representations_sum.inc b/src/python/doc/representations_sum.inc
index 323a0920..4298aea9 100644
--- a/src/python/doc/representations_sum.inc
+++ b/src/python/doc/representations_sum.inc
@@ -2,7 +2,7 @@
:widths: 30 40 30
+------------------------------------------------------------------+----------------------------------------------------------------+-------------------------------------------------------------+
- | .. figure:: | Vectorizations, distances and kernels that work on persistence | :Author: Mathieu Carrière |
+ | .. figure:: | Vectorizations, distances and kernels that work on persistence | :Author: Mathieu Carrière, Martin Royer |
| img/sklearn-tda.png | diagrams, compatible with scikit-learn. | |
| | | :Since: GUDHI 3.1.0 |
| | | |
diff --git a/src/python/doc/rips_complex_sum.inc b/src/python/doc/rips_complex_sum.inc
index c123ea2a..2cb24990 100644
--- a/src/python/doc/rips_complex_sum.inc
+++ b/src/python/doc/rips_complex_sum.inc
@@ -1,14 +1,14 @@
.. table::
:widths: 30 40 30
- +----------------------------------------------------------------+------------------------------------------------------------------------+----------------------------------------------------------------------+
- | .. figure:: | The Vietoris-Rips complex is a simplicial complex built as the | :Authors: Clément Maria, Pawel Dlotko, Vincent Rouvreau, Marc Glisse |
- | ../../doc/Rips_complex/rips_complex_representation.png | clique-complex of a proximity graph. | |
- | :figclass: align-center | | :Since: GUDHI 2.0.0 |
- | | We also provide sparse approximations, to speed-up the computation | |
- | | of persistent homology, and weighted versions, which are more robust | :License: MIT |
- | | to outliers. | |
- | | | |
- +----------------------------------------------------------------+------------------------------------------------------------------------+----------------------------------------------------------------------+
- | * :doc:`rips_complex_user` | * :doc:`rips_complex_ref` |
- +----------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------+
+ +----------------------------------------------------------------+------------------------------------------------------------------------+----------------------------------------------------------------------------------+
+ | .. figure:: | The Vietoris-Rips complex is a simplicial complex built as the | :Authors: Clément Maria, Pawel Dlotko, Vincent Rouvreau, Marc Glisse, Yuichi Ike |
+ | ../../doc/Rips_complex/rips_complex_representation.png | clique-complex of a proximity graph. | |
+ | :figclass: align-center | | :Since: GUDHI 2.0.0 |
+ | | We also provide sparse approximations, to speed-up the computation | |
+ | | of persistent homology, and weighted versions, which are more robust | :License: MIT |
+ | | to outliers. | |
+ | | | |
+ +----------------------------------------------------------------+------------------------------------------------------------------------+----------------------------------------------------------------------------------+
+ | * :doc:`rips_complex_user` | * :doc:`rips_complex_ref` |
+ +----------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------+
diff --git a/src/python/doc/rips_complex_user.rst b/src/python/doc/rips_complex_user.rst
index 6048cc4e..27d218d4 100644
--- a/src/python/doc/rips_complex_user.rst
+++ b/src/python/doc/rips_complex_user.rst
@@ -7,9 +7,9 @@ Rips complex user manual
Definition
----------
-==================================================================== ================================ ======================
-:Authors: Clément Maria, Pawel Dlotko, Vincent Rouvreau, Marc Glisse :Since: GUDHI 2.0.0 :License: GPL v3
-==================================================================== ================================ ======================
+================================================================================ ================================ ======================
+:Authors: Clément Maria, Pawel Dlotko, Vincent Rouvreau, Marc Glisse, Yuichi Ike :Since: GUDHI 2.0.0 :License: GPL v3
+================================================================================ ================================ ======================
+-------------------------------------------+----------------------------------------------------------------------+
| :doc:`rips_complex_user` | :doc:`rips_complex_ref` |
diff --git a/src/python/doc/wasserstein_distance_user.rst b/src/python/doc/wasserstein_distance_user.rst
index d747344b..b3d17495 100644
--- a/src/python/doc/wasserstein_distance_user.rst
+++ b/src/python/doc/wasserstein_distance_user.rst
@@ -191,3 +191,10 @@ The output is:
[[0.27916667 0.55416667]
[0.7375 0.7625 ]
[0.2375 0.2625 ]]
+
+Tutorial
+********
+
+This
+`notebook <https://github.com/GUDHI/TDA-tutorial/blob/master/Tuto-GUDHI-Barycenters-of-persistence-diagrams.ipynb>`_
+presents the concept of barycenter, or Fréchet mean, of a family of persistence diagrams.
\ No newline at end of file
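
For reference, a minimal standalone sketch of the distance computation this user page documents; it is not part of the patch, assumes the POT backend is installed, and uses arbitrary diagram values:

    import numpy as np
    from gudhi.wasserstein import wasserstein_distance

    # Two arbitrary persistence diagrams as (n, 2) arrays of (birth, death) pairs.
    dgm1 = np.array([[2.7, 3.7], [9.6, 14.], [34.2, 34.974]])
    dgm2 = np.array([[2.8, 4.45], [9.5, 14.1]])

    # 1-Wasserstein distance with Euclidean ground metric (internal_p=2).
    print(wasserstein_distance(dgm1, dgm2, order=1., internal_p=2.))
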
diff --git a/src/python/example/alpha_complex_diagram_persistence_from_off_file_example.py b/src/python/example/alpha_complex_diagram_persistence_from_off_file_example.py
index 727af4fa..fe03be31 100755
--- a/src/python/example/alpha_complex_diagram_persistence_from_off_file_example.py
+++ b/src/python/example/alpha_complex_diagram_persistence_from_off_file_example.py
@@ -3,7 +3,6 @@
import argparse
import errno
import os
-import matplotlib.pyplot as plot
import gudhi
""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ -
@@ -26,12 +25,12 @@ parser = argparse.ArgumentParser(
description="AlphaComplex creation from " "points read in a OFF file.",
epilog="Example: "
"example/alpha_complex_diagram_persistence_from_off_file_example.py "
- "-f ../data/points/tore3D_300.off -a 0.6"
+ "-f ../data/points/tore3D_300.off"
"- Constructs a alpha complex with the "
"points from the given OFF file.",
)
parser.add_argument("-f", "--file", type=str, required=True)
-parser.add_argument("-a", "--max_alpha_square", type=float, default=0.5)
+parser.add_argument("-a", "--max_alpha_square", type=float, required=False)
parser.add_argument("-b", "--band", type=float, default=0.0)
parser.add_argument(
"--no-diagram",
@@ -48,23 +47,23 @@ with open(args.file, "r") as f:
print("##############################################################")
print("AlphaComplex creation from points read in a OFF file")
- message = "AlphaComplex with max_edge_length=" + repr(args.max_alpha_square)
- print(message)
-
alpha_complex = gudhi.AlphaComplex(off_file=args.file)
- simplex_tree = alpha_complex.create_simplex_tree(
- max_alpha_square=args.max_alpha_square
- )
+ if args.max_alpha_square is not None:
+ print("with max_edge_length=", args.max_alpha_square)
+ simplex_tree = alpha_complex.create_simplex_tree(
+ max_alpha_square=args.max_alpha_square
+ )
+ else:
+ simplex_tree = alpha_complex.create_simplex_tree()
- message = "Number of simplices=" + repr(simplex_tree.num_simplices())
- print(message)
+ print("Number of simplices=", simplex_tree.num_simplices())
diag = simplex_tree.persistence()
- print("betti_numbers()=")
- print(simplex_tree.betti_numbers())
+ print("betti_numbers()=", simplex_tree.betti_numbers())
if args.no_diagram == False:
+ import matplotlib.pyplot as plot
gudhi.plot_persistence_diagram(diag, band=args.band)
plot.show()
else:
diff --git a/src/python/example/alpha_complex_from_points_example.py b/src/python/example/alpha_complex_from_points_example.py
index 465632eb..5d5ca66a 100755
--- a/src/python/example/alpha_complex_from_points_example.py
+++ b/src/python/example/alpha_complex_from_points_example.py
@@ -19,7 +19,7 @@ __license__ = "MIT"
print("#####################################################################")
print("AlphaComplex creation from points")
alpha_complex = AlphaComplex(points=[[0, 0], [1, 0], [0, 1], [1, 1]])
-simplex_tree = alpha_complex.create_simplex_tree(max_alpha_square=60.0)
+simplex_tree = alpha_complex.create_simplex_tree()
if simplex_tree.find([0, 1]):
print("[0, 1] Found !!")
diff --git a/src/python/example/diagram_vectorizations_distances_kernels.py b/src/python/example/diagram_vectorizations_distances_kernels.py
index c4a71a7a..2801576e 100755
--- a/src/python/example/diagram_vectorizations_distances_kernels.py
+++ b/src/python/example/diagram_vectorizations_distances_kernels.py
@@ -5,11 +5,11 @@ import numpy as np
from sklearn.kernel_approximation import RBFSampler
from sklearn.preprocessing import MinMaxScaler
-from gudhi.representations import DiagramSelector, Clamping, Landscape, Silhouette, BettiCurve, ComplexPolynomial,\
+from gudhi.representations import (DiagramSelector, Clamping, Landscape, Silhouette, BettiCurve, ComplexPolynomial,\
TopologicalVector, DiagramScaler, BirthPersistenceTransform,\
PersistenceImage, PersistenceWeightedGaussianKernel, Entropy, \
PersistenceScaleSpaceKernel, SlicedWassersteinDistance,\
- SlicedWassersteinKernel, BottleneckDistance, PersistenceFisherKernel, WassersteinDistance
+ SlicedWassersteinKernel, PersistenceFisherKernel, WassersteinDistance)
D1 = np.array([[0.,4.],[1.,2.],[3.,8.],[6.,8.], [0., np.inf], [5., np.inf]])
@@ -93,14 +93,21 @@ print("SW distance is " + str(sW(D1, D2)))
SW = SlicedWassersteinKernel(num_directions=100, bandwidth=1.)
print("SW kernel is " + str(SW(D1, D2)))
-W = WassersteinDistance(order=2, internal_p=2, mode="pot")
-print("Wasserstein distance (POT) is " + str(W(D1, D2)))
+try:
+ W = WassersteinDistance(order=2, internal_p=2, mode="pot")
+ print("Wasserstein distance (POT) is " + str(W(D1, D2)))
+except ImportError:
+ print("WassersteinDistance (POT) is not available, you may be missing pot.")
W = WassersteinDistance(order=2, internal_p=2, mode="hera", delta=0.0001)
print("Wasserstein distance (hera) is " + str(W(D1, D2)))
-W = BottleneckDistance(epsilon=.001)
-print("Bottleneck distance is " + str(W(D1, D2)))
+try:
+ from gudhi.representations import BottleneckDistance
+ W = BottleneckDistance(epsilon=.001)
+ print("Bottleneck distance is " + str(W(D1, D2)))
+except ImportError:
+ print("BottleneckDistance is not available, you may be missing CGAL.")
PF = PersistenceFisherKernel(bandwidth_fisher=1., bandwidth=1.)
print("PF kernel is " + str(PF(D1, D2)))
diff --git a/src/python/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py b/src/python/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py
index e1e572df..4e97cfe3 100755
--- a/src/python/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py
+++ b/src/python/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py
@@ -3,7 +3,6 @@
import argparse
import errno
import os
-import matplotlib.pyplot as plot
import gudhi
""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ -
@@ -82,6 +81,7 @@ with open(args.file, "r") as f:
print(simplex_tree.betti_numbers())
if args.no_diagram == False:
+ import matplotlib.pyplot as plot
gudhi.plot_persistence_diagram(diag, band=args.band)
plot.show()
else:
diff --git a/src/python/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py b/src/python/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py
index 58cb2bb5..29076c74 100755
--- a/src/python/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py
+++ b/src/python/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py
@@ -3,7 +3,6 @@
import argparse
import errno
import os
-import matplotlib.pyplot as plot
import gudhi
""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ -
@@ -79,6 +78,7 @@ with open(args.file, "r") as f:
print(simplex_tree.betti_numbers())
if args.no_diagram == False:
+ import matplotlib.pyplot as plot
gudhi.plot_persistence_diagram(diag, band=args.band)
plot.show()
else:
diff --git a/src/python/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py b/src/python/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py
index 499171df..ee3290c6 100755
--- a/src/python/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py
+++ b/src/python/example/periodic_cubical_complex_barcode_persistence_from_perseus_file_example.py
@@ -1,7 +1,6 @@
#!/usr/bin/env python
import argparse
-import matplotlib.pyplot as plot
import errno
import os
import gudhi
@@ -75,6 +74,7 @@ if is_file_perseus(args.file):
print("betti_numbers()=")
print(periodic_cubical_complex.betti_numbers())
if args.no_barcode == False:
+ import matplotlib.pyplot as plot
gudhi.plot_persistence_barcode(diag)
plot.show()
else:
diff --git a/src/python/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py b/src/python/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py
index 1acb187c..ea2eb7e1 100755
--- a/src/python/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py
+++ b/src/python/example/rips_complex_diagram_persistence_from_correlation_matrix_file_example.py
@@ -2,7 +2,6 @@
import sys
import argparse
-import matplotlib.pyplot as plot
import gudhi
""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
@@ -84,5 +83,6 @@ invert_diag = [
]
if args.no_diagram == False:
+ import matplotlib.pyplot as plot
gudhi.plot_persistence_diagram(invert_diag, band=args.band)
plot.show()
diff --git a/src/python/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py b/src/python/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py
index 79ccca96..236d085d 100755
--- a/src/python/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py
+++ b/src/python/example/rips_complex_diagram_persistence_from_distance_matrix_file_example.py
@@ -1,7 +1,6 @@
#!/usr/bin/env python
import argparse
-import matplotlib.pyplot as plot
import gudhi
""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
@@ -60,5 +59,6 @@ print("betti_numbers()=")
print(simplex_tree.betti_numbers())
if args.no_diagram == False:
+ import matplotlib.pyplot as plot
gudhi.plot_persistence_diagram(diag, band=args.band)
plot.show()
diff --git a/src/python/example/rips_complex_diagram_persistence_from_off_file_example.py b/src/python/example/rips_complex_diagram_persistence_from_off_file_example.py
index 6f992508..e80233a9 100755
--- a/src/python/example/rips_complex_diagram_persistence_from_off_file_example.py
+++ b/src/python/example/rips_complex_diagram_persistence_from_off_file_example.py
@@ -3,7 +3,6 @@
import argparse
import errno
import os
-import matplotlib.pyplot as plot
import gudhi
""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ -
@@ -70,6 +69,7 @@ with open(args.file, "r") as f:
print(simplex_tree.betti_numbers())
if args.no_diagram == False:
+ import matplotlib.pyplot as plot
gudhi.plot_persistence_diagram(diag, band=args.band)
plot.show()
else:
diff --git a/src/python/example/rips_complex_edge_collapse_example.py b/src/python/example/rips_complex_edge_collapse_example.py
new file mode 100755
index 00000000..b26eb9fc
--- /dev/null
+++ b/src/python/example/rips_complex_edge_collapse_example.py
@@ -0,0 +1,62 @@
+#!/usr/bin/env python
+
+import gudhi
+import matplotlib.pyplot as plt
+import time
+
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2020 Inria"
+__license__ = "MIT"
+
+
+print("#####################################################################")
+print("RipsComplex (only the one-skeleton) creation from tore3D_300.off file")
+
+off_file = gudhi.__root_source_dir__ + '/data/points/tore3D_300.off'
+point_cloud = gudhi.read_points_from_off_file(off_file = off_file)
+rips_complex = gudhi.RipsComplex(points=point_cloud, max_edge_length=12.0)
+simplex_tree = rips_complex.create_simplex_tree(max_dimension=1)
+print('1. Rips complex is of dimension ', simplex_tree.dimension(), ' - ',
+ simplex_tree.num_simplices(), ' simplices - ',
+ simplex_tree.num_vertices(), ' vertices.')
+
+# Expansion of this one-skeleton would require a lot of memory. Let's collapse it
+start = time.process_time()
+simplex_tree.collapse_edges()
+print('2. Rips complex is of dimension ', simplex_tree.dimension(), ' - ',
+ simplex_tree.num_simplices(), ' simplices - ',
+ simplex_tree.num_vertices(), ' vertices.')
+simplex_tree.expansion(3)
+diag = simplex_tree.persistence()
+print("Collapse, expansion and persistence computation took ", time.process_time() - start, " sec.")
+
+# Use subplots to display diagram and density side by side
+fig, axes = plt.subplots(nrows=1, ncols=2, figsize=(12, 5))
+gudhi.plot_persistence_diagram(diag, axes=axes[0])
+axes[0].set_title("Persistence after 1 collapse")
+
+# Collapse can be performed several times. Let's collapse it 3 times
+start = time.process_time()
+simplex_tree.collapse_edges(nb_iterations = 3)
+print('3. Rips complex is of dimension ', simplex_tree.dimension(), ' - ',
+ simplex_tree.num_simplices(), ' simplices - ',
+ simplex_tree.num_vertices(), ' vertices.')
+simplex_tree.expansion(3)
+diag = simplex_tree.persistence()
+print("Collapse, expansion and persistence computation took ", time.process_time() - start, " sec.")
+
+gudhi.plot_persistence_diagram(diag, axes=axes[1])
+axes[1].set_title("Persistence after 3 more collapses")
+
+# Plot the 2 persistence diagrams side to side to check the persistence is the same
+plt.show()
\ No newline at end of file
diff --git a/src/python/example/tangential_complex_plain_homology_from_off_file_example.py b/src/python/example/tangential_complex_plain_homology_from_off_file_example.py
index 85bade4a..a4b4e9f5 100755
--- a/src/python/example/tangential_complex_plain_homology_from_off_file_example.py
+++ b/src/python/example/tangential_complex_plain_homology_from_off_file_example.py
@@ -3,7 +3,6 @@
import argparse
import errno
import os
-import matplotlib.pyplot as plot
import gudhi
""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ -
@@ -62,6 +61,7 @@ with open(args.file, "r") as f:
print(st.betti_numbers())
if args.no_diagram == False:
+ import matplotlib.pyplot as plot
gudhi.plot_persistence_diagram(diag, band=args.band)
plot.show()
else:
diff --git a/src/python/gudhi/__init__.py.in b/src/python/gudhi/__init__.py.in
index 79e12fbc..3043201a 100644
--- a/src/python/gudhi/__init__.py.in
+++ b/src/python/gudhi/__init__.py.in
@@ -23,6 +23,10 @@ __all__ = [@GUDHI_PYTHON_MODULES@ @GUDHI_PYTHON_MODULES_EXTRA@]
__available_modules = ''
__missing_modules = ''
+# For unitary tests purpose
+# could use "if 'collapse_edges' in gudhi.__all__" when collapse edges will have a python module
+__GUDHI_USE_EIGEN3 = @GUDHI_USE_EIGEN3@
+
# Try to import * from gudhi.__module_name for default modules.
# Extra modules require an explicit import by the user (mostly because of
# unusual dependencies, but also to avoid cluttering namespace gudhi and
diff --git a/src/python/gudhi/persistence_graphical_tools.py b/src/python/gudhi/persistence_graphical_tools.py
index c6766c70..848dc03e 100644
--- a/src/python/gudhi/persistence_graphical_tools.py
+++ b/src/python/gudhi/persistence_graphical_tools.py
@@ -20,6 +20,7 @@ __author__ = "Vincent Rouvreau, Bertrand Michel, Theo Lacombe"
__copyright__ = "Copyright (C) 2016 Inria"
__license__ = "MIT"
+_gudhi_matplotlib_use_tex = True
def __min_birth_max_death(persistence, band=0.0):
"""This function returns (min_birth, max_death) from the persistence.
@@ -117,10 +118,13 @@ def plot_persistence_barcode(
try:
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
- if _matplotlib_can_use_tex():
- from matplotlib import rc
+ from matplotlib import rc
+ if _gudhi_matplotlib_use_tex and _matplotlib_can_use_tex():
plt.rc('text', usetex=True)
plt.rc('font', family='serif')
+ else:
+ plt.rc('text', usetex=False)
+ plt.rc('font', family='DejaVu Sans')
if persistence_file != "":
if path.isfile(persistence_file):
@@ -263,10 +267,13 @@ def plot_persistence_diagram(
try:
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
- if _matplotlib_can_use_tex():
- from matplotlib import rc
+ from matplotlib import rc
+ if _gudhi_matplotlib_use_tex and _matplotlib_can_use_tex():
plt.rc('text', usetex=True)
plt.rc('font', family='serif')
+ else:
+ plt.rc('text', usetex=False)
+ plt.rc('font', family='DejaVu Sans')
if persistence_file != "":
if path.isfile(persistence_file):
@@ -436,10 +443,13 @@ def plot_persistence_density(
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
from scipy.stats import kde
- if _matplotlib_can_use_tex():
- from matplotlib import rc
+ from matplotlib import rc
+ if _gudhi_matplotlib_use_tex and _matplotlib_can_use_tex():
plt.rc('text', usetex=True)
plt.rc('font', family='serif')
+ else:
+ plt.rc('text', usetex=False)
+ plt.rc('font', family='DejaVu Sans')
if persistence_file != "":
if dimension is None:
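The three hunks above introduce a module-level switch, `_gudhi_matplotlib_use_tex`, so that TeX rendering can be turned off even when matplotlib reports a usable LaTeX installation. A minimal sketch of how a user could rely on it, assuming matplotlib is installed (the diagram values below are made up):

import gudhi
import gudhi.persistence_graphical_tools as pgt

# Force the non-TeX code path (DejaVu Sans) before plotting, e.g. when the
# LaTeX installation detected by matplotlib is incomplete.
pgt._gudhi_matplotlib_use_tex = False

diag = [(0, (0.0, 1.0)), (0, (0.2, 0.7)), (1, (0.5, 0.8))]
gudhi.plot_persistence_diagram(diag)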
diff --git a/src/python/gudhi/point_cloud/knn.py b/src/python/gudhi/point_cloud/knn.py
index 4652fe80..994be3b6 100644
--- a/src/python/gudhi/point_cloud/knn.py
+++ b/src/python/gudhi/point_cloud/knn.py
@@ -46,7 +46,7 @@ class KNearestNeighbors:
sort_results (bool): if True, then distances and indices of each point are
sorted on return, so that the first column contains the closest points.
Otherwise, neighbors are returned in an arbitrary order. Defaults to True.
- enable_autodiff (bool): if the input is a torch.tensor, jax.numpy.ndarray or tensorflow.Tensor, this
+ enable_autodiff (bool): if the input is a torch.tensor or tensorflow.Tensor, this
instructs the function to compute distances in a way that works with automatic differentiation.
This is experimental, not supported for all metrics, and requires the package EagerPy.
Defaults to False.
diff --git a/src/python/gudhi/representations/vector_methods.py b/src/python/gudhi/representations/vector_methods.py
index 46fee086..84bc99a2 100644
--- a/src/python/gudhi/representations/vector_methods.py
+++ b/src/python/gudhi/representations/vector_methods.py
@@ -1,16 +1,17 @@
# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
-# Author(s): Mathieu Carrière
+# Author(s): Mathieu Carrière, Martin Royer
#
-# Copyright (C) 2018-2019 Inria
+# Copyright (C) 2018-2020 Inria
#
# Modification(s):
-# - YYYY/MM Author: Description of the modification
+# - 2020/06 Martin: ATOL integration
import numpy as np
from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.preprocessing import MinMaxScaler, MaxAbsScaler
from sklearn.neighbors import DistanceMetric
+from sklearn.metrics import pairwise
from .preprocessing import DiagramScaler, BirthPersistenceTransform
@@ -322,22 +323,15 @@ class BettiCurve(BaseEstimator, TransformerMixin):
Returns:
numpy array with shape (number of diagrams) x (**resolution**): output Betti curves.
"""
- num_diag, Xfit = len(X), []
+ Xfit = []
x_values = np.linspace(self.sample_range[0], self.sample_range[1], self.resolution)
step_x = x_values[1] - x_values[0]
- for i in range(num_diag):
-
- diagram, num_pts_in_diag = X[i], X[i].shape[0]
-
+ for diagram in X:
+ diagram_int = np.clip(np.ceil((diagram[:,:2] - self.sample_range[0]) / step_x), 0, self.resolution).astype(int)
bc = np.zeros(self.resolution)
- for j in range(num_pts_in_diag):
- [px,py] = diagram[j,:2]
- min_idx = np.clip(np.ceil((px - self.sample_range[0]) / step_x).astype(int), 0, self.resolution)
- max_idx = np.clip(np.ceil((py - self.sample_range[0]) / step_x).astype(int), 0, self.resolution)
- for k in range(min_idx, max_idx):
- bc[k] += 1
-
+ for interval in diagram_int:
+ bc[interval[0]:interval[1]] += 1
Xfit.append(np.reshape(bc,[1,-1]))
Xfit = np.concatenate(Xfit, 0)
@@ -574,3 +568,141 @@ class ComplexPolynomial(BaseEstimator, TransformerMixin):
numpy array with shape (**threshold**): output complex vector of coefficients.
"""
return self.fit_transform([diag])[0,:]
+
+def _lapl_contrast(measure, centers, inertias):
+ """contrast function for vectorising `measure` in ATOL"""
+ return np.exp(-pairwise.pairwise_distances(measure, Y=centers) / inertias)
+
+def _gaus_contrast(measure, centers, inertias):
+ """contrast function for vectorising `measure` in ATOL"""
+ return np.exp(-pairwise.pairwise_distances(measure, Y=centers, squared=True) / inertias**2)
+
+def _indicator_contrast(measure, centers, inertias):
+ """contrast function for vectorising `measure` in ATOL"""
+ robe_curve = np.clip(2 - pairwise.pairwise_distances(measure, Y=centers)/inertias, 0, 1)
+ return robe_curve
+
+def _cloud_weighting(measure):
+ """automatic uniform weighting with mass 1 for `measure` in ATOL"""
+ return np.ones(shape=measure.shape[0])
+
+def _iidproba_weighting(measure):
+ """automatic uniform weighting with mass 1/N for `measure` in ATOL"""
+ return np.ones(shape=measure.shape[0]) / measure.shape[0]
+
+class Atol(BaseEstimator, TransformerMixin):
+ """
+ This class vectorises measures (e.g. point clouds, persistence diagrams) after a quantisation step.
+
+ ATOL paper: :cite:`royer2019atol`
+
+ Example
+ --------
+ >>> from sklearn.cluster import KMeans
+ >>> from gudhi.representations.vector_methods import Atol
+ >>> import numpy as np
+ >>> a = np.array([[1, 2, 4], [1, 4, 0], [1, 0, 4]])
+ >>> b = np.array([[4, 2, 0], [4, 4, 0], [4, 0, 2]])
+ >>> c = np.array([[3, 2, -1], [1, 2, -1]])
+ >>> atol_vectoriser = Atol(quantiser=KMeans(n_clusters=2, random_state=202006))
+ >>> atol_vectoriser.fit(X=[a, b, c]).centers # doctest: +SKIP
+ >>> # array([[ 2. , 0.66666667, 3.33333333],
+ >>> # [ 2.6 , 2.8 , -0.4 ]])
+ >>> atol_vectoriser(a) # doctest: +SKIP
+ >>> # array([1.18168665, 0.42375966])
+ >>> atol_vectoriser(c) # doctest: +SKIP
+ >>> # array([0.02062512, 1.25157463])
+ >>> atol_vectoriser.transform(X=[a, b, c]) # doctest: +SKIP
+ >>> # array([[1.18168665, 0.42375966],
+ >>> # [0.29861028, 1.06330156],
+ >>> # [0.02062512, 1.25157463]])
+ """
+ # Note: the example above must be kept up to date with the test called test_atol_doc
+ def __init__(self, quantiser, weighting_method="cloud", contrast="gaussian"):
+ """
+ Constructor for the Atol measure vectorisation class.
+
+ Parameters:
+ quantiser (Object): Object with `fit` (sklearn API consistent) and `cluster_centers_` and `n_clusters`
+ attributes, e.g. sklearn.cluster.KMeans. It will be fitted when the Atol object's `fit` method is called.
+ weighting_method (string): constant generic function for weighting the measure points;
+ choose from {"cloud", "iidproba"}
+ (default: "cloud", i.e. the measure is seen as a point cloud).
+ This has no impact if weights are provided along with the measures in both `fit` and `transform`.
+ contrast (string): constant function for evaluating the proximity of a measure with respect to centers;
+ choose from {"gaussian", "laplacian", "indicator"}
+ (default: "gaussian", see page 3 in the ATOL paper).
+ """
+ self.quantiser = quantiser
+ self.contrast = {
+ "gaussian": _gaus_contrast,
+ "laplacian": _lapl_contrast,
+ "indicator": _indicator_contrast,
+ }.get(contrast, _gaus_contrast)
+ self.weighting_method = {
+ "cloud" : _cloud_weighting,
+ "iidproba": _iidproba_weighting,
+ }.get(weighting_method, _cloud_weighting)
+
+ def fit(self, X, y=None, sample_weight=None):
+ """
+ Calibration step: fit centers to the sample measures and derive inertias between centers.
+
+ Parameters:
+ X (list of N x d numpy arrays): input measures in R^d from which to learn center locations and inertias
+ (measures can have different N).
+ y: Ignored, present for API consistency by convention.
+ sample_weight (list of numpy arrays): weights for each measure point in X, optional.
+ If None, the object's weighting_method will be used.
+
+ Returns:
+ self
+ """
+ if not hasattr(self.quantiser, 'fit'):
+ raise TypeError("quantiser %s has no `fit` attribute." % (self.quantiser))
+ if sample_weight is None:
+ sample_weight = np.concatenate([self.weighting_method(measure) for measure in X])
+
+ measures_concat = np.concatenate(X)
+ self.quantiser.fit(X=measures_concat, sample_weight=sample_weight)
+ self.centers = self.quantiser.cluster_centers_
+ if self.quantiser.n_clusters == 1:
+ dist_centers = pairwise.pairwise_distances(measures_concat)
+ np.fill_diagonal(dist_centers, 0)
+ self.inertias = np.array([np.max(dist_centers)/2])
+ else:
+ dist_centers = pairwise.pairwise_distances(self.centers)
+ dist_centers[dist_centers == 0] = np.inf
+ self.inertias = np.min(dist_centers, axis=0)/2
+ return self
+
+ def __call__(self, measure, sample_weight=None):
+ """
+ Apply measure vectorisation on a single measure.
+
+ Parameters:
+ measure (n x d numpy array): input measure in R^d.
+
+ Returns:
+ numpy array in R^self.quantiser.n_clusters.
+ """
+ if sample_weight is None:
+ sample_weight = self.weighting_method(measure)
+ return np.sum(sample_weight * self.contrast(measure, self.centers, self.inertias.T).T, axis=1)
+
+ def transform(self, X, sample_weight=None):
+ """
+ Apply measure vectorisation on a list of measures.
+
+ Parameters:
+ X (list of N x d numpy arrays): input measures in R^d to vectorise
+ (measures can have different N).
+ sample_weight (list of numpy arrays): weights for each measure point in X, optional.
+ If None, the object's weighting_method will be used.
+
+ Returns:
+ numpy array with shape (number of measures) x (self.quantiser.n_clusters).
+ """
+ if sample_weight is None:
+ sample_weight = [self.weighting_method(measure) for measure in X]
+ return np.stack([self(measure, sample_weight=weight) for measure, weight in zip(X, sample_weight)])
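For context, a small self-contained sketch of how the new Atol class is meant to be used; the point clouds and the choice of quantiser below are illustrative only:

import numpy as np
from sklearn.cluster import KMeans
from gudhi.representations.vector_methods import Atol

# Three toy "measures" (point clouds in R^2); any list of N_i x d arrays works.
rng = np.random.default_rng(0)
measures = [rng.random((10, 2)), rng.random((15, 2)), rng.random((8, 2))]

# Centers are fitted on the concatenated measures; each measure is then
# vectorised into R^n_clusters with the default Gaussian contrast.
atol = Atol(quantiser=KMeans(n_clusters=4, random_state=0))
vectors = atol.fit(measures).transform(measures)
print(vectors.shape)  # (3, 4)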
diff --git a/src/python/gudhi/rips_complex.pyx b/src/python/gudhi/rips_complex.pyx
index 72e82c79..c3470292 100644
--- a/src/python/gudhi/rips_complex.pyx
+++ b/src/python/gudhi/rips_complex.pyx
@@ -49,13 +49,13 @@ cdef class RipsComplex:
:type max_edge_length: float
:param points: A list of points in d-Dimension.
- :type points: list of list of double
+ :type points: list of list of float
Or
:param distance_matrix: A distance matrix (full square or lower
triangular).
- :type points: list of list of double
+ :type points: list of list of float
And in both cases
@@ -89,10 +89,10 @@ cdef class RipsComplex:
def create_simplex_tree(self, max_dimension=1):
"""
- :param max_dimension: graph expansion for rips until this given maximal
+ :param max_dimension: graph expansion for Rips until this given maximal
dimension.
:type max_dimension: int
- :returns: A simplex tree created from the Delaunay Triangulation.
+ :returns: A simplex tree encoding the Vietoris–Rips filtration.
:rtype: SimplexTree
"""
stree = SimplexTree()
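As the corrected docstrings recall, a Rips complex can be built either from a point cloud or from a lower-triangular distance matrix; a short sketch with toy data:

import gudhi

# From a point cloud
rips = gudhi.RipsComplex(points=[[0, 0], [1, 0], [0, 1]], max_edge_length=2.0)
st = rips.create_simplex_tree(max_dimension=2)

# From the corresponding lower-triangular distance matrix
rips2 = gudhi.RipsComplex(distance_matrix=[[], [1.0], [1.0, 1.4142]], max_edge_length=2.0)
st2 = rips2.create_simplex_tree(max_dimension=2)

print(st.num_simplices(), st2.num_simplices())  # both complexes contain the same 7 simplices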
diff --git a/src/python/gudhi/simplex_tree.pxd b/src/python/gudhi/simplex_tree.pxd
index e748ac40..000323af 100644
--- a/src/python/gudhi/simplex_tree.pxd
+++ b/src/python/gudhi/simplex_tree.pxd
@@ -36,6 +36,12 @@ cdef extern from "Simplex_tree_interface.h" namespace "Gudhi":
Simplex_tree_skeleton_iterator operator++() nogil
bint operator!=(Simplex_tree_skeleton_iterator) nogil
+ cdef cppclass Simplex_tree_boundary_iterator "Gudhi::Simplex_tree_interface<Gudhi::Simplex_tree_options_full_featured>::Boundary_simplex_iterator":
+ Simplex_tree_boundary_iterator() nogil
+ Simplex_tree_simplex_handle& operator*() nogil
+ Simplex_tree_boundary_iterator operator++() nogil
+ bint operator!=(Simplex_tree_boundary_iterator) nogil
+
cdef cppclass Simplex_tree_interface_full_featured "Gudhi::Simplex_tree_interface<Gudhi::Simplex_tree_options_full_featured>":
Simplex_tree() nogil
@@ -57,6 +63,8 @@ cdef extern from "Simplex_tree_interface.h" namespace "Gudhi":
bool make_filtration_non_decreasing() nogil
void compute_extended_filtration() nogil
vector[vector[pair[int, pair[double, double]]]] compute_extended_persistence_subdiagrams(vector[pair[int, pair[double, double]]] dgm, double min_persistence) nogil
+ Simplex_tree_interface_full_featured* collapse_edges(int nb_collapse_iteration) nogil except +
+ void reset_filtration(double filtration, int dimension) nogil
# Iterators over Simplex tree
pair[vector[int], double] get_simplex_and_filtration(Simplex_tree_simplex_handle f_simplex) nogil
Simplex_tree_simplices_iterator get_simplices_iterator_begin() nogil
@@ -65,6 +73,7 @@ cdef extern from "Simplex_tree_interface.h" namespace "Gudhi":
vector[Simplex_tree_simplex_handle].const_iterator get_filtration_iterator_end() nogil
Simplex_tree_skeleton_iterator get_skeleton_iterator_begin(int dimension) nogil
Simplex_tree_skeleton_iterator get_skeleton_iterator_end(int dimension) nogil
+ pair[Simplex_tree_boundary_iterator, Simplex_tree_boundary_iterator] get_boundary_iterators(vector[int] simplex) nogil except +
cdef extern from "Persistent_cohomology_interface.h" namespace "Gudhi":
cdef cppclass Simplex_tree_persistence_interface "Gudhi::Persistent_cohomology_interface<Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_full_featured>>":
diff --git a/src/python/gudhi/simplex_tree.pyx b/src/python/gudhi/simplex_tree.pyx
index 20e66d9f..d7991417 100644
--- a/src/python/gudhi/simplex_tree.pyx
+++ b/src/python/gudhi/simplex_tree.pyx
@@ -69,7 +69,7 @@ cdef class SimplexTree:
this simplicial complex, or +infinity if it is not in the complex.
:param simplex: The N-simplex, represented by a list of vertex.
- :type simplex: list of int.
+ :type simplex: list of int
:returns: The simplicial complex filtration value.
:rtype: float
"""
@@ -80,7 +80,7 @@ cdef class SimplexTree:
given N-simplex.
:param simplex: The N-simplex, represented by a list of vertex.
- :type simplex: list of int.
+ :type simplex: list of int
:param filtration: The new filtration value.
:type filtration: float
@@ -153,7 +153,7 @@ cdef class SimplexTree:
"""This function sets the dimension of the simplicial complex.
:param dimension: The new dimension value.
- :type dimension: int.
+ :type dimension: int
.. note::
@@ -172,7 +172,7 @@ cdef class SimplexTree:
complex or not.
:param simplex: The N-simplex to find, represented by a list of vertex.
- :type simplex: list of int.
+ :type simplex: list of int
:returns: true if the simplex was found, false otherwise.
:rtype: bool
"""
@@ -186,9 +186,9 @@ cdef class SimplexTree:
:param simplex: The N-simplex to insert, represented by a list of
vertex.
- :type simplex: list of int.
+ :type simplex: list of int
:param filtration: The filtration value of the simplex.
- :type filtration: float.
+ :type filtration: float
:returns: true if the simplex was not yet in the complex, false
otherwise (whatever its original filtration value).
:rtype: bool
@@ -228,7 +228,7 @@ cdef class SimplexTree:
"""This function returns a generator with the (simplices of the) skeleton of a maximum given dimension.
:param dimension: The skeleton dimension value.
- :type dimension: int.
+ :type dimension: int
:returns: The (simplices of the) skeleton of a maximum dimension.
:rtype: generator with tuples(simplex, filtration)
"""
@@ -243,7 +243,7 @@ cdef class SimplexTree:
"""This function returns the star of a given N-simplex.
:param simplex: The N-simplex, represented by a list of vertex.
- :type simplex: list of int.
+ :type simplex: list of int
:returns: The (simplices of the) star of a simplex.
:rtype: list of tuples(simplex, filtration)
"""
@@ -265,10 +265,10 @@ cdef class SimplexTree:
given codimension.
:param simplex: The N-simplex, represented by a list of vertex.
- :type simplex: list of int.
+ :type simplex: list of int
:param codimension: The codimension. If codimension = 0, all cofaces
are returned (equivalent of get_star function)
- :type codimension: int.
+ :type codimension: int
:returns: The (simplices of the) cofaces of a simplex
:rtype: list of tuples(simplex, filtration)
"""
@@ -285,12 +285,28 @@ cdef class SimplexTree:
ct.append((v, filtered_simplex.second))
return ct
+ def get_boundaries(self, simplex):
+ """This function returns a generator with the boundaries of a given N-simplex.
+ If you do not need the filtration values, the boundary can also be obtained as
+ :code:`itertools.combinations(simplex,len(simplex)-1)`.
+
+ :param simplex: The N-simplex, represented by a list of vertex.
+ :type simplex: list of int
+ :returns: The (simplices of the) boundary of a simplex
+ :rtype: generator with tuples(simplex, filtration)
+ """
+ cdef pair[Simplex_tree_boundary_iterator, Simplex_tree_boundary_iterator] it = self.get_ptr().get_boundary_iterators(simplex)
+
+ while it.first != it.second:
+ yield self.get_ptr().get_simplex_and_filtration(dereference(it.first))
+ preincrement(it.first)
+
def remove_maximal_simplex(self, simplex):
"""This function removes a given maximal N-simplex from the simplicial
complex.
:param simplex: The N-simplex, represented by a list of vertex.
- :type simplex: list of int.
+ :type simplex: list of int
.. note::
@@ -308,7 +324,7 @@ cdef class SimplexTree:
"""Prune above filtration value given as parameter.
:param filtration: Maximum threshold value.
- :type filtration: float.
+ :type filtration: float
:returns: The filtration modification information.
:rtype: bool
@@ -328,7 +344,7 @@ cdef class SimplexTree:
return self.get_ptr().prune_above_filtration(filtration)
def expansion(self, max_dim):
- """Expands the Simplex_tree containing only its one skeleton
+ """Expands the simplex tree containing only its one skeleton
until dimension max_dim.
The expanded simplicial complex until dimension :math:`d`
@@ -338,11 +354,11 @@ cdef class SimplexTree:
The filtration value assigned to a simplex is the maximal filtration
value of one of its edges.
- The Simplex_tree must contain no simplex of dimension bigger than
+ The simplex tree must contain no simplex of dimension bigger than
1 when calling the method.
:param max_dim: The maximal dimension.
- :type max_dim: int.
+ :type max_dim: int
"""
cdef int maxdim = max_dim
with nogil:
@@ -358,38 +374,54 @@ cdef class SimplexTree:
"""
return self.get_ptr().make_filtration_non_decreasing()
+ def reset_filtration(self, filtration, min_dim = 0):
+ """This function resets the filtration value of all the simplices of dimension at least min_dim. It resets the
+ whole simplex tree when `min_dim = 0`.
+ `reset_filtration` may break the filtration property with `min_dim > 0`, and it is the user's responsibility to
+ make it a valid filtration (for instance by using a large enough `filtration` value, or by calling
+ `make_filtration_non_decreasing` afterwards).
+
+ :param filtration: New threshold value.
+ :type filtration: float
+ :param min_dim: The minimal dimension. Default value is 0.
+ :type min_dim: int
+ """
+ self.get_ptr().reset_filtration(filtration, min_dim)
+
def extend_filtration(self):
- """ Extend filtration for computing extended persistence. This function only uses the
- filtration values at the 0-dimensional simplices, and computes the extended persistence
- diagram induced by the lower-star filtration computed with these values.
+ """ Extend filtration for computing extended persistence. This function only uses the filtration values at the
+ 0-dimensional simplices, and computes the extended persistence diagram induced by the lower-star filtration
+ computed with these values.
.. note::
- Note that after calling this function, the filtration
- values are actually modified within the Simplex_tree.
- The function :func:`extended_persistence`
- retrieves the original values.
+ Note that after calling this function, the filtration values are actually modified within the simplex tree.
+ The function :func:`extended_persistence` retrieves the original values.
.. note::
- Note that this code creates an extra vertex internally, so you should make sure that
- the Simplex_tree does not contain a vertex with the largest possible value (i.e., 4294967295).
+ Note that this code creates an extra vertex internally, so you should make sure that the simplex tree does
+ not contain a vertex with the largest possible value (i.e., 4294967295).
+
+ This `notebook <https://github.com/GUDHI/TDA-tutorial/blob/master/Tuto-GUDHI-extended-persistence.ipynb>`_
+ explains how to compute an extension of persistence called extended persistence.
"""
self.get_ptr().compute_extended_filtration()
def extended_persistence(self, homology_coeff_field=11, min_persistence=0):
- """This function retrieves good values for extended persistence, and separate the diagrams
- into the Ordinary, Relative, Extended+ and Extended- subdiagrams.
-
- :param homology_coeff_field: The homology coefficient field. Must be a
- prime number. Default value is 11.
- :type homology_coeff_field: int.
- :param min_persistence: The minimum persistence value (i.e., the absolute value of the difference between the persistence diagram point coordinates) to take into
- account (strictly greater than min_persistence). Default value is
- 0.0.
- Sets min_persistence to -1.0 to see all values.
- :type min_persistence: float.
- :returns: A list of four persistence diagrams in the format described in :func:`persistence`. The first one is Ordinary, the second one is Relative, the third one is Extended+ and the fourth one is Extended-. See https://link.springer.com/article/10.1007/s10208-008-9027-z and/or section 2.2 in https://link.springer.com/article/10.1007/s10208-017-9370-z for a description of these subtypes.
+ """This function retrieves good values for extended persistence, and separates the diagrams into the Ordinary,
+ Relative, Extended+ and Extended- subdiagrams.
+
+ :param homology_coeff_field: The homology coefficient field. Must be a prime number. Default value is 11.
+ :type homology_coeff_field: int
+ :param min_persistence: The minimum persistence value (i.e., the absolute value of the difference between the
+ persistence diagram point coordinates) to take into account (strictly greater than min_persistence).
+ Default value is 0.0. Set min_persistence to -1.0 to see all values.
+ :type min_persistence: float
+ :returns: A list of four persistence diagrams in the format described in :func:`persistence`. The first one is
+ Ordinary, the second one is Relative, the third one is Extended+ and the fourth one is Extended-.
+ See https://link.springer.com/article/10.1007/s10208-008-9027-z and/or section 2.2 in
+ https://link.springer.com/article/10.1007/s10208-017-9370-z for a description of these subtypes.
.. note::
@@ -400,6 +432,9 @@ cdef class SimplexTree:
The coordinates of the persistence diagram points might be a little different than the
original filtration values due to the internal transformation (scaling to [-2,-1]) that is
performed on these values during the computation of extended persistence.
+
+ This `notebook <https://github.com/GUDHI/TDA-tutorial/blob/master/Tuto-GUDHI-extended-persistence.ipynb>`_
+ explains how to compute an extension of persistence called extended persistence.
"""
cdef vector[pair[int, pair[double, double]]] persistence_result
if self.pcohptr != NULL:
@@ -415,12 +450,12 @@ cdef class SimplexTree:
:param homology_coeff_field: The homology coefficient field. Must be a
prime number. Default value is 11.
- :type homology_coeff_field: int.
+ :type homology_coeff_field: int
:param min_persistence: The minimum persistence value to take into
account (strictly greater than min_persistence). Default value is
0.0.
Set min_persistence to -1.0 to see all values.
- :type min_persistence: float.
+ :type min_persistence: float
:param persistence_dim_max: If true, the persistent homology for the
maximal dimension in the complex is computed. If false, it is
ignored. Default is false.
@@ -438,12 +473,12 @@ cdef class SimplexTree:
:param homology_coeff_field: The homology coefficient field. Must be a
prime number. Default value is 11.
- :type homology_coeff_field: int.
+ :type homology_coeff_field: int
:param min_persistence: The minimum persistence value to take into
account (strictly greater than min_persistence). Default value is
0.0.
Sets min_persistence to -1.0 to see all values.
- :type min_persistence: float.
+ :type min_persistence: float
:param persistence_dim_max: If true, the persistent homology for the
maximal dimension in the complex is computed. If false, it is
ignored. Default is false.
@@ -478,10 +513,10 @@ cdef class SimplexTree:
:param from_value: The persistence birth limit to be added in the
numbers (persistent birth <= from_value).
- :type from_value: float.
+ :type from_value: float
:param to_value: The persistence death limit to be added in the
numbers (persistent death > to_value).
- :type to_value: float.
+ :type to_value: float
:returns: The persistent Betti numbers ([B0, B1, ..., Bn]).
:rtype: list of int
@@ -498,7 +533,7 @@ cdef class SimplexTree:
complex in a specific dimension.
:param dimension: The specific dimension.
- :type dimension: int.
+ :type dimension: int
:returns: The persistence intervals.
:rtype: numpy array of dimension 2
@@ -527,7 +562,7 @@ cdef class SimplexTree:
complex in a user given file name.
:param persistence_file: Name of the file.
- :type persistence_file: string.
+ :type persistence_file: string
:note: intervals_in_dim function requires
:func:`compute_persistence`
@@ -581,3 +616,26 @@ cdef class SimplexTree:
infinite0 = np_array(next(l))
infinites = [np_array(d).reshape(-1,2) for d in l]
return (normal0, normals, infinite0, infinites)
+
+ def collapse_edges(self, nb_iterations = 1):
+ """Assuming the simplex tree is a 1-skeleton graph, this method collapses edges (simplices of higher dimension
+ are ignored) and rebuilds the simplex tree from the remaining vertices and edges.
+ A good candidate is to build a simplex tree on top of a :class:`~gudhi.RipsComplex` of dimension 1 before
+ collapsing edges
+ (cf. :download:`rips_complex_edge_collapse_example.py <../example/rips_complex_edge_collapse_example.py>`).
+ For implementation details, please refer to :cite:`edgecollapsesocg2020`.
+
+ :param nb_iterations: The number of edge collapse iterations to perform. Default is 1.
+ :type nb_iterations: int
+
+ :note: the collapse_edges method requires `Eigen <installation.html#eigen>`_ >= 3.1.0, and an exception is thrown
+ if Eigen is not available.
+ """
+ # Backup old pointer
+ cdef Simplex_tree_interface_full_featured* ptr = self.get_ptr()
+ cdef int nb_iter = nb_iterations
+ with nogil:
+ # New pointer is a new collapsed simplex tree
+ self.thisptr = <intptr_t>(ptr.collapse_edges(nb_iter))
+ # Delete old pointer
+ del ptr
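A minimal sketch exercising the three SimplexTree methods added above (get_boundaries, reset_filtration and collapse_edges) on toy filtrations; the collapse step assumes a GUDHI build with Eigen3, and the edge collapse data mirrors test_collapse_edges further down in this patch:

import gudhi

st = gudhi.SimplexTree()
st.insert([0, 1, 2], 2.0)  # also inserts the vertices and edges at filtration 2.0

# Boundary facets of a simplex, together with their filtration values
print(list(st.get_boundaries([0, 1, 2])))

# Reset the filtration of every simplex of dimension >= 1, then repair the
# filtration property as advised in the reset_filtration docstring
st.reset_filtration(0.0, min_dim=1)
st.make_filtration_non_decreasing()

# Edge collapse works on the 1-skeleton (higher simplices are ignored) and
# requires Eigen3 >= 3.1.0
graph = gudhi.SimplexTree()
for edge, filt in [([0, 1], 1.0), ([1, 2], 1.0), ([2, 3], 1.0), ([0, 3], 1.0), ([0, 2], 2.0), ([1, 3], 2.0)]:
    graph.insert(edge, filt)
try:
    graph.collapse_edges(nb_iterations=1)
    print(graph.num_simplices())  # 9 on this example, as in test_collapse_edges
except RuntimeError:
    pass  # GUDHI was built without Eigen3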
diff --git a/src/python/gudhi/subsampling.pyx b/src/python/gudhi/subsampling.pyx
index f77c6f75..46f32335 100644
--- a/src/python/gudhi/subsampling.pyx
+++ b/src/python/gudhi/subsampling.pyx
@@ -33,7 +33,7 @@ def choose_n_farthest_points(points=None, off_file='', nb_points=0, starting_poi
The iteration starts with the landmark `starting point`.
:param points: The input point set.
- :type points: Iterable[Iterable[float]].
+ :type points: Iterable[Iterable[float]]
Or
@@ -42,14 +42,15 @@ def choose_n_farthest_points(points=None, off_file='', nb_points=0, starting_poi
And in both cases
- :param nb_points: Number of points of the subsample.
- :type nb_points: unsigned.
+ :param nb_points: Number of points of the subsample (the subsample may be \
+ smaller if there are fewer than nb_points distinct input points)
+ :type nb_points: int
:param starting_point: The iteration starts with the landmark `starting \
- point`,which is the index of the point to start with. If not set, this \
+ point`, which is the index of the point to start with. If not set, this \
index is chosen randomly.
- :type starting_point: unsigned.
+ :type starting_point: int
:returns: The subsample point set.
- :rtype: List[List[float]].
+ :rtype: List[List[float]]
"""
if off_file:
if os.path.isfile(off_file):
@@ -76,7 +77,7 @@ def pick_n_random_points(points=None, off_file='', nb_points=0):
"""Subsample a point set by picking random vertices.
:param points: The input point set.
- :type points: Iterable[Iterable[float]].
+ :type points: Iterable[Iterable[float]]
Or
@@ -86,7 +87,7 @@ def pick_n_random_points(points=None, off_file='', nb_points=0):
And in both cases
:param nb_points: Number of points of the subsample.
- :type nb_points: unsigned.
+ :type nb_points: int
:returns: The subsample point set.
:rtype: List[List[float]]
"""
@@ -104,10 +105,10 @@ def pick_n_random_points(points=None, off_file='', nb_points=0):
def sparsify_point_set(points=None, off_file='', min_squared_dist=0.0):
"""Outputs a subset of the input points so that the squared distance
- between any two points is greater than or equal to min_squared_dist.
+ between any two points is greater than min_squared_dist.
:param points: The input point set.
- :type points: Iterable[Iterable[float]].
+ :type points: Iterable[Iterable[float]]
Or
@@ -118,7 +119,7 @@ def sparsify_point_set(points=None, off_file='', min_squared_dist=0.0):
:param min_squared_dist: Minimum squared distance separating the output \
points.
- :type min_squared_dist: float.
+ :type min_squared_dist: float
:returns: The subsample point set.
:rtype: List[List[float]]
"""
diff --git a/src/python/gudhi/wasserstein/wasserstein.py b/src/python/gudhi/wasserstein/wasserstein.py
index 142385b1..572d4249 100644
--- a/src/python/gudhi/wasserstein/wasserstein.py
+++ b/src/python/gudhi/wasserstein/wasserstein.py
@@ -202,8 +202,8 @@ def wasserstein_distance(X, Y, matching=False, order=1., internal_p=np.inf, enab
then the optimal matching will be set to `None`.
:param order: exponent for Wasserstein; Default value is 1.
:param internal_p: Ground metric on the (upper-half) plane (i.e. norm L^p in R^2);
- default value is `np.inf`.
- :param enable_autodiff: If X and Y are torch.tensor, tensorflow.Tensor or jax.numpy.ndarray, make the computation
+ Default value is `np.inf`.
+ :param enable_autodiff: If X and Y are torch.tensor or tensorflow.Tensor, make the computation
transparent to automatic differentiation. This requires the package EagerPy and is currently incompatible
with `matching=True` and with `keep_essential_parts=True`.
@@ -306,9 +306,9 @@ def wasserstein_distance(X, Y, matching=False, order=1., internal_p=np.inf, enab
# empty arrays are not handled properly by the helpers, so we avoid calling them
if len(pairs_X_Y):
dists.append((Y_orig[pairs_X_Y[:, 1]] - X_orig[pairs_X_Y[:, 0]]).norms.lp(internal_p, axis=-1).norms.lp(order))
- if len(pairs_X_diag):
+ if len(pairs_X_diag[0]):
dists.append(_perstot_autodiff(X_orig[pairs_X_diag], order, internal_p))
- if len(pairs_Y_diag):
+ if len(pairs_Y_diag[0]):
dists.append(_perstot_autodiff(Y_orig[pairs_Y_diag], order, internal_p))
dists = [dist.reshape(1) for dist in dists]
return ep.concatenate(dists).norms.lp(order).raw
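The two-line fix above concerns the autodiff path when one diagram is empty, so that every point of the other diagram is matched to the diagonal. A minimal sketch of that path, assuming PyTorch and EagerPy are installed; it mirrors the `dist30` case in the new test file at the end of this patch:

import torch
from gudhi.wasserstein import wasserstein_distance

diag = torch.tensor([[2.8, 4.45], [9.5, 14.1]], requires_grad=True)
empty = torch.tensor([])  # everything in `diag` gets matched to the diagonal

dist = wasserstein_distance(diag, empty, internal_p=2, order=2, enable_autodiff=True)
dist.backward()
print(diag.grad)  # gradients flow through the matching to the diagonal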
diff --git a/src/python/include/Alpha_complex_factory.h b/src/python/include/Alpha_complex_factory.h
index d699ad9b..3405fdd6 100644
--- a/src/python/include/Alpha_complex_factory.h
+++ b/src/python/include/Alpha_complex_factory.h
@@ -48,11 +48,14 @@ static CgalPointType pt_cython_to_cgal(std::vector<double> const& vec) {
class Abstract_alpha_complex {
public:
virtual std::vector<double> get_point(int vh) = 0;
+
virtual bool create_simplex_tree(Simplex_tree_interface<>* simplex_tree, double max_alpha_square,
bool default_filtration_value) = 0;
+
+ virtual ~Abstract_alpha_complex() = default;
};
-class Exact_Alphacomplex_dD : public Abstract_alpha_complex {
+class Exact_Alphacomplex_dD final : public Abstract_alpha_complex {
private:
using Kernel = CGAL::Epeck_d<CGAL::Dynamic_dimension_tag>;
using Point = typename Kernel::Point_d;
@@ -78,7 +81,7 @@ class Exact_Alphacomplex_dD : public Abstract_alpha_complex {
Alpha_complex<Kernel> alpha_complex_;
};
-class Inexact_Alphacomplex_dD : public Abstract_alpha_complex {
+class Inexact_Alphacomplex_dD final : public Abstract_alpha_complex {
private:
using Kernel = CGAL::Epick_d<CGAL::Dynamic_dimension_tag>;
using Point = typename Kernel::Point_d;
@@ -104,7 +107,7 @@ class Inexact_Alphacomplex_dD : public Abstract_alpha_complex {
};
template <complexity Complexity>
-class Alphacomplex_3D : public Abstract_alpha_complex {
+class Alphacomplex_3D final : public Abstract_alpha_complex {
private:
using Point = typename Alpha_complex_3d<Complexity, false, false>::Bare_point_3;
diff --git a/src/python/include/Simplex_tree_interface.h b/src/python/include/Simplex_tree_interface.h
index 56d7c41d..629f6083 100644
--- a/src/python/include/Simplex_tree_interface.h
+++ b/src/python/include/Simplex_tree_interface.h
@@ -15,10 +15,15 @@
#include <gudhi/distance_functions.h>
#include <gudhi/Simplex_tree.h>
#include <gudhi/Points_off_io.h>
+#ifdef GUDHI_USE_EIGEN3
+#include <gudhi/Flag_complex_edge_collapser.h>
+#endif
#include <iostream>
#include <vector>
#include <utility> // std::pair
+#include <tuple>
+#include <iterator> // for std::distance
namespace Gudhi {
@@ -36,6 +41,7 @@ class Simplex_tree_interface : public Simplex_tree<SimplexTreeOptions> {
using Skeleton_simplex_iterator = typename Base::Skeleton_simplex_iterator;
using Complex_simplex_iterator = typename Base::Complex_simplex_iterator;
using Extended_filtration_data = typename Base::Extended_filtration_data;
+ using Boundary_simplex_iterator = typename Base::Boundary_simplex_iterator;
public:
@@ -157,6 +163,38 @@ class Simplex_tree_interface : public Simplex_tree<SimplexTreeOptions> {
return new_dgm;
}
+ Simplex_tree_interface* collapse_edges(int nb_collapse_iteration) {
+#ifdef GUDHI_USE_EIGEN3
+ using Filtered_edge = std::tuple<Vertex_handle, Vertex_handle, Filtration_value>;
+ std::vector<Filtered_edge> edges;
+ for (Simplex_handle sh : Base::skeleton_simplex_range(1)) {
+ if (Base::dimension(sh) == 1) {
+ typename Base::Simplex_vertex_range rg = Base::simplex_vertex_range(sh);
+ auto vit = rg.begin();
+ Vertex_handle v = *vit;
+ Vertex_handle w = *++vit;
+ edges.emplace_back(v, w, Base::filtration(sh));
+ }
+ }
+
+ for (int iteration = 0; iteration < nb_collapse_iteration; iteration++) {
+ edges = Gudhi::collapse::flag_complex_collapse_edges(edges);
+ }
+ Simplex_tree_interface* collapsed_stree_ptr = new Simplex_tree_interface();
+ // Copy the original 0-skeleton
+ for (Simplex_handle sh : Base::skeleton_simplex_range(0)) {
+ collapsed_stree_ptr->insert({*(Base::simplex_vertex_range(sh).begin())}, Base::filtration(sh));
+ }
+ // Insert remaining edges
+ for (auto remaining_edge : edges) {
+ collapsed_stree_ptr->insert({std::get<0>(remaining_edge), std::get<1>(remaining_edge)}, std::get<2>(remaining_edge));
+ }
+ return collapsed_stree_ptr;
+#else
+ throw std::runtime_error("Unable to collapse edges as it requires Eigen3 >= 3.1.0.");
+#endif
+ }
+
// Iterator over the simplex tree
Complex_simplex_iterator get_simplices_iterator_begin() {
// this specific case works because the range is just a pair of iterators - won't work if range was a vector
@@ -188,6 +226,15 @@ class Simplex_tree_interface : public Simplex_tree<SimplexTreeOptions> {
// this specific case works because the range is just a pair of iterators - won't work if range was a vector
return Base::skeleton_simplex_range(dimension).end();
}
+
+ std::pair<Boundary_simplex_iterator, Boundary_simplex_iterator> get_boundary_iterators(const Simplex& simplex) {
+ auto bd_sh = Base::find(simplex);
+ if (bd_sh == Base::null_simplex())
+ throw std::runtime_error("simplex not found - cannot find boundaries");
+ // this specific case works because the range is just a pair of iterators - won't work if range was a vector
+ auto boundary_srange = Base::boundary_simplex_range(bd_sh);
+ return std::make_pair(boundary_srange.begin(), boundary_srange.end());
+ }
};
} // namespace Gudhi
diff --git a/src/python/include/Subsampling_interface.h b/src/python/include/Subsampling_interface.h
index cdda851f..6aee7231 100644
--- a/src/python/include/Subsampling_interface.h
+++ b/src/python/include/Subsampling_interface.h
@@ -11,6 +11,7 @@
#ifndef INCLUDE_SUBSAMPLING_INTERFACE_H_
#define INCLUDE_SUBSAMPLING_INTERFACE_H_
+#include <gudhi/distance_functions.h>
#include <gudhi/choose_n_farthest_points.h>
#include <gudhi/pick_n_random_points.h>
#include <gudhi/sparsify_point_set.h>
@@ -27,14 +28,13 @@ namespace subsampling {
using Subsampling_dynamic_kernel = CGAL::Epick_d< CGAL::Dynamic_dimension_tag >;
using Subsampling_point_d = Subsampling_dynamic_kernel::Point_d;
-using Subsampling_ft = Subsampling_dynamic_kernel::FT;
// ------ choose_n_farthest_points ------
std::vector<std::vector<double>> subsampling_n_farthest_points(const std::vector<std::vector<double>>& points,
unsigned nb_points) {
std::vector<std::vector<double>> landmarks;
- Subsampling_dynamic_kernel k;
- choose_n_farthest_points(k, points, nb_points, random_starting_point, std::back_inserter(landmarks));
+ choose_n_farthest_points(Euclidean_distance(), points, nb_points,
+ random_starting_point, std::back_inserter(landmarks));
return landmarks;
}
@@ -42,8 +42,8 @@ std::vector<std::vector<double>> subsampling_n_farthest_points(const std::vector
std::vector<std::vector<double>> subsampling_n_farthest_points(const std::vector<std::vector<double>>& points,
unsigned nb_points, unsigned starting_point) {
std::vector<std::vector<double>> landmarks;
- Subsampling_dynamic_kernel k;
- choose_n_farthest_points(k, points, nb_points, starting_point, std::back_inserter(landmarks));
+ choose_n_farthest_points(Euclidean_distance(), points, nb_points,
+ starting_point, std::back_inserter(landmarks));
return landmarks;
}
diff --git a/src/python/test/test_alpha_complex.py b/src/python/test/test_alpha_complex.py
index a4ee260b..814f8289 100755
--- a/src/python/test/test_alpha_complex.py
+++ b/src/python/test/test_alpha_complex.py
@@ -198,8 +198,7 @@ def test_delaunay_complex():
_delaunay_complex(precision)
def _3d_points_on_a_plane(precision, default_filtration_value):
- alpha = gd.AlphaComplex(off_file=gd.__root_source_dir__ + '/data/points/alphacomplexdoc.off',
- precision = precision)
+ alpha = gd.AlphaComplex(off_file='alphacomplexdoc.off', precision = precision)
simplex_tree = alpha.create_simplex_tree(default_filtration_value = default_filtration_value)
assert simplex_tree.dimension() == 2
@@ -207,6 +206,18 @@ def _3d_points_on_a_plane(precision, default_filtration_value):
assert simplex_tree.num_simplices() == 25
def test_3d_points_on_a_plane():
+ off_file = open("alphacomplexdoc.off", "w")
+ off_file.write("OFF \n" \
+ "7 0 0 \n" \
+ "1.0 1.0 0.0\n" \
+ "7.0 0.0 0.0\n" \
+ "4.0 6.0 0.0\n" \
+ "9.0 6.0 0.0\n" \
+ "0.0 14.0 0.0\n" \
+ "2.0 19.0 0.0\n" \
+ "9.0 17.0 0.0\n" )
+ off_file.close()
+
for default_filtration_value in [True, False]:
for precision in ['fast', 'safe', 'exact']:
_3d_points_on_a_plane(precision, default_filtration_value)
diff --git a/src/python/test/test_bottleneck_distance.py b/src/python/test/test_bottleneck_distance.py
index 6915bea8..07fcc9cc 100755
--- a/src/python/test/test_bottleneck_distance.py
+++ b/src/python/test/test_bottleneck_distance.py
@@ -25,3 +25,15 @@ def test_basic_bottleneck():
assert gudhi.bottleneck_distance(diag1, diag2, 0.1) == pytest.approx(0.75, abs=0.1)
assert gudhi.hera.bottleneck_distance(diag1, diag2, 0) == 0.75
assert gudhi.hera.bottleneck_distance(diag1, diag2, 0.1) == pytest.approx(0.75, rel=0.1)
+
+ import numpy as np
+
+ # Translating both diagrams along the diagonal should not affect the distance;
+ # this checks that negative numbers are not an issue
+ diag1 = np.array(diag1) - 100
+ diag2 = np.array(diag2) - 100
+
+ assert gudhi.bottleneck_distance(diag1, diag2) == 0.75
+ assert gudhi.bottleneck_distance(diag1, diag2, 0.1) == pytest.approx(0.75, abs=0.1)
+ assert gudhi.hera.bottleneck_distance(diag1, diag2, 0) == 0.75
+ assert gudhi.hera.bottleneck_distance(diag1, diag2, 0.1) == pytest.approx(0.75, rel=0.1)
diff --git a/src/python/test/test_representations.py b/src/python/test/test_representations.py
index 589cee00..cda1a15b 100755
--- a/src/python/test/test_representations.py
+++ b/src/python/test/test_representations.py
@@ -4,6 +4,8 @@ import matplotlib.pyplot as plt
import numpy as np
import pytest
+from sklearn.cluster import KMeans
+
def test_representations_examples():
# Disable graphics for testing purposes
@@ -15,6 +17,7 @@ def test_representations_examples():
return None
+from gudhi.representations.vector_methods import Atol
from gudhi.representations.metrics import *
from gudhi.representations.kernel_methods import *
@@ -36,8 +39,62 @@ def test_multiple():
d2 = BottleneckDistance(epsilon=0.00001).fit_transform(l1)
d3 = pairwise_persistence_diagram_distances(l1, l1b, e=0.00001, n_jobs=4)
assert d1 == pytest.approx(d2)
- assert d3 == pytest.approx(d2, abs=1e-5) # Because of 0 entries (on the diagonal)
+ assert d3 == pytest.approx(d2, abs=1e-5) # Because of 0 entries (on the diagonal)
d1 = pairwise_persistence_diagram_distances(l1, l2, metric="wasserstein", order=2, internal_p=2)
d2 = WassersteinDistance(order=2, internal_p=2, n_jobs=4).fit(l2).transform(l1)
print(d1.shape, d2.shape)
- assert d1 == pytest.approx(d2, rel=.02)
+ assert d1 == pytest.approx(d2, rel=0.02)
+
+
+# Test against cluster means rather than raw sorted values, as the point order can be inverted
+# and a sorted test is not documentation-friendly
+# Note: the test below must be kept up to date with the Atol class documentation
+def test_atol_doc():
+ a = np.array([[1, 2, 4], [1, 4, 0], [1, 0, 4]])
+ b = np.array([[4, 2, 0], [4, 4, 0], [4, 0, 2]])
+ c = np.array([[3, 2, -1], [1, 2, -1]])
+
+ atol_vectoriser = Atol(quantiser=KMeans(n_clusters=2, random_state=202006))
+ # Atol will do
+ # X = np.concatenate([a,b,c])
+ # kmeans = KMeans(n_clusters=2, random_state=202006).fit(X)
+ # kmeans.labels_ will be : array([1, 0, 1, 0, 0, 1, 0, 0])
+ first_cluster = np.asarray([a[0], a[2], b[2]])
+ second_cluster = np.asarray([a[1], b[0], b[1], c[0], c[1]])
+
+ # Check the center of the first_cluster and second_cluster are in Atol centers
+ centers = atol_vectoriser.fit(X=[a, b, c]).centers
+ assert np.isclose(centers, first_cluster.mean(axis=0)).all(1).any()
+ assert np.isclose(centers, second_cluster.mean(axis=0)).all(1).any()
+
+ vectorization = atol_vectoriser.transform(X=[a, b, c])
+ assert np.allclose(vectorization[0], atol_vectoriser(a))
+ assert np.allclose(vectorization[1], atol_vectoriser(b))
+ assert np.allclose(vectorization[2], atol_vectoriser(c))
+
+
+def test_dummy_atol():
+ a = np.array([[1, 2, 4], [1, 4, 0], [1, 0, 4]])
+ b = np.array([[4, 2, 0], [4, 4, 0], [4, 0, 2]])
+ c = np.array([[3, 2, -1], [1, 2, -1]])
+
+ for weighting_method in ["cloud", "iidproba"]:
+ for contrast in ["gaussian", "laplacian", "indicator"]:
+ atol_vectoriser = Atol(
+ quantiser=KMeans(n_clusters=1, random_state=202006),
+ weighting_method=weighting_method,
+ contrast=contrast,
+ )
+ atol_vectoriser.fit([a, b, c])
+ atol_vectoriser(a)
+ atol_vectoriser.transform(X=[a, b, c])
+
+
+from gudhi.representations.vector_methods import BettiCurve
+
+
+def test_infinity():
+ a = np.array([[1.0, 8.0], [2.0, np.inf], [3.0, 4.0]])
+ c = BettiCurve(20, [0.0, 10.0])(a)
+ assert c[1] == 0
+ assert c[7] == 3
+ assert c[9] == 2
diff --git a/src/python/test/test_simplex_tree.py b/src/python/test/test_simplex_tree.py
index 2137d822..a3eacaa9 100755
--- a/src/python/test/test_simplex_tree.py
+++ b/src/python/test/test_simplex_tree.py
@@ -8,7 +8,7 @@
- YYYY/MM Author: Description of the modification
"""
-from gudhi import SimplexTree
+from gudhi import SimplexTree, __GUDHI_USE_EIGEN3
import pytest
__author__ = "Vincent Rouvreau"
@@ -340,3 +340,67 @@ def test_simplices_iterator():
assert st.find(simplex[0]) == True
print("filtration is: ", simplex[1])
assert st.filtration(simplex[0]) == simplex[1]
+
+def test_collapse_edges():
+ st = SimplexTree()
+
+ assert st.insert([0, 1], filtration=1.0) == True
+ assert st.insert([1, 2], filtration=1.0) == True
+ assert st.insert([2, 3], filtration=1.0) == True
+ assert st.insert([0, 3], filtration=1.0) == True
+ assert st.insert([0, 2], filtration=2.0) == True
+ assert st.insert([1, 3], filtration=2.0) == True
+
+ assert st.num_simplices() == 10
+
+ if __GUDHI_USE_EIGEN3:
+ st.collapse_edges()
+ assert st.num_simplices() == 9
+ assert st.find([1, 3]) == False
+ for simplex in st.get_skeleton(0):
+ assert simplex[1] == 1.
+ else:
+ # If no Eigen3, collapse_edges throws an exception
+ with pytest.raises(RuntimeError):
+ st.collapse_edges()
+
+def test_reset_filtration():
+ st = SimplexTree()
+
+ assert st.insert([0, 1, 2], 3.) == True
+ assert st.insert([0, 3], 2.) == True
+ assert st.insert([3, 4, 5], 3.) == True
+ assert st.insert([0, 1, 6, 7], 4.) == True
+
+ # Guaranteed by construction
+ for simplex in st.get_simplices():
+ assert st.filtration(simplex[0]) >= 2.
+
+ # Test dimensions up to 5, even though the simplex tree has dimension 3, to check the limits
+ for dimension in range(5, -1, -1):
+ st.reset_filtration(0., dimension)
+ for simplex in st.get_skeleton(3):
+ print(simplex)
+ if len(simplex[0]) < dimension + 1:
+ assert st.filtration(simplex[0]) >= 2.
+ else:
+ assert st.filtration(simplex[0]) == 0.
+
+def test_boundaries_iterator():
+ st = SimplexTree()
+
+ assert st.insert([0, 1, 2, 3], filtration=1.0) == True
+ assert st.insert([1, 2, 3, 4], filtration=2.0) == True
+
+ assert list(st.get_boundaries([1, 2, 3])) == [([1, 2], 1.0), ([1, 3], 1.0), ([2, 3], 1.0)]
+ assert list(st.get_boundaries([2, 3, 4])) == [([2, 3], 1.0), ([2, 4], 2.0), ([3, 4], 2.0)]
+ assert list(st.get_boundaries([2])) == []
+
+ with pytest.raises(RuntimeError):
+ list(st.get_boundaries([]))
+
+ with pytest.raises(RuntimeError):
+ list(st.get_boundaries([0, 4])) # (0, 4) does not exist
+
+ with pytest.raises(RuntimeError):
+ list(st.get_boundaries([6])) # (6) does not exist
diff --git a/src/python/test/test_subsampling.py b/src/python/test/test_subsampling.py
index 31f64e32..4019852e 100755
--- a/src/python/test/test_subsampling.py
+++ b/src/python/test/test_subsampling.py
@@ -141,12 +141,16 @@ def test_simple_sparsify_points():
# assert gudhi.sparsify_point_set(points = [], min_squared_dist = 0.0) == []
# assert gudhi.sparsify_point_set(points = [], min_squared_dist = 10.0) == []
assert gudhi.sparsify_point_set(points=point_set, min_squared_dist=0.0) == point_set
- assert gudhi.sparsify_point_set(points=point_set, min_squared_dist=1.0) == point_set
- assert gudhi.sparsify_point_set(points=point_set, min_squared_dist=2.0) == [
+ assert gudhi.sparsify_point_set(points=point_set, min_squared_dist=0.999) == point_set
+ assert gudhi.sparsify_point_set(points=point_set, min_squared_dist=1.001) == [
[0, 1],
[1, 0],
]
- assert gudhi.sparsify_point_set(points=point_set, min_squared_dist=2.01) == [[0, 1]]
+ assert gudhi.sparsify_point_set(points=point_set, min_squared_dist=1.999) == [
+ [0, 1],
+ [1, 0],
+ ]
+ assert gudhi.sparsify_point_set(points=point_set, min_squared_dist=2.001) == [[0, 1]]
assert (
len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=0.0))
@@ -157,11 +161,11 @@ def test_simple_sparsify_points():
== 5
)
assert (
- len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=40.0))
+ len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=40.1))
== 4
)
assert (
- len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=90.0))
+ len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=89.9))
== 3
)
assert (
@@ -169,7 +173,7 @@ def test_simple_sparsify_points():
== 2
)
assert (
- len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=325.0))
+ len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=324.9))
== 2
)
assert (
diff --git a/src/python/test/test_wasserstein_with_tensors.py b/src/python/test/test_wasserstein_with_tensors.py
new file mode 100755
index 00000000..e3f1411a
--- /dev/null
+++ b/src/python/test/test_wasserstein_with_tensors.py
@@ -0,0 +1,47 @@
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Mathieu Carriere
+
+ Copyright (C) 2020 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+from gudhi.wasserstein import wasserstein_distance as pot
+import numpy as np
+import torch
+import tensorflow as tf
+
+def test_wasserstein_distance_grad():
+ diag1 = torch.tensor([[2.7, 3.7], [9.6, 14.0], [34.2, 34.974]], requires_grad=True)
+ diag2 = torch.tensor([[2.8, 4.45], [9.5, 14.1]], requires_grad=True)
+ diag3 = torch.tensor([[2.8, 4.45], [9.5, 14.1]], requires_grad=True)
+ assert diag1.grad is None and diag2.grad is None and diag3.grad is None
+ dist12 = pot(diag1, diag2, internal_p=2, order=2, enable_autodiff=True)
+ dist30 = pot(diag3, torch.tensor([]), internal_p=2, order=2, enable_autodiff=True)
+ dist12.backward()
+ dist30.backward()
+ assert not torch.isnan(diag1.grad).any() and not torch.isnan(diag2.grad).any() and not torch.isnan(diag3.grad).any()
+ diag4 = torch.tensor([[0., 10.]], requires_grad=True)
+ diag5 = torch.tensor([[1., 11.], [3., 4.]], requires_grad=True)
+ dist45 = pot(diag4, diag5, internal_p=1, order=1, enable_autodiff=True)
+ assert dist45 == 3.
+ dist45.backward()
+ assert np.array_equal(diag4.grad, [[-1., -1.]])
+ assert np.array_equal(diag5.grad, [[1., 1.], [-1., 1.]])
+ diag6 = torch.tensor([[5., 10.]], requires_grad=True)
+ pot(diag6, diag6, internal_p=2, order=2, enable_autodiff=True).backward()
+ # https://github.com/jonasrauber/eagerpy/issues/6
+ # assert np.array_equal(diag6.grad, [[0., 0.]])
+
+def test_wasserstein_distance_grad_tensorflow():
+ with tf.GradientTape() as tape:
+ diag4 = tf.convert_to_tensor(tf.Variable(initial_value=np.array([[0., 10.]]), trainable=True))
+ diag5 = tf.convert_to_tensor(tf.Variable(initial_value=np.array([[1., 11.], [3., 4.]]), trainable=True))
+ dist45 = pot(diag4, diag5, internal_p=1, order=1, enable_autodiff=True)
+ assert dist45 == 3.
+
+ grads = tape.gradient(dist45, [diag4, diag5])
+ assert np.array_equal(grads[0].values, [[-1., -1.]])
+ assert np.array_equal(grads[1].values, [[1., 1.], [-1., 1.]]) \ No newline at end of file