-rw-r--r--  .github/next_release.md                                      3
-rw-r--r--  biblio/how_to_cite_gudhi.bib.in                            352
m---------  ext/hera                                                     0
-rw-r--r--  src/Cech_complex/doc/Intro_cech_complex.h                    2
-rw-r--r--  src/Cech_complex/include/gudhi/Cech_complex.h                3
-rw-r--r--  src/Simplex_tree/include/gudhi/Simplex_tree.h               55
-rw-r--r--  src/Simplex_tree/test/simplex_tree_unit_test.cpp            14
-rw-r--r--  src/Tangential_complex/include/gudhi/Tangential_complex.h    5
-rw-r--r--  src/cmake/modules/GUDHI_submodules.cmake                     6
-rw-r--r--  src/cmake/modules/GUDHI_user_version_target.cmake            5
-rw-r--r--  src/common/doc/main_page.md                                  2
-rw-r--r--  src/python/CMakeLists.txt                                    8
-rw-r--r--  src/python/doc/representations_sum.inc                      22
-rw-r--r--  src/python/gudhi/hera/bottleneck.cc                          2
-rw-r--r--  src/python/gudhi/hera/wasserstein.cc                        10
-rw-r--r--  src/python/gudhi/representations/vector_methods.py           7
-rw-r--r--  src/python/gudhi/simplex_tree.pxd                            1
-rw-r--r--  src/python/gudhi/simplex_tree.pyx                          100
-rw-r--r--  src/python/include/Alpha_complex_factory.h                   4
-rw-r--r--  src/python/include/Simplex_tree_interface.h                 26
-rw-r--r--  src/python/setup.py.in                                       6
-rw-r--r--  src/python/test/test_persistence_graphical_tools.py          5
-rwxr-xr-x  src/python/test/test_simplex_tree.py                       365
-rwxr-xr-x  src/python/test/test_wasserstein_distance.py                 9
24 files changed, 675 insertions, 337 deletions
diff --git a/.github/next_release.md b/.github/next_release.md
index d5fcef1c..929a7ce6 100644
--- a/.github/next_release.md
+++ b/.github/next_release.md
@@ -9,6 +9,9 @@ Below is a list of changes made since GUDHI 3.6.0:
- [Module](link)
- ...
+- [Simplex tree](https://gudhi.inria.fr/python/latest/simplex_tree_ref.html)
+ - New functions to initialize from a matrix or insert batches of simplices of the same dimension.
+
- [Rips complex](https://gudhi.inria.fr/python/latest/rips_complex_user.html)
- Construction now rejects positional arguments, you need to specify `points=X`.
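The "Simplex tree" entry above refers to the new Python-side constructors introduced further down in
src/python/gudhi/simplex_tree.pyx. As a minimal sketch, assuming a GUDHI build that already includes
this change, initializing a complex from a (symmetric) filtration matrix could look like::

    import numpy as np
    from gudhi import SimplexTree

    # Diagonal entries are the vertex filtration values, off-diagonal entries the edge values.
    f = np.array([[0.0, 1.0, 2.0],
                  [1.0, 0.0, 1.5],
                  [2.0, 1.5, 0.0]])
    st = SimplexTree.create_from_array(f, max_filtration=1.8)  # the edge with value 2.0 is skipped
    print(st.num_vertices(), st.num_simplices())  # 3 vertices, 5 simplices in total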
diff --git a/biblio/how_to_cite_gudhi.bib.in b/biblio/how_to_cite_gudhi.bib.in
index 579dbf41..02c09dea 100644
--- a/biblio/how_to_cite_gudhi.bib.in
+++ b/biblio/how_to_cite_gudhi.bib.in
@@ -1,168 +1,262 @@
@book{gudhi:urm
-, title = "{GUDHI} User and Reference Manual"
-, author = "{The GUDHI Project}"
-, publisher = "{GUDHI Editorial Board}"
-, edition = "{@GUDHI_VERSION@}"
-, year = @GUDHI_VERSION_YEAR@
-, url = "https://gudhi.inria.fr/doc/@GUDHI_VERSION@/"
+, title = {GUDHI User and Reference Manual}
+, author = {The GUDHI Project}
+, publisher = {GUDHI Editorial Board}
+, edition = {@GUDHI_VERSION@}
+, year = {@GUDHI_VERSION_YEAR@}
+, url = {https://gudhi.inria.fr/doc/@GUDHI_VERSION@/}
}
-@incollection{gudhi:FilteredComplexes
-, author = "Cl\'ement Maria"
-, title = "Filtered Complexes"
-, publisher = "{GUDHI Editorial Board}"
-, edition = "{@GUDHI_VERSION@}"
-, booktitle = "{GUDHI} User and Reference Manual"
-, url = "https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__simplex__tree.html"
-, year = @GUDHI_VERSION_YEAR@
+@incollection{gudhi:CubicalComplex
+, author = {Pawel Dlotko}
+, title = {Cubical complex}
+, publisher = {GUDHI Editorial Board}
+, edition = {@GUDHI_VERSION@}
+, booktitle = {GUDHI User and Reference Manual}
+, url = {https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__cubical__complex.html}
+, year = {@GUDHI_VERSION_YEAR@}
}
-@incollection{gudhi:PersistentCohomology
-, author = "Cl\'ement Maria"
-, title = "Persistent Cohomology"
-, publisher = "{GUDHI Editorial Board}"
-, edition = "{@GUDHI_VERSION@}"
-, booktitle = "{GUDHI} User and Reference Manual"
-, url = "https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__persistent__cohomology.html"
-, year = @GUDHI_VERSION_YEAR@
+@incollection{gudhi:FilteredComplexes
+, author = {Cl{\'{e}}ment Maria}
+, title = {Filtered Complexes}
+, publisher = {GUDHI Editorial Board}
+, edition = {@GUDHI_VERSION@}
+, booktitle = {GUDHI User and Reference Manual}
+, url = {https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__simplex__tree.html}
+, year = {@GUDHI_VERSION_YEAR@}
}
-@incollection{gudhi:Contraction
-, author = "David Salinas"
-, title = "Contraction"
-, publisher = "{GUDHI Editorial Board}"
-, edition = "{@GUDHI_VERSION@}"
-, booktitle = "{GUDHI} User and Reference Manual"
-, url = "https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__contr.html"
-, year = @GUDHI_VERSION_YEAR@
+@incollection{gudhi:ToplexMap
+, author = {Fran{\c{c}}ois Godi}
+, title = {Toplex map}
+, publisher = {GUDHI Editorial Board}
+, edition = {@GUDHI_VERSION@}
+, booktitle = {GUDHI User and Reference Manual}
+, url = {https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__toplex__map.html}
+, year = {@GUDHI_VERSION_YEAR@}
}
@incollection{gudhi:SkeletonBlocker
-, author = "David Salinas"
-, title = "Skeleton-Blocker"
-, publisher = "{GUDHI Editorial Board}"
-, edition = "{@GUDHI_VERSION@}"
-, booktitle = "{GUDHI} User and Reference Manual"
-, url = "https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__skbl.html"
-, year = @GUDHI_VERSION_YEAR@
+, author = {David Salinas}
+, title = {Skeleton-Blocker}
+, publisher = {GUDHI Editorial Board}
+, edition = {@GUDHI_VERSION@}
+, booktitle = {GUDHI User and Reference Manual}
+, url = {https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__skbl.html}
+, year = {@GUDHI_VERSION_YEAR@}
+}
+
+@incollection{gudhi:Contraction
+, author = {David Salinas}
+, title = {Contraction}
+, publisher = {GUDHI Editorial Board}
+, edition = {@GUDHI_VERSION@}
+, booktitle = {GUDHI User and Reference Manual}
+, url = {https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__contr.html}
+, year = {@GUDHI_VERSION_YEAR@}
}
@incollection{gudhi:AlphaComplex
-, author = "Vincent Rouvreau"
-, title = "Alpha complex"
-, publisher = "{GUDHI Editorial Board}"
-, edition = "{@GUDHI_VERSION@}"
-, booktitle = "{GUDHI} User and Reference Manual"
-, url = "https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__alpha__complex.html"
-, year = @GUDHI_VERSION_YEAR@
+, author = {Vincent Rouvreau}
+, title = {Alpha complex}
+, publisher = {GUDHI Editorial Board}
+, edition = {@GUDHI_VERSION@}
+, booktitle = {GUDHI User and Reference Manual}
+, url = {https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__alpha__complex.html}
+, year = {@GUDHI_VERSION_YEAR@}
}
-@incollection{gudhi:CubicalComplex
-, author = "Pawel Dlotko"
-, title = "Cubical complex"
-, publisher = "{GUDHI Editorial Board}"
-, edition = "{@GUDHI_VERSION@}"
-, booktitle = "{GUDHI} User and Reference Manual"
-, url = "https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__cubical__complex.html"
-, year = @GUDHI_VERSION_YEAR@
+@incollection{gudhi:CechComplex
+, author = {Vincent Rouvreau and Hind Montassif}
+, title = {{\v{C}}ech complex}
+, publisher = {GUDHI Editorial Board}
+, edition = {@GUDHI_VERSION@}
+, booktitle = {GUDHI User and Reference Manual}
+, url = {https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__cech__complex.html}
+, year = {@GUDHI_VERSION_YEAR@}
+}
+
+@incollection{gudhi:RipsComplex
+, author = {Cl{\'{e}}ment Maria and Pawel Dlotko and Vincent Rouvreau and Marc Glisse}
+, title = {Rips complex}
+, publisher = {GUDHI Editorial Board}
+, edition = {@GUDHI_VERSION@}
+, booktitle = {GUDHI User and Reference Manual}
+, url = {https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__rips__complex.html}
+, year = {@GUDHI_VERSION_YEAR@}
+}
+
+@incollection{gudhi:Collapse
+, author = {Siddharth Pritam and Marc Glisse}
+, title = {Edge collapse}
+, publisher = {GUDHI Editorial Board}
+, edition = {@GUDHI_VERSION@}
+, booktitle = {GUDHI User and Reference Manual}
+, url = {https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__edge__collapse.html}
+, year = {@GUDHI_VERSION_YEAR@}
}
@incollection{gudhi:WitnessComplex
-, author = "Siargey Kachanovich"
-, title = "Witness complex"
-, publisher = "{GUDHI Editorial Board}"
-, edition = "{@GUDHI_VERSION@}"
-, booktitle = "{GUDHI} User and Reference Manual"
-, url = "https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__witness__complex.html"
-, year = @GUDHI_VERSION_YEAR@
+, author = {Siargey Kachanovich}
+, title = {Witness complex}
+, publisher = {GUDHI Editorial Board}
+, edition = {@GUDHI_VERSION@}
+, booktitle = {GUDHI User and Reference Manual}
+, url = {https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__witness__complex.html}
+, year = {@GUDHI_VERSION_YEAR@}
}
-@incollection{gudhi:SubSampling
-, author = "Cl\'ement Jamin and Siargey Kachanovich"
-, title = "Subsampling"
-, publisher = "{GUDHI Editorial Board}"
-, edition = "{@GUDHI_VERSION@}"
-, booktitle = "{GUDHI} User and Reference Manual"
-, url = "https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__subsampling.html"
-, year = @GUDHI_VERSION_YEAR@
+@incollection{gudhi:CoverComplex
+, author = {Mathieu Carri{\`{e}}re}
+, title = {Cover complex}
+, publisher = {GUDHI Editorial Board}
+, edition = {@GUDHI_VERSION@}
+, booktitle = {GUDHI User and Reference Manual}
+, url = {https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__cover__complex.html}
+, year = {@GUDHI_VERSION_YEAR@}
}
-@incollection{gudhi:SpatialSearching
-, author = "Cl\'ement Jamin"
-, title = "Spatial searching"
-, publisher = "{GUDHI Editorial Board}"
-, edition = "{@GUDHI_VERSION@}"
-, booktitle = "{GUDHI} User and Reference Manual"
-, url = "https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__spatial__searching.html"
-, year = @GUDHI_VERSION_YEAR@
+@incollection{gudhi:CoxeterTriangulation
+, author = {Siargey Kachanovich}
+, title = {Coxeter triangulation}
+, publisher = {GUDHI Editorial Board}
+, edition = {@GUDHI_VERSION@}
+, booktitle = {GUDHI User and Reference Manual}
+, url = {https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__cover__complex.html}
+, year = {@GUDHI_VERSION_YEAR@}
}
@incollection{gudhi:TangentialComplex
-, author = "Cl\'ement Jamin"
-, title = "Tangential complex"
-, publisher = "{GUDHI Editorial Board}"
-, edition = "{@GUDHI_VERSION@}"
-, booktitle = "{GUDHI} User and Reference Manual"
-, url = "https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__tangential__complex.html"
-, year = @GUDHI_VERSION_YEAR@
+, author = {Cl{\'{e}}ment Jamin}
+, title = {Tangential complex}
+, publisher = {GUDHI Editorial Board}
+, edition = {@GUDHI_VERSION@}
+, booktitle = {GUDHI User and Reference Manual}
+, url = {https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__tangential__complex.html}
+, year = {@GUDHI_VERSION_YEAR@}
}
-@incollection{gudhi:RipsComplex
-, author = "Cl\'ement Maria and Pawel Dlotko and Vincent Rouvreau and Marc Glisse"
-, title = "Rips complex"
-, publisher = "{GUDHI Editorial Board}"
-, edition = "{@GUDHI_VERSION@}"
-, booktitle = "{GUDHI} User and Reference Manual"
-, url = "https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__rips__complex.html"
-, year = @GUDHI_VERSION_YEAR@
+@incollection{gudhi:PersistentCohomology
+, author = {Cl{\'{e}}ment Maria}
+, title = {Persistent Cohomology}
+, publisher = {GUDHI Editorial Board}
+, edition = {@GUDHI_VERSION@}
+, booktitle = {GUDHI User and Reference Manual}
+, url = {https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__persistent__cohomology.html}
+, year = {@GUDHI_VERSION_YEAR@}
}
@incollection{gudhi:BottleneckDistance
-, author = "Fran{{\c{c}}ois Godi"
-, title = "Bottleneck distance"
-, publisher = "{GUDHI Editorial Board}"
-, edition = "{@GUDHI_VERSION@}"
-, booktitle = "{GUDHI} User and Reference Manual"
-, url = "https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__bottleneck__distance.html"
-, year = @GUDHI_VERSION_YEAR@
+, author = {Fran{\c{c}}ois Godi}
+, title = {Bottleneck distance}
+, publisher = {GUDHI Editorial Board}
+, edition = {@GUDHI_VERSION@}
+, booktitle = {GUDHI User and Reference Manual}
+, url = {https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__bottleneck__distance.html}
+, year = {@GUDHI_VERSION_YEAR@}
}
-@incollection{gudhi:cython
-, author = "Vincent Rouvreau"
-, title = "Cython interface"
-, publisher = "{GUDHI Editorial Board}"
-, edition = "{@GUDHI_VERSION@}"
-, booktitle = "{GUDHI} User and Reference Manual"
-, url = "https://gudhi.inria.fr/python/@GUDHI_VERSION@/"
-, year = @GUDHI_VERSION_YEAR@
+@incollection{gudhi:PersistenceRepresentations
+, author = {Pawel Dlotko}
+, title = {Persistence representations}
+, publisher = {GUDHI Editorial Board}
+, edition = {@GUDHI_VERSION@}
+, booktitle = {GUDHI User and Reference Manual}
+, url = {https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group___persistence__representations.html}
+, year = {@GUDHI_VERSION_YEAR@}
}
-@incollection{gudhi:CoverComplex
-, author = "Mathieu Carri\`ere"
-, title = "Cover complex"
-, publisher = "{GUDHI Editorial Board}"
-, edition = "{@GUDHI_VERSION@}"
-, booktitle = "{GUDHI} User and Reference Manual"
-, url = "https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__cover__complex.html"
-, year = @GUDHI_VERSION_YEAR@
+@incollection{gudhi:SubSampling
+, author = {Cl{\'{e}}ment Jamin and Siargey Kachanovich}
+, title = {Subsampling}
+, publisher = {GUDHI Editorial Board}
+, edition = {@GUDHI_VERSION@}
+, booktitle = {GUDHI User and Reference Manual}
+, url = {https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__subsampling.html}
+, year = {@GUDHI_VERSION_YEAR@}
}
-@incollection{gudhi:PersistenceRepresentations
-, author = "Pawel Dlotko"
-, title = "Persistence representations"
-, publisher = "{GUDHI Editorial Board}"
-, edition = "{@GUDHI_VERSION@}"
-, booktitle = "{GUDHI} User and Reference Manual"
-, url = "https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group___persistence__representations.html"
-, year = @GUDHI_VERSION_YEAR@
+@incollection{gudhi:SpatialSearching
+, author = {Cl{\'{e}}ment Jamin}
+, title = {Spatial searching}
+, publisher = {GUDHI Editorial Board}
+, edition = {@GUDHI_VERSION@}
+, booktitle = {GUDHI User and Reference Manual}
+, url = {https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__spatial__searching.html}
+, year = {@GUDHI_VERSION_YEAR@}
}
-@incollection{gudhi:Collapse
-, author = "Siddharth Pritam and Marc Glisse"
-, title = "Edge collapse"
-, publisher = "{GUDHI Editorial Board}"
-, edition = "{@GUDHI_VERSION@}"
-, booktitle = "{GUDHI} User and Reference Manual"
-, url = "https://gudhi.inria.fr/doc/@GUDHI_VERSION@/group__edge__collapse.html"
-, year = @GUDHI_VERSION_YEAR@
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+% Python specific gudhi modules
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+@incollection{gudhi:WeightedRipsComplex
+, author = {Rapha{\"{e}}l Tinarrage and Yuichi Ike and Masatoshi Takenouchi}
+, title = {Weighted Rips Complex}
+, publisher = {GUDHI Editorial Board}
+, edition = {@GUDHI_VERSION@}
+, booktitle = {GUDHI User and Reference Manual}
+, url = {https://gudhi.inria.fr/python/@GUDHI_VERSION@/rips_complex_user.html#weighted-rips-complex}
+, year = {@GUDHI_VERSION_YEAR@}
+}
+
+@incollection{gudhi:DTMRipsComplex
+, author = {Yuichi Ike}
+, title = {DTM Rips Complex}
+, publisher = {GUDHI Editorial Board}
+, edition = {@GUDHI_VERSION@}
+, booktitle = {GUDHI User and Reference Manual}
+, url = {https://gudhi.inria.fr/python/@GUDHI_VERSION@/rips_complex_user.html#dtm-rips-complex}
+, year = {@GUDHI_VERSION_YEAR@}
+}
+
+@incollection{gudhi:WassersteinDistance
+, author = {Th{\'{e}}o Lacombe and Marc Glisse}
+, title = {Wasserstein distance}
+, publisher = {GUDHI Editorial Board}
+, edition = {@GUDHI_VERSION@}
+, booktitle = {GUDHI User and Reference Manual}
+, url = {https://gudhi.inria.fr/python/@GUDHI_VERSION@/wasserstein_distance_user.html}
+, year = {@GUDHI_VERSION_YEAR@}
+}
+
+@incollection{gudhi:PersistenceRepresentationsScikitlearnInterface
+, author = {Mathieu Carri{\`{e}}re and Gard Spreemann and Wojciech Reise}
+, title = {Persistence representations scikit-learn like interface}
+, publisher = {GUDHI Editorial Board}
+, edition = {@GUDHI_VERSION@}
+, booktitle = {GUDHI User and Reference Manual}
+, url = {https://gudhi.inria.fr/python/@GUDHI_VERSION@/representations.html}
+, year = {@GUDHI_VERSION_YEAR@}
+}
+
+@incollection{gudhi:Atol
+, author = {Martin Royer}
+, title = {Measure Vectorization for Automatic Topologically-Oriented Learning}
+, publisher = {GUDHI Editorial Board}
+, edition = {@GUDHI_VERSION@}
+, booktitle = {GUDHI User and Reference Manual}
+, url = {https://gudhi.inria.fr/python/@GUDHI_VERSION@/representations.html#gudhi.representations.vector_methods.Atol}
+, year = {@GUDHI_VERSION_YEAR@}
+}
+
+@incollection{gudhi:DistanceToMeasure
+, author = {Marc Glisse}
+, title = {Distance to measure}
+, publisher = {GUDHI Editorial Board}
+, edition = {@GUDHI_VERSION@}
+, booktitle = {GUDHI User and Reference Manual}
+, url = {https://gudhi.inria.fr/python/@GUDHI_VERSION@/point_cloud.html#module-gudhi.point_cloud.knn}
+, year = {@GUDHI_VERSION_YEAR@}
+}
+
+@incollection{gudhi:PersistenceBasedClustering
+, author = {Marc Glisse}
+, title = {persistence-based clustering}
+, publisher = {GUDHI Editorial Board}
+, edition = {@GUDHI_VERSION@}
+, booktitle = {GUDHI User and Reference Manual}
+, url = {https://gudhi.inria.fr/python/@GUDHI_VERSION@/clustering.html}
+, year = {@GUDHI_VERSION_YEAR@}
}
diff --git a/ext/hera b/ext/hera
-Subproject b528c4067a8aac346eb307d3c23b82d5953cfe2
+Subproject 8bfdd4bd32f005c18b5c75c502b987de552d6e4
diff --git a/src/Cech_complex/doc/Intro_cech_complex.h b/src/Cech_complex/doc/Intro_cech_complex.h
index 595fb64b..73093c07 100644
--- a/src/Cech_complex/doc/Intro_cech_complex.h
+++ b/src/Cech_complex/doc/Intro_cech_complex.h
@@ -17,7 +17,7 @@ namespace cech_complex {
/** \defgroup cech_complex Čech complex
*
- * \author Vincent Rouvreau
+ * \author Vincent Rouvreau, Hind Montassif
*
* @{
*
diff --git a/src/Cech_complex/include/gudhi/Cech_complex.h b/src/Cech_complex/include/gudhi/Cech_complex.h
index 625f7c9c..dbdf5e93 100644
--- a/src/Cech_complex/include/gudhi/Cech_complex.h
+++ b/src/Cech_complex/include/gudhi/Cech_complex.h
@@ -1,11 +1,12 @@
/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
* See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
- * Author(s): Vincent Rouvreau
+ * Author(s): Vincent Rouvreau, Hind Montassif
*
* Copyright (C) 2018 Inria
*
* Modification(s):
* - YYYY/MM Author: Description of the modification
+ * - 2022/02 Hind Montassif : Replace MiniBall with Sphere_circumradius
*/
#ifndef CECH_COMPLEX_H_
diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree.h b/src/Simplex_tree/include/gudhi/Simplex_tree.h
index 9059219c..4177a0b8 100644
--- a/src/Simplex_tree/include/gudhi/Simplex_tree.h
+++ b/src/Simplex_tree/include/gudhi/Simplex_tree.h
@@ -24,6 +24,8 @@
#include <boost/iterator/transform_iterator.hpp>
#include <boost/graph/adjacency_list.hpp>
#include <boost/range/adaptor/reversed.hpp>
+#include <boost/range/adaptor/transformed.hpp>
+#include <boost/range/size.hpp>
#include <boost/container/static_vector.hpp>
#ifdef GUDHI_USE_TBB
@@ -702,10 +704,10 @@ class Simplex_tree {
return true;
}
- private:
- /** \brief Inserts a simplex represented by a vector of vertex.
- * @param[in] simplex vector of Vertex_handles, representing the vertices of the new simplex. The vector must be
- * sorted by increasing vertex handle order.
+ protected:
+  /** \brief Inserts a simplex represented by a range of vertices.
+ * @param[in] simplex range of Vertex_handles, representing the vertices of the new simplex. The range must be
+ * sorted by increasing vertex handle order, and not empty.
* @param[in] filtration the filtration value assigned to the new simplex.
* @return If the new simplex is inserted successfully (i.e. it was not in the
* simplicial complex yet) the bool is set to true and the Simplex_handle is the handle assigned
@@ -717,12 +719,13 @@ class Simplex_tree {
* null_simplex.
*
*/
- std::pair<Simplex_handle, bool> insert_vertex_vector(const std::vector<Vertex_handle>& simplex,
+ template <class RandomVertexHandleRange = std::initializer_list<Vertex_handle>>
+ std::pair<Simplex_handle, bool> insert_simplex_raw(const RandomVertexHandleRange& simplex,
Filtration_value filtration) {
Siblings * curr_sib = &root_;
std::pair<Simplex_handle, bool> res_insert;
auto vi = simplex.begin();
- for (; vi != simplex.end() - 1; ++vi) {
+ for (; vi != std::prev(simplex.end()); ++vi) {
GUDHI_CHECK(*vi != null_vertex(), "cannot use the dummy null_vertex() as a real vertex");
res_insert = curr_sib->members_.emplace(*vi, Node(curr_sib, filtration));
if (!(has_children(res_insert.first))) {
@@ -743,9 +746,10 @@ class Simplex_tree {
return std::pair<Simplex_handle, bool>(null_simplex(), false);
}
// otherwise the insertion has succeeded - size is a size_type
- if (static_cast<int>(simplex.size()) - 1 > dimension_) {
+ int dim = static_cast<int>(boost::size(simplex)) - 1;
+ if (dim > dimension_) {
// Update dimension if needed
- dimension_ = static_cast<int>(simplex.size()) - 1;
+ dimension_ = dim;
}
return res_insert;
}
@@ -786,7 +790,7 @@ class Simplex_tree {
// Copy before sorting
std::vector<Vertex_handle> copy(first, last);
std::sort(std::begin(copy), std::end(copy));
- return insert_vertex_vector(copy, filtration);
+ return insert_simplex_raw(copy, filtration);
}
/** \brief Insert a N-simplex and all his subfaces, from a N-simplex represented by a range of
@@ -1119,16 +1123,12 @@ class Simplex_tree {
dimension_ = 1;
}
- root_.members_.reserve(num_vertices(skel_graph));
+ root_.members_.reserve(num_vertices(skel_graph)); // probably useless in most cases
+ auto verts = vertices(skel_graph) | boost::adaptors::transformed([&](auto v){
+ return Dit_value_t(v, Node(&root_, get(vertex_filtration_t(), skel_graph, v))); });
+ root_.members_.insert(boost::begin(verts), boost::end(verts));
+ // This automatically sorts the vertices, the graph concept doesn't guarantee the order in which we iterate.
- typename boost::graph_traits<OneSkeletonGraph>::vertex_iterator v_it,
- v_it_end;
- for (std::tie(v_it, v_it_end) = vertices(skel_graph); v_it != v_it_end;
- ++v_it) {
- root_.members_.emplace_hint(
- root_.members_.end(), *v_it,
- Node(&root_, get(vertex_filtration_t(), skel_graph, *v_it)));
- }
std::pair<typename boost::graph_traits<OneSkeletonGraph>::edge_iterator,
typename boost::graph_traits<OneSkeletonGraph>::edge_iterator> boost_edges = edges(skel_graph);
// boost_edges.first is the equivalent to boost_edges.begin()
@@ -1137,7 +1137,7 @@ class Simplex_tree {
auto edge = *(boost_edges.first);
auto u = source(edge, skel_graph);
auto v = target(edge, skel_graph);
- if (u == v) throw "Self-loops are not simplicial";
+ if (u == v) throw std::invalid_argument("Self-loops are not simplicial");
// We cannot skip edges with the wrong orientation and expect them to
// come a second time with the right orientation, that does not always
// happen in practice. emplace() should be a NOP when an element with the
@@ -1156,6 +1156,21 @@ class Simplex_tree {
}
}
+ /** \brief Inserts several vertices.
+ * @param[in] vertices A range of Vertex_handle
+ * @param[in] filt filtration value of the new vertices (the same for all)
+ *
+ * This may be faster than inserting the vertices one by one, especially in a random order.
+ * The complex does not need to be empty before calling this function. However, if a vertex is
+ * already present, its filtration value is not modified, unlike with other insertion functions. */
+ template <class VertexRange>
+ void insert_batch_vertices(VertexRange const& vertices, Filtration_value filt = 0) {
+ auto verts = vertices | boost::adaptors::transformed([&](auto v){
+ return Dit_value_t(v, Node(&root_, filt)); });
+ root_.members_.insert(boost::begin(verts), boost::end(verts));
+ if (dimension_ < 0 && !root_.members_.empty()) dimension_ = 0;
+ }
+
/** \brief Expands the Simplex_tree containing only its one skeleton
* until dimension max_dim.
*
@@ -1598,7 +1613,7 @@ class Simplex_tree {
Simplex_tree st_copy = *this;
// Add point for coning the simplicial complex
- this->insert_simplex({maxvert}, -3);
+ this->insert_simplex_raw({maxvert}, -3);
// For each simplex
std::vector<Vertex_handle> vr;
diff --git a/src/Simplex_tree/test/simplex_tree_unit_test.cpp b/src/Simplex_tree/test/simplex_tree_unit_test.cpp
index 79bb5a93..ebcc406c 100644
--- a/src/Simplex_tree/test/simplex_tree_unit_test.cpp
+++ b/src/Simplex_tree/test/simplex_tree_unit_test.cpp
@@ -1038,3 +1038,17 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(simplex_tree_boundaries_and_opposite_vertex_iterat
BOOST_CHECK(opposite_vertices.size() == 0);
}
}
+
+BOOST_AUTO_TEST_CASE(batch_vertices) {
+ typedef Simplex_tree<> typeST;
+ std::clog << "********************************************************************" << std::endl;
+ std::clog << "TEST BATCH VERTEX INSERTION" << std::endl;
+ typeST st;
+ st.insert_simplex_and_subfaces({3}, 1.5);
+ std::vector verts { 2, 3, 5, 6 };
+ st.insert_batch_vertices(verts);
+ BOOST_CHECK(st.num_vertices() == 4);
+ BOOST_CHECK(st.num_simplices() == 4);
+ BOOST_CHECK(st.filtration(st.find({2})) == 0.);
+ BOOST_CHECK(st.filtration(st.find({3})) == 1.5);
+}
diff --git a/src/Tangential_complex/include/gudhi/Tangential_complex.h b/src/Tangential_complex/include/gudhi/Tangential_complex.h
index 56a24af0..b448db2d 100644
--- a/src/Tangential_complex/include/gudhi/Tangential_complex.h
+++ b/src/Tangential_complex/include/gudhi/Tangential_complex.h
@@ -345,10 +345,11 @@ class Tangential_complex {
m_stars.resize(m_points.size());
m_squared_star_spheres_radii_incl_margin.resize(m_points.size(), FT(-1));
#ifdef GUDHI_TC_PERTURB_POSITION
- if (m_points.empty())
+ if (m_points.empty()) {
m_translations.clear();
- else
+ } else {
m_translations.resize(m_points.size(), m_k.construct_vector_d_object()(m_ambient_dim));
+ }
#if defined(GUDHI_USE_TBB)
delete[] m_p_perturb_mutexes;
m_p_perturb_mutexes = new Mutex_for_perturb[m_points.size()];
diff --git a/src/cmake/modules/GUDHI_submodules.cmake b/src/cmake/modules/GUDHI_submodules.cmake
index 78b045bd..c844386d 100644
--- a/src/cmake/modules/GUDHI_submodules.cmake
+++ b/src/cmake/modules/GUDHI_submodules.cmake
@@ -1,5 +1,3 @@
# For those who dislike bundled dependencies, this indicates where to find a preinstalled Hera.
-set(HERA_WASSERSTEIN_INTERNAL_INCLUDE_DIR ${CMAKE_SOURCE_DIR}/ext/hera/wasserstein/include)
-set(HERA_WASSERSTEIN_INCLUDE_DIR ${HERA_WASSERSTEIN_INTERNAL_INCLUDE_DIR} CACHE PATH "Directory where one can find Hera's wasserstein.h")
-set(HERA_BOTTLENECK_INTERNAL_INCLUDE_DIR ${CMAKE_SOURCE_DIR}/ext/hera/bottleneck/include)
-set(HERA_BOTTLENECK_INCLUDE_DIR ${HERA_BOTTLENECK_INTERNAL_INCLUDE_DIR} CACHE PATH "Directory where one can find Hera's bottleneck.h")
\ No newline at end of file
+set(HERA_INTERNAL_INCLUDE_DIR ${CMAKE_SOURCE_DIR}/ext/hera/include)
+set(HERA_INCLUDE_DIR ${HERA_INTERNAL_INCLUDE_DIR} CACHE PATH "Directory where one can find hera/{wasserstein.h,bottleneck.h}")
diff --git a/src/cmake/modules/GUDHI_user_version_target.cmake b/src/cmake/modules/GUDHI_user_version_target.cmake
index 4487ad86..2144ff6f 100644
--- a/src/cmake/modules/GUDHI_user_version_target.cmake
+++ b/src/cmake/modules/GUDHI_user_version_target.cmake
@@ -60,10 +60,9 @@ add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
copy_directory ${CMAKE_SOURCE_DIR}/src/GudhUI ${GUDHI_USER_VERSION_DIR}/GudhUI)
-if(HERA_WASSERSTEIN_INCLUDE_DIR STREQUAL HERA_WASSERSTEIN_INTERNAL_INCLUDE_DIR OR
- HERA_BOTTLENECK_INCLUDE_DIR STREQUAL HERA_BOTTLENECK_INTERNAL_INCLUDE_DIR)
+if(HERA_INCLUDE_DIR STREQUAL HERA_INTERNAL_INCLUDE_DIR)
add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy_directory ${CMAKE_SOURCE_DIR}/ext/hera ${GUDHI_USER_VERSION_DIR}/ext/hera)
+ copy_directory ${CMAKE_SOURCE_DIR}/ext/hera/include ${GUDHI_USER_VERSION_DIR}/ext/hera/include)
endif()
set(GUDHI_DIRECTORIES "doc;example;concept;utilities")
diff --git a/src/common/doc/main_page.md b/src/common/doc/main_page.md
index ce903405..9b7c2853 100644
--- a/src/common/doc/main_page.md
+++ b/src/common/doc/main_page.md
@@ -178,7 +178,7 @@
The set of all simplices is filtered by the radius of their minimal enclosing ball.
</td>
<td width="15%">
- <b>Author:</b> Vincent Rouvreau<br>
+ <b>Author:</b> Vincent Rouvreau, Hind Montassif<br>
<b>Introduced in:</b> GUDHI 2.2.0<br>
<b>Copyright:</b> MIT [(LGPL v3)](../../licensing/)<br>
<b>Requires:</b> \ref cgal
diff --git a/src/python/CMakeLists.txt b/src/python/CMakeLists.txt
index 32ec13bd..0bb864ff 100644
--- a/src/python/CMakeLists.txt
+++ b/src/python/CMakeLists.txt
@@ -44,7 +44,7 @@ function( add_gudhi_debug_info DEBUG_INFO )
endfunction( add_gudhi_debug_info )
if(PYTHONINTERP_FOUND)
- if(PYBIND11_FOUND AND CYTHON_FOUND)
+ if(NUMPY_FOUND AND PYBIND11_FOUND AND CYTHON_FOUND)
add_gudhi_debug_info("Pybind11 version ${PYBIND11_VERSION}")
# PyBind11 modules
set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'bottleneck', ")
@@ -623,10 +623,10 @@ if(PYTHONINTERP_FOUND)
# Set missing or not modules
set(GUDHI_MODULES ${GUDHI_MODULES} "python" CACHE INTERNAL "GUDHI_MODULES")
- else(PYBIND11_FOUND AND CYTHON_FOUND)
- message("++ Python module will not be compiled because cython and/or pybind11 was/were not found")
+ else(NUMPY_FOUND AND PYBIND11_FOUND AND CYTHON_FOUND)
+ message("++ Python module will not be compiled because numpy and/or cython and/or pybind11 was/were not found")
set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python" CACHE INTERNAL "GUDHI_MISSING_MODULES")
- endif(PYBIND11_FOUND AND CYTHON_FOUND)
+ endif(NUMPY_FOUND AND PYBIND11_FOUND AND CYTHON_FOUND)
else(PYTHONINTERP_FOUND)
message("++ Python module will not be compiled because no Python interpreter was found")
set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python" CACHE INTERNAL "GUDHI_MISSING_MODULES")
diff --git a/src/python/doc/representations_sum.inc b/src/python/doc/representations_sum.inc
index 4298aea9..9515f044 100644
--- a/src/python/doc/representations_sum.inc
+++ b/src/python/doc/representations_sum.inc
@@ -1,14 +1,14 @@
.. table::
:widths: 30 40 30
- +------------------------------------------------------------------+----------------------------------------------------------------+-------------------------------------------------------------+
- | .. figure:: | Vectorizations, distances and kernels that work on persistence | :Author: Mathieu Carrière, Martin Royer |
- | img/sklearn-tda.png | diagrams, compatible with scikit-learn. | |
- | | | :Since: GUDHI 3.1.0 |
- | | | |
- | | | :License: MIT |
- | | | |
- | | | :Requires: `Scikit-learn <installation.html#scikit-learn>`_ |
- +------------------------------------------------------------------+----------------------------------------------------------------+-------------------------------------------------------------+
- | * :doc:`representations` |
- +------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------------+
+ +------------------------------------------------------------------+----------------------------------------------------------------+-------------------------------------------------------------------------+
+ | .. figure:: | Vectorizations, distances and kernels that work on persistence | :Author: Mathieu Carrière, Martin Royer, Gard Spreemann, Wojciech Reise |
+ | img/sklearn-tda.png | diagrams, compatible with scikit-learn. | |
+ | | | :Since: GUDHI 3.1.0 |
+ | | | |
+ | | | :License: MIT |
+ | | | |
+ | | | :Requires: `Scikit-learn <installation.html#scikit-learn>`_ |
+ +------------------------------------------------------------------+----------------------------------------------------------------+-------------------------------------------------------------------------+
+ | * :doc:`representations` |
+ +------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------------------------+
diff --git a/src/python/gudhi/hera/bottleneck.cc b/src/python/gudhi/hera/bottleneck.cc
index 0cb562ce..ec461f7c 100644
--- a/src/python/gudhi/hera/bottleneck.cc
+++ b/src/python/gudhi/hera/bottleneck.cc
@@ -16,7 +16,7 @@
using py::ssize_t;
#endif
-#include <bottleneck.h> // Hera
+#include <hera/bottleneck.h> // Hera
double bottleneck_distance(Dgm d1, Dgm d2, double delta)
{
diff --git a/src/python/gudhi/hera/wasserstein.cc b/src/python/gudhi/hera/wasserstein.cc
index fa0cf8aa..3516352e 100644
--- a/src/python/gudhi/hera/wasserstein.cc
+++ b/src/python/gudhi/hera/wasserstein.cc
@@ -8,10 +8,16 @@
* - YYYY/MM Author: Description of the modification
*/
-#include <wasserstein.h> // Hera
-
#include <pybind11_diagram_utils.h>
+#ifdef _MSC_VER
+// https://github.com/grey-narn/hera/issues/3
+// ssize_t is a non-standard type (well, posix)
+using py::ssize_t;
+#endif
+
+#include <hera/wasserstein.h> // Hera
+
double wasserstein_distance(
Dgm d1, Dgm d2,
double wasserstein_power, double internal_p,
diff --git a/src/python/gudhi/representations/vector_methods.py b/src/python/gudhi/representations/vector_methods.py
index a169aee8..d52185ef 100644
--- a/src/python/gudhi/representations/vector_methods.py
+++ b/src/python/gudhi/representations/vector_methods.py
@@ -13,8 +13,13 @@ import numpy as np
from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.exceptions import NotFittedError
from sklearn.preprocessing import MinMaxScaler, MaxAbsScaler
-from sklearn.neighbors import DistanceMetric
from sklearn.metrics import pairwise
+try:
+ # New location since 1.0
+ from sklearn.metrics import DistanceMetric
+except ImportError:
+ # Will be removed in 1.3
+ from sklearn.neighbors import DistanceMetric
from .preprocessing import DiagramScaler, BirthPersistenceTransform
diff --git a/src/python/gudhi/simplex_tree.pxd b/src/python/gudhi/simplex_tree.pxd
index 5642f82d..f86f1232 100644
--- a/src/python/gudhi/simplex_tree.pxd
+++ b/src/python/gudhi/simplex_tree.pxd
@@ -56,6 +56,7 @@ cdef extern from "Simplex_tree_interface.h" namespace "Gudhi":
int upper_bound_dimension() nogil
bool find_simplex(vector[int] simplex) nogil
bool insert(vector[int] simplex, double filtration) nogil
+ void insert_matrix(double* filtrations, int n, int stride0, int stride1, double max_filtration) nogil
vector[pair[vector[int], double]] get_star(vector[int] simplex) nogil
vector[pair[vector[int], double]] get_cofaces(vector[int] simplex, int dimension) nogil
void expansion(int max_dim) nogil except +
diff --git a/src/python/gudhi/simplex_tree.pyx b/src/python/gudhi/simplex_tree.pyx
index 05bfe22e..18215d2f 100644
--- a/src/python/gudhi/simplex_tree.pyx
+++ b/src/python/gudhi/simplex_tree.pyx
@@ -8,14 +8,23 @@
# - YYYY/MM Author: Description of the modification
from cython.operator import dereference, preincrement
-from libc.stdint cimport intptr_t
+from libc.stdint cimport intptr_t, int32_t, int64_t
import numpy as np
cimport gudhi.simplex_tree
+cimport cython
__author__ = "Vincent Rouvreau"
__copyright__ = "Copyright (C) 2016 Inria"
__license__ = "MIT"
+ctypedef fused some_int:
+ int32_t
+ int64_t
+
+ctypedef fused some_float:
+ float
+ double
+
cdef bool callback(vector[int] simplex, void *blocker_func):
return (<object>blocker_func)(simplex)
@@ -228,6 +237,87 @@ cdef class SimplexTree:
"""
return self.get_ptr().insert(simplex, <double>filtration)
+ @staticmethod
+ @cython.boundscheck(False)
+ def create_from_array(filtrations, double max_filtration=np.inf):
+ """Creates a new, empty complex and inserts vertices and edges. The vertices are numbered from 0 to n-1, and
+ the filtration values are encoded in the array, with the diagonal representing the vertices. It is the
+ caller's responsibility to ensure that this defines a filtration, which can be achieved with either::
+
+ filtrations[np.diag_indices_from(filtrations)] = filtrations.min(axis=1)
+
+ or::
+
+ diag = filtrations.diagonal()
+ filtrations = np.fmax(np.fmax(filtrations, diag[:, None]), diag[None, :])
+
+ :param filtrations: the filtration values of the vertices and edges to insert. The matrix is assumed to be symmetric.
+ :type filtrations: numpy.ndarray of shape (n,n)
+ :param max_filtration: only insert vertices and edges with filtration values no larger than max_filtration
+ :type max_filtration: float
+ :returns: the new complex
+ :rtype: SimplexTree
+ """
+ # TODO: document which half of the matrix is actually read?
+ filtrations = np.asanyarray(filtrations, dtype=float)
+ cdef double[:,:] F = filtrations
+ ret = SimplexTree()
+ cdef int n = F.shape[0]
+ assert n == F.shape[1], 'create_from_array() expects a square array'
+ with nogil:
+ ret.get_ptr().insert_matrix(&F[0,0], n, F.strides[0], F.strides[1], max_filtration)
+ return ret
+
+ def insert_edges_from_coo_matrix(self, edges):
+ """Inserts edges given by a sparse matrix in `COOrdinate format
+ <https://docs.scipy.org/doc/scipy/reference/generated/scipy.sparse.coo_matrix.html>`_.
+ If an edge is repeated, the smallest filtration value is used. Missing entries are not inserted.
+ Diagonal entries are currently interpreted as vertices, although we do not guarantee this behavior
+ in the future, and this is only useful if you want to insert vertices with a smaller filtration value
+ than the smallest edge containing them, since vertices are implicitly inserted together with the edges.
+
+ :param edges: the edges to insert and their filtration values.
+ :type edges: scipy.sparse.coo_matrix of shape (n,n)
+
+ .. seealso:: :func:`insert_batch`
+ """
+ # TODO: optimize this?
+ for edge in zip(edges.row, edges.col, edges.data):
+ self.get_ptr().insert((edge[0], edge[1]), edge[2])
+
+ @cython.boundscheck(False)
+ @cython.wraparound(False)
+ def insert_batch(self, some_int[:,:] vertex_array, some_float[:] filtrations):
+ """Inserts k-simplices given by a sparse array in a format similar
+ to `torch.sparse <https://pytorch.org/docs/stable/sparse.html>`_.
+ The n-th simplex has vertices `vertex_array[0,n]`, ...,
+ `vertex_array[k,n]` and filtration value `filtrations[n]`.
+ If a simplex is repeated, the smallest filtration value is used.
+ Simplices with a repeated vertex are currently interpreted as lower
+ dimensional simplices, but we do not guarantee this behavior in the
+ future. Any time a simplex is inserted, its faces are inserted as well
+ if needed to preserve a simplicial complex.
+
+ :param vertex_array: the k-simplices to insert.
+ :type vertex_array: numpy.array of shape (k+1,n)
+ :param filtrations: the filtration values.
+ :type filtrations: numpy.array of shape (n,)
+ """
+ # This may be slow if we end up inserting vertices in a bad order (flat_map).
+ # We could first insert the vertices from np.unique(vertex_array), or leave it to the caller.
+ cdef Py_ssize_t k = vertex_array.shape[0]
+ cdef Py_ssize_t n = vertex_array.shape[1]
+ assert filtrations.shape[0] == n, 'inconsistent sizes for vertex_array and filtrations'
+ cdef Py_ssize_t i
+ cdef Py_ssize_t j
+ cdef vector[int] v
+ with nogil:
+ for i in range(n):
+ for j in range(k):
+ v.push_back(vertex_array[j, i])
+ self.get_ptr().insert(v, filtrations[i])
+ v.clear()
+
def get_simplices(self):
"""This function returns a generator with simplices and their given
filtration values.
@@ -376,7 +466,7 @@ cdef class SimplexTree:
"""
return self.get_ptr().prune_above_filtration(filtration)
- def expansion(self, max_dim):
+ def expansion(self, max_dimension):
"""Expands the simplex tree containing only its one skeleton
until dimension max_dim.
@@ -390,10 +480,10 @@ cdef class SimplexTree:
The simplex tree must contain no simplex of dimension bigger than
1 when calling the method.
- :param max_dim: The maximal dimension.
- :type max_dim: int
+ :param max_dimension: The maximal dimension.
+ :type max_dimension: int
"""
- cdef int maxdim = max_dim
+ cdef int maxdim = max_dimension
with nogil:
self.get_ptr().expansion(maxdim)
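The docstrings added above describe the new batch insertion entry points. A short usage sketch (not
part of this diff, and assuming a build that includes it) for insert_batch and
insert_edges_from_coo_matrix::

    import numpy as np
    from scipy.sparse import coo_matrix
    from gudhi import SimplexTree

    st = SimplexTree()
    # One column per 2-simplex, one row per vertex: triangles (0,1,4), (1,2,4) and (2,3,4).
    triangles = np.array([[0, 1, 2],
                          [1, 2, 3],
                          [4, 4, 4]])
    st.insert_batch(triangles, np.array([0.5, 0.7, 0.9]))

    # Edges as a sparse COO matrix: entry (i, j) holds the filtration value of the edge {i, j}.
    edges = coo_matrix(([0.1, 0.2], ([0, 1], [1, 2])), shape=(5, 5))
    st2 = SimplexTree()
    st2.insert_edges_from_coo_matrix(edges)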
diff --git a/src/python/include/Alpha_complex_factory.h b/src/python/include/Alpha_complex_factory.h
index 3d20aa8f..41eb72c1 100644
--- a/src/python/include/Alpha_complex_factory.h
+++ b/src/python/include/Alpha_complex_factory.h
@@ -106,7 +106,7 @@ class Exact_alpha_complex_dD final : public Abstract_alpha_complex {
return alpha_complex_.create_complex(*simplex_tree, max_alpha_square, exact_version_, default_filtration_value);
}
- virtual std::size_t num_vertices() const {
+ virtual std::size_t num_vertices() const override {
return alpha_complex_.num_vertices();
}
@@ -141,7 +141,7 @@ class Inexact_alpha_complex_dD final : public Abstract_alpha_complex {
return alpha_complex_.create_complex(*simplex_tree, max_alpha_square, false, default_filtration_value);
}
- virtual std::size_t num_vertices() const {
+ virtual std::size_t num_vertices() const override {
return alpha_complex_.num_vertices();
}
diff --git a/src/python/include/Simplex_tree_interface.h b/src/python/include/Simplex_tree_interface.h
index 3848c5ad..0317ea39 100644
--- a/src/python/include/Simplex_tree_interface.h
+++ b/src/python/include/Simplex_tree_interface.h
@@ -40,6 +40,8 @@ class Simplex_tree_interface : public Simplex_tree<SimplexTreeOptions> {
using Complex_simplex_iterator = typename Base::Complex_simplex_iterator;
using Extended_filtration_data = typename Base::Extended_filtration_data;
using Boundary_simplex_iterator = typename Base::Boundary_simplex_iterator;
+ using Siblings = typename Base::Siblings;
+ using Node = typename Base::Node;
typedef bool (*blocker_func_t)(Simplex simplex, void *user_data);
public:
@@ -62,6 +64,30 @@ class Simplex_tree_interface : public Simplex_tree<SimplexTreeOptions> {
return (result.second);
}
+ void insert_matrix(double* filtrations, int n, int stride0, int stride1, double max_filtration) {
+ // We could delegate to insert_graph, but wrapping the matrix in a graph interface is too much work,
+ // and this is a bit more efficient.
+ auto& rm = this->root()->members_;
+ for(int i=0; i<n; ++i) {
+ char* p = reinterpret_cast<char*>(filtrations) + i * stride0;
+ double fv = *reinterpret_cast<double*>(p + i * stride1);
+ if(fv > max_filtration) continue;
+ auto sh = rm.emplace_hint(rm.end(), i, Node(this->root(), fv));
+ Siblings* children = nullptr;
+ // Should we make a first pass to count the number of edges so we can reserve the right space?
+ for(int j=i+1; j<n; ++j) {
+ double fe = *reinterpret_cast<double*>(p + j * stride1);
+ if(fe > max_filtration) continue;
+ if(!children) {
+ children = new Siblings(this->root(), i);
+ sh->second.assign_children(children);
+ }
+ children->members().emplace_hint(children->members().end(), j, Node(children, fe));
+ }
+ }
+
+ }
+
// Do not interface this function, only used in alpha complex interface for complex creation
bool insert_simplex(const Simplex& simplex, Filtration_value filtration = 0) {
Insertion_result result = Base::insert_simplex(simplex, filtration);
diff --git a/src/python/setup.py.in b/src/python/setup.py.in
index 2c67c2c5..1ecbe985 100644
--- a/src/python/setup.py.in
+++ b/src/python/setup.py.in
@@ -48,10 +48,8 @@ ext_modules = cythonize(ext_modules, compiler_directives={'language_level': '3'}
for module in pybind11_modules:
my_include_dirs = include_dirs + [pybind11.get_include(False), pybind11.get_include(True)]
- if module == 'hera/wasserstein':
- my_include_dirs = ['@HERA_WASSERSTEIN_INCLUDE_DIR@'] + my_include_dirs
- elif module == 'hera/bottleneck':
- my_include_dirs = ['@HERA_BOTTLENECK_INCLUDE_DIR@'] + my_include_dirs
+ if module.startswith('hera/'):
+ my_include_dirs = ['@HERA_INCLUDE_DIR@'] + my_include_dirs
ext_modules.append(Extension(
'gudhi.' + module.replace('/', '.'),
sources = [source_dir + module + '.cc'],
diff --git a/src/python/test/test_persistence_graphical_tools.py b/src/python/test/test_persistence_graphical_tools.py
index c19836b7..0e2ac3f8 100644
--- a/src/python/test/test_persistence_graphical_tools.py
+++ b/src/python/test/test_persistence_graphical_tools.py
@@ -12,6 +12,7 @@ import gudhi as gd
import numpy as np
import matplotlib as plt
import pytest
+import warnings
def test_array_handler():
@@ -71,13 +72,13 @@ def test_limit_to_max_intervals():
(0, (0.0, 0.106382)),
]
# check no warnings if max_intervals equals to the diagrams number
- with pytest.warns(None) as record:
+ with warnings.catch_warnings():
+ warnings.simplefilter("error")
truncated_diags = gd.persistence_graphical_tools._limit_to_max_intervals(
diags, 10, key=lambda life_time: life_time[1][1] - life_time[1][0]
)
# check diagrams are not sorted
assert truncated_diags == diags
- assert len(record) == 0
# check warning if max_intervals lower than the diagrams number
with pytest.warns(UserWarning) as record:
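The hunk above replaces the deprecated pytest.warns(None) idiom. As a generic sketch (the helper name
below is made up for illustration), asserting that a call emits no warning can be written by turning
warnings into errors inside the context::

    import warnings

    def quiet_helper():
        return 42

    with warnings.catch_warnings():
        warnings.simplefilter("error")  # any warning raised here fails the test
        assert quiet_helper() == 42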
diff --git a/src/python/test/test_simplex_tree.py b/src/python/test/test_simplex_tree.py
index 54bafed5..2ccbfbf5 100755
--- a/src/python/test/test_simplex_tree.py
+++ b/src/python/test/test_simplex_tree.py
@@ -249,6 +249,7 @@ def test_make_filtration_non_decreasing():
assert st.filtration([3, 4]) == 2.0
assert st.filtration([4, 5]) == 2.0
+
def test_extend_filtration():
# Inserted simplex:
@@ -257,86 +258,87 @@ def test_extend_filtration():
# / \ /
# o o
# /2\ /3
- # o o
- # 1 0
-
- st = SimplexTree()
- st.insert([0,2])
- st.insert([1,2])
- st.insert([0,3])
- st.insert([2,5])
- st.insert([3,4])
- st.insert([3,5])
- st.assign_filtration([0], 1.)
- st.assign_filtration([1], 2.)
- st.assign_filtration([2], 3.)
- st.assign_filtration([3], 4.)
- st.assign_filtration([4], 5.)
- st.assign_filtration([5], 6.)
-
- assert list(st.get_filtration()) == [
- ([0, 2], 0.0),
- ([1, 2], 0.0),
- ([0, 3], 0.0),
- ([3, 4], 0.0),
- ([2, 5], 0.0),
- ([3, 5], 0.0),
- ([0], 1.0),
- ([1], 2.0),
- ([2], 3.0),
- ([3], 4.0),
- ([4], 5.0),
- ([5], 6.0)
+ # o o
+ # 1 0
+
+ st = SimplexTree()
+ st.insert([0, 2])
+ st.insert([1, 2])
+ st.insert([0, 3])
+ st.insert([2, 5])
+ st.insert([3, 4])
+ st.insert([3, 5])
+ st.assign_filtration([0], 1.0)
+ st.assign_filtration([1], 2.0)
+ st.assign_filtration([2], 3.0)
+ st.assign_filtration([3], 4.0)
+ st.assign_filtration([4], 5.0)
+ st.assign_filtration([5], 6.0)
+
+ assert list(st.get_filtration()) == [
+ ([0, 2], 0.0),
+ ([1, 2], 0.0),
+ ([0, 3], 0.0),
+ ([3, 4], 0.0),
+ ([2, 5], 0.0),
+ ([3, 5], 0.0),
+ ([0], 1.0),
+ ([1], 2.0),
+ ([2], 3.0),
+ ([3], 4.0),
+ ([4], 5.0),
+ ([5], 6.0),
]
-
+
st.extend_filtration()
-
- assert list(st.get_filtration()) == [
- ([6], -3.0),
- ([0], -2.0),
- ([1], -1.8),
- ([2], -1.6),
- ([0, 2], -1.6),
- ([1, 2], -1.6),
- ([3], -1.4),
- ([0, 3], -1.4),
- ([4], -1.2),
- ([3, 4], -1.2),
- ([5], -1.0),
- ([2, 5], -1.0),
- ([3, 5], -1.0),
- ([5, 6], 1.0),
- ([4, 6], 1.2),
- ([3, 6], 1.4),
+
+ assert list(st.get_filtration()) == [
+ ([6], -3.0),
+ ([0], -2.0),
+ ([1], -1.8),
+ ([2], -1.6),
+ ([0, 2], -1.6),
+ ([1, 2], -1.6),
+ ([3], -1.4),
+ ([0, 3], -1.4),
+ ([4], -1.2),
+ ([3, 4], -1.2),
+ ([5], -1.0),
+ ([2, 5], -1.0),
+ ([3, 5], -1.0),
+ ([5, 6], 1.0),
+ ([4, 6], 1.2),
+ ([3, 6], 1.4),
([3, 4, 6], 1.4),
- ([3, 5, 6], 1.4),
- ([2, 6], 1.6),
- ([2, 5, 6], 1.6),
- ([1, 6], 1.8),
- ([1, 2, 6], 1.8),
- ([0, 6], 2.0),
- ([0, 2, 6], 2.0),
- ([0, 3, 6], 2.0)
+ ([3, 5, 6], 1.4),
+ ([2, 6], 1.6),
+ ([2, 5, 6], 1.6),
+ ([1, 6], 1.8),
+ ([1, 2, 6], 1.8),
+ ([0, 6], 2.0),
+ ([0, 2, 6], 2.0),
+ ([0, 3, 6], 2.0),
]
- dgms = st.extended_persistence(min_persistence=-1.)
+ dgms = st.extended_persistence(min_persistence=-1.0)
assert len(dgms) == 4
# Sort by (death-birth) descending - we are only interested in those with the longest life span
for idx in range(4):
- dgms[idx] = sorted(dgms[idx], key=lambda x:(-abs(x[1][0]-x[1][1])))
+ dgms[idx] = sorted(dgms[idx], key=lambda x: (-abs(x[1][0] - x[1][1])))
+
+ assert dgms[0][0][1][0] == pytest.approx(2.0)
+ assert dgms[0][0][1][1] == pytest.approx(3.0)
+ assert dgms[1][0][1][0] == pytest.approx(5.0)
+ assert dgms[1][0][1][1] == pytest.approx(4.0)
+ assert dgms[2][0][1][0] == pytest.approx(1.0)
+ assert dgms[2][0][1][1] == pytest.approx(6.0)
+ assert dgms[3][0][1][0] == pytest.approx(6.0)
+ assert dgms[3][0][1][1] == pytest.approx(1.0)
- assert dgms[0][0][1][0] == pytest.approx(2.)
- assert dgms[0][0][1][1] == pytest.approx(3.)
- assert dgms[1][0][1][0] == pytest.approx(5.)
- assert dgms[1][0][1][1] == pytest.approx(4.)
- assert dgms[2][0][1][0] == pytest.approx(1.)
- assert dgms[2][0][1][1] == pytest.approx(6.)
- assert dgms[3][0][1][0] == pytest.approx(6.)
- assert dgms[3][0][1][1] == pytest.approx(1.)
def test_simplices_iterator():
st = SimplexTree()
-
+
assert st.insert([0, 1, 2], filtration=4.0) == True
assert st.insert([2, 3, 4], filtration=2.0) == True
@@ -346,9 +348,10 @@ def test_simplices_iterator():
print("filtration is: ", simplex[1])
assert st.filtration(simplex[0]) == simplex[1]
+
def test_collapse_edges():
st = SimplexTree()
-
+
assert st.insert([0, 1], filtration=1.0) == True
assert st.insert([1, 2], filtration=1.0) == True
assert st.insert([2, 3], filtration=1.0) == True
@@ -360,31 +363,33 @@ def test_collapse_edges():
st.collapse_edges()
assert st.num_simplices() == 9
- assert st.find([0, 2]) == False # [1, 3] would be fine as well
+ assert st.find([0, 2]) == False # [1, 3] would be fine as well
for simplex in st.get_skeleton(0):
- assert simplex[1] == 1.
+ assert simplex[1] == 1.0
+
def test_reset_filtration():
st = SimplexTree()
-
- assert st.insert([0, 1, 2], 3.) == True
- assert st.insert([0, 3], 2.) == True
- assert st.insert([3, 4, 5], 3.) == True
- assert st.insert([0, 1, 6, 7], 4.) == True
+
+ assert st.insert([0, 1, 2], 3.0) == True
+ assert st.insert([0, 3], 2.0) == True
+ assert st.insert([3, 4, 5], 3.0) == True
+ assert st.insert([0, 1, 6, 7], 4.0) == True
# Guaranteed by construction
for simplex in st.get_simplices():
- assert st.filtration(simplex[0]) >= 2.
-
+ assert st.filtration(simplex[0]) >= 2.0
+
# dimension until 5 even if simplex tree is of dimension 3 to test the limits
for dimension in range(5, -1, -1):
- st.reset_filtration(0., dimension)
+ st.reset_filtration(0.0, dimension)
for simplex in st.get_skeleton(3):
print(simplex)
if len(simplex[0]) < (dimension) + 1:
- assert st.filtration(simplex[0]) >= 2.
+ assert st.filtration(simplex[0]) >= 2.0
else:
- assert st.filtration(simplex[0]) == 0.
+ assert st.filtration(simplex[0]) == 0.0
+
def test_boundaries_iterator():
st = SimplexTree()
@@ -400,16 +405,17 @@ def test_boundaries_iterator():
list(st.get_boundaries([]))
with pytest.raises(RuntimeError):
- list(st.get_boundaries([0, 4])) # (0, 4) does not exist
+ list(st.get_boundaries([0, 4])) # (0, 4) does not exist
with pytest.raises(RuntimeError):
- list(st.get_boundaries([6])) # (6) does not exist
+ list(st.get_boundaries([6])) # (6) does not exist
+
def test_persistence_intervals_in_dimension():
# Here is our triangulation of a 2-torus - taken from https://dioscuri-tda.org/Paris_TDA_Tutorial_2021.html
# 0-----3-----4-----0
# | \ | \ | \ | \ |
- # | \ | \ | \| \ |
+ # | \ | \ | \| \ |
# 1-----8-----7-----1
# | \ | \ | \ | \ |
# | \ | \ | \ | \ |
@@ -418,50 +424,52 @@ def test_persistence_intervals_in_dimension():
# | \ | \ | \ | \ |
# 0-----3-----4-----0
st = SimplexTree()
- st.insert([0,1,8])
- st.insert([0,3,8])
- st.insert([3,7,8])
- st.insert([3,4,7])
- st.insert([1,4,7])
- st.insert([0,1,4])
- st.insert([1,2,5])
- st.insert([1,5,8])
- st.insert([5,6,8])
- st.insert([6,7,8])
- st.insert([2,6,7])
- st.insert([1,2,7])
- st.insert([0,2,3])
- st.insert([2,3,5])
- st.insert([3,4,5])
- st.insert([4,5,6])
- st.insert([0,4,6])
- st.insert([0,2,6])
+ st.insert([0, 1, 8])
+ st.insert([0, 3, 8])
+ st.insert([3, 7, 8])
+ st.insert([3, 4, 7])
+ st.insert([1, 4, 7])
+ st.insert([0, 1, 4])
+ st.insert([1, 2, 5])
+ st.insert([1, 5, 8])
+ st.insert([5, 6, 8])
+ st.insert([6, 7, 8])
+ st.insert([2, 6, 7])
+ st.insert([1, 2, 7])
+ st.insert([0, 2, 3])
+ st.insert([2, 3, 5])
+ st.insert([3, 4, 5])
+ st.insert([4, 5, 6])
+ st.insert([0, 4, 6])
+ st.insert([0, 2, 6])
st.compute_persistence(persistence_dim_max=True)
-
+
H0 = st.persistence_intervals_in_dimension(0)
- assert np.array_equal(H0, np.array([[ 0., float("inf")]]))
+ assert np.array_equal(H0, np.array([[0.0, float("inf")]]))
H1 = st.persistence_intervals_in_dimension(1)
- assert np.array_equal(H1, np.array([[ 0., float("inf")], [ 0., float("inf")]]))
+ assert np.array_equal(H1, np.array([[0.0, float("inf")], [0.0, float("inf")]]))
H2 = st.persistence_intervals_in_dimension(2)
- assert np.array_equal(H2, np.array([[ 0., float("inf")]]))
+ assert np.array_equal(H2, np.array([[0.0, float("inf")]]))
# Test empty case
assert st.persistence_intervals_in_dimension(3).shape == (0, 2)
+
def test_equality_operator():
st1 = SimplexTree()
st2 = SimplexTree()
assert st1 == st2
- st1.insert([1,2,3], 4.)
+ st1.insert([1, 2, 3], 4.0)
assert st1 != st2
- st2.insert([1,2,3], 4.)
+ st2.insert([1, 2, 3], 4.0)
assert st1 == st2
+
def test_simplex_tree_deep_copy():
st = SimplexTree()
- st.insert([1, 2, 3], 0.)
+ st.insert([1, 2, 3], 0.0)
# compute persistence only on the original
st.compute_persistence()
@@ -480,14 +488,15 @@ def test_simplex_tree_deep_copy():
for a_splx in a_filt_list:
assert a_splx in st_filt_list
-
+
# test double free
del st
del st_copy
+
def test_simplex_tree_deep_copy_constructor():
st = SimplexTree()
- st.insert([1, 2, 3], 0.)
+ st.insert([1, 2, 3], 0.0)
# compute persistence only on the original
st.compute_persistence()
@@ -506,56 +515,132 @@ def test_simplex_tree_deep_copy_constructor():
for a_splx in a_filt_list:
assert a_splx in st_filt_list
-
+
# test double free
del st
del st_copy
+
def test_simplex_tree_constructor_exception():
with pytest.raises(TypeError):
- st = SimplexTree(other = "Construction from a string shall raise an exception")
+ st = SimplexTree(other="Construction from a string shall raise an exception")
+
+
+def test_create_from_array():
+ a = np.array([[1, 4, 13, 6], [4, 3, 11, 5], [13, 11, 10, 12], [6, 5, 12, 2]])
+ st = SimplexTree.create_from_array(a, max_filtration=5.0)
+ assert list(st.get_filtration()) == [([0], 1.0), ([3], 2.0), ([1], 3.0), ([0, 1], 4.0), ([1, 3], 5.0)]
+
+
+def test_insert_edges_from_coo_matrix():
+ try:
+ from scipy.sparse import coo_matrix
+ from scipy.spatial import cKDTree
+ except ImportError:
+ print("Skipping, no SciPy")
+ return
+
+ st = SimplexTree()
+ st.insert([1, 2, 7], 7)
+ row = np.array([2, 5, 3])
+ col = np.array([1, 4, 6])
+ dat = np.array([1, 2, 3])
+ edges = coo_matrix((dat, (row, col)))
+ st.insert_edges_from_coo_matrix(edges)
+ assert list(st.get_filtration()) == [
+ ([1], 1.0),
+ ([2], 1.0),
+ ([1, 2], 1.0),
+ ([4], 2.0),
+ ([5], 2.0),
+ ([4, 5], 2.0),
+ ([3], 3.0),
+ ([6], 3.0),
+ ([3, 6], 3.0),
+ ([7], 7.0),
+ ([1, 7], 7.0),
+ ([2, 7], 7.0),
+ ([1, 2, 7], 7.0),
+ ]
+
+ pts = np.random.rand(100, 2)
+ tree = cKDTree(pts)
+ edges = tree.sparse_distance_matrix(tree, max_distance=0.15, output_type="coo_matrix")
+ st = SimplexTree()
+ st.insert_edges_from_coo_matrix(edges)
+ assert 100 < st.num_simplices() < 1000
+
+
+def test_insert_batch():
+ st = SimplexTree()
+ # vertices
+ st.insert_batch(np.array([[6, 1, 5]]), np.array([-5.0, 2.0, -3.0]))
+ # triangles
+ st.insert_batch(np.array([[2, 10], [5, 0], [6, 11]]), np.array([4.0, 0.0]))
+ # edges
+ st.insert_batch(np.array([[1, 5], [2, 5]]), np.array([1.0, 3.0]))
+
+ assert list(st.get_filtration()) == [
+ ([6], -5.0),
+ ([5], -3.0),
+ ([0], 0.0),
+ ([10], 0.0),
+ ([0, 10], 0.0),
+ ([11], 0.0),
+ ([0, 11], 0.0),
+ ([10, 11], 0.0),
+ ([0, 10, 11], 0.0),
+ ([1], 1.0),
+ ([2], 1.0),
+ ([1, 2], 1.0),
+ ([2, 5], 4.0),
+ ([2, 6], 4.0),
+ ([5, 6], 4.0),
+ ([2, 5, 6], 4.0),
+ ]
+
def test_expansion_with_blocker():
- st=SimplexTree()
- st.insert([0,1],0)
- st.insert([0,2],1)
- st.insert([0,3],2)
- st.insert([1,2],3)
- st.insert([1,3],4)
- st.insert([2,3],5)
- st.insert([2,4],6)
- st.insert([3,6],7)
- st.insert([4,5],8)
- st.insert([4,6],9)
- st.insert([5,6],10)
- st.insert([6],10)
+ st = SimplexTree()
+ st.insert([0, 1], 0)
+ st.insert([0, 2], 1)
+ st.insert([0, 3], 2)
+ st.insert([1, 2], 3)
+ st.insert([1, 3], 4)
+ st.insert([2, 3], 5)
+ st.insert([2, 4], 6)
+ st.insert([3, 6], 7)
+ st.insert([4, 5], 8)
+ st.insert([4, 6], 9)
+ st.insert([5, 6], 10)
+ st.insert([6], 10)
def blocker(simplex):
try:
# Block all simplices that contain vertex 6
simplex.index(6)
- print(simplex, ' is blocked')
+ print(simplex, " is blocked")
return True
except ValueError:
- print(simplex, ' is accepted')
- st.assign_filtration(simplex, st.filtration(simplex) + 1.)
+ print(simplex, " is accepted")
+ st.assign_filtration(simplex, st.filtration(simplex) + 1.0)
return False
st.expansion_with_blocker(2, blocker)
assert st.num_simplices() == 22
assert st.dimension() == 2
- assert st.find([4,5,6]) == False
- assert st.filtration([0,1,2]) == 4.
- assert st.filtration([0,1,3]) == 5.
- assert st.filtration([0,2,3]) == 6.
- assert st.filtration([1,2,3]) == 6.
+ assert st.find([4, 5, 6]) == False
+ assert st.filtration([0, 1, 2]) == 4.0
+ assert st.filtration([0, 1, 3]) == 5.0
+ assert st.filtration([0, 2, 3]) == 6.0
+ assert st.filtration([1, 2, 3]) == 6.0
st.expansion_with_blocker(3, blocker)
assert st.num_simplices() == 23
assert st.dimension() == 3
- assert st.find([4,5,6]) == False
- assert st.filtration([0,1,2]) == 4.
- assert st.filtration([0,1,3]) == 5.
- assert st.filtration([0,2,3]) == 6.
- assert st.filtration([1,2,3]) == 6.
- assert st.filtration([0,1,2,3]) == 7.
+ assert st.find([4, 5, 6]) == False
+ assert st.filtration([0, 1, 2]) == 4.0
+ assert st.filtration([0, 1, 3]) == 5.0
+ assert st.filtration([0, 2, 3]) == 6.0
+ assert st.filtration([1, 2, 3]) == 6.0
+ assert st.filtration([0, 1, 2, 3]) == 7.0
diff --git a/src/python/test/test_wasserstein_distance.py b/src/python/test/test_wasserstein_distance.py
index 3a004d77..a76b6ce7 100755
--- a/src/python/test/test_wasserstein_distance.py
+++ b/src/python/test/test_wasserstein_distance.py
@@ -90,10 +90,11 @@ def test_get_essential_parts():
def test_warn_infty():
- assert _warn_infty(matching=False)==np.inf
- c, m = _warn_infty(matching=True)
- assert (c == np.inf)
- assert (m is None)
+ with pytest.warns(UserWarning):
+ assert _warn_infty(matching=False)==np.inf
+ c, m = _warn_infty(matching=True)
+ assert (c == np.inf)
+ assert (m is None)
def _basic_wasserstein(wasserstein_distance, delta, test_infinity=True, test_matching=True):