author     tlacombe <lacombe1993@gmail.com>   2020-02-14 11:24:17 +0100
committer  tlacombe <lacombe1993@gmail.com>   2020-02-14 11:24:17 +0100
commit     5ad0b45a5b5387f49fe72843a74e96a346c6d6fe (patch)
tree       69b389d16485c4b3e5187b14d312843b7e2dec93 /src
parent     6a6bed7ca21c1ffcf6de9ed09c2a6512ecb66585 (diff)
parent     bed30b19e57669c0b8ad385f1124586ed3499a2d (diff)
update from upstream/master and merge CMakeLists
Diffstat (limited to 'src')
-rw-r--r--  src/Alpha_complex/doc/Intro_alpha_complex.h | 14
-rw-r--r--  src/Alpha_complex/doc/alpha_complex_representation.ipe | 6
-rw-r--r--  src/Alpha_complex/doc/alpha_complex_representation.png | bin 14606 -> 19568 bytes
-rw-r--r--  src/Alpha_complex/include/gudhi/Alpha_complex.h | 18
-rw-r--r--  src/Alpha_complex/test/Alpha_complex_unit_test.cpp | 60
-rw-r--r--  src/Bottleneck_distance/include/gudhi/Persistence_graph.h | 2
-rw-r--r--  src/Bottleneck_distance/test/bottleneck_unit_test.cpp | 37
-rw-r--r--  src/Nerve_GIC/include/gudhi/GIC.h | 10
-rw-r--r--  src/Persistence_representations/include/gudhi/Persistence_intervals.h | 5
-rw-r--r--  src/Persistence_representations/test/persistence_intervals_test.cpp | 211
-rw-r--r--  src/Simplex_tree/example/CMakeLists.txt | 3
-rw-r--r--  src/Simplex_tree/include/gudhi/Simplex_tree.h | 2
-rw-r--r--  src/Tangential_complex/include/gudhi/Tangential_complex.h | 4
-rw-r--r--  src/Witness_complex/example/CMakeLists.txt | 10
-rw-r--r--  src/cmake/modules/GUDHI_third_party_libraries.cmake | 4
-rw-r--r--  src/cmake/modules/GUDHI_user_version_target.cmake | 7
-rw-r--r--  src/common/doc/header.html | 2
-rw-r--r--  src/common/doc/installation.h | 2
-rw-r--r--  src/common/doc/main_page.md | 276
-rw-r--r--  src/common/include/gudhi/Unitary_tests_utils.h | 11
-rw-r--r--  src/python/CMakeLists.txt | 81
-rw-r--r--  src/python/doc/_templates/layout.html | 2
-rw-r--r--  src/python/doc/alpha_complex_sum.inc | 15
-rw-r--r--  src/python/doc/alpha_complex_user.rst | 19
-rw-r--r--  src/python/doc/cubical_complex_user.rst | 3
-rw-r--r--  src/python/doc/diagram_readers_ref.rst (renamed from src/python/doc/reader_utils_ref.rst) | 10
-rw-r--r--  src/python/doc/index.rst | 44
-rw-r--r--  src/python/doc/installation.rst | 12
-rw-r--r--  src/python/doc/persistence_graphical_tools_user.rst | 2
-rw-r--r--  src/python/doc/point_cloud.rst | 22
-rw-r--r--  src/python/doc/point_cloud_sum.inc | 15
-rw-r--r--  src/python/doc/rips_complex_user.rst | 3
-rw-r--r--  src/python/doc/wasserstein_distance_sum.inc | 6
-rw-r--r--  src/python/doc/wasserstein_distance_user.rst | 19
-rw-r--r--  src/python/doc/witness_complex_user.rst | 2
-rwxr-xr-x  src/python/example/alpha_complex_from_points_example.py | 7
-rwxr-xr-x  src/python/example/alpha_rips_persistence_bottleneck_distance.py | 2
-rwxr-xr-x  src/python/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py | 2
-rwxr-xr-x  src/python/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py | 2
-rwxr-xr-x  src/python/example/plot_rips_complex.py | 2
-rwxr-xr-x  src/python/example/rips_complex_diagram_persistence_from_off_file_example.py | 2
-rw-r--r--  src/python/gudhi/__init__.py.in | 8
-rw-r--r--  src/python/gudhi/alpha_complex.pyx | 35
-rw-r--r--  src/python/gudhi/bottleneck.pyx | 10
-rw-r--r--  src/python/gudhi/cubical_complex.pyx | 44
-rw-r--r--  src/python/gudhi/euclidean_strong_witness_complex.pyx | 20
-rw-r--r--  src/python/gudhi/euclidean_witness_complex.pyx | 20
-rw-r--r--  src/python/gudhi/hera.cc | 71
-rw-r--r--  src/python/gudhi/nerve_gic.pyx | 30
-rw-r--r--  src/python/gudhi/off_reader.pyx | 16
-rw-r--r--  src/python/gudhi/periodic_cubical_complex.pyx | 58
-rw-r--r--  src/python/gudhi/persistence_graphical_tools.py | 14
-rw-r--r--  src/python/gudhi/reader_utils.pyx | 34
-rw-r--r--  src/python/gudhi/rips_complex.pyx | 20
-rw-r--r--  src/python/gudhi/simplex_tree.pxd | 21
-rw-r--r--  src/python/gudhi/simplex_tree.pyx | 10
-rw-r--r--  src/python/gudhi/strong_witness_complex.pyx | 20
-rw-r--r--  src/python/gudhi/subsampling.pyx | 38
-rw-r--r--  src/python/gudhi/tangential_complex.pyx | 20
-rw-r--r--  src/python/gudhi/wasserstein.py | 61
-rw-r--r--  src/python/gudhi/witness_complex.pyx | 20
-rw-r--r--  src/python/include/Alpha_complex_interface.h | 13
-rw-r--r--  src/python/setup.py.in | 31
-rwxr-xr-x  src/python/test/test_alpha_complex.py | 66
-rwxr-xr-x  src/python/test/test_bottleneck_distance.py | 4
-rwxr-xr-x  src/python/test/test_cover_complex.py | 4
-rwxr-xr-x  src/python/test/test_cubical_complex.py | 63
-rwxr-xr-x  src/python/test/test_euclidean_witness_complex.py | 4
-rwxr-xr-x  src/python/test/test_reader_utils.py | 6
-rwxr-xr-x  src/python/test/test_representations.py | 15
-rwxr-xr-x  src/python/test/test_rips_complex.py | 6
-rwxr-xr-x  src/python/test/test_simplex_tree.py | 4
-rwxr-xr-x  src/python/test/test_subsampling.py | 4
-rwxr-xr-x  src/python/test/test_tangential_complex.py | 4
-rwxr-xr-x  src/python/test/test_wasserstein_distance.py | 73
-rwxr-xr-x  src/python/test/test_witness_complex.py | 4
76 files changed, 1042 insertions, 785 deletions
diff --git a/src/Alpha_complex/doc/Intro_alpha_complex.h b/src/Alpha_complex/doc/Intro_alpha_complex.h
index 3c32a1e6..a8b1a106 100644
--- a/src/Alpha_complex/doc/Intro_alpha_complex.h
+++ b/src/Alpha_complex/doc/Intro_alpha_complex.h
@@ -31,8 +31,8 @@ namespace alpha_complex {
* circumsphere is empty (the simplex is then said to be Gabriel), and as the minimum of the filtration
* values of the codimension 1 cofaces that make it not Gabriel otherwise.
*
- * All simplices that have a filtration value strictly greater than a given alpha squared value are not inserted into
- * the complex.
+ * All simplices that have a filtration value \f$ > \alpha^2 \f$ are removed from the Delaunay complex
+ * when creating the simplicial complex if it is specified.
*
* \image html "alpha_complex_representation.png" "Alpha-complex representation"
*
@@ -46,8 +46,8 @@ namespace alpha_complex {
* \cite cgal:s-gkd-19b from CGAL as template parameter.
*
* \remark
- * - When the simplicial complex is constructed with an infinite value of alpha, the complex is a Delaunay
- * complex.
+ * - When an \f$\alpha\f$-complex is constructed with an infinite value of \f$ \alpha^2 \f$, the complex is a Delaunay
+ * complex (with special filtration values).
* - For people only interested in the topology of the \ref alpha_complex (for instance persistence),
* \ref alpha_complex is equivalent to the \ref cech_complex and much smaller if you do not bound the radii.
* \ref cech_complex can still make sense in higher dimension precisely because you can bound the radii.
@@ -135,13 +135,13 @@ namespace alpha_complex {
*
* \subsubsection nondecreasing Non decreasing filtration values
*
- * As the squared radii computed by CGAL are an approximation, it might happen that these alpha squared values do not
- * quite define a proper filtration (i.e. non-decreasing with respect to inclusion).
+ * As the squared radii computed by CGAL are an approximation, it might happen that these \f$ \alpha^2 \f$ values do
+ * not quite define a proper filtration (i.e. non-decreasing with respect to inclusion).
* We fix that up by calling `SimplicialComplexForAlpha::make_filtration_non_decreasing()`.
*
* \subsubsection pruneabove Prune above given filtration value
*
- * The simplex tree is pruned from the given maximum alpha squared value (cf.
+ * The simplex tree is pruned from the given maximum \f$ \alpha^2 \f$ value (cf.
* `SimplicialComplexForAlpha::prune_above_filtration()`).
* In the following example, the value is given by the user as argument of the program.
*
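Editor's note: a minimal usage sketch of the behaviour this doc change describes (simplices with filtration value greater than the given alpha squared are not kept). It mirrors the Alpha_complex constructor and create_complex() calls that appear in the unit-test hunks further down; the 2D kernel, the point coordinates and the 32.0 bound are illustrative only.

// Editor's sketch (not part of the patch): prune an alpha complex at alpha^2 = 32.0.
#include <gudhi/Alpha_complex.h>
#include <gudhi/Simplex_tree.h>
#include <CGAL/Epick_d.h>
#include <iostream>
#include <vector>

int main() {
  using Kernel = CGAL::Epick_d<CGAL::Dimension_tag<2>>;
  using Point = Kernel::Point_d;
  std::vector<Point> points = {Point(1., 1.), Point(7., 0.), Point(4., 6.), Point(9., 6.)};

  Gudhi::alpha_complex::Alpha_complex<Kernel> alpha(points);
  Gudhi::Simplex_tree<> stree;
  // Simplices of the Delaunay complex with filtration value > 32.0 are not inserted.
  if (alpha.create_complex(stree, 32.0)) {
    std::cout << stree.num_simplices() << " simplices, dimension " << stree.dimension() << std::endl;
  }
  return 0;
}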
diff --git a/src/Alpha_complex/doc/alpha_complex_representation.ipe b/src/Alpha_complex/doc/alpha_complex_representation.ipe
index e8096b93..40ff1d0f 100644
--- a/src/Alpha_complex/doc/alpha_complex_representation.ipe
+++ b/src/Alpha_complex/doc/alpha_complex_representation.ipe
@@ -1,7 +1,7 @@
<?xml version="1.0"?>
<!DOCTYPE ipe SYSTEM "ipe.dtd">
-<ipe version="70107" creator="Ipe 7.1.10">
-<info created="D:20150603143945" modified="D:20160404172133"/>
+<ipe version="70206" creator="Ipe 7.2.7">
+<info created="D:20150603143945" modified="D:20200110100102"/>
<ipestyle name="basic">
<symbol name="arrow/arc(spx)">
<path stroke="sym-stroke" fill="sym-stroke" pen="sym-pen">
@@ -305,7 +305,7 @@ h
108.275 743.531 m
166.45 743.531 l
</path>
-<text matrix="1 0 0 1 142.618 -109.867" transformations="translations" pos="127.397 746.763" stroke="darkgray" type="label" width="6.41" height="4.289" depth="0" valign="baseline">$\alpha$</text>
+<text matrix="1 0 0 1 126.618 -109.867" transformations="translations" pos="127.397 746.763" stroke="darkgray" type="label" width="45.707" height="9.041" depth="1.32" valign="baseline" style="math">\alpha = \sqrt{32.0}</text>
<use matrix="1 0 0 1 -209.478 12.0238" name="mark/fdisk(sfx)" pos="300 720" size="normal" stroke="black" fill="white"/>
<use matrix="1 0 0 1 -210.178 22.1775" name="mark/fdisk(sfx)" pos="280 660" size="normal" stroke="black" fill="white"/>
<use matrix="1 0 0 1 -210.178 22.1775" name="mark/fdisk(sfx)" pos="370 690" size="normal" stroke="black" fill="white"/>
diff --git a/src/Alpha_complex/doc/alpha_complex_representation.png b/src/Alpha_complex/doc/alpha_complex_representation.png
index 7b81cd69..5ebb1e75 100644
--- a/src/Alpha_complex/doc/alpha_complex_representation.png
+++ b/src/Alpha_complex/doc/alpha_complex_representation.png
Binary files differ
diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex.h b/src/Alpha_complex/include/gudhi/Alpha_complex.h
index 6b4d8463..f2a05e95 100644
--- a/src/Alpha_complex/include/gudhi/Alpha_complex.h
+++ b/src/Alpha_complex/include/gudhi/Alpha_complex.h
@@ -121,8 +121,8 @@ class Alpha_complex {
// size_type type from CGAL.
typedef typename Delaunay_triangulation::size_type size_type;
- // Map type to switch from simplex tree vertex handle to CGAL vertex iterator.
- typedef typename std::map< std::size_t, CGAL_vertex_iterator > Vector_vertex_iterator;
+ // Structure to switch from simplex tree vertex handle to CGAL vertex iterator.
+ typedef typename std::vector< CGAL_vertex_iterator > Vector_vertex_iterator;
private:
/** \brief Vertex iterator vector to switch from simplex tree vertex handle to CGAL vertex iterator.
@@ -191,14 +191,6 @@ class Alpha_complex {
return vertex_handle_to_iterator_.at(vertex)->point();
}
- /** \brief number_of_vertices returns the number of vertices (same as the number of points).
- *
- * @return The number of vertices.
- */
- std::size_t number_of_vertices() const {
- return vertex_handle_to_iterator_.size();
- }
-
private:
template<typename InputPointRange >
void init_from_range(const InputPointRange& points) {
@@ -238,14 +230,16 @@ class Alpha_complex {
hint = pos->full_cell();
}
// --------------------------------------------------------------------------------------------
- // double map to retrieve simplex tree vertex handles from CGAL vertex iterator and vice versa
+ // structure to retrieve CGAL points from vertex handle - one vertex handle per point.
+ // Needs to be constructed before as vertex handles arrives in no particular order.
+ vertex_handle_to_iterator_.resize(point_cloud.size());
// Loop on triangulation vertices list
for (CGAL_vertex_iterator vit = triangulation_->vertices_begin(); vit != triangulation_->vertices_end(); ++vit) {
if (!triangulation_->is_infinite(*vit)) {
#ifdef DEBUG_TRACES
std::cout << "Vertex insertion - " << vit->data() << " -> " << vit->point() << std::endl;
#endif // DEBUG_TRACES
- vertex_handle_to_iterator_.emplace(vit->data(), vit);
+ vertex_handle_to_iterator_[vit->data()] = vit;
}
}
// --------------------------------------------------------------------------------------------
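Editor's note: the hunk above replaces the handle-to-vertex std::map by a plain std::vector. A toy sketch of why that works (Toy_vertex is a made-up stand-in for the CGAL vertex iterator, not GUDHI code): vertex handles are exactly 0..n-1 but the triangulation hands them back in arbitrary order, hence the resize() before the traversal.

// Editor's sketch: handles are 0..n-1, so a vector indexed by handle replaces the map.
#include <cstddef>
#include <vector>

struct Toy_vertex { std::size_t data; };  // data() plays the role of the vertex handle

void fill(std::vector<Toy_vertex*>& handle_to_vertex, std::vector<Toy_vertex>& vertices) {
  handle_to_vertex.resize(vertices.size());   // must happen before the traversal
  for (auto& v : vertices)                    // traversal order is arbitrary
    handle_to_vertex[v.data] = &v;            // O(1) assignment instead of map::emplace
}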
diff --git a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp
index 40b3fe09..27b671dd 100644
--- a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp
+++ b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp
@@ -53,20 +53,12 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(Alpha_complex_from_OFF_file, TestedKernel, list_of
Gudhi::alpha_complex::Alpha_complex<TestedKernel> alpha_complex_from_file(off_file_name);
- std::cout << "alpha_complex_from_points.number_of_vertices()=" << alpha_complex_from_file.number_of_vertices()
- << std::endl;
- BOOST_CHECK(alpha_complex_from_file.number_of_vertices() == 7);
-
Gudhi::Simplex_tree<> simplex_tree_60;
BOOST_CHECK(alpha_complex_from_file.create_complex(simplex_tree_60, max_alpha_square_value));
std::cout << "simplex_tree_60.dimension()=" << simplex_tree_60.dimension() << std::endl;
BOOST_CHECK(simplex_tree_60.dimension() == 2);
- std::cout << "alpha_complex_from_points.number_of_vertices()=" << alpha_complex_from_file.number_of_vertices()
- << std::endl;
- BOOST_CHECK(alpha_complex_from_file.number_of_vertices() == 7);
-
std::cout << "simplex_tree_60.num_vertices()=" << simplex_tree_60.num_vertices() << std::endl;
BOOST_CHECK(simplex_tree_60.num_vertices() == 7);
@@ -128,10 +120,6 @@ BOOST_AUTO_TEST_CASE(Alpha_complex_from_points) {
Gudhi::Simplex_tree<> simplex_tree;
BOOST_CHECK(alpha_complex_from_points.create_complex(simplex_tree));
- std::cout << "alpha_complex_from_points.number_of_vertices()=" << alpha_complex_from_points.number_of_vertices()
- << std::endl;
- BOOST_CHECK(alpha_complex_from_points.number_of_vertices() == points.size());
-
// Another way to check num_simplices
std::cout << "Iterator on alpha complex simplices in the filtration order, with [filtration value]:" << std::endl;
int num_simplices = 0;
@@ -151,7 +139,7 @@ BOOST_AUTO_TEST_CASE(Alpha_complex_from_points) {
std::cout << "simplex_tree.dimension()=" << simplex_tree.dimension() << std::endl;
BOOST_CHECK(simplex_tree.dimension() == 3);
std::cout << "simplex_tree.num_vertices()=" << simplex_tree.num_vertices() << std::endl;
- BOOST_CHECK(simplex_tree.num_vertices() == 4);
+ BOOST_CHECK(simplex_tree.num_vertices() == points.size());
for (auto f_simplex : simplex_tree.filtration_simplex_range()) {
switch (simplex_tree.dimension(f_simplex)) {
@@ -261,10 +249,6 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(Alpha_complex_from_empty_points, TestedKernel, lis
Gudhi::Simplex_tree<> simplex_tree;
BOOST_CHECK(!alpha_complex_from_points.create_complex(simplex_tree));
- std::cout << "alpha_complex_from_points.number_of_vertices()=" << alpha_complex_from_points.number_of_vertices()
- << std::endl;
- BOOST_CHECK(alpha_complex_from_points.number_of_vertices() == points.size());
-
std::cout << "simplex_tree.num_simplices()=" << simplex_tree.num_simplices() << std::endl;
BOOST_CHECK(simplex_tree.num_simplices() == 0);
@@ -272,5 +256,45 @@ BOOST_AUTO_TEST_CASE_TEMPLATE(Alpha_complex_from_empty_points, TestedKernel, lis
BOOST_CHECK(simplex_tree.dimension() == -1);
std::cout << "simplex_tree.num_vertices()=" << simplex_tree.num_vertices() << std::endl;
- BOOST_CHECK(simplex_tree.num_vertices() == 0);
+ BOOST_CHECK(simplex_tree.num_vertices() == points.size());
+}
+
+using Inexact_kernel_2 = CGAL::Epick_d< CGAL::Dimension_tag<2> >;
+using Exact_kernel_2 = CGAL::Epeck_d< CGAL::Dimension_tag<2> >;
+using list_of_kernel_2_variants = boost::mpl::list<Inexact_kernel_2, Exact_kernel_2>;
+
+BOOST_AUTO_TEST_CASE_TEMPLATE(Alpha_complex_with_duplicated_points, TestedKernel, list_of_kernel_2_variants) {
+ std::cout << "========== Alpha_complex_with_duplicated_points ==========" << std::endl;
+
+ using Point = typename TestedKernel::Point_d;
+ using Vector_of_points = std::vector<Point>;
+
+ // ----------------------------------------------------------------------------
+ // Init of a list of points
+ // ----------------------------------------------------------------------------
+ Vector_of_points points;
+ points.push_back(Point(1.0, 1.0));
+ points.push_back(Point(7.0, 0.0));
+ points.push_back(Point(4.0, 6.0));
+ points.push_back(Point(9.0, 6.0));
+ points.push_back(Point(0.0, 14.0));
+ points.push_back(Point(2.0, 19.0));
+ points.push_back(Point(9.0, 17.0));
+ // duplicated points
+ points.push_back(Point(1.0, 1.0));
+ points.push_back(Point(7.0, 0.0));
+
+ // ----------------------------------------------------------------------------
+ // Init of an alpha complex from the list of points
+ // ----------------------------------------------------------------------------
+ std::cout << "Init" << std::endl;
+ Gudhi::alpha_complex::Alpha_complex<TestedKernel> alpha_complex_from_points(points);
+
+ Gudhi::Simplex_tree<> simplex_tree;
+ std::cout << "create_complex" << std::endl;
+ BOOST_CHECK(alpha_complex_from_points.create_complex(simplex_tree));
+
+ std::cout << "simplex_tree.num_vertices()=" << simplex_tree.num_vertices()
+ << std::endl;
+ BOOST_CHECK(simplex_tree.num_vertices() < points.size());
}
diff --git a/src/Bottleneck_distance/include/gudhi/Persistence_graph.h b/src/Bottleneck_distance/include/gudhi/Persistence_graph.h
index f791e37c..e1e3522e 100644
--- a/src/Bottleneck_distance/include/gudhi/Persistence_graph.h
+++ b/src/Bottleneck_distance/include/gudhi/Persistence_graph.h
@@ -25,7 +25,7 @@ namespace Gudhi {
namespace persistence_diagram {
-/** \internal \brief Structure representing an euclidean bipartite graph containing
+/** \internal \brief Structure representing a Euclidean bipartite graph containing
* the points from the two persistence diagrams (including the projections).
*
* \ingroup bottleneck_distance
diff --git a/src/Bottleneck_distance/test/bottleneck_unit_test.cpp b/src/Bottleneck_distance/test/bottleneck_unit_test.cpp
index 3fc6fc7b..2c520045 100644
--- a/src/Bottleneck_distance/test/bottleneck_unit_test.cpp
+++ b/src/Bottleneck_distance/test/bottleneck_unit_test.cpp
@@ -15,6 +15,7 @@
#include <random>
#include <gudhi/Bottleneck.h>
+#include <gudhi/Unitary_tests_utils.h>
using namespace Gudhi::persistence_diagram;
@@ -59,24 +60,24 @@ BOOST_AUTO_TEST_CASE(persistence_graph) {
BOOST_CHECK(g.size() == (n1 + n2));
//
BOOST_CHECK((int) d.size() == (n1 + n2)*(n1 + n2) + n1 + n2 + 1);
- BOOST_CHECK(std::count(d.begin(), d.end(), g.distance(0, 0)) > 0);
- BOOST_CHECK(std::count(d.begin(), d.end(), g.distance(0, n1 - 1)) > 0);
- BOOST_CHECK(std::count(d.begin(), d.end(), g.distance(0, n1)) > 0);
- BOOST_CHECK(std::count(d.begin(), d.end(), g.distance(0, n2 - 1)) > 0);
- BOOST_CHECK(std::count(d.begin(), d.end(), g.distance(0, n2)) > 0);
- BOOST_CHECK(std::count(d.begin(), d.end(), g.distance(0, (n1 + n2) - 1)) > 0);
- BOOST_CHECK(std::count(d.begin(), d.end(), g.distance(n1, 0)) > 0);
- BOOST_CHECK(std::count(d.begin(), d.end(), g.distance(n1, n1 - 1)) > 0);
- BOOST_CHECK(std::count(d.begin(), d.end(), g.distance(n1, n1)) > 0);
- BOOST_CHECK(std::count(d.begin(), d.end(), g.distance(n1, n2 - 1)) > 0);
- BOOST_CHECK(std::count(d.begin(), d.end(), g.distance(n1, n2)) > 0);
- BOOST_CHECK(std::count(d.begin(), d.end(), g.distance(n1, (n1 + n2) - 1)) > 0);
- BOOST_CHECK(std::count(d.begin(), d.end(), g.distance((n1 + n2) - 1, 0)) > 0);
- BOOST_CHECK(std::count(d.begin(), d.end(), g.distance((n1 + n2) - 1, n1 - 1)) > 0);
- BOOST_CHECK(std::count(d.begin(), d.end(), g.distance((n1 + n2) - 1, n1)) > 0);
- BOOST_CHECK(std::count(d.begin(), d.end(), g.distance((n1 + n2) - 1, n2 - 1)) > 0);
- BOOST_CHECK(std::count(d.begin(), d.end(), g.distance((n1 + n2) - 1, n2)) > 0);
- BOOST_CHECK(std::count(d.begin(), d.end(), g.distance((n1 + n2) - 1, (n1 + n2) - 1)) > 0);
+ BOOST_CHECK(std::count(d.begin(), d.end(), GUDHI_PROTECT_FLOAT(g.distance(0, 0))) > 0);
+ BOOST_CHECK(std::count(d.begin(), d.end(), GUDHI_PROTECT_FLOAT(g.distance(0, n1 - 1))) > 0);
+ BOOST_CHECK(std::count(d.begin(), d.end(), GUDHI_PROTECT_FLOAT(g.distance(0, n1))) > 0);
+ BOOST_CHECK(std::count(d.begin(), d.end(), GUDHI_PROTECT_FLOAT(g.distance(0, n2 - 1))) > 0);
+ BOOST_CHECK(std::count(d.begin(), d.end(), GUDHI_PROTECT_FLOAT(g.distance(0, n2))) > 0);
+ BOOST_CHECK(std::count(d.begin(), d.end(), GUDHI_PROTECT_FLOAT(g.distance(0, (n1 + n2) - 1))) > 0);
+ BOOST_CHECK(std::count(d.begin(), d.end(), GUDHI_PROTECT_FLOAT(g.distance(n1, 0))) > 0);
+ BOOST_CHECK(std::count(d.begin(), d.end(), GUDHI_PROTECT_FLOAT(g.distance(n1, n1 - 1))) > 0);
+ BOOST_CHECK(std::count(d.begin(), d.end(), GUDHI_PROTECT_FLOAT(g.distance(n1, n1))) > 0);
+ BOOST_CHECK(std::count(d.begin(), d.end(), GUDHI_PROTECT_FLOAT(g.distance(n1, n2 - 1))) > 0);
+ BOOST_CHECK(std::count(d.begin(), d.end(), GUDHI_PROTECT_FLOAT(g.distance(n1, n2))) > 0);
+ BOOST_CHECK(std::count(d.begin(), d.end(), GUDHI_PROTECT_FLOAT(g.distance(n1, (n1 + n2) - 1))) > 0);
+ BOOST_CHECK(std::count(d.begin(), d.end(), GUDHI_PROTECT_FLOAT(g.distance((n1 + n2) - 1, 0))) > 0);
+ BOOST_CHECK(std::count(d.begin(), d.end(), GUDHI_PROTECT_FLOAT(g.distance((n1 + n2) - 1, n1 - 1))) > 0);
+ BOOST_CHECK(std::count(d.begin(), d.end(), GUDHI_PROTECT_FLOAT(g.distance((n1 + n2) - 1, n1))) > 0);
+ BOOST_CHECK(std::count(d.begin(), d.end(), GUDHI_PROTECT_FLOAT(g.distance((n1 + n2) - 1, n2 - 1))) > 0);
+ BOOST_CHECK(std::count(d.begin(), d.end(), GUDHI_PROTECT_FLOAT(g.distance((n1 + n2) - 1, n2))) > 0);
+ BOOST_CHECK(std::count(d.begin(), d.end(), GUDHI_PROTECT_FLOAT(g.distance((n1 + n2) - 1, (n1 + n2) - 1))) > 0);
}
BOOST_AUTO_TEST_CASE(neighbors_finder) {
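Editor's note on the wrapping above: the values stored in d have already been rounded to 64-bit doubles, while a fresh call to g.distance() can be evaluated in extended precision on 32-bit x86, so the exact-equality comparison inside std::count may fail; wrapping with GUDHI_PROTECT_FLOAT (from the newly included gudhi/Unitary_tests_utils.h) presumably forces the same rounding on both sides. The snippet below is only a sketch of the usual technique for forcing such a rounding, not the header's actual definition.

// Editor's sketch of a float-protection helper (illustrative, not the GUDHI definition):
// forcing the value through a volatile memory store rounds any extended-precision
// intermediate down to an actual 64-bit double before it is compared.
inline double protect_float(double x) {
  volatile double stored = x;
  return stored;
}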
diff --git a/src/Nerve_GIC/include/gudhi/GIC.h b/src/Nerve_GIC/include/gudhi/GIC.h
index b8169c59..2a6d4788 100644
--- a/src/Nerve_GIC/include/gudhi/GIC.h
+++ b/src/Nerve_GIC/include/gudhi/GIC.h
@@ -14,7 +14,7 @@
#ifdef GUDHI_USE_TBB
#include <tbb/parallel_for.h>
-#include <tbb/mutex.h>
+#include <mutex>
#endif
#include <gudhi/Debug_utils.h>
@@ -459,7 +459,7 @@ class Cover_complex {
// This cannot be parallelized if thread_local is not defined
// thread_local is not defined for XCode < v.8
#if defined(GUDHI_USE_TBB) && defined(GUDHI_CAN_USE_CXX11_THREAD_LOCAL)
- tbb::mutex deltamutex;
+ std::mutex deltamutex;
tbb::parallel_for(0, N, [&](int i){
std::vector<int> samples(m);
SampleWithoutReplacement(n, m, samples);
@@ -707,7 +707,7 @@ class Cover_complex {
// Sort points according to function values
std::vector<int> points(n);
for (int i = 0; i < n; i++) points[i] = i;
- std::sort(points.begin(), points.end(), [=](const int & p1, const int & p2){return (this->func[p1] < this->func[p2]);});
+ std::sort(points.begin(), points.end(), [this](int p1, int p2){return (this->func[p1] < this->func[p2]);});
int id = 0;
int pos = 0;
@@ -765,7 +765,7 @@ class Cover_complex {
#ifdef GUDHI_USE_TBB
if (verbose) std::cout << "Computing connected components (parallelized)..." << std::endl;
- tbb::mutex covermutex, idmutex;
+ std::mutex covermutex, idmutex;
tbb::parallel_for(0, res, [&](int i){
// Compute connected components
Graph G = one_skeleton.create_subgraph();
@@ -895,7 +895,7 @@ class Cover_complex {
// Compute the geodesic distances to subsamples with Dijkstra
#ifdef GUDHI_USE_TBB
if (verbose) std::cout << "Computing geodesic distances (parallelized)..." << std::endl;
- tbb::mutex coverMutex; tbb::mutex mindistMutex;
+ std::mutex coverMutex; std::mutex mindistMutex;
tbb::parallel_for(0, m, [&](int i){
int seed = voronoi_subsamples[i];
std::vector<double> dmap(n);
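Editor's note: the hunks above swap tbb::mutex for std::mutex, presumably because the TBB mutex types have been deprecated in recent TBB releases. A minimal sketch of the resulting locking pattern (illustrative code, not taken from GIC.h; the actual lock/unlock calls sit outside the shown context):

// Editor's sketch (not part of the patch): protecting shared state inside a TBB loop.
#include <mutex>
#include <vector>
#include <tbb/parallel_for.h>

void parallel_collect(std::vector<double>& shared, int N) {
  std::mutex m;
  tbb::parallel_for(0, N, [&](int i) {
    double local = 0.5 * i;                  // per-iteration work, no lock needed
    std::lock_guard<std::mutex> guard(m);    // lock only around the shared update
    shared.push_back(local);
  });
}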
diff --git a/src/Persistence_representations/include/gudhi/Persistence_intervals.h b/src/Persistence_representations/include/gudhi/Persistence_intervals.h
index e2db4572..ea4220ea 100644
--- a/src/Persistence_representations/include/gudhi/Persistence_intervals.h
+++ b/src/Persistence_representations/include/gudhi/Persistence_intervals.h
@@ -6,6 +6,8 @@
*
* Modification(s):
* - YYYY/MM Author: Description of the modification
+ * - 2019/12 Vincent Rouvreau: Fix #118 - Make histogram_of_lengths and cumulative_histogram_of_lengths
+ * return the exact number_of_bins (was failing on x86)
*/
#ifndef PERSISTENCE_INTERVALS_H_
@@ -335,6 +337,9 @@ std::vector<size_t> Persistence_intervals::histogram_of_lengths(size_t number_of
getchar();
}
}
+ // we want number of bins equals to number_of_bins (some unexpected results on x86)
+ result[number_of_bins-1]+=result[number_of_bins];
+ result.resize(number_of_bins);
if (dbg) {
for (size_t i = 0; i != result.size(); ++i) std::cerr << result[i] << std::endl;
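Editor's note: a simplified stand-in for the binning above (a sketch, not the library code). In this simplified version the interval of maximal length always lands in bin index number_of_bins; in the library code inexact x86 arithmetic could produce the same overflow, hence the fold-and-resize repair that Fix #118 adds.

// Editor's sketch of the repair applied above.
#include <algorithm>
#include <cstddef>
#include <vector>

std::vector<std::size_t> histogram_of_lengths(const std::vector<double>& lengths,
                                              std::size_t number_of_bins) {
  double max_len = *std::max_element(lengths.begin(), lengths.end());
  std::vector<std::size_t> result(number_of_bins + 1, 0);
  for (double l : lengths) {
    // The maximal length maps to bin == number_of_bins, and rounding can push
    // other values there too, so one extra slot is kept during counting.
    std::size_t bin = static_cast<std::size_t>(l / max_len * number_of_bins);
    ++result[bin];
  }
  result[number_of_bins - 1] += result[number_of_bins];  // fold the overflow bin
  result.resize(number_of_bins);                         // exactly number_of_bins bins
  return result;
}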
diff --git a/src/Persistence_representations/test/persistence_intervals_test.cpp b/src/Persistence_representations/test/persistence_intervals_test.cpp
index 3b7a2049..02ea8edb 100644
--- a/src/Persistence_representations/test/persistence_intervals_test.cpp
+++ b/src/Persistence_representations/test/persistence_intervals_test.cpp
@@ -6,6 +6,8 @@
*
* Modification(s):
* - YYYY/MM Author: Description of the modification
+ * - 2019/12 Vincent Rouvreau: Fix #118 - Make histogram_of_lengths and cumulative_histogram_of_lengths
+ * return the exact number_of_bins (was failing on x86)
*/
#define BOOST_TEST_DYN_LINK
@@ -32,17 +34,8 @@ BOOST_AUTO_TEST_CASE(check_min_max_function) {
BOOST_AUTO_TEST_CASE(check_length_of_dominant_intervals) {
Persistence_intervals p("data/file_with_diagram");
std::vector<double> dominant_ten_intervals_length = p.length_of_dominant_intervals(10);
- std::vector<double> dominant_intervals_length;
- dominant_intervals_length.push_back(0.862625);
- dominant_intervals_length.push_back(0.800893);
- dominant_intervals_length.push_back(0.762061);
- dominant_intervals_length.push_back(0.756501);
- dominant_intervals_length.push_back(0.729367);
- dominant_intervals_length.push_back(0.718177);
- dominant_intervals_length.push_back(0.708395);
- dominant_intervals_length.push_back(0.702844);
- dominant_intervals_length.push_back(0.700468);
- dominant_intervals_length.push_back(0.622177);
+ std::vector<double> dominant_intervals_length{0.862625, 0.800893, 0.762061, 0.756501, 0.729367,
+ 0.718177, 0.708395, 0.702844, 0.700468, 0.622177};
for (size_t i = 0; i != dominant_ten_intervals_length.size(); ++i) {
GUDHI_TEST_FLOAT_EQUALITY_CHECK(dominant_ten_intervals_length[i], dominant_intervals_length[i],
Gudhi::Persistence_representations::epsi);
@@ -52,17 +45,11 @@ BOOST_AUTO_TEST_CASE(check_dominant_intervals) {
Persistence_intervals p("data/file_with_diagram");
std::vector<std::pair<double, double> > ten_dominant_intervals = p.dominant_intervals(10);
- std::vector<std::pair<double, double> > templ;
- templ.push_back(std::pair<double, double>(0.114718, 0.977343));
- templ.push_back(std::pair<double, double>(0.133638, 0.93453));
- templ.push_back(std::pair<double, double>(0.104599, 0.866659));
- templ.push_back(std::pair<double, double>(0.149798, 0.906299));
- templ.push_back(std::pair<double, double>(0.247352, 0.976719));
- templ.push_back(std::pair<double, double>(0.192675, 0.910852));
- templ.push_back(std::pair<double, double>(0.191836, 0.900231));
- templ.push_back(std::pair<double, double>(0.284998, 0.987842));
- templ.push_back(std::pair<double, double>(0.294069, 0.994537));
- templ.push_back(std::pair<double, double>(0.267421, 0.889597));
+ std::vector<std::pair<double, double> > templ{ {0.114718, 0.977343}, {0.133638, 0.93453},
+ {0.104599, 0.866659}, {0.149798, 0.906299},
+ {0.247352, 0.976719}, {0.192675, 0.910852},
+ {0.191836, 0.900231}, {0.284998, 0.987842},
+ {0.294069, 0.994537}, {0.267421, 0.889597} };
for (size_t i = 0; i != ten_dominant_intervals.size(); ++i) {
GUDHI_TEST_FLOAT_EQUALITY_CHECK(ten_dominant_intervals[i].first, templ[i].first,
@@ -75,18 +62,7 @@ BOOST_AUTO_TEST_CASE(check_dominant_intervals) {
BOOST_AUTO_TEST_CASE(check_histogram_of_lengths) {
Persistence_intervals p("data/file_with_diagram");
std::vector<size_t> histogram = p.histogram_of_lengths(10);
- std::vector<size_t> template_histogram;
- template_histogram.push_back(10);
- template_histogram.push_back(5);
- template_histogram.push_back(3);
- template_histogram.push_back(4);
- template_histogram.push_back(4);
- template_histogram.push_back(3);
- template_histogram.push_back(6);
- template_histogram.push_back(1);
- template_histogram.push_back(7);
- template_histogram.push_back(1);
- template_histogram.push_back(1);
+ std::vector<size_t> template_histogram{10, 5, 3, 4, 4, 3, 6, 1, 7, 2};
for (size_t i = 0; i != histogram.size(); ++i) {
BOOST_CHECK(histogram[i] == template_histogram[i]);
}
@@ -95,18 +71,7 @@ BOOST_AUTO_TEST_CASE(check_histogram_of_lengths) {
BOOST_AUTO_TEST_CASE(check_cumulative_histograms_of_lengths) {
Persistence_intervals p("data/file_with_diagram");
std::vector<size_t> cumulative_histogram = p.cumulative_histogram_of_lengths(10);
- std::vector<size_t> template_cumulative_histogram;
- template_cumulative_histogram.push_back(10);
- template_cumulative_histogram.push_back(15);
- template_cumulative_histogram.push_back(18);
- template_cumulative_histogram.push_back(22);
- template_cumulative_histogram.push_back(26);
- template_cumulative_histogram.push_back(29);
- template_cumulative_histogram.push_back(35);
- template_cumulative_histogram.push_back(36);
- template_cumulative_histogram.push_back(43);
- template_cumulative_histogram.push_back(44);
- template_cumulative_histogram.push_back(45);
+ std::vector<size_t> template_cumulative_histogram{10, 15, 18, 22, 26, 29, 35, 36, 43, 45};
for (size_t i = 0; i != cumulative_histogram.size(); ++i) {
BOOST_CHECK(cumulative_histogram[i] == template_cumulative_histogram[i]);
@@ -116,17 +81,8 @@ BOOST_AUTO_TEST_CASE(check_characteristic_function_of_diagram) {
Persistence_intervals p("data/file_with_diagram");
std::pair<double, double> min_max_ = p.get_x_range();
std::vector<double> char_funct_diag = p.characteristic_function_of_diagram(min_max_.first, min_max_.second);
- std::vector<double> template_char_funct_diag;
- template_char_funct_diag.push_back(0.370665);
- template_char_funct_diag.push_back(0.84058);
- template_char_funct_diag.push_back(1.24649);
- template_char_funct_diag.push_back(1.3664);
- template_char_funct_diag.push_back(1.34032);
- template_char_funct_diag.push_back(1.31904);
- template_char_funct_diag.push_back(1.14076);
- template_char_funct_diag.push_back(0.991259);
- template_char_funct_diag.push_back(0.800714);
- template_char_funct_diag.push_back(0.0676303);
+ std::vector<double> template_char_funct_diag{0.370665, 0.84058, 1.24649, 1.3664, 1.34032,
+ 1.31904, 1.14076, 0.991259, 0.800714, 0.0676303};
for (size_t i = 0; i != char_funct_diag.size(); ++i) {
GUDHI_TEST_FLOAT_EQUALITY_CHECK(char_funct_diag[i], template_char_funct_diag[i],
@@ -139,18 +95,8 @@ BOOST_AUTO_TEST_CASE(check_cumulative_characteristic_function_of_diagram) {
std::pair<double, double> min_max_ = p.get_x_range();
std::vector<double> cumul_char_funct_diag =
p.cumulative_characteristic_function_of_diagram(min_max_.first, min_max_.second);
- std::vector<double> template_char_funct_diag_cumul;
-
- template_char_funct_diag_cumul.push_back(0.370665);
- template_char_funct_diag_cumul.push_back(1.21125);
- template_char_funct_diag_cumul.push_back(2.45774);
- template_char_funct_diag_cumul.push_back(3.82414);
- template_char_funct_diag_cumul.push_back(5.16446);
- template_char_funct_diag_cumul.push_back(6.4835);
- template_char_funct_diag_cumul.push_back(7.62426);
- template_char_funct_diag_cumul.push_back(8.61552);
- template_char_funct_diag_cumul.push_back(9.41623);
- template_char_funct_diag_cumul.push_back(9.48386);
+ std::vector<double> template_char_funct_diag_cumul{0.370665, 1.21125, 2.45774, 3.82414, 5.16446,
+ 6.4835, 7.62426, 8.61552, 9.41623, 9.48386};
for (size_t i = 0; i != cumul_char_funct_diag.size(); ++i) {
GUDHI_TEST_FLOAT_EQUALITY_CHECK(cumul_char_funct_diag[i], template_char_funct_diag_cumul[i],
@@ -160,97 +106,29 @@ BOOST_AUTO_TEST_CASE(check_cumulative_characteristic_function_of_diagram) {
BOOST_AUTO_TEST_CASE(check_compute_persistent_betti_numbers) {
Persistence_intervals p("data/file_with_diagram");
- std::vector<std::pair<double, size_t> > pbns;
- pbns.push_back(std::pair<double, size_t>(0.0290362, 1));
- pbns.push_back(std::pair<double, size_t>(0.0307676, 2));
- pbns.push_back(std::pair<double, size_t>(0.0366312, 3));
- pbns.push_back(std::pair<double, size_t>(0.0544614, 4));
- pbns.push_back(std::pair<double, size_t>(0.0920033, 5));
- pbns.push_back(std::pair<double, size_t>(0.104599, 6));
- pbns.push_back(std::pair<double, size_t>(0.114718, 7));
- pbns.push_back(std::pair<double, size_t>(0.117379, 8));
- pbns.push_back(std::pair<double, size_t>(0.123493, 9));
- pbns.push_back(std::pair<double, size_t>(0.133638, 10));
- pbns.push_back(std::pair<double, size_t>(0.137798, 9));
- pbns.push_back(std::pair<double, size_t>(0.149798, 10));
- pbns.push_back(std::pair<double, size_t>(0.155421, 11));
- pbns.push_back(std::pair<double, size_t>(0.158443, 12));
- pbns.push_back(std::pair<double, size_t>(0.176956, 13));
- pbns.push_back(std::pair<double, size_t>(0.183234, 12));
- pbns.push_back(std::pair<double, size_t>(0.191069, 13));
- pbns.push_back(std::pair<double, size_t>(0.191333, 14));
- pbns.push_back(std::pair<double, size_t>(0.191836, 15));
- pbns.push_back(std::pair<double, size_t>(0.192675, 16));
- pbns.push_back(std::pair<double, size_t>(0.208564, 17));
- pbns.push_back(std::pair<double, size_t>(0.218425, 18));
- pbns.push_back(std::pair<double, size_t>(0.219902, 17));
- pbns.push_back(std::pair<double, size_t>(0.23233, 16));
- pbns.push_back(std::pair<double, size_t>(0.234558, 17));
- pbns.push_back(std::pair<double, size_t>(0.237166, 16));
- pbns.push_back(std::pair<double, size_t>(0.247352, 17));
- pbns.push_back(std::pair<double, size_t>(0.267421, 18));
- pbns.push_back(std::pair<double, size_t>(0.268093, 19));
- pbns.push_back(std::pair<double, size_t>(0.278734, 18));
- pbns.push_back(std::pair<double, size_t>(0.284722, 19));
- pbns.push_back(std::pair<double, size_t>(0.284998, 20));
- pbns.push_back(std::pair<double, size_t>(0.294069, 21));
- pbns.push_back(std::pair<double, size_t>(0.306293, 22));
- pbns.push_back(std::pair<double, size_t>(0.322361, 21));
- pbns.push_back(std::pair<double, size_t>(0.323152, 22));
- pbns.push_back(std::pair<double, size_t>(0.371021, 23));
- pbns.push_back(std::pair<double, size_t>(0.372395, 24));
- pbns.push_back(std::pair<double, size_t>(0.387744, 25));
- pbns.push_back(std::pair<double, size_t>(0.435537, 26));
- pbns.push_back(std::pair<double, size_t>(0.462911, 25));
- pbns.push_back(std::pair<double, size_t>(0.483569, 26));
- pbns.push_back(std::pair<double, size_t>(0.489209, 25));
- pbns.push_back(std::pair<double, size_t>(0.517115, 24));
- pbns.push_back(std::pair<double, size_t>(0.522197, 23));
- pbns.push_back(std::pair<double, size_t>(0.532665, 22));
- pbns.push_back(std::pair<double, size_t>(0.545262, 23));
- pbns.push_back(std::pair<double, size_t>(0.587227, 22));
- pbns.push_back(std::pair<double, size_t>(0.593036, 23));
- pbns.push_back(std::pair<double, size_t>(0.602647, 24));
- pbns.push_back(std::pair<double, size_t>(0.605044, 25));
- pbns.push_back(std::pair<double, size_t>(0.621962, 24));
- pbns.push_back(std::pair<double, size_t>(0.629449, 23));
- pbns.push_back(std::pair<double, size_t>(0.636719, 22));
- pbns.push_back(std::pair<double, size_t>(0.64957, 21));
- pbns.push_back(std::pair<double, size_t>(0.650781, 22));
- pbns.push_back(std::pair<double, size_t>(0.654951, 23));
- pbns.push_back(std::pair<double, size_t>(0.683489, 24));
- pbns.push_back(std::pair<double, size_t>(0.687172, 23));
- pbns.push_back(std::pair<double, size_t>(0.69703, 22));
- pbns.push_back(std::pair<double, size_t>(0.701174, 21));
- pbns.push_back(std::pair<double, size_t>(0.717623, 22));
- pbns.push_back(std::pair<double, size_t>(0.722023, 21));
- pbns.push_back(std::pair<double, size_t>(0.722298, 20));
- pbns.push_back(std::pair<double, size_t>(0.725347, 19));
- pbns.push_back(std::pair<double, size_t>(0.73071, 18));
- pbns.push_back(std::pair<double, size_t>(0.758355, 17));
- pbns.push_back(std::pair<double, size_t>(0.770913, 18));
- pbns.push_back(std::pair<double, size_t>(0.790833, 17));
- pbns.push_back(std::pair<double, size_t>(0.821211, 16));
- pbns.push_back(std::pair<double, size_t>(0.849305, 17));
- pbns.push_back(std::pair<double, size_t>(0.853669, 16));
- pbns.push_back(std::pair<double, size_t>(0.866659, 15));
- pbns.push_back(std::pair<double, size_t>(0.872896, 16));
- pbns.push_back(std::pair<double, size_t>(0.889597, 15));
- pbns.push_back(std::pair<double, size_t>(0.900231, 14));
- pbns.push_back(std::pair<double, size_t>(0.903847, 13));
- pbns.push_back(std::pair<double, size_t>(0.906299, 12));
- pbns.push_back(std::pair<double, size_t>(0.910852, 11));
- pbns.push_back(std::pair<double, size_t>(0.93453, 10));
- pbns.push_back(std::pair<double, size_t>(0.944757, 9));
- pbns.push_back(std::pair<double, size_t>(0.947812, 8));
- pbns.push_back(std::pair<double, size_t>(0.959154, 7));
- pbns.push_back(std::pair<double, size_t>(0.975654, 6));
- pbns.push_back(std::pair<double, size_t>(0.976719, 5));
- pbns.push_back(std::pair<double, size_t>(0.977343, 4));
- pbns.push_back(std::pair<double, size_t>(0.980129, 3));
- pbns.push_back(std::pair<double, size_t>(0.987842, 2));
- pbns.push_back(std::pair<double, size_t>(0.990127, 1));
- pbns.push_back(std::pair<double, size_t>(0.994537, 0));
+ std::vector<std::pair<double, size_t> > pbns{ {0.0290362, 1}, {0.0307676, 2}, {0.0366312, 3}, {0.0544614, 4},
+ {0.0920033, 5}, {0.104599, 6}, {0.114718, 7}, {0.117379, 8},
+ {0.123493, 9}, {0.133638, 10}, {0.137798, 9}, {0.149798, 10},
+ {0.155421, 11}, {0.158443, 12}, {0.176956, 13}, {0.183234, 12},
+ {0.191069, 13}, {0.191333, 14}, {0.191836, 15}, {0.192675, 16},
+ {0.208564, 17}, {0.218425, 18}, {0.219902, 17}, {0.23233, 16},
+ {0.234558, 17}, {0.237166, 16}, {0.247352, 17}, {0.267421, 18},
+ {0.268093, 19}, {0.278734, 18}, {0.284722, 19}, {0.284998, 20},
+ {0.294069, 21}, {0.306293, 22}, {0.322361, 21}, {0.323152, 22},
+ {0.371021, 23}, {0.372395, 24}, {0.387744, 25}, {0.435537, 26},
+ {0.462911, 25}, {0.483569, 26}, {0.489209, 25}, {0.517115, 24},
+ {0.522197, 23}, {0.532665, 22}, {0.545262, 23}, {0.587227, 22},
+ {0.593036, 23}, {0.602647, 24}, {0.605044, 25}, {0.621962, 24},
+ {0.629449, 23}, {0.636719, 22}, {0.64957, 21}, {0.650781, 22},
+ {0.654951, 23}, {0.683489, 24}, {0.687172, 23}, {0.69703, 22},
+ {0.701174, 21}, {0.717623, 22}, {0.722023, 21}, {0.722298, 20},
+ {0.725347, 19}, {0.73071, 18}, {0.758355, 17}, {0.770913, 18},
+ {0.790833, 17}, {0.821211, 16}, {0.849305, 17}, {0.853669, 16},
+ {0.866659, 15}, {0.872896, 16}, {0.889597, 15}, {0.900231, 14},
+ {0.903847, 13}, {0.906299, 12}, {0.910852, 11}, {0.93453, 10},
+ {0.944757, 9}, {0.947812, 8}, {0.959154, 7}, {0.975654, 6},
+ {0.976719, 5}, {0.977343, 4}, {0.980129, 3}, {0.987842, 2},
+ {0.990127, 1}, {0.994537, 0} };
std::vector<std::pair<double, size_t> > pbns_new = p.compute_persistent_betti_numbers();
for (size_t i = 0; i != pbns.size(); ++i) {
@@ -262,17 +140,8 @@ BOOST_AUTO_TEST_CASE(check_compute_persistent_betti_numbers) {
BOOST_AUTO_TEST_CASE(check_k_n_n) {
Persistence_intervals p("data/file_with_diagram");
std::vector<double> knn = p.k_n_n(5);
- std::vector<double> knn_template;
- knn_template.push_back(1.04208);
- knn_template.push_back(1.00344);
- knn_template.push_back(0.979395);
- knn_template.push_back(0.890643);
- knn_template.push_back(0.874769);
- knn_template.push_back(0.845787);
- knn_template.push_back(0.819713);
- knn_template.push_back(0.803984);
- knn_template.push_back(0.799864);
- knn_template.push_back(0.786945);
+ std::vector<double> knn_template{1.04208, 1.00344, 0.979395, 0.890643, 0.874769,
+ 0.845787, 0.819713, 0.803984, 0.799864, 0.786945};
for (size_t i = 0; i != knn.size(); ++i) {
GUDHI_TEST_FLOAT_EQUALITY_CHECK(knn[i], knn_template[i], Gudhi::Persistence_representations::epsi);
diff --git a/src/Simplex_tree/example/CMakeLists.txt b/src/Simplex_tree/example/CMakeLists.txt
index f99b164c..8a8cac58 100644
--- a/src/Simplex_tree/example/CMakeLists.txt
+++ b/src/Simplex_tree/example/CMakeLists.txt
@@ -16,6 +16,9 @@ endif()
add_test(NAME Simplex_tree_example_simple_simplex_tree COMMAND $<TARGET_FILE:Simplex_tree_example_simple_simplex_tree>)
add_executable ( Simplex_tree_example_mini_simplex_tree mini_simplex_tree.cpp )
+if (TBB_FOUND)
+ target_link_libraries(Simplex_tree_example_mini_simplex_tree ${TBB_LIBRARIES})
+endif()
add_test(NAME Simplex_tree_example_mini_simplex_tree COMMAND $<TARGET_FILE:Simplex_tree_example_mini_simplex_tree>)
# An example with Simplex-tree using CGAL alpha_shapes_3
diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree.h b/src/Simplex_tree/include/gudhi/Simplex_tree.h
index fafdb01c..76608008 100644
--- a/src/Simplex_tree/include/gudhi/Simplex_tree.h
+++ b/src/Simplex_tree/include/gudhi/Simplex_tree.h
@@ -1378,7 +1378,7 @@ class Simplex_tree {
private:
bool rec_prune_above_filtration(Siblings* sib, Filtration_value filt) {
auto&& list = sib->members();
- auto last = std::remove_if(list.begin(), list.end(), [=](Dit_value_t& simplex) {
+ auto last = std::remove_if(list.begin(), list.end(), [this,filt](Dit_value_t& simplex) {
if (simplex.second.filtration() <= filt) return false;
if (has_children(&simplex)) rec_delete(simplex.second.children());
// dimension may need to be lowered
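Editor's note: the capture-list change above is behaviour-preserving. A toy sketch of the difference (not GUDHI code): [=] copies every local the closure touches and, since C++20, its implicit capture of this is deprecated, whereas [this, filt] spells out exactly what the lambda depends on.

// Editor's sketch (not part of the patch): explicit captures instead of [=].
#include <algorithm>
#include <vector>

struct Tree {
  double bound = 1.0;  // toy member, accessed through the captured `this`
  void prune_above(std::vector<double>& filts, double filt) {
    auto last = std::remove_if(filts.begin(), filts.end(),
                               // was [=]: copy of filt plus an implicit `this` capture
                               [this, filt](double f) { return f > filt && f > bound; });
    filts.erase(last, filts.end());
  }
};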
diff --git a/src/Tangential_complex/include/gudhi/Tangential_complex.h b/src/Tangential_complex/include/gudhi/Tangential_complex.h
index f058fa9f..f007bdd5 100644
--- a/src/Tangential_complex/include/gudhi/Tangential_complex.h
+++ b/src/Tangential_complex/include/gudhi/Tangential_complex.h
@@ -60,7 +60,7 @@
#ifdef GUDHI_USE_TBB
#include <tbb/parallel_for.h>
#include <tbb/combinable.h>
-#include <tbb/mutex.h>
+#include <mutex>
#endif
// #define GUDHI_TC_EXPORT_NORMALS // Only for 3D surfaces (k=2, d=3)
@@ -147,7 +147,7 @@ class Tangential_complex {
typedef typename Tr_traits::Vector_d Tr_vector;
#if defined(GUDHI_USE_TBB)
- typedef tbb::mutex Mutex_for_perturb;
+ typedef std::mutex Mutex_for_perturb;
typedef Vector Translation_for_perturb;
typedef std::vector<Atomic_wrapper<FT> > Weights;
#else
diff --git a/src/Witness_complex/example/CMakeLists.txt b/src/Witness_complex/example/CMakeLists.txt
index 5860f3a3..2659798e 100644
--- a/src/Witness_complex/example/CMakeLists.txt
+++ b/src/Witness_complex/example/CMakeLists.txt
@@ -12,10 +12,18 @@ install(TARGETS Witness_complex_example_nearest_landmark_table DESTINATION bin)
# CGAL and Eigen3 are required for Euclidean version of Witness
if(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
add_executable( Witness_complex_example_off example_witness_complex_off.cpp )
+ if (TBB_FOUND)
+ target_link_libraries(Witness_complex_example_off ${TBB_LIBRARIES})
+ endif()
add_executable ( Witness_complex_example_sphere example_witness_complex_sphere.cpp )
+ if (TBB_FOUND)
+ target_link_libraries(Witness_complex_example_sphere ${TBB_LIBRARIES})
+ endif()
add_executable( Witness_complex_example_strong_off example_strong_witness_complex_off.cpp )
- target_link_libraries(Witness_complex_example_strong_off)
+ if (TBB_FOUND)
+ target_link_libraries(Witness_complex_example_strong_off ${TBB_LIBRARIES})
+ endif()
add_test(NAME Witness_complex_example_off_test_torus
COMMAND $<TARGET_FILE:Witness_complex_example_off>
diff --git a/src/cmake/modules/GUDHI_third_party_libraries.cmake b/src/cmake/modules/GUDHI_third_party_libraries.cmake
index 24a34150..359d1c12 100644
--- a/src/cmake/modules/GUDHI_third_party_libraries.cmake
+++ b/src/cmake/modules/GUDHI_third_party_libraries.cmake
@@ -35,6 +35,9 @@ if(CGAL_FOUND)
include( ${CGAL_USE_FILE} )
endif()
+# For those who dislike bundled dependencies, this indicates where to find a preinstalled Hera.
+set(HERA_WASSERSTEIN_INCLUDE_DIR ${CMAKE_SOURCE_DIR}/ext/hera/geom_matching/wasserstein/include CACHE PATH "Directory where one can find Hera's wasserstein.h")
+
option(WITH_GUDHI_USE_TBB "Build with Intel TBB parallelization" ON)
# Find TBB package for parallel sort - not mandatory, just optional.
@@ -127,6 +130,7 @@ if( PYTHONINTERP_FOUND )
find_python_module("sphinx")
find_python_module("sklearn")
find_python_module("ot")
+ find_python_module("pybind11")
endif()
if(NOT GUDHI_PYTHON_PATH)
diff --git a/src/cmake/modules/GUDHI_user_version_target.cmake b/src/cmake/modules/GUDHI_user_version_target.cmake
index 4fa74330..5047252f 100644
--- a/src/cmake/modules/GUDHI_user_version_target.cmake
+++ b/src/cmake/modules/GUDHI_user_version_target.cmake
@@ -33,8 +33,6 @@ add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
copy_directory ${CMAKE_CURRENT_BINARY_DIR}/biblio ${GUDHI_USER_VERSION_DIR}/biblio)
add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
- copy ${CMAKE_SOURCE_DIR}/Conventions.txt ${GUDHI_USER_VERSION_DIR}/Conventions.txt)
-add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
copy ${CMAKE_SOURCE_DIR}/README.md ${GUDHI_USER_VERSION_DIR}/README.md)
add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
copy ${CMAKE_SOURCE_DIR}/LICENSE ${GUDHI_USER_VERSION_DIR}/LICENSE)
@@ -56,6 +54,9 @@ add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
copy_directory ${CMAKE_SOURCE_DIR}/src/GudhUI ${GUDHI_USER_VERSION_DIR}/GudhUI)
+add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E
+ copy_directory ${CMAKE_SOURCE_DIR}/ext/hera/geom_matching/wasserstein/include ${GUDHI_USER_VERSION_DIR}/ext/hera/geom_matching/wasserstein/include)
+
set(GUDHI_DIRECTORIES "doc;example;concept;utilities")
set(GUDHI_INCLUDE_DIRECTORIES "include/gudhi")
@@ -95,4 +96,4 @@ foreach(GUDHI_MODULE ${GUDHI_MODULES_FULL_LIST})
endforeach()
endforeach(GUDHI_INCLUDE_DIRECTORY ${GUDHI_INCLUDE_DIRECTORIES})
-endforeach(GUDHI_MODULE ${GUDHI_MODULES_FULL_LIST}) \ No newline at end of file
+endforeach(GUDHI_MODULE ${GUDHI_MODULES_FULL_LIST})
diff --git a/src/common/doc/header.html b/src/common/doc/header.html
index 9fdb2321..99ab6bb7 100644
--- a/src/common/doc/header.html
+++ b/src/common/doc/header.html
@@ -56,7 +56,7 @@ $extrastylesheet
<a href="#">Download</a>
<ul class="dropdown">
<li><a href="/licensing/">Licensing</a></li>
- <li><a href="https://gforge.inria.fr/frs/download.php/latestzip/5253/library-latest.zip" target="_blank">Get the latest sources</a></li>
+ <li><a href="https://github.com/GUDHI/gudhi-devel/releases/latest" target="_blank">Get the latest sources</a></li>
<li><a href="/conda/">Conda package</a></li>
<li><a href="/dockerfile/">Dockerfile</a></li>
</ul>
diff --git a/src/common/doc/installation.h b/src/common/doc/installation.h
index d70a2efa..ce2c5448 100644
--- a/src/common/doc/installation.h
+++ b/src/common/doc/installation.h
@@ -33,7 +33,7 @@ make \endverbatim
* \subsection testsuites Test suites
* To test your build, run the following command in a terminal:
* \verbatim make test \endverbatim
- * `make test` is using <a href="https://cmake.org/cmake/help/latest/manual/ctest.1.html">Ctest<\a> (CMake test driver
+ * `make test` is using <a href="https://cmake.org/cmake/help/latest/manual/ctest.1.html">Ctest</a> (CMake test driver
* program). If some of the tests are failing, please send us the result of the following command:
* \verbatim ctest --output-on-failure \endverbatim
*
diff --git a/src/common/doc/main_page.md b/src/common/doc/main_page.md
index e8d11fdf..6ea10b88 100644
--- a/src/common/doc/main_page.md
+++ b/src/common/doc/main_page.md
@@ -4,8 +4,8 @@
\image html "Gudhi_banner.png"
<br><br><br><br>
-## Complexes {#Complexes}
-### Cubical complex
+## Data structures for cell complexes {#Complexes}
+### Cubical complexes
<table>
<tr>
@@ -29,246 +29,269 @@
</tr>
</table>
-### Simplicial complex
-
-#### Alpha complex
+### Simplicial complexes
+#### Simplex tree
<table>
<tr>
<td width="35%" rowspan=2>
- \image html "alpha_complex_representation.png"
+ \image html "Simplex_tree_representation.png"
</td>
<td width="50%">
- Alpha complex is a simplicial complex constructed from the finite cells of a Delaunay Triangulation.<br>
- The filtration value of each simplex is computed as the square of the circumradius of the simplex if the
- circumsphere is empty (the simplex is then said to be Gabriel), and as the minimum of the filtration
- values of the codimension 1 cofaces that make it not Gabriel otherwise.
- All simplices that have a filtration value strictly greater than a given alpha squared value are not inserted into
- the complex.<br>
- For performances reasons, it is advised to use \ref cgal &ge; 5.0.0.
+ The simplex tree is an efficient and flexible
+ data structure for representing general (filtered) simplicial complexes. The data structure
+ is described in \cite boissonnatmariasimplextreealgorithmica .
</td>
<td width="15%">
- <b>Author:</b> Vincent Rouvreau<br>
- <b>Introduced in:</b> GUDHI 1.3.0<br>
- <b>Copyright:</b> MIT [(GPL v3)](../../licensing/)<br>
- <b>Requires:</b> \ref eigen &ge; 3.1.0 and \ref cgal &ge; 4.11.0
+ <b>Author:</b> Cl&eacute;ment Maria<br>
+ <b>Introduced in:</b> GUDHI 1.0.0<br>
+ <b>Copyright:</b> MIT<br>
</td>
</tr>
<tr>
<td colspan=2 height="25">
- <b>User manual:</b> \ref alpha_complex
+ <b>User manual:</b> \ref simplex_tree
</td>
</tr>
</table>
-#### Čech complex
+#### Toplex Map
<table>
- <tr>
+ <tr>
<td width="35%" rowspan=2>
- \image html "cech_complex_representation.png"
+ \image html "map.png"
</td>
<td width="50%">
- The Čech complex is a simplicial complex constructed from a proximity graph.
- The set of all simplices is filtered by the radius of their minimal enclosing ball.
+ The Toplex map data structure is composed firstly of a raw storage of toplices (the maximal simplices)
+ and secondly of a map which associate any vertex to a set of pointers toward all toplices
+ containing this vertex.
</td>
<td width="15%">
- <b>Author:</b> Vincent Rouvreau<br>
- <b>Introduced in:</b> GUDHI 2.2.0<br>
- <b>Copyright:</b> MIT [(GPL v3)](../../licensing/)<br>
- <b>Includes:</b> [Miniball](https://people.inf.ethz.ch/gaertner/subdir/software/miniball.html)<br>
+ <b>Author:</b> Fran&ccedil;ois Godi<br>
+ <b>Introduced in:</b> GUDHI 2.1.0<br>
+ <b>Copyright:</b> MIT<br>
</td>
</tr>
<tr>
<td colspan=2 height="25">
- <b>User manual:</b> \ref cech_complex
+ <b>User manual:</b> \ref toplex_map
</td>
</tr>
</table>
-#### Rips complex
+#### Skeleton blocker
<table>
<tr>
<td width="35%" rowspan=2>
- \image html "rips_complex_representation.png"
+ \image html "ds_representation.png"
</td>
<td width="50%">
- Rips complex is a simplicial complex constructed from a one skeleton graph.<br>
- The filtration value of each edge is computed from a user-given distance function and is inserted until a
- user-given threshold value.<br>
- This complex can be built from a point cloud and a distance function, or from a distance matrix.
+ The Skeleton-Blocker data-structure proposes a light encoding for simplicial complexes by storing only an *implicit*
+ representation of its simplices \cite socg_blockers_2011,\cite blockers2012. Intuitively, it just stores the
+ 1-skeleton of a simplicial complex with a graph and the set of its "missing faces" that is very small in practice.
+ This data-structure handles all simplicial complexes operations such as simplex enumeration or simplex removal but
+ operations that are particularly efficient are operations that do not require simplex enumeration such as edge
+ iteration, link computation or simplex contraction.
</td>
<td width="15%">
- <b>Author:</b> Cl&eacute;ment Maria, Pawel Dlotko, Vincent Rouvreau, Marc Glisse<br>
- <b>Introduced in:</b> GUDHI 2.0.0<br>
+ <b>Author:</b> David Salinas<br>
+ <b>Introduced in:</b> GUDHI 1.1.0<br>
<b>Copyright:</b> MIT<br>
</td>
</tr>
<tr>
<td colspan=2 height="25">
- <b>User manual:</b> \ref rips_complex
+ <b>User manual:</b> \ref skbl
</td>
</tr>
</table>
-#### Witness complex
+#### Basic operation: contraction
<table>
<tr>
<td width="35%" rowspan=2>
- \image html "Witness_complex_representation.png"
+ \image html "sphere_contraction_representation.png"
</td>
<td width="50%">
- Witness complex \f$ Wit(W,L) \f$ is a simplicial complex defined on two sets of points in \f$\mathbb{R}^D\f$.
- The data structure is described in \cite boissonnatmariasimplextreealgorithmica .
+ The purpose of this package is to offer a user-friendly interface for edge contraction simplification of huge
+ simplicial complexes. It uses the \ref skbl data-structure whose size remains small during simplification of most
+ used geometrical complexes of topological data analysis such as the Rips or the Delaunay complexes. In practice,
+ the size of this data-structure is even much lower than the total number of simplices.
</td>
<td width="15%">
- <b>Author:</b> Siargey Kachanovich<br>
- <b>Introduced in:</b> GUDHI 1.3.0<br>
- <b>Copyright:</b> MIT ([GPL v3](../../licensing/) for Euclidean version)<br>
- <b>Euclidean version requires:</b> \ref eigen &ge; 3.1.0 and \ref cgal &ge; 4.11.0
+ <b>Author:</b> David Salinas<br>
+ <b>Introduced in:</b> GUDHI 1.1.0<br>
+ <b>Copyright:</b> MIT [(LGPL v3)](../../licensing/)<br>
+ <b>Requires:</b> \ref cgal &ge; 4.11.0
</td>
</tr>
<tr>
<td colspan=2 height="25">
- <b>User manual:</b> \ref witness_complex
+ <b>User manual:</b> \ref contr
</td>
</tr>
</table>
-### Cover Complexes
+## Filtrations and reconstructions {#FiltrationsReconstructions}
+### Alpha complex
+
<table>
<tr>
<td width="35%" rowspan=2>
- \image html "gicvisu.jpg"
+ \image html "alpha_complex_representation.png"
</td>
<td width="50%">
- Nerves and Graph Induced Complexes are cover complexes, i.e. simplicial complexes that provably contain
- topological information about the input data. They can be computed with a cover of the
- data, that comes i.e. from the preimage of a family of intervals covering the image
- of a scalar-valued function defined on the data. <br>
+ Alpha complex is a simplicial complex constructed from the finite cells of a Delaunay Triangulation.<br>
+ The filtration value of each simplex is computed as the square of the circumradius of the simplex if the
+ circumsphere is empty (the simplex is then said to be Gabriel), and as the minimum of the filtration
+ values of the codimension 1 cofaces that make it not Gabriel otherwise.
+ All simplices that have a filtration value \f$ > \alpha^2 \f$ are removed from the Delaunay complex
+ when creating the simplicial complex if it is specified.<br>
+ For performances reasons, it is advised to use \ref cgal &ge; 5.0.0.
</td>
<td width="15%">
- <b>Author:</b> Mathieu Carri&egrave;re<br>
- <b>Introduced in:</b> GUDHI 2.1.0<br>
+ <b>Author:</b> Vincent Rouvreau<br>
+ <b>Introduced in:</b> GUDHI 1.3.0<br>
<b>Copyright:</b> MIT [(GPL v3)](../../licensing/)<br>
- <b>Requires:</b> \ref cgal &ge; 4.11.0
+ <b>Requires:</b> \ref eigen &ge; 3.1.0 and \ref cgal &ge; 4.11.0
</td>
</tr>
<tr>
<td colspan=2 height="25">
- <b>User manual:</b> \ref cover_complex
+ <b>User manual:</b> \ref alpha_complex
</td>
</tr>
</table>
-## Data structures and basic operations {#DataStructuresAndBasicOperations}
+### Čech complex
+
+<table>
+ <tr>
+ <td width="35%" rowspan=2>
+ \image html "cech_complex_representation.png"
+ </td>
+ <td width="50%">
+ The Čech complex is a simplicial complex constructed from a proximity graph.
+ The set of all simplices is filtered by the radius of their minimal enclosing ball.
+ </td>
+ <td width="15%">
+ <b>Author:</b> Vincent Rouvreau<br>
+ <b>Introduced in:</b> GUDHI 2.2.0<br>
+ <b>Copyright:</b> MIT [(GPL v3)](../../licensing/)<br>
+ <b>Includes:</b> [Miniball](https://people.inf.ethz.ch/gaertner/subdir/software/miniball.html)<br>
+ </td>
+ </tr>
+ <tr>
+ <td colspan=2 height="25">
+ <b>User manual:</b> \ref cech_complex
+ </td>
+ </tr>
+</table>
-### Data structures
+### Rips complex
-#### Simplex tree
<table>
<tr>
<td width="35%" rowspan=2>
- \image html "Simplex_tree_representation.png"
+ \image html "rips_complex_representation.png"
</td>
<td width="50%">
- The simplex tree is an efficient and flexible
- data structure for representing general (filtered) simplicial complexes. The data structure
- is described in \cite boissonnatmariasimplextreealgorithmica .
+ Rips complex is a simplicial complex constructed from a one skeleton graph.<br>
+ The filtration value of each edge is computed from a user-given distance function and is inserted until a
+ user-given threshold value.<br>
+ This complex can be built from a point cloud and a distance function, or from a distance matrix.
</td>
<td width="15%">
- <b>Author:</b> Cl&eacute;ment Maria<br>
- <b>Introduced in:</b> GUDHI 1.0.0<br>
+ <b>Author:</b> Cl&eacute;ment Maria, Pawel Dlotko, Vincent Rouvreau, Marc Glisse<br>
+ <b>Introduced in:</b> GUDHI 2.0.0<br>
<b>Copyright:</b> MIT<br>
</td>
</tr>
<tr>
<td colspan=2 height="25">
- <b>User manual:</b> \ref simplex_tree
+ <b>User manual:</b> \ref rips_complex
</td>
</tr>
</table>
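Editor's note: the Rips description above is moved, not rewritten. For reference, a minimal construction sketch in the C++ API described by that table (editor's illustration; the point values, the 12.0 threshold and the dimension bound are arbitrary):

// Editor's sketch (not part of the patch): Rips complex from a point cloud.
#include <gudhi/Rips_complex.h>
#include <gudhi/Simplex_tree.h>
#include <gudhi/distance_functions.h>
#include <vector>

int main() {
  using Simplex_tree = Gudhi::Simplex_tree<>;
  using Filtration_value = Simplex_tree::Filtration_value;
  using Rips_complex = Gudhi::rips_complex::Rips_complex<Filtration_value>;
  using Point = std::vector<double>;

  std::vector<Point> points = {{1., 1.}, {7., 0.}, {4., 6.}, {9., 6.}};
  Filtration_value threshold = 12.0;  // edges longer than this are not inserted

  Rips_complex rips(points, threshold, Gudhi::Euclidean_distance());
  Simplex_tree stree;
  rips.create_complex(stree, 2);      // expand the one-skeleton up to dimension 2
  return 0;
}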
-#### Skeleton blocker
+### Witness complex
<table>
<tr>
<td width="35%" rowspan=2>
- \image html "ds_representation.png"
+ \image html "Witness_complex_representation.png"
</td>
<td width="50%">
- The Skeleton-Blocker data-structure proposes a light encoding for simplicial complexes by storing only an *implicit*
- representation of its simplices \cite socg_blockers_2011,\cite blockers2012. Intuitively, it just stores the
- 1-skeleton of a simplicial complex with a graph and the set of its "missing faces" that is very small in practice.
- This data-structure handles all simplicial complexes operations such as simplex enumeration or simplex removal but
- operations that are particularly efficient are operations that do not require simplex enumeration such as edge
- iteration, link computation or simplex contraction.
+ Witness complex \f$ Wit(W,L) \f$ is a simplicial complex defined on two sets of points in \f$\mathbb{R}^D\f$.
+ The data structure is described in \cite boissonnatmariasimplextreealgorithmica .
</td>
<td width="15%">
- <b>Author:</b> David Salinas<br>
- <b>Introduced in:</b> GUDHI 1.1.0<br>
- <b>Copyright:</b> MIT<br>
+ <b>Author:</b> Siargey Kachanovich<br>
+ <b>Introduced in:</b> GUDHI 1.3.0<br>
+ <b>Copyright:</b> MIT ([GPL v3](../../licensing/) for Euclidean version)<br>
+ <b>Euclidean version requires:</b> \ref eigen &ge; 3.1.0 and \ref cgal &ge; 4.11.0
</td>
</tr>
<tr>
<td colspan=2 height="25">
- <b>User manual:</b> \ref skbl
+ <b>User manual:</b> \ref witness_complex
</td>
</tr>
</table>
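A minimal sketch of the Euclidean flavour through the Python binding; the witness coordinates are arbitrary placeholders, pick_n_random_points comes from the subsampling utilities, and the EuclideanWitnessComplex keyword names are assumed from the witness complex user manual:

    import gudhi

    witnesses = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0], [0.5, 0.5], [0.2, 0.8]]
    landmarks = gudhi.pick_n_random_points(points=witnesses, nb_points=3)

    witness_complex = gudhi.EuclideanWitnessComplex(landmarks=landmarks, witnesses=witnesses)
    simplex_tree = witness_complex.create_simplex_tree(max_alpha_square=1.0)
    print(simplex_tree.num_simplices())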
-#### Toplex Map
-
+### Cover Complexes
<table>
<tr>
<td width="35%" rowspan=2>
- \image html "map.png"
+ \image html "gicvisu.jpg"
</td>
<td width="50%">
- The Toplex map data structure is composed firstly of a raw storage of toplices (the maximal simplices)
- and secondly of a map which associate any vertex to a set of pointers toward all toplices
- containing this vertex.
+ Nerves and Graph Induced Complexes are cover complexes, i.e. simplicial complexes that provably contain
+ topological information about the input data. They can be computed with a cover of the
+      data, which may come, for instance, from the preimage of a family of intervals covering the image
+ of a scalar-valued function defined on the data. <br>
</td>
<td width="15%">
- <b>Author:</b> Fran&ccedil;ois Godi<br>
+ <b>Author:</b> Mathieu Carri&egrave;re<br>
<b>Introduced in:</b> GUDHI 2.1.0<br>
- <b>Copyright:</b> MIT<br>
+ <b>Copyright:</b> MIT [(GPL v3)](../../licensing/)<br>
+ <b>Requires:</b> \ref cgal &ge; 4.11.0
</td>
</tr>
<tr>
<td colspan=2 height="25">
- <b>User manual:</b> \ref toplex_map
+ <b>User manual:</b> \ref cover_complex
</td>
</tr>
</table>
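A rough sketch of the Python workflow for a Nerve built from files; the file names are placeholders, and find_simplices / create_simplex_tree are assumed from the cover complex user manual rather than taken from this diff:

    import gudhi

    cover_complex = gudhi.CoverComplex()
    cover_complex.set_type('Nerve')                      # or 'GIC'
    cover_complex.set_verbose(True)
    cover_complex.read_point_cloud('points.off')         # placeholder OFF file
    cover_complex.set_graph_from_file('points.graph')    # placeholder graph file
    cover_complex.set_cover_from_file('points.cover')    # placeholder cover file
    cover_complex.find_simplices()
    simplex_tree = cover_complex.create_simplex_tree()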
-### Basic operations
-
-#### Contraction
+### Tangential complex
<table>
<tr>
<td width="35%" rowspan=2>
- \image html "sphere_contraction_representation.png"
+ \image html "tc_examples.png"
</td>
<td width="50%">
- The purpose of this package is to offer a user-friendly interface for edge contraction simplification of huge
- simplicial complexes. It uses the \ref skbl data-structure whose size remains small during simplification of most
- used geometrical complexes of topological data analysis such as the Rips or the Delaunay complexes. In practice,
- the size of this data-structure is even much lower than the total number of simplices.
+ A Tangential Delaunay complex is a <a target="_blank" href="https://en.wikipedia.org/wiki/Simplicial_complex">simplicial complex</a>
+ designed to reconstruct a \f$ k \f$-dimensional manifold embedded in \f$ d \f$-dimensional Euclidean space.
+ The input is a point sample coming from an unknown manifold.
+ The running time depends only linearly on the extrinsic dimension \f$ d \f$
+ and exponentially on the intrinsic dimension \f$ k \f$.
</td>
<td width="15%">
- <b>Author:</b> David Salinas<br>
- <b>Introduced in:</b> GUDHI 1.1.0<br>
- <b>Copyright:</b> MIT [(LGPL v3)](../../licensing/)<br>
- <b>Requires:</b> \ref cgal &ge; 4.11.0
+ <b>Author:</b> Cl&eacute;ment Jamin<br>
+ <b>Introduced in:</b> GUDHI 2.0.0<br>
+ <b>Copyright:</b> MIT [(GPL v3)](../../licensing/)<br>
+ <b>Requires:</b> \ref eigen &ge; 3.1.0 and \ref cgal &ge; 4.11.0
</td>
</tr>
<tr>
<td colspan=2 height="25">
- <b>User manual:</b> \ref contr
+ <b>User manual:</b> \ref tangential_complex
</td>
</tr>
</table>
@@ -305,36 +328,6 @@
</tr>
</table>
-## Manifold reconstruction {#ManifoldReconstruction}
-
-### Tangential complex
-
-<table>
- <tr>
- <td width="35%" rowspan=2>
- \image html "tc_examples.png"
- </td>
- <td width="50%">
- A Tangential Delaunay complex is a <a target="_blank" href="https://en.wikipedia.org/wiki/Simplicial_complex">simplicial complex</a>
- designed to reconstruct a \f$ k \f$-dimensional manifold embedded in \f$ d \f$-dimensional Euclidean space.
- The input is a point sample coming from an unknown manifold.
- The running time depends only linearly on the extrinsic dimension \f$ d \f$
- and exponentially on the intrinsic dimension \f$ k \f$.
- </td>
- <td width="15%">
- <b>Author:</b> Cl&eacute;ment Jamin<br>
- <b>Introduced in:</b> GUDHI 2.0.0<br>
- <b>Copyright:</b> MIT [(GPL v3)](../../licensing/)<br>
- <b>Requires:</b> \ref eigen &ge; 3.1.0 and \ref cgal &ge; 4.11.0
- </td>
- </tr>
- <tr>
- <td colspan=2 height="25">
- <b>User manual:</b> \ref tangential_complex
- </td>
- </tr>
-</table>
-
## Topological descriptors tools {#TopologicalDescriptorsTools}
### Bottleneck distance
@@ -390,3 +383,26 @@
</td>
</tr>
</table>
+
+## Point cloud utilities {#PointCloudUtils}
+
+<table>
+ <tr>
+ <td width="35%" rowspan=2>
+ \f$(x_1,\ldots,x_d)\f$
+ </td>
+ <td width="50%">
+      Various tools to handle point clouds: spatial searching, subsampling, etc.
+ </td>
+ <td width="15%">
+ <b>Author:</b> Clément Jamin<br>
+ <b>Introduced in:</b> GUDHI 1.3.0<br>
+ <b>Copyright:</b> MIT [(GPL v3)](../../licensing/)<br>
+ </td>
+ </tr>
+ <tr>
+ <td colspan=2 height="25">
+ <b>Manuals:</b> \ref spatial_searching, \ref subsampling
+ </td>
+ </tr>
+</table>
diff --git a/src/common/include/gudhi/Unitary_tests_utils.h b/src/common/include/gudhi/Unitary_tests_utils.h
index 7d039304..9b86460a 100644
--- a/src/common/include/gudhi/Unitary_tests_utils.h
+++ b/src/common/include/gudhi/Unitary_tests_utils.h
@@ -26,4 +26,15 @@ void GUDHI_TEST_FLOAT_EQUALITY_CHECK(FloatingType a, FloatingType b,
BOOST_CHECK(std::fabs(a - b) <= epsilon);
}
+// That's the usual x86 issue where a+b==a+b can return false (without any NaN) because one of them was stored in
+// memory (and thus rounded to 64 bits) while the other is still in a register (80 bits).
+template<typename FloatingType >
+FloatingType GUDHI_PROTECT_FLOAT(FloatingType value) {
+ volatile FloatingType protected_value = value;
+#ifdef DEBUG_TRACES
+ std::cout << "GUDHI_PROTECT_FLOAT - " << protected_value << std::endl;
+#endif
+ return protected_value;
+}
+
#endif // UNITARY_TESTS_UTILS_H_
diff --git a/src/python/CMakeLists.txt b/src/python/CMakeLists.txt
index 7f9ff38f..8c36f7ee 100644
--- a/src/python/CMakeLists.txt
+++ b/src/python/CMakeLists.txt
@@ -32,6 +32,10 @@ function( add_gudhi_debug_info DEBUG_INFO )
endfunction( add_gudhi_debug_info )
if(PYTHONINTERP_FOUND)
+ if(PYBIND11_FOUND)
+ add_gudhi_debug_info("Pybind11 version ${PYBIND11_VERSION}")
+ set(GUDHI_PYTHON_MODULES_EXTRA "${GUDHI_PYTHON_MODULES_EXTRA}'hera', ")
+ endif()
if(CYTHON_FOUND)
set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'off_reader', ")
set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'simplex_tree', ")
@@ -87,6 +91,7 @@ if(PYTHONINTERP_FOUND)
endif(MSVC)
if(CMAKE_COMPILER_IS_GNUCXX)
set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-frounding-math', ")
+ set(GUDHI_PYBIND11_EXTRA_COMPILE_ARGS "${GUDHI_PYBIND11_EXTRA_COMPILE_ARGS}'-fvisibility=hidden', ")
endif(CMAKE_COMPILER_IS_GNUCXX)
if (CMAKE_CXX_COMPILER_ID MATCHES Intel)
set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-fp-model strict', ")
@@ -115,9 +120,9 @@ if(PYTHONINTERP_FOUND)
set(GUDHI_PYTHON_MODULES_TO_COMPILE "${GUDHI_PYTHON_MODULES_TO_COMPILE}'nerve_gic', ")
endif ()
if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
+ set(GUDHI_PYTHON_MODULES_TO_COMPILE "${GUDHI_PYTHON_MODULES_TO_COMPILE}'alpha_complex', ")
set(GUDHI_PYTHON_MODULES_TO_COMPILE "${GUDHI_PYTHON_MODULES_TO_COMPILE}'subsampling', ")
set(GUDHI_PYTHON_MODULES_TO_COMPILE "${GUDHI_PYTHON_MODULES_TO_COMPILE}'tangential_complex', ")
- set(GUDHI_PYTHON_MODULES_TO_COMPILE "${GUDHI_PYTHON_MODULES_TO_COMPILE}'alpha_complex', ")
set(GUDHI_PYTHON_MODULES_TO_COMPILE "${GUDHI_PYTHON_MODULES_TO_COMPILE}'euclidean_witness_complex', ")
set(GUDHI_PYTHON_MODULES_TO_COMPILE "${GUDHI_PYTHON_MODULES_TO_COMPILE}'euclidean_strong_witness_complex', ")
endif ()
@@ -163,10 +168,21 @@ if(PYTHONINTERP_FOUND)
set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_USE_GMPXX', ")
add_GUDHI_PYTHON_lib("${GMPXX_LIBRARIES}")
set(GUDHI_PYTHON_LIBRARY_DIRS "${GUDHI_PYTHON_LIBRARY_DIRS}'${GMPXX_LIBRARIES_DIR}', ")
- message("** Add gmpxx ${GMPXX_LIBRARIES_DIR}")
+ message("** Add gmpxx ${GMPXX_LIBRARIES_DIR}")
endif(GMPXX_FOUND)
endif(GMP_FOUND)
- endif(CGAL_FOUND)
+ if(MPFR_FOUND)
+ add_gudhi_debug_info("MPFR_LIBRARIES = ${MPFR_LIBRARIES}")
+ set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_USE_MPFR', ")
+ add_GUDHI_PYTHON_lib("${MPFR_LIBRARIES}")
+ # In case CGAL is not header only, all MPFR variables are set except MPFR_LIBRARIES_DIR - Just set it
+ if(NOT MPFR_LIBRARIES_DIR)
+ get_filename_component(MPFR_LIBRARIES_DIR ${MPFR_LIBRARIES} PATH)
+ endif(NOT MPFR_LIBRARIES_DIR)
+ set(GUDHI_PYTHON_LIBRARY_DIRS "${GUDHI_PYTHON_LIBRARY_DIRS}'${MPFR_LIBRARIES_DIR}', ")
+ message("** Add mpfr ${MPFR_LIBRARIES}")
+ endif(MPFR_FOUND)
+  endif(CGAL_FOUND)
# Specific for Mac
if (${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
@@ -231,7 +247,6 @@ if(PYTHONINTERP_FOUND)
COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/alpha_rips_persistence_bottleneck_distance.py"
-f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -t 0.15 -d 3)
-
if(MATPLOTLIB_FOUND AND NUMPY_FOUND)
# Tangential
add_test(NAME tangential_complex_plain_homology_from_off_file_example_py_test
@@ -307,7 +322,6 @@ if(PYTHONINTERP_FOUND)
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/alpha_complex_from_points_example.py")
-
if(MATPLOTLIB_FOUND AND NUMPY_FOUND)
add_test(NAME alpha_complex_diagram_persistence_from_off_file_example_py_test
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
@@ -315,9 +329,7 @@ if(PYTHONINTERP_FOUND)
${PYTHON_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/example/alpha_complex_diagram_persistence_from_off_file_example.py"
--no-diagram -f ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -a 0.6)
endif()
-
add_gudhi_py_test(test_alpha_complex)
-
endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
@@ -385,10 +397,10 @@ if(PYTHONINTERP_FOUND)
add_gudhi_py_test(test_reader_utils)
# Wasserstein
- if(OT_FOUND)
+ if(OT_FOUND AND PYBIND11_FOUND)
add_gudhi_py_test(test_wasserstein_distance)
add_gudhi_py_test(test_wasserstein_barycenter)
- endif(OT_FOUND)
+ endif()
# Representations
if(SKLEARN_FOUND AND MATPLOTLIB_FOUND)
@@ -402,32 +414,37 @@ if(PYTHONINTERP_FOUND)
if(SCIPY_FOUND)
if(SKLEARN_FOUND)
if(OT_FOUND)
- if(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
- set (GUDHI_SPHINX_MESSAGE "Generating API documentation with Sphinx in ${CMAKE_CURRENT_BINARY_DIR}/sphinx/")
- # User warning - Sphinx is a static pages generator, and configured to work fine with user_version
- # Images and biblio warnings because not found on developper version
- if (GUDHI_PYTHON_PATH STREQUAL "src/python")
- set (GUDHI_SPHINX_MESSAGE "${GUDHI_SPHINX_MESSAGE} \n WARNING : Sphinx is configured for user version, you run it on developper version. Images and biblio will miss")
- endif()
- # sphinx target requires gudhi.so, because conf.py reads gudhi version from it
- add_custom_target(sphinx
- WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/doc
- COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
- ${SPHINX_PATH} -b html ${CMAKE_CURRENT_SOURCE_DIR}/doc ${CMAKE_CURRENT_BINARY_DIR}/sphinx
- DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/gudhi.so"
- COMMENT "${GUDHI_SPHINX_MESSAGE}" VERBATIM)
+ if(PYBIND11_FOUND)
+ if(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
+ set (GUDHI_SPHINX_MESSAGE "Generating API documentation with Sphinx in ${CMAKE_CURRENT_BINARY_DIR}/sphinx/")
+          # User warning - Sphinx is a static page generator, configured to work with the user_version target
+          # Image and bibliography warnings appear on the developer version because those files are not found there
+ if (GUDHI_PYTHON_PATH STREQUAL "src/python")
+            set (GUDHI_SPHINX_MESSAGE "${GUDHI_SPHINX_MESSAGE} \n WARNING : Sphinx is configured for the user version, you are running it on the developer version. Images and bibliography will be missing")
+ endif()
+ # sphinx target requires gudhi.so, because conf.py reads gudhi version from it
+ add_custom_target(sphinx
+ WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/doc
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${SPHINX_PATH} -b html ${CMAKE_CURRENT_SOURCE_DIR}/doc ${CMAKE_CURRENT_BINARY_DIR}/sphinx
+ DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/gudhi.so"
+ COMMENT "${GUDHI_SPHINX_MESSAGE}" VERBATIM)
- add_test(NAME sphinx_py_test
- WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
- COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
- ${SPHINX_PATH} -b doctest ${CMAKE_CURRENT_SOURCE_DIR}/doc ${CMAKE_CURRENT_BINARY_DIR}/doctest)
+ add_test(NAME sphinx_py_test
+ WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+ COMMAND ${CMAKE_COMMAND} -E env "PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}"
+ ${SPHINX_PATH} -b doctest ${CMAKE_CURRENT_SOURCE_DIR}/doc ${CMAKE_CURRENT_BINARY_DIR}/doctest)
- # Set missing or not modules
- set(GUDHI_MODULES ${GUDHI_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MODULES")
- else(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
- message("++ Python documentation module will not be compiled because it requires a Eigen3 and CGAL version >= 4.11.0")
+          # Record whether the module is available or missing
+ set(GUDHI_MODULES ${GUDHI_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MODULES")
+ else(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
+          message("++ Python documentation module will not be compiled because it requires Eigen3 and a CGAL version >= 4.11.0")
+ set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES")
+ endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
+ else(PYBIND11_FOUND)
+ message("++ Python documentation module will not be compiled because pybind11 was not found")
set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES")
- endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
+ endif(PYBIND11_FOUND)
else(OT_FOUND)
message("++ Python documentation module will not be compiled because POT was not found")
set(GUDHI_MISSING_MODULES ${GUDHI_MISSING_MODULES} "python-documentation" CACHE INTERNAL "GUDHI_MISSING_MODULES")
diff --git a/src/python/doc/_templates/layout.html b/src/python/doc/_templates/layout.html
index 2f2d9c72..a672a281 100644
--- a/src/python/doc/_templates/layout.html
+++ b/src/python/doc/_templates/layout.html
@@ -201,7 +201,7 @@
<a href="#">Download</a>
<ul class="dropdown">
<li><a href="/licensing/">Licensing</a></li>
- <li><a href="https://gforge.inria.fr/frs/download.php/latestzip/5253/library-latest.zip" target="_blank">Get the latest sources</a></li>
+ <li><a href="https://github.com/GUDHI/gudhi-devel/releases/latest" target="_blank">Get the latest sources</a></li>
<li><a href="/conda/">Conda package</a></li>
<li><a href="/dockerfile/">Dockerfile</a></li>
</ul>
diff --git a/src/python/doc/alpha_complex_sum.inc b/src/python/doc/alpha_complex_sum.inc
index c5ba9dc7..b5af0d27 100644
--- a/src/python/doc/alpha_complex_sum.inc
+++ b/src/python/doc/alpha_complex_sum.inc
@@ -5,16 +5,13 @@
| .. figure:: | Alpha complex is a simplicial complex constructed from the finite | :Author: Vincent Rouvreau |
| ../../doc/Alpha_complex/alpha_complex_representation.png | cells of a Delaunay Triangulation. | |
| :alt: Alpha complex representation | | :Introduced in: GUDHI 2.0.0 |
- | :figclass: align-center | The filtration value of each simplex is computed as the square of the | |
- | | circumradius of the simplex if the circumsphere is empty (the simplex | :Copyright: MIT (`GPL v3 </licensing/>`_) |
- | | is then said to be Gabriel), and as the minimum of the filtration | |
- | | values of the codimension 1 cofaces that make it not Gabriel | :Requires: `Eigen <installation.html#eigen>`__ :math:`\geq` 3.1.0 and `CGAL <installation.html#cgal>`__ :math:`\geq` 4.11.0 |
- | | otherwise. All simplices that have a filtration value strictly | |
- | | greater than a given alpha squared value are not inserted into the | |
- | | complex. | |
+ | :figclass: align-center | The filtration value of each simplex is computed as the **square** of | |
+ | | the circumradius of the simplex if the circumsphere is empty (the | :Copyright: MIT (`GPL v3 </licensing/>`_) |
+ | | simplex is then said to be Gabriel), and as the minimum of the | |
+ | | filtration values of the codimension 1 cofaces that make it not | :Requires: `Eigen <installation.html#eigen>`__ :math:`\geq` 3.1.0 and `CGAL <installation.html#cgal>`__ :math:`\geq` 4.11.0 |
+ | | Gabriel otherwise. | |
| | | |
- | | This package requires having CGAL version 4.7 or higher (4.8.1 is | |
- | | advised for better performance). | |
+ | | For performances reasons, it is advised to use CGAL ≥ 5.0.0. | |
+----------------------------------------------------------------+------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------+
| * :doc:`alpha_complex_user` | * :doc:`alpha_complex_ref` |
+----------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
diff --git a/src/python/doc/alpha_complex_user.rst b/src/python/doc/alpha_complex_user.rst
index b7e69e12..60319e84 100644
--- a/src/python/doc/alpha_complex_user.rst
+++ b/src/python/doc/alpha_complex_user.rst
@@ -16,7 +16,8 @@ Definition
Remarks
^^^^^^^
-When an :math:`\alpha`-complex is constructed with an infinite value of :math:`\alpha`, the complex is a Delaunay complex (with special filtration values).
+When an :math:`\alpha`-complex is constructed with an infinite value of :math:`\alpha^2`,
+the complex is a Delaunay complex (with special filtration values).
Example from points
-------------------
@@ -137,19 +138,20 @@ sets the filtration value (0 in case of a vertex - propagation will have no effe
Non decreasing filtration values
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-As the squared radii computed by CGAL are an approximation, it might happen that these alpha squared values do not
-quite define a proper filtration (i.e. non-decreasing with respect to inclusion).
+As the squared radii computed by CGAL are an approximation, it might happen that these
+:math:`\alpha^2` values do not quite define a proper filtration (i.e. non-decreasing with
+respect to inclusion).
We fix that up by calling :func:`~gudhi.SimplexTree.make_filtration_non_decreasing` (cf.
`C++ version <http://gudhi.gforge.inria.fr/doc/latest/index.html>`_).
Prune above given filtration value
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-The simplex tree is pruned from the given maximum alpha squared value (cf.
+The simplex tree is pruned from the given maximum :math:`\alpha^2` value (cf.
:func:`~gudhi.SimplexTree.prune_above_filtration`). Note that this does not provide any kind
of speed-up, since we always first build the full filtered complex, so it is recommended not to use
:paramref:`~gudhi.AlphaComplex.create_simplex_tree.max_alpha_square`.
-In the following example, a threshold of 59 is used.
+In the following example, a threshold of :math:`\alpha^2 = 32.0` is used.
Example from OFF file
@@ -166,7 +168,7 @@ Then, it is asked to display information about the alpha complex:
import gudhi
alpha_complex = gudhi.AlphaComplex(off_file=gudhi.__root_source_dir__ + \
'/data/points/alphacomplexdoc.off')
- simplex_tree = alpha_complex.create_simplex_tree(max_alpha_square=59.0)
+ simplex_tree = alpha_complex.create_simplex_tree(max_alpha_square=32.0)
result_str = 'Alpha complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \
repr(simplex_tree.num_simplices()) + ' simplices - ' + \
repr(simplex_tree.num_vertices()) + ' vertices.'
@@ -179,7 +181,7 @@ the program output is:
.. testoutput::
- Alpha complex is of dimension 2 - 23 simplices - 7 vertices.
+ Alpha complex is of dimension 2 - 20 simplices - 7 vertices.
[0] -> 0.00
[1] -> 0.00
[2] -> 0.00
@@ -200,9 +202,6 @@ the program output is:
[4, 6] -> 22.74
[4, 5, 6] -> 22.74
[3, 6] -> 30.25
- [2, 6] -> 36.50
- [2, 3, 6] -> 36.50
- [2, 4, 6] -> 37.24
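For completeness, a small sketch combining the two simplex tree operations discussed above, make_filtration_non_decreasing and prune_above_filtration; the points and the 0.5 threshold are arbitrary placeholders:

    import gudhi

    alpha_complex = gudhi.AlphaComplex(points=[[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]])
    simplex_tree = alpha_complex.create_simplex_tree()

    # Repair filtration values that the CGAL approximation may have left non-monotone.
    simplex_tree.make_filtration_non_decreasing()

    # Drop every simplex whose filtration value exceeds the chosen alpha^2.
    simplex_tree.prune_above_filtration(0.5)
    print(simplex_tree.num_simplices())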
CGAL citations
==============
diff --git a/src/python/doc/cubical_complex_user.rst b/src/python/doc/cubical_complex_user.rst
index b13b500e..56cf0170 100644
--- a/src/python/doc/cubical_complex_user.rst
+++ b/src/python/doc/cubical_complex_user.rst
@@ -142,8 +142,7 @@ Or it can be defined as follows:
.. testcode::
from gudhi import PeriodicCubicalComplex as pcc
- periodic_cc = pcc(dimensions=[3,3],
- top_dimensional_cells= [0, 0, 0, 0, 1, 0, 0, 0, 0],
+ periodic_cc = pcc(top_dimensional_cells = [[0, 0, 0], [0, 1, 0], [0, 0, 0]],
periodic_dimensions=[True, False])
result_str = 'Periodic cubical complex is of dimension ' + repr(periodic_cc.dimension()) + ' - ' + \
repr(periodic_cc.num_simplices()) + ' simplices.'
diff --git a/src/python/doc/reader_utils_ref.rst b/src/python/doc/diagram_readers_ref.rst
index f3ecebad..c79daf9c 100644
--- a/src/python/doc/reader_utils_ref.rst
+++ b/src/python/doc/diagram_readers_ref.rst
@@ -2,13 +2,9 @@
.. To get rid of WARNING: document isn't included in any toctree
-=============================
-Reader utils reference manual
-=============================
-
-.. autofunction:: gudhi.read_off
-
-.. autofunction:: gudhi.read_lower_triangular_matrix_from_csv_file
+================================
+Diagram readers reference manual
+================================
.. autofunction:: gudhi.read_persistence_intervals_grouped_by_dimension
diff --git a/src/python/doc/index.rst b/src/python/doc/index.rst
index c36a578f..3387a64f 100644
--- a/src/python/doc/index.rst
+++ b/src/python/doc/index.rst
@@ -6,8 +6,8 @@ GUDHI Python modules documentation
:alt: Gudhi banner
:figclass: align-center
-Complexes
-*********
+Data structures for cell complexes
+**********************************
Cubical complexes
=================
@@ -17,18 +17,26 @@ Cubical complexes
Simplicial complexes
====================
+Simplex tree
+------------
+
+.. include:: simplex_tree_sum.inc
+
+Filtrations and reconstructions
+*******************************
+
Alpha complex
--------------
+=============
.. include:: alpha_complex_sum.inc
Rips complex
-------------
+============
.. include:: rips_complex_sum.inc
Witness complex
----------------
+===============
.. include:: witness_complex_sum.inc
@@ -37,16 +45,10 @@ Cover complexes
.. include:: nerve_gic_complex_sum.inc
-Data structures and basic operations
-************************************
-
-Data structures
-===============
-
-Simplex tree
-------------
+Tangential complex
+==================
-.. include:: simplex_tree_sum.inc
+.. include:: tangential_complex_sum.inc
Topological descriptors computation
***********************************
@@ -56,15 +58,6 @@ Persistence cohomology
.. include:: persistent_cohomology_sum.inc
-Manifold reconstruction
-***********************
-
-Tangential complex
-==================
-
-.. include:: tangential_complex_sum.inc
-
-
Topological descriptors tools
*****************************
@@ -88,6 +81,11 @@ Persistence graphical tools
.. include:: persistence_graphical_tools_sum.inc
+Point cloud utilities
+*********************
+
+.. include:: point_cloud_sum.inc
+
Bibliography
************
diff --git a/src/python/doc/installation.rst b/src/python/doc/installation.rst
index 50a697c7..d459145b 100644
--- a/src/python/doc/installation.rst
+++ b/src/python/doc/installation.rst
@@ -14,10 +14,11 @@ Compiling
*********
The library uses c++14 and requires `Boost <https://www.boost.org/>`_ ≥ 1.56.0,
`CMake <https://www.cmake.org/>`_ ≥ 3.1 to generate makefiles,
-`NumPy <http://numpy.org>`_ and `Cython <https://www.cython.org/>`_ to compile
+`NumPy <http://numpy.org>`_, `Cython <https://www.cython.org/>`_ and
+`pybind11 <https://github.com/pybind/pybind11>`_ to compile
the GUDHI Python module.
It is a multi-platform library and compiles on Linux, Mac OSX and Visual
-Studio 2015.
+Studio 2017.
On `Windows <https://wiki.python.org/moin/WindowsCompilers>`_ , only Python
≥ 3.5 are available because of the required Visual Studio version.
@@ -257,6 +258,13 @@ The :doc:`Wasserstein distance </wasserstein_distance_user>`
module requires `POT <https://pot.readthedocs.io/>`_, a library that provides
several solvers for optimization problems related to Optimal Transport.
+Scikit-learn
+============
+
+The :doc:`persistence representations </representations>` module requires
+`scikit-learn <https://scikit-learn.org/>`_, a Python-based ecosystem of
+open-source software for machine learning.
+
SciPy
=====
diff --git a/src/python/doc/persistence_graphical_tools_user.rst b/src/python/doc/persistence_graphical_tools_user.rst
index f41a926b..80002db6 100644
--- a/src/python/doc/persistence_graphical_tools_user.rst
+++ b/src/python/doc/persistence_graphical_tools_user.rst
@@ -24,7 +24,7 @@ This function can display the persistence result as a barcode:
import gudhi
off_file = gudhi.__root_source_dir__ + '/data/points/tore3D_300.off'
- point_cloud = gudhi.read_off(off_file=off_file)
+ point_cloud = gudhi.read_points_from_off_file(off_file=off_file)
rips_complex = gudhi.RipsComplex(points=point_cloud, max_edge_length=0.7)
simplex_tree = rips_complex.create_simplex_tree(max_dimension=3)
diff --git a/src/python/doc/point_cloud.rst b/src/python/doc/point_cloud.rst
new file mode 100644
index 00000000..d668428a
--- /dev/null
+++ b/src/python/doc/point_cloud.rst
@@ -0,0 +1,22 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
+============================
+Point cloud utilities manual
+============================
+
+File Readers
+------------
+
+.. autofunction:: gudhi.read_points_from_off_file
+
+.. autofunction:: gudhi.read_lower_triangular_matrix_from_csv_file
+
+Subsampling
+-----------
+
+.. automodule:: gudhi.subsampling
+ :members:
+ :special-members:
+ :show-inheritance:
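A minimal usage sketch of these utilities; the OFF path and the subsample size are placeholders:

    import gudhi

    # Read a point cloud from an OFF file (placeholder path).
    points = gudhi.read_points_from_off_file(off_file='data/points/tore3D_300.off')

    # Keep a random subsample of 100 points.
    subsample = gudhi.pick_n_random_points(points=points, nb_points=100)
    print(len(subsample))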
diff --git a/src/python/doc/point_cloud_sum.inc b/src/python/doc/point_cloud_sum.inc
new file mode 100644
index 00000000..85d52de7
--- /dev/null
+++ b/src/python/doc/point_cloud_sum.inc
@@ -0,0 +1,15 @@
+.. table::
+ :widths: 30 50 20
+
+ +----------------------------------------------------------------+------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------+
+ | | :math:`(x_1, x_2, \ldots, x_d)` | Utilities to process point clouds: read from file, subsample, etc. | :Author: Vincent Rouvreau |
+ | | :math:`(y_1, y_2, \ldots, y_d)` | | |
+ | | | :Introduced in: GUDHI 2.0.0 |
+ | | | |
+ | | | :Copyright: MIT (`GPL v3 </licensing/>`_) |
+ | | Parts of this package require CGAL. | |
+ | | | :Requires: `Eigen <installation.html#eigen>`__ :math:`\geq` 3.1.0 and `CGAL <installation.html#cgal>`__ :math:`\geq` 4.11.0 |
+ | | | |
+ +----------------------------------------------------------------+------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------+
+ | * :doc:`point_cloud` |
+ +----------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
diff --git a/src/python/doc/rips_complex_user.rst b/src/python/doc/rips_complex_user.rst
index a8659542..a27573e8 100644
--- a/src/python/doc/rips_complex_user.rst
+++ b/src/python/doc/rips_complex_user.rst
@@ -136,7 +136,8 @@ Finally, it is asked to display information about the Rips complex.
.. testcode::
import gudhi
- point_cloud = gudhi.read_off(off_file=gudhi.__root_source_dir__ + '/data/points/alphacomplexdoc.off')
+ off_file = gudhi.__root_source_dir__ + '/data/points/alphacomplexdoc.off'
+ point_cloud = gudhi.read_points_from_off_file(off_file = off_file)
rips_complex = gudhi.RipsComplex(points=point_cloud, max_edge_length=12.0)
simplex_tree = rips_complex.create_simplex_tree(max_dimension=1)
result_str = 'Rips complex is of dimension ' + repr(simplex_tree.dimension()) + ' - ' + \
diff --git a/src/python/doc/wasserstein_distance_sum.inc b/src/python/doc/wasserstein_distance_sum.inc
index ffd4d312..a97f428d 100644
--- a/src/python/doc/wasserstein_distance_sum.inc
+++ b/src/python/doc/wasserstein_distance_sum.inc
@@ -2,12 +2,12 @@
:widths: 30 50 20
+-----------------------------------------------------------------+----------------------------------------------------------------------+------------------------------------------------------------------+
- | .. figure:: | The p-Wasserstein distance measures the similarity between two | :Author: Theo Lacombe |
+ | .. figure:: | The q-Wasserstein distance measures the similarity between two | :Author: Theo Lacombe |
| ../../doc/Bottleneck_distance/perturb_pd.png | persistence diagrams. It's the minimum value c that can be achieved | |
| :figclass: align-center | by a perfect matching between the points of the two diagrams (+ all | :Introduced in: GUDHI 3.1.0 |
| | diagonal points), where the value of a matching is defined as the | |
- | Wasserstein distance is the p-th root of the sum of the | p-th root of the sum of all edge lengths to the power p. Edge lengths| :Copyright: MIT |
- | edge lengths to the power p. | are measured in norm q, for :math:`1 \leq q \leq \infty`. | |
+ | Wasserstein distance is the q-th root of the sum of the | q-th root of the sum of all edge lengths to the power q. Edge lengths| :Copyright: MIT |
+ | edge lengths to the power q. | are measured in norm p, for :math:`1 \leq p \leq \infty`. | |
| | | :Requires: Python Optimal Transport (POT) :math:`\geq` 0.5.1 |
+-----------------------------------------------------------------+----------------------------------------------------------------------+------------------------------------------------------------------+
| * :doc:`wasserstein_distance_user` | |
diff --git a/src/python/doc/wasserstein_distance_user.rst b/src/python/doc/wasserstein_distance_user.rst
index a049cfb5..94b454e2 100644
--- a/src/python/doc/wasserstein_distance_user.rst
+++ b/src/python/doc/wasserstein_distance_user.rst
@@ -9,17 +9,26 @@ Definition
.. include:: wasserstein_distance_sum.inc
-This implementation is based on ideas from "Large Scale Computation of Means and Cluster for Persistence Diagrams via Optimal Transport".
+Functions
+---------
+This implementation uses the Python Optimal Transport library and is based on
+ideas from "Large Scale Computation of Means and Clusters for Persistence
+Diagrams using Optimal Transport" :cite:`10.5555/3327546.3327645`.
-Function
---------
.. autofunction:: gudhi.wasserstein.wasserstein_distance
+This other implementation comes from `Hera
+<https://bitbucket.org/grey_narn/hera/src/master/>`_ (BSD-3-Clause) which is
+based on "Geometry Helps to Compare Persistence Diagrams"
+:cite:`Kerber:2017:GHC:3047249.3064175` by Michael Kerber, Dmitriy
+Morozov, and Arnur Nigmetov.
+
+.. autofunction:: gudhi.hera.wasserstein_distance
Basic example
-------------
-This example computes the 1-Wasserstein distance from 2 persistence diagrams with euclidean ground metric.
+This example computes the 1-Wasserstein distance between two persistence diagrams with the Euclidean ground metric.
Note that persistence diagrams must be submitted as (n x 2) numpy arrays and must not contain inf values.
.. testcode::
@@ -30,7 +39,7 @@ Note that persistence diagrams must be submitted as (n x 2) numpy arrays and mus
diag1 = np.array([[2.7, 3.7],[9.6, 14.],[34.2, 34.974]])
diag2 = np.array([[2.8, 4.45],[9.5, 14.1]])
- message = "Wasserstein distance value = " + '%.2f' % gudhi.wasserstein.wasserstein_distance(diag1, diag2, q=2., p=1.)
+ message = "Wasserstein distance value = " + '%.2f' % gudhi.wasserstein.wasserstein_distance(diag1, diag2, order=1., internal_p=2.)
print(message)
The output is:
diff --git a/src/python/doc/witness_complex_user.rst b/src/python/doc/witness_complex_user.rst
index 45ba5b3b..7087fa98 100644
--- a/src/python/doc/witness_complex_user.rst
+++ b/src/python/doc/witness_complex_user.rst
@@ -101,7 +101,7 @@ Let's start with a simple example, which reads an off point file and computes a
print("#####################################################################")
print("EuclideanWitnessComplex creation from points read in a OFF file")
- witnesses = gudhi.read_off(off_file=args.file)
+ witnesses = gudhi.read_points_from_off_file(off_file=args.file)
landmarks = gudhi.pick_n_random_points(points=witnesses, nb_points=args.number_of_landmarks)
message = "EuclideanWitnessComplex with max_edge_length=" + repr(args.max_alpha_square) + \
diff --git a/src/python/example/alpha_complex_from_points_example.py b/src/python/example/alpha_complex_from_points_example.py
index a746998c..844d7a82 100755
--- a/src/python/example/alpha_complex_from_points_example.py
+++ b/src/python/example/alpha_complex_from_points_example.py
@@ -52,4 +52,9 @@ print("star([0])=", simplex_tree.get_star([0]))
print("coface([0], 1)=", simplex_tree.get_cofaces([0], 1))
print("point[0]=", alpha_complex.get_point(0))
-print("point[5]=", alpha_complex.get_point(5))
+try:
+ print("point[5]=", alpha_complex.get_point(5))
+except IndexError:
+ pass
+else:
+ assert False
diff --git a/src/python/example/alpha_rips_persistence_bottleneck_distance.py b/src/python/example/alpha_rips_persistence_bottleneck_distance.py
index 086307ee..d5c33ec8 100755
--- a/src/python/example/alpha_rips_persistence_bottleneck_distance.py
+++ b/src/python/example/alpha_rips_persistence_bottleneck_distance.py
@@ -35,7 +35,7 @@ args = parser.parse_args()
with open(args.file, "r") as f:
first_line = f.readline()
if (first_line == "OFF\n") or (first_line == "nOFF\n"):
- point_cloud = gudhi.read_off(off_file=args.file)
+ point_cloud = gudhi.read_points_from_off_file(off_file=args.file)
print("#####################################################################")
print("RipsComplex creation from points read in a OFF file")
diff --git a/src/python/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py b/src/python/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py
index 0eedd140..4903667e 100755
--- a/src/python/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py
+++ b/src/python/example/euclidean_strong_witness_complex_diagram_persistence_from_off_file_example.py
@@ -47,7 +47,7 @@ with open(args.file, "r") as f:
print("#####################################################################")
print("EuclideanStrongWitnessComplex creation from points read in a OFF file")
- witnesses = gudhi.read_off(off_file=args.file)
+ witnesses = gudhi.read_points_from_off_file(off_file=args.file)
landmarks = gudhi.pick_n_random_points(
points=witnesses, nb_points=args.number_of_landmarks
)
diff --git a/src/python/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py b/src/python/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py
index 1fe55737..339a8577 100755
--- a/src/python/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py
+++ b/src/python/example/euclidean_witness_complex_diagram_persistence_from_off_file_example.py
@@ -46,7 +46,7 @@ with open(args.file, "r") as f:
print("#####################################################################")
print("EuclideanWitnessComplex creation from points read in a OFF file")
- witnesses = gudhi.read_off(off_file=args.file)
+ witnesses = gudhi.read_points_from_off_file(off_file=args.file)
landmarks = gudhi.pick_n_random_points(
points=witnesses, nb_points=args.number_of_landmarks
)
diff --git a/src/python/example/plot_rips_complex.py b/src/python/example/plot_rips_complex.py
index 1c878db1..214a3c0a 100755
--- a/src/python/example/plot_rips_complex.py
+++ b/src/python/example/plot_rips_complex.py
@@ -2,7 +2,7 @@
import numpy as np
import gudhi
-points = np.array(gudhi.read_off('../../data/points/Kl.off'))
+points = np.array(gudhi.read_points_from_off_file('../../data/points/Kl.off'))
rc = gudhi.RipsComplex(points=points, max_edge_length=.2)
st = rc.create_simplex_tree(max_dimension=2)
# We are only going to plot the triangles
diff --git a/src/python/example/rips_complex_diagram_persistence_from_off_file_example.py b/src/python/example/rips_complex_diagram_persistence_from_off_file_example.py
index b9074cf9..c757aca7 100755
--- a/src/python/example/rips_complex_diagram_persistence_from_off_file_example.py
+++ b/src/python/example/rips_complex_diagram_persistence_from_off_file_example.py
@@ -48,7 +48,7 @@ with open(args.file, "r") as f:
message = "RipsComplex with max_edge_length=" + repr(args.max_edge_length)
print(message)
- point_cloud = gudhi.read_off(off_file=args.file)
+ point_cloud = gudhi.read_points_from_off_file(off_file=args.file)
rips_complex = gudhi.RipsComplex(
points=point_cloud, max_edge_length=args.max_edge_length
)
diff --git a/src/python/gudhi/__init__.py.in b/src/python/gudhi/__init__.py.in
index 0c462b02..79e12fbc 100644
--- a/src/python/gudhi/__init__.py.in
+++ b/src/python/gudhi/__init__.py.in
@@ -1,5 +1,3 @@
-from importlib import import_module
-
# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
# Author(s): Vincent Rouvreau
@@ -9,6 +7,9 @@ from importlib import import_module
# Modification(s):
# - YYYY/MM Author: Description of the modification
+from importlib import import_module
+from sys import exc_info
+
__author__ = "GUDHI Editorial Board"
__copyright__ = "Copyright (C) 2016 Inria"
__license__ = "https://gudhi.inria.fr/licensing/"
@@ -17,9 +18,6 @@ __version__ = "@GUDHI_VERSION@"
__root_source_dir__ = "@CMAKE_SOURCE_DIR@"
__debug_info__ = @GUDHI_PYTHON_DEBUG_INFO@
-from sys import exc_info
-from importlib import import_module
-
__all__ = [@GUDHI_PYTHON_MODULES@ @GUDHI_PYTHON_MODULES_EXTRA@]
__available_modules = ''
diff --git a/src/python/gudhi/alpha_complex.pyx b/src/python/gudhi/alpha_complex.pyx
index 8f2c98d5..fff3e920 100644
--- a/src/python/gudhi/alpha_complex.pyx
+++ b/src/python/gudhi/alpha_complex.pyx
@@ -1,3 +1,12 @@
+# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+# Author(s): Vincent Rouvreau
+#
+# Copyright (C) 2016 Inria
+#
+# Modification(s):
+# - YYYY/MM Author: Description of the modification
+
from cython cimport numeric
from libcpp.vector cimport vector
from libcpp.utility cimport pair
@@ -9,26 +18,17 @@ import os
from gudhi.simplex_tree cimport *
from gudhi.simplex_tree import SimplexTree
-# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
-# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
-# Author(s): Vincent Rouvreau
-#
-# Copyright (C) 2016 Inria
-#
-# Modification(s):
-# - YYYY/MM Author: Description of the modification
-
__author__ = "Vincent Rouvreau"
__copyright__ = "Copyright (C) 2016 Inria"
__license__ = "GPL v3"
cdef extern from "Alpha_complex_interface.h" namespace "Gudhi":
cdef cppclass Alpha_complex_interface "Gudhi::alpha_complex::Alpha_complex_interface":
- Alpha_complex_interface(vector[vector[double]] points)
+ Alpha_complex_interface(vector[vector[double]] points) except +
# bool from_file is a workaround for cython to find the correct signature
- Alpha_complex_interface(string off_file, bool from_file)
- vector[double] get_point(int vertex)
- void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square)
+ Alpha_complex_interface(string off_file, bool from_file) except +
+ vector[double] get_point(int vertex) except +
+ void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square) except +
# AlphaComplex python interface
cdef class AlphaComplex:
@@ -66,10 +66,10 @@ cdef class AlphaComplex:
"""
# The real cython constructor
- def __cinit__(self, points=None, off_file=''):
+ def __cinit__(self, points = None, off_file = ''):
if off_file:
if os.path.isfile(off_file):
- self.thisptr = new Alpha_complex_interface(str.encode(off_file), True)
+ self.thisptr = new Alpha_complex_interface(off_file.encode('utf-8'), True)
else:
print("file " + off_file + " not found.")
else:
@@ -96,10 +96,9 @@ cdef class AlphaComplex:
:rtype: list of float
:returns: the point.
"""
- cdef vector[double] point = self.thisptr.get_point(vertex)
- return point
+ return self.thisptr.get_point(vertex)
- def create_simplex_tree(self, max_alpha_square=float('inf')):
+ def create_simplex_tree(self, max_alpha_square = float('inf')):
"""
:param max_alpha_square: The maximum alpha square threshold the
simplices shall not exceed. Default is set to infinity, and
diff --git a/src/python/gudhi/bottleneck.pyx b/src/python/gudhi/bottleneck.pyx
index c2361024..af011e88 100644
--- a/src/python/gudhi/bottleneck.pyx
+++ b/src/python/gudhi/bottleneck.pyx
@@ -1,8 +1,3 @@
-from cython cimport numeric
-from libcpp.vector cimport vector
-from libcpp.utility cimport pair
-import os
-
# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
# Author(s): Vincent Rouvreau
@@ -12,6 +7,11 @@ import os
# Modification(s):
# - YYYY/MM Author: Description of the modification
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.utility cimport pair
+import os
+
__author__ = "Vincent Rouvreau"
__copyright__ = "Copyright (C) 2016 Inria"
__license__ = "GPL v3"
diff --git a/src/python/gudhi/cubical_complex.pyx b/src/python/gudhi/cubical_complex.pyx
index 011c407c..cbeda014 100644
--- a/src/python/gudhi/cubical_complex.pyx
+++ b/src/python/gudhi/cubical_complex.pyx
@@ -1,12 +1,3 @@
-from cython cimport numeric
-from libcpp.vector cimport vector
-from libcpp.utility cimport pair
-from libcpp.string cimport string
-from libcpp cimport bool
-import os
-
-from numpy import array as np_array
-
# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
# Author(s): Vincent Rouvreau
@@ -16,6 +7,15 @@ from numpy import array as np_array
# Modification(s):
# - YYYY/MM Author: Description of the modification
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.utility cimport pair
+from libcpp.string cimport string
+from libcpp cimport bool
+import os
+
+import numpy as np
+
__author__ = "Vincent Rouvreau"
__copyright__ = "Copyright (C) 2016 Inria"
__license__ = "MIT"
@@ -47,7 +47,7 @@ cdef class CubicalComplex:
# Fake constructor that does nothing but documenting the constructor
def __init__(self, dimensions=None, top_dimensional_cells=None,
- perseus_file=''):
+ perseus_file=''):
"""CubicalComplex constructor from dimensions and
top_dimensional_cells or from a Perseus-style file name.
@@ -58,6 +58,12 @@ cdef class CubicalComplex:
Or
+        :param top_dimensional_cells: A multidimensional array of cell
+ filtration values.
+ :type top_dimensional_cells: anything convertible to a numpy ndarray
+
+ Or
+
:param perseus_file: A Perseus-style file name.
:type perseus_file: string
"""
@@ -65,11 +71,21 @@ cdef class CubicalComplex:
# The real cython constructor
def __cinit__(self, dimensions=None, top_dimensional_cells=None,
perseus_file=''):
- if (dimensions is not None) and (top_dimensional_cells is not None) and (perseus_file == ''):
+ if ((dimensions is not None) and (top_dimensional_cells is not None)
+ and (perseus_file == '')):
+ self.thisptr = new Bitmap_cubical_complex_base_interface(dimensions, top_dimensional_cells)
+ elif ((dimensions is None) and (top_dimensional_cells is not None)
+ and (perseus_file == '')):
+ top_dimensional_cells = np.array(top_dimensional_cells,
+ copy = False,
+ order = 'F')
+ dimensions = top_dimensional_cells.shape
+ top_dimensional_cells = top_dimensional_cells.ravel(order='F')
self.thisptr = new Bitmap_cubical_complex_base_interface(dimensions, top_dimensional_cells)
- elif (dimensions is None) and (top_dimensional_cells is None) and (perseus_file != ''):
+ elif ((dimensions is None) and (top_dimensional_cells is None)
+ and (perseus_file != '')):
if os.path.isfile(perseus_file):
- self.thisptr = new Bitmap_cubical_complex_base_interface(str.encode(perseus_file))
+ self.thisptr = new Bitmap_cubical_complex_base_interface(perseus_file.encode('utf-8'))
else:
print("file " + perseus_file + " not found.")
else:
@@ -184,4 +200,4 @@ cdef class CubicalComplex:
else:
print("intervals_in_dim function requires persistence function"
" to be launched first.")
- return np_array(intervals_result)
+ return np.array(intervals_result)
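For context, a minimal sketch of the ndarray-based constructor added in this hunk; the cell values are arbitrary:

    import numpy as np
    import gudhi

    # A 3x3 grid of top-dimensional cell filtration values; the dimensions are deduced from the array shape.
    cells = np.array([[0.0, 0.0, 0.0],
                      [0.0, 1.0, 0.0],
                      [0.0, 0.0, 0.0]])
    cubical_complex = gudhi.CubicalComplex(top_dimensional_cells=cells)
    print(cubical_complex.persistence())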
diff --git a/src/python/gudhi/euclidean_strong_witness_complex.pyx b/src/python/gudhi/euclidean_strong_witness_complex.pyx
index e3f451f0..aca6084e 100644
--- a/src/python/gudhi/euclidean_strong_witness_complex.pyx
+++ b/src/python/gudhi/euclidean_strong_witness_complex.pyx
@@ -1,11 +1,3 @@
-from cython cimport numeric
-from libcpp.vector cimport vector
-from libcpp.utility cimport pair
-from libc.stdint cimport intptr_t
-
-from gudhi.simplex_tree cimport *
-from gudhi.simplex_tree import SimplexTree
-
# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
# Author(s): Vincent Rouvreau
@@ -15,6 +7,14 @@ from gudhi.simplex_tree import SimplexTree
# Modification(s):
# - YYYY/MM Author: Description of the modification
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.utility cimport pair
+from libc.stdint cimport intptr_t
+
+from gudhi.simplex_tree cimport *
+from gudhi.simplex_tree import SimplexTree
+
__author__ = "Vincent Rouvreau"
__copyright__ = "Copyright (C) 2016 Inria"
__license__ = "GPL v3"
@@ -22,9 +22,9 @@ __license__ = "GPL v3"
cdef extern from "Euclidean_strong_witness_complex_interface.h" namespace "Gudhi":
cdef cppclass Euclidean_strong_witness_complex_interface "Gudhi::witness_complex::Euclidean_strong_witness_complex_interface":
Euclidean_strong_witness_complex_interface(vector[vector[double]] landmarks, vector[vector[double]] witnesses)
- void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square)
+ void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square) except +
void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square,
- unsigned limit_dimension)
+ unsigned limit_dimension) except +
vector[double] get_point(unsigned vertex)
# EuclideanStrongWitnessComplex python interface
diff --git a/src/python/gudhi/euclidean_witness_complex.pyx b/src/python/gudhi/euclidean_witness_complex.pyx
index 84a8ea1a..fb0c2201 100644
--- a/src/python/gudhi/euclidean_witness_complex.pyx
+++ b/src/python/gudhi/euclidean_witness_complex.pyx
@@ -1,11 +1,3 @@
-from cython cimport numeric
-from libcpp.vector cimport vector
-from libcpp.utility cimport pair
-from libc.stdint cimport intptr_t
-
-from gudhi.simplex_tree cimport *
-from gudhi.simplex_tree import SimplexTree
-
# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
# Author(s): Vincent Rouvreau
@@ -15,6 +7,14 @@ from gudhi.simplex_tree import SimplexTree
# Modification(s):
# - YYYY/MM Author: Description of the modification
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.utility cimport pair
+from libc.stdint cimport intptr_t
+
+from gudhi.simplex_tree cimport *
+from gudhi.simplex_tree import SimplexTree
+
__author__ = "Vincent Rouvreau"
__copyright__ = "Copyright (C) 2016 Inria"
__license__ = "GPL v3"
@@ -22,9 +22,9 @@ __license__ = "GPL v3"
cdef extern from "Euclidean_witness_complex_interface.h" namespace "Gudhi":
cdef cppclass Euclidean_witness_complex_interface "Gudhi::witness_complex::Euclidean_witness_complex_interface":
Euclidean_witness_complex_interface(vector[vector[double]] landmarks, vector[vector[double]] witnesses)
- void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square)
+ void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square) except +
void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square,
- unsigned limit_dimension)
+ unsigned limit_dimension) except +
vector[double] get_point(unsigned vertex)
# EuclideanWitnessComplex python interface
diff --git a/src/python/gudhi/hera.cc b/src/python/gudhi/hera.cc
new file mode 100644
index 00000000..0d562b4c
--- /dev/null
+++ b/src/python/gudhi/hera.cc
@@ -0,0 +1,71 @@
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ * Author(s): Marc Glisse
+ *
+ * Copyright (C) 2020 Inria
+ *
+ * Modification(s):
+ * - YYYY/MM Author: Description of the modification
+ */
+
+#include <pybind11/pybind11.h>
+#include <pybind11/numpy.h>
+
+#include <boost/range/iterator_range.hpp>
+
+#include <wasserstein.h> // Hera
+
+#include <array>
+
+namespace py = pybind11;
+typedef py::array_t<double, py::array::c_style | py::array::forcecast> Dgm;
+
+double wasserstein_distance(
+ Dgm d1, Dgm d2,
+ double wasserstein_power, double internal_p,
+ double delta)
+{
+ py::buffer_info buf1 = d1.request();
+ py::buffer_info buf2 = d2.request();
+ // shape (n,2) or (0) for empty
+ if((buf1.ndim!=2 || buf1.shape[1]!=2) && (buf1.ndim!=1 || buf1.shape[0]!=0))
+ throw std::runtime_error("Diagram 1 must be an array of size n x 2");
+ if((buf2.ndim!=2 || buf2.shape[1]!=2) && (buf2.ndim!=1 || buf2.shape[0]!=0))
+ throw std::runtime_error("Diagram 2 must be an array of size n x 2");
+ typedef std::array<double, 2> Point;
+ auto p1 = (Point*)buf1.ptr;
+ auto p2 = (Point*)buf2.ptr;
+ auto diag1 = boost::make_iterator_range(p1, p1+buf1.shape[0]);
+ auto diag2 = boost::make_iterator_range(p2, p2+buf2.shape[0]);
+
+ hera::AuctionParams<double> params;
+ params.wasserstein_power = wasserstein_power;
+ // hera encodes infinity as -1...
+ if(std::isinf(internal_p)) internal_p = hera::get_infinity<double>();
+ params.internal_p = internal_p;
+ params.delta = delta;
+  // The extra parameters are purposely not exposed for now.
+ return hera::wasserstein_dist(diag1, diag2, params);
+}
+
+PYBIND11_MODULE(hera, m) {
+ m.def("wasserstein_distance", &wasserstein_distance,
+ py::arg("X"), py::arg("Y"),
+ py::arg("order") = 1,
+ py::arg("internal_p") = std::numeric_limits<double>::infinity(),
+ py::arg("delta") = .01,
+ R"pbdoc(
+ Compute the Wasserstein distance between two diagrams.
+ Points at infinity are supported.
+
+ Parameters:
+ X (n x 2 numpy array): First diagram
+ Y (n x 2 numpy array): Second diagram
+ order (float): Wasserstein exponent W_q
+ internal_p (float): Internal Minkowski norm L^p in R^2
+ delta (float): Relative error 1+delta
+
+ Returns:
+ float: Approximate Wasserstein distance W_q(X,Y)
+ )pbdoc");
+}
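A minimal usage sketch of the binding defined above; the diagram values are arbitrary placeholders:

    import numpy as np
    from gudhi import hera

    X = np.array([[2.7, 3.7], [9.6, 14.0], [34.2, 34.974]])
    Y = np.array([[2.8, 4.45], [9.5, 14.1]])

    # Approximate 1-Wasserstein distance, internal L^2 norm, 1% relative error.
    print(hera.wasserstein_distance(X, Y, order=1.0, internal_p=2.0, delta=0.01))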
diff --git a/src/python/gudhi/nerve_gic.pyx b/src/python/gudhi/nerve_gic.pyx
index acb78564..382e71c5 100644
--- a/src/python/gudhi/nerve_gic.pyx
+++ b/src/python/gudhi/nerve_gic.pyx
@@ -1,3 +1,12 @@
+# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+# Author(s): Vincent Rouvreau
+#
+# Copyright (C) 2018 Inria
+#
+# Modification(s):
+# - YYYY/MM Author: Description of the modification
+
from cython cimport numeric
from libcpp.vector cimport vector
from libcpp.utility cimport pair
@@ -9,15 +18,6 @@ from libc.stdint cimport intptr_t
from gudhi.simplex_tree cimport *
from gudhi.simplex_tree import SimplexTree
-# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
-# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
-# Author(s): Vincent Rouvreau
-#
-# Copyright (C) 2018 Inria
-#
-# Modification(s):
-# - YYYY/MM Author: Description of the modification
-
__author__ = "Vincent Rouvreau"
__copyright__ = "Copyright (C) 2018 Inria"
__license__ = "GPL v3"
@@ -180,7 +180,7 @@ cdef class CoverComplex:
:returns: Read file status.
"""
if os.path.isfile(off_file):
- return self.thisptr.read_point_cloud(str.encode(off_file))
+ return self.thisptr.read_point_cloud(off_file.encode('utf-8'))
else:
print("file " + off_file + " not found.")
return False
@@ -212,7 +212,7 @@ cdef class CoverComplex:
:type color_file_name: string
"""
if os.path.isfile(color_file_name):
- self.thisptr.set_color_from_file(str.encode(color_file_name))
+ self.thisptr.set_color_from_file(color_file_name.encode('utf-8'))
else:
print("file " + color_file_name + " not found.")
@@ -233,7 +233,7 @@ cdef class CoverComplex:
:type cover_file_name: string
"""
if os.path.isfile(cover_file_name):
- self.thisptr.set_cover_from_file(str.encode(cover_file_name))
+ self.thisptr.set_cover_from_file(cover_file_name.encode('utf-8'))
else:
print("file " + cover_file_name + " not found.")
@@ -266,7 +266,7 @@ cdef class CoverComplex:
:type func_file_name: string
"""
if os.path.isfile(func_file_name):
- self.thisptr.set_function_from_file(str.encode(func_file_name))
+ self.thisptr.set_function_from_file(func_file_name.encode('utf-8'))
else:
print("file " + func_file_name + " not found.")
@@ -307,7 +307,7 @@ cdef class CoverComplex:
:type graph_file_name: string
"""
if os.path.isfile(graph_file_name):
- self.thisptr.set_graph_from_file(str.encode(graph_file_name))
+ self.thisptr.set_graph_from_file(graph_file_name.encode('utf-8'))
else:
print("file " + graph_file_name + " not found.")
@@ -368,7 +368,7 @@ cdef class CoverComplex:
:param type: either "GIC" or "Nerve".
:type type: string
"""
- self.thisptr.set_type(str.encode(type))
+ self.thisptr.set_type(type.encode('utf-8'))
def set_verbose(self, verbose):
"""Specifies whether the program should display information or not.
diff --git a/src/python/gudhi/off_reader.pyx b/src/python/gudhi/off_reader.pyx
index 225e981c..7e6d9d80 100644
--- a/src/python/gudhi/off_reader.pyx
+++ b/src/python/gudhi/off_reader.pyx
@@ -1,8 +1,3 @@
-from cython cimport numeric
-from libcpp.vector cimport vector
-from libcpp.string cimport string
-import os
-
# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
# Author(s): Vincent Rouvreau
@@ -12,6 +7,11 @@ import os
# Modification(s):
# - YYYY/MM Author: Description of the modification
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.string cimport string
+import os
+
__author__ = "Vincent Rouvreau"
__copyright__ = "Copyright (C) 2016 Inria"
__license__ = "MIT"
@@ -19,18 +19,18 @@ __license__ = "MIT"
cdef extern from "Off_reader_interface.h" namespace "Gudhi":
vector[vector[double]] read_points_from_OFF_file(string off_file)
-def read_off(off_file=''):
+def read_points_from_off_file(off_file=''):
"""Read points from OFF file.
:param off_file: An OFF file style name.
:type off_file: string
:returns: The point set.
- :rtype: vector[vector[double]]
+ :rtype: List[List[float]]
"""
if off_file:
if os.path.isfile(off_file):
- return read_points_from_OFF_file(str.encode(off_file))
+ return read_points_from_OFF_file(off_file.encode('utf-8'))
else:
print("file " + off_file + " not found.")
return []
diff --git a/src/python/gudhi/periodic_cubical_complex.pyx b/src/python/gudhi/periodic_cubical_complex.pyx
index c89055db..37f76201 100644
--- a/src/python/gudhi/periodic_cubical_complex.pyx
+++ b/src/python/gudhi/periodic_cubical_complex.pyx
@@ -1,12 +1,3 @@
-from cython cimport numeric
-from libcpp.vector cimport vector
-from libcpp.utility cimport pair
-from libcpp.string cimport string
-from libcpp cimport bool
-import os
-
-from numpy import array as np_array
-
# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
# Author(s): Vincent Rouvreau
@@ -16,6 +7,15 @@ from numpy import array as np_array
# Modification(s):
# - YYYY/MM Author: Description of the modification
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.utility cimport pair
+from libcpp.string cimport string
+from libcpp cimport bool
+import os
+
+import numpy as np
+
__author__ = "Vincent Rouvreau"
__copyright__ = "Copyright (C) 2016 Inria"
__license__ = "MIT"
@@ -47,7 +47,7 @@ cdef class PeriodicCubicalComplex:
# Fake constructor that does nothing but documenting the constructor
def __init__(self, dimensions=None, top_dimensional_cells=None,
- periodic_dimensions=None, perseus_file=''):
+ periodic_dimensions=None, perseus_file=''):
"""PeriodicCubicalComplex constructor from dimensions and
top_dimensional_cells or from a Perseus-style file name.
@@ -60,6 +60,14 @@ cdef class PeriodicCubicalComplex:
Or
+ :param top_dimensional_cells: A multidimensional array of cells
+ filtration values.
+ :type top_dimensional_cells: anything convertible to a numpy ndarray
+    :param periodic_dimensions: A list of the periodicity values of the top dimensional cells.
+ :type periodic_dimensions: list of boolean
+
+ Or
+
:param perseus_file: A Perseus-style file name.
:type perseus_file: string
"""
@@ -67,16 +75,32 @@ cdef class PeriodicCubicalComplex:
# The real cython constructor
def __cinit__(self, dimensions=None, top_dimensional_cells=None,
periodic_dimensions=None, perseus_file=''):
- if (dimensions is not None) and (top_dimensional_cells is not None) and (periodic_dimensions is not None) and (perseus_file == ''):
- self.thisptr = new Periodic_cubical_complex_base_interface(dimensions, top_dimensional_cells, periodic_dimensions)
- elif (dimensions is None) and (top_dimensional_cells is None) and (periodic_dimensions is None) and (perseus_file != ''):
+ if ((dimensions is not None) and (top_dimensional_cells is not None)
+ and (periodic_dimensions is not None) and (perseus_file == '')):
+ self.thisptr = new Periodic_cubical_complex_base_interface(dimensions,
+ top_dimensional_cells,
+ periodic_dimensions)
+ elif ((dimensions is None) and (top_dimensional_cells is not None)
+ and (periodic_dimensions is not None) and (perseus_file == '')):
+ top_dimensional_cells = np.array(top_dimensional_cells,
+ copy = False,
+ order = 'F')
+ dimensions = top_dimensional_cells.shape
+ top_dimensional_cells = top_dimensional_cells.ravel(order='F')
+ self.thisptr = new Periodic_cubical_complex_base_interface(dimensions,
+ top_dimensional_cells,
+ periodic_dimensions)
+ elif ((dimensions is None) and (top_dimensional_cells is None)
+ and (periodic_dimensions is None) and (perseus_file != '')):
if os.path.isfile(perseus_file):
- self.thisptr = new Periodic_cubical_complex_base_interface(str.encode(perseus_file))
+ self.thisptr = new Periodic_cubical_complex_base_interface(perseus_file.encode('utf-8'))
else:
print("file " + perseus_file + " not found.")
else:
- print("CubicalComplex can be constructed from dimensions and "
- "top_dimensional_cells or from a Perseus-style file name.")
+ print("CubicalComplex can be constructed from dimensions, "
+ "top_dimensional_cells and periodic_dimensions, or from "
+ "top_dimensional_cells and periodic_dimensions or from "
+ "a Perseus-style file name.")
def __dealloc__(self):
if self.thisptr != NULL:
@@ -186,4 +210,4 @@ cdef class PeriodicCubicalComplex:
else:
print("intervals_in_dim function requires persistence function"
" to be launched first.")
- return np_array(intervals_result)
+ return np.array(intervals_result)
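A hedged sketch of the constructor branch added above: when only top_dimensional_cells and periodic_dimensions are given, the dimensions are inferred from the ndarray shape and the cells are flattened internally in Fortran order:

    import numpy as np
    import gudhi

    cells = np.array([[1., 2., 3.],
                      [4., 5., 6.],
                      [7., 8., 9.]])
    pcc = gudhi.PeriodicCubicalComplex(top_dimensional_cells=cells,
                                       periodic_dimensions=[True, False])
    print(pcc.persistence())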
diff --git a/src/python/gudhi/persistence_graphical_tools.py b/src/python/gudhi/persistence_graphical_tools.py
index 7d232c85..246280de 100644
--- a/src/python/gudhi/persistence_graphical_tools.py
+++ b/src/python/gudhi/persistence_graphical_tools.py
@@ -1,10 +1,3 @@
-from os import path
-from math import isfinite
-import numpy as np
-
-from gudhi.reader_utils import read_persistence_intervals_in_dimension
-from gudhi.reader_utils import read_persistence_intervals_grouped_by_dimension
-
# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
# Author(s): Vincent Rouvreau, Bertrand Michel
@@ -14,6 +7,13 @@ from gudhi.reader_utils import read_persistence_intervals_grouped_by_dimension
# Modification(s):
# - YYYY/MM Author: Description of the modification
+from os import path
+from math import isfinite
+import numpy as np
+
+from gudhi.reader_utils import read_persistence_intervals_in_dimension
+from gudhi.reader_utils import read_persistence_intervals_grouped_by_dimension
+
__author__ = "Vincent Rouvreau, Bertrand Michel"
__copyright__ = "Copyright (C) 2016 Inria"
__license__ = "MIT"
diff --git a/src/python/gudhi/reader_utils.pyx b/src/python/gudhi/reader_utils.pyx
index 6994c4f9..fe1c3a2e 100644
--- a/src/python/gudhi/reader_utils.pyx
+++ b/src/python/gudhi/reader_utils.pyx
@@ -1,12 +1,3 @@
-from cython cimport numeric
-from libcpp.vector cimport vector
-from libcpp.string cimport string
-from libcpp.map cimport map
-from libcpp.pair cimport pair
-
-from os import path
-from numpy import array as np_array
-
# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
# Author(s): Vincent Rouvreau
@@ -16,6 +7,15 @@ from numpy import array as np_array
# Modification(s):
# - YYYY/MM Author: Description of the modification
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.string cimport string
+from libcpp.map cimport map
+from libcpp.pair cimport pair
+
+from os import path
+from numpy import array as np_array
+
__author__ = "Vincent Rouvreau"
__copyright__ = "Copyright (C) 2017 Inria"
__license__ = "MIT"
@@ -34,30 +34,30 @@ def read_lower_triangular_matrix_from_csv_file(csv_file='', separator=';'):
:type separator: char
:returns: The lower triangular matrix.
- :rtype: vector[vector[double]]
+ :rtype: List[List[float]]
"""
if csv_file:
if path.isfile(csv_file):
- return read_matrix_from_csv_file(str.encode(csv_file), ord(separator[0]))
+ return read_matrix_from_csv_file(csv_file.encode('utf-8'), ord(separator[0]))
print("file " + csv_file + " not set or not found.")
return []
def read_persistence_intervals_grouped_by_dimension(persistence_file=''):
"""Reads a file containing persistence intervals.
Each line might contain 2, 3 or 4 values: [[field] dimension] birth death
- The return value is an `map[dim, vector[pair[birth, death]]]`
- where `dim` is an `int`, `birth` a `double`, and `death` a `double`.
+ The return value is a `dict(dim, list(tuple(birth, death)))`
+ where `dim` is an `int`, `birth` a `float`, and `death` a `float`.
Note: the function does not check that birth <= death.
:param persistence_file: A persistence file style name.
:type persistence_file: string
:returns: The persistence pairs grouped by dimension.
- :rtype: map[int, vector[pair[double, double]]]
+ :rtype: Dict[int, List[Tuple[float, float]]]
"""
if persistence_file:
if path.isfile(persistence_file):
- return read_pers_intervals_grouped_by_dimension(str.encode(persistence_file))
+ return read_pers_intervals_grouped_by_dimension(persistence_file.encode('utf-8'))
print("file " + persistence_file + " not set or not found.")
return []
@@ -80,7 +80,7 @@ def read_persistence_intervals_in_dimension(persistence_file='', only_this_dim=-
"""
if persistence_file:
if path.isfile(persistence_file):
- return np_array(read_pers_intervals_in_dimension(str.encode(
- persistence_file), only_this_dim))
+ return np_array(read_pers_intervals_in_dimension(persistence_file.encode(
+ 'utf-8'), only_this_dim))
print("file " + persistence_file + " not set or not found.")
return []
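A hedged sketch of the three readers whose docstrings were updated above; "diagram.pers" and "matrix.csv" are placeholder file names, and the functions are assumed to be re-exported at the gudhi top level (otherwise import them from gudhi.reader_utils):

    import gudhi

    by_dim = gudhi.read_persistence_intervals_grouped_by_dimension(
        persistence_file="diagram.pers")                    # Dict[int, List[Tuple[float, float]]]
    dim1 = gudhi.read_persistence_intervals_in_dimension(
        persistence_file="diagram.pers", only_this_dim=1)   # numpy array of (birth, death) pairs
    matrix = gudhi.read_lower_triangular_matrix_from_csv_file(
        csv_file="matrix.csv", separator=";")               # List[List[float]]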
diff --git a/src/python/gudhi/rips_complex.pyx b/src/python/gudhi/rips_complex.pyx
index cbbbab0d..deb8057a 100644
--- a/src/python/gudhi/rips_complex.pyx
+++ b/src/python/gudhi/rips_complex.pyx
@@ -1,3 +1,12 @@
+# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+# Author(s): Vincent Rouvreau
+#
+# Copyright (C) 2016 Inria
+#
+# Modification(s):
+# - YYYY/MM Author: Description of the modification
+
from cython cimport numeric
from libcpp.vector cimport vector
from libcpp.utility cimport pair
@@ -8,15 +17,6 @@ from libc.stdint cimport intptr_t
from gudhi.simplex_tree cimport *
from gudhi.simplex_tree import SimplexTree
-# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
-# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
-# Author(s): Vincent Rouvreau
-#
-# Copyright (C) 2016 Inria
-#
-# Modification(s):
-# - YYYY/MM Author: Description of the modification
-
__author__ = "Vincent Rouvreau"
__copyright__ = "Copyright (C) 2016 Inria"
__license__ = "MIT"
@@ -28,7 +28,7 @@ cdef extern from "Rips_complex_interface.h" namespace "Gudhi":
void init_matrix(vector[vector[double]] values, double threshold)
void init_points_sparse(vector[vector[double]] values, double threshold, double sparse)
void init_matrix_sparse(vector[vector[double]] values, double threshold, double sparse)
- void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, int dim_max)
+ void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, int dim_max) except +
# RipsComplex python interface
cdef class RipsComplex:
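The `except +` added to create_simplex_tree lets C++ exceptions raised during construction propagate as Python exceptions instead of terminating the interpreter. A hedged sketch of catching them:

    import gudhi

    rips = gudhi.RipsComplex(points=[[0., 0.], [1., 0.], [0., 1.]],
                             max_edge_length=2.0)
    try:
        simplex_tree = rips.create_simplex_tree(max_dimension=2)
    except Exception as err:  # e.g. a std::bad_alloc translated by Cython
        print("simplex tree construction failed:", err)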
diff --git a/src/python/gudhi/simplex_tree.pxd b/src/python/gudhi/simplex_tree.pxd
index 5f86cfe2..96d14079 100644
--- a/src/python/gudhi/simplex_tree.pxd
+++ b/src/python/gudhi/simplex_tree.pxd
@@ -1,19 +1,18 @@
+# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+# Author(s): Vincent Rouvreau
+#
+# Copyright (C) 2016 Inria
+#
+# Modification(s):
+# - YYYY/MM Author: Description of the modification
+
from cython cimport numeric
from libcpp.vector cimport vector
from libcpp.utility cimport pair
from libcpp cimport bool
from libcpp.string cimport string
-""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
- See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
- Author(s): Vincent Rouvreau
-
- Copyright (C) 2016 Inria
-
- Modification(s):
- - YYYY/MM Author: Description of the modification
-"""
-
__author__ = "Vincent Rouvreau"
__copyright__ = "Copyright (C) 2016 Inria"
__license__ = "MIT"
@@ -40,7 +39,7 @@ cdef extern from "Simplex_tree_interface.h" namespace "Gudhi":
vector[pair[vector[int], double]] get_star(vector[int] simplex)
vector[pair[vector[int], double]] get_cofaces(vector[int] simplex,
int dimension)
- void expansion(int max_dim)
+ void expansion(int max_dim) except +
void remove_maximal_simplex(vector[int] simplex)
bool prune_above_filtration(double filtration)
bool make_filtration_non_decreasing()
diff --git a/src/python/gudhi/simplex_tree.pyx b/src/python/gudhi/simplex_tree.pyx
index 4a3cd9bc..b18627c4 100644
--- a/src/python/gudhi/simplex_tree.pyx
+++ b/src/python/gudhi/simplex_tree.pyx
@@ -1,7 +1,3 @@
-from libc.stdint cimport intptr_t
-from numpy import array as np_array
-cimport simplex_tree
-
# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
# Author(s): Vincent Rouvreau
@@ -11,6 +7,10 @@ cimport simplex_tree
# Modification(s):
# - YYYY/MM Author: Description of the modification
+from libc.stdint cimport intptr_t
+from numpy import array as np_array
+cimport simplex_tree
+
__author__ = "Vincent Rouvreau"
__copyright__ = "Copyright (C) 2016 Inria"
__license__ = "MIT"
@@ -508,7 +508,7 @@ cdef class SimplexTree:
"""
if self.pcohptr != NULL:
if persistence_file != '':
- self.pcohptr.write_output_diagram(str.encode(persistence_file))
+ self.pcohptr.write_output_diagram(persistence_file.encode('utf-8'))
else:
print("persistence_file must be specified")
else:
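A hedged sketch around the hunk above, assuming the enclosing Python method is write_persistence_diagram as in current releases: persistence() must be computed first, and the output path (a placeholder here) is now encoded as UTF-8.

    import gudhi

    st = gudhi.SimplexTree()
    st.insert([0, 1], filtration=0.5)
    st.insert([1, 2], filtration=1.0)
    st.persistence()
    st.write_persistence_diagram(persistence_file="diagram.pers")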
diff --git a/src/python/gudhi/strong_witness_complex.pyx b/src/python/gudhi/strong_witness_complex.pyx
index 66d49b49..9f89d3ae 100644
--- a/src/python/gudhi/strong_witness_complex.pyx
+++ b/src/python/gudhi/strong_witness_complex.pyx
@@ -1,11 +1,3 @@
-from cython cimport numeric
-from libcpp.vector cimport vector
-from libcpp.utility cimport pair
-from libc.stdint cimport intptr_t
-
-from gudhi.simplex_tree cimport *
-from gudhi.simplex_tree import SimplexTree
-
# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
# Author(s): Vincent Rouvreau
@@ -15,6 +7,14 @@ from gudhi.simplex_tree import SimplexTree
# Modification(s):
# - YYYY/MM Author: Description of the modification
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.utility cimport pair
+from libc.stdint cimport intptr_t
+
+from gudhi.simplex_tree cimport *
+from gudhi.simplex_tree import SimplexTree
+
__author__ = "Vincent Rouvreau"
__copyright__ = "Copyright (C) 2016 Inria"
__license__ = "MIT"
@@ -22,9 +22,9 @@ __license__ = "MIT"
cdef extern from "Strong_witness_complex_interface.h" namespace "Gudhi":
cdef cppclass Strong_witness_complex_interface "Gudhi::witness_complex::Strong_witness_complex_interface":
Strong_witness_complex_interface(vector[vector[pair[size_t, double]]] nearest_landmark_table)
- void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square)
+ void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square) except +
void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square,
- unsigned limit_dimension)
+ unsigned limit_dimension) except +
# StrongWitnessComplex python interface
cdef class StrongWitnessComplex:
diff --git a/src/python/gudhi/subsampling.pyx b/src/python/gudhi/subsampling.pyx
index e0cd1348..f77c6f75 100644
--- a/src/python/gudhi/subsampling.pyx
+++ b/src/python/gudhi/subsampling.pyx
@@ -1,9 +1,3 @@
-from cython cimport numeric
-from libcpp.vector cimport vector
-from libcpp.string cimport string
-from libcpp cimport bool
-import os
-
# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
# Author(s): Vincent Rouvreau
@@ -13,6 +7,12 @@ import os
# Modification(s):
# - YYYY/MM Author: Description of the modification
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.string cimport string
+from libcpp cimport bool
+import os
+
__author__ = "Vincent Rouvreau"
__copyright__ = "Copyright (C) 2016 Inria"
__license__ = "GPL v3"
@@ -33,13 +33,15 @@ def choose_n_farthest_points(points=None, off_file='', nb_points=0, starting_poi
The iteration starts with the landmark `starting point`.
:param points: The input point set.
- :type points: vector[vector[double]].
+ :type points: Iterable[Iterable[float]].
Or
:param off_file: An OFF file style name.
:type off_file: string
+ And in both cases
+
:param nb_points: Number of points of the subsample.
:type nb_points: unsigned.
:param starting_point: The iteration starts with the landmark `starting \
@@ -47,15 +49,15 @@ def choose_n_farthest_points(points=None, off_file='', nb_points=0, starting_poi
index is chosen randomly.
:type starting_point: unsigned.
:returns: The subsample point set.
- :rtype: vector[vector[double]]
+ :rtype: List[List[float]].
"""
if off_file:
if os.path.isfile(off_file):
if starting_point == '':
- return subsampling_n_farthest_points_from_file(str.encode(off_file),
+ return subsampling_n_farthest_points_from_file(off_file.encode('utf-8'),
nb_points)
else:
- return subsampling_n_farthest_points_from_file(str.encode(off_file),
+ return subsampling_n_farthest_points_from_file(off_file.encode('utf-8'),
nb_points,
starting_point)
else:
@@ -74,21 +76,23 @@ def pick_n_random_points(points=None, off_file='', nb_points=0):
"""Subsample a point set by picking random vertices.
:param points: The input point set.
- :type points: vector[vector[double]].
+ :type points: Iterable[Iterable[float]].
Or
:param off_file: An OFF file style name.
:type off_file: string
+ And in both cases
+
:param nb_points: Number of points of the subsample.
:type nb_points: unsigned.
:returns: The subsample point set.
- :rtype: vector[vector[double]]
+ :rtype: List[List[float]]
"""
if off_file:
if os.path.isfile(off_file):
- return subsampling_n_random_points_from_file(str.encode(off_file),
+ return subsampling_n_random_points_from_file(off_file.encode('utf-8'),
nb_points)
else:
print("file " + off_file + " not found.")
@@ -103,22 +107,24 @@ def sparsify_point_set(points=None, off_file='', min_squared_dist=0.0):
between any two points is greater than or equal to min_squared_dist.
:param points: The input point set.
- :type points: vector[vector[double]].
+ :type points: Iterable[Iterable[float]].
Or
:param off_file: An OFF file style name.
:type off_file: string
+ And in both cases
+
:param min_squared_dist: Minimum squared distance separating the output \
points.
:type min_squared_dist: float.
:returns: The subsample point set.
- :rtype: vector[vector[double]]
+ :rtype: List[List[float]]
"""
if off_file:
if os.path.isfile(off_file):
- return subsampling_sparsify_points_from_file(str.encode(off_file),
+ return subsampling_sparsify_points_from_file(off_file.encode('utf-8'),
min_squared_dist)
else:
print("file " + off_file + " not found.")
diff --git a/src/python/gudhi/tangential_complex.pyx b/src/python/gudhi/tangential_complex.pyx
index f4c8b079..6391488c 100644
--- a/src/python/gudhi/tangential_complex.pyx
+++ b/src/python/gudhi/tangential_complex.pyx
@@ -1,3 +1,12 @@
+# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+# Author(s): Vincent Rouvreau
+#
+# Copyright (C) 2016 Inria
+#
+# Modification(s):
+# - YYYY/MM Author: Description of the modification
+
from cython cimport numeric
from libcpp.vector cimport vector
from libcpp.utility cimport pair
@@ -9,15 +18,6 @@ import os
from gudhi.simplex_tree cimport *
from gudhi.simplex_tree import SimplexTree
-# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
-# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
-# Author(s): Vincent Rouvreau
-#
-# Copyright (C) 2016 Inria
-#
-# Modification(s):
-# - YYYY/MM Author: Description of the modification
-
__author__ = "Vincent Rouvreau"
__copyright__ = "Copyright (C) 2016 Inria"
__license__ = "GPL v3"
@@ -66,7 +66,7 @@ cdef class TangentialComplex:
def __cinit__(self, intrisic_dim, points=None, off_file=''):
if off_file:
if os.path.isfile(off_file):
- self.thisptr = new Tangential_complex_interface(intrisic_dim, str.encode(off_file), True)
+ self.thisptr = new Tangential_complex_interface(intrisic_dim, off_file.encode('utf-8'), True)
else:
print("file " + off_file + " not found.")
else:
diff --git a/src/python/gudhi/wasserstein.py b/src/python/gudhi/wasserstein.py
index d8a3104c..13102094 100644
--- a/src/python/gudhi/wasserstein.py
+++ b/src/python/gudhi/wasserstein.py
@@ -1,10 +1,3 @@
-import numpy as np
-import scipy.spatial.distance as sc
-try:
- import ot
-except ImportError:
- print("POT (Python Optimal Transport) package is not installed. Try to run $ conda install -c conda-forge pot ; or $ pip install POT")
-
# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
# Author(s): Theo Lacombe
@@ -14,6 +7,13 @@ except ImportError:
# Modification(s):
# - YYYY/MM Author: Description of the modification
+import numpy as np
+import scipy.spatial.distance as sc
+try:
+ import ot
+except ImportError:
+ print("POT (Python Optimal Transport) package is not installed. Try to run $ conda install -c conda-forge pot ; or $ pip install POT")
+
def _proj_on_diag(X):
'''
:param X: (n x 2) array encoding the points of a persistent diagram.
@@ -23,26 +23,26 @@ def _proj_on_diag(X):
return np.array([Z , Z]).T
-def _build_dist_matrix(X, Y, p=2., q=2.):
+def _build_dist_matrix(X, Y, order=2., internal_p=2.):
'''
:param X: (n x 2) numpy.array encoding the (points of the) first diagram.
:param Y: (m x 2) numpy.array encoding the second diagram.
- :param q: Ground metric (i.e. norm l_q).
- :param p: exponent for the Wasserstein metric.
+ :param order: exponent for the Wasserstein metric.
+ :param internal_p: Ground metric (i.e. norm L^p).
:returns: (n+1) x (m+1) np.array encoding the cost matrix C.
For 1 <= i <= n, 1 <= j <= m, C[i,j] encodes the distance between X[i] and Y[j], while C[i, m+1] (resp. C[n+1, j]) encodes the distance (to the p) between X[i] (resp Y[j]) and its orthogonal proj onto the diagonal.
note also that C[n+1, m+1] = 0 (it costs nothing to move from the diagonal to the diagonal).
'''
Xdiag = _proj_on_diag(X)
Ydiag = _proj_on_diag(Y)
- if np.isinf(q):
- C = sc.cdist(X,Y, metric='chebyshev')**p
- Cxd = np.linalg.norm(X - Xdiag, ord=q, axis=1)**p
- Cdy = np.linalg.norm(Y - Ydiag, ord=q, axis=1)**p
+ if np.isinf(internal_p):
+ C = sc.cdist(X,Y, metric='chebyshev')**order
+ Cxd = np.linalg.norm(X - Xdiag, ord=internal_p, axis=1)**order
+ Cdy = np.linalg.norm(Y - Ydiag, ord=internal_p, axis=1)**order
else:
- C = sc.cdist(X,Y, metric='minkowski', p=q)**p
- Cxd = np.linalg.norm(X - Xdiag, ord=q, axis=1)**p
- Cdy = np.linalg.norm(Y - Ydiag, ord=q, axis=1)**p
+ C = sc.cdist(X,Y, metric='minkowski', p=internal_p)**order
+ Cxd = np.linalg.norm(X - Xdiag, ord=internal_p, axis=1)**order
+ Cdy = np.linalg.norm(Y - Ydiag, ord=internal_p, axis=1)**order
Cf = np.hstack((C, Cxd[:,None]))
Cdy = np.append(Cdy, 0)
@@ -51,24 +51,24 @@ def _build_dist_matrix(X, Y, p=2., q=2.):
return Cf
-def _perstot(X, p, q):
+def _perstot(X, order, internal_p):
'''
:param X: (n x 2) numpy.array (points of a given diagram).
- :param q: Ground metric on the (upper-half) plane (i.e. norm l_q in R^2); Default value is 2 (Euclidean norm).
- :param p: exponent for Wasserstein; Default value is 2.
+ :param order: exponent for Wasserstein. Default value is 2.
+ :param internal_p: Ground metric on the (upper-half) plane (i.e. norm L^p in R^2); Default value is 2 (Euclidean norm).
:returns: float, the total persistence of the diagram (that is, its distance to the empty diagram).
'''
Xdiag = _proj_on_diag(X)
- return (np.sum(np.linalg.norm(X - Xdiag, ord=q, axis=1)**p))**(1./p)
+ return (np.sum(np.linalg.norm(X - Xdiag, ord=internal_p, axis=1)**order))**(1./order)
-def wasserstein_distance(X, Y, p=2., q=2.):
+def wasserstein_distance(X, Y, order=2., internal_p=2.):
'''
:param X: (n x 2) numpy.array encoding the (finite points of the) first diagram. Must not contain essential points (i.e. with infinite coordinate).
:param Y: (m x 2) numpy.array encoding the second diagram.
- :param q: Ground metric on the (upper-half) plane (i.e. norm l_q in R^2); Default value is 2 (euclidean norm).
- :param p: exponent for Wasserstein; Default value is 2.
- :returns: the p-Wasserstein distance (1 <= p < infinity) with respect to the q-norm as ground metric.
+ :param order: exponent for Wasserstein; Default value is 2.
+ :param internal_p: Ground metric on the (upper-half) plane (i.e. norm L^p in R^2); Default value is 2 (Euclidean norm).
+ :returns: the Wasserstein distance of order q (1 <= q < infinity) between persistence diagrams with respect to the internal_p-norm as ground metric.
:rtype: float
'''
n = len(X)
@@ -79,20 +79,19 @@ def wasserstein_distance(X, Y, p=2., q=2.):
if Y.size == 0:
return 0.
else:
- return _perstot(Y, p, q)
+ return _perstot(Y, order, internal_p)
elif Y.size == 0:
- return _perstot(X, p, q)
+ return _perstot(X, order, internal_p)
- M = _build_dist_matrix(X, Y, p=p, q=q)
+ M = _build_dist_matrix(X, Y, order=order, internal_p=internal_p)
a = np.full(n+1, 1. / (n + m) ) # weight vector of the input diagram. Uniform here.
a[-1] = a[-1] * m # normalized so that we have a probability measure, required by POT
b = np.full(m+1, 1. / (n + m) ) # weight vector of the input diagram. Uniform here.
b[-1] = b[-1] * n # so that we have a probability measure, required by POT
    # Computation of the OT cost using the ot.emd2 library.
- # Note: it is the squared Wasserstein distance.
+ # Note: it is the Wasserstein distance to the power q.
# The default numItermax=100000 is not sufficient for some examples with 5000 points, what is a good value?
ot_cost = (n+m) * ot.emd2(a, b, M, numItermax=2000000)
- return ot_cost ** (1./p)
-
+ return ot_cost ** (1./order)
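A hedged sketch of the renamed keyword arguments (order replaces p, internal_p replaces q); it requires the POT package, and the expected value is taken from the updated tests below:

    import numpy as np
    from gudhi.wasserstein import wasserstein_distance

    diag1 = np.array([[2.7, 3.7], [9.6, 14.0], [34.2, 34.974]])
    diag2 = np.array([[2.8, 4.45], [9.5, 14.1]])
    # 1-Wasserstein distance with the Euclidean ground metric; expected ~1.4453593023967701
    print(wasserstein_distance(diag1, diag2, order=1., internal_p=2.))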
diff --git a/src/python/gudhi/witness_complex.pyx b/src/python/gudhi/witness_complex.pyx
index 153fc615..e589d006 100644
--- a/src/python/gudhi/witness_complex.pyx
+++ b/src/python/gudhi/witness_complex.pyx
@@ -1,11 +1,3 @@
-from cython cimport numeric
-from libcpp.vector cimport vector
-from libcpp.utility cimport pair
-from libc.stdint cimport intptr_t
-
-from gudhi.simplex_tree cimport *
-from gudhi.simplex_tree import SimplexTree
-
# This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
# See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
# Author(s): Vincent Rouvreau
@@ -15,6 +7,14 @@ from gudhi.simplex_tree import SimplexTree
# Modification(s):
# - YYYY/MM Author: Description of the modification
+from cython cimport numeric
+from libcpp.vector cimport vector
+from libcpp.utility cimport pair
+from libc.stdint cimport intptr_t
+
+from gudhi.simplex_tree cimport *
+from gudhi.simplex_tree import SimplexTree
+
__author__ = "Vincent Rouvreau"
__copyright__ = "Copyright (C) 2016 Inria"
__license__ = "MIT"
@@ -22,9 +22,9 @@ __license__ = "MIT"
cdef extern from "Witness_complex_interface.h" namespace "Gudhi":
cdef cppclass Witness_complex_interface "Gudhi::witness_complex::Witness_complex_interface":
Witness_complex_interface(vector[vector[pair[size_t, double]]] nearest_landmark_table)
- void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square)
+ void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square) except +
void create_simplex_tree(Simplex_tree_interface_full_featured* simplex_tree, double max_alpha_square,
- unsigned limit_dimension)
+ unsigned limit_dimension) except +
# WitnessComplex python interface
cdef class WitnessComplex:
diff --git a/src/python/include/Alpha_complex_interface.h b/src/python/include/Alpha_complex_interface.h
index 96353cc4..8614eee3 100644
--- a/src/python/include/Alpha_complex_interface.h
+++ b/src/python/include/Alpha_complex_interface.h
@@ -13,6 +13,7 @@
#include <gudhi/Simplex_tree.h>
#include <gudhi/Alpha_complex.h>
+#include <CGAL/Epeck_d.h>
#include <CGAL/Epick_d.h>
#include <boost/range/adaptor/transformed.hpp>
@@ -28,7 +29,7 @@ namespace Gudhi {
namespace alpha_complex {
class Alpha_complex_interface {
- using Dynamic_kernel = CGAL::Epick_d< CGAL::Dynamic_dimension_tag >;
+ using Dynamic_kernel = CGAL::Epeck_d< CGAL::Dynamic_dimension_tag >;
using Point_d = Dynamic_kernel::Point_d;
public:
@@ -49,13 +50,9 @@ class Alpha_complex_interface {
std::vector<double> get_point(int vh) {
std::vector<double> vd;
- try {
- Point_d const& ph = alpha_complex_->get_point(vh);
- for (auto coord = ph.cartesian_begin(); coord < ph.cartesian_end(); coord++)
- vd.push_back(CGAL::to_double(*coord));
- } catch (std::out_of_range const&) {
- // std::out_of_range is thrown in case not found. Other exceptions must be re-thrown
- }
+ Point_d const& ph = alpha_complex_->get_point(vh);
+ for (auto coord = ph.cartesian_begin(); coord != ph.cartesian_end(); coord++)
+ vd.push_back(CGAL::to_double(*coord));
return vd;
}
diff --git a/src/python/setup.py.in b/src/python/setup.py.in
index 3f1d4424..f968bd59 100644
--- a/src/python/setup.py.in
+++ b/src/python/setup.py.in
@@ -1,7 +1,3 @@
-from setuptools import setup, Extension
-from Cython.Build import cythonize
-from numpy import get_include as numpy_get_include
-
"""This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
Author(s): Vincent Rouvreau
@@ -12,6 +8,12 @@ from numpy import get_include as numpy_get_include
- YYYY/MM Author: Description of the modification
"""
+from setuptools import setup, Extension, find_packages
+from Cython.Build import cythonize
+from numpy import get_include as numpy_get_include
+import sys
+import pybind11
+
__author__ = "Vincent Rouvreau"
__copyright__ = "Copyright (C) 2016 Inria"
__license__ = "MIT"
@@ -38,16 +40,29 @@ for module in modules:
libraries=libraries,
library_dirs=library_dirs,
include_dirs=include_dirs,
- runtime_library_dirs=runtime_library_dirs,))
+ runtime_library_dirs=runtime_library_dirs,
+ cython_directives = {'language_level': str(sys.version_info[0])},))
+
+ext_modules = cythonize(ext_modules)
+
+ext_modules.append(Extension(
+ 'gudhi.hera',
+ sources = [source_dir + 'hera.cc'],
+ language = 'c++',
+ include_dirs = include_dirs +
+ ['@HERA_WASSERSTEIN_INCLUDE_DIR@',
+ pybind11.get_include(False), pybind11.get_include(True)],
+ extra_compile_args=extra_compile_args + [@GUDHI_PYBIND11_EXTRA_COMPILE_ARGS@],
+ ))
setup(
name = 'gudhi',
- packages=["gudhi",],
+ packages=find_packages(), # find_namespace_packages(include=["gudhi*"])
author='GUDHI Editorial Board',
author_email='gudhi-contact@lists.gforge.inria.fr',
version='@GUDHI_VERSION@',
url='http://gudhi.gforge.inria.fr/',
- ext_modules = cythonize(ext_modules),
+ ext_modules = ext_modules,
install_requires = ['cython','numpy >= 1.9',],
- setup_requires = ['numpy >= 1.9',],
+ setup_requires = ['numpy >= 1.9','pybind11',],
)
diff --git a/src/python/test/test_alpha_complex.py b/src/python/test/test_alpha_complex.py
index 24f8bf53..3761fe16 100755
--- a/src/python/test/test_alpha_complex.py
+++ b/src/python/test/test_alpha_complex.py
@@ -1,5 +1,3 @@
-from gudhi import AlphaComplex, SimplexTree
-
""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
Author(s): Vincent Rouvreau
@@ -10,6 +8,17 @@ from gudhi import AlphaComplex, SimplexTree
- YYYY/MM Author: Description of the modification
"""
+from gudhi import AlphaComplex, SimplexTree
+import math
+import numpy as np
+import pytest
+try:
+ # python3
+ from itertools import zip_longest
+except ImportError:
+ # python2
+ from itertools import izip_longest as zip_longest
+
__author__ = "Vincent Rouvreau"
__copyright__ = "Copyright (C) 2016 Inria"
__license__ = "MIT"
@@ -56,8 +65,18 @@ def test_infinite_alpha():
assert point_list[1] == alpha_complex.get_point(1)
assert point_list[2] == alpha_complex.get_point(2)
assert point_list[3] == alpha_complex.get_point(3)
- assert alpha_complex.get_point(4) == []
- assert alpha_complex.get_point(125) == []
+ try:
+ alpha_complex.get_point(4) == []
+ except IndexError:
+ pass
+ else:
+ assert False
+ try:
+ alpha_complex.get_point(125) == []
+ except IndexError:
+ pass
+ else:
+ assert False
def test_filtered_alpha():
@@ -73,8 +92,18 @@ def test_filtered_alpha():
assert point_list[1] == filtered_alpha.get_point(1)
assert point_list[2] == filtered_alpha.get_point(2)
assert point_list[3] == filtered_alpha.get_point(3)
- assert filtered_alpha.get_point(4) == []
- assert filtered_alpha.get_point(125) == []
+ try:
+ filtered_alpha.get_point(4) == []
+ except IndexError:
+ pass
+ else:
+ assert False
+ try:
+ filtered_alpha.get_point(125) == []
+ except IndexError:
+ pass
+ else:
+ assert False
assert simplex_tree.get_filtration() == [
([0], 0.0),
@@ -88,3 +117,28 @@ def test_filtered_alpha():
]
assert simplex_tree.get_star([0]) == [([0], 0.0), ([0, 1], 0.25), ([0, 2], 0.25)]
assert simplex_tree.get_cofaces([0], 1) == [([0, 1], 0.25), ([0, 2], 0.25)]
+
+def test_safe_alpha_persistence_comparison():
+ #generate periodic signal
+ time = np.arange(0, 10, 1)
+ signal = [math.sin(x) for x in time]
+ delta = math.pi
+ delayed = [math.sin(x + delta) for x in time]
+
+ #construct embedding
+ embedding1 = [[signal[i], -signal[i]] for i in range(len(time))]
+ embedding2 = [[signal[i], delayed[i]] for i in range(len(time))]
+
+ #build alpha complex and simplex tree
+ alpha_complex1 = AlphaComplex(points=embedding1)
+ simplex_tree1 = alpha_complex1.create_simplex_tree()
+
+ alpha_complex2 = AlphaComplex(points=embedding2)
+ simplex_tree2 = alpha_complex2.create_simplex_tree()
+
+ diag1 = simplex_tree1.persistence()
+ diag2 = simplex_tree2.persistence()
+
+ for (first_p, second_p) in zip_longest(diag1, diag2):
+ assert first_p[0] == pytest.approx(second_p[0])
+ assert first_p[1] == pytest.approx(second_p[1])
diff --git a/src/python/test/test_bottleneck_distance.py b/src/python/test/test_bottleneck_distance.py
index f5f019b9..70b2abad 100755
--- a/src/python/test/test_bottleneck_distance.py
+++ b/src/python/test/test_bottleneck_distance.py
@@ -1,5 +1,3 @@
-import gudhi
-
""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
Author(s): Vincent Rouvreau
@@ -10,6 +8,8 @@ import gudhi
- YYYY/MM Author: Description of the modification
"""
+import gudhi
+
__author__ = "Vincent Rouvreau"
__copyright__ = "Copyright (C) 2016 Inria"
__license__ = "MIT"
diff --git a/src/python/test/test_cover_complex.py b/src/python/test/test_cover_complex.py
index 8cd12272..32bc5a26 100755
--- a/src/python/test/test_cover_complex.py
+++ b/src/python/test/test_cover_complex.py
@@ -1,5 +1,3 @@
-from gudhi import CoverComplex
-
""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
Author(s): Vincent Rouvreau
@@ -10,6 +8,8 @@ from gudhi import CoverComplex
- YYYY/MM Author: Description of the modification
"""
+from gudhi import CoverComplex
+
__author__ = "Vincent Rouvreau"
__copyright__ = "Copyright (C) 2018 Inria"
__license__ = "MIT"
diff --git a/src/python/test/test_cubical_complex.py b/src/python/test/test_cubical_complex.py
index 68f54fbe..8c1b2600 100755
--- a/src/python/test/test_cubical_complex.py
+++ b/src/python/test/test_cubical_complex.py
@@ -1,5 +1,3 @@
-from gudhi import CubicalComplex
-
""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
Author(s): Vincent Rouvreau
@@ -10,6 +8,9 @@ from gudhi import CubicalComplex
- YYYY/MM Author: Description of the modification
"""
+from gudhi import CubicalComplex, PeriodicCubicalComplex
+import numpy as np
+
__author__ = "Vincent Rouvreau"
__copyright__ = "Copyright (C) 2016 Inria"
__license__ = "MIT"
@@ -56,7 +57,7 @@ def test_dimension_or_perseus_file_constructor():
assert cub.__is_persistence_defined() == False
-def test_dimension_simple_constructor():
+def simple_constructor(cub):
cub = CubicalComplex(
dimensions=[3, 3], top_dimensional_cells=[1, 2, 3, 4, 5, 6, 7, 8, 9]
)
@@ -67,12 +68,22 @@ def test_dimension_simple_constructor():
assert cub.betti_numbers() == [1, 0, 0]
assert cub.persistent_betti_numbers(0, 1000) == [0, 0, 0]
-
-def test_user_case_simple_constructor():
+def test_simple_constructor_from_top_cells():
cub = CubicalComplex(
dimensions=[3, 3],
- top_dimensional_cells=[float("inf"), 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0],
+ top_dimensional_cells=[1, 2, 3, 4, 5, 6, 7, 8, 9],
)
+ simple_constructor(cub)
+
+def test_simple_constructor_from_numpy_array():
+ cub = CubicalComplex(
+ top_dimensional_cells=np.array([[1, 2, 3],
+ [4, 5, 6],
+ [7, 8, 9]])
+ )
+ simple_constructor(cub)
+
+def user_case_simple_constructor(cub):
assert cub.__is_defined() == True
assert cub.__is_persistence_defined() == False
assert cub.persistence() == [(1, (0.0, 1.0)), (0, (0.0, float("inf")))]
@@ -83,6 +94,20 @@ def test_user_case_simple_constructor():
)
assert other_cub.persistence() == [(1, (0.0, 1.0)), (0, (0.0, float("inf")))]
+def test_user_case_simple_constructor_from_top_cells():
+ cub = CubicalComplex(
+ dimensions=[3, 3],
+ top_dimensional_cells=[float("inf"), 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0],
+ )
+ user_case_simple_constructor(cub)
+
+def test_user_case_simple_constructor_from_numpy_array():
+ cub = CubicalComplex(
+ top_dimensional_cells=np.array([[float("inf"), 0.0, 0.0],
+ [0.0, 1.0, 0.0],
+ [0.0, 0.0, 0.0]])
+ )
+ user_case_simple_constructor(cub)
def test_dimension_file_constructor():
# Create test file
@@ -96,3 +121,29 @@ def test_dimension_file_constructor():
assert cub.__is_persistence_defined() == True
assert cub.betti_numbers() == [1, 0, 0]
assert cub.persistent_betti_numbers(0, 1000) == [1, 0, 0]
+
+def test_connected_sublevel_sets():
+ array_cells = np.array([[3, 3], [2, 2], [4, 4]])
+ linear_cells = [3, 3, 2, 2, 4, 4]
+ dimensions = [2, 3]
+ periodic_dimensions = [False, False]
+ # with a numpy array version
+ cub = CubicalComplex(top_dimensional_cells = array_cells)
+ assert cub.persistence() == [(0, (2.0, float("inf")))]
+ assert cub.betti_numbers() == [1, 0, 0]
+ # with vector of dimensions
+ cub = CubicalComplex(dimensions = dimensions,
+ top_dimensional_cells = linear_cells)
+ assert cub.persistence() == [(0, (2.0, float("inf")))]
+ assert cub.betti_numbers() == [1, 0, 0]
+ # periodic with a numpy array version
+ cub = PeriodicCubicalComplex(top_dimensional_cells = array_cells,
+ periodic_dimensions = periodic_dimensions)
+ assert cub.persistence() == [(0, (2.0, float("inf")))]
+ assert cub.betti_numbers() == [1, 0, 0]
+ # periodic with vector of dimensions
+ cub = PeriodicCubicalComplex(dimensions = dimensions,
+ top_dimensional_cells = linear_cells,
+ periodic_dimensions = periodic_dimensions)
+ assert cub.persistence() == [(0, (2.0, float("inf")))]
+ assert cub.betti_numbers() == [1, 0, 0]
diff --git a/src/python/test/test_euclidean_witness_complex.py b/src/python/test/test_euclidean_witness_complex.py
index f5eae5fa..c18d2484 100755
--- a/src/python/test/test_euclidean_witness_complex.py
+++ b/src/python/test/test_euclidean_witness_complex.py
@@ -1,5 +1,3 @@
-import gudhi
-
""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
Author(s): Vincent Rouvreau
@@ -10,6 +8,8 @@ import gudhi
- YYYY/MM Author: Description of the modification
"""
+import gudhi
+
__author__ = "Vincent Rouvreau"
__copyright__ = "Copyright (C) 2016 Inria"
__license__ = "MIT"
diff --git a/src/python/test/test_reader_utils.py b/src/python/test/test_reader_utils.py
index 4c7b32c2..90da6651 100755
--- a/src/python/test/test_reader_utils.py
+++ b/src/python/test/test_reader_utils.py
@@ -1,6 +1,3 @@
-import gudhi
-import numpy as np
-
""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
Author(s): Vincent Rouvreau
@@ -11,6 +8,9 @@ import numpy as np
- YYYY/MM Author: Description of the modification
"""
+import gudhi
+import numpy as np
+
__author__ = "Vincent Rouvreau"
__copyright__ = "Copyright (C) 2017 Inria"
__license__ = "MIT"
diff --git a/src/python/test/test_representations.py b/src/python/test/test_representations.py
index 4ff65f98..dba7f952 100755
--- a/src/python/test/test_representations.py
+++ b/src/python/test/test_representations.py
@@ -1,11 +1,12 @@
import os
import sys
import matplotlib.pyplot as plt
-# Disable graphics for testing purposes
-plt.show = lambda:None
-here = os.path.dirname(os.path.realpath(__file__))
-sys.path.append(here + "/../example")
-import diagram_vectorizations_distances_kernels
-# pytest is unhappy if there are 0 tests
-def test_nothing():
+
+def test_representations_examples():
+ # Disable graphics for testing purposes
+ plt.show = lambda:None
+ here = os.path.dirname(os.path.realpath(__file__))
+ sys.path.append(here + "/../example")
+ import diagram_vectorizations_distances_kernels
+
return None
diff --git a/src/python/test/test_rips_complex.py b/src/python/test/test_rips_complex.py
index d55ae22f..b02a68e1 100755
--- a/src/python/test/test_rips_complex.py
+++ b/src/python/test/test_rips_complex.py
@@ -1,6 +1,3 @@
-from gudhi import RipsComplex
-from math import sqrt
-
""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
Author(s): Vincent Rouvreau
@@ -11,6 +8,9 @@ from math import sqrt
- YYYY/MM Author: Description of the modification
"""
+from gudhi import RipsComplex
+from math import sqrt
+
__author__ = "Vincent Rouvreau"
__copyright__ = "Copyright (C) 2016 Inria"
__license__ = "MIT"
diff --git a/src/python/test/test_simplex_tree.py b/src/python/test/test_simplex_tree.py
index 8d8971c1..1822c43b 100755
--- a/src/python/test/test_simplex_tree.py
+++ b/src/python/test/test_simplex_tree.py
@@ -1,5 +1,3 @@
-from gudhi import SimplexTree
-
""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
Author(s): Vincent Rouvreau
@@ -10,6 +8,8 @@ from gudhi import SimplexTree
- YYYY/MM Author: Description of the modification
"""
+from gudhi import SimplexTree
+
__author__ = "Vincent Rouvreau"
__copyright__ = "Copyright (C) 2016 Inria"
__license__ = "MIT"
diff --git a/src/python/test/test_subsampling.py b/src/python/test/test_subsampling.py
index c816e203..fe0985fa 100755
--- a/src/python/test/test_subsampling.py
+++ b/src/python/test/test_subsampling.py
@@ -1,5 +1,3 @@
-import gudhi
-
""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
Author(s): Vincent Rouvreau
@@ -10,6 +8,8 @@ import gudhi
- YYYY/MM Author: Description of the modification
"""
+import gudhi
+
__author__ = "Vincent Rouvreau"
__copyright__ = "Copyright (C) 2016 Inria"
__license__ = "MIT"
diff --git a/src/python/test/test_tangential_complex.py b/src/python/test/test_tangential_complex.py
index 0f828d8e..e650e99c 100755
--- a/src/python/test/test_tangential_complex.py
+++ b/src/python/test/test_tangential_complex.py
@@ -1,5 +1,3 @@
-from gudhi import TangentialComplex, SimplexTree
-
""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
Author(s): Vincent Rouvreau
@@ -10,6 +8,8 @@ from gudhi import TangentialComplex, SimplexTree
- YYYY/MM Author: Description of the modification
"""
+from gudhi import TangentialComplex, SimplexTree
+
__author__ = "Vincent Rouvreau"
__copyright__ = "Copyright (C) 2016 Inria"
__license__ = "MIT"
diff --git a/src/python/test/test_wasserstein_distance.py b/src/python/test/test_wasserstein_distance.py
index a6bf9901..6a6b217b 100755
--- a/src/python/test/test_wasserstein_distance.py
+++ b/src/python/test/test_wasserstein_distance.py
@@ -1,9 +1,6 @@
-from gudhi.wasserstein import wasserstein_distance
-import numpy as np
-
""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
- Author(s): Theo Lacombe
+ Author(s): Theo Lacombe, Marc Glisse
Copyright (C) 2019 Inria
@@ -11,38 +8,66 @@ import numpy as np
- YYYY/MM Author: Description of the modification
"""
+from gudhi.wasserstein import wasserstein_distance as pot
+from gudhi.hera import wasserstein_distance as hera
+import numpy as np
+import pytest
+
__author__ = "Theo Lacombe"
__copyright__ = "Copyright (C) 2019 Inria"
__license__ = "MIT"
-
-def test_basic_wasserstein():
+def _basic_wasserstein(wasserstein_distance, delta, test_infinity=True):
diag1 = np.array([[2.7, 3.7], [9.6, 14.0], [34.2, 34.974]])
diag2 = np.array([[2.8, 4.45], [9.5, 14.1]])
diag3 = np.array([[0, 2], [4, 6]])
diag4 = np.array([[0, 3], [4, 8]])
- emptydiag = np.array([[]])
+ emptydiag = np.array([])
+
+ # We just need to handle positive numbers here
+ def approx(x):
+ return pytest.approx(x, rel=delta)
+
+ assert wasserstein_distance(emptydiag, emptydiag, internal_p=2., order=1.) == 0.
+ assert wasserstein_distance(emptydiag, emptydiag, internal_p=np.inf, order=1.) == 0.
+ assert wasserstein_distance(emptydiag, emptydiag, internal_p=np.inf, order=2.) == 0.
+ assert wasserstein_distance(emptydiag, emptydiag, internal_p=2., order=2.) == 0.
+
+ assert wasserstein_distance(diag3, emptydiag, internal_p=np.inf, order=1.) == approx(2.)
+ assert wasserstein_distance(diag3, emptydiag, internal_p=1., order=1.) == approx(4.)
+
+    assert wasserstein_distance(diag4, emptydiag, internal_p=1., order=2.) == approx(5.) # thank you Pythagorean triples
+ assert wasserstein_distance(diag4, emptydiag, internal_p=np.inf, order=2.) == approx(2.5)
+ assert wasserstein_distance(diag4, emptydiag, internal_p=2., order=2.) == approx(3.5355339059327378)
+
+ assert wasserstein_distance(diag1, diag2, internal_p=2., order=1.) == approx(1.4453593023967701)
+ assert wasserstein_distance(diag1, diag2, internal_p=2.35, order=1.74) == approx(0.9772734057168739)
+
+ assert wasserstein_distance(diag1, emptydiag, internal_p=2.35, order=1.7863) == approx(3.141592214572228)
- assert wasserstein_distance(emptydiag, emptydiag, q=2., p=1.) == 0.
- assert wasserstein_distance(emptydiag, emptydiag, q=np.inf, p=1.) == 0.
- assert wasserstein_distance(emptydiag, emptydiag, q=np.inf, p=2.) == 0.
- assert wasserstein_distance(emptydiag, emptydiag, q=2., p=2.) == 0.
+ assert wasserstein_distance(diag3, diag4, internal_p=1., order=1.) == approx(3.)
+ assert wasserstein_distance(diag3, diag4, internal_p=np.inf, order=1.) == approx(3.) # no diag matching here
+ assert wasserstein_distance(diag3, diag4, internal_p=np.inf, order=2.) == approx(np.sqrt(5))
+ assert wasserstein_distance(diag3, diag4, internal_p=1., order=2.) == approx(np.sqrt(5))
+ assert wasserstein_distance(diag3, diag4, internal_p=4.5, order=2.) == approx(np.sqrt(5))
- assert wasserstein_distance(diag3, emptydiag, q=np.inf, p=1.) == 2.
- assert wasserstein_distance(diag3, emptydiag, q=1., p=1.) == 4.
+    if not test_infinity:
+ return
- assert wasserstein_distance(diag4, emptydiag, q=1., p=2.) == 5. # thank you Pythagorician triplets
- assert wasserstein_distance(diag4, emptydiag, q=np.inf, p=2.) == 2.5
- assert wasserstein_distance(diag4, emptydiag, q=2., p=2.) == 3.5355339059327378
+ diag5 = np.array([[0, 3], [4, np.inf]])
+ diag6 = np.array([[7, 8], [4, 6], [3, np.inf]])
- assert wasserstein_distance(diag1, diag2, q=2., p=1.) == 1.4453593023967701
- assert wasserstein_distance(diag1, diag2, q=2.35, p=1.74) == 0.9772734057168739
+ assert wasserstein_distance(diag4, diag5) == np.inf
+ assert wasserstein_distance(diag5, diag6, order=1, internal_p=np.inf) == approx(4.)
- assert wasserstein_distance(diag1, emptydiag, q=2.35, p=1.7863) == 3.141592214572228
+def hera_wrap(delta):
+ def fun(*kargs,**kwargs):
+ return hera(*kargs,**kwargs,delta=delta)
+ return fun
- assert wasserstein_distance(diag3, diag4, q=1., p=1.) == 3.
- assert wasserstein_distance(diag3, diag4, q=np.inf, p=1.) == 3. # no diag matching here
- assert wasserstein_distance(diag3, diag4, q=np.inf, p=2.) == np.sqrt(5)
- assert wasserstein_distance(diag3, diag4, q=1., p=2.) == np.sqrt(5)
- assert wasserstein_distance(diag3, diag4, q=4.5, p=2.) == np.sqrt(5)
+def test_wasserstein_distance_pot():
+ _basic_wasserstein(pot, 1e-15, test_infinity=False)
+def test_wasserstein_distance_hera():
+ _basic_wasserstein(hera_wrap(1e-12), 1e-12)
+ _basic_wasserstein(hera_wrap(.1), .1)
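A hedged sketch of the two backends exercised by this test: the POT-based implementation is exact, while the new hera binding accepts a relative-error bound delta; the diagrams and expected value are taken from the test above:

    import numpy as np
    from gudhi.wasserstein import wasserstein_distance as pot_wd
    from gudhi.hera import wasserstein_distance as hera_wd

    diag3 = np.array([[0., 2.], [4., 6.]])
    diag4 = np.array([[0., 3.], [4., 8.]])
    exact = pot_wd(diag3, diag4, order=1., internal_p=1.)                    # expected ~3.0
    approximate = hera_wd(diag3, diag4, order=1., internal_p=1., delta=0.01)
    print(exact, approximate)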
diff --git a/src/python/test/test_witness_complex.py b/src/python/test/test_witness_complex.py
index 36ced635..7baf18c9 100755
--- a/src/python/test/test_witness_complex.py
+++ b/src/python/test/test_witness_complex.py
@@ -1,5 +1,3 @@
-from gudhi import WitnessComplex, StrongWitnessComplex, SimplexTree
-
""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
Author(s): Vincent Rouvreau
@@ -10,6 +8,8 @@ from gudhi import WitnessComplex, StrongWitnessComplex, SimplexTree
- YYYY/MM Author: Description of the modification
"""
+from gudhi import WitnessComplex, StrongWitnessComplex, SimplexTree
+
__author__ = "Vincent Rouvreau"
__copyright__ = "Copyright (C) 2016 Inria"
__license__ = "MIT"