author     Marc Glisse <marc.glisse@inria.fr>    2022-11-16 09:46:14 +0100
committer  Marc Glisse <marc.glisse@inria.fr>    2022-11-16 09:46:14 +0100
commit     cd613b73b3a9181c1358e1b37d56029f46eb9c91
tree       de0ced04b3dcea2f6f439346c8a2ec0bc1bd66d2
parent     19412d57d281acfd2d14efd15764e45da837b87a
parent     7c064bb64135bd94417ec7a52eeb2bee0a115075
Merge branch 'master' into insert
Diffstat (limited to 'src')
-rw-r--r--  src/Alpha_complex/include/gudhi/Alpha_complex.h | 39
-rw-r--r--  src/Alpha_complex/test/Alpha_complex_dim3_unit_test.cpp | 117
-rw-r--r--  src/Alpha_complex/test/Alpha_complex_unit_test.cpp | 97
-rw-r--r--  src/Alpha_complex/test/CMakeLists.txt | 6
-rw-r--r--  src/Alpha_complex/utilities/alpha_complex_3d_persistence.cpp | 2
-rw-r--r--  src/Alpha_complex/utilities/alpha_complex_persistence.cpp | 2
-rw-r--r--  src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h | 16
-rw-r--r--  src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h | 16
-rw-r--r--  src/Bottleneck_distance/include/gudhi/Persistence_graph.h | 57
-rw-r--r--  src/Bottleneck_distance/test/bottleneck_unit_test.cpp | 78
-rw-r--r--  src/Cech_complex/utilities/CMakeLists.txt | 20
-rw-r--r--  src/Cech_complex/utilities/cech_persistence.cpp | 69
-rw-r--r--  src/Cech_complex/utilities/cechcomplex.md | 8
-rw-r--r--  src/Collapse/utilities/distance_matrix_edge_collapse_rips_persistence.cpp | 2
-rw-r--r--  src/Collapse/utilities/point_cloud_edge_collapse_rips_persistence.cpp | 2
-rw-r--r--  src/GudhUI/view/Viewer.cpp | 4
-rw-r--r--  src/Persistent_cohomology/example/persistence_from_file.cpp | 2
-rw-r--r--  src/Persistent_cohomology/example/rips_multifield_persistence.cpp | 2
-rw-r--r--  src/Persistent_cohomology/example/rips_persistence_step_by_step.cpp | 2
-rw-r--r--  src/Persistent_cohomology/example/rips_persistence_via_boundary_matrix.cpp | 2
-rw-r--r--  src/Rips_complex/utilities/rips_correlation_matrix_persistence.cpp | 2
-rw-r--r--  src/Rips_complex/utilities/rips_distance_matrix_persistence.cpp | 2
-rw-r--r--  src/Rips_complex/utilities/rips_persistence.cpp | 2
-rw-r--r--  src/Rips_complex/utilities/sparse_rips_persistence.cpp | 2
-rw-r--r--  src/Spatial_searching/example/example_spatial_searching.cpp | 4
-rw-r--r--  src/Spatial_searching/test/test_Kd_tree_search.cpp | 4
-rw-r--r--  src/Tangential_complex/benchmark/XML_exporter.h | 2
-rw-r--r--  src/Tangential_complex/include/gudhi/Tangential_complex.h | 10
-rw-r--r--  src/Witness_complex/utilities/strong_witness_persistence.cpp | 2
-rw-r--r--  src/Witness_complex/utilities/weak_witness_persistence.cpp | 2
-rw-r--r--  src/Witness_complex/utilities/witnesscomplex.md | 4
-rw-r--r--  src/cmake/modules/GUDHI_compilation_flags.cmake | 3
-rw-r--r--  src/common/doc/installation.h | 4
-rw-r--r--  src/python/CMakeLists.txt | 12
-rw-r--r--  src/python/doc/clustering.rst | 5
-rw-r--r--  src/python/doc/installation.rst | 8
-rw-r--r--  src/python/doc/point_cloud.rst | 5
-rw-r--r--  src/python/gudhi/off_utils.pyx (renamed from src/python/gudhi/off_reader.pyx) | 23
-rw-r--r--  src/python/gudhi/persistence_graphical_tools.py | 8
-rw-r--r--  src/python/gudhi/point_cloud/knn.py | 4
-rw-r--r--  src/python/gudhi/representations/vector_methods.py | 123
-rw-r--r--  src/python/gudhi/rips_complex.pyx | 13
-rw-r--r--  src/python/test/test_off.py | 21
-rwxr-xr-x  src/python/test/test_representations.py | 64
-rwxr-xr-x  src/python/test/test_simplex_generators.py | 2
-rwxr-xr-x  src/python/test/test_subsampling.py | 103
46 files changed, 613 insertions, 364 deletions
diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex.h b/src/Alpha_complex/include/gudhi/Alpha_complex.h
index aec8c1b1..a7372f19 100644
--- a/src/Alpha_complex/include/gudhi/Alpha_complex.h
+++ b/src/Alpha_complex/include/gudhi/Alpha_complex.h
@@ -17,8 +17,7 @@
// to construct Alpha_complex from a OFF file of points
#include <gudhi/Points_off_io.h>
-#include <stdlib.h>
-#include <math.h> // isnan, fmax
+#include <cmath> // isnan, fmax
#include <memory> // for std::unique_ptr
#include <cstddef> // for std::size_t
@@ -45,6 +44,7 @@
#include <utility> // std::pair
#include <stdexcept>
#include <numeric> // for std::iota
+#include <algorithm> // for std::sort
// Make compilation fail - required for external projects - https://github.com/GUDHI/gudhi-devel/issues/10
#if CGAL_VERSION_NR < 1041101000
@@ -101,13 +101,17 @@ template<typename D> struct Is_Epeck_D<CGAL::Epeck_d<D>> { static const bool val
*/
template<class Kernel = CGAL::Epeck_d<CGAL::Dynamic_dimension_tag>, bool Weighted = false>
class Alpha_complex {
+ private:
+ // Vertex_handle internal type (required by triangulation_ and vertices_).
+ using Internal_vertex_handle = std::ptrdiff_t;
+
public:
/** \brief Geometric traits class that provides the geometric types and predicates needed by the triangulations.*/
using Geom_traits = std::conditional_t<Weighted, CGAL::Regular_triangulation_traits_adapter<Kernel>, Kernel>;
// Add an int in TDS to save point index in the structure
using TDS = CGAL::Triangulation_data_structure<typename Geom_traits::Dimension,
- CGAL::Triangulation_vertex<Geom_traits, std::ptrdiff_t>,
+ CGAL::Triangulation_vertex<Geom_traits, Internal_vertex_handle>,
CGAL::Triangulation_full_cell<Geom_traits> >;
/** \brief A (Weighted or not) Delaunay triangulation of a set of points in \f$ \mathbb{R}^D\f$.*/
@@ -132,9 +136,6 @@ class Alpha_complex {
// Vertex_iterator type from CGAL.
using CGAL_vertex_iterator = typename Triangulation::Vertex_iterator;
- // size_type type from CGAL.
- using size_type = typename Triangulation::size_type;
-
// Structure to switch from simplex tree vertex handle to CGAL vertex iterator.
using Vector_vertex_iterator = std::vector< CGAL_vertex_iterator >;
@@ -146,6 +147,10 @@ class Alpha_complex {
std::unique_ptr<Triangulation> triangulation_;
/** \brief Kernel for triangulation_ functions access.*/
A_kernel_d kernel_;
+ /** \brief Vertices to be inserted first by the create_complex method to avoid quadratic complexity.
+ * It isn't just [0, n) if some points have multiplicity (only one copy appears in the complex).
+ */
+ std::vector<Internal_vertex_handle> vertices_;
/** \brief Cache for geometric constructions: circumcenter and squared radius of a simplex.*/
std::vector<Sphere> cache_, old_cache_;
@@ -257,11 +262,11 @@ class Alpha_complex {
std::vector<Point_d> point_cloud(first, last);
// Creates a vector {0, 1, ..., N-1}
- std::vector<std::ptrdiff_t> indices(boost::counting_iterator<std::ptrdiff_t>(0),
- boost::counting_iterator<std::ptrdiff_t>(point_cloud.size()));
+ std::vector<Internal_vertex_handle> indices(boost::counting_iterator<Internal_vertex_handle>(0),
+ boost::counting_iterator<Internal_vertex_handle>(point_cloud.size()));
using Point_property_map = boost::iterator_property_map<typename std::vector<Point_d>::iterator,
- CGAL::Identity_property_map<std::ptrdiff_t>>;
+ CGAL::Identity_property_map<Internal_vertex_handle>>;
using Search_traits_d = CGAL::Spatial_sort_traits_adapter_d<Geom_traits, Point_property_map>;
CGAL::spatial_sort(indices.begin(), indices.end(), Search_traits_d(std::begin(point_cloud)));
@@ -279,6 +284,9 @@ class Alpha_complex {
// structure to retrieve CGAL points from vertex handle - one vertex handle per point.
// Needs to be constructed before as vertex handles arrives in no particular order.
vertex_handle_to_iterator_.resize(point_cloud.size());
+ // List of sorted unique vertices in the triangulation. We take advantage of the existing loop to construct it
+ // Vertices list avoids quadratic complexity with the Simplex_tree. We should not fill it up with Toplex_map e.g.
+ vertices_.reserve(triangulation_->number_of_vertices());
// Loop on triangulation vertices list
for (CGAL_vertex_iterator vit = triangulation_->vertices_begin(); vit != triangulation_->vertices_end(); ++vit) {
if (!triangulation_->is_infinite(*vit)) {
@@ -286,8 +294,10 @@ class Alpha_complex {
std::clog << "Vertex insertion - " << vit->data() << " -> " << vit->point() << std::endl;
#endif // DEBUG_TRACES
vertex_handle_to_iterator_[vit->data()] = vit;
+ vertices_.push_back(vit->data());
}
}
+ std::sort(vertices_.begin(), vertices_.end());
// --------------------------------------------------------------------------------------------
}
}
@@ -384,12 +394,21 @@ class Alpha_complex {
// --------------------------------------------------------------------------------------------
// Simplex_tree construction from loop on triangulation finite full cells list
if (num_vertices() > 0) {
+ std::vector<Vertex_handle> one_vertex(1);
+ for (auto vertex : vertices_) {
+#ifdef DEBUG_TRACES
+ std::clog << "SimplicialComplex insertion " << vertex << std::endl;
+#endif // DEBUG_TRACES
+ one_vertex[0] = vertex;
+ complex.insert_simplex_and_subfaces(one_vertex, std::numeric_limits<double>::quiet_NaN());
+ }
+
for (auto cit = triangulation_->finite_full_cells_begin();
cit != triangulation_->finite_full_cells_end();
++cit) {
Vector_vertex vertexVector;
#ifdef DEBUG_TRACES
- std::clog << "Simplex_tree insertion ";
+ std::clog << "SimplicialComplex insertion ";
#endif // DEBUG_TRACES
for (auto vit = cit->vertices_begin(); vit != cit->vertices_end(); ++vit) {
if (*vit != nullptr) {
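
The comments in this hunk explain the new vertices_ member: inserting every unique vertex, in sorted order, before the full cells avoids quadratic complexity in the Simplex_tree. A minimal sketch of that insertion order with toy simplices (not the Alpha_complex internals):

#include <gudhi/Simplex_tree.h>

#include <limits>
#include <vector>

// Vertices go in first with a NaN placeholder filtration (as in the patch);
// higher-dimensional cells follow, and faces that already exist are kept.
int main() {
  Gudhi::Simplex_tree<> stree;
  const double placeholder = std::numeric_limits<double>::quiet_NaN();
  for (int vertex : {0, 1, 2, 3})
    stree.insert_simplex_and_subfaces(std::vector<int>{vertex}, placeholder);
  // Full cells come afterwards.
  stree.insert_simplex_and_subfaces(std::vector<int>{0, 1, 2}, 0.5);
  stree.insert_simplex_and_subfaces(std::vector<int>{1, 2, 3}, 0.8);
  return 0;
}
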
diff --git a/src/Alpha_complex/test/Alpha_complex_dim3_unit_test.cpp b/src/Alpha_complex/test/Alpha_complex_dim3_unit_test.cpp
new file mode 100644
index 00000000..e7c261f1
--- /dev/null
+++ b/src/Alpha_complex/test/Alpha_complex_dim3_unit_test.cpp
@@ -0,0 +1,117 @@
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2015 Inria
+ *
+ * Modification(s):
+ * - YYYY/MM Author: Description of the modification
+ */
+
+#define BOOST_TEST_DYN_LINK
+#define BOOST_TEST_MODULE "alpha_complex_dim3"
+#include <boost/test/unit_test.hpp>
+#include <boost/mpl/list.hpp>
+
+#include <CGAL/Epick_d.h>
+#include <CGAL/Epeck_d.h>
+
+#include <stdexcept> // std::out_of_range
+#include <string>
+#include <vector>
+
+#include <gudhi/Alpha_complex.h>
+#include <gudhi/Simplex_tree.h>
+
+// Use dynamic_dimension_tag for the user to be able to set dimension
+typedef CGAL::Epeck_d< CGAL::Dynamic_dimension_tag > Exact_kernel_d;
+// Use static dimension_tag for the user not to be able to set dimension
+typedef CGAL::Epeck_d< CGAL::Dimension_tag<3> > Exact_kernel_s;
+// Use dynamic_dimension_tag for the user to be able to set dimension
+typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Inexact_kernel_d;
+// Use static dimension_tag for the user not to be able to set dimension
+typedef CGAL::Epick_d< CGAL::Dimension_tag<3> > Inexact_kernel_s;
+// The triangulation uses the default instantiation of the TriangulationDataStructure template parameter
+
+typedef boost::mpl::list<Exact_kernel_d, Exact_kernel_s, Inexact_kernel_d, Inexact_kernel_s> list_of_kernel_variants;
+
+BOOST_AUTO_TEST_CASE_TEMPLATE(Alpha_complex_from_OFF_file, TestedKernel, list_of_kernel_variants) {
+ // ----------------------------------------------------------------------------
+ //
+ // Init of an alpha-complex from a OFF file
+ //
+ // ----------------------------------------------------------------------------
+ std::string off_file_name("alphacomplexdoc.off");
+ double max_alpha_square_value = 60.0;
+ std::clog << "========== OFF FILE NAME = " << off_file_name << " - alpha²=" <<
+ max_alpha_square_value << "==========" << std::endl;
+
+ Gudhi::alpha_complex::Alpha_complex<TestedKernel> alpha_complex_from_file(off_file_name);
+
+ Gudhi::Simplex_tree<> simplex_tree_60;
+ BOOST_CHECK(alpha_complex_from_file.create_complex(simplex_tree_60, max_alpha_square_value));
+
+ std::clog << "alpha_complex_from_file.num_vertices()=" << alpha_complex_from_file.num_vertices() << std::endl;
+ BOOST_CHECK(alpha_complex_from_file.num_vertices() == 7);
+
+ std::clog << "simplex_tree_60.dimension()=" << simplex_tree_60.dimension() << std::endl;
+ BOOST_CHECK(simplex_tree_60.dimension() == 2);
+
+ std::clog << "simplex_tree_60.num_vertices()=" << simplex_tree_60.num_vertices() << std::endl;
+ BOOST_CHECK(simplex_tree_60.num_vertices() == 7);
+
+ std::clog << "simplex_tree_60.num_simplices()=" << simplex_tree_60.num_simplices() << std::endl;
+ BOOST_CHECK(simplex_tree_60.num_simplices() == 25);
+
+ max_alpha_square_value = 59.0;
+ std::clog << "========== OFF FILE NAME = " << off_file_name << " - alpha²=" <<
+ max_alpha_square_value << "==========" << std::endl;
+
+ Gudhi::Simplex_tree<> simplex_tree_59;
+ BOOST_CHECK(alpha_complex_from_file.create_complex(simplex_tree_59, max_alpha_square_value));
+
+ std::clog << "alpha_complex_from_file.num_vertices()=" << alpha_complex_from_file.num_vertices() << std::endl;
+ BOOST_CHECK(alpha_complex_from_file.num_vertices() == 7);
+
+ std::clog << "simplex_tree_59.dimension()=" << simplex_tree_59.dimension() << std::endl;
+ BOOST_CHECK(simplex_tree_59.dimension() == 2);
+
+ std::clog << "simplex_tree_59.num_vertices()=" << simplex_tree_59.num_vertices() << std::endl;
+ BOOST_CHECK(simplex_tree_59.num_vertices() == 7);
+
+ std::clog << "simplex_tree_59.num_simplices()=" << simplex_tree_59.num_simplices() << std::endl;
+ BOOST_CHECK(simplex_tree_59.num_simplices() == 23);
+}
+
+
+BOOST_AUTO_TEST_CASE_TEMPLATE(Alpha_complex_from_empty_points, TestedKernel, list_of_kernel_variants) {
+ std::clog << "========== Alpha_complex_from_empty_points ==========" << std::endl;
+
+ // ----------------------------------------------------------------------------
+ // Init of an empty list of points
+ // ----------------------------------------------------------------------------
+ std::vector<typename TestedKernel::Point_d> points;
+
+ // ----------------------------------------------------------------------------
+ // Init of an alpha complex from the list of points
+ // ----------------------------------------------------------------------------
+ Gudhi::alpha_complex::Alpha_complex<TestedKernel> alpha_complex_from_points(points);
+
+ std::clog << "alpha_complex_from_points.num_vertices()=" << alpha_complex_from_points.num_vertices() << std::endl;
+ BOOST_CHECK(alpha_complex_from_points.num_vertices() == points.size());
+
+ // Test to the limit
+ BOOST_CHECK_THROW (alpha_complex_from_points.get_point(0), std::out_of_range);
+
+ Gudhi::Simplex_tree<> simplex_tree;
+ BOOST_CHECK(!alpha_complex_from_points.create_complex(simplex_tree));
+
+ std::clog << "simplex_tree.num_simplices()=" << simplex_tree.num_simplices() << std::endl;
+ BOOST_CHECK(simplex_tree.num_simplices() == 0);
+
+ std::clog << "simplex_tree.dimension()=" << simplex_tree.dimension() << std::endl;
+ BOOST_CHECK(simplex_tree.dimension() == -1);
+
+ std::clog << "simplex_tree.num_vertices()=" << simplex_tree.num_vertices() << std::endl;
+ BOOST_CHECK(simplex_tree.num_vertices() == points.size());
+}
diff --git a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp
index f74ad217..b474917f 100644
--- a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp
+++ b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp
@@ -13,81 +13,17 @@
#include <boost/test/unit_test.hpp>
#include <boost/mpl/list.hpp>
-#include <CGAL/Delaunay_triangulation.h>
#include <CGAL/Epick_d.h>
#include <CGAL/Epeck_d.h>
-#include <cmath> // float comparison
-#include <limits>
+#include <stdexcept> // std::out_of_range
#include <string>
#include <vector>
#include <gudhi/Alpha_complex.h>
-// to construct a simplex_tree from Delaunay_triangulation
-#include <gudhi/graph_simplicial_complex.h>
#include <gudhi/Simplex_tree.h>
#include <gudhi/Unitary_tests_utils.h>
-// Use dynamic_dimension_tag for the user to be able to set dimension
-typedef CGAL::Epeck_d< CGAL::Dynamic_dimension_tag > Exact_kernel_d;
-// Use static dimension_tag for the user not to be able to set dimension
-typedef CGAL::Epeck_d< CGAL::Dimension_tag<3> > Exact_kernel_s;
-// Use dynamic_dimension_tag for the user to be able to set dimension
-typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Inexact_kernel_d;
-// Use static dimension_tag for the user not to be able to set dimension
-typedef CGAL::Epick_d< CGAL::Dimension_tag<3> > Inexact_kernel_s;
-// The triangulation uses the default instantiation of the TriangulationDataStructure template parameter
-
-typedef boost::mpl::list<Exact_kernel_d, Exact_kernel_s, Inexact_kernel_d, Inexact_kernel_s> list_of_kernel_variants;
-
-BOOST_AUTO_TEST_CASE_TEMPLATE(Alpha_complex_from_OFF_file, TestedKernel, list_of_kernel_variants) {
- // ----------------------------------------------------------------------------
- //
- // Init of an alpha-complex from a OFF file
- //
- // ----------------------------------------------------------------------------
- std::string off_file_name("alphacomplexdoc.off");
- double max_alpha_square_value = 60.0;
- std::clog << "========== OFF FILE NAME = " << off_file_name << " - alpha²=" <<
- max_alpha_square_value << "==========" << std::endl;
-
- Gudhi::alpha_complex::Alpha_complex<TestedKernel> alpha_complex_from_file(off_file_name);
-
- Gudhi::Simplex_tree<> simplex_tree_60;
- BOOST_CHECK(alpha_complex_from_file.create_complex(simplex_tree_60, max_alpha_square_value));
-
- std::clog << "alpha_complex_from_file.num_vertices()=" << alpha_complex_from_file.num_vertices() << std::endl;
- BOOST_CHECK(alpha_complex_from_file.num_vertices() == 7);
-
- std::clog << "simplex_tree_60.dimension()=" << simplex_tree_60.dimension() << std::endl;
- BOOST_CHECK(simplex_tree_60.dimension() == 2);
-
- std::clog << "simplex_tree_60.num_vertices()=" << simplex_tree_60.num_vertices() << std::endl;
- BOOST_CHECK(simplex_tree_60.num_vertices() == 7);
-
- std::clog << "simplex_tree_60.num_simplices()=" << simplex_tree_60.num_simplices() << std::endl;
- BOOST_CHECK(simplex_tree_60.num_simplices() == 25);
-
- max_alpha_square_value = 59.0;
- std::clog << "========== OFF FILE NAME = " << off_file_name << " - alpha²=" <<
- max_alpha_square_value << "==========" << std::endl;
-
- Gudhi::Simplex_tree<> simplex_tree_59;
- BOOST_CHECK(alpha_complex_from_file.create_complex(simplex_tree_59, max_alpha_square_value));
-
- std::clog << "alpha_complex_from_file.num_vertices()=" << alpha_complex_from_file.num_vertices() << std::endl;
- BOOST_CHECK(alpha_complex_from_file.num_vertices() == 7);
-
- std::clog << "simplex_tree_59.dimension()=" << simplex_tree_59.dimension() << std::endl;
- BOOST_CHECK(simplex_tree_59.dimension() == 2);
-
- std::clog << "simplex_tree_59.num_vertices()=" << simplex_tree_59.num_vertices() << std::endl;
- BOOST_CHECK(simplex_tree_59.num_vertices() == 7);
-
- std::clog << "simplex_tree_59.num_simplices()=" << simplex_tree_59.num_simplices() << std::endl;
- BOOST_CHECK(simplex_tree_59.num_simplices() == 23);
-}
-
// Use static dimension_tag for the user not to be able to set dimension
typedef CGAL::Epeck_d< CGAL::Dimension_tag<4> > Kernel_4;
typedef Kernel_4::Point_d Point_4;
@@ -236,37 +172,6 @@ BOOST_AUTO_TEST_CASE(Alpha_complex_from_points) {
}
-BOOST_AUTO_TEST_CASE_TEMPLATE(Alpha_complex_from_empty_points, TestedKernel, list_of_kernel_variants) {
- std::clog << "========== Alpha_complex_from_empty_points ==========" << std::endl;
-
- // ----------------------------------------------------------------------------
- // Init of an empty list of points
- // ----------------------------------------------------------------------------
- std::vector<typename TestedKernel::Point_d> points;
-
- // ----------------------------------------------------------------------------
- // Init of an alpha complex from the list of points
- // ----------------------------------------------------------------------------
- Gudhi::alpha_complex::Alpha_complex<TestedKernel> alpha_complex_from_points(points);
-
- std::clog << "alpha_complex_from_points.num_vertices()=" << alpha_complex_from_points.num_vertices() << std::endl;
- BOOST_CHECK(alpha_complex_from_points.num_vertices() == points.size());
-
- // Test to the limit
- BOOST_CHECK_THROW (alpha_complex_from_points.get_point(0), std::out_of_range);
-
- Gudhi::Simplex_tree<> simplex_tree;
- BOOST_CHECK(!alpha_complex_from_points.create_complex(simplex_tree));
-
- std::clog << "simplex_tree.num_simplices()=" << simplex_tree.num_simplices() << std::endl;
- BOOST_CHECK(simplex_tree.num_simplices() == 0);
-
- std::clog << "simplex_tree.dimension()=" << simplex_tree.dimension() << std::endl;
- BOOST_CHECK(simplex_tree.dimension() == -1);
-
- std::clog << "simplex_tree.num_vertices()=" << simplex_tree.num_vertices() << std::endl;
- BOOST_CHECK(simplex_tree.num_vertices() == points.size());
-}
using Inexact_kernel_2 = CGAL::Epick_d< CGAL::Dimension_tag<2> >;
using Exact_kernel_2 = CGAL::Epeck_d< CGAL::Dimension_tag<2> >;
diff --git a/src/Alpha_complex/test/CMakeLists.txt b/src/Alpha_complex/test/CMakeLists.txt
index 0595ca92..dd2c235f 100644
--- a/src/Alpha_complex/test/CMakeLists.txt
+++ b/src/Alpha_complex/test/CMakeLists.txt
@@ -8,14 +8,18 @@ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
add_executable ( Alpha_complex_test_unit Alpha_complex_unit_test.cpp )
target_link_libraries(Alpha_complex_test_unit ${CGAL_LIBRARY})
+ add_executable ( Alpha_complex_dim3_test_unit Alpha_complex_dim3_unit_test.cpp )
+ target_link_libraries(Alpha_complex_dim3_test_unit ${CGAL_LIBRARY})
add_executable ( Delaunay_complex_test_unit Delaunay_complex_unit_test.cpp )
target_link_libraries(Delaunay_complex_test_unit ${CGAL_LIBRARY})
if (TBB_FOUND)
target_link_libraries(Alpha_complex_test_unit ${TBB_LIBRARIES})
+ target_link_libraries(Alpha_complex_dim3_test_unit ${TBB_LIBRARIES})
target_link_libraries(Delaunay_complex_test_unit ${TBB_LIBRARIES})
endif()
gudhi_add_boost_test(Alpha_complex_test_unit)
+ gudhi_add_boost_test(Alpha_complex_dim3_test_unit)
gudhi_add_boost_test(Delaunay_complex_test_unit)
endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
@@ -73,4 +77,4 @@ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 5.1.0)
endif()
gudhi_add_boost_test(Zero_weighted_alpha_complex_test_unit)
-endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 5.1.0) \ No newline at end of file
+endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 5.1.0)
diff --git a/src/Alpha_complex/utilities/alpha_complex_3d_persistence.cpp b/src/Alpha_complex/utilities/alpha_complex_3d_persistence.cpp
index 91899040..e65d8c6f 100644
--- a/src/Alpha_complex/utilities/alpha_complex_3d_persistence.cpp
+++ b/src/Alpha_complex/utilities/alpha_complex_3d_persistence.cpp
@@ -263,7 +263,7 @@ void program_options(int argc, char *argv[], std::string &off_file_points, bool
"cuboid-file,c", po::value<std::string>(&cuboid_file),
"Name of file describing the periodic domain. Format is:\n min_hx min_hy min_hz\n max_hx max_hy max_hz")(
"output-file,o", po::value<std::string>(&output_file_diag)->default_value(std::string()),
- "Name of file in which the persistence diagram is written. Default print in std::clog")(
+ "Name of file in which the persistence diagram is written. Default print in standard output")(
"max-alpha-square-value,r",
po::value<Filtration_value>(&alpha_square_max_value)
->default_value(std::numeric_limits<Filtration_value>::infinity()),
diff --git a/src/Alpha_complex/utilities/alpha_complex_persistence.cpp b/src/Alpha_complex/utilities/alpha_complex_persistence.cpp
index e86b34e2..29edbd8e 100644
--- a/src/Alpha_complex/utilities/alpha_complex_persistence.cpp
+++ b/src/Alpha_complex/utilities/alpha_complex_persistence.cpp
@@ -163,7 +163,7 @@ void program_options(int argc, char *argv[], std::string &off_file_points, bool
"weight-file,w", po::value<std::string>(&weight_file)->default_value(std::string()),
"Name of file containing a point weights. Format is one weight per line:\n W1\n ...\n Wn ")(
"output-file,o", po::value<std::string>(&output_file_diag)->default_value(std::string()),
- "Name of file in which the persistence diagram is written. Default print in std::clog")(
+ "Name of file in which the persistence diagram is written. Default print in standard output")(
"max-alpha-square-value,r", po::value<Filtration_value>(&alpha_square_max_value)
->default_value(std::numeric_limits<Filtration_value>::infinity()),
"Maximal alpha square value for the Alpha complex construction.")(
diff --git a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h
index 4a6af3a4..29fabc6c 100644
--- a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h
+++ b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h
@@ -241,10 +241,16 @@ class Bitmap_cubical_complex : public T {
**/
class Filtration_simplex_range;
- class Filtration_simplex_iterator : std::iterator<std::input_iterator_tag, Simplex_handle> {
+ class Filtration_simplex_iterator {
// Iterator over all simplices of the complex in the order of the indexing scheme.
// 'value_type' must be 'Simplex_handle'.
public:
+ typedef std::input_iterator_tag iterator_category;
+ typedef Simplex_handle value_type;
+ typedef std::ptrdiff_t difference_type;
+ typedef value_type* pointer;
+ typedef value_type reference;
+
Filtration_simplex_iterator(Bitmap_cubical_complex* b) : b(b), position(0) {}
Filtration_simplex_iterator() : b(NULL), position(0) {}
@@ -386,10 +392,16 @@ class Bitmap_cubical_complex : public T {
**/
class Skeleton_simplex_range;
- class Skeleton_simplex_iterator : std::iterator<std::input_iterator_tag, Simplex_handle> {
+ class Skeleton_simplex_iterator {
// Iterator over all simplices of the complex in the order of the indexing scheme.
// 'value_type' must be 'Simplex_handle'.
public:
+ typedef std::input_iterator_tag iterator_category;
+ typedef Simplex_handle value_type;
+ typedef std::ptrdiff_t difference_type;
+ typedef value_type* pointer;
+ typedef value_type reference;
+
Skeleton_simplex_iterator(Bitmap_cubical_complex* b, std::size_t d) : b(b), dimension(d) {
if (globalDbg) {
std::clog << "Skeleton_simplex_iterator ( Bitmap_cubical_complex* b , std::size_t d )\n";
diff --git a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h
index bafe7981..2bf62f9b 100644
--- a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h
+++ b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h
@@ -251,8 +251,14 @@ class Bitmap_cubical_complex_base {
* @brief Iterator through all cells in the complex (in order they appear in the structure -- i.e.
* in lexicographical order).
**/
- class All_cells_iterator : std::iterator<std::input_iterator_tag, T> {
+ class All_cells_iterator {
public:
+ typedef std::input_iterator_tag iterator_category;
+ typedef std::size_t value_type;
+ typedef std::ptrdiff_t difference_type;
+ typedef value_type* pointer;
+ typedef value_type reference;
+
All_cells_iterator() { this->counter = 0; }
All_cells_iterator operator++() {
@@ -355,8 +361,14 @@ class Bitmap_cubical_complex_base {
* @brief Iterator through top dimensional cells of the complex. The cells appear in order they are stored
* in the structure (i.e. in lexicographical order)
**/
- class Top_dimensional_cells_iterator : std::iterator<std::input_iterator_tag, T> {
+ class Top_dimensional_cells_iterator {
public:
+ typedef std::input_iterator_tag iterator_category;
+ typedef std::size_t value_type;
+ typedef std::ptrdiff_t difference_type;
+ typedef value_type* pointer;
+ typedef value_type reference;
+
Top_dimensional_cells_iterator(Bitmap_cubical_complex_base& b) : b(b) {
this->counter = std::vector<std::size_t>(b.dimension());
// std::fill( this->counter.begin() , this->counter.end() , 0 );
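
Both cubical-complex headers drop inheritance from std::iterator, which is deprecated since C++17, and spell out the five member typedefs instead. A minimal sketch of the pattern, using an illustrative counting iterator rather than a GUDHI type:

#include <cstddef>
#include <iterator>

// The typedefs below are what std::iterator used to provide; declaring them
// directly keeps the class a valid input iterator under C++17.
class Counting_iterator {
 public:
  typedef std::input_iterator_tag iterator_category;
  typedef std::size_t value_type;
  typedef std::ptrdiff_t difference_type;
  typedef value_type* pointer;
  typedef value_type reference;

  explicit Counting_iterator(std::size_t position = 0) : position_(position) {}
  reference operator*() const { return position_; }
  Counting_iterator& operator++() { ++position_; return *this; }
  Counting_iterator operator++(int) { Counting_iterator tmp(*this); ++position_; return tmp; }
  bool operator==(const Counting_iterator& other) const { return position_ == other.position_; }
  bool operator!=(const Counting_iterator& other) const { return position_ != other.position_; }

 private:
  std::size_t position_;
};
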
diff --git a/src/Bottleneck_distance/include/gudhi/Persistence_graph.h b/src/Bottleneck_distance/include/gudhi/Persistence_graph.h
index 33f03b9c..c1e10f8e 100644
--- a/src/Bottleneck_distance/include/gudhi/Persistence_graph.h
+++ b/src/Bottleneck_distance/include/gudhi/Persistence_graph.h
@@ -20,6 +20,7 @@
#include <vector>
#include <algorithm>
#include <limits> // for numeric_limits
+#include <cmath>
namespace Gudhi {
@@ -31,7 +32,7 @@ namespace persistence_diagram {
* \ingroup bottleneck_distance
*/
class Persistence_graph {
- public:
+public:
/** \internal \brief Constructor taking 2 PersistenceDiagrams (concept) as parameters. */
template<typename Persistence_diagram1, typename Persistence_diagram2>
Persistence_graph(const Persistence_diagram1& diag1, const Persistence_diagram2& diag2, double e);
@@ -58,7 +59,7 @@ class Persistence_graph {
/** \internal \brief Returns the corresponding internal point */
Internal_point get_v_point(int v_point_index) const;
- private:
+private:
std::vector<Internal_point> u;
std::vector<Internal_point> v;
double b_alive;
@@ -67,30 +68,54 @@ class Persistence_graph {
template<typename Persistence_diagram1, typename Persistence_diagram2>
Persistence_graph::Persistence_graph(const Persistence_diagram1 &diag1,
const Persistence_diagram2 &diag2, double e)
- : u(), v(), b_alive(0.) {
+ : u(), v(), b_alive(0.) {
std::vector<double> u_alive;
std::vector<double> v_alive;
+ std::vector<double> u_nalive;
+ std::vector<double> v_nalive;
+ int u_inf = 0;
+ int v_inf = 0;
+ double inf = std::numeric_limits<double>::infinity();
+ double neginf = -inf;
+
for (auto it = std::begin(diag1); it != std::end(diag1); ++it) {
- if (std::get<1>(*it) == std::numeric_limits<double>::infinity())
- u_alive.push_back(std::get<0>(*it));
- else if (std::get<1>(*it) - std::get<0>(*it) > e)
- u.push_back(Internal_point(std::get<0>(*it), std::get<1>(*it), u.size()));
+ if (std::get<0>(*it) != inf && std::get<1>(*it) != neginf){
+ if (std::get<0>(*it) == neginf && std::get<1>(*it) == inf)
+ u_inf++;
+ else if (std::get<0>(*it) == neginf)
+ u_nalive.push_back(std::get<1>(*it));
+ else if (std::get<1>(*it) == inf)
+ u_alive.push_back(std::get<0>(*it));
+ else if (std::get<1>(*it) - std::get<0>(*it) > e)
+ u.push_back(Internal_point(std::get<0>(*it), std::get<1>(*it), u.size()));
+ }
}
for (auto it = std::begin(diag2); it != std::end(diag2); ++it) {
- if (std::get<1>(*it) == std::numeric_limits<double>::infinity())
- v_alive.push_back(std::get<0>(*it));
- else if (std::get<1>(*it) - std::get<0>(*it) > e)
- v.push_back(Internal_point(std::get<0>(*it), std::get<1>(*it), v.size()));
+ if (std::get<0>(*it) != inf && std::get<1>(*it) != neginf){
+ if (std::get<0>(*it) == neginf && std::get<1>(*it) == inf)
+ v_inf++;
+ else if (std::get<0>(*it) == neginf)
+ v_nalive.push_back(std::get<1>(*it));
+ else if (std::get<1>(*it) == inf)
+ v_alive.push_back(std::get<0>(*it));
+ else if (std::get<1>(*it) - std::get<0>(*it) > e)
+ v.push_back(Internal_point(std::get<0>(*it), std::get<1>(*it), v.size()));
+ }
}
if (u.size() < v.size())
swap(u, v);
- std::sort(u_alive.begin(), u_alive.end());
- std::sort(v_alive.begin(), v_alive.end());
- if (u_alive.size() != v_alive.size()) {
+
+ if (u_alive.size() != v_alive.size() || u_nalive.size() != v_nalive.size() || u_inf != v_inf) {
b_alive = std::numeric_limits<double>::infinity();
} else {
+ std::sort(u_alive.begin(), u_alive.end());
+ std::sort(v_alive.begin(), v_alive.end());
+ std::sort(u_nalive.begin(), u_nalive.end());
+ std::sort(v_nalive.begin(), v_nalive.end());
for (auto it_u = u_alive.cbegin(), it_v = v_alive.cbegin(); it_u != u_alive.cend(); ++it_u, ++it_v)
b_alive = (std::max)(b_alive, std::fabs(*it_u - *it_v));
+ for (auto it_u = u_nalive.cbegin(), it_v = v_nalive.cbegin(); it_u != u_nalive.cend(); ++it_u, ++it_v)
+ b_alive = (std::max)(b_alive, std::fabs(*it_u - *it_v));
}
}
@@ -104,12 +129,12 @@ inline bool Persistence_graph::on_the_v_diagonal(int v_point_index) const {
inline int Persistence_graph::corresponding_point_in_u(int v_point_index) const {
return on_the_v_diagonal(v_point_index) ?
- v_point_index - static_cast<int> (v.size()) : v_point_index + static_cast<int> (u.size());
+ v_point_index - static_cast<int> (v.size()) : v_point_index + static_cast<int> (u.size());
}
inline int Persistence_graph::corresponding_point_in_v(int u_point_index) const {
return on_the_u_diagonal(u_point_index) ?
- u_point_index - static_cast<int> (u.size()) : u_point_index + static_cast<int> (v.size());
+ u_point_index - static_cast<int> (u.size()) : u_point_index + static_cast<int> (v.size());
}
inline double Persistence_graph::distance(int u_point_index, int v_point_index) const {
diff --git a/src/Bottleneck_distance/test/bottleneck_unit_test.cpp b/src/Bottleneck_distance/test/bottleneck_unit_test.cpp
index 44141baa..9872f20c 100644
--- a/src/Bottleneck_distance/test/bottleneck_unit_test.cpp
+++ b/src/Bottleneck_distance/test/bottleneck_unit_test.cpp
@@ -159,3 +159,81 @@ BOOST_AUTO_TEST_CASE(global) {
BOOST_CHECK(bottleneck_distance(empty, empty) == 0);
BOOST_CHECK(bottleneck_distance(empty, one) == 1);
}
+
+BOOST_AUTO_TEST_CASE(neg_global) {
+ std::uniform_real_distribution<double> unif1(0., upper_bound);
+ std::uniform_real_distribution<double> unif2(upper_bound / 10000., upper_bound / 100.);
+ std::default_random_engine re;
+ std::vector< std::pair<double, double> > v1, v2;
+ for (int i = 0; i < n1; i++) {
+ double a = std::log(unif1(re));
+ double b = std::log(unif1(re));
+ double x = std::log(unif2(re));
+ double y = std::log(unif2(re));
+ v1.emplace_back(std::min(a, b), std::max(a, b));
+ v2.emplace_back(std::min(a, b) + std::min(x, y), std::max(a, b) + std::max(x, y));
+ if (i % 5 == 0)
+ v1.emplace_back(std::min(a, b), std::min(a, b) + x);
+ if (i % 3 == 0)
+ v2.emplace_back(std::max(a, b), std::max(a, b) + y);
+ }
+ BOOST_CHECK(bottleneck_distance(v1, v2, 0.) <= upper_bound / 100.);
+ BOOST_CHECK(bottleneck_distance(v1, v2, upper_bound / 10000.) <= upper_bound / 100. + upper_bound / 10000.);
+ BOOST_CHECK(std::abs(bottleneck_distance(v1, v2, 0.) - bottleneck_distance(v1, v2, upper_bound / 10000.)) <= upper_bound / 10000.);
+
+ std::vector< std::pair<double, double> > empty;
+ std::vector< std::pair<double, double> > one = {{8, 10}};
+ BOOST_CHECK(bottleneck_distance(empty, empty) == 0);
+ BOOST_CHECK(bottleneck_distance(empty, one) == 1);
+}
+
+BOOST_AUTO_TEST_CASE(bottleneck_simple_test) {
+ std::vector< std::pair<double, double> > v1, v2;
+ double inf = std::numeric_limits<double>::infinity();
+ double neginf = -inf;
+ double b;
+
+ v1.emplace_back(9.6, 14.);
+ v2.emplace_back(9.5, 14.1);
+
+ b = Gudhi::persistence_diagram::bottleneck_distance(v1, v2, 0.);
+ BOOST_CHECK(b > 0.09 && b < 0.11);
+
+ v1.emplace_back(-34.974, -34.2);
+
+ b = Gudhi::persistence_diagram::bottleneck_distance(v1, v2, 0.);
+ BOOST_CHECK(b > 0.386 && b < 0.388);
+
+ v1.emplace_back(neginf, 3.7);
+
+ b = Gudhi::persistence_diagram::bottleneck_distance(v1, v2, 0.);
+ BOOST_CHECK_EQUAL(b, inf);
+
+ v2.emplace_back(neginf, 4.45);
+
+ b = Gudhi::persistence_diagram::bottleneck_distance(v1, v2, 0.);
+ BOOST_CHECK(b > 0.74 && b < 0.76);
+
+ v1.emplace_back(-60.6, 52.1);
+ v2.emplace_back(-61.5, 53.);
+
+ b = Gudhi::persistence_diagram::bottleneck_distance(v1, v2, 0.);
+ BOOST_CHECK(b > 0.89 && b < 0.91);
+
+ v1.emplace_back(3., inf);
+ v2.emplace_back(3.2, inf);
+
+ b = Gudhi::persistence_diagram::bottleneck_distance(v1, v2, 0.);
+ BOOST_CHECK(b > 0.89 && b < 0.91);
+
+ v1.emplace_back(neginf, inf);
+ v2.emplace_back(neginf, inf);
+
+ b = Gudhi::persistence_diagram::bottleneck_distance(v1, v2, 0.);
+ BOOST_CHECK(b > 0.89 && b < 0.91);
+
+ v2.emplace_back(6, inf);
+
+ b = Gudhi::persistence_diagram::bottleneck_distance(v1, v2, 0.);
+ BOOST_CHECK_EQUAL(b, inf);
+}
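
The new tests exercise the Persistence_graph change above: points born at -infinity or dying at +infinity are matched among themselves, and a mismatch in their counts makes the distance infinite. A minimal usage sketch with toy diagrams (assuming the public header <gudhi/Bottleneck.h>):

#include <gudhi/Bottleneck.h>

#include <iostream>
#include <limits>
#include <utility>
#include <vector>

// One essential (death = +inf) point in each diagram: with the change above,
// such points are matched among themselves, so the distance stays finite.
int main() {
  const double inf = std::numeric_limits<double>::infinity();
  std::vector<std::pair<double, double>> diag1 = {{0.5, 2.0}, {1.0, inf}};
  std::vector<std::pair<double, double>> diag2 = {{0.6, 2.1}, {1.3, inf}};
  std::cout << Gudhi::persistence_diagram::bottleneck_distance(diag1, diag2) << std::endl;
  return 0;
}
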
diff --git a/src/Cech_complex/utilities/CMakeLists.txt b/src/Cech_complex/utilities/CMakeLists.txt
index e80a698e..64557cee 100644
--- a/src/Cech_complex/utilities/CMakeLists.txt
+++ b/src/Cech_complex/utilities/CMakeLists.txt
@@ -9,8 +9,24 @@ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 5.0.1)
target_link_libraries(cech_persistence ${TBB_LIBRARIES})
endif()
- add_test(NAME Cech_complex_utility_from_rips_on_tore_3D COMMAND $<TARGET_FILE:cech_persistence>
- "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off" "-r" "0.25" "-m" "0.5" "-d" "3" "-p" "3")
+ add_test(NAME Cech_complex_utility_from_rips_on_tore_3D_safe COMMAND $<TARGET_FILE:cech_persistence>
+ "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off" "-r" "0.25" "-m" "0.5" "-d" "3" "-p" "3" "-o" "safe.pers")
+ add_test(NAME Cech_complex_utility_from_rips_on_tore_3D_fast COMMAND $<TARGET_FILE:cech_persistence>
+ "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off" "-r" "0.25" "-m" "0.5" "-d" "3" "-p" "3" "-o" "fast.pers" "-f")
+ add_test(NAME Cech_complex_utility_from_rips_on_tore_3D_exact COMMAND $<TARGET_FILE:cech_persistence>
+ "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off" "-r" "0.25" "-m" "0.5" "-d" "3" "-p" "3" "-o" "exact.pers" "-e")
+
+ if (DIFF_PATH)
+ add_test(Cech_complex_utilities_diff_exact ${DIFF_PATH}
+ "exact.pers" "safe.pers")
+ set_tests_properties(Cech_complex_utilities_diff_exact PROPERTIES DEPENDS
+ "Cech_complex_utility_from_rips_on_tore_3D_safe;Cech_complex_utility_from_rips_on_tore_3D_exact")
+
+ add_test(Cech_complex_utilities_diff_fast ${DIFF_PATH}
+ "fast.pers" "safe.pers")
+ set_tests_properties(Cech_complex_utilities_diff_fast PROPERTIES DEPENDS
+ "Cech_complex_utility_from_rips_on_tore_3D_safe;Cech_complex_utility_from_rips_on_tore_3D_fast")
+ endif()
install(TARGETS cech_persistence DESTINATION bin)
endif()
diff --git a/src/Cech_complex/utilities/cech_persistence.cpp b/src/Cech_complex/utilities/cech_persistence.cpp
index 75d10c0f..e6419f3d 100644
--- a/src/Cech_complex/utilities/cech_persistence.cpp
+++ b/src/Cech_complex/utilities/cech_persistence.cpp
@@ -16,6 +16,7 @@
#include <boost/program_options.hpp>
#include <CGAL/Epeck_d.h> // For EXACT or SAFE version
+#include <CGAL/Epick_d.h> // For FAST version
#include <string>
#include <vector>
@@ -25,41 +26,66 @@
using Simplex_tree = Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_fast_persistence>;
using Filtration_value = Simplex_tree::Filtration_value;
-using Kernel = CGAL::Epeck_d<CGAL::Dynamic_dimension_tag>;
-using Point = typename Kernel::Point_d;
-using Points_off_reader = Gudhi::Points_off_reader<Point>;
-using Cech_complex = Gudhi::cech_complex::Cech_complex<Kernel, Simplex_tree>;
using Field_Zp = Gudhi::persistent_cohomology::Field_Zp;
using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology<Simplex_tree, Field_Zp>;
-void program_options(int argc, char* argv[], std::string& off_file_points, std::string& filediag,
- Filtration_value& max_radius, int& dim_max, int& p, Filtration_value& min_persistence);
+void program_options(int argc, char* argv[], std::string& off_file_points, bool& exact, bool& fast,
+ std::string& filediag, Filtration_value& max_radius, int& dim_max, int& p,
+ Filtration_value& min_persistence);
+
+template<class Kernel>
+Simplex_tree create_simplex_tree(const std::string &off_file_points, bool exact_version,
+ Filtration_value max_radius, int dim_max) {
+ using Point = typename Kernel::Point_d;
+ using Points_off_reader = Gudhi::Points_off_reader<Point>;
+ using Cech_complex = Gudhi::cech_complex::Cech_complex<Kernel, Simplex_tree>;
+
+ Simplex_tree stree;
+
+ Points_off_reader off_reader(off_file_points);
+ Cech_complex cech_complex_from_file(off_reader.get_point_cloud(), max_radius, exact_version);
+ cech_complex_from_file.create_complex(stree, dim_max);
+
+ return stree;
+}
int main(int argc, char* argv[]) {
std::string off_file_points;
std::string filediag;
+ bool exact_version = false;
+ bool fast_version = false;
Filtration_value max_radius;
int dim_max;
int p;
Filtration_value min_persistence;
- program_options(argc, argv, off_file_points, filediag, max_radius, dim_max, p, min_persistence);
+ program_options(argc, argv, off_file_points, exact_version, fast_version, filediag, max_radius, dim_max, p,
+ min_persistence);
- Points_off_reader off_reader(off_file_points);
- Cech_complex cech_complex_from_file(off_reader.get_point_cloud(), max_radius);
+ if ((exact_version) && (fast_version)) {
+ std::cerr << "You cannot set the exact and the fast version." << std::endl;
+ exit(-1);
+ }
- // Construct the Cech complex in a Simplex Tree
- Simplex_tree simplex_tree;
+ Simplex_tree stree;
+ if (fast_version) {
+ // WARNING : CGAL::Epick_d is fast but not safe (unlike CGAL::Epeck_d)
+ // (i.e. when the points are on a grid)
+ using Fast_kernel = CGAL::Epick_d<CGAL::Dynamic_dimension_tag>;
+ stree = create_simplex_tree<Fast_kernel>(off_file_points, exact_version, max_radius, dim_max);
+ } else {
+ using Kernel = CGAL::Epeck_d<CGAL::Dynamic_dimension_tag>;
+ stree = create_simplex_tree<Kernel>(off_file_points, exact_version, max_radius, dim_max);
+ }
- cech_complex_from_file.create_complex(simplex_tree, dim_max);
- std::clog << "The complex contains " << simplex_tree.num_simplices() << " simplices \n";
- std::clog << " and has dimension " << simplex_tree.dimension() << " \n";
+ std::clog << "The complex contains " << stree.num_simplices() << " simplices \n";
+ std::clog << " and has dimension " << stree.dimension() << " \n";
// Sort the simplices in the order of the filtration
- simplex_tree.initialize_filtration();
+ stree.initialize_filtration();
// Compute the persistence diagram of the complex
- Persistent_cohomology pcoh(simplex_tree);
+ Persistent_cohomology pcoh(stree);
// initializes the coefficient field for homology
pcoh.init_coefficients(p);
@@ -77,8 +103,9 @@ int main(int argc, char* argv[]) {
return 0;
}
-void program_options(int argc, char* argv[], std::string& off_file_points, std::string& filediag,
- Filtration_value& max_radius, int& dim_max, int& p, Filtration_value& min_persistence) {
+void program_options(int argc, char* argv[], std::string& off_file_points, bool& exact, bool& fast,
+ std::string& filediag, Filtration_value& max_radius, int& dim_max, int& p,
+ Filtration_value& min_persistence) {
namespace po = boost::program_options;
po::options_description hidden("Hidden options");
hidden.add_options()("input-file", po::value<std::string>(&off_file_points),
@@ -86,8 +113,12 @@ void program_options(int argc, char* argv[], std::string& off_file_points, std::
po::options_description visible("Allowed options", 100);
visible.add_options()("help,h", "produce help message")(
+ "exact,e", po::bool_switch(&exact),
+ "To activate exact version of Cech complex (default is false, not available if fast is set)")(
+ "fast,f", po::bool_switch(&fast),
+ "To activate fast version of Cech complex (default is false, not available if exact is set)")(
"output-file,o", po::value<std::string>(&filediag)->default_value(std::string()),
- "Name of file in which the persistence diagram is written. Default print in std::clog")(
+ "Name of file in which the persistence diagram is written. Default print in standard output")(
"max-radius,r",
po::value<Filtration_value>(&max_radius)->default_value(std::numeric_limits<Filtration_value>::infinity()),
"Maximal length of an edge for the Cech complex construction.")(
diff --git a/src/Cech_complex/utilities/cechcomplex.md b/src/Cech_complex/utilities/cechcomplex.md
index 821e4dad..0e82674d 100644
--- a/src/Cech_complex/utilities/cechcomplex.md
+++ b/src/Cech_complex/utilities/cechcomplex.md
@@ -26,7 +26,11 @@ a prime number).
**Usage**
-`cech_persistence [options] <OFF input file>`
+`cech_persistence [options] <input OFF file>`
+
+where
+`<input OFF file>` is the path to the input point cloud in
+[nOFF ASCII format]({{ site.officialurl }}/doc/latest/fileformats.html#FileFormatsOFF).
**Allowed options**
@@ -36,6 +40,8 @@ a prime number).
* `-d [ --cpx-dimension ]` (default = 1) Maximal dimension of the Čech complex we want to compute.
* `-p [ --field-charac ]` (default = 11) Characteristic p of the coefficient field Z/pZ for computing homology.
* `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals.
+* `-e [ --exact ]` for the exact computation version.
+* `-f [ --fast ]` for the fast computation version.
Beware: this program may use a lot of RAM and take a lot of time if `max-edge-length` is set to a large value.
diff --git a/src/Collapse/utilities/distance_matrix_edge_collapse_rips_persistence.cpp b/src/Collapse/utilities/distance_matrix_edge_collapse_rips_persistence.cpp
index 38efb9e6..70b489b5 100644
--- a/src/Collapse/utilities/distance_matrix_edge_collapse_rips_persistence.cpp
+++ b/src/Collapse/utilities/distance_matrix_edge_collapse_rips_persistence.cpp
@@ -111,7 +111,7 @@ void program_options(int argc, char* argv[], std::string& csv_matrix_file, std::
po::options_description visible("Allowed options", 100);
visible.add_options()("help,h", "produce help message")(
"output-file,o", po::value<std::string>(&filediag)->default_value(std::string()),
- "Name of file in which the persistence diagram is written. Default print in std::cout")(
+ "Name of file in which the persistence diagram is written. Default print in standard output")(
"max-edge-length,r",
po::value<Filtration_value>(&threshold)->default_value(std::numeric_limits<Filtration_value>::infinity()),
"Maximal length of an edge for the Rips complex construction.")(
diff --git a/src/Collapse/utilities/point_cloud_edge_collapse_rips_persistence.cpp b/src/Collapse/utilities/point_cloud_edge_collapse_rips_persistence.cpp
index d8f42ab6..a8fd6f14 100644
--- a/src/Collapse/utilities/point_cloud_edge_collapse_rips_persistence.cpp
+++ b/src/Collapse/utilities/point_cloud_edge_collapse_rips_persistence.cpp
@@ -140,7 +140,7 @@ void program_options(int argc, char* argv[], std::string& off_file_points, std::
po::options_description visible("Allowed options", 100);
visible.add_options()("help,h", "produce help message")(
"output-file,o", po::value<std::string>(&filediag)->default_value(std::string()),
- "Name of file in which the persistence diagram is written. Default print in std::cout")(
+ "Name of file in which the persistence diagram is written. Default print in standard output")(
"max-edge-length,r",
po::value<Filtration_value>(&threshold)->default_value(std::numeric_limits<Filtration_value>::infinity()),
"Maximal length of an edge for the Rips complex construction.")(
diff --git a/src/GudhUI/view/Viewer.cpp b/src/GudhUI/view/Viewer.cpp
index 6b17c833..2c00f86f 100644
--- a/src/GudhUI/view/Viewer.cpp
+++ b/src/GudhUI/view/Viewer.cpp
@@ -31,7 +31,11 @@ void Viewer::set_bounding_box(const Point_3 & lower_left, const Point_3 & upper_
}
void Viewer::update_GL() {
+#if QGLVIEWER_VERSION >= 0x020700
+ this->update();
+#else
this->updateGL();
+#endif
}
void Viewer::init_scene() {
diff --git a/src/Persistent_cohomology/example/persistence_from_file.cpp b/src/Persistent_cohomology/example/persistence_from_file.cpp
index 38c44514..7f89c001 100644
--- a/src/Persistent_cohomology/example/persistence_from_file.cpp
+++ b/src/Persistent_cohomology/example/persistence_from_file.cpp
@@ -93,7 +93,7 @@ void program_options(int argc, char * argv[]
visible.add_options()
("help,h", "produce help message")
("output-file,o", po::value<std::string>(&output_file)->default_value(std::string()),
- "Name of file in which the persistence diagram is written. Default print in std::clog")
+ "Name of file in which the persistence diagram is written. Default print in standard output")
("field-charac,p", po::value<int>(&p)->default_value(11),
"Characteristic p of the coefficient field Z/pZ for computing homology.")
("min-persistence,m", po::value<Filtration_value>(&min_persistence),
diff --git a/src/Persistent_cohomology/example/rips_multifield_persistence.cpp b/src/Persistent_cohomology/example/rips_multifield_persistence.cpp
index ca26a5b9..84453898 100644
--- a/src/Persistent_cohomology/example/rips_multifield_persistence.cpp
+++ b/src/Persistent_cohomology/example/rips_multifield_persistence.cpp
@@ -96,7 +96,7 @@ void program_options(int argc, char * argv[]
visible.add_options()
("help,h", "produce help message")
("output-file,o", po::value<std::string>(&filediag)->default_value(std::string()),
- "Name of file in which the persistence diagram is written. Default print in std::clog")
+ "Name of file in which the persistence diagram is written. Default print in standard output")
("max-edge-length,r", po::value<Filtration_value>(&threshold)->default_value(0),
"Maximal length of an edge for the Rips complex construction.")
("cpx-dimension,d", po::value<int>(&dim_max)->default_value(1),
diff --git a/src/Persistent_cohomology/example/rips_persistence_step_by_step.cpp b/src/Persistent_cohomology/example/rips_persistence_step_by_step.cpp
index a503d983..6f37cf5c 100644
--- a/src/Persistent_cohomology/example/rips_persistence_step_by_step.cpp
+++ b/src/Persistent_cohomology/example/rips_persistence_step_by_step.cpp
@@ -112,7 +112,7 @@ void program_options(int argc, char * argv[]
visible.add_options()
("help,h", "produce help message")
("output-file,o", po::value<std::string>(&filediag)->default_value(std::string()),
- "Name of file in which the persistence diagram is written. Default print in std::clog")
+ "Name of file in which the persistence diagram is written. Default print in standard output")
("max-edge-length,r",
po::value<Filtration_value>(&threshold)->default_value(std::numeric_limits<Filtration_value>::infinity()),
"Maximal length of an edge for the Rips complex construction.")
diff --git a/src/Persistent_cohomology/example/rips_persistence_via_boundary_matrix.cpp b/src/Persistent_cohomology/example/rips_persistence_via_boundary_matrix.cpp
index 8c5742aa..6b60f603 100644
--- a/src/Persistent_cohomology/example/rips_persistence_via_boundary_matrix.cpp
+++ b/src/Persistent_cohomology/example/rips_persistence_via_boundary_matrix.cpp
@@ -109,7 +109,7 @@ void program_options(int argc, char * argv[]
visible.add_options()
("help,h", "produce help message")
("output-file,o", po::value<std::string>(&filediag)->default_value(std::string()),
- "Name of file in which the persistence diagram is written. Default print in std::clog")
+ "Name of file in which the persistence diagram is written. Default print in standard output")
("max-edge-length,r", po::value<Filtration_value>(&threshold)->default_value(0),
"Maximal length of an edge for the Rips complex construction.")
("cpx-dimension,d", po::value<int>(&dim_max)->default_value(1),
diff --git a/src/Rips_complex/utilities/rips_correlation_matrix_persistence.cpp b/src/Rips_complex/utilities/rips_correlation_matrix_persistence.cpp
index b473738e..72ddc797 100644
--- a/src/Rips_complex/utilities/rips_correlation_matrix_persistence.cpp
+++ b/src/Rips_complex/utilities/rips_correlation_matrix_persistence.cpp
@@ -118,7 +118,7 @@ void program_options(int argc, char* argv[], std::string& csv_matrix_file, std::
po::options_description visible("Allowed options", 100);
visible.add_options()("help,h", "produce help message")(
"output-file,o", po::value<std::string>(&filediag)->default_value(std::string()),
- "Name of file in which the persistence diagram is written. Default print in std::clog")(
+ "Name of file in which the persistence diagram is written. Default print in standard output")(
"min-edge-corelation,c", po::value<Filtration_value>(&correlation_min)->default_value(0),
"Minimal corelation of an edge for the Rips complex construction.")(
"cpx-dimension,d", po::value<int>(&dim_max)->default_value(1),
diff --git a/src/Rips_complex/utilities/rips_distance_matrix_persistence.cpp b/src/Rips_complex/utilities/rips_distance_matrix_persistence.cpp
index 6306755d..77ad841a 100644
--- a/src/Rips_complex/utilities/rips_distance_matrix_persistence.cpp
+++ b/src/Rips_complex/utilities/rips_distance_matrix_persistence.cpp
@@ -79,7 +79,7 @@ void program_options(int argc, char* argv[], std::string& csv_matrix_file, std::
po::options_description visible("Allowed options", 100);
visible.add_options()("help,h", "produce help message")(
"output-file,o", po::value<std::string>(&filediag)->default_value(std::string()),
- "Name of file in which the persistence diagram is written. Default print in std::clog")(
+ "Name of file in which the persistence diagram is written. Default print in standard output")(
"max-edge-length,r",
po::value<Filtration_value>(&threshold)->default_value(std::numeric_limits<Filtration_value>::infinity()),
"Maximal length of an edge for the Rips complex construction.")(
diff --git a/src/Rips_complex/utilities/rips_persistence.cpp b/src/Rips_complex/utilities/rips_persistence.cpp
index 9d7490b3..43194821 100644
--- a/src/Rips_complex/utilities/rips_persistence.cpp
+++ b/src/Rips_complex/utilities/rips_persistence.cpp
@@ -81,7 +81,7 @@ void program_options(int argc, char* argv[], std::string& off_file_points, std::
po::options_description visible("Allowed options", 100);
visible.add_options()("help,h", "produce help message")(
"output-file,o", po::value<std::string>(&filediag)->default_value(std::string()),
- "Name of file in which the persistence diagram is written. Default print in std::clog")(
+ "Name of file in which the persistence diagram is written. Default print in standard output")(
"max-edge-length,r",
po::value<Filtration_value>(&threshold)->default_value(std::numeric_limits<Filtration_value>::infinity()),
"Maximal length of an edge for the Rips complex construction.")(
diff --git a/src/Rips_complex/utilities/sparse_rips_persistence.cpp b/src/Rips_complex/utilities/sparse_rips_persistence.cpp
index ac935b41..829c85e6 100644
--- a/src/Rips_complex/utilities/sparse_rips_persistence.cpp
+++ b/src/Rips_complex/utilities/sparse_rips_persistence.cpp
@@ -84,7 +84,7 @@ void program_options(int argc, char* argv[], std::string& off_file_points, std::
po::options_description visible("Allowed options", 100);
visible.add_options()("help,h", "produce help message")(
"output-file,o", po::value<std::string>(&filediag)->default_value(std::string()),
- "Name of file in which the persistence diagram is written. Default print in std::clog")(
+ "Name of file in which the persistence diagram is written. Default print in standard output")(
"max-edge-length,r",
po::value<Filtration_value>(&threshold)->default_value(std::numeric_limits<Filtration_value>::infinity()),
"Maximal length of an edge for the Rips complex construction.")(
diff --git a/src/Spatial_searching/example/example_spatial_searching.cpp b/src/Spatial_searching/example/example_spatial_searching.cpp
index 8f9151fc..09c2dabf 100644
--- a/src/Spatial_searching/example/example_spatial_searching.cpp
+++ b/src/Spatial_searching/example/example_spatial_searching.cpp
@@ -25,7 +25,7 @@ int main(void) {
// 10-nearest neighbor query
std::clog << "10 nearest neighbors from points[20]:\n";
auto knn_range = points_ds.k_nearest_neighbors(points[20], 10, true);
- for (auto const& nghb : knn_range)
+ for (auto const nghb : knn_range)
std::clog << nghb.first << " (sq. dist. = " << nghb.second << ")\n";
// Incremental nearest neighbor query
@@ -38,7 +38,7 @@ int main(void) {
// 10-furthest neighbor query
std::clog << "10 furthest neighbors from points[20]:\n";
auto kfn_range = points_ds.k_furthest_neighbors(points[20], 10, true);
- for (auto const& nghb : kfn_range)
+ for (auto const nghb : kfn_range)
std::clog << nghb.first << " (sq. dist. = " << nghb.second << ")\n";
// Incremental furthest neighbor query
diff --git a/src/Spatial_searching/test/test_Kd_tree_search.cpp b/src/Spatial_searching/test/test_Kd_tree_search.cpp
index d6c6fba3..e9acfaa7 100644
--- a/src/Spatial_searching/test/test_Kd_tree_search.cpp
+++ b/src/Spatial_searching/test/test_Kd_tree_search.cpp
@@ -45,7 +45,7 @@ BOOST_AUTO_TEST_CASE(test_Kd_tree_search) {
std::vector<std::size_t> knn_result;
FT last_dist = -1.;
- for (auto const& nghb : kns_range) {
+ for (auto const nghb : kns_range) {
BOOST_CHECK(nghb.second > last_dist);
knn_result.push_back(nghb.second);
last_dist = nghb.second;
@@ -76,7 +76,7 @@ BOOST_AUTO_TEST_CASE(test_Kd_tree_search) {
std::vector<std::size_t> kfn_result;
last_dist = kfn_range.begin()->second;
- for (auto const& nghb : kfn_range) {
+ for (auto const nghb : kfn_range) {
BOOST_CHECK(nghb.second <= last_dist);
kfn_result.push_back(nghb.second);
last_dist = nghb.second;
diff --git a/src/Tangential_complex/benchmark/XML_exporter.h b/src/Tangential_complex/benchmark/XML_exporter.h
index 16b62eb6..38fe049f 100644
--- a/src/Tangential_complex/benchmark/XML_exporter.h
+++ b/src/Tangential_complex/benchmark/XML_exporter.h
@@ -157,7 +157,7 @@ class Streaming_XML_exporter {
m_xml_fstream << " </" << m_element_name << ">" << std::endl;
// Save current pointer position
- std::ofstream::streampos pos = m_xml_fstream.tellp();
+ auto pos = m_xml_fstream.tellp();
// Close the XML file (temporarily) so that the XML file is always correct
m_xml_fstream << "</" << m_list_name << ">" << std::endl;
// Restore the pointer position so that the next "add_element" will overwrite
diff --git a/src/Tangential_complex/include/gudhi/Tangential_complex.h b/src/Tangential_complex/include/gudhi/Tangential_complex.h
index cc424810..56a24af0 100644
--- a/src/Tangential_complex/include/gudhi/Tangential_complex.h
+++ b/src/Tangential_complex/include/gudhi/Tangential_complex.h
@@ -36,7 +36,6 @@
#include <Eigen/Eigen>
#include <Eigen/src/Core/util/Macros.h> // for EIGEN_VERSION_AT_LEAST
-#include <boost/optional.hpp>
#include <boost/iterator/transform_iterator.hpp>
#include <boost/range/adaptor/transformed.hpp>
#include <boost/range/counting_range.hpp>
@@ -56,6 +55,7 @@
#include <cmath> // for std::sqrt
#include <string>
#include <cstddef> // for std::size_t
+#include <optional>
#ifdef GUDHI_USE_TBB
#include <tbb/parallel_for.h>
@@ -994,7 +994,7 @@ class Tangential_complex {
// circumspheres of the star of "center_vertex"
// If the m_max_squared_edge_length is set, the maximal radius of the "star sphere"
// is at most the square root of m_max_squared_edge_length
- boost::optional<FT> squared_star_sphere_radius_plus_margin = m_max_squared_edge_length;
+ std::optional<FT> squared_star_sphere_radius_plus_margin = m_max_squared_edge_length;
// Insert points until we find a point which is outside "star sphere"
for (auto nn_it = ins_range.begin(); nn_it != ins_range.end(); ++nn_it) {
@@ -1036,7 +1036,7 @@ class Tangential_complex {
// Let's recompute squared_star_sphere_radius_plus_margin
if (triangulation.current_dimension() >= tangent_space_dim) {
- squared_star_sphere_radius_plus_margin = boost::none;
+ squared_star_sphere_radius_plus_margin = std::nullopt;
// Get the incident cells and look for the biggest circumsphere
std::vector<Tr_full_cell_handle> incident_cells;
triangulation.incident_full_cells(center_vertex, std::back_inserter(incident_cells));
@@ -1044,7 +1044,7 @@ class Tangential_complex {
cit != incident_cells.end(); ++cit) {
Tr_full_cell_handle cell = *cit;
if (triangulation.is_infinite(cell)) {
- squared_star_sphere_radius_plus_margin = boost::none;
+ squared_star_sphere_radius_plus_margin = std::nullopt;
break;
} else {
// Note that this uses the perturbed point since it uses
@@ -2030,7 +2030,7 @@ class Tangential_complex {
// and their center vertex
Stars_container m_stars;
std::vector<FT> m_squared_star_spheres_radii_incl_margin;
- boost::optional<FT> m_max_squared_edge_length;
+ std::optional<FT> m_max_squared_edge_length;
#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM
Points m_points_for_tse;
diff --git a/src/Witness_complex/utilities/strong_witness_persistence.cpp b/src/Witness_complex/utilities/strong_witness_persistence.cpp
index 614de0d4..b2ecad82 100644
--- a/src/Witness_complex/utilities/strong_witness_persistence.cpp
+++ b/src/Witness_complex/utilities/strong_witness_persistence.cpp
@@ -108,7 +108,7 @@ void program_options(int argc, char* argv[], int& nbL, std::string& file_name, s
visible.add_options()("help,h", "produce help message")("landmarks,l", po::value<int>(&nbL),
"Number of landmarks to choose from the point cloud.")(
"output-file,o", po::value<std::string>(&filediag)->default_value(std::string()),
- "Name of file in which the persistence diagram is written. Default print in std::clog")(
+ "Name of file in which the persistence diagram is written. Default print in standard output")(
"max-sq-alpha,a", po::value<Filtration_value>(&max_squared_alpha)->default_value(default_alpha),
"Maximal squared relaxation parameter.")(
"field-charac,p", po::value<int>(&p)->default_value(11),
diff --git a/src/Witness_complex/utilities/weak_witness_persistence.cpp b/src/Witness_complex/utilities/weak_witness_persistence.cpp
index 5ea31d6b..c7ead7de 100644
--- a/src/Witness_complex/utilities/weak_witness_persistence.cpp
+++ b/src/Witness_complex/utilities/weak_witness_persistence.cpp
@@ -108,7 +108,7 @@ void program_options(int argc, char* argv[], int& nbL, std::string& file_name, s
visible.add_options()("help,h", "produce help message")("landmarks,l", po::value<int>(&nbL),
"Number of landmarks to choose from the point cloud.")(
"output-file,o", po::value<std::string>(&filediag)->default_value(std::string()),
- "Name of file in which the persistence diagram is written. Default print in std::clog")(
+ "Name of file in which the persistence diagram is written. Default print in standard output")(
"max-sq-alpha,a", po::value<Filtration_value>(&max_squared_alpha)->default_value(default_alpha),
"Maximal squared relaxation parameter.")(
"field-charac,p", po::value<int>(&p)->default_value(11),
diff --git a/src/Witness_complex/utilities/witnesscomplex.md b/src/Witness_complex/utilities/witnesscomplex.md
index 3a3a7d83..e994e0b8 100644
--- a/src/Witness_complex/utilities/witnesscomplex.md
+++ b/src/Witness_complex/utilities/witnesscomplex.md
@@ -29,7 +29,7 @@ and `p` is the characteristic of the field *Z/pZ* used for homology coefficients
* `-h [ --help ]` Produce help message
* `-l [ --landmarks ]` Number of landmarks to choose from the point cloud.
-* `-o [ --output-file ]` Name of file in which the persistence diagram is written. By default, print in std::clog.
+* `-o [ --output-file ]` Name of file in which the persistence diagram is written. By default, print in standard output.
* `-a [ --max-sq-alpha ]` (default = inf) Maximal squared relaxation parameter.
* `-p [ --field-charac ]` (default = 11) Characteristic p of the coefficient field Z/pZ for computing homology.
* `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals.
@@ -60,7 +60,7 @@ and `p` is the characteristic of the field *Z/pZ* used for homology coefficients
* `-h [ --help ]` Produce help message
* `-l [ --landmarks ]` Number of landmarks to choose from the point cloud.
-* `-o [ --output-file ]` Name of file in which the persistence diagram is written. By default, print in std::clog.
+* `-o [ --output-file ]` Name of file in which the persistence diagram is written. By default, print in standard output.
* `-a [ --max-sq-alpha ]` (default = inf) Maximal squared relaxation parameter.
* `-p [ --field-charac ]` (default = 11) Characteristic p of the coefficient field Z/pZ for computing homology.
* `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals.
diff --git a/src/cmake/modules/GUDHI_compilation_flags.cmake b/src/cmake/modules/GUDHI_compilation_flags.cmake
index 567fbc40..b43ccf73 100644
--- a/src/cmake/modules/GUDHI_compilation_flags.cmake
+++ b/src/cmake/modules/GUDHI_compilation_flags.cmake
@@ -11,7 +11,8 @@ macro(add_cxx_compiler_flag _flag)
endif()
endmacro()
-set (CMAKE_CXX_STANDARD 14)
+set (CMAKE_CXX_STANDARD 17)
+# This number needs to be changed in python/CMakeLists.txt at the same time
enable_testing()
diff --git a/src/common/doc/installation.h b/src/common/doc/installation.h
index 63a37a25..f2f8a476 100644
--- a/src/common/doc/installation.h
+++ b/src/common/doc/installation.h
@@ -5,9 +5,9 @@
* Examples of GUDHI headers inclusion can be found in \ref utilities.
*
* \section compiling Compiling
- * The library uses c++14 and requires <a target="_blank" href="https://www.boost.org/">Boost</a> &ge; 1.66.0
+ * The library uses c++17 and requires <a target="_blank" href="https://www.boost.org/">Boost</a> &ge; 1.66.0
* and <a target="_blank" href="https://cmake.org/">CMake</a> &ge; 3.5.
- * It is a multi-platform library and compiles on Linux, Mac OSX and Visual Studio 2015.
+ * It is a multi-platform library and compiles on Linux, Mac OSX and Visual Studio 2017.
*
* \subsection utilities Utilities and examples
* To build the utilities, run the following commands in a terminal:
diff --git a/src/python/CMakeLists.txt b/src/python/CMakeLists.txt
index 5f323935..32ec13bd 100644
--- a/src/python/CMakeLists.txt
+++ b/src/python/CMakeLists.txt
@@ -53,7 +53,7 @@ if(PYTHONINTERP_FOUND)
set(GUDHI_PYTHON_MODULES_EXTRA "${GUDHI_PYTHON_MODULES_EXTRA}'datasets', ")
# Cython modules
- set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'off_reader', ")
+ set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'off_utils', ")
set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'simplex_tree', ")
set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'rips_complex', ")
set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'cubical_complex', ")
@@ -129,9 +129,10 @@ if(PYTHONINTERP_FOUND)
# Gudhi and CGAL compilation option
if(MSVC)
+ set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'/std:c++17', ")
set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'/fp:strict', ")
else(MSVC)
- set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-std=c++14', ")
+ set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-std=c++17', ")
endif(MSVC)
if(CMAKE_COMPILER_IS_GNUCXX)
set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-frounding-math', ")
@@ -151,7 +152,7 @@ if(PYTHONINTERP_FOUND)
set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-DCGAL_EIGEN3_ENABLED', ")
endif (EIGEN3_FOUND)
- set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'off_reader', ")
+ set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'off_utils', ")
set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'simplex_tree', ")
set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'rips_complex', ")
set(GUDHI_CYTHON_MODULES "${GUDHI_CYTHON_MODULES}'cubical_complex', ")
@@ -246,8 +247,8 @@ if(PYTHONINTERP_FOUND)
# Specific for Mac
if (${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
- set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-mmacosx-version-min=10.12', ")
- set(GUDHI_PYTHON_EXTRA_LINK_ARGS "${GUDHI_PYTHON_EXTRA_LINK_ARGS}'-mmacosx-version-min=10.12', ")
+ set(GUDHI_PYTHON_EXTRA_COMPILE_ARGS "${GUDHI_PYTHON_EXTRA_COMPILE_ARGS}'-mmacosx-version-min=10.14', ")
+ set(GUDHI_PYTHON_EXTRA_LINK_ARGS "${GUDHI_PYTHON_EXTRA_LINK_ARGS}'-mmacosx-version-min=10.14', ")
endif(${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
# Loop on INCLUDE_DIRECTORIES PROPERTY
@@ -545,6 +546,7 @@ if(PYTHONINTERP_FOUND)
# Reader utils
add_gudhi_py_test(test_reader_utils)
+ add_gudhi_py_test(test_off)
# Wasserstein
if(OT_FOUND)
diff --git a/src/python/doc/clustering.rst b/src/python/doc/clustering.rst
index c5a57d3c..62422682 100644
--- a/src/python/doc/clustering.rst
+++ b/src/python/doc/clustering.rst
@@ -17,9 +17,8 @@ As a by-product, we produce the persistence diagram of the merge tree of the ini
:include-source:
import gudhi
- f = open(gudhi.__root_source_dir__ + '/data/points/spiral_2d.csv', 'r')
- import numpy as np
- data = np.loadtxt(f)
+ from gudhi.datasets.remote import fetch_spiral_2d
+ data = fetch_spiral_2d()
import matplotlib.pyplot as plt
plt.scatter(data[:,0],data[:,1],marker='.',s=1)
plt.show()
diff --git a/src/python/doc/installation.rst b/src/python/doc/installation.rst
index 4eefd415..5491542f 100644
--- a/src/python/doc/installation.rst
+++ b/src/python/doc/installation.rst
@@ -39,7 +39,7 @@ If you are instead using a git checkout, beware that the paths are a bit
different, and in particular the `python/` subdirectory is actually `src/python/`
there.
-The library uses c++14 and requires `Boost <https://www.boost.org/>`_ :math:`\geq` 1.66.0,
+The library uses c++17 and requires `Boost <https://www.boost.org/>`_ :math:`\geq` 1.66.0,
`CMake <https://www.cmake.org/>`_ :math:`\geq` 3.5 to generate makefiles,
Python :math:`\geq` 3.5, `NumPy <http://numpy.org>`_ :math:`\geq` 1.15.0, `Cython <https://www.cython.org/>`_
:math:`\geq` 0.27 and `pybind11 <https://github.com/pybind/pybind11>`_ to compile the GUDHI Python module.
@@ -150,7 +150,7 @@ You shall have something like:
Cython version 0.29.25
Numpy version 1.21.4
Boost version 1.77.0
- + Installed modules are: off_reader;simplex_tree;rips_complex;cubical_complex;periodic_cubical_complex;
+ + Installed modules are: off_utils;simplex_tree;rips_complex;cubical_complex;periodic_cubical_complex;
persistence_graphical_tools;reader_utils;witness_complex;strong_witness_complex;
+ Missing modules are: bottleneck;nerve_gic;subsampling;tangential_complex;alpha_complex;euclidean_witness_complex;
euclidean_strong_witness_complex;
@@ -188,7 +188,7 @@ A complete configuration would be :
GMPXX_LIBRARIES = /usr/lib/x86_64-linux-gnu/libgmpxx.so
MPFR_LIBRARIES = /usr/lib/x86_64-linux-gnu/libmpfr.so
TBB version 9107 found and used
- + Installed modules are: bottleneck;off_reader;simplex_tree;rips_complex;cubical_complex;periodic_cubical_complex;
+ + Installed modules are: bottleneck;off_utils;simplex_tree;rips_complex;cubical_complex;periodic_cubical_complex;
persistence_graphical_tools;reader_utils;witness_complex;strong_witness_complex;nerve_gic;subsampling;
tangential_complex;alpha_complex;euclidean_witness_complex;euclidean_strong_witness_complex;
+ Missing modules are:
@@ -391,7 +391,7 @@ The :doc:`persistence graphical tools </persistence_graphical_tools_user>` and
mathematics, science, and engineering.
:class:`~gudhi.point_cloud.knn.KNearestNeighbors` can use the Python package
-`SciPy <http://scipy.org>`_ as a backend if explicitly requested.
+`SciPy <http://scipy.org>`_ :math:`\geq` 1.6.0 as a backend if explicitly requested.
TensorFlow
----------
diff --git a/src/python/doc/point_cloud.rst b/src/python/doc/point_cloud.rst
index ffd8f85b..473b303f 100644
--- a/src/python/doc/point_cloud.rst
+++ b/src/python/doc/point_cloud.rst
@@ -13,6 +13,11 @@ File Readers
.. autofunction:: gudhi.read_lower_triangular_matrix_from_csv_file
+File Writers
+------------
+
+.. autofunction:: gudhi.write_points_to_off_file
+
Subsampling
-----------
diff --git a/src/python/gudhi/off_reader.pyx b/src/python/gudhi/off_utils.pyx
index a3200704..9276c7b0 100644
--- a/src/python/gudhi/off_reader.pyx
+++ b/src/python/gudhi/off_utils.pyx
@@ -13,8 +13,10 @@ from __future__ import print_function
from cython cimport numeric
from libcpp.vector cimport vector
from libcpp.string cimport string
+cimport cython
import errno
import os
+import numpy as np
__author__ = "Vincent Rouvreau"
__copyright__ = "Copyright (C) 2016 Inria"
@@ -24,7 +26,7 @@ cdef extern from "Off_reader_interface.h" namespace "Gudhi":
vector[vector[double]] read_points_from_OFF_file(string off_file)
def read_points_from_off_file(off_file=''):
- """Read points from OFF file.
+ """Read points from an `OFF file <fileformats.html#off-file-format>`_.
:param off_file: An OFF file style name.
:type off_file: string
@@ -39,3 +41,22 @@ def read_points_from_off_file(off_file=''):
raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT),
off_file)
+@cython.embedsignature(True)
+def write_points_to_off_file(fname, points):
+ """Write points to an `OFF file <fileformats.html#off-file-format>`_.
+
+ A simple wrapper for `numpy.savetxt`.
+
+ :param fname: Name of the OFF file.
+ :type fname: str or file handle
+ :param points: Point coordinates.
+ :type points: numpy array of shape (n, dim)
+ """
+ points = np.array(points, copy=False)
+ assert len(points.shape) == 2
+ dim = points.shape[1]
+ if dim == 3:
+ head = 'OFF\n{} 0 0'.format(points.shape[0])
+ else:
+ head = 'nOFF\n{} {} 0 0'.format(dim, points.shape[0])
+ np.savetxt(fname, points, header=head, comments='')
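The new writer pairs with the existing reader for a simple round trip. A minimal sketch, assuming a gudhi build that includes write_points_to_off_file; the file name rand_pts.off is only illustrative:

    import numpy as np
    import gudhi

    points = np.random.rand(50, 2)
    # Dimension 2, so an 'nOFF' header is written; 3D input would get a plain 'OFF' header.
    gudhi.write_points_to_off_file("rand_pts.off", points)

    # Reading the file back recovers the same point cloud.
    points_back = np.array(gudhi.read_points_from_off_file("rand_pts.off"))
    assert points_back.shape == points.shape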
diff --git a/src/python/gudhi/persistence_graphical_tools.py b/src/python/gudhi/persistence_graphical_tools.py
index 21275cdd..e438aa66 100644
--- a/src/python/gudhi/persistence_graphical_tools.py
+++ b/src/python/gudhi/persistence_graphical_tools.py
@@ -194,19 +194,21 @@ def plot_persistence_barcode(
y=[(death - birth) if death != float("inf") else (infinity - birth) for (dim,(birth,death)) in persistence]
c=[colormap[dim] for (dim,(birth,death)) in persistence]
- axes.barh(list(reversed(range(len(x)))), y, height=0.8, left=x, alpha=alpha, color=c, linewidth=0)
+ axes.barh(range(len(x)), y, left=x, alpha=alpha, color=c, linewidth=0)
if legend:
- dimensions = list(set(item[0] for item in persistence))
+ dimensions = set(item[0] for item in persistence)
axes.legend(
handles=[mpatches.Patch(color=colormap[dim], label=str(dim)) for dim in dimensions], loc="lower right",
)
axes.set_title("Persistence barcode", fontsize=fontsize)
+ axes.set_yticks([])
+ axes.invert_yaxis()
# Ends plot on infinity value and starts a little bit before min_birth
if len(x) != 0:
- axes.axis([axis_start, infinity, 0, len(x)])
+ axes.set_xlim((axis_start, infinity))
return axes
except ImportError as import_error:
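The barcode is now drawn with one horizontal bar per interval, top to bottom (invert_yaxis), with the y ticks hidden and the x axis clipped to [axis_start, infinity]. A minimal usage sketch, assuming matplotlib is installed; the toy diagram values are arbitrary:

    import matplotlib.pyplot as plt
    import gudhi

    # Persistence list in the (dimension, (birth, death)) format used by the plotting helpers.
    diag = [(0, (0.0, 1.0)), (0, (0.2, 0.7)), (1, (0.5, float("inf")))]
    ax = gudhi.plot_persistence_barcode(diag, legend=True)
    plt.show()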
diff --git a/src/python/gudhi/point_cloud/knn.py b/src/python/gudhi/point_cloud/knn.py
index de5844f9..7dc83817 100644
--- a/src/python/gudhi/point_cloud/knn.py
+++ b/src/python/gudhi/point_cloud/knn.py
@@ -314,7 +314,9 @@ class KNearestNeighbors:
return None
if self.params["implementation"] == "ckdtree":
- qargs = {key: val for key, val in self.params.items() if key in {"p", "eps", "n_jobs"}}
+ qargs = {key: val for key, val in self.params.items() if key in {"p", "eps"}}
+ # SciPy renamed n_jobs to workers
+ qargs["workers"] = self.params.get("workers") or self.params.get("n_jobs") or 1
distances, neighbors = self.kdtree.query(X, k=self.k, **qargs)
if k == 1:
# SciPy decided to squeeze the last dimension for k=1
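The ckdtree backend now forwards the parallelism setting through SciPy's workers argument, since SciPy 1.6.0 renamed n_jobs to workers in cKDTree.query. A short sketch of the underlying SciPy call, independent of gudhi:

    import numpy as np
    from scipy.spatial import cKDTree

    pts = np.random.rand(100, 3)
    tree = cKDTree(pts)
    # 'workers' (formerly 'n_jobs') controls parallelism; -1 uses all available CPUs.
    dist, idx = tree.query(pts, k=4, workers=-1)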
diff --git a/src/python/gudhi/representations/vector_methods.py b/src/python/gudhi/representations/vector_methods.py
index 69ff5e1e..a169aee8 100644
--- a/src/python/gudhi/representations/vector_methods.py
+++ b/src/python/gudhi/representations/vector_methods.py
@@ -85,7 +85,7 @@ class PersistenceImage(BaseEstimator, TransformerMixin):
Xfit.append(image.flatten()[np.newaxis,:])
- Xfit = np.concatenate(Xfit,0)
+ Xfit = np.concatenate(Xfit, 0)
return Xfit
@@ -123,6 +123,15 @@ def _automatic_sample_range(sample_range, X, y):
pass
return sample_range
+
+def _trim_on_edges(x, are_endpoints_nan):
+ if are_endpoints_nan[0]:
+ x = x[1:]
+ if are_endpoints_nan[1]:
+ x = x[:-1]
+ return x
+
+
class Landscape(BaseEstimator, TransformerMixin):
"""
This is a class for computing persistence landscapes from a list of persistence diagrams. A persistence landscape is a collection of 1D piecewise-linear functions computed from the rank function associated to the persistence diagram. These piecewise-linear functions are then sampled evenly on a given range and the corresponding vectors of samples are concatenated and returned. See http://jmlr.org/papers/v16/bubenik15a.html for more details.
@@ -149,6 +158,8 @@ class Landscape(BaseEstimator, TransformerMixin):
y (n x 1 array): persistence diagram labels (unused).
"""
self.sample_range = _automatic_sample_range(np.array(self.sample_range), X, y)
+ self.im_range = np.linspace(self.sample_range[0], self.sample_range[1], self.new_resolution)
+ self.im_range = _trim_on_edges(self.im_range, self.nan_in_range)
return self
def transform(self, X):
@@ -161,53 +172,26 @@ class Landscape(BaseEstimator, TransformerMixin):
Returns:
numpy array with shape (number of diagrams) x (number of samples = **num_landscapes** x **resolution**): output persistence landscapes.
"""
- num_diag, Xfit = len(X), []
- x_values = np.linspace(self.sample_range[0], self.sample_range[1], self.new_resolution)
- step_x = x_values[1] - x_values[0]
-
- for i in range(num_diag):
-
- diagram, num_pts_in_diag = X[i], X[i].shape[0]
- ls = np.zeros([self.num_landscapes, self.new_resolution])
+ Xfit = []
+ x_values = self.im_range
+ for diag in X:
+ midpoints, heights = (diag[:, 0] + diag[:, 1]) / 2., (diag[:, 1] - diag[:, 0]) / 2.
+ tent_functions = np.maximum(heights[None, :] - np.abs(x_values[:, None] - midpoints[None, :]), 0)
+ n_points = diag.shape[0]
+ # Complete the array with zeros to get the right number of landscapes
+ if self.num_landscapes > n_points:
+ tent_functions = np.concatenate(
+ [tent_functions, np.zeros((tent_functions.shape[0], self.num_landscapes-n_points))],
+ axis=1
+ )
+ tent_functions.partition(tent_functions.shape[1]-self.num_landscapes, axis=1)
+ landscapes = np.sort(tent_functions[:, -self.num_landscapes:], axis=1)[:, ::-1].T
- events = []
- for j in range(self.new_resolution):
- events.append([])
+ landscapes = np.sqrt(2) * np.ravel(landscapes)
+ Xfit.append(landscapes)
- for j in range(num_pts_in_diag):
- [px,py] = diagram[j,:2]
- min_idx = np.clip(np.ceil((px - self.sample_range[0]) / step_x).astype(int), 0, self.new_resolution)
- mid_idx = np.clip(np.ceil((0.5*(py+px) - self.sample_range[0]) / step_x).astype(int), 0, self.new_resolution)
- max_idx = np.clip(np.ceil((py - self.sample_range[0]) / step_x).astype(int), 0, self.new_resolution)
-
- if min_idx < self.new_resolution and max_idx > 0:
-
- landscape_value = self.sample_range[0] + min_idx * step_x - px
- for k in range(min_idx, mid_idx):
- events[k].append(landscape_value)
- landscape_value += step_x
-
- landscape_value = py - self.sample_range[0] - mid_idx * step_x
- for k in range(mid_idx, max_idx):
- events[k].append(landscape_value)
- landscape_value -= step_x
-
- for j in range(self.new_resolution):
- events[j].sort(reverse=True)
- for k in range( min(self.num_landscapes, len(events[j])) ):
- ls[k,j] = events[j][k]
-
- if self.nan_in_range[0]:
- ls = ls[:,1:]
- if self.nan_in_range[1]:
- ls = ls[:,:-1]
- ls = np.sqrt(2)*np.reshape(ls,[1,-1])
- Xfit.append(ls)
-
- Xfit = np.concatenate(Xfit,0)
-
- return Xfit
+ return np.stack(Xfit, axis=0)
def __call__(self, diag):
"""
@@ -219,7 +203,7 @@ class Landscape(BaseEstimator, TransformerMixin):
Returns:
numpy array with shape (number of samples = **num_landscapes** x **resolution**): output persistence landscape.
"""
- return self.fit_transform([diag])[0,:]
+ return self.fit_transform([diag])[0, :]
class Silhouette(BaseEstimator, TransformerMixin):
"""
@@ -235,6 +219,8 @@ class Silhouette(BaseEstimator, TransformerMixin):
sample_range ([double, double]): minimum and maximum for the weighted average domain, of the form [x_min, x_max] (default [numpy.nan, numpy.nan]). It is the interval on which samples will be drawn evenly. If one of the values is numpy.nan, it can be computed from the persistence diagrams with the fit() method.
"""
self.weight, self.resolution, self.sample_range = weight, resolution, sample_range
+ self.nan_in_range = np.isnan(np.array(self.sample_range))
+ self.new_resolution = self.resolution + self.nan_in_range.sum()
def fit(self, X, y=None):
"""
@@ -245,6 +231,8 @@ class Silhouette(BaseEstimator, TransformerMixin):
y (n x 1 array): persistence diagram labels (unused).
"""
self.sample_range = _automatic_sample_range(np.array(self.sample_range), X, y)
+ self.im_range = np.linspace(self.sample_range[0], self.sample_range[1], self.new_resolution)
+ self.im_range = _trim_on_edges(self.im_range, self.nan_in_range)
return self
def transform(self, X):
@@ -257,44 +245,19 @@ class Silhouette(BaseEstimator, TransformerMixin):
Returns:
numpy array with shape (number of diagrams) x (**resolution**): output persistence silhouettes.
"""
- num_diag, Xfit = len(X), []
- x_values = np.linspace(self.sample_range[0], self.sample_range[1], self.resolution)
- step_x = x_values[1] - x_values[0]
-
- for i in range(num_diag):
-
- diagram, num_pts_in_diag = X[i], X[i].shape[0]
+ Xfit = []
+ x_values = self.im_range
- sh, weights = np.zeros(self.resolution), np.zeros(num_pts_in_diag)
- for j in range(num_pts_in_diag):
- weights[j] = self.weight(diagram[j,:])
+ for diag in X:
+ midpoints, heights = (diag[:, 0] + diag[:, 1]) / 2., (diag[:, 1] - diag[:, 0]) / 2.
+ weights = np.array([self.weight(pt) for pt in diag])
total_weight = np.sum(weights)
- for j in range(num_pts_in_diag):
-
- [px,py] = diagram[j,:2]
- weight = weights[j] / total_weight
- min_idx = np.clip(np.ceil((px - self.sample_range[0]) / step_x).astype(int), 0, self.resolution)
- mid_idx = np.clip(np.ceil((0.5*(py+px) - self.sample_range[0]) / step_x).astype(int), 0, self.resolution)
- max_idx = np.clip(np.ceil((py - self.sample_range[0]) / step_x).astype(int), 0, self.resolution)
-
- if min_idx < self.resolution and max_idx > 0:
-
- silhouette_value = self.sample_range[0] + min_idx * step_x - px
- for k in range(min_idx, mid_idx):
- sh[k] += weight * silhouette_value
- silhouette_value += step_x
-
- silhouette_value = py - self.sample_range[0] - mid_idx * step_x
- for k in range(mid_idx, max_idx):
- sh[k] += weight * silhouette_value
- silhouette_value -= step_x
-
- Xfit.append(np.reshape(np.sqrt(2) * sh, [1,-1]))
-
- Xfit = np.concatenate(Xfit, 0)
+ tent_functions = np.maximum(heights[None, :] - np.abs(x_values[:, None] - midpoints[None, :]), 0)
+ silhouette = np.sum(weights[None, :] / total_weight * tent_functions, axis=1)
+ Xfit.append(silhouette * np.sqrt(2))
- return Xfit
+ return np.stack(Xfit, axis=0)
def __call__(self, diag):
"""
diff --git a/src/python/gudhi/rips_complex.pyx b/src/python/gudhi/rips_complex.pyx
index c3470292..d748f91e 100644
--- a/src/python/gudhi/rips_complex.pyx
+++ b/src/python/gudhi/rips_complex.pyx
@@ -41,31 +41,30 @@ cdef class RipsComplex:
cdef Rips_complex_interface thisref
# Fake constructor that does nothing but documenting the constructor
- def __init__(self, points=None, distance_matrix=None,
+ def __init__(self, *, points=None, distance_matrix=None,
max_edge_length=float('inf'), sparse=None):
"""RipsComplex constructor.
- :param max_edge_length: Rips value.
- :type max_edge_length: float
-
:param points: A list of points in d-Dimension.
- :type points: list of list of float
+ :type points: List[List[float]]
Or
:param distance_matrix: A distance matrix (full square or lower
triangular).
- :type points: list of list of float
+ :type distance_matrix: List[List[float]]
And in both cases
+ :param max_edge_length: Rips value.
+ :type max_edge_length: float
:param sparse: If this is not None, it switches to building a sparse
Rips and represents the approximation parameter epsilon.
:type sparse: float
"""
# The real cython constructor
- def __cinit__(self, points=None, distance_matrix=None,
+ def __cinit__(self, *, points=None, distance_matrix=None,
max_edge_length=float('inf'), sparse=None):
if sparse is not None:
if distance_matrix is not None:
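The RipsComplex constructor arguments are now keyword-only (note the added *), so a positional call such as RipsComplex(pts, 4) raises a TypeError; the test_simplex_generators.py change further down is the corresponding update. A minimal usage sketch:

    import numpy as np
    import gudhi

    pts = np.random.rand(30, 2)
    # Keyword arguments are required; gudhi.RipsComplex(pts, 2.0) would now fail.
    rips = gudhi.RipsComplex(points=pts, max_edge_length=2.0)
    st = rips.create_simplex_tree(max_dimension=2)
    print(st.num_simplices())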
diff --git a/src/python/test/test_off.py b/src/python/test/test_off.py
new file mode 100644
index 00000000..aea1941b
--- /dev/null
+++ b/src/python/test/test_off.py
@@ -0,0 +1,21 @@
+""" This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ Author(s): Marc Glisse
+
+ Copyright (C) 2022 Inria
+
+ Modification(s):
+ - YYYY/MM Author: Description of the modification
+"""
+
+import gudhi as gd
+import numpy as np
+import pytest
+
+
+def test_off_rw():
+ for dim in range(2, 6):
+ X = np.random.rand(123, dim)
+ gd.write_points_to_off_file("rand.off", X)
+ Y = gd.read_points_from_off_file("rand.off")
+ assert Y == pytest.approx(X)
diff --git a/src/python/test/test_representations.py b/src/python/test/test_representations.py
index 4a455bb6..58caab21 100755
--- a/src/python/test/test_representations.py
+++ b/src/python/test/test_representations.py
@@ -187,3 +187,67 @@ def test_kernel_empty_diagrams():
# PersistenceFisherKernel(bandwidth_fisher=1., bandwidth=1.)(empty_diag, empty_diag)
# PersistenceFisherKernel(bandwidth_fisher=1., bandwidth=1., kernel_approx=RBFSampler(gamma=1./2, n_components=100000).fit(np.ones([1,2])))(empty_diag, empty_diag)
+
+def test_silhouette_permutation_invariance():
+ dgm = _n_diags(1)[0]
+ dgm_permuted = dgm[np.random.permutation(dgm.shape[0]).astype(int)]
+ random_resolution = random.randint(50, 100) * 10
+ slt = Silhouette(resolution=random_resolution, weight=pow(2))
+
+ assert np.all(np.isclose(slt(dgm), slt(dgm_permuted)))
+
+
+def test_silhouette_multiplication_invariance():
+ dgm = _n_diags(1)[0]
+ n_repetitions = np.random.randint(2, high=10)
+ dgm_augmented = np.repeat(dgm, repeats=n_repetitions, axis=0)
+
+ random_resolution = random.randint(50, 100) * 10
+ slt = Silhouette(resolution=random_resolution, weight=pow(2))
+ assert np.all(np.isclose(slt(dgm), slt(dgm_augmented)))
+
+
+def test_silhouette_numeric():
+ dgm = np.array([[2., 3.], [5., 6.]])
+ slt = Silhouette(resolution=9, weight=pow(1), sample_range=[2., 6.])
+ #slt.fit([dgm])
+ # x_values = array([2., 2.5, 3., 3.5, 4., 4.5, 5., 5.5, 6.])
+
+ expected_silhouette = np.array([0., 0.5, 0., 0., 0., 0., 0., 0.5, 0.])/np.sqrt(2)
+ output_silhouette = slt(dgm)
+ assert np.all(np.isclose(output_silhouette, expected_silhouette))
+
+
+def test_landscape_small_persistence_invariance():
+ dgm = np.array([[2., 6.], [2., 5.], [3., 7.]])
+ small_persistence_pts = np.random.rand(10, 2)
+ small_persistence_pts[:, 1] += small_persistence_pts[:, 0]
+ small_persistence_pts += np.min(dgm)
+ dgm_augmented = np.concatenate([dgm, small_persistence_pts], axis=0)
+
+ lds = Landscape(num_landscapes=2, resolution=5)
+ lds_dgm, lds_dgm_augmented = lds(dgm), lds(dgm_augmented)
+
+ assert np.all(np.isclose(lds_dgm, lds_dgm_augmented))
+
+
+def test_landscape_numeric():
+ dgm = np.array([[2., 6.], [3., 5.]])
+ lds_ref = np.array([
+ 0., 0.5, 1., 1.5, 2., 1.5, 1., 0.5, 0., # tent of [2, 6]
+ 0., 0., 0., 0.5, 1., 0.5, 0., 0., 0.,
+ 0., 0., 0., 0., 0., 0., 0., 0., 0.,
+ 0., 0., 0., 0., 0., 0., 0., 0., 0.,
+ ])
+ lds_ref *= np.sqrt(2)
+ lds = Landscape(num_landscapes=4, resolution=9, sample_range=[2., 6.])
+ lds_dgm = lds(dgm)
+ assert np.all(np.isclose(lds_dgm, lds_ref))
+
+
+def test_landscape_nan_range():
+ dgm = np.array([[2., 6.], [3., 5.]])
+ lds = Landscape(num_landscapes=2, resolution=9, sample_range=[np.nan, 6.])
+ lds_dgm = lds(dgm)
+ assert (lds.sample_range[0] == 2) & (lds.sample_range[1] == 6)
+ assert lds.new_resolution == 10
diff --git a/src/python/test/test_simplex_generators.py b/src/python/test/test_simplex_generators.py
index 8a9b4844..c567d4c1 100755
--- a/src/python/test/test_simplex_generators.py
+++ b/src/python/test/test_simplex_generators.py
@@ -14,7 +14,7 @@ import numpy as np
def test_flag_generators():
pts = np.array([[0, 0], [0, 1.01], [1, 0], [1.02, 1.03], [100, 0], [100, 3.01], [103, 0], [103.02, 3.03]])
- r = gudhi.RipsComplex(pts, max_edge_length=4)
+ r = gudhi.RipsComplex(points=pts, max_edge_length=4)
st = r.create_simplex_tree(max_dimension=50)
st.persistence()
g = st.flag_persistence_generators()
diff --git a/src/python/test/test_subsampling.py b/src/python/test/test_subsampling.py
index 3431f372..c1cb4e3f 100755
--- a/src/python/test/test_subsampling.py
+++ b/src/python/test/test_subsampling.py
@@ -16,17 +16,9 @@ __license__ = "MIT"
def test_write_off_file_for_tests():
- file = open("subsample.off", "w")
- file.write("nOFF\n")
- file.write("2 7 0 0\n")
- file.write("1.0 1.0\n")
- file.write("7.0 0.0\n")
- file.write("4.0 6.0\n")
- file.write("9.0 6.0\n")
- file.write("0.0 14.0\n")
- file.write("2.0 19.0\n")
- file.write("9.0 17.0\n")
- file.close()
+ gudhi.write_points_to_off_file(
+ "subsample.off", [[1.0, 1.0], [7.0, 0.0], [4.0, 6.0], [9.0, 6.0], [0.0, 14.0], [2.0, 19.0], [9.0, 17.0]]
+ )
def test_simple_choose_n_farthest_points_with_a_starting_point():
@@ -34,54 +26,29 @@ def test_simple_choose_n_farthest_points_with_a_starting_point():
i = 0
for point in point_set:
# The iteration starts with the given starting point
- sub_set = gudhi.choose_n_farthest_points(
- points=point_set, nb_points=1, starting_point=i
- )
+ sub_set = gudhi.choose_n_farthest_points(points=point_set, nb_points=1, starting_point=i)
assert sub_set[0] == point_set[i]
i = i + 1
# The iteration finds then the farthest
- sub_set = gudhi.choose_n_farthest_points(
- points=point_set, nb_points=2, starting_point=1
- )
+ sub_set = gudhi.choose_n_farthest_points(points=point_set, nb_points=2, starting_point=1)
assert sub_set[1] == point_set[3]
- sub_set = gudhi.choose_n_farthest_points(
- points=point_set, nb_points=2, starting_point=3
- )
+ sub_set = gudhi.choose_n_farthest_points(points=point_set, nb_points=2, starting_point=3)
assert sub_set[1] == point_set[1]
- sub_set = gudhi.choose_n_farthest_points(
- points=point_set, nb_points=2, starting_point=0
- )
+ sub_set = gudhi.choose_n_farthest_points(points=point_set, nb_points=2, starting_point=0)
assert sub_set[1] == point_set[2]
- sub_set = gudhi.choose_n_farthest_points(
- points=point_set, nb_points=2, starting_point=2
- )
+ sub_set = gudhi.choose_n_farthest_points(points=point_set, nb_points=2, starting_point=2)
assert sub_set[1] == point_set[0]
# Test the limits
- assert (
- gudhi.choose_n_farthest_points(points=[], nb_points=0, starting_point=0) == []
- )
- assert (
- gudhi.choose_n_farthest_points(points=[], nb_points=1, starting_point=0) == []
- )
- assert (
- gudhi.choose_n_farthest_points(points=[], nb_points=0, starting_point=1) == []
- )
- assert (
- gudhi.choose_n_farthest_points(points=[], nb_points=1, starting_point=1) == []
- )
+ assert gudhi.choose_n_farthest_points(points=[], nb_points=0, starting_point=0) == []
+ assert gudhi.choose_n_farthest_points(points=[], nb_points=1, starting_point=0) == []
+ assert gudhi.choose_n_farthest_points(points=[], nb_points=0, starting_point=1) == []
+ assert gudhi.choose_n_farthest_points(points=[], nb_points=1, starting_point=1) == []
# From off file test
for i in range(0, 7):
- assert (
- len(
- gudhi.choose_n_farthest_points(
- off_file="subsample.off", nb_points=i, starting_point=i
- )
- )
- == i
- )
+ assert len(gudhi.choose_n_farthest_points(off_file="subsample.off", nb_points=i, starting_point=i)) == i
def test_simple_choose_n_farthest_points_randomed():
@@ -104,10 +71,7 @@ def test_simple_choose_n_farthest_points_randomed():
# From off file test
for i in range(0, 7):
- assert (
- len(gudhi.choose_n_farthest_points(off_file="subsample.off", nb_points=i))
- == i
- )
+ assert len(gudhi.choose_n_farthest_points(off_file="subsample.off", nb_points=i)) == i
def test_simple_pick_n_random_points():
@@ -130,9 +94,7 @@ def test_simple_pick_n_random_points():
# From off file test
for i in range(0, 7):
- assert (
- len(gudhi.pick_n_random_points(off_file="subsample.off", nb_points=i)) == i
- )
+ assert len(gudhi.pick_n_random_points(off_file="subsample.off", nb_points=i)) == i
def test_simple_sparsify_points():
@@ -152,31 +114,10 @@ def test_simple_sparsify_points():
]
assert gudhi.sparsify_point_set(points=point_set, min_squared_dist=2.001) == [[0, 1]]
- assert (
- len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=0.0))
- == 7
- )
- assert (
- len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=30.0))
- == 5
- )
- assert (
- len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=40.1))
- == 4
- )
- assert (
- len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=89.9))
- == 3
- )
- assert (
- len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=100.0))
- == 2
- )
- assert (
- len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=324.9))
- == 2
- )
- assert (
- len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=325.01))
- == 1
- )
+ assert len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=0.0)) == 7
+ assert len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=30.0)) == 5
+ assert len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=40.1)) == 4
+ assert len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=89.9)) == 3
+ assert len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=100.0)) == 2
+ assert len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=324.9)) == 2
+ assert len(gudhi.sparsify_point_set(off_file="subsample.off", min_squared_dist=325.01)) == 1