author     Vincent Rouvreau <vincent.rouvreau@inria.fr>    2022-08-05 10:18:19 +0200
committer  Vincent Rouvreau <vincent.rouvreau@inria.fr>    2022-08-05 10:18:19 +0200
commit     2b4bf47e209225a56687b2a7fa65b27ef4b00ab2 (patch)
tree       e876553cc2f8c18e0f504a33b26230d0e3e491ed
parent     3253e504a0564bc75ffd4b1351e800593ffefd0f (diff)
parent     7fa45f4f0c7fb89abf64bc61b26a6201ace16a7a (diff)
Merge master and fix conflicts
-rw-r--r--  .circleci/config.yml  14
-rw-r--r--  .github/for_maintainers/tests_strategy.md  5
-rw-r--r--  .gitignore  4
-rw-r--r--  CMakeLists.txt  12
-rw-r--r--  src/Alpha_complex/include/gudhi/Alpha_complex.h  16
-rw-r--r--  src/CMakeLists.txt  12
-rw-r--r--  src/Cech_complex/benchmark/CMakeLists.txt  18
-rw-r--r--  src/Cech_complex/benchmark/cech_complex_benchmark.cpp  169
-rw-r--r--  src/Cech_complex/concept/SimplicialComplexForCech.h  4
-rw-r--r--  src/Cech_complex/doc/Intro_cech_complex.h  15
-rw-r--r--  src/Cech_complex/example/CMakeLists.txt  16
-rw-r--r--  src/Cech_complex/example/cech_complex_example_from_points.cpp  31
-rw-r--r--  src/Cech_complex/example/cech_complex_step_by_step.cpp  154
-rw-r--r--  src/Cech_complex/include/gudhi/Cech_complex.h  84
-rw-r--r--  src/Cech_complex/include/gudhi/Cech_complex_blocker.h  92
-rw-r--r--  src/Cech_complex/include/gudhi/Miniball.COPYRIGHT  4
-rw-r--r--  src/Cech_complex/include/gudhi/Miniball.README  26
-rw-r--r--  src/Cech_complex/include/gudhi/Miniball.hpp  523
-rw-r--r--  src/Cech_complex/include/gudhi/Sphere_circumradius.h  65
-rw-r--r--  src/Cech_complex/test/CMakeLists.txt  19
-rw-r--r--  src/Cech_complex/test/test_cech_complex.cpp  80
-rw-r--r--  src/Cech_complex/utilities/CMakeLists.txt  24
-rw-r--r--  src/Cech_complex/utilities/cech_persistence.cpp  10
-rw-r--r--  src/Doxyfile.in  15
-rw-r--r--  src/cmake/modules/GUDHI_options.cmake  9
-rw-r--r--  src/cmake/modules/GUDHI_submodules.cmake  5
-rw-r--r--  src/cmake/modules/GUDHI_third_party_libraries.cmake  6
-rw-r--r--  src/common/doc/examples.h  1
-rw-r--r--  src/common/doc/footer.html  13
-rw-r--r--  src/common/doc/header.html  12
-rw-r--r--  src/common/doc/installation.h  11
-rw-r--r--  src/common/doc/main_page.md  4
-rwxr-xr-x[-rw-r--r--]  src/common/doc/stylesheet.css  1371
-rw-r--r--  src/common/include/gudhi/distance_functions.h  49
-rw-r--r--  src/python/CMakeLists.txt  9
-rw-r--r--  src/python/doc/cubical_complex_sum.inc  3
-rw-r--r--  src/python/doc/cubical_complex_tflow_itf_ref.rst  40
-rw-r--r--  src/python/doc/differentiation_sum.inc  12
-rw-r--r--  src/python/doc/installation.rst  8
-rw-r--r--  src/python/doc/ls_simplex_tree_tflow_itf_ref.rst  53
-rw-r--r--  src/python/doc/rips_complex_sum.inc  5
-rw-r--r--  src/python/doc/rips_complex_tflow_itf_ref.rst  48
-rw-r--r--  src/python/doc/simplex_tree_sum.inc  23
-rw-r--r--  src/python/gudhi/simplex_tree.pyx  6
-rw-r--r--  src/python/gudhi/tensorflow/__init__.py  5
-rw-r--r--  src/python/gudhi/tensorflow/cubical_layer.py  82
-rw-r--r--  src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py  87
-rw-r--r--  src/python/gudhi/tensorflow/rips_layer.py  93
-rw-r--r--  src/python/test/test_diff.py  78
-rwxr-xr-x  src/python/test/test_dtm.py  16
50 files changed, 1013 insertions, 2448 deletions
diff --git a/.circleci/config.yml b/.circleci/config.yml
index 90737006..e2df5c87 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -58,7 +58,7 @@ jobs:
git submodule update
mkdir build
cd build
- cmake -DUSER_VERSION_DIR=version ..
+ cmake -DWITH_GUDHI_THIRD_PARTY=OFF -DUSER_VERSION_DIR=version ..
make user_version
cd version
cmake -DCMAKE_BUILD_TYPE=Release -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=ON -DPython_ADDITIONAL_VERSIONS=3 -DWITH_GUDHI_REMOTE_TEST=ON .
@@ -79,7 +79,7 @@ jobs:
doxygen:
docker:
- - image: gudhi/ci_for_gudhi:latest
+ - image: gudhi/doxygen_for_gudhi:latest
steps:
- checkout
- run:
@@ -89,15 +89,15 @@ jobs:
git submodule update
mkdir build
cd build
- cmake -DCMAKE_BUILD_TYPE=Release -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=OFF -DUSER_VERSION_DIR=version ..
+ cmake -DWITH_GUDHI_THIRD_PARTY=OFF -DUSER_VERSION_DIR=version ..
make user_version
cd version
mkdir build
cd build
- cmake -DCMAKE_BUILD_TYPE=Release -DWITH_GUDHI_EXAMPLE=OFF -DWITH_GUDHI_TEST=OFF -DWITH_GUDHI_UTILITIES=OFF -DWITH_GUDHI_PYTHON=OFF ..
- make doxygen 2>&1 | tee dox.log
- grep warning dox.log
- cp dox.log html/
+ cmake -DWITH_GUDHI_THIRD_PARTY=OFF ..
+ make doxygen
+ grep warning doxygen.log
+ cp doxygen.log html/
cp -R html /tmp/doxygen
- store_artifacts:
diff --git a/.github/for_maintainers/tests_strategy.md b/.github/for_maintainers/tests_strategy.md
index 338d4282..d0ae76ef 100644
--- a/.github/for_maintainers/tests_strategy.md
+++ b/.github/for_maintainers/tests_strategy.md
@@ -4,6 +4,11 @@ This document tries to sum up the tests strategy that has been put in place for
The aim is to help maintainers to anticipate third parties modifications, updates.
+## CMake options
+
+[CMake GUDHI options](../../src/cmake/modules/GUDHI_options.cmake) allow activating/deactivating what should be built and tested.
+Note the special option `WITH_GUDHI_THIRD_PARTY`: when set to `OFF`, it speeds up, for instance, doxygen documentation generation or the `user_version` target.
+
## Builds
### Linux
diff --git a/.gitignore b/.gitignore
index 6aab7337..9f427fb2 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,5 +1,5 @@
# Classical CMake build directory
-build/
+/*build*
# Generated by Cython
src/python/gudhi/*.cpp
@@ -16,5 +16,3 @@ data/points/human.off_sc.txt
# IDE specific
# CLion
.idea/
-cmake-build-debug/
-
diff --git a/CMakeLists.txt b/CMakeLists.txt
index ac877eea..6c233459 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -13,8 +13,12 @@ set(GUDHI_MISSING_MODULES "" CACHE INTERNAL "GUDHI_MISSING_MODULES")
# This variable is used by Cython CMakeLists.txt and by GUDHI_third_party_libraries to know its path
set(GUDHI_PYTHON_PATH "src/python")
-# For third parties libraries management - To be done last as CGAL updates CMAKE_MODULE_PATH
-include(GUDHI_third_party_libraries NO_POLICY_SCOPE)
+include(GUDHI_submodules)
+
+if (WITH_GUDHI_THIRD_PARTY)
+ # For third parties libraries management - To be done last as CGAL updates CMAKE_MODULE_PATH
+ include(GUDHI_third_party_libraries NO_POLICY_SCOPE)
+endif()
include(GUDHI_compilation_flags)
@@ -52,7 +56,9 @@ foreach(GUDHI_MODULE ${GUDHI_MODULES})
endforeach()
endforeach()
-add_subdirectory(src/GudhUI)
+if (WITH_GUDHI_THIRD_PARTY)
+ add_subdirectory(src/GudhUI)
+endif()
if (WITH_GUDHI_PYTHON)
# specific for cython module
diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex.h b/src/Alpha_complex/include/gudhi/Alpha_complex.h
index b1a9407b..aec8c1b1 100644
--- a/src/Alpha_complex/include/gudhi/Alpha_complex.h
+++ b/src/Alpha_complex/include/gudhi/Alpha_complex.h
@@ -461,10 +461,10 @@ class Alpha_complex {
void propagate_alpha_filtration(SimplicialComplexForAlpha& complex, Simplex_handle f_simplex) {
// From SimplicialComplexForAlpha type required to assign filtration values.
using Filtration_value = typename SimplicialComplexForAlpha::Filtration_value;
- using Vertex_handle = typename SimplicialComplexForAlpha::Vertex_handle;
// ### Foreach Tau face of Sigma
- for (auto f_boundary : complex.boundary_simplex_range(f_simplex)) {
+ for (auto face_opposite_vertex : complex.boundary_opposite_vertex_simplex_range(f_simplex)) {
+ auto f_boundary = face_opposite_vertex.first;
#ifdef DEBUG_TRACES
std::clog << " | --------------------------------------------------\n";
std::clog << " | Tau ";
@@ -485,18 +485,10 @@ class Alpha_complex {
#endif // DEBUG_TRACES
// ### Else
} else {
- // Find which vertex of f_simplex is missing in f_boundary. We could actually write a variant of boundary_simplex_range that gives pairs (f_boundary, vertex). We rely on the fact that simplex_vertex_range is sorted.
- auto longlist = complex.simplex_vertex_range(f_simplex);
- auto shortlist = complex.simplex_vertex_range(f_boundary);
- auto longiter = std::begin(longlist);
- auto shortiter = std::begin(shortlist);
- auto enditer = std::end(shortlist);
- while(shortiter != enditer && *longiter == *shortiter) { ++longiter; ++shortiter; }
- Vertex_handle extra = *longiter;
auto const& cache=get_cache(complex, f_boundary);
- bool is_gab = kernel_.is_gabriel(cache, get_point_(extra));
+ bool is_gab = kernel_.is_gabriel(cache, get_point_(face_opposite_vertex.second));
#ifdef DEBUG_TRACES
- std::clog << " | Tau is_gabriel(Sigma)=" << is_gab << " - vertexForGabriel=" << extra << std::endl;
+ std::clog << " | Tau is_gabriel(Sigma)=" << is_gab << " - vertexForGabriel=" << face_opposite_vertex.second << std::endl;
#endif // DEBUG_TRACES
// ### If Tau is not Gabriel of Sigma
if (false == is_gab) {
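The hunk above replaces the manual search for the vertex of Sigma missing from Tau with `boundary_opposite_vertex_simplex_range`, which yields each facet together with that missing vertex directly. A minimal sketch of how such a range is consumed, assuming a Simplex_tree-like complex `st` and a simplex handle `sh` (both hypothetical names):

    // Sketch only: iterate over the facets of `sh` together with their opposite vertices.
    for (auto face_opposite_vertex : st.boundary_opposite_vertex_simplex_range(sh)) {
      auto facet = face_opposite_vertex.first;             // Simplex_handle of the facet Tau
      auto opposite_vertex = face_opposite_vertex.second;  // Vertex_handle of Sigma not in Tau
      // The facet and its opposite vertex feed the Gabriel test, as in the hunk above.
    }
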
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index 8023e04c..f9f77ef7 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -12,8 +12,12 @@ set(GUDHI_MISSING_MODULES "" CACHE INTERNAL "GUDHI_MISSING_MODULES")
# This variable is used by Cython CMakeLists.txt and by GUDHI_third_party_libraries to know its path
set(GUDHI_PYTHON_PATH "python")
-# For third parties libraries management - To be done last as CGAL updates CMAKE_MODULE_PATH
-include(GUDHI_third_party_libraries NO_POLICY_SCOPE)
+include(GUDHI_submodules)
+
+if (WITH_GUDHI_THIRD_PARTY)
+ # For third parties libraries management - To be done last as CGAL updates CMAKE_MODULE_PATH
+ include(GUDHI_third_party_libraries NO_POLICY_SCOPE)
+endif()
include(GUDHI_compilation_flags)
@@ -67,7 +71,9 @@ foreach(GUDHI_MODULE ${GUDHI_MODULES})
endforeach()
endforeach()
-add_subdirectory(GudhUI)
+if (WITH_GUDHI_THIRD_PARTY)
+ add_subdirectory(GudhUI)
+endif()
message("++ GUDHI_MODULES list is:\"${GUDHI_MODULES}\"")
message("++ GUDHI_MISSING_MODULES list is:\"${GUDHI_MISSING_MODULES}\"")
diff --git a/src/Cech_complex/benchmark/CMakeLists.txt b/src/Cech_complex/benchmark/CMakeLists.txt
index bc54c0f3..a6b3d70b 100644
--- a/src/Cech_complex/benchmark/CMakeLists.txt
+++ b/src/Cech_complex/benchmark/CMakeLists.txt
@@ -1,13 +1,15 @@
project(Cech_complex_benchmark)
-# Do not forget to copy test files in current binary dir
-file(COPY "${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 5.0.1)
+ # Do not forget to copy test files in current binary dir
+ file(COPY "${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
-if(TARGET Boost::filesystem)
- add_executable(cech_complex_benchmark cech_complex_benchmark.cpp)
- target_link_libraries(cech_complex_benchmark Boost::filesystem)
+ if(TARGET Boost::filesystem)
+ add_executable(cech_complex_benchmark cech_complex_benchmark.cpp)
+ target_link_libraries(cech_complex_benchmark Boost::filesystem)
- if (TBB_FOUND)
- target_link_libraries(cech_complex_benchmark ${TBB_LIBRARIES})
+ if (TBB_FOUND)
+ target_link_libraries(cech_complex_benchmark ${TBB_LIBRARIES})
+ endif()
endif()
-endif()
\ No newline at end of file
+endif()
diff --git a/src/Cech_complex/benchmark/cech_complex_benchmark.cpp b/src/Cech_complex/benchmark/cech_complex_benchmark.cpp
index 2e4adce4..a9dc5d0d 100644
--- a/src/Cech_complex/benchmark/cech_complex_benchmark.cpp
+++ b/src/Cech_complex/benchmark/cech_complex_benchmark.cpp
@@ -10,12 +10,13 @@
#include <gudhi/Points_off_io.h>
#include <gudhi/distance_functions.h>
-#include <gudhi/graph_simplicial_complex.h>
#include <gudhi/Clock.h>
#include <gudhi/Rips_complex.h>
#include <gudhi/Cech_complex.h>
#include <gudhi/Simplex_tree.h>
-#include <gudhi/Miniball.hpp>
+
+#include <CGAL/Epick_d.h>
+#include <CGAL/Epeck_d.h>
#include "boost/filesystem.hpp" // includes all needed Boost.Filesystem declarations
@@ -26,107 +27,81 @@
using Simplex_tree = Gudhi::Simplex_tree<>;
using Filtration_value = Simplex_tree::Filtration_value;
using Point = std::vector<Filtration_value>;
-using Point_cloud = std::vector<Point>;
using Points_off_reader = Gudhi::Points_off_reader<Point>;
-using Proximity_graph = Gudhi::Proximity_graph<Simplex_tree>;
using Rips_complex = Gudhi::rips_complex::Rips_complex<Filtration_value>;
-using Cech_complex = Gudhi::cech_complex::Cech_complex<Simplex_tree, Point_cloud>;
-
-class Minimal_enclosing_ball_radius {
- public:
- // boost::range_value is not SFINAE-friendly so we cannot use it in the return type
- template <typename Point>
- typename std::iterator_traits<typename boost::range_iterator<Point>::type>::value_type operator()(
- const Point& p1, const Point& p2) const {
- // Type def
- using Point_cloud = std::vector<Point>;
- using Point_iterator = typename Point_cloud::const_iterator;
- using Coordinate_iterator = typename Point::const_iterator;
- using Min_sphere =
- typename Gudhi::Miniball::Miniball<Gudhi::Miniball::CoordAccessor<Point_iterator, Coordinate_iterator>>;
-
- Point_cloud point_cloud;
- point_cloud.push_back(p1);
- point_cloud.push_back(p2);
-
- GUDHI_CHECK((p1.end() - p1.begin()) == (p2.end() - p2.begin()), "inconsistent point dimensions");
- Min_sphere min_sphere(p1.end() - p1.begin(), point_cloud.begin(), point_cloud.end());
- return std::sqrt(min_sphere.squared_radius());
- }
-};
+template<typename Kernel>
+Simplex_tree benchmark_cech(const std::string& off_file_points, const Filtration_value& radius, const int& dim_max, const bool exact) {
+ using Point_cgal = typename Kernel::Point_d;
+ using Points_off_reader_cgal = Gudhi::Points_off_reader<Point_cgal>;
+ using Cech_complex = Gudhi::cech_complex::Cech_complex<Kernel, Simplex_tree>;
+
+ // Extract the points from the file filepoints
+ Points_off_reader_cgal off_reader_cgal(off_file_points);
+
+ Gudhi::Clock cech_clock("Cech computation");
+ Cech_complex cech_complex_from_points(off_reader_cgal.get_point_cloud(), radius);
+ Simplex_tree cech_stree;
+ cech_complex_from_points.create_complex(cech_stree, dim_max, exact);
+
+ // ------------------------------------------
+ // Display information about the Cech complex
+ // ------------------------------------------
+ double cech_sec = cech_clock.num_seconds();
+ std::clog << cech_sec << " ; ";
+ return cech_stree;
+}
int main(int argc, char* argv[]) {
- std::string off_file_points = "tore3D_1307.off";
- Filtration_value threshold = 1e20;
-
- // Extract the points from the file filepoints
- Points_off_reader off_reader(off_file_points);
-
- Gudhi::Clock euclidean_clock("Gudhi::Euclidean_distance");
- // Compute the proximity graph of the points
- Proximity_graph euclidean_prox_graph = Gudhi::compute_proximity_graph<Simplex_tree>(
- off_reader.get_point_cloud(), threshold, Gudhi::Euclidean_distance());
-
- std::clog << euclidean_clock << std::endl;
-
- Gudhi::Clock miniball_clock("Minimal_enclosing_ball_radius");
- // Compute the proximity graph of the points
- Proximity_graph miniball_prox_graph = Gudhi::compute_proximity_graph<Simplex_tree>(
- off_reader.get_point_cloud(), threshold, Minimal_enclosing_ball_radius());
- std::clog << miniball_clock << std::endl;
-
- Gudhi::Clock common_miniball_clock("Gudhi::Minimal_enclosing_ball_radius()");
- // Compute the proximity graph of the points
- Proximity_graph common_miniball_prox_graph = Gudhi::compute_proximity_graph<Simplex_tree>(
- off_reader.get_point_cloud(), threshold, Gudhi::Minimal_enclosing_ball_radius());
- std::clog << common_miniball_clock << std::endl;
-
- boost::filesystem::path full_path(boost::filesystem::current_path());
- std::clog << "Current path is : " << full_path << std::endl;
-
- std::clog << "File name;Radius;Rips time;Cech time; Ratio Rips/Cech time;Rips nb simplices;Cech nb simplices;"
- << std::endl;
- boost::filesystem::directory_iterator end_itr; // default construction yields past-the-end
- for (boost::filesystem::directory_iterator itr(boost::filesystem::current_path()); itr != end_itr; ++itr) {
- if (!boost::filesystem::is_directory(itr->status())) {
- if (itr->path().extension() == ".off") // see below
- {
- Points_off_reader off_reader(itr->path().string());
- Point p0 = off_reader.get_point_cloud()[0];
-
- for (Filtration_value radius = 0.1; radius < 0.4; radius += 0.1) {
- std::clog << itr->path().stem() << ";";
- std::clog << radius << ";";
- Gudhi::Clock rips_clock("Rips computation");
- Rips_complex rips_complex_from_points(off_reader.get_point_cloud(), radius,
- Gudhi::Minimal_enclosing_ball_radius());
- Simplex_tree rips_stree;
- rips_complex_from_points.create_complex(rips_stree, p0.size() - 1);
- // ------------------------------------------
- // Display information about the Rips complex
- // ------------------------------------------
- double rips_sec = rips_clock.num_seconds();
- std::clog << rips_sec << ";";
-
- Gudhi::Clock cech_clock("Cech computation");
- Cech_complex cech_complex_from_points(off_reader.get_point_cloud(), radius);
- Simplex_tree cech_stree;
- cech_complex_from_points.create_complex(cech_stree, p0.size() - 1);
- // ------------------------------------------
- // Display information about the Cech complex
- // ------------------------------------------
- double cech_sec = cech_clock.num_seconds();
- std::clog << cech_sec << ";";
- std::clog << cech_sec / rips_sec << ";";
-
- assert(rips_stree.num_simplices() >= cech_stree.num_simplices());
- std::clog << rips_stree.num_simplices() << ";";
- std::clog << cech_stree.num_simplices() << ";" << std::endl;
+ boost::filesystem::path full_path(boost::filesystem::current_path());
+ std::clog << "Current path is : " << full_path << std::endl;
+
+ std::clog << "File name ; Radius ; Rips time ; Dim-3 Fast Cech time ; Dynamic_dim Fast Cech time ; "
+ "Dim-3 Safe Cech time ; Dynamic_dim Safe Cech time ; Dim-3 Exact Cech time ; Dynamic_dim Exact Cech time ; "
+ "Cech nb simplices ; Rips nb simplices;"
+ << std::endl;
+ boost::filesystem::directory_iterator end_itr; // default construction yields past-the-end
+ // For every ".off" file in the current directory, and for 3 predefined thresholds, compare Rips and various Cech constructions
+ for (boost::filesystem::directory_iterator itr(boost::filesystem::current_path()); itr != end_itr; ++itr) {
+ if (!boost::filesystem::is_directory(itr->status())) {
+ if (itr->path().extension() == ".off") {
+ Points_off_reader off_reader(itr->path().string());
+ Point p0 = off_reader.get_point_cloud()[0];
+ // Loop over the different thresholds
+ for (Filtration_value radius = 0.1; radius < 0.35; radius += 0.1) {
+ std::clog << itr->path().stem() << " ; ";
+ std::clog << radius << " ; ";
+
+ Gudhi::Clock rips_clock("Rips computation");
+ Rips_complex rips_complex_from_points(off_reader.get_point_cloud(), radius, Gudhi::Euclidean_distance());
+ Simplex_tree rips_stree;
+ int dim_max = p0.size() - 1;
+ rips_complex_from_points.create_complex(rips_stree, dim_max);
+ // ------------------------------------------
+ // Display information about the Rips complex
+ // ------------------------------------------
+ double rips_sec = rips_clock.num_seconds();
+ std::clog << rips_sec << " ; ";
+
+ // --------------
+ // Cech complex
+ // --------------
+ // Fast
+ benchmark_cech<CGAL::Epick_d<CGAL::Dimension_tag<3>>>(itr->path().string(), radius, dim_max, false);
+ benchmark_cech<CGAL::Epick_d<CGAL::Dynamic_dimension_tag>>(itr->path().string(), radius, dim_max, false);
+ // Safe
+ benchmark_cech<CGAL::Epeck_d<CGAL::Dimension_tag<3>>>(itr->path().string(), radius, dim_max, false);
+ benchmark_cech<CGAL::Epeck_d<CGAL::Dynamic_dimension_tag>>(itr->path().string(), radius, dim_max, false);
+ // Exact
+ benchmark_cech<CGAL::Epeck_d<CGAL::Dimension_tag<3>>>(itr->path().string(), radius, dim_max, true);
+ auto cech_stree = benchmark_cech<CGAL::Epeck_d<CGAL::Dynamic_dimension_tag>>(itr->path().string(), radius, dim_max, true);
+
+ std::clog << cech_stree.num_simplices() << " ; ";
+ std::clog << rips_stree.num_simplices() << ";" << std::endl;
+ }
+ }
}
- }
}
- }
- return 0;
+ return 0;
}
diff --git a/src/Cech_complex/concept/SimplicialComplexForCech.h b/src/Cech_complex/concept/SimplicialComplexForCech.h
index 00c7df3a..6202fe92 100644
--- a/src/Cech_complex/concept/SimplicialComplexForCech.h
+++ b/src/Cech_complex/concept/SimplicialComplexForCech.h
@@ -47,8 +47,8 @@ struct SimplicialComplexForCech {
};
-} // namespace alpha_complex
+} // namespace cech_complex
} // namespace Gudhi
-#endif // CONCEPT_ALPHA_COMPLEX_SIMPLICIAL_COMPLEX_FOR_ALPHA_H_
+#endif // CONCEPT_CECH_COMPLEX_SIMPLICIAL_COMPLEX_FOR_CECH_H_
diff --git a/src/Cech_complex/doc/Intro_cech_complex.h b/src/Cech_complex/doc/Intro_cech_complex.h
index 095fd320..595fb64b 100644
--- a/src/Cech_complex/doc/Intro_cech_complex.h
+++ b/src/Cech_complex/doc/Intro_cech_complex.h
@@ -28,7 +28,7 @@ namespace cech_complex {
* <a target="_blank" href="https://en.wikipedia.org/wiki/Simplicial_complex">simplicial complex</a> constructed
* from a proximity graph. The set of all simplices is filtered by the radius of their minimal enclosing ball.
*
- * The input shall be a point cloud in an Euclidean space.
+ * The input shall be a range of points where a point is defined as <a target="_blank" href="https://doc.cgal.org/latest/Kernel_d/classCGAL_1_1Point__d.html">CGAL kernel Point_d.</a>
*
* \remark For people only interested in the topology of the \ref cech_complex (for instance persistence),
* \ref alpha_complex is equivalent to the \ref cech_complex and much smaller if you do not bound the radii.
@@ -37,8 +37,7 @@ namespace cech_complex {
* \subsection cechalgorithm Algorithm
*
* Cech_complex first builds a proximity graph from a point cloud.
- * The filtration value of each edge of the `Gudhi::Proximity_graph` is computed from
- * `Gudhi::Minimal_enclosing_ball_radius` function.
+ * The filtration value of each edge of the `Gudhi::Proximity_graph` is computed using CGAL kernel functions.
*
* All edges that have a filtration value strictly greater than a user given maximal radius value, \f$max\_radius\f$,
* are not inserted into the complex.
@@ -60,19 +59,9 @@ namespace cech_complex {
*
* \image html "cech_complex_representation.png" "Čech complex expansion"
*
- * The minimal ball radius computation is insured by
- * <a target="_blank" href="https://people.inf.ethz.ch/gaertner/subdir/software/miniball.html">
- * the miniball software (V3.0)</a> - Smallest Enclosing Balls of Points - and distributed with GUDHI.
- * Please refer to
- * <a target="_blank" href="https://people.inf.ethz.ch/gaertner/subdir/texts/own_work/esa99_final.pdf">
- * the miniball software design description</a> for more information about this computation.
- *
* This radius computation is the reason why the Cech_complex is taking much more time to be computed than the
* \ref rips_complex but it offers more topological guarantees.
*
- * If the Cech_complex interfaces are not detailed enough for your need, please refer to the example
- * \gudhi_example_link{Cech_complex,cech_complex_step_by_step.cpp}, where the graph construction over the Simplex_tree is more detailed.
- *
* \subsection cechpointscloudexample Example from a point cloud
*
* This example builds the proximity graph from the given points, and maximal radius values.
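The documentation above now states that edge filtration values of the proximity graph are computed with CGAL kernel functions. A minimal sketch of that computation for a single edge, assuming a hypothetical helper `edge_filtration`; for two points the circumradius is simply half the distance between them:

    #include <CGAL/Epeck_d.h>
    #include <cmath>

    using Kernel = CGAL::Epeck_d<CGAL::Dimension_tag<2>>;
    using Point_d = Kernel::Point_d;

    // Radius of the smallest ball containing the two endpoints of an edge.
    double edge_filtration(const Point_d& p, const Point_d& q) {
      Kernel k;
      Point_d pts[] = {p, q};
      // The circumcenter of two points is their midpoint.
      Point_d c = k.construct_circumcenter_d_object()(pts, pts + 2);
      return std::sqrt(CGAL::to_double(k.squared_distance_d_object()(c, p)));
    }
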
diff --git a/src/Cech_complex/example/CMakeLists.txt b/src/Cech_complex/example/CMakeLists.txt
index 1b08c7cb..7d52ed5e 100644
--- a/src/Cech_complex/example/CMakeLists.txt
+++ b/src/Cech_complex/example/CMakeLists.txt
@@ -1,17 +1,9 @@
project(Cech_complex_examples)
-if (TARGET Boost::program_options)
- add_executable ( Cech_complex_example_step_by_step cech_complex_step_by_step.cpp )
- target_link_libraries(Cech_complex_example_step_by_step Boost::program_options)
+if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 5.0.1)
+ add_executable ( Cech_complex_example_from_points cech_complex_example_from_points.cpp)
if (TBB_FOUND)
- target_link_libraries(Cech_complex_example_step_by_step ${TBB_LIBRARIES})
+ target_link_libraries(Cech_complex_example_from_points ${TBB_LIBRARIES})
endif()
- add_test(NAME Cech_complex_utility_from_rips_on_tore_3D COMMAND $<TARGET_FILE:Cech_complex_example_step_by_step>
- "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off" "-r" "0.25" "-d" "3")
+ add_test(NAME Cech_complex_example_from_points COMMAND $<TARGET_FILE:Cech_complex_example_from_points>)
endif()
-
-add_executable ( Cech_complex_example_from_points cech_complex_example_from_points.cpp)
-if (TBB_FOUND)
- target_link_libraries(Cech_complex_example_from_points ${TBB_LIBRARIES})
-endif()
-add_test(NAME Cech_complex_example_from_points COMMAND $<TARGET_FILE:Cech_complex_example_from_points>)
diff --git a/src/Cech_complex/example/cech_complex_example_from_points.cpp b/src/Cech_complex/example/cech_complex_example_from_points.cpp
index 1a1f708c..ef9071ec 100644
--- a/src/Cech_complex/example/cech_complex_example_from_points.cpp
+++ b/src/Cech_complex/example/cech_complex_example_from_points.cpp
@@ -1,30 +1,33 @@
#include <gudhi/Cech_complex.h>
#include <gudhi/Simplex_tree.h>
+#include <CGAL/Epeck_d.h> // For EXACT or SAFE version
+
#include <iostream>
#include <string>
#include <vector>
-#include <array>
int main() {
// Type definitions
- using Point_cloud = std::vector<std::array<double, 2>>;
using Simplex_tree = Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_fast_persistence>;
using Filtration_value = Simplex_tree::Filtration_value;
- using Cech_complex = Gudhi::cech_complex::Cech_complex<Simplex_tree, Point_cloud>;
+ using Kernel = CGAL::Epeck_d<CGAL::Dimension_tag<2>>;
+ using Point = typename Kernel::Point_d;
+ using Point_cloud = std::vector<Point>;
+ using Cech_complex = Gudhi::cech_complex::Cech_complex<Kernel, Simplex_tree>;
Point_cloud points;
- points.push_back({1., 0.}); // 0
- points.push_back({0., 1.}); // 1
- points.push_back({2., 1.}); // 2
- points.push_back({3., 2.}); // 3
- points.push_back({0., 3.}); // 4
- points.push_back({3. + std::sqrt(3.), 3.}); // 5
- points.push_back({1., 4.}); // 6
- points.push_back({3., 4.}); // 7
- points.push_back({2., 4. + std::sqrt(3.)}); // 8
- points.push_back({0., 4.}); // 9
- points.push_back({-0.5, 2.}); // 10
+ points.emplace_back(1., 0.); // 0
+ points.emplace_back(0., 1.); // 1
+ points.emplace_back(2., 1.); // 2
+ points.emplace_back(3., 2.); // 3
+ points.emplace_back(0., 3.); // 4
+ points.emplace_back(3. + std::sqrt(3.), 3.); // 5
+ points.emplace_back(1., 4.); // 6
+ points.emplace_back(3., 4.); // 7
+ points.emplace_back(2., 4. + std::sqrt(3.)); // 8
+ points.emplace_back(0., 4.); // 9
+ points.emplace_back(-0.5, 2.); // 10
// ----------------------------------------------------------------------------
// Init of a Cech complex from points
diff --git a/src/Cech_complex/example/cech_complex_step_by_step.cpp b/src/Cech_complex/example/cech_complex_step_by_step.cpp
deleted file mode 100644
index f59f0293..00000000
--- a/src/Cech_complex/example/cech_complex_step_by_step.cpp
+++ /dev/null
@@ -1,154 +0,0 @@
-/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
- * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
- * Author(s): Vincent Rouvreau
- *
- * Copyright (C) 2018 Inria
- *
- * Modification(s):
- * - YYYY/MM Author: Description of the modification
- */
-
-#include <gudhi/graph_simplicial_complex.h>
-#include <gudhi/distance_functions.h>
-#include <gudhi/Simplex_tree.h>
-#include <gudhi/Points_off_io.h>
-
-#include <gudhi/Miniball.hpp>
-
-#include <boost/program_options.hpp>
-
-#include <string>
-#include <vector>
-#include <limits> // infinity
-#include <utility> // for pair
-#include <map>
-
-// ----------------------------------------------------------------------------
-// rips_persistence_step_by_step is an example of each step that is required to
-// build a Rips over a Simplex_tree. Please refer to rips_persistence to see
-// how to do the same thing with the Rips_complex wrapper for less detailed
-// steps.
-// ----------------------------------------------------------------------------
-
-// Types definition
-using Simplex_tree = Gudhi::Simplex_tree<>;
-using Simplex_handle = Simplex_tree::Simplex_handle;
-using Filtration_value = Simplex_tree::Filtration_value;
-using Point = std::vector<double>;
-using Points_off_reader = Gudhi::Points_off_reader<Point>;
-using Proximity_graph = Gudhi::Proximity_graph<Simplex_tree>;
-
-class Cech_blocker {
- private:
- using Point_cloud = std::vector<Point>;
- using Point_iterator = Point_cloud::const_iterator;
- using Coordinate_iterator = Point::const_iterator;
- using Min_sphere = Gudhi::Miniball::Miniball<Gudhi::Miniball::CoordAccessor<Point_iterator, Coordinate_iterator>>;
-
- public:
- bool operator()(Simplex_handle sh) {
- std::vector<Point> points;
- for (auto vertex : simplex_tree_.simplex_vertex_range(sh)) {
- points.push_back(point_cloud_[vertex]);
-#ifdef DEBUG_TRACES
- std::clog << "#(" << vertex << ")#";
-#endif // DEBUG_TRACES
- }
- Filtration_value radius = Gudhi::Minimal_enclosing_ball_radius()(points);
-#ifdef DEBUG_TRACES
- std::clog << "radius = " << radius << " - " << (radius > max_radius_) << std::endl;
-#endif // DEBUG_TRACES
- simplex_tree_.assign_filtration(sh, radius);
- return (radius > max_radius_);
- }
- Cech_blocker(Simplex_tree& simplex_tree, Filtration_value max_radius, const std::vector<Point>& point_cloud)
- : simplex_tree_(simplex_tree), max_radius_(max_radius), point_cloud_(point_cloud) {
- dimension_ = point_cloud_[0].size();
- }
-
- private:
- Simplex_tree simplex_tree_;
- Filtration_value max_radius_;
- std::vector<Point> point_cloud_;
- int dimension_;
-};
-
-void program_options(int argc, char* argv[], std::string& off_file_points, Filtration_value& max_radius, int& dim_max);
-
-int main(int argc, char* argv[]) {
- std::string off_file_points;
- Filtration_value max_radius;
- int dim_max;
-
- program_options(argc, argv, off_file_points, max_radius, dim_max);
-
- // Extract the points from the file filepoints
- Points_off_reader off_reader(off_file_points);
-
- // Compute the proximity graph of the points
- Proximity_graph prox_graph = Gudhi::compute_proximity_graph<Simplex_tree>(off_reader.get_point_cloud(), max_radius,
- Gudhi::Minimal_enclosing_ball_radius());
-
- // Construct the Rips complex in a Simplex Tree
- Simplex_tree st;
- // insert the proximity graph in the simplex tree
- st.insert_graph(prox_graph);
- // expand the graph until dimension dim_max
- st.expansion_with_blockers(dim_max, Cech_blocker(st, max_radius, off_reader.get_point_cloud()));
-
- std::clog << "The complex contains " << st.num_simplices() << " simplices \n";
- std::clog << " and has dimension " << st.dimension() << " \n";
-
- // Sort the simplices in the order of the filtration
- st.initialize_filtration();
-
-#if DEBUG_TRACES
- std::clog << "********************************************************************\n";
- std::clog << "* The complex contains " << st.num_simplices() << " simplices - dimension=" << st.dimension() << "\n";
- std::clog << "* Iterator on Simplices in the filtration, with [filtration value]:\n";
- for (auto f_simplex : st.filtration_simplex_range()) {
- std::clog << " "
- << "[" << st.filtration(f_simplex) << "] ";
- for (auto vertex : st.simplex_vertex_range(f_simplex)) {
- std::clog << static_cast<int>(vertex) << " ";
- }
- std::clog << std::endl;
- }
-#endif // DEBUG_TRACES
-
- return 0;
-}
-
-void program_options(int argc, char* argv[], std::string& off_file_points, Filtration_value& max_radius, int& dim_max) {
- namespace po = boost::program_options;
- po::options_description hidden("Hidden options");
- hidden.add_options()("input-file", po::value<std::string>(&off_file_points),
- "Name of an OFF file containing a point set.\n");
-
- po::options_description visible("Allowed options", 100);
- visible.add_options()("help,h", "produce help message")(
- "max-radius,r",
- po::value<Filtration_value>(&max_radius)->default_value(std::numeric_limits<Filtration_value>::infinity()),
- "Maximal length of an edge for the Rips complex construction.")(
- "cpx-dimension,d", po::value<int>(&dim_max)->default_value(1),
- "Maximal dimension of the Rips complex we want to compute.");
-
- po::positional_options_description pos;
- pos.add("input-file", 1);
-
- po::options_description all;
- all.add(visible).add(hidden);
-
- po::variables_map vm;
- po::store(po::command_line_parser(argc, argv).options(all).positional(pos).run(), vm);
- po::notify(vm);
-
- if (vm.count("help") || !vm.count("input-file")) {
- std::clog << std::endl;
- std::clog << "Construct a Cech complex defined on a set of input points.\n \n";
-
- std::clog << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
- std::clog << visible << std::endl;
- exit(-1);
- }
-}
diff --git a/src/Cech_complex/include/gudhi/Cech_complex.h b/src/Cech_complex/include/gudhi/Cech_complex.h
index b0871e10..08b7a72f 100644
--- a/src/Cech_complex/include/gudhi/Cech_complex.h
+++ b/src/Cech_complex/include/gudhi/Cech_complex.h
@@ -11,14 +11,13 @@
#ifndef CECH_COMPLEX_H_
#define CECH_COMPLEX_H_
-#include <gudhi/distance_functions.h> // for Gudhi::Minimal_enclosing_ball_radius
+#include <gudhi/Sphere_circumradius.h> // for Gudhi::cech_complex::Sphere_circumradius
#include <gudhi/graph_simplicial_complex.h> // for Gudhi::Proximity_graph
#include <gudhi/Debug_utils.h> // for GUDHI_CHECK
#include <gudhi/Cech_complex_blocker.h> // for Gudhi::cech_complex::Cech_blocker
#include <iostream>
#include <stdexcept> // for exception management
-#include <vector>
namespace Gudhi {
@@ -26,55 +25,52 @@ namespace cech_complex {
/**
* \class Cech_complex
- * \brief Cech complex data structure.
+ * \brief Cech complex class.
*
* \ingroup cech_complex
*
* \details
- * The data structure is a proximity graph, containing edges when the edge length is less or equal
- * to a given max_radius. Edge length is computed from `Gudhi::Minimal_enclosing_ball_radius` distance function.
+ * Cech complex is a simplicial complex where the set of all simplices is filtered
+ * by the radius of their minimal enclosing ball and bounded by the given max_radius.
*
- * \tparam SimplicialComplexForProximityGraph furnishes `Vertex_handle` and `Filtration_value` type definition required
- * by `Gudhi::Proximity_graph`.
+ * \tparam Kernel CGAL kernel: either Epick_d or Epeck_d.
+ *
+ * \tparam SimplicialComplexForCechComplex furnishes `Vertex_handle` and `Filtration_value` type definition required
+ * by `Gudhi::Proximity_graph` and Cech blocker.
*
- * \tparam ForwardPointRange must be a range for which `std::begin()` and `std::end()` methods return input
- * iterators on a point. `std::begin()` and `std::end()` methods are also required for a point.
*/
-template <typename SimplicialComplexForProximityGraph, typename ForwardPointRange>
+template <typename Kernel, typename SimplicialComplexForCechComplex>
class Cech_complex {
private:
// Required by compute_proximity_graph
- using Vertex_handle = typename SimplicialComplexForProximityGraph::Vertex_handle;
- using Filtration_value = typename SimplicialComplexForProximityGraph::Filtration_value;
- using Proximity_graph = Gudhi::Proximity_graph<SimplicialComplexForProximityGraph>;
-
- // Retrieve Coordinate type from ForwardPointRange
- using Point_from_range_iterator = typename boost::range_const_iterator<ForwardPointRange>::type;
- using Point_from_range = typename std::iterator_traits<Point_from_range_iterator>::value_type;
- using Coordinate_iterator = typename boost::range_const_iterator<Point_from_range>::type;
- using Coordinate = typename std::iterator_traits<Coordinate_iterator>::value_type;
-
- public:
- // Point and Point_cloud type definition
- using Point = std::vector<Coordinate>;
- using Point_cloud = std::vector<Point>;
-
- public:
- /** \brief Cech_complex constructor from a list of points.
+ using Vertex_handle = typename SimplicialComplexForCechComplex::Vertex_handle;
+ using Filtration_value = typename SimplicialComplexForCechComplex::Filtration_value;
+ using Proximity_graph = Gudhi::Proximity_graph<SimplicialComplexForCechComplex>;
+
+ using cech_blocker = Cech_blocker<SimplicialComplexForCechComplex, Cech_complex, Kernel>;
+
+ using Point_d = typename cech_blocker::Point_d;
+ using Point_cloud = std::vector<Point_d>;
+
+ // Numeric type of coordinates in the kernel
+ using FT = typename cech_blocker::FT;
+ // Sphere is a pair of point and squared radius.
+ using Sphere = typename cech_blocker::Sphere;
+
+ public:
+ /** \brief Cech_complex constructor from a range of points.
*
- * @param[in] points Range of points.
+ * @param[in] points Range of points where each point is defined as `kernel::Point_d`.
* @param[in] max_radius Maximal radius value.
*
- * \tparam ForwardPointRange must be a range of Point. Point must be a range of <b>copyable</b> Cartesian coordinates.
- *
*/
- Cech_complex(const ForwardPointRange& points, Filtration_value max_radius) : max_radius_(max_radius) {
- // Point cloud deep copy
- point_cloud_.reserve(boost::size(points));
- for (auto&& point : points) point_cloud_.emplace_back(std::begin(point), std::end(point));
+ template<typename InputPointRange >
+ Cech_complex(const InputPointRange & points, Filtration_value max_radius) : max_radius_(max_radius) {
+
+ point_cloud_.assign(std::begin(points), std::end(points));
- cech_skeleton_graph_ = Gudhi::compute_proximity_graph<SimplicialComplexForProximityGraph>(
- point_cloud_, max_radius_, Gudhi::Minimal_enclosing_ball_radius());
+ cech_skeleton_graph_ = Gudhi::compute_proximity_graph<SimplicialComplexForCechComplex>(
+ point_cloud_, max_radius_, Sphere_circumradius<Kernel, Filtration_value>());
}
/** \brief Initializes the simplicial complex from the proximity graph and expands it until a given maximal
@@ -82,19 +78,19 @@ class Cech_complex {
*
* @param[in] complex SimplicialComplexForCech to be created.
* @param[in] dim_max graph expansion until this given maximal dimension.
+ * @param[in] exact Exact filtration values computation. Not exact if `Kernel` is not <a target="_blank"
+ * href="https://doc.cgal.org/latest/Kernel_d/structCGAL_1_1Epeck__d.html">CGAL::Epeck_d</a>.
* @exception std::invalid_argument In debug mode, if `complex.num_vertices()` does not return 0.
*
*/
- template <typename SimplicialComplexForCechComplex>
- void create_complex(SimplicialComplexForCechComplex& complex, int dim_max) {
+ void create_complex(SimplicialComplexForCechComplex& complex, int dim_max, const bool exact = false) {
GUDHI_CHECK(complex.num_vertices() == 0,
std::invalid_argument("Cech_complex::create_complex - simplicial complex is not empty"));
// insert the proximity graph in the simplicial complex
complex.insert_graph(cech_skeleton_graph_);
// expand the graph until dimension dim_max
- complex.expansion_with_blockers(dim_max,
- Cech_blocker<SimplicialComplexForCechComplex, Cech_complex>(&complex, this));
+ complex.expansion_with_blockers(dim_max, cech_blocker(&complex, this, exact));
}
/** @return max_radius value given at construction. */
@@ -103,12 +99,18 @@ class Cech_complex {
/** @param[in] vertex Point position in the range.
* @return The point.
*/
- const Point& get_point(Vertex_handle vertex) const { return point_cloud_[vertex]; }
+ const Point_d& get_point(Vertex_handle vertex) const { return point_cloud_[vertex]; }
+
+ /**
+ * @return Vector of cached spheres.
+ */
+ std::vector<Sphere> & get_cache() { return cache_; }
private:
Proximity_graph cech_skeleton_graph_;
Filtration_value max_radius_;
Point_cloud point_cloud_;
+ std::vector<Sphere> cache_;
};
} // namespace cech_complex
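A minimal usage sketch of the reworked interface shown above (CGAL kernel as first template parameter, optional `exact` flag on `create_complex`); the point values and radius are arbitrary illustration data:

    #include <gudhi/Cech_complex.h>
    #include <gudhi/Simplex_tree.h>
    #include <CGAL/Epeck_d.h>
    #include <vector>

    int main() {
      using Simplex_tree = Gudhi::Simplex_tree<>;
      using Kernel = CGAL::Epeck_d<CGAL::Dimension_tag<2>>;
      using Point = Kernel::Point_d;
      using Cech_complex = Gudhi::cech_complex::Cech_complex<Kernel, Simplex_tree>;

      std::vector<Point> points = {Point(1., 0.), Point(0., 1.), Point(2., 1.)};
      Cech_complex cech(points, 1.5 /* max_radius */);

      Simplex_tree stree;
      // `exact = true` only makes a difference with an exact kernel such as Epeck_d.
      cech.create_complex(stree, 2 /* dim_max */, true /* exact */);
      return 0;
    }
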
diff --git a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h
index 31b9aab5..e7f548ba 100644
--- a/src/Cech_complex/include/gudhi/Cech_complex_blocker.h
+++ b/src/Cech_complex/include/gudhi/Cech_complex_blocker.h
@@ -11,10 +11,12 @@
#ifndef CECH_COMPLEX_BLOCKER_H_
#define CECH_COMPLEX_BLOCKER_H_
-#include <gudhi/distance_functions.h> // for Gudhi::Minimal_enclosing_ball_radius
+#include <CGAL/NT_converter.h> // for casting from FT to Filtration_value
+#include <CGAL/Lazy_exact_nt.h> // for CGAL::exact
#include <iostream>
#include <vector>
+#include <set>
#include <cmath> // for std::sqrt
namespace Gudhi {
@@ -30,33 +32,101 @@ namespace cech_complex {
* \details
* Čech blocker is an oracle constructed from a Cech_complex and a simplicial complex.
*
- * \tparam SimplicialComplexForProximityGraph furnishes `Simplex_handle` and `Filtration_value` type definition,
+ * \tparam SimplicialComplexForCech furnishes `Simplex_handle` and `Filtration_value` type definition,
* `simplex_vertex_range(Simplex_handle sh)`and `assign_filtration(Simplex_handle sh, Filtration_value filt)` methods.
*
- * \tparam Chech_complex is required by the blocker.
+ * \tparam Cech_complex is required by the blocker.
+ *
+ * \tparam Kernel CGAL kernel: either Epick_d or Epeck_d.
*/
-template <typename SimplicialComplexForCech, typename Cech_complex>
+template <typename SimplicialComplexForCech, typename Cech_complex, typename Kernel>
class Cech_blocker {
+
+ public:
+
+ using Point_d = typename Kernel::Point_d;
+ // Numeric type of coordinates in the kernel
+ using FT = typename Kernel::FT;
+ // Sphere is a pair of point and squared radius.
+ using Sphere = typename std::pair<Point_d, FT>;
+
private:
- using Point_cloud = typename Cech_complex::Point_cloud;
using Simplex_handle = typename SimplicialComplexForCech::Simplex_handle;
using Filtration_value = typename SimplicialComplexForCech::Filtration_value;
+ template<class PointIterator>
+ Sphere get_sphere(PointIterator begin, PointIterator end) const {
+ Point_d c = kernel_.construct_circumcenter_d_object()(begin, end);
+ FT r = kernel_.squared_distance_d_object()(c, *begin);
+ return std::make_pair(std::move(c), std::move(r));
+ }
+
public:
+
/** \internal \brief Čech complex blocker operator() - the oracle - assigns the filtration value from the simplex
* radius and returns if the simplex expansion must be blocked.
* \param[in] sh The Simplex_handle.
* \return true if the simplex radius is greater than the Cech_complex max_radius*/
bool operator()(Simplex_handle sh) {
+ using Point_cloud = std::vector<Point_d>;
+ CGAL::NT_converter<FT, Filtration_value> cast_to_fv;
+ Filtration_value radius = 0;
+ bool is_min_enclos_ball = false;
Point_cloud points;
- for (auto vertex : sc_ptr_->simplex_vertex_range(sh)) {
- points.push_back(cc_ptr_->get_point(vertex));
+ points.reserve(sc_ptr_->dimension(sh)+1);
+
+ // for each face of simplex sh, test outsider point is indeed inside enclosing ball, if yes, take it and exit loop, otherwise, new sphere is circumsphere of all vertices
+ for (auto face_opposite_vertex : sc_ptr_->boundary_opposite_vertex_simplex_range(sh)) {
+ Sphere sph;
+ auto k = sc_ptr_->key(face_opposite_vertex.first);
+ if(k != sc_ptr_->null_key()) {
+ sph = cc_ptr_->get_cache().at(k);
+ }
+ else {
+ for (auto vertex : sc_ptr_->simplex_vertex_range(face_opposite_vertex.first)) {
+ points.push_back(cc_ptr_->get_point(vertex));
+#ifdef DEBUG_TRACES
+ std::clog << "#(" << vertex << ")#";
+#endif // DEBUG_TRACES
+ }
+ sph = get_sphere(points.cbegin(), points.cend());
+ // Put edge sphere in cache
+ sc_ptr_->assign_key(face_opposite_vertex.first, cc_ptr_->get_cache().size());
+ cc_ptr_->get_cache().push_back(sph);
+ // Clear face points
+ points.clear();
+ }
+ // Check if the minimal enclosing ball of current face contains the extra point/opposite vertex
+ if (kernel_.squared_distance_d_object()(sph.first, cc_ptr_->get_point(face_opposite_vertex.second)) <= sph.second) {
#ifdef DEBUG_TRACES
- std::clog << "#(" << vertex << ")#";
+ std::clog << "center: " << sph.first << ", radius: " << radius << std::endl;
#endif // DEBUG_TRACES
+ is_min_enclos_ball = true;
+#if CGAL_VERSION_NR >= 1050000000
+ if(exact_) CGAL::exact(sph.second);
+#endif
+ radius = std::sqrt(cast_to_fv(sph.second));
+ sc_ptr_->assign_key(sh, cc_ptr_->get_cache().size());
+ cc_ptr_->get_cache().push_back(sph);
+ break;
+ }
}
- Filtration_value radius = Gudhi::Minimal_enclosing_ball_radius()(points);
+ // Spheres of each face don't contain the whole simplex
+ if(!is_min_enclos_ball) {
+ for (auto vertex : sc_ptr_->simplex_vertex_range(sh)) {
+ points.push_back(cc_ptr_->get_point(vertex));
+ }
+ Sphere sph = get_sphere(points.cbegin(), points.cend());
+#if CGAL_VERSION_NR >= 1050000000
+ if(exact_) CGAL::exact(sph.second);
+#endif
+ radius = std::sqrt(cast_to_fv(sph.second));
+
+ sc_ptr_->assign_key(sh, cc_ptr_->get_cache().size());
+ cc_ptr_->get_cache().push_back(std::move(sph));
+ }
+
#ifdef DEBUG_TRACES
if (radius > cc_ptr_->max_radius()) std::clog << "radius > max_radius => expansion is blocked\n";
#endif // DEBUG_TRACES
@@ -65,11 +135,13 @@ class Cech_blocker {
}
/** \internal \brief Čech complex blocker constructor. */
- Cech_blocker(SimplicialComplexForCech* sc_ptr, Cech_complex* cc_ptr) : sc_ptr_(sc_ptr), cc_ptr_(cc_ptr) {}
+ Cech_blocker(SimplicialComplexForCech* sc_ptr, Cech_complex* cc_ptr, const bool exact) : sc_ptr_(sc_ptr), cc_ptr_(cc_ptr), exact_(exact) {}
private:
SimplicialComplexForCech* sc_ptr_;
Cech_complex* cc_ptr_;
+ Kernel kernel_;
+ const bool exact_;
};
} // namespace cech_complex
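The blocker above decides, facet by facet, whether the simplex is already covered by a facet's cached ball before falling back to the circumsphere of all vertices. A self-contained sketch of one step of that geometric test, using the same CGAL kernel functors but none of the key-based caching; the function name `simplex_radius` is hypothetical, and the real blocker tries every facet before falling back:

    #include <CGAL/Epeck_d.h>
    #include <cmath>
    #include <vector>

    using Kernel = CGAL::Epeck_d<CGAL::Dynamic_dimension_tag>;
    using Point_d = Kernel::Point_d;
    using FT = Kernel::FT;

    // One step of the blocker's test, for a single facet and its opposite vertex.
    double simplex_radius(const std::vector<Point_d>& facet, const Point_d& opposite_vertex) {
      Kernel k;
      // Circumsphere of the facet: center and squared radius.
      Point_d center = k.construct_circumcenter_d_object()(facet.begin(), facet.end());
      FT sq_radius = k.squared_distance_d_object()(center, facet.front());
      // If the opposite vertex already lies in the facet's ball, that ball covers the simplex.
      if (k.squared_distance_d_object()(center, opposite_vertex) <= sq_radius)
        return std::sqrt(CGAL::to_double(sq_radius));
      // Otherwise, fall back to the circumsphere of all the vertices.
      std::vector<Point_d> vertices(facet);
      vertices.push_back(opposite_vertex);
      center = k.construct_circumcenter_d_object()(vertices.begin(), vertices.end());
      sq_radius = k.squared_distance_d_object()(center, vertices.front());
      return std::sqrt(CGAL::to_double(sq_radius));
    }
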
diff --git a/src/Cech_complex/include/gudhi/Miniball.COPYRIGHT b/src/Cech_complex/include/gudhi/Miniball.COPYRIGHT
deleted file mode 100644
index dbe4c553..00000000
--- a/src/Cech_complex/include/gudhi/Miniball.COPYRIGHT
+++ /dev/null
@@ -1,4 +0,0 @@
-The miniball software is available under the GNU General Public License (GPLv3 - https://www.gnu.org/copyleft/gpl.html).
-If your intended use is not compliant with this license, please buy a commercial license (EUR 500 - https://people.inf.ethz.ch/gaertner/subdir/software/miniball/license.html).
-You need a license if the software that you develop using Miniball V3.0 is not open source.
-
diff --git a/src/Cech_complex/include/gudhi/Miniball.README b/src/Cech_complex/include/gudhi/Miniball.README
deleted file mode 100644
index 033d8953..00000000
--- a/src/Cech_complex/include/gudhi/Miniball.README
+++ /dev/null
@@ -1,26 +0,0 @@
-https://people.inf.ethz.ch/gaertner/subdir/software/miniball.html
-
-Smallest Enclosing Balls of Points - Fast and Robust in C++.
-(high-quality software for smallest enclosing balls of balls is available in the computational geometry algorithms library CGAL)
-
-
-This is the miniball software (V3.0) for computing smallest enclosing balls of points in arbitrary dimensions. It consists of a C++ header file Miniball.hpp (around 500 lines of code) and two example programs miniball_example.cpp and miniball_example_containers.cpp that demonstrate the usage. The first example stores the coordinates of the input points in a two-dimensional array, the second example uses a list of vectors to show how generic containers can be used.
-
-Credits: Aditya Gupta and Alexandros Konstantinakis-Karmis have significantly contributed to this version of the software.
-
-Changes - https://people.inf.ethz.ch/gaertner/subdir/software/miniball/changes.txt - from previous versions.
-
-The theory - https://people.inf.ethz.ch/gaertner/subdir/texts/own_work/esa99_final.pdf - behind the miniball software (Proc. 7th Annual European Symposium on Algorithms (ESA), Lecture Notes in Computer Science 1643, Springer-Verlag, pp.325-338, 1999).
-
-Main Features:
-
- Very fast in low dimensions. 1 million points in 5-space are processed within 0.05 seconds on any recent machine.
-
- High numerical stability. Almost all input degeneracies (cospherical points, multiple points, points very close together) are routinely handled.
-
- Easily integrates into your code. You can freely choose the coordinate type of your points and the container to store the points. If you still need to adapt the code, the header is small and readable and contains documentation for all major methods.
-
-
-Changes done for the GUDHI version of MiniBall:
- - Add include guard
- - Move Miniball namespace inside a new Gudhi namespace
diff --git a/src/Cech_complex/include/gudhi/Miniball.hpp b/src/Cech_complex/include/gudhi/Miniball.hpp
deleted file mode 100644
index 55387a8a..00000000
--- a/src/Cech_complex/include/gudhi/Miniball.hpp
+++ /dev/null
@@ -1,523 +0,0 @@
-// Copyright (C) 1999-2013, Bernd Gaertner
-// $Rev: 3581 $
-//
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU General Public License for more details.
-
-// You should have received a copy of the GNU General Public License
-// along with this program. If not, see <http://www.gnu.org/licenses/>.
-//
-// Contact:
-// --------
-// Bernd Gaertner
-// Institute of Theoretical Computer Science
-// ETH Zuerich
-// CAB G31.1
-// CH-8092 Zuerich, Switzerland
-// http://www.inf.ethz.ch/personal/gaertner
-
-#ifndef MINIBALL_HPP_
-#define MINIBALL_HPP_
-
-#include <cassert>
-#include <algorithm>
-#include <list>
-#include <ctime>
-#include <limits>
-
-namespace Gudhi {
-
-namespace Miniball {
-
- // Global Functions
- // ================
- template <typename NT>
- inline NT mb_sqr (NT r) {return r*r;}
-
- // Functors
- // ========
-
- // functor to map a point iterator to the corresponding coordinate iterator;
- // generic version for points whose coordinate containers have begin()
- template < typename Pit_, typename Cit_ >
- struct CoordAccessor {
- typedef Pit_ Pit;
- typedef Cit_ Cit;
- inline Cit operator() (Pit it) const { return (*it).begin(); }
- };
-
- // partial specialization for points whose coordinate containers are arrays
- template < typename Pit_, typename Cit_ >
- struct CoordAccessor<Pit_, Cit_*> {
- typedef Pit_ Pit;
- typedef Cit_* Cit;
- inline Cit operator() (Pit it) const { return *it; }
- };
-
- // Class Declaration
- // =================
-
- template <typename CoordAccessor>
- class Miniball {
- private:
- // types
- // The iterator type to go through the input points
- typedef typename CoordAccessor::Pit Pit;
- // The iterator type to go through the coordinates of a single point.
- typedef typename CoordAccessor::Cit Cit;
- // The coordinate type
- typedef typename std::iterator_traits<Cit>::value_type NT;
- // The iterator to go through the support points
- typedef typename std::list<Pit>::iterator Sit;
-
- // data members...
- const int d; // dimension
- Pit points_begin;
- Pit points_end;
- CoordAccessor coord_accessor;
- double time;
- const NT nt0; // NT(0)
-
- //...for the algorithms
- std::list<Pit> L;
- Sit support_end;
- int fsize; // number of forced points
- int ssize; // number of support points
-
- // ...for the ball updates
- NT* current_c;
- NT current_sqr_r;
- NT** c;
- NT* sqr_r;
-
- // helper arrays
- NT* q0;
- NT* z;
- NT* f;
- NT** v;
- NT** a;
-
- public:
- // The iterator type to go through the support points
- typedef typename std::list<Pit>::const_iterator SupportPointIterator;
-
- // PRE: [begin, end) is a nonempty range
- // POST: computes the smallest enclosing ball of the points in the range
- // [begin, end); the functor a maps a point iterator to an iterator
- // through the d coordinates of the point
- Miniball (int d_, Pit begin, Pit end, CoordAccessor ca = CoordAccessor());
-
- // POST: returns a pointer to the first element of an array that holds
- // the d coordinates of the center of the computed ball
- const NT* center () const;
-
- // POST: returns the squared radius of the computed ball
- NT squared_radius () const;
-
- // POST: returns the number of support points of the computed ball;
- // the support points form a minimal set with the same smallest
- // enclosing ball as the input set; in particular, the support
- // points are on the boundary of the computed ball, and their
- // number is at most d+1
- int nr_support_points () const;
-
- // POST: returns an iterator to the first support point
- SupportPointIterator support_points_begin () const;
-
- // POST: returns a past-the-end iterator for the range of support points
- SupportPointIterator support_points_end () const;
-
- // POST: returns the maximum excess of any input point w.r.t. the computed
- // ball, divided by the squared radius of the computed ball. The
- // excess of a point is the difference between its squared distance
- // from the center and the squared radius; Ideally, the return value
- // is 0. subopt is set to the absolute value of the most negative
- // coefficient in the affine combination of the support points that
- // yields the center. Ideally, this is a convex combination, and there
- // is no negative coefficient in which case subopt is set to 0.
- NT relative_error (NT& subopt) const;
-
- // POST: return true if the relative error is at most tol, and the
- // suboptimality is 0; the default tolerance is 10 times the
- // coordinate type's machine epsilon
- bool is_valid (NT tol = NT(10) * std::numeric_limits<NT>::epsilon()) const;
-
- // POST: returns the time in seconds taken by the constructor call for
- // computing the smallest enclosing ball
- double get_time() const;
-
- // POST: deletes dynamically allocated arrays
- ~Miniball();
-
- private:
- void mtf_mb (Sit n);
- void mtf_move_to_front (Sit j);
- void pivot_mb (Pit n);
- void pivot_move_to_front (Pit j);
- NT excess (Pit pit) const;
- void pop ();
- bool push (Pit pit);
- NT suboptimality () const;
- void create_arrays();
- void delete_arrays();
- };
-
- // Class Definition
- // ================
- template <typename CoordAccessor>
- Miniball<CoordAccessor>::Miniball (int d_, Pit begin, Pit end,
- CoordAccessor ca)
- : d (d_),
- points_begin (begin),
- points_end (end),
- coord_accessor (ca),
- time (clock()),
- nt0 (NT(0)),
- L(),
- support_end (L.begin()),
- fsize(0),
- ssize(0),
- current_c (NULL),
- current_sqr_r (NT(-1)),
- c (NULL),
- sqr_r (NULL),
- q0 (NULL),
- z (NULL),
- f (NULL),
- v (NULL),
- a (NULL)
- {
- assert (points_begin != points_end);
- create_arrays();
-
- // set initial center
- for (int j=0; j<d; ++j) c[0][j] = nt0;
- current_c = c[0];
-
- // compute miniball
- pivot_mb (points_end);
-
- // update time
- time = (clock() - time) / CLOCKS_PER_SEC;
- }
-
- template <typename CoordAccessor>
- Miniball<CoordAccessor>::~Miniball()
- {
- delete_arrays();
- }
-
- template <typename CoordAccessor>
- void Miniball<CoordAccessor>::create_arrays()
- {
- c = new NT*[d+1];
- v = new NT*[d+1];
- a = new NT*[d+1];
- for (int i=0; i<d+1; ++i) {
- c[i] = new NT[d];
- v[i] = new NT[d];
- a[i] = new NT[d];
- }
- sqr_r = new NT[d+1];
- q0 = new NT[d];
- z = new NT[d+1];
- f = new NT[d+1];
- }
-
- template <typename CoordAccessor>
- void Miniball<CoordAccessor>::delete_arrays()
- {
- delete[] f;
- delete[] z;
- delete[] q0;
- delete[] sqr_r;
- for (int i=0; i<d+1; ++i) {
- delete[] a[i];
- delete[] v[i];
- delete[] c[i];
- }
- delete[] a;
- delete[] v;
- delete[] c;
- }
-
- template <typename CoordAccessor>
- const typename Miniball<CoordAccessor>::NT*
- Miniball<CoordAccessor>::center () const
- {
- return current_c;
- }
-
- template <typename CoordAccessor>
- typename Miniball<CoordAccessor>::NT
- Miniball<CoordAccessor>::squared_radius () const
- {
- return current_sqr_r;
- }
-
- template <typename CoordAccessor>
- int Miniball<CoordAccessor>::nr_support_points () const
- {
- assert (ssize < d+2);
- return ssize;
- }
-
- template <typename CoordAccessor>
- typename Miniball<CoordAccessor>::SupportPointIterator
- Miniball<CoordAccessor>::support_points_begin () const
- {
- return L.begin();
- }
-
- template <typename CoordAccessor>
- typename Miniball<CoordAccessor>::SupportPointIterator
- Miniball<CoordAccessor>::support_points_end () const
- {
- return support_end;
- }
-
- template <typename CoordAccessor>
- typename Miniball<CoordAccessor>::NT
- Miniball<CoordAccessor>::relative_error (NT& subopt) const
- {
- NT e, max_e = nt0;
- // compute maximum absolute excess of support points
- for (SupportPointIterator it = support_points_begin();
- it != support_points_end(); ++it) {
- e = excess (*it);
- if (e < nt0) e = -e;
- if (e > max_e) {
- max_e = e;
- }
- }
- // compute maximum excess of any point
- for (Pit i = points_begin; i != points_end; ++i)
- if ((e = excess (i)) > max_e)
- max_e = e;
-
- subopt = suboptimality();
- assert (current_sqr_r > nt0 || max_e == nt0);
- return (current_sqr_r == nt0 ? nt0 : max_e / current_sqr_r);
- }
-
- template <typename CoordAccessor>
- bool Miniball<CoordAccessor>::is_valid (NT tol) const
- {
- NT suboptimality;
- return ( (relative_error (suboptimality) <= tol) && (suboptimality == 0) );
- }
-
- template <typename CoordAccessor>
- double Miniball<CoordAccessor>::get_time() const
- {
- return time;
- }
-
- template <typename CoordAccessor>
- void Miniball<CoordAccessor>::mtf_mb (Sit n)
- {
- // Algorithm 1: mtf_mb (L_{n-1}, B), where L_{n-1} = [L.begin, n)
- // B: the set of forced points, defining the current ball
- // S: the superset of support points computed by the algorithm
- // --------------------------------------------------------------
- // from B. Gaertner, Fast and Robust Smallest Enclosing Balls, ESA 1999,
- // http://www.inf.ethz.ch/personal/gaertner/texts/own_work/esa99_final.pdf
-
- // PRE: B = S
- assert (fsize == ssize);
-
- support_end = L.begin();
- if ((fsize) == d+1) return;
-
- // incremental construction
- for (Sit i = L.begin(); i != n;)
- {
- // INV: (support_end - L.begin() == |S|-|B|)
- assert (std::distance (L.begin(), support_end) == ssize - fsize);
-
- Sit j = i++;
- if (excess(*j) > nt0)
- if (push(*j)) { // B := B + p_i
- mtf_mb (j); // mtf_mb (L_{i-1}, B + p_i)
- pop(); // B := B - p_i
- mtf_move_to_front(j);
- }
- }
- // POST: the range [L.begin(), support_end) stores the set S\B
- }
-
- template <typename CoordAccessor>
- void Miniball<CoordAccessor>::mtf_move_to_front (Sit j)
- {
- if (support_end == j)
- support_end++;
- L.splice (L.begin(), L, j);
- }
-
- template <typename CoordAccessor>
- void Miniball<CoordAccessor>::pivot_mb (Pit n)
- {
- // Algorithm 2: pivot_mb (L_{n-1}), where L_{n-1} = [L.begin, n)
- // --------------------------------------------------------------
- // from B. Gaertner, Fast and Robust Smallest Enclosing Balls, ESA 1999,
- // http://www.inf.ethz.ch/personal/gaertner/texts/own_work/esa99_final.pdf
- NT old_sqr_r;
- const NT* c;
- Pit pivot, k;
- NT e, max_e, sqr_r;
- Cit p;
- do {
- old_sqr_r = current_sqr_r;
- sqr_r = current_sqr_r;
-
- pivot = points_begin;
- max_e = nt0;
- for (k = points_begin; k != n; ++k) {
- p = coord_accessor(k);
- e = -sqr_r;
- c = current_c;
- for (int j=0; j<d; ++j)
- e += mb_sqr<NT>(*p++-*c++);
- if (e > max_e) {
- max_e = e;
- pivot = k;
- }
- }
-
- if (max_e > nt0) {
- // check if the pivot is already contained in the support set
- if (std::find(L.begin(), support_end, pivot) == support_end) {
- assert (fsize == 0);
- if (push (pivot)) {
- mtf_mb(support_end);
- pop();
- pivot_move_to_front(pivot);
- }
- }
- }
- } while (old_sqr_r < current_sqr_r);
- }
-
- template <typename CoordAccessor>
- void Miniball<CoordAccessor>::pivot_move_to_front (Pit j)
- {
- L.push_front(j);
- if (std::distance(L.begin(), support_end) == d+2)
- support_end--;
- }
-
- template <typename CoordAccessor>
- inline typename Miniball<CoordAccessor>::NT
- Miniball<CoordAccessor>::excess (Pit pit) const
- {
- Cit p = coord_accessor(pit);
- NT e = -current_sqr_r;
- NT* c = current_c;
- for (int k=0; k<d; ++k){
- e += mb_sqr<NT>(*p++-*c++);
- }
- return e;
- }
-
- template <typename CoordAccessor>
- void Miniball<CoordAccessor>::pop ()
- {
- --fsize;
- }
-
- template <typename CoordAccessor>
- bool Miniball<CoordAccessor>::push (Pit pit)
- {
- int i, j;
- NT eps = mb_sqr<NT>(std::numeric_limits<NT>::epsilon());
-
- Cit cit = coord_accessor(pit);
- Cit p = cit;
-
- if (fsize==0) {
- for (i=0; i<d; ++i)
- q0[i] = *p++;
- for (i=0; i<d; ++i)
- c[0][i] = q0[i];
- sqr_r[0] = nt0;
- }
- else {
- // set v_fsize to Q_fsize
- for (i=0; i<d; ++i)
- //v[fsize][i] = p[i]-q0[i];
- v[fsize][i] = *p++-q0[i];
-
- // compute the a_{fsize,i}, i< fsize
- for (i=1; i<fsize; ++i) {
- a[fsize][i] = nt0;
- for (j=0; j<d; ++j)
- a[fsize][i] += v[i][j] * v[fsize][j];
- a[fsize][i]*=(2/z[i]);
- }
-
- // update v_fsize to Q_fsize-\bar{Q}_fsize
- for (i=1; i<fsize; ++i) {
- for (j=0; j<d; ++j)
- v[fsize][j] -= a[fsize][i]*v[i][j];
- }
-
- // compute z_fsize
- z[fsize]=nt0;
- for (j=0; j<d; ++j)
- z[fsize] += mb_sqr<NT>(v[fsize][j]);
- z[fsize]*=2;
-
- // reject push if z_fsize too small
- if (z[fsize]<eps*current_sqr_r) {
- return false;
- }
-
- // update c, sqr_r
- p=cit;
- NT e = -sqr_r[fsize-1];
- for (i=0; i<d; ++i)
- e += mb_sqr<NT>(*p++-c[fsize-1][i]);
- f[fsize]=e/z[fsize];
-
- for (i=0; i<d; ++i)
- c[fsize][i] = c[fsize-1][i]+f[fsize]*v[fsize][i];
- sqr_r[fsize] = sqr_r[fsize-1] + e*f[fsize]/2;
- }
- current_c = c[fsize];
- current_sqr_r = sqr_r[fsize];
- ssize = ++fsize;
- return true;
- }
-
- template <typename CoordAccessor>
- typename Miniball<CoordAccessor>::NT
- Miniball<CoordAccessor>::suboptimality () const
- {
- NT* l = new NT[d+1];
- NT min_l = nt0;
- l[0] = NT(1);
- for (int i=ssize-1; i>0; --i) {
- l[i] = f[i];
- for (int k=ssize-1; k>i; --k)
- l[i]-=a[k][i]*l[k];
- if (l[i] < min_l) min_l = l[i];
- l[0] -= l[i];
- }
- if (l[0] < min_l) min_l = l[0];
- delete[] l;
- if (min_l < nt0)
- return -min_l;
- return nt0;
- }
-} // namespace Miniball
-
-} // namespace Gudhi
-
-#endif // MINIBALL_HPP_
diff --git a/src/Cech_complex/include/gudhi/Sphere_circumradius.h b/src/Cech_complex/include/gudhi/Sphere_circumradius.h
new file mode 100644
index 00000000..790f6950
--- /dev/null
+++ b/src/Cech_complex/include/gudhi/Sphere_circumradius.h
@@ -0,0 +1,65 @@
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ * Author(s): Hind Montassif
+ *
+ * Copyright (C) 2021 Inria
+ *
+ * Modification(s):
+ * - YYYY/MM Author: Description of the modification
+ */
+
+#ifndef SPHERE_CIRCUMRADIUS_H_
+#define SPHERE_CIRCUMRADIUS_H_
+
+#include <CGAL/Epick_d.h> // for <CGAL/NT_converter.h>, which does not compile when included on its own
+
+#include <cmath> // for std::sqrt
+#include <vector>
+
+namespace Gudhi {
+
+namespace cech_complex {
+
+/** \private @brief Compute the circumradius of the sphere passing through a given set of points.
+ * The points are assumed to have the same dimension. */
+template<typename Kernel, typename Filtration_value>
+class Sphere_circumradius {
+ private:
+ Kernel kernel_;
+ public:
+ using FT = typename Kernel::FT;
+ using Point = typename Kernel::Point_d;
+ using Point_cloud = typename std::vector<Point>;
+
+ CGAL::NT_converter<FT, Filtration_value> cast_to_fv;
+
+ /** \brief Circumradius of the sphere passing through two points, computed with CGAL.
+ *
+ * @param[in] point_1 First point.
+ * @param[in] point_2 Second point.
+ * @return The circumradius of the sphere passing through the two points, i.e. half their Euclidean distance.
+ * \tparam Point must be a Kernel::Point_d from CGAL.
+ *
+ */
+ Filtration_value operator()(const Point& point_1, const Point& point_2) const {
+ return std::sqrt(cast_to_fv(kernel_.squared_distance_d_object()(point_1, point_2))) / 2.;
+ }
+
+ /** \brief Circumradius of the sphere passing through a point cloud, computed with CGAL.
+ *
+ * @param[in] point_cloud The points.
+ * @return The circumradius of the sphere passing through all the points of the cloud.
+ * \tparam Point_cloud must be a range of Kernel::Point_d points from CGAL.
+ *
+ */
+ Filtration_value operator()(const Point_cloud& point_cloud) const {
+ return std::sqrt(cast_to_fv(kernel_.compute_squared_radius_d_object()(point_cloud.begin(), point_cloud.end())));
+ }
+
+};
+
+} // namespace cech_complex
+
+} // namespace Gudhi
+
+#endif // SPHERE_CIRCUMRADIUS_H_
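
A minimal sketch of how the new Sphere_circumradius functor can be exercised on its own, outside of the Cech complex machinery. This is illustrative only: it assumes a working CGAL installation, and it picks the inexact Epick_d kernel for brevity, whereas the tests and utilities touched by this patch use the exact Epeck_d kernel.

    #include <gudhi/Sphere_circumradius.h>
    #include <CGAL/Epick_d.h>
    #include <iostream>
    #include <vector>

    int main() {
      using Kernel = CGAL::Epick_d<CGAL::Dynamic_dimension_tag>;
      using Point = typename Kernel::Point_d;
      // Filtration_value is instantiated as plain double for this sketch.
      using Circumradius = Gudhi::cech_complex::Sphere_circumradius<Kernel, double>;

      std::vector<double> a{1., 0.}, b{0., 1.}, c{0., 0.};
      Point pa(a.begin(), a.end()), pb(b.begin(), b.end()), pc(c.begin(), c.end());

      Circumradius circumradius;
      // Two-point overload: half the Euclidean distance, i.e. sqrt(2)/2 here.
      std::cout << circumradius(pa, pb) << std::endl;
      // Point-cloud overload: radius of the circumscribing sphere, also sqrt(2)/2 for this right triangle.
      std::vector<Point> cloud{pa, pb, pc};
      std::cout << circumradius(cloud) << std::endl;
      return 0;
    }

The two overloads mirror the interface of the Minimal_enclosing_ball_radius functor that is removed from distance_functions.h further down in this patch: the pairwise call is simply half the Euclidean distance, while the range call delegates the radius computation to the CGAL kernel.
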
diff --git a/src/Cech_complex/test/CMakeLists.txt b/src/Cech_complex/test/CMakeLists.txt
index e6a2a18f..2d736f27 100644
--- a/src/Cech_complex/test/CMakeLists.txt
+++ b/src/Cech_complex/test/CMakeLists.txt
@@ -1,11 +1,14 @@
-include(GUDHI_boost_test)
+if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 5.0.1)
+ include(GUDHI_boost_test)
-add_executable ( Cech_complex_test_unit test_cech_complex.cpp )
-if (TBB_FOUND)
- target_link_libraries(Cech_complex_test_unit ${TBB_LIBRARIES})
-endif()
+ add_executable ( Cech_complex_test_unit test_cech_complex.cpp )
+ if (TBB_FOUND)
+ target_link_libraries(Cech_complex_test_unit ${TBB_LIBRARIES})
+ endif()
+
+ # Do not forget to copy test files in current binary dir
+ file(COPY "${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
-# Do not forget to copy test files in current binary dir
-file(COPY "${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+ gudhi_add_boost_test(Cech_complex_test_unit)
-gudhi_add_boost_test(Cech_complex_test_unit)
+endif()
diff --git a/src/Cech_complex/test/test_cech_complex.cpp b/src/Cech_complex/test/test_cech_complex.cpp
index 6e00d7b5..f5980e6d 100644
--- a/src/Cech_complex/test/test_cech_complex.cpp
+++ b/src/Cech_complex/test/test_cech_complex.cpp
@@ -22,21 +22,20 @@
// to construct Cech_complex from a OFF file of points
#include <gudhi/Points_off_io.h>
#include <gudhi/Simplex_tree.h>
-#include <gudhi/distance_functions.h>
#include <gudhi/Unitary_tests_utils.h>
-#include <gudhi/Miniball.hpp>
+
+#include <CGAL/Epeck_d.h> // For EXACT or SAFE version
// Type definitions
using Simplex_tree = Gudhi::Simplex_tree<>;
using Filtration_value = Simplex_tree::Filtration_value;
-using Point = std::vector<Filtration_value>;
+using Kernel = CGAL::Epeck_d<CGAL::Dynamic_dimension_tag>;
+using FT = typename Kernel::FT;
+using Point = typename Kernel::Point_d;
+
using Point_cloud = std::vector<Point>;
using Points_off_reader = Gudhi::Points_off_reader<Point>;
-using Cech_complex = Gudhi::cech_complex::Cech_complex<Simplex_tree, Point_cloud>;
-
-using Point_iterator = Point_cloud::const_iterator;
-using Coordinate_iterator = Point::const_iterator;
-using Min_sphere = Gudhi::Miniball::Miniball<Gudhi::Miniball::CoordAccessor<Point_iterator, Coordinate_iterator>>;
+using Cech_complex = Gudhi::cech_complex::Cech_complex<Kernel, Simplex_tree>;
BOOST_AUTO_TEST_CASE(Cech_complex_for_documentation) {
// ----------------------------------------------------------------------------
@@ -45,17 +44,29 @@ BOOST_AUTO_TEST_CASE(Cech_complex_for_documentation) {
//
// ----------------------------------------------------------------------------
Point_cloud points;
- points.push_back({1., 0.}); // 0
- points.push_back({0., 1.}); // 1
- points.push_back({2., 1.}); // 2
- points.push_back({3., 2.}); // 3
- points.push_back({0., 3.}); // 4
- points.push_back({3. + std::sqrt(3.), 3.}); // 5
- points.push_back({1., 4.}); // 6
- points.push_back({3., 4.}); // 7
- points.push_back({2., 4. + std::sqrt(3.)}); // 8
- points.push_back({0., 4.}); // 9
- points.push_back({-0.5, 2.}); // 10
+
+ std::vector<FT> point0({1., 0.});
+ points.emplace_back(point0.begin(), point0.end());
+ std::vector<FT> point1({0., 1.});
+ points.emplace_back(point1.begin(), point1.end());
+ std::vector<FT> point2({2., 1.});
+ points.emplace_back(point2.begin(), point2.end());
+ std::vector<FT> point3({3., 2.});
+ points.emplace_back(point3.begin(), point3.end());
+ std::vector<FT> point4({0., 3.});
+ points.emplace_back(point4.begin(), point4.end());
+ std::vector<FT> point5({3. + std::sqrt(3.), 3.});
+ points.emplace_back(point5.begin(), point5.end());
+ std::vector<FT> point6({1., 4.});
+ points.emplace_back(point6.begin(), point6.end());
+ std::vector<FT> point7({3., 4.});
+ points.emplace_back(point7.begin(), point7.end());
+ std::vector<FT> point8({2., 4. + std::sqrt(3.)});
+ points.emplace_back(point8.begin(), point8.end());
+ std::vector<FT> point9({0., 4.});
+ points.emplace_back(point9.begin(), point9.end());
+ std::vector<FT> point10({-0.5, 2.});
+ points.emplace_back(point10.begin(), point10.end());
Filtration_value max_radius = 1.0;
std::clog << "========== NUMBER OF POINTS = " << points.size() << " - Cech max_radius = " << max_radius
@@ -96,11 +107,11 @@ BOOST_AUTO_TEST_CASE(Cech_complex_for_documentation) {
std::clog << vertex << ",";
vp.push_back(points.at(vertex));
}
- std::clog << ") - distance =" << Gudhi::Minimal_enclosing_ball_radius()(vp.at(0), vp.at(1))
+ std::clog << ") - distance =" << Gudhi::cech_complex::Sphere_circumradius<Kernel, Filtration_value>()(vp.at(0), vp.at(1))
<< " - filtration =" << st.filtration(f_simplex) << std::endl;
BOOST_CHECK(vp.size() == 2);
GUDHI_TEST_FLOAT_EQUALITY_CHECK(st.filtration(f_simplex),
- Gudhi::Minimal_enclosing_ball_radius()(vp.at(0), vp.at(1)));
+ Gudhi::cech_complex::Sphere_circumradius<Kernel, Filtration_value>()(vp.at(0), vp.at(1)));
}
}
@@ -125,35 +136,34 @@ BOOST_AUTO_TEST_CASE(Cech_complex_for_documentation) {
for (std::size_t vertex = 0; vertex <= 2; vertex++) {
points012.push_back(cech_complex_for_doc.get_point(vertex));
}
- std::size_t dimension = points[0].end() - points[0].begin();
- Min_sphere ms012(dimension, points012.begin(), points012.end());
- Simplex_tree::Filtration_value f012 = st2.filtration(st2.find({0, 1, 2}));
- std::clog << "f012= " << f012 << " | ms012_radius= " << std::sqrt(ms012.squared_radius()) << std::endl;
+ Kernel kern;
+ Filtration_value f012 = st2.filtration(st2.find({0, 1, 2}));
+ std::clog << "f012= " << f012 << std::endl;
- GUDHI_TEST_FLOAT_EQUALITY_CHECK(f012, std::sqrt(ms012.squared_radius()));
+ CGAL::NT_converter<FT, Filtration_value> cast_to_fv;
+ GUDHI_TEST_FLOAT_EQUALITY_CHECK(f012, std::sqrt(cast_to_fv(kern.compute_squared_radius_d_object()(points012.begin(), points012.end()))));
Point_cloud points1410;
points1410.push_back(cech_complex_for_doc.get_point(1));
points1410.push_back(cech_complex_for_doc.get_point(4));
points1410.push_back(cech_complex_for_doc.get_point(10));
- Min_sphere ms1410(dimension, points1410.begin(), points1410.end());
- Simplex_tree::Filtration_value f1410 = st2.filtration(st2.find({1, 4, 10}));
- std::clog << "f1410= " << f1410 << " | ms1410_radius= " << std::sqrt(ms1410.squared_radius()) << std::endl;
+ Filtration_value f1410 = st2.filtration(st2.find({1, 4, 10}));
+ std::clog << "f1410= " << f1410 << std::endl;
- GUDHI_TEST_FLOAT_EQUALITY_CHECK(f1410, std::sqrt(ms1410.squared_radius()));
+ // In this case the triangle {1, 4, 10} is obtuse, so its minimal enclosing ball (radius 1., with points 1 and 4 on its boundary)
+ // is strictly smaller than the circumsphere computed with the CGAL kernel; the filtration value is therefore checked against the hardcoded value 1.
+ GUDHI_TEST_FLOAT_EQUALITY_CHECK(f1410, 1.);
Point_cloud points469;
points469.push_back(cech_complex_for_doc.get_point(4));
points469.push_back(cech_complex_for_doc.get_point(6));
points469.push_back(cech_complex_for_doc.get_point(9));
- Min_sphere ms469(dimension, points469.begin(), points469.end());
- Simplex_tree::Filtration_value f469 = st2.filtration(st2.find({4, 6, 9}));
- std::clog << "f469= " << f469 << " | ms469_radius= " << std::sqrt(ms469.squared_radius()) << std::endl;
+ Filtration_value f469 = st2.filtration(st2.find({4, 6, 9}));
+ std::clog << "f469= " << f469 << std::endl;
- GUDHI_TEST_FLOAT_EQUALITY_CHECK(f469, std::sqrt(ms469.squared_radius()));
+ GUDHI_TEST_FLOAT_EQUALITY_CHECK(f469, std::sqrt(cast_to_fv(kern.compute_squared_radius_d_object()(points469.begin(), points469.end()))));
BOOST_CHECK((st2.find({6, 7, 8}) == st2.null_simplex()));
BOOST_CHECK((st2.find({3, 5, 7}) == st2.null_simplex()));
@@ -235,7 +245,7 @@ BOOST_AUTO_TEST_CASE(Cech_create_complex_throw) {
//
// ----------------------------------------------------------------------------
std::string off_file_name("alphacomplexdoc.off");
- double max_radius = 12.0;
+ Filtration_value max_radius = 12.0;
std::clog << "========== OFF FILE NAME = " << off_file_name << " - Cech max_radius=" << max_radius
<< "==========" << std::endl;
diff --git a/src/Cech_complex/utilities/CMakeLists.txt b/src/Cech_complex/utilities/CMakeLists.txt
index b183c8d8..e80a698e 100644
--- a/src/Cech_complex/utilities/CMakeLists.txt
+++ b/src/Cech_complex/utilities/CMakeLists.txt
@@ -1,15 +1,17 @@
-project(Cech_complex_utilities)
+if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 5.0.1)
+ project(Cech_complex_utilities)
-if (TARGET Boost::program_options)
- add_executable(cech_persistence cech_persistence.cpp)
- target_link_libraries(cech_persistence Boost::program_options)
+ if (TARGET Boost::program_options)
+ add_executable(cech_persistence cech_persistence.cpp)
+ target_link_libraries(cech_persistence Boost::program_options)
- if (TBB_FOUND)
- target_link_libraries(cech_persistence ${TBB_LIBRARIES})
- endif()
+ if (TBB_FOUND)
+ target_link_libraries(cech_persistence ${TBB_LIBRARIES})
+ endif()
- add_test(NAME Cech_complex_utility_from_rips_on_tore_3D COMMAND $<TARGET_FILE:cech_persistence>
- "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off" "-r" "0.25" "-m" "0.5" "-d" "3" "-p" "3")
+ add_test(NAME Cech_complex_utility_from_rips_on_tore_3D COMMAND $<TARGET_FILE:cech_persistence>
+ "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off" "-r" "0.25" "-m" "0.5" "-d" "3" "-p" "3")
- install(TARGETS cech_persistence DESTINATION bin)
-endif()
\ No newline at end of file
+ install(TARGETS cech_persistence DESTINATION bin)
+ endif()
+endif()
diff --git a/src/Cech_complex/utilities/cech_persistence.cpp b/src/Cech_complex/utilities/cech_persistence.cpp
index daea08e2..75d10c0f 100644
--- a/src/Cech_complex/utilities/cech_persistence.cpp
+++ b/src/Cech_complex/utilities/cech_persistence.cpp
@@ -9,13 +9,14 @@
*/
#include <gudhi/Cech_complex.h>
-#include <gudhi/distance_functions.h>
#include <gudhi/Simplex_tree.h>
#include <gudhi/Persistent_cohomology.h>
#include <gudhi/Points_off_io.h>
#include <boost/program_options.hpp>
+#include <CGAL/Epeck_d.h> // For EXACT or SAFE version
+
#include <string>
#include <vector>
#include <limits> // infinity
@@ -23,10 +24,11 @@
// Types definition
using Simplex_tree = Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_fast_persistence>;
using Filtration_value = Simplex_tree::Filtration_value;
-using Point = std::vector<double>;
-using Point_cloud = std::vector<Point>;
+
+using Kernel = CGAL::Epeck_d<CGAL::Dynamic_dimension_tag>;
+using Point = typename Kernel::Point_d;
using Points_off_reader = Gudhi::Points_off_reader<Point>;
-using Cech_complex = Gudhi::cech_complex::Cech_complex<Simplex_tree, Point_cloud>;
+using Cech_complex = Gudhi::cech_complex::Cech_complex<Kernel, Simplex_tree>;
using Field_Zp = Gudhi::persistent_cohomology::Field_Zp;
using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology<Simplex_tree, Field_Zp>;
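
For context, a rough sketch of how these kernel-based type definitions are meant to fit together. The Cech_complex constructor and the create_complex member are assumed here to keep the shape they had in previous GUDHI releases; only the template parameters are taken from this patch, so treat the exact signatures as hypothetical.

    #include <gudhi/Cech_complex.h>
    #include <gudhi/Simplex_tree.h>
    #include <CGAL/Epeck_d.h>  // For EXACT or SAFE version
    #include <iostream>
    #include <vector>

    int main() {
      using Simplex_tree = Gudhi::Simplex_tree<>;
      using Filtration_value = Simplex_tree::Filtration_value;
      using Kernel = CGAL::Epeck_d<CGAL::Dynamic_dimension_tag>;
      using FT = typename Kernel::FT;
      using Point = typename Kernel::Point_d;
      using Cech_complex = Gudhi::cech_complex::Cech_complex<Kernel, Simplex_tree>;

      // Points are now CGAL points, built from ranges of exact coordinates.
      std::vector<FT> p0{1., 0.}, p1{0., 1.}, p2{2., 1.};
      std::vector<Point> points;
      points.emplace_back(p0.begin(), p0.end());
      points.emplace_back(p1.begin(), p1.end());
      points.emplace_back(p2.begin(), p2.end());

      Filtration_value max_radius = 1.;
      Cech_complex cech(points, max_radius);  // assumed constructor signature
      Simplex_tree stree;
      cech.create_complex(stree, 2);          // assumed member, dim_max = 2
      std::cout << "Number of simplices: " << stree.num_simplices() << std::endl;
      return 0;
    }
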
diff --git a/src/Doxyfile.in b/src/Doxyfile.in
index 06a74012..6e0e0333 100644
--- a/src/Doxyfile.in
+++ b/src/Doxyfile.in
@@ -152,7 +152,7 @@ FULL_PATH_NAMES = YES
# will be relative from the directory where doxygen is started.
# This tag requires that the tag FULL_PATH_NAMES is set to YES.
-STRIP_FROM_PATH =
+STRIP_FROM_PATH = @CMAKE_SOURCE_DIR@
# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the
# path mentioned in the documentation of a class, which tells the reader which
@@ -162,7 +162,8 @@ STRIP_FROM_PATH =
# using the -I flag.
STRIP_FROM_INC_PATH = include \
- concept
+ concept \
+ @CMAKE_SOURCE_DIR@
# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but
# less readable) file names. This can be useful is your file systems doesn't
@@ -711,7 +712,7 @@ CITE_BIB_FILES = @CMAKE_SOURCE_DIR@/biblio/bibliography.bib \
# messages are off.
# The default value is: NO.
-QUIET = NO
+QUIET = YES
# The WARNINGS tag can be used to turn on/off the warning messages that are
# generated to standard error (stderr) by doxygen. If WARNINGS is set to YES
@@ -765,7 +766,7 @@ WARN_FORMAT = "$file:$line: $text"
# messages should be written. If left blank the output is written to standard
# error (stderr).
-WARN_LOGFILE =
+WARN_LOGFILE = doxygen.log
#---------------------------------------------------------------------------
# Configuration options related to the input files
@@ -832,7 +833,7 @@ EXCLUDE = @CMAKE_SOURCE_DIR@/data/ \
@CMAKE_SOURCE_DIR@/ext/ \
@CMAKE_SOURCE_DIR@/README.md \
@CMAKE_SOURCE_DIR@/.github \
- @CMAKE_CURRENT_BINARY_DIR@/new_gudhi_version_creation.md \
+ @CMAKE_CURRENT_BINARY_DIR@ \
@GUDHI_DOXYGEN_SOURCE_PREFIX@/GudhUI/ \
@GUDHI_DOXYGEN_SOURCE_PREFIX@/cmake/ \
@GUDHI_DOXYGEN_SOURCE_PREFIX@/python/
@@ -1117,7 +1118,7 @@ HTML_FOOTER = @GUDHI_DOXYGEN_COMMON_DOC_PATH@/footer.html
# obsolete.
# This tag requires that the tag GENERATE_HTML is set to YES.
-HTML_STYLESHEET = @GUDHI_DOXYGEN_COMMON_DOC_PATH@/stylesheet.css
+HTML_STYLESHEET =
# The HTML_EXTRA_STYLESHEET tag can be used to specify additional user-defined
# cascading style sheets that are included after the standard style sheets
@@ -1130,7 +1131,7 @@ HTML_STYLESHEET = @GUDHI_DOXYGEN_COMMON_DOC_PATH@/stylesheet.css
# list). For an example see the documentation.
# This tag requires that the tag GENERATE_HTML is set to YES.
-HTML_EXTRA_STYLESHEET =
+HTML_EXTRA_STYLESHEET = @GUDHI_DOXYGEN_COMMON_DOC_PATH@/stylesheet.css
# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or
# other source files which should be copied to the HTML output directory. Note
diff --git a/src/cmake/modules/GUDHI_options.cmake b/src/cmake/modules/GUDHI_options.cmake
index c75b72f5..5e28c87d 100644
--- a/src/cmake/modules/GUDHI_options.cmake
+++ b/src/cmake/modules/GUDHI_options.cmake
@@ -4,3 +4,12 @@ option(WITH_GUDHI_REMOTE_TEST "Activate/deactivate datasets fetching test which
option(WITH_GUDHI_PYTHON "Activate/deactivate python module compilation and installation" ON)
option(WITH_GUDHI_TEST "Activate/deactivate examples compilation and installation" ON)
option(WITH_GUDHI_UTILITIES "Activate/deactivate utilities compilation and installation" ON)
+option(WITH_GUDHI_THIRD_PARTY "Activate/deactivate third party libraries cmake detection. Setting it to OFF is useful for targets such as doxygen or user_version." ON)
+
+if (NOT WITH_GUDHI_THIRD_PARTY)
+ set (WITH_GUDHI_BENCHMARK OFF)
+ set (WITH_GUDHI_EXAMPLE OFF)
+ set (WITH_GUDHI_PYTHON OFF)
+ set (WITH_GUDHI_TEST OFF)
+ set (WITH_GUDHI_UTILITIES OFF)
+endif()
\ No newline at end of file
diff --git a/src/cmake/modules/GUDHI_submodules.cmake b/src/cmake/modules/GUDHI_submodules.cmake
new file mode 100644
index 00000000..78b045bd
--- /dev/null
+++ b/src/cmake/modules/GUDHI_submodules.cmake
@@ -0,0 +1,5 @@
+# For those who dislike bundled dependencies, this indicates where to find a preinstalled Hera.
+set(HERA_WASSERSTEIN_INTERNAL_INCLUDE_DIR ${CMAKE_SOURCE_DIR}/ext/hera/wasserstein/include)
+set(HERA_WASSERSTEIN_INCLUDE_DIR ${HERA_WASSERSTEIN_INTERNAL_INCLUDE_DIR} CACHE PATH "Directory where one can find Hera's wasserstein.h")
+set(HERA_BOTTLENECK_INTERNAL_INCLUDE_DIR ${CMAKE_SOURCE_DIR}/ext/hera/bottleneck/include)
+set(HERA_BOTTLENECK_INCLUDE_DIR ${HERA_BOTTLENECK_INTERNAL_INCLUDE_DIR} CACHE PATH "Directory where one can find Hera's bottleneck.h")
\ No newline at end of file
diff --git a/src/cmake/modules/GUDHI_third_party_libraries.cmake b/src/cmake/modules/GUDHI_third_party_libraries.cmake
index 6ba822ad..2cf6787e 100644
--- a/src/cmake/modules/GUDHI_third_party_libraries.cmake
+++ b/src/cmake/modules/GUDHI_third_party_libraries.cmake
@@ -48,12 +48,6 @@ if(CGAL_FOUND)
include( ${CGAL_USE_FILE} )
endif()
-# For those who dislike bundled dependencies, this indicates where to find a preinstalled Hera.
-set(HERA_WASSERSTEIN_INTERNAL_INCLUDE_DIR ${CMAKE_SOURCE_DIR}/ext/hera/wasserstein/include)
-set(HERA_WASSERSTEIN_INCLUDE_DIR ${HERA_WASSERSTEIN_INTERNAL_INCLUDE_DIR} CACHE PATH "Directory where one can find Hera's wasserstein.h")
-set(HERA_BOTTLENECK_INTERNAL_INCLUDE_DIR ${CMAKE_SOURCE_DIR}/ext/hera/bottleneck/include)
-set(HERA_BOTTLENECK_INCLUDE_DIR ${HERA_BOTTLENECK_INTERNAL_INCLUDE_DIR} CACHE PATH "Directory where one can find Hera's bottleneck.h")
-
option(WITH_GUDHI_USE_TBB "Build with Intel TBB parallelization" ON)
# Find TBB package for parallel sort - not mandatory, just optional.
diff --git a/src/common/doc/examples.h b/src/common/doc/examples.h
index 556a24f1..1634b19e 100644
--- a/src/common/doc/examples.h
+++ b/src/common/doc/examples.h
@@ -40,7 +40,6 @@
* @example edge_collapse_basic_example.cpp
* \section Cech_complex_example_section Cech_complex
* @example cech_persistence.cpp
- * @example cech_complex_step_by_step.cpp
* @example cech_complex_example_from_points.cpp
* \section Bitmap_cubical_complex_example_section Bitmap_cubical_complex
* @example periodic_cubical_complex_persistence.cpp
diff --git a/src/common/doc/footer.html b/src/common/doc/footer.html
index 4168c6bc..08a2cbd0 100644
--- a/src/common/doc/footer.html
+++ b/src/common/doc/footer.html
@@ -1,5 +1,9 @@
-<!-- HTML footer for doxygen 1.8.6-->
+<!-- HTML footer for doxygen 1.9.4-->
<!-- start footer part -->
+<!--BEGIN GENERATE_TREEVIEW-->
+<div id="nav-path" class="navpath"><!-- id is needed for treeview function! -->
+<!--END GENERATE_TREEVIEW-->
+<ul>
<table style="width:100%">
<tr class="no-bullet shadow-black">
<td class="network-entypo">
@@ -10,14 +14,15 @@
<!--END PROJECT_NAME-->
</td>
<td class="network-entypo">
-<!--BEGIN GENERATE_TREEVIEW-->
$generatedby
<a href="http://www.doxygen.org/index.html">
Doxygen</a> $doxygenversion
-<!--END GENERATE_TREEVIEW-->
</td>
</tr>
</table>
-
+</ul>
+<!--BEGIN GENERATE_TREEVIEW-->
+</div>
+<!--END GENERATE_TREEVIEW-->
</body>
</html>
diff --git a/src/common/doc/header.html b/src/common/doc/header.html
index 7c20478b..a97e1b2f 100644
--- a/src/common/doc/header.html
+++ b/src/common/doc/header.html
@@ -8,9 +8,6 @@
<meta name="generator" content="Doxygen $doxygenversion"/>
<!--BEGIN PROJECT_NAME--><title>$projectname: $title</title><!--END PROJECT_NAME-->
<!--BEGIN !PROJECT_NAME--><title>$title</title><!--END !PROJECT_NAME-->
-<!-- GUDHI website css for header BEGIN -->
-<link rel="stylesheet" type="text/css" href="https://gudhi.inria.fr/assets/css/styles_feeling_responsive.css" />
-<!-- GUDHI website css for header END -->
<link href="$relpath^tabs.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="$relpath^jquery.js"></script>
<script type="text/javascript" src="$relpath^dynsections.js"></script>
@@ -18,13 +15,17 @@ $treeview
$search
$mathjax
<link href="$relpath^$stylesheet" rel="stylesheet" type="text/css" />
+<!-- GUDHI website css for header BEGIN -->
+<link rel="stylesheet" type="text/css" href="https://gudhi.inria.fr/assets/css/styles_feeling_responsive.css" />
+<!-- GUDHI website css for header END -->
$extrastylesheet
</head>
<body>
+<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<!-- GUDHI website header BEGIN -->
<div id="navigation" class="sticky">
- <nav class="top-bar" role="navigation" data-topbar>
+ <nav class="top-bar" role="navigation" data-topbar="true">
<ul class="title-area">
<li class="name">
<h1 class="show-for-small-only"><a href="" class="icon-tree"> GUDHI library</a></h1>
@@ -38,7 +39,7 @@ $extrastylesheet
<li><a href="/contact/">Contact</a></li>
</ul>
<ul class="left">
- <li><a href="/"> <img src="/assets/img/home.png" alt=" GUDHI"> GUDHI </a></li>
+ <li><a href="/"> <img src="/assets/img/home.png" alt=" GUDHI"/> GUDHI </a></li>
<li class="divider"></li>
<li class="has-dropdown">
<a href="#">Project</a>
@@ -85,7 +86,6 @@ $extrastylesheet
</div><!-- /#navigation -->
<!-- GUDHI website header END -->
-<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<!--BEGIN TITLEAREA-->
<div id="titlearea">
diff --git a/src/common/doc/installation.h b/src/common/doc/installation.h
index 229c9f59..28526498 100644
--- a/src/common/doc/installation.h
+++ b/src/common/doc/installation.h
@@ -43,12 +43,17 @@ make \endverbatim
* Testing fetching datasets feature requires the use of the internet and is disabled by default. If you want to include this test, set WITH_GUDHI_REMOTE_TEST to ON when building in the previous step (note that this test is included in the python module):
 * \verbatim cmake -DCMAKE_BUILD_TYPE=Release -DWITH_GUDHI_TEST=ON -DWITH_GUDHI_REMOTE_TEST=ON -DWITH_GUDHI_PYTHON=ON .. \endverbatim
*
- * \subsection documentationgeneration Documentation
- * To generate the documentation, <a target="_blank" href="http://www.doxygen.org/">Doxygen</a> is required.
- * Run the following command in a terminal:
+ * \subsection documentationgeneration C++ documentation
+ * To generate the C++ documentation, the <a target="_blank" href="http://www.doxygen.org/">doxygen</a> program
+ * is required (version &ge; 1.9.3 is advised). Run the following command in a terminal:
* \verbatim make doxygen \endverbatim
* Documentation will be generated in a folder named <code>html</code>.
*
+ * In case no full setup is present and only the documentation should be built, the following command sequence
+ * can be used:
+\verbatim cmake -DWITH_GUDHI_THIRD_PARTY=OFF ..
+make doxygen\endverbatim
+ *
* \subsection helloworld Hello world !
* The <a target="_blank" href="https://github.com/GUDHI/hello-gudhi-world">Hello world for GUDHI</a>
* project is an example to help developers to make their own C++ project on top of the GUDHI library.
diff --git a/src/common/doc/main_page.md b/src/common/doc/main_page.md
index 4f2f1692..ce903405 100644
--- a/src/common/doc/main_page.md
+++ b/src/common/doc/main_page.md
@@ -180,8 +180,8 @@
<td width="15%">
<b>Author:</b> Vincent Rouvreau<br>
<b>Introduced in:</b> GUDHI 2.2.0<br>
- <b>Copyright:</b> MIT [(GPL v3)](../../licensing/)<br>
- <b>Includes:</b> [Miniball](https://people.inf.ethz.ch/gaertner/subdir/software/miniball.html)<br>
+ <b>Copyright:</b> MIT [(LGPL v3)](../../licensing/)<br>
+ <b>Requires:</b> \ref cgal
</td>
</tr>
<tr>
diff --git a/src/common/doc/stylesheet.css b/src/common/doc/stylesheet.css
index 1df177a4..fb030e1f 100644..100755
--- a/src/common/doc/stylesheet.css
+++ b/src/common/doc/stylesheet.css
@@ -1,1367 +1,28 @@
-/* The standard CSS for doxygen 1.8.6 */
-
-body, table, div, p, dl {
- font: 400 14px/22px Roboto,sans-serif;
-}
-
-/* @group Heading Levels */
-
-h1.groupheader {
- font-size: 150%;
-}
-
-.title {
- font: 400 14px/28px Roboto,sans-serif;
- font-size: 150%;
- font-weight: bold;
- margin: 10px 2px;
-}
-
-h2.groupheader {
- border-bottom: 1px solid #879ECB;
- color: #354C7B;
- font-size: 150%;
- font-weight: normal;
- margin-top: 1.75em;
- padding-top: 8px;
- padding-bottom: 4px;
- width: 100%;
-}
-
-h3.groupheader {
- font-size: 100%;
-}
-
-h1, h2, h3, h4, h5, h6 {
- -webkit-transition: text-shadow 0.5s linear;
- -moz-transition: text-shadow 0.5s linear;
- -ms-transition: text-shadow 0.5s linear;
- -o-transition: text-shadow 0.5s linear;
- transition: text-shadow 0.5s linear;
- margin-right: 15px;
-}
-
-h1.glow, h2.glow, h3.glow, h4.glow, h5.glow, h6.glow {
- text-shadow: 0 0 15px cyan;
-}
-
-dt {
- font-weight: bold;
-}
-
-div.multicol {
- -moz-column-gap: 1em;
- -webkit-column-gap: 1em;
- -moz-column-count: 3;
- -webkit-column-count: 3;
-}
-
-p.startli, p.startdd {
- margin-top: 2px;
-}
-
-p.starttd {
- margin-top: 0px;
-}
-
-p.endli {
- margin-bottom: 0px;
-}
-
-p.enddd {
- margin-bottom: 4px;
-}
-
-p.endtd {
- margin-bottom: 2px;
-}
-
-/* @end */
-
-caption {
- font-weight: bold;
-}
-
-span.legend {
- font-size: 70%;
- text-align: center;
-}
-
-h3.version {
- font-size: 90%;
- text-align: center;
-}
-
-div.qindex, div.navtab{
- background-color: #EBEFF6;
- border: 1px solid #A3B4D7;
- text-align: center;
-}
-
-div.qindex, div.navpath {
- width: 100%;
- line-height: 140%;
-}
-
-div.navtab {
- margin-right: 15px;
-}
-
-/* @group Link Styling */
-
-a {
- color: #3D578C;
- font-weight: normal;
- text-decoration: none;
-}
-
-.contents a:visited {
- color: #4665A2;
-}
-
-a:hover {
- text-decoration: underline;
-}
-
-a.qindex {
- font-weight: bold;
-}
-
-a.qindexHL {
- font-weight: bold;
- background-color: #9CAFD4;
- color: #ffffff;
- border: 1px double #869DCA;
-}
-
-.contents a.qindexHL:visited {
- color: #ffffff;
-}
-
-a.el {
- font-weight: bold;
-}
-
-a.elRef {
-}
-
-a.code, a.code:visited, a.line, a.line:visited {
- color: #4665A2;
-}
-
-a.codeRef, a.codeRef:visited, a.lineRef, a.lineRef:visited {
- color: #4665A2;
-}
-
-/* @end */
-
-dl.el {
- margin-left: -1cm;
-}
-
-pre.fragment {
- border: 1px solid #C4CFE5;
- background-color: #FBFCFD;
- padding: 4px 6px;
- margin: 4px 8px 4px 2px;
- overflow: auto;
- word-wrap: break-word;
- font-size: 9pt;
- line-height: 125%;
- font-family: monospace, fixed;
- font-size: 105%;
-}
-
-div.fragment {
- padding: 4px 6px;
- margin: 4px 8px 4px 2px;
- background-color: #FBFCFD;
- border: 1px solid #C4CFE5;
-}
-
-div.line {
- font-family: monospace, fixed;
- font-size: 13px;
- min-height: 13px;
- line-height: 1.0;
- text-wrap: unrestricted;
- white-space: -moz-pre-wrap; /* Moz */
- white-space: -pre-wrap; /* Opera 4-6 */
- white-space: -o-pre-wrap; /* Opera 7 */
- white-space: pre-wrap; /* CSS3 */
- word-wrap: break-word; /* IE 5.5+ */
- text-indent: -53px;
- padding-left: 53px;
- padding-bottom: 0px;
- margin: 0px;
- -webkit-transition-property: background-color, box-shadow;
- -webkit-transition-duration: 0.5s;
- -moz-transition-property: background-color, box-shadow;
- -moz-transition-duration: 0.5s;
- -ms-transition-property: background-color, box-shadow;
- -ms-transition-duration: 0.5s;
- -o-transition-property: background-color, box-shadow;
- -o-transition-duration: 0.5s;
- transition-property: background-color, box-shadow;
- transition-duration: 0.5s;
-}
-
-div.line.glow {
- background-color: cyan;
- box-shadow: 0 0 10px cyan;
-}
-
-
-span.lineno {
- padding-right: 4px;
- text-align: right;
- border-right: 2px solid #0F0;
- background-color: #E8E8E8;
- white-space: pre;
-}
-span.lineno a {
- background-color: #D8D8D8;
-}
-
-span.lineno a:hover {
- background-color: #C8C8C8;
-}
-
-div.ah {
- background-color: black;
- font-weight: bold;
- color: #ffffff;
- margin-bottom: 3px;
- margin-top: 3px;
- padding: 0.2em;
- border: solid thin #333;
- border-radius: 0.5em;
- -webkit-border-radius: .5em;
- -moz-border-radius: .5em;
- box-shadow: 2px 2px 3px #999;
- -webkit-box-shadow: 2px 2px 3px #999;
- -moz-box-shadow: rgba(0, 0, 0, 0.15) 2px 2px 2px;
- background-image: -webkit-gradient(linear, left top, left bottom, from(#eee), to(#000),color-stop(0.3, #444));
- background-image: -moz-linear-gradient(center top, #eee 0%, #444 40%, #000);
-}
-
-div.groupHeader {
- margin-left: 16px;
- margin-top: 12px;
- font-weight: bold;
-}
-
-div.groupText {
- margin-left: 16px;
- font-style: italic;
-}
-
-body {
- background-color: white;
- color: black;
- margin: 0;
-}
-
-div.contents {
- margin-top: 10px;
- margin-left: 12px;
- margin-right: 8px;
-}
-
-td.indexkey {
- background-color: #EBEFF6;
- font-weight: bold;
- border: 1px solid #C4CFE5;
- margin: 2px 0px 2px 0;
- padding: 2px 10px;
- white-space: nowrap;
- vertical-align: top;
-}
-
-td.indexvalue {
- background-color: #EBEFF6;
- border: 1px solid #C4CFE5;
- padding: 2px 10px;
- margin: 2px 0px;
-}
-
-tr.memlist {
- background-color: #EEF1F7;
-}
-
-p.formulaDsp {
- text-align: center;
-}
-
-img.formulaDsp {
-
-}
-
-img.formulaInl {
- vertical-align: middle;
-}
-
-div.center {
- text-align: center;
- margin-top: 0px;
- margin-bottom: 0px;
- padding: 0px;
-}
-
-div.center img {
- border: 0px;
-}
-
-address.footer {
- text-align: right;
- padding-right: 12px;
-}
-
-img.footer {
- border: 0px;
- vertical-align: middle;
-}
-
-/* @group Code Colorization */
-
-span.keyword {
- color: #008000
-}
-
-span.keywordtype {
- color: #604020
-}
-
-span.keywordflow {
- color: #e08000
-}
-
-span.comment {
- color: #800000
-}
-
-span.preprocessor {
- color: #806020
-}
-
-span.stringliteral {
- color: #002080
-}
-
-span.charliteral {
- color: #008080
-}
-
-span.vhdldigit {
- color: #ff00ff
-}
-
-span.vhdlchar {
- color: #000000
-}
-
-span.vhdlkeyword {
- color: #700070
-}
-
-span.vhdllogic {
- color: #ff0000
-}
-
-blockquote {
- background-color: #F7F8FB;
- border-left: 2px solid #9CAFD4;
- margin: 0 24px 0 4px;
- padding: 0 12px 0 16px;
-}
-
-/* @end */
-
-/*
-.search {
- color: #003399;
- font-weight: bold;
-}
-
-form.search {
- margin-bottom: 0px;
- margin-top: 0px;
-}
-
-input.search {
- font-size: 75%;
- color: #000080;
- font-weight: normal;
- background-color: #e8eef2;
-}
-*/
-
-td.tiny {
- font-size: 75%;
-}
-
-.dirtab {
- padding: 4px;
- border-collapse: collapse;
- border: 1px solid #A3B4D7;
-}
-
-th.dirtab {
- background: #EBEFF6;
- font-weight: bold;
-}
-
-hr {
- height: 0px;
- border: none;
- border-top: 1px solid #4A6AAA;
-}
-
-hr.footer {
- height: 1px;
-}
-
-/* @group Member Descriptions */
-
-table.memberdecls {
- border-spacing: 0px;
- padding: 0px;
-}
-
-.memberdecls td, .fieldtable tr {
- -webkit-transition-property: background-color, box-shadow;
- -webkit-transition-duration: 0.5s;
- -moz-transition-property: background-color, box-shadow;
- -moz-transition-duration: 0.5s;
- -ms-transition-property: background-color, box-shadow;
- -ms-transition-duration: 0.5s;
- -o-transition-property: background-color, box-shadow;
- -o-transition-duration: 0.5s;
- transition-property: background-color, box-shadow;
- transition-duration: 0.5s;
-}
-
-.memberdecls td.glow, .fieldtable tr.glow {
- background-color: cyan;
- box-shadow: 0 0 15px cyan;
-}
-
-.mdescLeft, .mdescRight,
-.memItemLeft, .memItemRight,
-.memTemplItemLeft, .memTemplItemRight, .memTemplParams {
- background-color: #F9FAFC;
- border: none;
- margin: 4px;
- padding: 1px 0 0 8px;
-}
-
-.mdescLeft, .mdescRight {
- padding: 0px 8px 4px 8px;
- color: #555;
-}
-
-.memSeparator {
- border-bottom: 1px solid #DEE4F0;
- line-height: 1px;
- margin: 0px;
- padding: 0px;
-}
-
-.memItemLeft, .memTemplItemLeft {
- white-space: nowrap;
-}
-
-.memItemRight {
- width: 100%;
-}
-
-.memTemplParams {
- color: #4665A2;
- white-space: nowrap;
- font-size: 80%;
-}
-
-/* @end */
-
-/* @group Member Details */
-
-/* Styles for detailed member documentation */
-
-.memtemplate {
- font-size: 80%;
- color: #4665A2;
- font-weight: normal;
- margin-left: 9px;
-}
-
-.memnav {
- background-color: #EBEFF6;
- border: 1px solid #A3B4D7;
- text-align: center;
- margin: 2px;
- margin-right: 15px;
- padding: 2px;
-}
-
-.mempage {
- width: 100%;
-}
-
-.memitem {
- padding: 0;
- margin-bottom: 10px;
- margin-right: 5px;
- -webkit-transition: box-shadow 0.5s linear;
- -moz-transition: box-shadow 0.5s linear;
- -ms-transition: box-shadow 0.5s linear;
- -o-transition: box-shadow 0.5s linear;
- transition: box-shadow 0.5s linear;
- display: table !important;
- width: 100%;
-}
-
-.memitem.glow {
- box-shadow: 0 0 15px cyan;
-}
-
-.memname {
- font-weight: bold;
- margin-left: 6px;
-}
-
-.memname td {
- vertical-align: bottom;
-}
-
-.memproto, dl.reflist dt {
- border-top: 1px solid #A8B8D9;
- border-left: 1px solid #A8B8D9;
- border-right: 1px solid #A8B8D9;
- padding: 6px 0px 6px 0px;
- color: #253555;
- font-weight: bold;
- text-shadow: 0px 1px 1px rgba(255, 255, 255, 0.9);
- background-image:url('nav_f.png');
- background-repeat:repeat-x;
- background-color: #E2E8F2;
- /* opera specific markup */
- box-shadow: 5px 5px 5px rgba(0, 0, 0, 0.15);
- border-top-right-radius: 4px;
- border-top-left-radius: 4px;
- /* firefox specific markup */
- -moz-box-shadow: rgba(0, 0, 0, 0.15) 5px 5px 5px;
- -moz-border-radius-topright: 4px;
- -moz-border-radius-topleft: 4px;
- /* webkit specific markup */
- -webkit-box-shadow: 5px 5px 5px rgba(0, 0, 0, 0.15);
- -webkit-border-top-right-radius: 4px;
- -webkit-border-top-left-radius: 4px;
-
-}
-
-.memdoc, dl.reflist dd {
- border-bottom: 1px solid #A8B8D9;
- border-left: 1px solid #A8B8D9;
- border-right: 1px solid #A8B8D9;
- padding: 6px 10px 2px 10px;
- background-color: #FBFCFD;
- border-top-width: 0;
- background-image:url('nav_g.png');
- background-repeat:repeat-x;
- background-color: #FFFFFF;
- /* opera specific markup */
- border-bottom-left-radius: 4px;
- border-bottom-right-radius: 4px;
- box-shadow: 5px 5px 5px rgba(0, 0, 0, 0.15);
- /* firefox specific markup */
- -moz-border-radius-bottomleft: 4px;
- -moz-border-radius-bottomright: 4px;
- -moz-box-shadow: rgba(0, 0, 0, 0.15) 5px 5px 5px;
- /* webkit specific markup */
- -webkit-border-bottom-left-radius: 4px;
- -webkit-border-bottom-right-radius: 4px;
- -webkit-box-shadow: 5px 5px 5px rgba(0, 0, 0, 0.15);
-}
-
-dl.reflist dt {
- padding: 5px;
-}
-
-dl.reflist dd {
- margin: 0px 0px 10px 0px;
- padding: 5px;
-}
-
-.paramkey {
- text-align: right;
-}
-
-.paramtype {
- white-space: nowrap;
-}
-
-.paramname {
- color: #602020;
- white-space: nowrap;
-}
-.paramname em {
- font-style: normal;
-}
-.paramname code {
- line-height: 14px;
-}
-
-.params, .retval, .exception, .tparams {
- margin-left: 0px;
- padding-left: 0px;
-}
-
-.params .paramname, .retval .paramname {
- font-weight: bold;
- vertical-align: top;
-}
-
-.params .paramtype {
- font-style: italic;
- vertical-align: top;
-}
-
-.params .paramdir {
- font-family: "courier new",courier,monospace;
- vertical-align: top;
-}
-
-table.mlabels {
- border-spacing: 0px;
-}
-
-td.mlabels-left {
- width: 100%;
- padding: 0px;
-}
-
-td.mlabels-right {
- vertical-align: bottom;
- padding: 0px;
- white-space: nowrap;
-}
-
-span.mlabels {
- margin-left: 8px;
-}
-
-span.mlabel {
- background-color: #728DC1;
- border-top:1px solid #5373B4;
- border-left:1px solid #5373B4;
- border-right:1px solid #C4CFE5;
- border-bottom:1px solid #C4CFE5;
- text-shadow: none;
- color: white;
- margin-right: 4px;
- padding: 2px 3px;
- border-radius: 3px;
- font-size: 7pt;
- white-space: nowrap;
- vertical-align: middle;
-}
-
-
-
-/* @end */
-
-/* these are for tree view when not used as main index */
-
-div.directory {
- margin: 10px 0px;
- border-top: 1px solid #A8B8D9;
- border-bottom: 1px solid #A8B8D9;
- width: 100%;
-}
-
-.directory table {
- border-collapse:collapse;
-}
-
-.directory td {
- margin: 0px;
- padding: 0px;
- vertical-align: top;
-}
-
-.directory td.entry {
- white-space: nowrap;
- padding-right: 6px;
- padding-top: 3px;
-}
-
-.directory td.entry a {
- outline:none;
-}
-
-.directory td.entry a img {
- border: none;
-}
-
-.directory td.desc {
- width: 100%;
- padding-left: 6px;
- padding-right: 6px;
- padding-top: 3px;
- border-left: 1px solid rgba(0,0,0,0.05);
-}
-
-.directory tr.even {
- padding-left: 6px;
- background-color: #F7F8FB;
-}
-
-.directory img {
- vertical-align: -30%;
-}
-
-.directory .levels {
- white-space: nowrap;
- width: 100%;
- text-align: right;
- font-size: 9pt;
-}
-
-.directory .levels span {
- cursor: pointer;
- padding-left: 2px;
- padding-right: 2px;
- color: #3D578C;
-}
-
-div.dynheader {
- margin-top: 8px;
- -webkit-touch-callout: none;
- -webkit-user-select: none;
- -khtml-user-select: none;
- -moz-user-select: none;
- -ms-user-select: none;
- user-select: none;
-}
-
-address {
- font-style: normal;
- color: #2A3D61;
-}
-
-table.doxtable {
- border-collapse:collapse;
- margin-top: 4px;
- margin-bottom: 4px;
-}
-
-table.doxtable td, table.doxtable th {
- border: 1px solid #2D4068;
- padding: 3px 7px 2px;
-}
-
-table.doxtable th {
- background-color: #374F7F;
- color: #FFFFFF;
- font-size: 110%;
- padding-bottom: 4px;
- padding-top: 5px;
-}
-
-table.fieldtable {
- /*width: 100%;*/
- margin-bottom: 10px;
- border: 1px solid #A8B8D9;
- border-spacing: 0px;
- -moz-border-radius: 4px;
- -webkit-border-radius: 4px;
- border-radius: 4px;
- -moz-box-shadow: rgba(0, 0, 0, 0.15) 2px 2px 2px;
- -webkit-box-shadow: 2px 2px 2px rgba(0, 0, 0, 0.15);
- box-shadow: 2px 2px 2px rgba(0, 0, 0, 0.15);
-}
-
-.fieldtable td, .fieldtable th {
- padding: 3px 7px 2px;
-}
-
-.fieldtable td.fieldtype, .fieldtable td.fieldname {
- white-space: nowrap;
- border-right: 1px solid #A8B8D9;
- border-bottom: 1px solid #A8B8D9;
- vertical-align: top;
-}
-
-.fieldtable td.fieldname {
- padding-top: 3px;
-}
-
-.fieldtable td.fielddoc {
- border-bottom: 1px solid #A8B8D9;
- /*width: 100%;*/
-}
-
-.fieldtable td.fielddoc p:first-child {
- margin-top: 0px;
-}
-
-.fieldtable td.fielddoc p:last-child {
- margin-bottom: 2px;
-}
-
-.fieldtable tr:last-child td {
- border-bottom: none;
-}
-
-.fieldtable th {
- background-image:url('nav_f.png');
- background-repeat:repeat-x;
- background-color: #E2E8F2;
- font-size: 90%;
- color: #253555;
- padding-bottom: 4px;
- padding-top: 5px;
- text-align:left;
- -moz-border-radius-topleft: 4px;
- -moz-border-radius-topright: 4px;
- -webkit-border-top-left-radius: 4px;
- -webkit-border-top-right-radius: 4px;
- border-top-left-radius: 4px;
- border-top-right-radius: 4px;
- border-bottom: 1px solid #A8B8D9;
-}
-
-
-.tabsearch {
- top: 0px;
- left: 10px;
- height: 36px;
- background-image: url('tab_b.png');
- z-index: 101;
- overflow: hidden;
- font-size: 13px;
-}
-
-.navpath ul
-{
- font-size: 11px;
- background-image:url('tab_b.png');
- background-repeat:repeat-x;
- background-position: 0 -5px;
- height:30px;
- line-height:30px;
- color:#8AA0CC;
- border:solid 1px #C2CDE4;
- overflow:hidden;
- margin:0px;
- padding:0px;
-}
-
-.navpath li
-{
- list-style-type:none;
- float:left;
- padding-left:10px;
- padding-right:15px;
- background-image:url('bc_s.png');
- background-repeat:no-repeat;
- background-position:right;
- color:#364D7C;
-}
-
-.navpath li.navelem a
-{
- height:32px;
- display:block;
- text-decoration: none;
- outline: none;
- color: #283A5D;
- font-family: 'Lucida Grande',Geneva,Helvetica,Arial,sans-serif;
- text-shadow: 0px 1px 1px rgba(255, 255, 255, 0.9);
- text-decoration: none;
-}
-
-.navpath li.navelem a:hover
-{
- color:#6884BD;
-}
-
-.navpath li.footer
-{
- list-style-type:none;
- float:right;
- padding-left:10px;
- padding-right:15px;
- background-image:none;
- background-repeat:no-repeat;
- background-position:right;
- color:#364D7C;
- font-size: 8pt;
-}
-
-
-div.summary
-{
- float: right;
- font-size: 8pt;
- padding-right: 5px;
- width: 50%;
- text-align: right;
-}
-
-div.summary a
-{
- white-space: nowrap;
-}
-
-div.ingroups
-{
- font-size: 8pt;
- width: 50%;
- text-align: left;
-}
-
-div.ingroups a
-{
- white-space: nowrap;
-}
-
-div.header
-{
- background-image:url('nav_h.png');
- background-repeat:repeat-x;
- background-color: #F9FAFC;
- margin: 0px;
- border-bottom: 1px solid #C4CFE5;
-}
-
-div.headertitle
-{
- padding: 5px 5px 5px 10px;
-}
-
-dl
-{
- padding: 0 0 0 10px;
-}
-
-/* dl.note, dl.warning, dl.attention, dl.pre, dl.post, dl.invariant, dl.deprecated, dl.todo, dl.test, dl.bug */
-dl.section
-{
- margin-left: 0px;
- padding-left: 0px;
-}
-
-dl.note
-{
- margin-left:-7px;
- padding-left: 3px;
- border-left:4px solid;
- border-color: #D0C000;
-}
-
-dl.warning, dl.attention
-{
- margin-left:-7px;
- padding-left: 3px;
- border-left:4px solid;
- border-color: #FF0000;
-}
-
-dl.pre, dl.post, dl.invariant
-{
- margin-left:-7px;
- padding-left: 3px;
- border-left:4px solid;
- border-color: #00D000;
-}
-
-dl.deprecated
-{
- margin-left:-7px;
- padding-left: 3px;
- border-left:4px solid;
- border-color: #505050;
-}
-
-dl.todo
-{
- margin-left:-7px;
- padding-left: 3px;
- border-left:4px solid;
- border-color: #00C0E0;
-}
-
-dl.test
-{
- margin-left:-7px;
- padding-left: 3px;
- border-left:4px solid;
- border-color: #3030E0;
-}
-
-dl.bug
-{
- margin-left:-7px;
- padding-left: 3px;
- border-left:4px solid;
- border-color: #C08050;
-}
-
-dl.section dd {
- margin-bottom: 6px;
-}
-
-
-#projectlogo
-{
- text-align: center;
- vertical-align: bottom;
- border-collapse: separate;
-}
-
-#projectlogo img
-{
- border: 0px none;
-}
-
#projectname
{
- border: 0px none;
- font: 300% Tahoma, Arial,sans-serif;
- margin: 0px;
- padding: 2px 0px;
+ border: 0px none;
}
-
#projectbrief
{
- font: 60% Tahoma, Arial,sans-serif;
- margin: 0px;
- padding: 0px;
+ font: 60% Tahoma, Arial,sans-serif;
}
-
#projectnumber
{
- font: 80% Tahoma, Arial,sans-serif;
- margin: 0px;
- padding: 0px;
-}
-
-#titlearea
-{
- padding: 0px;
- margin: 0px;
- width: 100%;
- border-bottom: 1px solid #5373B4;
-}
-
-.image
-{
- text-align: center;
-}
-
-.dotgraph
-{
- text-align: center;
+ font: 80% Tahoma, Arial,sans-serif;
}
-
-.mscgraph
+.arrow
{
- text-align: center;
-}
-
-.diagraph
-{
- text-align: center;
-}
-
-.caption
-{
- font-weight: bold;
-}
-
-div.zoom
-{
- border: 1px solid #90A5CE;
-}
-
-dl.citelist {
- margin-bottom:50px;
-}
-
-dl.citelist dt {
- color:#334975;
- float:left;
- font-weight:bold;
- margin-right:10px;
- padding:5px;
-}
-
-dl.citelist dd {
- margin:2px 0;
- padding:5px 0;
-}
-
-div.toc {
- padding: 14px 25px;
- background-color: #F4F6FA;
- border: 1px solid #D8DFEE;
- border-radius: 7px 7px 7px 7px;
- float: right;
- height: auto;
- margin: 0 20px 10px 10px;
- width: 200px;
-}
-
-div.toc li {
- background: url("bdwn.png") no-repeat scroll 0 5px transparent;
- font: 10px/1.2 Verdana,DejaVu Sans,Geneva,sans-serif;
- margin-top: 5px;
- padding-left: 10px;
- padding-top: 2px;
-}
-
-div.toc h3 {
- font: bold 12px/1.2 Arial,FreeSans,sans-serif;
- color: #4665A2;
- border-bottom: 0 none;
- margin: 0;
-}
-
-div.toc ul {
- list-style: none outside none;
- border: medium none;
- padding: 0px;
-}
-
-div.toc li.level1 {
- margin-left: 0px;
-}
-
-div.toc li.level2 {
- margin-left: 15px;
-}
-
-div.toc li.level3 {
- margin-left: 30px;
-}
-
-div.toc li.level4 {
- margin-left: 45px;
-}
-
-.inherit_header {
- font-weight: bold;
- color: gray;
- cursor: pointer;
- -webkit-touch-callout: none;
- -webkit-user-select: none;
- -khtml-user-select: none;
- -moz-user-select: none;
- -ms-user-select: none;
- user-select: none;
-}
-
-.inherit_header td {
- padding: 6px 0px 2px 5px;
-}
-
-.inherit {
- display: none;
-}
-
-tr.heading h2 {
- margin-top: 12px;
- margin-bottom: 4px;
-}
-
-/* tooltip related style info */
-
-.ttc {
- position: absolute;
- display: none;
-}
-
-#powerTip {
- cursor: default;
- white-space: nowrap;
- background-color: white;
- border: 1px solid gray;
- border-radius: 4px 4px 4px 4px;
- box-shadow: 1px 1px 7px gray;
- display: none;
- font-size: smaller;
- max-width: 80%;
- opacity: 0.9;
- padding: 1ex 1em 1em;
- position: absolute;
- z-index: 2147483647;
+ width: auto;
+ height: auto;
+ padding-left: 16px;
}
-
-#powerTip div.ttdoc {
- color: grey;
- font-style: italic;
-}
-
-#powerTip div.ttname a {
- font-weight: bold;
-}
-
-#powerTip div.ttname {
- font-weight: bold;
-}
-
-#powerTip div.ttdeci {
- color: #006318;
-}
-
-#powerTip div {
- margin: 0px;
- padding: 0px;
- font: 12px/16px Roboto,sans-serif;
-}
-
-#powerTip:before, #powerTip:after {
- content: "";
- position: absolute;
- margin: 0px;
-}
-
-#powerTip.n:after, #powerTip.n:before,
-#powerTip.s:after, #powerTip.s:before,
-#powerTip.w:after, #powerTip.w:before,
-#powerTip.e:after, #powerTip.e:before,
-#powerTip.ne:after, #powerTip.ne:before,
-#powerTip.se:after, #powerTip.se:before,
-#powerTip.nw:after, #powerTip.nw:before,
-#powerTip.sw:after, #powerTip.sw:before {
- border: solid transparent;
- content: " ";
- height: 0;
- width: 0;
- position: absolute;
-}
-
-#powerTip.n:after, #powerTip.s:after,
-#powerTip.w:after, #powerTip.e:after,
-#powerTip.nw:after, #powerTip.ne:after,
-#powerTip.sw:after, #powerTip.se:after {
- border-color: rgba(255, 255, 255, 0);
-}
-
-#powerTip.n:before, #powerTip.s:before,
-#powerTip.w:before, #powerTip.e:before,
-#powerTip.nw:before, #powerTip.ne:before,
-#powerTip.sw:before, #powerTip.se:before {
- border-color: rgba(128, 128, 128, 0);
-}
-
-#powerTip.n:after, #powerTip.n:before,
-#powerTip.ne:after, #powerTip.ne:before,
-#powerTip.nw:after, #powerTip.nw:before {
- top: 100%;
-}
-
-#powerTip.n:after, #powerTip.ne:after, #powerTip.nw:after {
- border-top-color: #ffffff;
- border-width: 10px;
- margin: 0px -10px;
-}
-#powerTip.n:before {
- border-top-color: #808080;
- border-width: 11px;
- margin: 0px -11px;
-}
-#powerTip.n:after, #powerTip.n:before {
- left: 50%;
-}
-
-#powerTip.nw:after, #powerTip.nw:before {
- right: 14px;
-}
-
-#powerTip.ne:after, #powerTip.ne:before {
- left: 14px;
-}
-
-#powerTip.s:after, #powerTip.s:before,
-#powerTip.se:after, #powerTip.se:before,
-#powerTip.sw:after, #powerTip.sw:before {
- bottom: 100%;
-}
-
-#powerTip.s:after, #powerTip.se:after, #powerTip.sw:after {
- border-bottom-color: #ffffff;
- border-width: 10px;
- margin: 0px -10px;
-}
-
-#powerTip.s:before, #powerTip.se:before, #powerTip.sw:before {
- border-bottom-color: #808080;
- border-width: 11px;
- margin: 0px -11px;
-}
-
-#powerTip.s:after, #powerTip.s:before {
- left: 50%;
-}
-
-#powerTip.sw:after, #powerTip.sw:before {
- right: 14px;
-}
-
-#powerTip.se:after, #powerTip.se:before {
- left: 14px;
-}
-
-#powerTip.e:after, #powerTip.e:before {
- left: 100%;
-}
-#powerTip.e:after {
- border-left-color: #ffffff;
- border-width: 10px;
- top: 50%;
- margin-top: -10px;
-}
-#powerTip.e:before {
- border-left-color: #808080;
- border-width: 11px;
- top: 50%;
- margin-top: -11px;
-}
-
-#powerTip.w:after, #powerTip.w:before {
- right: 100%;
-}
-#powerTip.w:after {
- border-right-color: #ffffff;
- border-width: 10px;
- top: 50%;
- margin-top: -10px;
-}
-#powerTip.w:before {
- border-right-color: #808080;
- border-width: 11px;
- top: 50%;
- margin-top: -11px;
-}
-
-@media print
-{
- #top { display: none; }
- #side-nav { display: none; }
- #nav-path { display: none; }
- body { overflow:visible; }
- h1, h2, h3, h4, h5, h6 { page-break-after: avoid; }
- .summary { display: none; }
- .memitem { page-break-inside: avoid; }
- #doc-content
- {
- margin-left:0 !important;
- height:auto !important;
- width:auto !important;
- overflow:inherit;
- display:inline;
- }
+/* With doxygen versions <= 1.9.2 the default setting 'overflow: hidden;' causes problems.
+ * Commit 590198b416cd53313d150428d2f912586065ea0d [590198b] of Wednesday, December 1, 2021 1:37:26 PM
+ * (issue #8924 "Horizontal scroll bar missing in HTML for wide class=dotgraph objects")
+ * already corrects this for doxygen 1.9.3, but to render properly with versions <= 1.9.2
+ * this setting is still required. */
+ul {
+ overflow: visible;
}
-
diff --git a/src/common/include/gudhi/distance_functions.h b/src/common/include/gudhi/distance_functions.h
index 9bbc62b7..5e5a1e31 100644
--- a/src/common/include/gudhi/distance_functions.h
+++ b/src/common/include/gudhi/distance_functions.h
@@ -13,8 +13,6 @@
#include <gudhi/Debug_utils.h>
-#include <gudhi/Miniball.hpp>
-
#include <boost/range/metafunctions.hpp>
#include <boost/range/size.hpp>
@@ -59,53 +57,6 @@ class Euclidean_distance {
}
};
-/** @brief Compute the radius of the minimal enclosing ball between Points given by a range of coordinates.
- * The points are assumed to have the same dimension. */
-class Minimal_enclosing_ball_radius {
- public:
- /** \brief Minimal_enclosing_ball_radius from two points.
- *
- * @param[in] point_1 First point.
- * @param[in] point_2 second point.
- * @return The minimal enclosing ball radius for the two points (aka. Euclidean distance / 2.).
- *
- * \tparam Point must be a range of Cartesian coordinates.
- *
- */
- template< typename Point >
- typename std::iterator_traits<typename boost::range_iterator<Point>::type>::value_type
- operator()(const Point& point_1, const Point& point_2) const {
- return Euclidean_distance()(point_1, point_2) / 2.;
- }
- /** \brief Minimal_enclosing_ball_radius from a point cloud.
- *
- * @param[in] point_cloud The points.
- * @return The minimal enclosing ball radius for the points.
- *
- * \tparam Point_cloud must be a range of points with Cartesian coordinates.
- * Point_cloud is a range over a range of Coordinate.
- *
- */
- template< typename Point_cloud,
- typename Point_iterator = typename boost::range_const_iterator<Point_cloud>::type,
- typename Point = typename std::iterator_traits<Point_iterator>::value_type,
- typename Coordinate_iterator = typename boost::range_const_iterator<Point>::type,
- typename Coordinate = typename std::iterator_traits<Coordinate_iterator>::value_type>
- Coordinate
- operator()(const Point_cloud& point_cloud) const {
- using Min_sphere = Miniball::Miniball<Miniball::CoordAccessor<Point_iterator, Coordinate_iterator>>;
-
- Min_sphere ms(boost::size(*point_cloud.begin()), point_cloud.begin(), point_cloud.end());
-#ifdef DEBUG_TRACES
- std::clog << "Minimal_enclosing_ball_radius = " << std::sqrt(ms.squared_radius()) << " | nb points = "
- << boost::size(point_cloud) << " | dimension = "
- << boost::size(*point_cloud.begin()) << std::endl;
-#endif // DEBUG_TRACES
-
- return std::sqrt(ms.squared_radius());
- }
-};
-
} // namespace Gudhi
#endif // DISTANCE_FUNCTIONS_H_
diff --git a/src/python/CMakeLists.txt b/src/python/CMakeLists.txt
index 63a2e006..5f323935 100644
--- a/src/python/CMakeLists.txt
+++ b/src/python/CMakeLists.txt
@@ -70,6 +70,7 @@ if(PYTHONINTERP_FOUND)
set(GUDHI_PYTHON_MODULES "${GUDHI_PYTHON_MODULES}'euclidean_strong_witness_complex', ")
# Modules that should not be auto-imported in __init__.py
set(GUDHI_PYTHON_MODULES_EXTRA "${GUDHI_PYTHON_MODULES_EXTRA}'representations', ")
+ set(GUDHI_PYTHON_MODULES_EXTRA "${GUDHI_PYTHON_MODULES_EXTRA}'tensorflow', ")
set(GUDHI_PYTHON_MODULES_EXTRA "${GUDHI_PYTHON_MODULES_EXTRA}'wasserstein', ")
set(GUDHI_PYTHON_MODULES_EXTRA "${GUDHI_PYTHON_MODULES_EXTRA}'point_cloud', ")
set(GUDHI_PYTHON_MODULES_EXTRA "${GUDHI_PYTHON_MODULES_EXTRA}'weighted_rips_complex', ")
@@ -289,7 +290,8 @@ if(PYTHONINTERP_FOUND)
# Other .py files
file(COPY "gudhi/persistence_graphical_tools.py" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi")
file(COPY "gudhi/representations" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi/")
- file(COPY "gudhi/wasserstein" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi")
+ file(COPY "gudhi/wasserstein" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi")
+ file(COPY "gudhi/tensorflow" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi")
file(COPY "gudhi/point_cloud" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi")
file(COPY "gudhi/clustering" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi" FILES_MATCHING PATTERN "*.py")
file(COPY "gudhi/weighted_rips_complex.py" DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/gudhi")
@@ -563,6 +565,11 @@ if(PYTHONINTERP_FOUND)
add_gudhi_py_test(test_representations)
endif()
+ # Differentiation
+ if(TENSORFLOW_FOUND)
+ add_gudhi_py_test(test_diff)
+ endif()
+
# Betti curves
if(SKLEARN_FOUND AND SCIPY_FOUND)
add_gudhi_py_test(test_betti_curve_representations)
diff --git a/src/python/doc/cubical_complex_sum.inc b/src/python/doc/cubical_complex_sum.inc
index e2fd55bb..f1cf25d4 100644
--- a/src/python/doc/cubical_complex_sum.inc
+++ b/src/python/doc/cubical_complex_sum.inc
@@ -10,6 +10,9 @@
| * :doc:`cubical_complex_user` | * :doc:`cubical_complex_ref` |
| | * :doc:`periodic_cubical_complex_ref` |
+--------------------------------------------------------------------------+--------------------------------------------------------------+-------------------------------------------------------------+
+ | | * :doc:`cubical_complex_tflow_itf_ref` | :requires: `TensorFlow <installation.html#tensorflow>`_ |
+ | | | |
+ +--------------------------------------------------------------------------+--------------------------------------------------------------+-------------------------------------------------------------+
| .. image:: | * :doc:`cubical_complex_sklearn_itf_ref` | :Requires: `Scikit-learn <installation.html#scikit-learn>`_ |
| img/sklearn.png | | |
| :target: https://scikit-learn.org | | |
diff --git a/src/python/doc/cubical_complex_tflow_itf_ref.rst b/src/python/doc/cubical_complex_tflow_itf_ref.rst
new file mode 100644
index 00000000..b32f5e47
--- /dev/null
+++ b/src/python/doc/cubical_complex_tflow_itf_ref.rst
@@ -0,0 +1,40 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
+TensorFlow layer for cubical persistence
+########################################
+
+.. include:: differentiation_sum.inc
+
+Example of gradient computed from cubical persistence
+-----------------------------------------------------
+
+.. testcode::
+
+ from gudhi.tensorflow import CubicalLayer
+ import tensorflow as tf
+
+ X = tf.Variable([[0.,2.,2.],[2.,2.,2.],[2.,2.,1.]], dtype=tf.float32, trainable=True)
+ cl = CubicalLayer(homology_dimensions=[0])
+
+ with tf.GradientTape() as tape:
+ dgm = cl.call(X)[0][0]
+ loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0])))
+
+ grads = tape.gradient(loss, [X])
+ print(grads[0].numpy())
+
+.. testoutput::
+
+ [[ 0. 0. 0. ]
+ [ 0. 0.5 0. ]
+ [ 0. 0. -0.5]]
+
+Documentation for CubicalLayer
+------------------------------
+
+.. autoclass:: gudhi.tensorflow.CubicalLayer
+ :members:
+ :special-members: __init__
+ :show-inheritance:
diff --git a/src/python/doc/differentiation_sum.inc b/src/python/doc/differentiation_sum.inc
new file mode 100644
index 00000000..3aec33df
--- /dev/null
+++ b/src/python/doc/differentiation_sum.inc
@@ -0,0 +1,12 @@
+.. list-table::
+ :widths: 40 30 30
+ :header-rows: 0
+
+ * - :Since: GUDHI 3.5.0
+ - :License: MIT
+ - :Requires: `TensorFlow <installation.html#tensorflow>`_
+
+We provide TensorFlow 2 models that can handle automatic differentiation for the computation of persistence diagrams from complexes available in the Gudhi library.
+This includes simplex trees, cubical complexes and Vietoris-Rips complexes. A detailed example of how to use these layers in practice is available
+in the following `notebook <https://github.com/GUDHI/TDA-tutorial/blob/master/Tuto-GUDHI-optimization.ipynb>`_. Note that even if TensorFlow GPU is enabled, all
+internal computations using Gudhi will be done on CPU.
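
A rough sketch of the optimization pattern these layers enable (the loss, optimizer and learning rate below are illustrative, not part of the library)::

    import tensorflow as tf
    from gudhi.tensorflow import CubicalLayer

    # The pixel values are the trainable parameters of this toy model.
    X = tf.Variable([[0., 2., 2.], [2., 2., 2.], [2., 2., 1.]], dtype=tf.float32, trainable=True)
    layer = CubicalLayer(homology_dimensions=[0])
    optimizer = tf.keras.optimizers.SGD(learning_rate=0.1)

    for _ in range(10):
        with tf.GradientTape() as tape:
            dgm = layer.call(X)[0][0]  # finite part of the H0 diagram
            # Total (halved, squared) persistence of the finite H0 points.
            loss = tf.math.reduce_sum(tf.square(.5 * (dgm[:, 1] - dgm[:, 0])))
        gradients = tape.gradient(loss, [X])
        optimizer.apply_gradients(zip(gradients, [X]))
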
diff --git a/src/python/doc/installation.rst b/src/python/doc/installation.rst
index cff84691..4eefd415 100644
--- a/src/python/doc/installation.rst
+++ b/src/python/doc/installation.rst
@@ -175,7 +175,7 @@ A complete configuration would be :
Scikit-learn version 1.0.1
POT version 0.8.0
HNSWlib found
- PyKeOps version [pyKeOps]: 1.5
+ PyKeOps version [pyKeOps]: 2.1
EagerPy version 0.30.0
TensorFlow version 2.7.0
Sphinx version 4.3.0
@@ -396,7 +396,11 @@ mathematics, science, and engineering.
TensorFlow
----------
-`TensorFlow <https://www.tensorflow.org>`_ is currently only used in some automatic differentiation tests.
+The :doc:`cubical complex </cubical_complex_tflow_itf_ref>`, :doc:`simplex tree </ls_simplex_tree_tflow_itf_ref>`
+and :doc:`Rips complex </rips_complex_tflow_itf_ref>` modules require `TensorFlow <https://www.tensorflow.org>`_
+in order to be incorporated into neural nets.
+
+`TensorFlow <https://www.tensorflow.org>`_ is also used in some automatic differentiation tests.
Bug reports and contributions
*****************************
diff --git a/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst b/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst
new file mode 100644
index 00000000..9d7d633f
--- /dev/null
+++ b/src/python/doc/ls_simplex_tree_tflow_itf_ref.rst
@@ -0,0 +1,53 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
+TensorFlow layer for lower-star persistence on simplex trees
+############################################################
+
+.. include:: differentiation_sum.inc
+
+Example of gradient computed from lower-star filtration of a simplex tree
+-------------------------------------------------------------------------
+
+.. testcode::
+
+ from gudhi.tensorflow import LowerStarSimplexTreeLayer
+ import tensorflow as tf
+ import gudhi as gd
+
+ st = gd.SimplexTree()
+ st.insert([0, 1])
+ st.insert([1, 2])
+ st.insert([2, 3])
+ st.insert([3, 4])
+ st.insert([4, 5])
+ st.insert([5, 6])
+ st.insert([6, 7])
+ st.insert([7, 8])
+ st.insert([8, 9])
+ st.insert([9, 10])
+
+ F = tf.Variable([6.,4.,3.,4.,5.,4.,3.,2.,3.,4.,5.], dtype=tf.float32, trainable=True)
+ sl = LowerStarSimplexTreeLayer(simplextree=st, homology_dimensions=[0])
+
+ with tf.GradientTape() as tape:
+ dgm = sl.call(F)[0][0]
+ loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0])))
+
+ grads = tape.gradient(loss, [F])
+ print(grads[0].indices.numpy())
+ print(grads[0].values.numpy())
+
+.. testoutput::
+
+ [2 4]
+ [-1. 1.]
+
+Documentation for LowerStarSimplexTreeLayer
+-------------------------------------------
+
+.. autoclass:: gudhi.tensorflow.LowerStarSimplexTreeLayer
+ :members:
+ :special-members: __init__
+ :show-inheritance:
diff --git a/src/python/doc/rips_complex_sum.inc b/src/python/doc/rips_complex_sum.inc
index 2cb24990..6931ebee 100644
--- a/src/python/doc/rips_complex_sum.inc
+++ b/src/python/doc/rips_complex_sum.inc
@@ -11,4 +11,7 @@
| | | |
+----------------------------------------------------------------+------------------------------------------------------------------------+----------------------------------------------------------------------------------+
| * :doc:`rips_complex_user` | * :doc:`rips_complex_ref` |
- +----------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------+
+ +----------------------------------------------------------------+------------------------------------------------------------------------+----------------------------------------------------------------------------------+
+ | | * :doc:`rips_complex_tflow_itf_ref` | :requires: `TensorFlow <installation.html#tensorflow>`_ |
+ | | | |
+ +----------------------------------------------------------------+------------------------------------------------------------------------+----------------------------------------------------------------------------------+
diff --git a/src/python/doc/rips_complex_tflow_itf_ref.rst b/src/python/doc/rips_complex_tflow_itf_ref.rst
new file mode 100644
index 00000000..3ce75868
--- /dev/null
+++ b/src/python/doc/rips_complex_tflow_itf_ref.rst
@@ -0,0 +1,48 @@
+:orphan:
+
+.. To get rid of WARNING: document isn't included in any toctree
+
+TensorFlow layer for Vietoris-Rips persistence
+##############################################
+
+.. include:: differentiation_sum.inc
+
+Example of gradient computed from Vietoris-Rips persistence
+-----------------------------------------------------------
+
+.. testsetup::
+
+ import numpy
+ numpy.set_printoptions(precision=4)
+
+.. testcode::
+
+ from gudhi.tensorflow import RipsLayer
+ import tensorflow as tf
+
+ X = tf.Variable([[1.,1.],[2.,2.]], dtype=tf.float32, trainable=True)
+ rl = RipsLayer(maximum_edge_length=2., homology_dimensions=[0])
+
+ with tf.GradientTape() as tape:
+ dgm = rl.call(X)[0][0]
+ loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0])))
+
+ grads = tape.gradient(loss, [X])
+ print(grads[0].numpy())
+
+.. testcleanup::
+
+ numpy.set_printoptions(precision=8)
+
+.. testoutput::
+
+ [[-0.5 -0.5]
+ [ 0.5 0.5]]
+
+Documentation for RipsLayer
+---------------------------
+
+.. autoclass:: gudhi.tensorflow.RipsLayer
+ :members:
+ :special-members: __init__
+ :show-inheritance:
diff --git a/src/python/doc/simplex_tree_sum.inc b/src/python/doc/simplex_tree_sum.inc
index a8858f16..3ad1292c 100644
--- a/src/python/doc/simplex_tree_sum.inc
+++ b/src/python/doc/simplex_tree_sum.inc
@@ -1,13 +1,16 @@
.. table::
:widths: 30 40 30
- +----------------------------------------------------------------+------------------------------------------------------------------------+-----------------------------+
- | .. figure:: | The simplex tree is an efficient and flexible data structure for | :Author: Clément Maria |
- | ../../doc/Simplex_tree/Simplex_tree_representation.png | representing general (filtered) simplicial complexes. | |
- | :alt: Simplex tree representation | | :Since: GUDHI 2.0.0 |
- | :figclass: align-center | The data structure is described in | |
- | | :cite:`boissonnatmariasimplextreealgorithmica` | :License: MIT |
- | | | |
- +----------------------------------------------------------------+------------------------------------------------------------------------+-----------------------------+
- | * :doc:`simplex_tree_user` | * :doc:`simplex_tree_ref` |
- +----------------------------------------------------------------+------------------------------------------------------------------------------------------------------+
+ +----------------------------------------------------------------+------------------------------------------------------------------------+---------------------------------------------------------+
+ | .. figure:: | The simplex tree is an efficient and flexible data structure for | :Author: Clément Maria |
+ | ../../doc/Simplex_tree/Simplex_tree_representation.png | representing general (filtered) simplicial complexes. | |
+ | :alt: Simplex tree representation | | :Since: GUDHI 2.0.0 |
+ | :figclass: align-center | The data structure is described in | |
+ | | :cite:`boissonnatmariasimplextreealgorithmica` | :License: MIT |
+ | | | |
+ +----------------------------------------------------------------+------------------------------------------------------------------------+---------------------------------------------------------+
+ | * :doc:`simplex_tree_user` | * :doc:`simplex_tree_ref` |
+ +----------------------------------------------------------------+------------------------------------------------------------------------+---------------------------------------------------------+
+ | | * :doc:`ls_simplex_tree_tflow_itf_ref` | :requires: `TensorFlow <installation.html#tensorflow>`_ |
+ | | | |
+ +----------------------------------------------------------------+------------------------------------------------------------------------+---------------------------------------------------------+
diff --git a/src/python/gudhi/simplex_tree.pyx b/src/python/gudhi/simplex_tree.pyx
index 521a7763..05bfe22e 100644
--- a/src/python/gudhi/simplex_tree.pyx
+++ b/src/python/gudhi/simplex_tree.pyx
@@ -487,9 +487,9 @@ cdef class SimplexTree:
otherwise it is kept. The algorithm then proceeds with the next candidate.
.. warning::
- Several candidates of the same dimension may be inserted simultaneously before calling `block_simplex`, so
- if you examine the complex in `block_simplex`, you may hit a few simplices of the same dimension that have
- not been vetted by `block_simplex` yet, or have already been rejected but not yet removed.
+ Several candidates of the same dimension may be inserted simultaneously before calling `blocker_func`, so
+ if you examine the complex in `blocker_func`, you may hit a few simplices of the same dimension that have
+ not been vetted by `blocker_func` yet, or have already been rejected but not yet removed.
:param max_dim: Expansion maximal dimension value.
:type max_dim: int
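
A minimal sketch of how a blocker can be used with the expansion_with_blocker method this docstring belongs to (the graph and the blocking rule are illustrative)::

    import gudhi

    st = gudhi.SimplexTree()
    # A small filtered graph; expansion proposes higher-dimensional candidates.
    st.insert([0, 1], 1.0)
    st.insert([1, 2], 1.0)
    st.insert([0, 2], 2.0)

    # blocker_func receives each candidate simplex: returning True rejects it,
    # returning False keeps it. Here we reject any candidate containing vertex 2.
    st.expansion_with_blocker(2, lambda simplex: 2 in simplex)

    print(st.dimension())  # the triangle [0, 1, 2] was blocked, so this prints 1
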
diff --git a/src/python/gudhi/tensorflow/__init__.py b/src/python/gudhi/tensorflow/__init__.py
new file mode 100644
index 00000000..1599cf52
--- /dev/null
+++ b/src/python/gudhi/tensorflow/__init__.py
@@ -0,0 +1,5 @@
+from .cubical_layer import CubicalLayer
+from .lower_star_simplex_tree_layer import LowerStarSimplexTreeLayer
+from .rips_layer import RipsLayer
+
+__all__ = ["LowerStarSimplexTreeLayer", "RipsLayer", "CubicalLayer"]
diff --git a/src/python/gudhi/tensorflow/cubical_layer.py b/src/python/gudhi/tensorflow/cubical_layer.py
new file mode 100644
index 00000000..3304e719
--- /dev/null
+++ b/src/python/gudhi/tensorflow/cubical_layer.py
@@ -0,0 +1,82 @@
+import numpy as np
+import tensorflow as tf
+from ..cubical_complex import CubicalComplex
+
+######################
+# Cubical filtration #
+######################
+
+# The parameters of the model are the pixel values.
+
+def _Cubical(Xflat, Xdim, dimensions, homology_coeff_field):
+ # Parameters: Xflat (flattened image),
+    #             Xdim (shape of non-flattened image),
+    #             dimensions (homology dimensions),
+    #             homology_coeff_field (homology field coefficient)
+
+ # Compute the persistence pairs with Gudhi
+ # We reverse the dimensions because CubicalComplex uses Fortran ordering
+ cc = CubicalComplex(dimensions=Xdim[::-1], top_dimensional_cells=Xflat)
+ cc.compute_persistence(homology_coeff_field=homology_coeff_field)
+
+    # Retrieve and output image indices/pixels corresponding to positive and negative simplices
+ cof_pp = cc.cofaces_of_persistence_pairs()
+
+ L_cofs = []
+ for dim in dimensions:
+
+ try:
+ cof = cof_pp[0][dim]
+ except IndexError:
+ cof = np.array([])
+
+ L_cofs.append(np.array(cof, dtype=np.int32))
+
+ return L_cofs
+
+class CubicalLayer(tf.keras.layers.Layer):
+ """
+ TensorFlow layer for computing the persistent homology of a cubical complex
+ """
+ def __init__(self, homology_dimensions, min_persistence=None, homology_coeff_field=11, **kwargs):
+ """
+ Constructor for the CubicalLayer class
+
+ Parameters:
+ homology_dimensions (List[int]): list of homology dimensions
+ min_persistence (List[float]): minimum distance-to-diagonal of the points in the output persistence diagrams (default None, in which case 0. is used for all dimensions)
+ homology_coeff_field (int): homology field coefficient. Must be a prime number. Default value is 11. Max is 46337.
+ """
+ super().__init__(dynamic=True, **kwargs)
+ self.dimensions = homology_dimensions
+ self.min_persistence = min_persistence if min_persistence != None else [0.] * len(self.dimensions)
+ self.hcf = homology_coeff_field
+ assert len(self.min_persistence) == len(self.dimensions)
+
+ def call(self, X):
+ """
+ Compute persistence diagram associated to a cubical complex filtered by some pixel values
+
+ Parameters:
+ X (TensorFlow variable): pixel values of the cubical complex
+
+ Returns:
+            List[Tuple[tf.Tensor,tf.Tensor]]: List of cubical persistence diagrams. The length of this list is the same as that of dimensions, i.e., there is one persistence diagram per homology dimension provided in the input list dimensions. Moreover, the finite and essential parts of the persistence diagrams are provided separately: each element of this list is a tuple of size two that contains the finite and essential parts of the corresponding persistence diagram, of shapes [num_finite_points, 2] and [num_essential_points, 1] respectively. Note that the essential part is always empty in cubical persistence diagrams, except in homology dimension zero, where the essential part always contains a single point, with abscissa equal to the smallest value in the complex, and infinite ordinate
+ """
+ # Compute pixels associated to positive and negative simplices
+ # Don't compute gradient for this operation
+ Xflat = tf.reshape(X, [-1])
+ Xdim, Xflat_numpy = X.shape, Xflat.numpy()
+ indices_list = _Cubical(Xflat_numpy, Xdim, self.dimensions, self.hcf)
+ index_essential = np.argmin(Xflat_numpy) # index of minimum pixel value for essential persistence diagram
+ # Get persistence diagram by simply picking the corresponding entries in the image
+ self.dgms = []
+ for idx_dim, dimension in enumerate(self.dimensions):
+ finite_dgm = tf.reshape(tf.gather(Xflat, indices_list[idx_dim]), [-1,2])
+ essential_dgm = tf.reshape(tf.gather(Xflat, index_essential), [-1,1]) if dimension == 0 else tf.zeros([0, 1])
+ min_pers = self.min_persistence[idx_dim]
+ if min_pers >= 0:
+ persistent_indices = tf.where(tf.math.abs(finite_dgm[:,1]-finite_dgm[:,0]) > min_pers)
+ self.dgms.append((tf.reshape(tf.gather(finite_dgm, indices=persistent_indices), [-1,2]), essential_dgm))
+ else:
+ self.dgms.append((finite_dgm, essential_dgm))
+ return self.dgms
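
To make the return structure documented in CubicalLayer.call concrete, a small usage sketch (values are illustrative; only the shapes matter here)::

    import tensorflow as tf
    from gudhi.tensorflow import CubicalLayer

    X = tf.Variable([[0., 2., 2.], [2., 2., 2.], [2., 2., 1.]], dtype=tf.float32)
    layer = CubicalLayer(homology_dimensions=[0, 1])

    dgms = layer.call(X)  # one (finite, essential) pair per requested dimension
    for dim, (finite_part, essential_part) in zip([0, 1], dgms):
        # finite_part has shape [num_finite_points, 2], essential_part has shape
        # [num_essential_points, 1]; only dimension 0 carries an essential point here.
        print(dim, finite_part.shape, essential_part.shape)
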
diff --git a/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py b/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py
new file mode 100644
index 00000000..5a8e5b75
--- /dev/null
+++ b/src/python/gudhi/tensorflow/lower_star_simplex_tree_layer.py
@@ -0,0 +1,87 @@
+import numpy as np
+import tensorflow as tf
+
+#########################################
+# Lower star filtration on simplex tree #
+#########################################
+
+# The parameters of the model are the vertex function values of the simplex tree.
+
+def _LowerStarSimplexTree(simplextree, filtration, dimensions, homology_coeff_field):
+ # Parameters: simplextree (simplex tree on which to compute persistence)
+ # filtration (function values on the vertices of st),
+ # dimensions (homology dimensions),
+ # homology_coeff_field (homology field coefficient)
+
+ simplextree.reset_filtration(-np.inf, 0)
+
+ # Assign new filtration values
+ for i in range(simplextree.num_vertices()):
+ simplextree.assign_filtration([i], filtration[i])
+ simplextree.make_filtration_non_decreasing()
+
+ # Compute persistence diagram
+ simplextree.compute_persistence(homology_coeff_field=homology_coeff_field)
+
+ # Get vertex pairs for optimization. First, get all simplex pairs
+ pairs = simplextree.lower_star_persistence_generators()
+
+ L_indices = []
+ for dimension in dimensions:
+
+ finite_pairs = pairs[0][dimension] if len(pairs[0]) >= dimension+1 else np.empty(shape=[0,2])
+ essential_pairs = pairs[1][dimension] if len(pairs[1]) >= dimension+1 else np.empty(shape=[0,1])
+
+ finite_indices = np.array(finite_pairs.flatten(), dtype=np.int32)
+ essential_indices = np.array(essential_pairs.flatten(), dtype=np.int32)
+
+ L_indices.append((finite_indices, essential_indices))
+
+ return L_indices
+
+class LowerStarSimplexTreeLayer(tf.keras.layers.Layer):
+ """
+ TensorFlow layer for computing lower-star persistence out of a simplex tree
+ """
+ def __init__(self, simplextree, homology_dimensions, min_persistence=None, homology_coeff_field=11, **kwargs):
+ """
+ Constructor for the LowerStarSimplexTreeLayer class
+
+ Parameters:
+ simplextree (gudhi.SimplexTree): underlying simplex tree. Its vertices MUST be named with integers from 0 to n-1, where n is its number of vertices. Note that its filtration values are modified in each call of the class.
+ homology_dimensions (List[int]): list of homology dimensions
+ min_persistence (List[float]): minimum distance-to-diagonal of the points in the output persistence diagrams (default None, in which case 0. is used for all dimensions)
+ homology_coeff_field (int): homology field coefficient. Must be a prime number. Default value is 11. Max is 46337.
+ """
+ super().__init__(dynamic=True, **kwargs)
+ self.dimensions = homology_dimensions
+ self.simplextree = simplextree
+ self.min_persistence = min_persistence if min_persistence != None else [0. for _ in range(len(self.dimensions))]
+ self.hcf = homology_coeff_field
+ assert len(self.min_persistence) == len(self.dimensions)
+
+ def call(self, filtration):
+ """
+ Compute lower-star persistence diagram associated to a function defined on the vertices of the simplex tree
+
+ Parameters:
+            filtration (TensorFlow variable): filter function values over the vertices of the simplex tree. The ith entry of filtration corresponds to vertex i in self.simplextree
+
+ Returns:
+            List[Tuple[tf.Tensor,tf.Tensor]]: List of lower-star persistence diagrams. The length of this list is the same as that of dimensions, i.e., there is one persistence diagram per homology dimension provided in the input list dimensions. Moreover, the finite and essential parts of the persistence diagrams are provided separately: each element of this list is a tuple of size two that contains the finite and essential parts of the corresponding persistence diagram, of shapes [num_finite_points, 2] and [num_essential_points, 1] respectively
+ """
+ # Don't try to compute gradients for the vertex pairs
+ indices = _LowerStarSimplexTree(self.simplextree, filtration.numpy(), self.dimensions, self.hcf)
+ # Get persistence diagrams
+ self.dgms = []
+ for idx_dim, dimension in enumerate(self.dimensions):
+ finite_dgm = tf.reshape(tf.gather(filtration, indices[idx_dim][0]), [-1,2])
+ essential_dgm = tf.reshape(tf.gather(filtration, indices[idx_dim][1]), [-1,1])
+ min_pers = self.min_persistence[idx_dim]
+ if min_pers >= 0:
+ persistent_indices = tf.where(tf.math.abs(finite_dgm[:,1]-finite_dgm[:,0]) > min_pers)
+ self.dgms.append((tf.reshape(tf.gather(finite_dgm, indices=persistent_indices),[-1,2]), essential_dgm))
+ else:
+ self.dgms.append((finite_dgm, essential_dgm))
+ return self.dgms
+
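
Since the diagram entries are gathered from individual vertex values, gradients with respect to the filtration come back as sparse tf.IndexedSlices (as in the documentation example above); a short sketch with an illustrative simplex tree and filtration::

    import tensorflow as tf
    import gudhi as gd
    from gudhi.tensorflow import LowerStarSimplexTreeLayer

    st = gd.SimplexTree()
    for edge in [[0, 1], [1, 2], [2, 3]]:
        st.insert(edge)

    F = tf.Variable([2., 0., 1., 0.5], dtype=tf.float32, trainable=True)
    layer = LowerStarSimplexTreeLayer(simplextree=st, homology_dimensions=[0])

    with tf.GradientTape() as tape:
        finite_dgm = layer.call(F)[0][0]
        loss = tf.math.reduce_sum(finite_dgm[:, 1] - finite_dgm[:, 0])

    grad = tape.gradient(loss, [F])[0]
    print(grad.indices.numpy(), grad.values.numpy())  # only birth/death vertices appear
    dense = tf.convert_to_tensor(grad)                # densify if a full vector is needed
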
diff --git a/src/python/gudhi/tensorflow/rips_layer.py b/src/python/gudhi/tensorflow/rips_layer.py
new file mode 100644
index 00000000..2a73472c
--- /dev/null
+++ b/src/python/gudhi/tensorflow/rips_layer.py
@@ -0,0 +1,93 @@
+import numpy as np
+import tensorflow as tf
+from ..rips_complex import RipsComplex
+
+############################
+# Vietoris-Rips filtration #
+############################
+
+# The parameters of the model are the point coordinates.
+
+def _Rips(DX, max_edge, dimensions, homology_coeff_field):
+ # Parameters: DX (distance matrix),
+ # max_edge (maximum edge length for Rips filtration),
+    #             dimensions (homology dimensions),
+    #             homology_coeff_field (homology field coefficient)
+
+ # Compute the persistence pairs with Gudhi
+ rc = RipsComplex(distance_matrix=DX, max_edge_length=max_edge)
+ st = rc.create_simplex_tree(max_dimension=max(dimensions)+1)
+ st.compute_persistence(homology_coeff_field=homology_coeff_field)
+ pairs = st.flag_persistence_generators()
+
+ L_indices = []
+ for dimension in dimensions:
+
+ if dimension == 0:
+ finite_pairs = pairs[0]
+ essential_pairs = pairs[2]
+ else:
+ finite_pairs = pairs[1][dimension-1] if len(pairs[1]) >= dimension else np.empty(shape=[0,4])
+ essential_pairs = pairs[3][dimension-1] if len(pairs[3]) >= dimension else np.empty(shape=[0,2])
+
+ finite_indices = np.array(finite_pairs.flatten(), dtype=np.int32)
+ essential_indices = np.array(essential_pairs.flatten(), dtype=np.int32)
+
+ L_indices.append((finite_indices, essential_indices))
+
+ return L_indices
+
+class RipsLayer(tf.keras.layers.Layer):
+ """
+ TensorFlow layer for computing Rips persistence out of a point cloud
+ """
+ def __init__(self, homology_dimensions, maximum_edge_length=np.inf, min_persistence=None, homology_coeff_field=11, **kwargs):
+ """
+ Constructor for the RipsLayer class
+
+ Parameters:
+ maximum_edge_length (float): maximum edge length for the Rips complex
+ homology_dimensions (List[int]): list of homology dimensions
+ min_persistence (List[float]): minimum distance-to-diagonal of the points in the output persistence diagrams (default None, in which case 0. is used for all dimensions)
+ homology_coeff_field (int): homology field coefficient. Must be a prime number. Default value is 11. Max is 46337.
+ """
+ super().__init__(dynamic=True, **kwargs)
+ self.max_edge = maximum_edge_length
+ self.dimensions = homology_dimensions
+ self.min_persistence = min_persistence if min_persistence != None else [0. for _ in range(len(self.dimensions))]
+ self.hcf = homology_coeff_field
+ assert len(self.min_persistence) == len(self.dimensions)
+
+ def call(self, X):
+ """
+ Compute Rips persistence diagram associated to a point cloud
+
+ Parameters:
+ X (TensorFlow variable): point cloud of shape [number of points, number of dimensions]
+
+ Returns:
+            List[Tuple[tf.Tensor,tf.Tensor]]: List of Rips persistence diagrams. The length of this list is the same as that of dimensions, i.e., there is one persistence diagram per homology dimension provided in the input list dimensions. Moreover, the finite and essential parts of the persistence diagrams are provided separately: each element of this list is a tuple of size two that contains the finite and essential parts of the corresponding persistence diagram, of shapes [num_finite_points, 2] and [num_essential_points, 1] respectively
+ """
+ # Compute distance matrix
+ DX = tf.norm(tf.expand_dims(X, 1)-tf.expand_dims(X, 0), axis=2)
+ # Compute vertices associated to positive and negative simplices
+ # Don't compute gradient for this operation
+ indices = _Rips(DX.numpy(), self.max_edge, self.dimensions, self.hcf)
+ # Get persistence diagrams by simply picking the corresponding entries in the distance matrix
+ self.dgms = []
+ for idx_dim, dimension in enumerate(self.dimensions):
+ cur_idx = indices[idx_dim]
+ if dimension > 0:
+ finite_dgm = tf.reshape(tf.gather_nd(DX, tf.reshape(cur_idx[0], [-1,2])), [-1,2])
+ essential_dgm = tf.reshape(tf.gather_nd(DX, tf.reshape(cur_idx[1], [-1,2])), [-1,1])
+ else:
+ reshaped_cur_idx = tf.reshape(cur_idx[0], [-1,3])
+ finite_dgm = tf.concat([tf.zeros([reshaped_cur_idx.shape[0],1]), tf.reshape(tf.gather_nd(DX, reshaped_cur_idx[:,1:]), [-1,1])], axis=1)
+ essential_dgm = tf.zeros([cur_idx[1].shape[0],1])
+ min_pers = self.min_persistence[idx_dim]
+ if min_pers >= 0:
+ persistent_indices = tf.where(tf.math.abs(finite_dgm[:,1]-finite_dgm[:,0]) > min_pers)
+ self.dgms.append((tf.reshape(tf.gather(finite_dgm, indices=persistent_indices),[-1,2]), essential_dgm))
+ else:
+ self.dgms.append((finite_dgm, essential_dgm))
+ return self.dgms
+
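
A sketch of the point-cloud optimization loop this layer is designed for (loss, optimizer, learning rate and number of steps are all illustrative)::

    import numpy as np
    import tensorflow as tf
    from gudhi.tensorflow import RipsLayer

    np.random.seed(0)
    X = tf.Variable(np.random.uniform(size=(20, 2)).astype(np.float32), trainable=True)
    layer = RipsLayer(maximum_edge_length=np.inf, homology_dimensions=[0])
    optimizer = tf.keras.optimizers.Adam(learning_rate=0.05)

    for _ in range(20):
        with tf.GradientTape() as tape:
            finite_dgm = layer.call(X)[0][0]
            # Maximize total H0 persistence, i.e. push the points away from each other.
            loss = -tf.math.reduce_sum(finite_dgm[:, 1] - finite_dgm[:, 0])
        gradients = tape.gradient(loss, [X])
        optimizer.apply_gradients(zip(gradients, [X]))
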
diff --git a/src/python/test/test_diff.py b/src/python/test/test_diff.py
new file mode 100644
index 00000000..dca001a9
--- /dev/null
+++ b/src/python/test/test_diff.py
@@ -0,0 +1,78 @@
+from gudhi.tensorflow import *
+import numpy as np
+import tensorflow as tf
+import gudhi as gd
+
+def test_rips_diff():
+
+ Xinit = np.array([[1.,1.],[2.,2.]], dtype=np.float32)
+ X = tf.Variable(initial_value=Xinit, trainable=True)
+ rl = RipsLayer(maximum_edge_length=2., homology_dimensions=[0])
+
+ with tf.GradientTape() as tape:
+ dgm = rl.call(X)[0][0]
+ loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0])))
+ grads = tape.gradient(loss, [X])
+ assert tf.norm(grads[0]-tf.constant([[-.5,-.5],[.5,.5]]),1) <= 1e-6
+
+def test_cubical_diff():
+
+ Xinit = np.array([[0.,2.,2.],[2.,2.,2.],[2.,2.,1.]], dtype=np.float32)
+ X = tf.Variable(initial_value=Xinit, trainable=True)
+ cl = CubicalLayer(homology_dimensions=[0])
+
+ with tf.GradientTape() as tape:
+ dgm = cl.call(X)[0][0]
+ loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0])))
+ grads = tape.gradient(loss, [X])
+ assert tf.norm(grads[0]-tf.constant([[0.,0.,0.],[0.,.5,0.],[0.,0.,-.5]]),1) <= 1e-6
+
+def test_nonsquare_cubical_diff():
+
+ Xinit = np.array([[-1.,1.,0.],[1.,1.,1.]], dtype=np.float32)
+ X = tf.Variable(initial_value=Xinit, trainable=True)
+ cl = CubicalLayer(homology_dimensions=[0])
+
+ with tf.GradientTape() as tape:
+ dgm = cl.call(X)[0][0]
+ loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0])))
+ grads = tape.gradient(loss, [X])
+ assert tf.norm(grads[0]-tf.constant([[0.,0.5,-0.5],[0.,0.,0.]]),1) <= 1e-6
+
+def test_st_diff():
+
+ st = gd.SimplexTree()
+ st.insert([0])
+ st.insert([1])
+ st.insert([2])
+ st.insert([3])
+ st.insert([4])
+ st.insert([5])
+ st.insert([6])
+ st.insert([7])
+ st.insert([8])
+ st.insert([9])
+ st.insert([10])
+ st.insert([0, 1])
+ st.insert([1, 2])
+ st.insert([2, 3])
+ st.insert([3, 4])
+ st.insert([4, 5])
+ st.insert([5, 6])
+ st.insert([6, 7])
+ st.insert([7, 8])
+ st.insert([8, 9])
+ st.insert([9, 10])
+
+ Finit = np.array([6.,4.,3.,4.,5.,4.,3.,2.,3.,4.,5.], dtype=np.float32)
+ F = tf.Variable(initial_value=Finit, trainable=True)
+ sl = LowerStarSimplexTreeLayer(simplextree=st, homology_dimensions=[0])
+
+ with tf.GradientTape() as tape:
+ dgm = sl.call(F)[0][0]
+ loss = tf.math.reduce_sum(tf.square(.5*(dgm[:,1]-dgm[:,0])))
+ grads = tape.gradient(loss, [F])
+
+ assert tf.math.reduce_all(tf.math.equal(grads[0].indices, tf.constant([2,4])))
+ assert tf.math.reduce_all(tf.math.equal(grads[0].values, tf.constant([-1.,1.])))
+
diff --git a/src/python/test/test_dtm.py b/src/python/test/test_dtm.py
index e46d616c..b276f041 100755
--- a/src/python/test/test_dtm.py
+++ b/src/python/test/test_dtm.py
@@ -91,11 +91,11 @@ def test_density():
def test_dtm_overflow_warnings():
pts = numpy.array([[10., 100000000000000000000000000000.], [1000., 100000000000000000000000000.]])
-
- with warnings.catch_warnings(record=True) as w:
- # TODO Test "keops" implementation as well when next version of pykeops (current is 1.5) is released (should fix the problem (cf. issue #543))
- dtm = DistanceToMeasure(2, implementation="hnsw")
- r = dtm.fit_transform(pts)
- assert len(w) == 1
- assert issubclass(w[0].category, RuntimeWarning)
- assert "Overflow" in str(w[0].message)
+ impl_warn = ["keops", "hnsw"]
+ for impl in impl_warn:
+ with warnings.catch_warnings(record=True) as w:
+ dtm = DistanceToMeasure(2, implementation=impl)
+ r = dtm.fit_transform(pts)
+ assert len(w) == 1
+ assert issubclass(w[0].category, RuntimeWarning)
+ assert "Overflow" in str(w[0].message)