author     vrouvrea <vrouvrea@636b058d-ea47-450e-bf9e-a15bfbe3eedb>  2017-11-22 09:31:29 +0000
committer  vrouvrea <vrouvrea@636b058d-ea47-450e-bf9e-a15bfbe3eedb>  2017-11-22 09:31:29 +0000
commit     d42e0a508caf5aeb2944d097932801e1e86126f0 (patch)
tree       7a12f3d9cd57f6b67399a2c6cd44a409b5e1b8fc /src
parent     3e1f686df11ec3e1ad02a18f12249621620ac969 (diff)
parent     36784120de86410bfbe51c563910df0d8718e2e9 (diff)
Merge last trunk modifications
git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/set_dimension_mechanism_precision@2931 636b058d-ea47-450e-bf9e-a15bfbe3eedb

Former-commit-id: 44ca09283b600e6af453eb3507bb4ec92b31948b
Diffstat (limited to 'src')
-rw-r--r--  src/Alpha_complex/utilities/CMakeLists.txt  65
-rw-r--r--  src/Alpha_complex/utilities/README  172
-rw-r--r--  src/Alpha_complex/utilities/alpha_complex_3d_helper.h (renamed from src/Persistent_cohomology/example/alpha_complex_3d_helper.h)  0
-rw-r--r--  src/Alpha_complex/utilities/alpha_complex_3d_persistence.cpp (renamed from src/Persistent_cohomology/example/alpha_complex_3d_persistence.cpp)  83
-rw-r--r--  src/Alpha_complex/utilities/alpha_complex_persistence.cpp (renamed from src/Persistent_cohomology/example/alpha_complex_persistence.cpp)  63
-rw-r--r--  src/Alpha_complex/utilities/exact_alpha_complex_3d_persistence.cpp (renamed from src/Persistent_cohomology/example/exact_alpha_complex_3d_persistence.cpp)  83
-rw-r--r--  src/Alpha_complex/utilities/periodic_alpha_complex_3d_persistence.cpp (renamed from src/Persistent_cohomology/example/periodic_alpha_complex_3d_persistence.cpp)  95
-rw-r--r--  src/Alpha_complex/utilities/weighted_alpha_complex_3d_persistence.cpp (renamed from src/Persistent_cohomology/example/weighted_alpha_complex_3d_persistence.cpp)  99
-rw-r--r--  src/Alpha_complex/utilities/weighted_periodic_alpha_complex_3d_persistence.cpp (renamed from src/Persistent_cohomology/example/weighted_periodic_alpha_complex_3d_persistence.cpp)  0
-rw-r--r--  src/Bitmap_cubical_complex/doc/Gudhi_Cubical_Complex_doc.h  2
-rw-r--r--  src/Bitmap_cubical_complex/example/CMakeLists.txt  26
-rw-r--r--  src/Bitmap_cubical_complex/utilities/Bitmap_cubical_complex.cpp (renamed from src/Bitmap_cubical_complex/example/Bitmap_cubical_complex.cpp)  0
-rw-r--r--  src/Bitmap_cubical_complex/utilities/Bitmap_cubical_complex_periodic_boundary_conditions.cpp (renamed from src/Bitmap_cubical_complex/example/Bitmap_cubical_complex_periodic_boundary_conditions.cpp)  0
-rw-r--r--  src/Bitmap_cubical_complex/utilities/CMakeLists.txt  29
-rw-r--r--  src/Bitmap_cubical_complex/utilities/README  24
-rw-r--r--  src/Bottleneck_distance/example/CMakeLists.txt  34
-rw-r--r--  src/Bottleneck_distance/example/README  19
-rw-r--r--  src/Bottleneck_distance/utilities/CMakeLists.txt  16
-rw-r--r--  src/Bottleneck_distance/utilities/README  10
-rw-r--r--  src/Bottleneck_distance/utilities/bottleneck_read_file.cpp (renamed from src/Bottleneck_distance/example/bottleneck_read_file_example.cpp)  2
-rw-r--r--  src/CMakeLists.txt  1
-rw-r--r--  src/Contraction/example/CMakeLists.txt  3
-rw-r--r--  src/Contraction/example/Garland_heckbert.cpp  5
-rw-r--r--  src/Doxyfile  6
-rw-r--r--  src/Nerve_GIC/doc/COPYRIGHT  19
-rw-r--r--  src/Nerve_GIC/doc/GIC.jpg  bin 0 -> 457905 bytes
-rw-r--r--  src/Nerve_GIC/doc/GIC.pdf  bin 0 -> 26073 bytes
-rw-r--r--  src/Nerve_GIC/doc/Intro_graph_induced_complex.h  216
-rw-r--r--  src/Nerve_GIC/doc/coordGICvisu.pdf  bin 0 -> 20745 bytes
-rw-r--r--  src/Nerve_GIC/doc/coordGICvisu2.jpg  bin 0 -> 1259868 bytes
-rw-r--r--  src/Nerve_GIC/doc/funcGICvisu.jpg  bin 0 -> 71647 bytes
-rw-r--r--  src/Nerve_GIC/doc/gicvisu.jpg  bin 0 -> 167192 bytes
-rw-r--r--  src/Nerve_GIC/doc/gicvoronoivisu.jpg  bin 0 -> 37785 bytes
-rw-r--r--  src/Nerve_GIC/doc/nerve.png  bin 0 -> 45129 bytes
-rw-r--r--  src/Nerve_GIC/doc/nervevisu.jpg  bin 0 -> 127619 bytes
-rw-r--r--  src/Nerve_GIC/example/CMakeLists.txt  29
-rw-r--r--  src/Nerve_GIC/example/CoordGIC.cpp  93
-rw-r--r--  src/Nerve_GIC/example/FuncGIC.cpp  94
-rw-r--r--  src/Nerve_GIC/example/GIC.cpp  95
-rwxr-xr-x  src/Nerve_GIC/example/KeplerMapperVisuFromTxtFile.py  72
-rw-r--r--  src/Nerve_GIC/example/Nerve.cpp  95
-rw-r--r--  src/Nerve_GIC/example/Nerve.txt  43
-rw-r--r--  src/Nerve_GIC/example/VoronoiGIC.cpp  90
-rwxr-xr-x  src/Nerve_GIC/example/km.py  390
-rw-r--r--  src/Nerve_GIC/example/km.py.COPYRIGHT  26
-rw-r--r--  src/Nerve_GIC/include/gudhi/GIC.h  1166
-rw-r--r--  src/Nerve_GIC/test/CMakeLists.txt  14
-rw-r--r--  src/Nerve_GIC/test/data/cloud  6
-rw-r--r--  src/Nerve_GIC/test/data/cover  3
-rw-r--r--  src/Nerve_GIC/test/data/graph  3
-rw-r--r--  src/Nerve_GIC/test/test_GIC.cpp  90
-rw-r--r--  src/Persistent_cohomology/doc/Intro_persistent_cohomology.h  28
-rw-r--r--  src/Persistent_cohomology/example/CMakeLists.txt  73
-rw-r--r--  src/Persistent_cohomology/example/README  121
-rw-r--r--  src/Rips_complex/utilities/CMakeLists.txt  21
-rw-r--r--  src/Rips_complex/utilities/README  67
-rw-r--r--  src/Rips_complex/utilities/rips_distance_matrix_persistence.cpp (renamed from src/Persistent_cohomology/example/rips_distance_matrix_persistence.cpp)  0
-rw-r--r--  src/Rips_complex/utilities/rips_persistence.cpp (renamed from src/Persistent_cohomology/example/rips_persistence.cpp)  0
-rw-r--r--  src/Simplex_tree/example/example_alpha_shapes_3_simplex_tree_from_off_file.cpp  2
-rw-r--r--  src/Witness_complex/doc/Witness_complex_doc.h  14
-rw-r--r--  src/Witness_complex/example/CMakeLists.txt  32
-rw-r--r--  src/Witness_complex/example/example_strong_witness_complex_off.cpp  8
-rw-r--r--  src/Witness_complex/example/example_witness_complex_off.cpp  6
-rw-r--r--  src/Witness_complex/example/example_witness_complex_sphere.cpp  4
-rw-r--r--  src/Witness_complex/include/gudhi/Strong_witness_complex.h  6
-rw-r--r--  src/Witness_complex/utilities/CMakeLists.txt  28
-rw-r--r--  src/Witness_complex/utilities/README  72
-rw-r--r--  src/Witness_complex/utilities/strong_witness_persistence.cpp (renamed from src/Witness_complex/example/example_strong_witness_persistence.cpp)  6
-rw-r--r--  src/Witness_complex/utilities/weak_witness_persistence.cpp (renamed from src/Witness_complex/example/example_witness_complex_persistence.cpp)  6
-rw-r--r--  src/common/doc/file_formats.h  15
-rw-r--r--  src/common/doc/main_page.h  134
-rw-r--r--  src/common/utilities/README  26
-rw-r--r--  src/cython/cython/simplex_tree.pyx  67
-rwxr-xr-x  src/cython/doc/python3-sphinx-build.py  2
74 files changed, 3622 insertions, 497 deletions
diff --git a/src/Alpha_complex/utilities/CMakeLists.txt b/src/Alpha_complex/utilities/CMakeLists.txt
new file mode 100644
index 00000000..79d9e7dd
--- /dev/null
+++ b/src/Alpha_complex/utilities/CMakeLists.txt
@@ -0,0 +1,65 @@
+cmake_minimum_required(VERSION 2.6)
+project(Alpha_complex_utilities)
+
+if(CGAL_FOUND)
+ add_executable(alpha_complex_3d_persistence alpha_complex_3d_persistence.cpp)
+ target_link_libraries(alpha_complex_3d_persistence ${CGAL_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY})
+ add_executable(exact_alpha_complex_3d_persistence exact_alpha_complex_3d_persistence.cpp)
+ target_link_libraries(exact_alpha_complex_3d_persistence ${CGAL_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY})
+ add_executable(weighted_alpha_complex_3d_persistence weighted_alpha_complex_3d_persistence.cpp)
+ target_link_libraries(weighted_alpha_complex_3d_persistence ${CGAL_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY})
+
+ if (TBB_FOUND)
+ target_link_libraries(alpha_complex_3d_persistence ${TBB_LIBRARIES})
+ target_link_libraries(exact_alpha_complex_3d_persistence ${TBB_LIBRARIES})
+ target_link_libraries(weighted_alpha_complex_3d_persistence ${TBB_LIBRARIES})
+ endif(TBB_FOUND)
+
+ add_test(NAME Alpha_complex_utilities_alpha_complex_3d_persistence COMMAND $<TARGET_FILE:alpha_complex_3d_persistence>
+ "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off" "-p" "2" "-m" "0.45")
+ add_test(NAME Alpha_complex_utilities_exact_alpha_complex_3d COMMAND $<TARGET_FILE:exact_alpha_complex_3d_persistence>
+ "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off" "-p" "2" "-m" "0.45")
+ add_test(NAME Alpha_complex_utilities_weighted_alpha_complex_3d COMMAND $<TARGET_FILE:weighted_alpha_complex_3d_persistence>
+ "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off" "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.weights" "-p" "2" "-m" "0.45")
+
+ install(TARGETS alpha_complex_3d_persistence DESTINATION bin)
+ install(TARGETS exact_alpha_complex_3d_persistence DESTINATION bin)
+ install(TARGETS weighted_alpha_complex_3d_persistence DESTINATION bin)
+
+ if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
+ add_executable (alpha_complex_persistence alpha_complex_persistence.cpp)
+ target_link_libraries(alpha_complex_persistence ${CGAL_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY})
+
+ add_executable(periodic_alpha_complex_3d_persistence periodic_alpha_complex_3d_persistence.cpp)
+ target_link_libraries(periodic_alpha_complex_3d_persistence ${CGAL_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY})
+
+ if (TBB_FOUND)
+ target_link_libraries(alpha_complex_persistence ${TBB_LIBRARIES})
+ target_link_libraries(periodic_alpha_complex_3d_persistence ${TBB_LIBRARIES})
+ endif(TBB_FOUND)
+ add_test(NAME Alpha_complex_utilities_alpha_complex_persistence COMMAND $<TARGET_FILE:alpha_complex_persistence>
+ "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off" "-p" "2" "-m" "0.45")
+ add_test(NAME Alpha_complex_utilities_periodic_alpha_complex_3d_persistence COMMAND $<TARGET_FILE:periodic_alpha_complex_3d_persistence>
+ "${CMAKE_SOURCE_DIR}/data/points/grid_10_10_10_in_0_1.off" "${CMAKE_SOURCE_DIR}/data/points/iso_cuboid_3_in_0_1.txt" "-p" "2" "-m" "0")
+
+ install(TARGETS alpha_complex_persistence DESTINATION bin)
+ install(TARGETS periodic_alpha_complex_3d_persistence DESTINATION bin)
+
+ endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
+
+ if (NOT CGAL_VERSION VERSION_LESS 4.11.0)
+ add_executable(weighted_periodic_alpha_complex_3d_persistence weighted_periodic_alpha_complex_3d_persistence.cpp)
+ target_link_libraries(weighted_periodic_alpha_complex_3d_persistence ${CGAL_LIBRARY})
+ if (TBB_FOUND)
+ target_link_libraries(weighted_periodic_alpha_complex_3d_persistence ${TBB_LIBRARIES})
+ endif(TBB_FOUND)
+
+ add_test(NAME Alpha_complex_utilities_weighted_periodic_alpha_complex_3d COMMAND $<TARGET_FILE:weighted_periodic_alpha_complex_3d_persistence>
+ "${CMAKE_SOURCE_DIR}/data/points/grid_10_10_10_in_0_1.off" "${CMAKE_SOURCE_DIR}/data/points/grid_10_10_10_in_0_1.weights"
+ "${CMAKE_SOURCE_DIR}/data/points/iso_cuboid_3_in_0_1.txt" "3" "1.0")
+
+ install(TARGETS weighted_periodic_alpha_complex_3d_persistence DESTINATION bin)
+
+ endif (NOT CGAL_VERSION VERSION_LESS 4.11.0)
+
+endif(CGAL_FOUND)
diff --git a/src/Alpha_complex/utilities/README b/src/Alpha_complex/utilities/README
new file mode 100644
index 00000000..c3dd170b
--- /dev/null
+++ b/src/Alpha_complex/utilities/README
@@ -0,0 +1,172 @@
+# Alpha_complex #
+
+## `alpha_complex_3d_persistence` ##
+This program computes the persistent homology with coefficient field Z/pZ of the 3D alpha complex built from a 3D point cloud. The output diagram contains one bar per line, written with the convention:
+
+`p dim b d`
+
+where `dim` is the dimension of the homological feature, `b` and `d` are respectively the birth and death of the feature, and `p` is the characteristic of the field *Z/pZ* used for homology coefficients (`p` must be a prime number).
+
+**Usage**
+`alpha_complex_3d_persistence [options] <OFF input file>`
+
+**Allowed options**
+* `-h [ --help ]` Produce help message
+* `-o [ --output-file ]` Name of file in which the persistence diagram is written. Default print in standard output.
+* `-p [ --field-charac ]` (default=11) Characteristic p of the coefficient field Z/pZ for computing homology.
+* `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals.
+
+**Example**
+`alpha_complex_3d_persistence ../../data/points/tore3D_300.off -p 2 -m 0.45`
+
+outputs:
+```
+Simplex_tree dim: 3
+2 0 0 inf
+2 1 0.0682162 1.0001
+2 1 0.0934117 1.00003
+2 2 0.56444 1.03938
+```
+
+Here we retrieve the expected Betti numbers of a 3D torus:
+```
+Betti numbers[0] = 1
+Betti numbers[1] = 2
+Betti numbers[2] = 1
+```
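+
+The Betti numbers above are read off the diagram by counting, for each dimension, the bars that are still alive at a scale chosen inside the long, stable range of the filtration (here any alpha² value around 0.8 works). Below is a minimal post-processing sketch; it is not part of the utility, and the file name `diagram.pers` and the chosen scale are illustrative assumptions (the diagram being saved beforehand with `-o diagram.pers`):
+```
+// Count, per dimension, the "p dim b d" bars alive at a chosen filtration value t.
+#include <fstream>
+#include <iostream>
+#include <limits>
+#include <map>
+#include <sstream>
+#include <string>
+
+int main() {
+  const double t = 0.8;                // scale (alpha²) at which Betti numbers are read
+  std::ifstream diag("diagram.pers");  // diagram written by alpha_complex_3d_persistence -o diagram.pers
+  std::map<int, int> betti;
+  std::string line;
+  while (std::getline(diag, line)) {
+    std::istringstream iss(line);
+    int p, dim;
+    double birth;
+    std::string death;
+    if (!(iss >> p >> dim >> birth >> death)) continue;
+    double d = (death == "inf") ? std::numeric_limits<double>::infinity() : std::stod(death);
+    if (birth <= t && t < d) ++betti[dim];  // the bar [birth, d) contains t
+  }
+  for (const auto& kv : betti)
+    std::cout << "Betti numbers[" << kv.first << "] = " << kv.second << "\n";
+  return 0;
+}
+```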
+
+N.B.:
+* `alpha_complex_3d_persistence` only accepts OFF files in dimension 3.
+* Filtration values are alpha square values.
+
+
+
+## `exact_alpha_complex_3d_persistence` ##
+Same as `alpha_complex_3d_persistence`, but using exact computation. It is slower, but necessary when, for instance, the points lie on a grid.
+
+
+
+## `weighted_alpha_complex_3d_persistence` ##
+Same as `alpha_complex_3d_persistence`, but using weighted points.
+
+**Usage**
+`weighted_alpha_complex_3d_persistence [options] <OFF input file> <weights input file>`
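+
+The weights file is expected to contain one weight per line, in the same order as the points of the OFF file (this is the format announced by the utility's option help). A small illustrative file for a 4-point cloud could look like:
+```
+0.5
+0.5
+0.5
+0.5
+```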
+
+**Allowed options**
+* `-h [ --help ]` Produce help message
+* `-o [ --output-file ]` Name of file in which the persistence diagram is written. Default print in standard output.
+* `-p [ --field-charac ]` (default=11) Characteristic p of the coefficient field Z/pZ for computing homology.
+* `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals.
+
+**Example**
+`weighted_alpha_complex_3d_persistence ../../data/points/tore3D_300.off ../../data/points/tore3D_300.weights -p 2 -m 0.45`
+
+outputs:
+```
+Simplex_tree dim: 3
+2 0 -1 inf
+2 1 -0.931784 0.000103311
+2 1 -0.906588 2.60165e-05
+2 2 -0.43556 0.0393798
+```
+
+N.B.:
+* Weight values are explained in the CGAL [Alpha shape](https://doc.cgal.org/latest/Alpha_shapes_3/index.html#title0)
+and [Regular triangulation](https://doc.cgal.org/latest/Triangulation_3/index.html#Triangulation3secclassRegulartriangulation) documentation.
+* Filtration values are alpha square values.
+
+
+## `periodic_alpha_complex_3d_persistence` ##
+Same as `alpha_complex_3d_persistence`, but using a periodic 3D alpha shape.
+
+**Usage**
+`periodic_alpha_complex_3d_persistence [options] <OFF input file> <cuboid file>`
+where
+`<OFF input file>` is the path to the input point cloud in OFF format.
+`<cuboid file>` is the path to the file describing the periodic domain. It must be in the format described [here](http://gudhi.gforge.inria.fr/doc/latest/fileformats.html#FileFormatsIsoCuboid).
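+
+The cuboid file simply lists the two opposite corners of the periodic domain, following the `min_hx min_hy min_hz` / `max_hx max_hy max_hz` layout announced by the utility's option help. An illustrative file for the unit cube:
+```
+0. 0. 0.
+1. 1. 1.
+```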
+
+**Allowed options**
+* `-h [ --help ]` Produce help message
+* `-o [ --output-file ]` Name of file in which the persistence diagram is written. Default print in standard output.
+* `-p [ --field-charac ]` (default=11) Characteristic p of the coefficient field Z/pZ for computing homology.
+* `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals
+
+
+**Example**
+`periodic_alpha_complex_3d_persistence ../../data/points/grid_10_10_10_in_0_1.off ../../data/points/iso_cuboid_3_in_0_1.txt -p 3 -m 1.0`
+
+outputs:
+```
+Periodic Delaunay computed.
+Simplex_tree dim: 3
+3 0 0 inf
+3 1 0.0025 inf
+3 1 0.0025 inf
+3 1 0.0025 inf
+3 2 0.005 inf
+3 2 0.005 inf
+3 2 0.005 inf
+3 3 0.0075 inf
+```
+
+Here we retrieve the expected Betti numbers of a 3D iso-oriented cuboid:
+```
+Betti numbers[0] = 1
+Betti numbers[1] = 3
+Betti numbers[2] = 3
+Betti numbers[3] = 1
+```
+
+N.B.:
+* Cuboid file must be in the format described [here](http://gudhi.gforge.inria.fr/doc/latest/fileformats.html#FileFormatsIsoCuboid).
+* Filtration values are alpha square values.
+
+
+
+
+## `alpha_complex_persistence` ##
+This program computes the persistent homology with coefficient field Z/pZ of the dD alpha complex built from a dD point cloud. The output diagram contains one bar per line, written with the convention:
+
+`p dim b d`
+
+where `dim` is the dimension of the homological feature, `b` and `d` are respectively the birth and death of the feature, and `p` is the characteristic of the field *Z/pZ* used for homology coefficients (`p` must be a prime number).
+
+**Usage**
+`alpha_complex_persistence [options] <OFF input file>`
+
+**Allowed options**
+
+* `-h [ --help ]` Produce help message
+* `-o [ --output-file ]` Name of file in which the persistence diagram is written. Default print in standard output.
+* `-r [ --max-alpha-square-value ]` (default = inf) Maximal alpha square value for the Alpha complex construction.
+* `-p [ --field-charac ]` (default = 11) Characteristic p of the coefficient field Z/pZ for computing homology.
+* `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals.
+
+**Example**
+`alpha_complex_persistence -r 32 -p 2 -m 0.45 ../../data/points/tore3D_300.off`
+
+outputs:
+```
+Alpha complex is of dimension 3 - 9273 simplices - 300 vertices.
+Simplex_tree dim: 3
+2 0 0 inf
+2 1 0.0682162 1.0001
+2 1 0.0934117 1.00003
+2 2 0.56444 1.03938
+```
+
+Here we retrieve the expected Betti numbers of a 3D torus:
+```
+Betti numbers[0] = 1
+Betti numbers[1] = 2
+Betti numbers[2] = 1
+```
+
+N.B.:
+* `alpha_complex_persistence` only accepts OFF files; the points may be of any dimension d.
+* Filtration values are alpha square values.
diff --git a/src/Persistent_cohomology/example/alpha_complex_3d_helper.h b/src/Alpha_complex/utilities/alpha_complex_3d_helper.h
index 6b3b7d5d..6b3b7d5d 100644
--- a/src/Persistent_cohomology/example/alpha_complex_3d_helper.h
+++ b/src/Alpha_complex/utilities/alpha_complex_3d_helper.h
diff --git a/src/Persistent_cohomology/example/alpha_complex_3d_persistence.cpp b/src/Alpha_complex/utilities/alpha_complex_3d_persistence.cpp
index 26196a6f..0a021a0f 100644
--- a/src/Persistent_cohomology/example/alpha_complex_3d_persistence.cpp
+++ b/src/Alpha_complex/utilities/alpha_complex_3d_persistence.cpp
@@ -20,6 +20,7 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
+#include <boost/program_options.hpp>
#include <boost/variant.hpp>
#include <gudhi/Simplex_tree.h>
@@ -77,30 +78,23 @@ using Simplex_tree_vector_vertex = std::vector<Simplex_tree_vertex>;
using Persistent_cohomology =
Gudhi::persistent_cohomology::Persistent_cohomology<ST, Gudhi::persistent_cohomology::Field_Zp>;
-void usage(const std::string& progName) {
- std::cerr << "Usage: " << progName
- << " path_to_the_OFF_file coeff_field_characteristic[integer > 0] min_persistence[float >= -1.0]\n";
- exit(-1);
-}
+void program_options(int argc, char *argv[], std::string &off_file_points, std::string &output_file_diag,
+ int &coeff_field_characteristic, Filtration_value &min_persistence);
-int main(int argc, char* const argv[]) {
- // program args management
- if (argc != 4) {
- std::cerr << "Error: Number of arguments (" << argc << ") is not correct\n";
- usage(argv[0]);
- }
+int main(int argc, char **argv) {
+ std::string off_file_points;
+ std::string output_file_diag;
+ int coeff_field_characteristic;
+ Filtration_value min_persistence;
- int coeff_field_characteristic = atoi(argv[2]);
- Filtration_value min_persistence = strtof(argv[3], nullptr);
+ program_options(argc, argv, off_file_points, output_file_diag, coeff_field_characteristic, min_persistence);
- // Read points from file
- std::string offInputFile(argv[1]);
// Read the OFF file (input file name given as parameter) and triangulate points
- Gudhi::Points_3D_off_reader<Point_3> off_reader(offInputFile);
+ Gudhi::Points_3D_off_reader<Point_3> off_reader(off_file_points);
// Check the read operation was correct
if (!off_reader.is_valid()) {
- std::cerr << "Unable to read file " << offInputFile << std::endl;
- usage(argv[0]);
+ std::cerr << "Unable to read file " << off_file_points << std::endl;
+ exit(-1);
}
// Retrieve the triangulation
@@ -229,7 +223,58 @@ int main(int argc, char* const argv[]) {
pcoh.compute_persistent_cohomology(min_persistence);
- pcoh.output_diagram();
+ // Output the diagram in filediag
+ if (output_file_diag.empty()) {
+ pcoh.output_diagram();
+ } else {
+ std::cout << "Result in file: " << output_file_diag << std::endl;
+ std::ofstream out(output_file_diag);
+ pcoh.output_diagram(out);
+ out.close();
+ }
return 0;
}
+
+void program_options(int argc, char *argv[], std::string &off_file_points, std::string &output_file_diag,
+ int &coeff_field_characteristic, Filtration_value &min_persistence) {
+ namespace po = boost::program_options;
+ po::options_description hidden("Hidden options");
+ hidden.add_options()("input-file", po::value<std::string>(&off_file_points),
+ "Name of file containing a point set. Format is one point per line: X1 ... Xd ");
+
+ po::options_description visible("Allowed options", 100);
+ visible.add_options()("help,h", "produce help message")(
+ "output-file,o", po::value<std::string>(&output_file_diag)->default_value(std::string()),
+ "Name of file in which the persistence diagram is written. Default print in std::cout")(
+ "field-charac,p", po::value<int>(&coeff_field_characteristic)->default_value(11),
+ "Characteristic p of the coefficient field Z/pZ for computing homology.")(
+ "min-persistence,m", po::value<Filtration_value>(&min_persistence),
+ "Minimal lifetime of homology feature to be recorded. Default is 0. Enter a negative value to see zero length "
+ "intervals");
+
+ po::positional_options_description pos;
+ pos.add("input-file", 1);
+
+ po::options_description all;
+ all.add(visible).add(hidden);
+
+ po::variables_map vm;
+ po::store(po::command_line_parser(argc, argv).options(all).positional(pos).run(), vm);
+ po::notify(vm);
+
+ if (vm.count("help") || !vm.count("input-file")) {
+ std::cout << std::endl;
+ std::cout << "Compute the persistent homology with coefficient field Z/pZ \n";
+ std::cout << "of a 3D Alpha complex defined on a set of input points.\n \n";
+ std::cout << "The output diagram contains one bar per line, written with the convention: \n";
+ std::cout << " p dim b d \n";
+ std::cout << "where dim is the dimension of the homological feature,\n";
+ std::cout << "b and d are respectively the birth and death of the feature and \n";
+ std::cout << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
+
+ std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
+ std::cout << visible << std::endl;
+ std::abort();
+ }
+}
diff --git a/src/Persistent_cohomology/example/alpha_complex_persistence.cpp b/src/Alpha_complex/utilities/alpha_complex_persistence.cpp
index 9e84e91f..2105220a 100644
--- a/src/Persistent_cohomology/example/alpha_complex_persistence.cpp
+++ b/src/Alpha_complex/utilities/alpha_complex_persistence.cpp
@@ -14,12 +14,9 @@
using Simplex_tree = Gudhi::Simplex_tree<>;
using Filtration_value = Simplex_tree::Filtration_value;
-void program_options(int argc, char * argv[]
- , std::string & off_file_points
- , std::string & output_file_diag
- , Filtration_value & alpha_square_max_value
- , int & coeff_field_characteristic
- , Filtration_value & min_persistence);
+void program_options(int argc, char *argv[], std::string &off_file_points, std::string &output_file_diag,
+ Filtration_value &alpha_square_max_value, int &coeff_field_characteristic,
+ Filtration_value &min_persistence);
int main(int argc, char **argv) {
std::string off_file_points;
@@ -28,13 +25,13 @@ int main(int argc, char **argv) {
int coeff_field_characteristic;
Filtration_value min_persistence;
- program_options(argc, argv, off_file_points, output_file_diag, alpha_square_max_value,
- coeff_field_characteristic, min_persistence);
+ program_options(argc, argv, off_file_points, output_file_diag, alpha_square_max_value, coeff_field_characteristic,
+ min_persistence);
// ----------------------------------------------------------------------------
// Init of an alpha complex from an OFF file
// ----------------------------------------------------------------------------
- using Kernel = CGAL::Epick_d< CGAL::Dynamic_dimension_tag >;
+ using Kernel = CGAL::Epick_d<CGAL::Dynamic_dimension_tag>;
Gudhi::alpha_complex::Alpha_complex<Kernel> alpha_complex_from_file(off_file_points);
Simplex_tree simplex;
@@ -42,17 +39,16 @@ int main(int argc, char **argv) {
// ----------------------------------------------------------------------------
// Display information about the alpha complex
// ----------------------------------------------------------------------------
- std::cout << "Simplicial complex is of dimension " << simplex.dimension() <<
- " - " << simplex.num_simplices() << " simplices - " <<
- simplex.num_vertices() << " vertices." << std::endl;
+ std::cout << "Simplicial complex is of dimension " << simplex.dimension() << " - " << simplex.num_simplices()
+ << " simplices - " << simplex.num_vertices() << " vertices." << std::endl;
// Sort the simplices in the order of the filtration
simplex.initialize_filtration();
std::cout << "Simplex_tree dim: " << simplex.dimension() << std::endl;
// Compute the persistence diagram of the complex
- Gudhi::persistent_cohomology::Persistent_cohomology< Simplex_tree,
- Gudhi::persistent_cohomology::Field_Zp > pcoh(simplex);
+ Gudhi::persistent_cohomology::Persistent_cohomology<Simplex_tree, Gudhi::persistent_cohomology::Field_Zp> pcoh(
+ simplex);
// initializes the coefficient field for homology
pcoh.init_coefficients(coeff_field_characteristic);
@@ -72,30 +68,26 @@ int main(int argc, char **argv) {
return 0;
}
-void program_options(int argc, char * argv[]
- , std::string & off_file_points
- , std::string & output_file_diag
- , Filtration_value & alpha_square_max_value
- , int & coeff_field_characteristic
- , Filtration_value & min_persistence) {
+void program_options(int argc, char *argv[], std::string &off_file_points, std::string &output_file_diag,
+ Filtration_value &alpha_square_max_value, int &coeff_field_characteristic,
+ Filtration_value &min_persistence) {
namespace po = boost::program_options;
po::options_description hidden("Hidden options");
- hidden.add_options()
- ("input-file", po::value<std::string>(&off_file_points),
- "Name of file containing a point set. Format is one point per line: X1 ... Xd ");
+ hidden.add_options()("input-file", po::value<std::string>(&off_file_points),
+ "Name of file containing a point set. Format is one point per line: X1 ... Xd ");
po::options_description visible("Allowed options", 100);
- visible.add_options()
- ("help,h", "produce help message")
- ("output-file,o", po::value<std::string>(&output_file_diag)->default_value(std::string()),
- "Name of file in which the persistence diagram is written. Default print in std::cout")
- ("max-alpha-square-value,r",
- po::value<Filtration_value>(&alpha_square_max_value)->default_value(std::numeric_limits<Filtration_value>::infinity()),
- "Maximal alpha square value for the Alpha complex construction.")
- ("field-charac,p", po::value<int>(&coeff_field_characteristic)->default_value(11),
- "Characteristic p of the coefficient field Z/pZ for computing homology.")
- ("min-persistence,m", po::value<Filtration_value>(&min_persistence),
- "Minimal lifetime of homology feature to be recorded. Default is 0. Enter a negative value to see zero length intervals");
+ visible.add_options()("help,h", "produce help message")(
+ "output-file,o", po::value<std::string>(&output_file_diag)->default_value(std::string()),
+ "Name of file in which the persistence diagram is written. Default print in std::cout")(
+ "max-alpha-square-value,r", po::value<Filtration_value>(&alpha_square_max_value)
+ ->default_value(std::numeric_limits<Filtration_value>::infinity()),
+ "Maximal alpha square value for the Alpha complex construction.")(
+ "field-charac,p", po::value<int>(&coeff_field_characteristic)->default_value(11),
+ "Characteristic p of the coefficient field Z/pZ for computing homology.")(
+ "min-persistence,m", po::value<Filtration_value>(&min_persistence),
+ "Minimal lifetime of homology feature to be recorded. Default is 0. Enter a negative value to see zero length "
+ "intervals");
po::positional_options_description pos;
pos.add("input-file", 1);
@@ -104,8 +96,7 @@ void program_options(int argc, char * argv[]
all.add(visible).add(hidden);
po::variables_map vm;
- po::store(po::command_line_parser(argc, argv).
- options(all).positional(pos).run(), vm);
+ po::store(po::command_line_parser(argc, argv).options(all).positional(pos).run(), vm);
po::notify(vm);
if (vm.count("help") || !vm.count("input-file")) {
diff --git a/src/Persistent_cohomology/example/exact_alpha_complex_3d_persistence.cpp b/src/Alpha_complex/utilities/exact_alpha_complex_3d_persistence.cpp
index 2e2bfd2f..9a266418 100644
--- a/src/Persistent_cohomology/example/exact_alpha_complex_3d_persistence.cpp
+++ b/src/Alpha_complex/utilities/exact_alpha_complex_3d_persistence.cpp
@@ -20,6 +20,7 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
+#include <boost/program_options.hpp>
#include <boost/variant.hpp>
#include <gudhi/Simplex_tree.h>
@@ -78,30 +79,23 @@ using Simplex_tree_vector_vertex = std::vector<Simplex_tree_vertex>;
using Persistent_cohomology =
Gudhi::persistent_cohomology::Persistent_cohomology<ST, Gudhi::persistent_cohomology::Field_Zp>;
-void usage(const std::string& progName) {
- std::cerr << "Usage: " << progName
- << " path_to_the_OFF_file coeff_field_characteristic[integer > 0] min_persistence[float >= -1.0]\n";
- exit(-1);
-}
+void program_options(int argc, char *argv[], std::string &off_file_points, std::string &output_file_diag,
+ int &coeff_field_characteristic, Filtration_value &min_persistence);
-int main(int argc, char* const argv[]) {
- // program args management
- if (argc != 4) {
- std::cerr << "Error: Number of arguments (" << argc << ") is not correct\n";
- usage(argv[0]);
- }
+int main(int argc, char **argv) {
+ std::string off_file_points;
+ std::string output_file_diag;
+ int coeff_field_characteristic;
+ Filtration_value min_persistence;
- int coeff_field_characteristic = atoi(argv[2]);
- Filtration_value min_persistence = strtof(argv[3], nullptr);
+ program_options(argc, argv, off_file_points, output_file_diag, coeff_field_characteristic, min_persistence);
- // Read points from file
- std::string offInputFile(argv[1]);
// Read the OFF file (input file name given as parameter) and triangulate points
- Gudhi::Points_3D_off_reader<Point_3> off_reader(offInputFile);
+ Gudhi::Points_3D_off_reader<Point_3> off_reader(off_file_points);
// Check the read operation was correct
if (!off_reader.is_valid()) {
- std::cerr << "Unable to read file " << offInputFile << std::endl;
- usage(argv[0]);
+ std::cerr << "Unable to read file " << off_file_points << std::endl;
+ exit(-1);
}
// Retrieve the triangulation
@@ -231,7 +225,58 @@ int main(int argc, char* const argv[]) {
pcoh.compute_persistent_cohomology(min_persistence);
- pcoh.output_diagram();
+ // Output the diagram in filediag
+ if (output_file_diag.empty()) {
+ pcoh.output_diagram();
+ } else {
+ std::cout << "Result in file: " << output_file_diag << std::endl;
+ std::ofstream out(output_file_diag);
+ pcoh.output_diagram(out);
+ out.close();
+ }
return 0;
}
+
+void program_options(int argc, char *argv[], std::string &off_file_points, std::string &output_file_diag,
+ int &coeff_field_characteristic, Filtration_value &min_persistence) {
+ namespace po = boost::program_options;
+ po::options_description hidden("Hidden options");
+ hidden.add_options()("input-file", po::value<std::string>(&off_file_points),
+ "Name of file containing a point set. Format is one point per line: X1 ... Xd ");
+
+ po::options_description visible("Allowed options", 100);
+ visible.add_options()("help,h", "produce help message")(
+ "output-file,o", po::value<std::string>(&output_file_diag)->default_value(std::string()),
+ "Name of file in which the persistence diagram is written. Default print in std::cout")(
+ "field-charac,p", po::value<int>(&coeff_field_characteristic)->default_value(11),
+ "Characteristic p of the coefficient field Z/pZ for computing homology.")(
+ "min-persistence,m", po::value<Filtration_value>(&min_persistence),
+ "Minimal lifetime of homology feature to be recorded. Default is 0. Enter a negative value to see zero length "
+ "intervals");
+
+ po::positional_options_description pos;
+ pos.add("input-file", 1);
+
+ po::options_description all;
+ all.add(visible).add(hidden);
+
+ po::variables_map vm;
+ po::store(po::command_line_parser(argc, argv).options(all).positional(pos).run(), vm);
+ po::notify(vm);
+
+ if (vm.count("help") || !vm.count("input-file")) {
+ std::cout << std::endl;
+ std::cout << "Compute the persistent homology with coefficient field Z/pZ \n";
+ std::cout << "of a 3D Alpha complex defined on a set of input points.\n \n";
+ std::cout << "The output diagram contains one bar per line, written with the convention: \n";
+ std::cout << " p dim b d \n";
+ std::cout << "where dim is the dimension of the homological feature,\n";
+ std::cout << "b and d are respectively the birth and death of the feature and \n";
+ std::cout << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
+
+ std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
+ std::cout << visible << std::endl;
+ std::abort();
+ }
+}
diff --git a/src/Persistent_cohomology/example/periodic_alpha_complex_3d_persistence.cpp b/src/Alpha_complex/utilities/periodic_alpha_complex_3d_persistence.cpp
index c6d3e236..186a58f8 100644
--- a/src/Persistent_cohomology/example/periodic_alpha_complex_3d_persistence.cpp
+++ b/src/Alpha_complex/utilities/periodic_alpha_complex_3d_persistence.cpp
@@ -20,6 +20,7 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
+#include <boost/program_options.hpp>
#include <boost/variant.hpp>
#include <gudhi/Simplex_tree.h>
@@ -83,40 +84,35 @@ using Simplex_tree_vector_vertex = std::vector<Simplex_tree_vertex>;
using Persistent_cohomology =
Gudhi::persistent_cohomology::Persistent_cohomology<ST, Gudhi::persistent_cohomology::Field_Zp>;
-void usage(const std::string& progName) {
- std::cerr << "Usage: " << progName << " path_to_the_OFF_file path_to_iso_cuboid_3_file "
- "coeff_field_characteristic[integer > 0] min_persistence[float >= -1.0]\n";
- exit(-1);
-}
+void program_options(int argc, char *argv[], std::string &off_file_points, std::string &cuboid_file,
+ std::string &output_file_diag, int &coeff_field_characteristic, Filtration_value &min_persistence);
-int main(int argc, char* const argv[]) {
- // program args management
- if (argc != 5) {
- std::cerr << "Error: Number of arguments (" << argc << ") is not correct\n";
- usage(argv[0]);
- }
+int main(int argc, char **argv) {
+ std::string off_file_points;
+ std::string cuboid_file;
+ std::string output_file_diag;
+ int coeff_field_characteristic;
+ Filtration_value min_persistence;
- int coeff_field_characteristic = atoi(argv[3]);
- Filtration_value min_persistence = strtof(argv[4], nullptr);
+ program_options(argc, argv, off_file_points, cuboid_file, output_file_diag, coeff_field_characteristic,
+ min_persistence);
- // Read points from file
- std::string offInputFile(argv[1]);
// Read the OFF file (input file name given as parameter) and triangulate points
- Gudhi::Points_3D_off_reader<Point_3> off_reader(offInputFile);
+ Gudhi::Points_3D_off_reader<Point_3> off_reader(off_file_points);
// Check the read operation was correct
if (!off_reader.is_valid()) {
- std::cerr << "Unable to read file " << offInputFile << std::endl;
- usage(argv[0]);
+ std::cerr << "Unable to read OFF file " << off_file_points << std::endl;
+ exit(-1);
}
// Read iso_cuboid_3 information from file
- std::ifstream iso_cuboid_str(argv[2]);
+ std::ifstream iso_cuboid_str(cuboid_file);
double x_min, y_min, z_min, x_max, y_max, z_max;
if (iso_cuboid_str.good()) {
iso_cuboid_str >> x_min >> y_min >> z_min >> x_max >> y_max >> z_max;
} else {
- std::cerr << "Unable to read file " << argv[2] << std::endl;
- usage(argv[0]);
+ std::cerr << "Unable to read file " << cuboid_file << std::endl;
+ exit(-1);
}
// Retrieve the triangulation
@@ -251,7 +247,62 @@ int main(int argc, char* const argv[]) {
pcoh.compute_persistent_cohomology(min_persistence);
- pcoh.output_diagram();
+ // Output the diagram in filediag
+ if (output_file_diag.empty()) {
+ pcoh.output_diagram();
+ } else {
+ std::cout << "Result in file: " << output_file_diag << std::endl;
+ std::ofstream out(output_file_diag);
+ pcoh.output_diagram(out);
+ out.close();
+ }
return 0;
}
+
+void program_options(int argc, char *argv[], std::string &off_file_points, std::string &cuboid_file,
+ std::string &output_file_diag, int &coeff_field_characteristic,
+ Filtration_value &min_persistence) {
+ namespace po = boost::program_options;
+ po::options_description hidden("Hidden options");
+ hidden.add_options()("input-file", po::value<std::string>(&off_file_points),
+ "Name of file containing a point set. Format is one point per line: X1 ... Xd ")(
+ "cuboid-file", po::value<std::string>(&cuboid_file),
+ "Name of file describing the periodic domain. Format is: min_hx min_hy min_hz\nmax_hx max_hy max_hz");
+
+ po::options_description visible("Allowed options", 100);
+ visible.add_options()("help,h", "produce help message")(
+ "output-file,o", po::value<std::string>(&output_file_diag)->default_value(std::string()),
+ "Name of file in which the persistence diagram is written. Default print in std::cout")(
+ "field-charac,p", po::value<int>(&coeff_field_characteristic)->default_value(11),
+ "Characteristic p of the coefficient field Z/pZ for computing homology.")(
+ "min-persistence,m", po::value<Filtration_value>(&min_persistence),
+ "Minimal lifetime of homology feature to be recorded. Default is 0. Enter a negative value to see zero length "
+ "intervals");
+
+ po::positional_options_description pos;
+ pos.add("input-file", 1);
+ pos.add("cuboid-file", 2);
+
+ po::options_description all;
+ all.add(visible).add(hidden);
+
+ po::variables_map vm;
+ po::store(po::command_line_parser(argc, argv).options(all).positional(pos).run(), vm);
+ po::notify(vm);
+
+ if (vm.count("help") || !vm.count("input-file") || !vm.count("cuboid-file")) {
+ std::cout << std::endl;
+ std::cout << "Compute the persistent homology with coefficient field Z/pZ \n";
+ std::cout << "of a periodic 3D Alpha complex defined on a set of input points.\n \n";
+ std::cout << "The output diagram contains one bar per line, written with the convention: \n";
+ std::cout << " p dim b d \n";
+ std::cout << "where dim is the dimension of the homological feature,\n";
+ std::cout << "b and d are respectively the birth and death of the feature and \n";
+ std::cout << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
+
+ std::cout << "Usage: " << argv[0] << " [options] input-file cuboid-file" << std::endl << std::endl;
+ std::cout << visible << std::endl;
+ std::abort();
+ }
+}
diff --git a/src/Persistent_cohomology/example/weighted_alpha_complex_3d_persistence.cpp b/src/Alpha_complex/utilities/weighted_alpha_complex_3d_persistence.cpp
index 249a7ece..0e73a99b 100644
--- a/src/Persistent_cohomology/example/weighted_alpha_complex_3d_persistence.cpp
+++ b/src/Alpha_complex/utilities/weighted_alpha_complex_3d_persistence.cpp
@@ -20,6 +20,7 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
+#include <boost/program_options.hpp>
#include <boost/variant.hpp>
#include <gudhi/Simplex_tree.h>
@@ -103,37 +104,32 @@ using Simplex_tree_vector_vertex = std::vector<Simplex_tree_vertex>;
using Persistent_cohomology =
Gudhi::persistent_cohomology::Persistent_cohomology<ST, Gudhi::persistent_cohomology::Field_Zp>;
-void usage(const std::string& progName) {
- std::cerr << "Usage: " << progName << " path_to_the_OFF_file path_to_weight_file coeff_field_characteristic[integer > "
- "0] min_persistence[float >= -1.0]\n";
- exit(-1);
-}
+void program_options(int argc, char *argv[], std::string &off_file_points, std::string &weight_file,
+ std::string &output_file_diag, int &coeff_field_characteristic, Filtration_value &min_persistence);
-int main(int argc, char* const argv[]) {
- // program args management
- if (argc != 5) {
- std::cerr << "Error: Number of arguments (" << argc << ") is not correct\n";
- usage(argv[0]);
- }
+int main(int argc, char **argv) {
+ std::string off_file_points;
+ std::string weight_file;
+ std::string output_file_diag;
+ int coeff_field_characteristic;
+ Filtration_value min_persistence;
- int coeff_field_characteristic = atoi(argv[3]);
- Filtration_value min_persistence = strtof(argv[4], nullptr);
+ program_options(argc, argv, off_file_points, weight_file, output_file_diag, coeff_field_characteristic,
+ min_persistence);
- // Read points from file
- std::string offInputFile(argv[1]);
// Read the OFF file (input file name given as parameter) and triangulate points
- Gudhi::Points_3D_off_reader<Point_3> off_reader(offInputFile);
+ Gudhi::Points_3D_off_reader<Point_3> off_reader(off_file_points);
// Check the read operation was correct
if (!off_reader.is_valid()) {
- std::cerr << "Unable to read file " << offInputFile << std::endl;
- usage(argv[0]);
+ std::cerr << "Unable to read OFF file " << off_file_points << std::endl;
+ exit(-1);
}
// Retrieve the triangulation
std::vector<Point_3> lp = off_reader.get_point_cloud();
// Read weights information from file
- std::ifstream weights_ifstr(argv[2]);
+ std::ifstream weights_ifstr(weight_file);
std::vector<Weighted_point_3> wp;
if (weights_ifstr.good()) {
double weight = 0.0;
@@ -145,12 +141,12 @@ int main(int argc, char* const argv[]) {
index++;
}
if (index != lp.size()) {
- std::cerr << "Bad number of weights in file " << argv[2] << std::endl;
- usage(argv[0]);
+ std::cerr << "Bad number of weights in file " << weight_file << std::endl;
+ exit(-1);
}
} else {
- std::cerr << "Unable to read file " << argv[2] << std::endl;
- usage(argv[0]);
+ std::cerr << "Unable to read weights file " << weight_file << std::endl;
+ exit(-1);
}
// alpha shape construction from points. CGAL has a strange behavior in REGULARIZED mode.
@@ -276,7 +272,62 @@ int main(int argc, char* const argv[]) {
pcoh.compute_persistent_cohomology(min_persistence);
- pcoh.output_diagram();
+ // Output the diagram in filediag
+ if (output_file_diag.empty()) {
+ pcoh.output_diagram();
+ } else {
+ std::cout << "Result in file: " << output_file_diag << std::endl;
+ std::ofstream out(output_file_diag);
+ pcoh.output_diagram(out);
+ out.close();
+ }
return 0;
}
+
+void program_options(int argc, char *argv[], std::string &off_file_points, std::string &weight_file,
+ std::string &output_file_diag, int &coeff_field_characteristic,
+ Filtration_value &min_persistence) {
+ namespace po = boost::program_options;
+ po::options_description hidden("Hidden options");
+ hidden.add_options()("input-file", po::value<std::string>(&off_file_points),
+ "Name of file containing a point set. Format is one point per line: X1 ... Xd ")(
+ "weight-file", po::value<std::string>(&weight_file),
+ "Name of file containing a point weights. Format is one weigt per line: W1\n...\nWn ");
+
+ po::options_description visible("Allowed options", 100);
+ visible.add_options()("help,h", "produce help message")(
+ "output-file,o", po::value<std::string>(&output_file_diag)->default_value(std::string()),
+ "Name of file in which the persistence diagram is written. Default print in std::cout")(
+ "field-charac,p", po::value<int>(&coeff_field_characteristic)->default_value(11),
+ "Characteristic p of the coefficient field Z/pZ for computing homology.")(
+ "min-persistence,m", po::value<Filtration_value>(&min_persistence),
+ "Minimal lifetime of homology feature to be recorded. Default is 0. Enter a negative value to see zero length "
+ "intervals");
+
+ po::positional_options_description pos;
+ pos.add("input-file", 1);
+ pos.add("weight-file", 2);
+
+ po::options_description all;
+ all.add(visible).add(hidden);
+
+ po::variables_map vm;
+ po::store(po::command_line_parser(argc, argv).options(all).positional(pos).run(), vm);
+ po::notify(vm);
+
+ if (vm.count("help") || !vm.count("input-file") || !vm.count("weight-file")) {
+ std::cout << std::endl;
+ std::cout << "Compute the persistent homology with coefficient field Z/pZ \n";
+ std::cout << "of a weighted 3D Alpha complex defined on a set of input points.\n \n";
+ std::cout << "The output diagram contains one bar per line, written with the convention: \n";
+ std::cout << " p dim b d \n";
+ std::cout << "where dim is the dimension of the homological feature,\n";
+ std::cout << "b and d are respectively the birth and death of the feature and \n";
+ std::cout << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
+
+ std::cout << "Usage: " << argv[0] << " [options] input-file weight-file" << std::endl << std::endl;
+ std::cout << visible << std::endl;
+ std::abort();
+ }
+}
diff --git a/src/Persistent_cohomology/example/weighted_periodic_alpha_complex_3d_persistence.cpp b/src/Alpha_complex/utilities/weighted_periodic_alpha_complex_3d_persistence.cpp
index 13634ff7..13634ff7 100644
--- a/src/Persistent_cohomology/example/weighted_periodic_alpha_complex_3d_persistence.cpp
+++ b/src/Alpha_complex/utilities/weighted_periodic_alpha_complex_3d_persistence.cpp
diff --git a/src/Bitmap_cubical_complex/doc/Gudhi_Cubical_Complex_doc.h b/src/Bitmap_cubical_complex/doc/Gudhi_Cubical_Complex_doc.h
index 5963caa3..daba15a4 100644
--- a/src/Bitmap_cubical_complex/doc/Gudhi_Cubical_Complex_doc.h
+++ b/src/Bitmap_cubical_complex/doc/Gudhi_Cubical_Complex_doc.h
@@ -144,7 +144,7 @@ namespace cubical_complex {
* Indicate that we have imposed periodic boundary conditions in the direction x, but not in the direction y.
* \section BitmapExamples Examples
- * End user programs are available in example/Bitmap_cubical_complex folder.
+ * End user programs are available in example/Bitmap_cubical_complex and utilities/Bitmap_cubical_complex folders.
*
* \copyright GNU General Public License v3.
*/
diff --git a/src/Bitmap_cubical_complex/example/CMakeLists.txt b/src/Bitmap_cubical_complex/example/CMakeLists.txt
index a0401619..99304aa4 100644
--- a/src/Bitmap_cubical_complex/example/CMakeLists.txt
+++ b/src/Bitmap_cubical_complex/example/CMakeLists.txt
@@ -1,17 +1,6 @@
cmake_minimum_required(VERSION 2.6)
project(Bitmap_cubical_complex_examples)
-add_executable ( Bitmap_cubical_complex Bitmap_cubical_complex.cpp )
-if (TBB_FOUND)
- target_link_libraries(Bitmap_cubical_complex ${TBB_LIBRARIES})
-endif()
-
-add_test(NAME Bitmap_cubical_complex_example_persistence_one_sphere COMMAND $<TARGET_FILE:Bitmap_cubical_complex>
- "${CMAKE_SOURCE_DIR}/data/bitmap/CubicalOneSphere.txt")
-
-add_test(NAME Bitmap_cubical_complex_example_persistence_two_sphere COMMAND $<TARGET_FILE:Bitmap_cubical_complex>
- "${CMAKE_SOURCE_DIR}/data/bitmap/CubicalTwoSphere.txt")
-
add_executable ( Random_bitmap_cubical_complex Random_bitmap_cubical_complex.cpp )
if (TBB_FOUND)
target_link_libraries(Random_bitmap_cubical_complex ${TBB_LIBRARIES})
@@ -19,19 +8,4 @@ endif()
add_test(NAME Bitmap_cubical_complex_example_random COMMAND $<TARGET_FILE:Random_bitmap_cubical_complex>
"2" "100" "100")
-add_executable ( Bitmap_cubical_complex_periodic_boundary_conditions Bitmap_cubical_complex_periodic_boundary_conditions.cpp )
-if (TBB_FOUND)
- target_link_libraries(Bitmap_cubical_complex_periodic_boundary_conditions ${TBB_LIBRARIES})
-endif()
-
-add_test(NAME Bitmap_cubical_complex_example_periodic_boundary_conditions_2d_torus
- COMMAND $<TARGET_FILE:Bitmap_cubical_complex_periodic_boundary_conditions>
- "${CMAKE_SOURCE_DIR}/data/bitmap/2d_torus.txt")
-
-add_test(NAME Bitmap_cubical_complex_example_periodic_boundary_conditions_3d_torus
- COMMAND $<TARGET_FILE:Bitmap_cubical_complex_periodic_boundary_conditions>
- "${CMAKE_SOURCE_DIR}/data/bitmap/3d_torus.txt")
-
-install(TARGETS Bitmap_cubical_complex DESTINATION bin)
install(TARGETS Random_bitmap_cubical_complex DESTINATION bin)
-install(TARGETS Bitmap_cubical_complex_periodic_boundary_conditions DESTINATION bin)
diff --git a/src/Bitmap_cubical_complex/example/Bitmap_cubical_complex.cpp b/src/Bitmap_cubical_complex/utilities/Bitmap_cubical_complex.cpp
index 9d1bc08c..9d1bc08c 100644
--- a/src/Bitmap_cubical_complex/example/Bitmap_cubical_complex.cpp
+++ b/src/Bitmap_cubical_complex/utilities/Bitmap_cubical_complex.cpp
diff --git a/src/Bitmap_cubical_complex/example/Bitmap_cubical_complex_periodic_boundary_conditions.cpp b/src/Bitmap_cubical_complex/utilities/Bitmap_cubical_complex_periodic_boundary_conditions.cpp
index c812cb3a..c812cb3a 100644
--- a/src/Bitmap_cubical_complex/example/Bitmap_cubical_complex_periodic_boundary_conditions.cpp
+++ b/src/Bitmap_cubical_complex/utilities/Bitmap_cubical_complex_periodic_boundary_conditions.cpp
diff --git a/src/Bitmap_cubical_complex/utilities/CMakeLists.txt b/src/Bitmap_cubical_complex/utilities/CMakeLists.txt
new file mode 100644
index 00000000..f0f80059
--- /dev/null
+++ b/src/Bitmap_cubical_complex/utilities/CMakeLists.txt
@@ -0,0 +1,29 @@
+cmake_minimum_required(VERSION 2.6)
+project(Bitmap_cubical_complex_utilities)
+
+add_executable ( Bitmap_cubical_complex Bitmap_cubical_complex.cpp )
+if (TBB_FOUND)
+ target_link_libraries(Bitmap_cubical_complex ${TBB_LIBRARIES})
+endif()
+
+add_test(NAME Bitmap_cubical_complex_utility_persistence_one_sphere COMMAND $<TARGET_FILE:Bitmap_cubical_complex>
+ "${CMAKE_SOURCE_DIR}/data/bitmap/CubicalOneSphere.txt")
+
+add_test(NAME Bitmap_cubical_complex_utility_persistence_two_sphere COMMAND $<TARGET_FILE:Bitmap_cubical_complex>
+ "${CMAKE_SOURCE_DIR}/data/bitmap/CubicalTwoSphere.txt")
+
+add_executable ( Bitmap_cubical_complex_periodic_boundary_conditions Bitmap_cubical_complex_periodic_boundary_conditions.cpp )
+if (TBB_FOUND)
+ target_link_libraries(Bitmap_cubical_complex_periodic_boundary_conditions ${TBB_LIBRARIES})
+endif()
+
+add_test(NAME Bitmap_cubical_complex_utility_periodic_boundary_conditions_2d_torus
+ COMMAND $<TARGET_FILE:Bitmap_cubical_complex_periodic_boundary_conditions>
+ "${CMAKE_SOURCE_DIR}/data/bitmap/2d_torus.txt")
+
+add_test(NAME Bitmap_cubical_complex_utility_periodic_boundary_conditions_3d_torus
+ COMMAND $<TARGET_FILE:Bitmap_cubical_complex_periodic_boundary_conditions>
+ "${CMAKE_SOURCE_DIR}/data/bitmap/3d_torus.txt")
+
+install(TARGETS Bitmap_cubical_complex DESTINATION bin)
+install(TARGETS Bitmap_cubical_complex_periodic_boundary_conditions DESTINATION bin)
diff --git a/src/Bitmap_cubical_complex/utilities/README b/src/Bitmap_cubical_complex/utilities/README
new file mode 100644
index 00000000..d9085635
--- /dev/null
+++ b/src/Bitmap_cubical_complex/utilities/README
@@ -0,0 +1,24 @@
+# Bitmap_cubical_complex #
+
+## `Bitmap_cubical_complex` ##
+This program computes the persistent homology, using the Bitmap_cubical_complex class, of cubical complexes provided as Perseus-style text files. The first line contains a single number, the dimension D of the bitmap. Lines 2 to D+1 contain the number of top-dimensional cells in each direction. Let N denote the product of these numbers. The following N lines contain the filtration values of the top-dimensional cells, which are assumed to be given in lexicographical order.
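+
+For instance, a 2-dimensional bitmap with 3 top-dimensional cells in the first direction and 2 in the second could be encoded as follows (an illustrative sketch, assuming one filtration value per line, given in lexicographical order):
+```
+2
+3
+2
+0
+0
+0
+1
+1
+1
+```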
+
+Examples:
+
+* Create a cubical complex from the Perseus-style file CubicalOneSphere.txt, compute its persistent cohomology, and write the results to a persistence file:
+`Bitmap_cubical_complex data/bitmap/CubicalOneSphere.txt`
+
+* Create a cubical complex from the Perseus-style file CubicalTwoSphere.txt, compute its persistent cohomology, and write the results to a persistence file:
+`Bitmap_cubical_complex data/bitmap/CubicalTwoSphere.txt`
+
+## `Bitmap_cubical_complex_periodic_boundary_conditions` ##
+
+Same as above, but with periodic boundary conditions.
+
+Examples:
+
+* Create a periodic cubical complex from the Perseus-style file 2d_torus.txt, compute its persistent cohomology, and write the results to a persistence file:
+`Bitmap_cubical_complex_periodic_boundary_conditions data/bitmap/2d_torus.txt`
+
+* Create a periodic cubical complex from the Perseus-style file 3d_torus.txt, compute its persistent cohomology, and write the results to a persistence file:
+`Bitmap_cubical_complex_periodic_boundary_conditions data/bitmap/3d_torus.txt`
diff --git a/src/Bottleneck_distance/example/CMakeLists.txt b/src/Bottleneck_distance/example/CMakeLists.txt
index eac617db..6095d6eb 100644
--- a/src/Bottleneck_distance/example/CMakeLists.txt
+++ b/src/Bottleneck_distance/example/CMakeLists.txt
@@ -2,37 +2,21 @@ cmake_minimum_required(VERSION 2.6)
project(Bottleneck_distance_examples)
if (NOT CGAL_VERSION VERSION_LESS 4.8.1)
- add_executable (bottleneck_read_file_example bottleneck_read_file_example.cpp)
add_executable (bottleneck_basic_example bottleneck_basic_example.cpp)
+ add_executable (alpha_rips_persistence_bottleneck_distance alpha_rips_persistence_bottleneck_distance.cpp)
+ target_link_libraries(alpha_rips_persistence_bottleneck_distance ${Boost_PROGRAM_OPTIONS_LIBRARY})
if (TBB_FOUND)
- target_link_libraries(bottleneck_read_file_example ${TBB_LIBRARIES})
+ target_link_libraries(alpha_rips_persistence_bottleneck_distance ${TBB_LIBRARIES})
target_link_libraries(bottleneck_basic_example ${TBB_LIBRARIES})
endif(TBB_FOUND)
-
+
add_test(NAME Bottleneck_distance_example_basic COMMAND $<TARGET_FILE:bottleneck_basic_example>)
-
- add_test(NAME Bottleneck_read_file_example
- COMMAND $<TARGET_FILE:bottleneck_read_file_example>
- "${CMAKE_SOURCE_DIR}/data/persistence_diagram/first.pers" "${CMAKE_SOURCE_DIR}/data/persistence_diagram/second.pers")
-
- install(TARGETS bottleneck_read_file_example DESTINATION bin)
- install(TARGETS bottleneck_basic_example DESTINATION bin)
-
-endif (NOT CGAL_VERSION VERSION_LESS 4.8.1)
-
-# Eigen3 and CGAL > 4.7.0 is required for alpha complex
-# CGAL > 4.8.1 is required for bottleneck distance =>
-if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
- add_executable (alpha_rips_persistence_bottleneck_distance alpha_rips_persistence_bottleneck_distance.cpp)
- target_link_libraries(alpha_rips_persistence_bottleneck_distance ${Boost_PROGRAM_OPTIONS_LIBRARY})
-
add_test(NAME Bottleneck_distance_example_alpha_rips_persistence_bottleneck
- COMMAND $<TARGET_FILE:alpha_rips_persistence_bottleneck_distance>
- "${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" "-r" "0.15" "-m" "0.12" "-d" "3" "-p" "3")
+ COMMAND $<TARGET_FILE:alpha_rips_persistence_bottleneck_distance>
+ "${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" "-r" "0.15" "-m" "0.12" "-d" "3" "-p" "3")
+ install(TARGETS bottleneck_basic_example DESTINATION bin)
install(TARGETS alpha_rips_persistence_bottleneck_distance DESTINATION bin)
- if (TBB_FOUND)
- target_link_libraries(alpha_rips_persistence_bottleneck_distance ${TBB_LIBRARIES})
- endif(TBB_FOUND)
-endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
+
+endif (NOT CGAL_VERSION VERSION_LESS 4.8.1)
diff --git a/src/Bottleneck_distance/example/README b/src/Bottleneck_distance/example/README
new file mode 100644
index 00000000..0e314608
--- /dev/null
+++ b/src/Bottleneck_distance/example/README
@@ -0,0 +1,19 @@
+# Bottleneck_distance #
+
+## `alpha_rips_persistence_bottleneck_distance` ##
+This program computes the persistent homology with coefficient field Z/pZ of an alpha complex and of a Rips complex built on the same set of input points, in order to compare their persistence diagrams with the bottleneck distance. The output diagrams contain one bar per line, written with the convention:
+
+`p dim b d`
+
+where `dim` is the dimension of the homological feature, `b` and `d` are respectively the birth and death of the feature, and `p` is the characteristic of the field *Z/pZ* used for homology coefficients.
+
+Usage:
+`alpha_rips_persistence_bottleneck_distance [options] <OFF input file>`
+
+Allowed options:
+
+* `-h [ --help ]` Produce help message
+* `-r [ --max-edge-length ]` (default = inf) Maximal length of an edge for the Rips complex construction.
+* `-d [ --cpx-dimension ]` (default = 1) Maximal dimension of the Rips complex we want to compute.
+* `-p [ --field-charac ]` (default = 11) Characteristic p of the coefficient field Z/pZ for computing homology.
+* `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals.
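+
+Example (mirroring the test command added in this commit's `CMakeLists.txt`; the relative path is illustrative):
+`alpha_rips_persistence_bottleneck_distance ../../data/points/tore3D_1307.off -r 0.15 -m 0.12 -d 3 -p 3`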
diff --git a/src/Bottleneck_distance/utilities/CMakeLists.txt b/src/Bottleneck_distance/utilities/CMakeLists.txt
new file mode 100644
index 00000000..063b6ae3
--- /dev/null
+++ b/src/Bottleneck_distance/utilities/CMakeLists.txt
@@ -0,0 +1,16 @@
+cmake_minimum_required(VERSION 2.6)
+project(Bottleneck_distance_utilities)
+
+if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
+ add_executable (bottleneck_read_file bottleneck_read_file.cpp)
+ if (TBB_FOUND)
+ target_link_libraries(bottleneck_read_file ${TBB_LIBRARIES})
+ endif(TBB_FOUND)
+
+ add_test(NAME Bottleneck_distance_utilities_Bottleneck_read_file
+ COMMAND $<TARGET_FILE:bottleneck_read_file>
+ "${CMAKE_SOURCE_DIR}/data/persistence_diagram/first.pers" "${CMAKE_SOURCE_DIR}/data/persistence_diagram/second.pers")
+
+ install(TARGETS bottleneck_read_file DESTINATION bin)
+
+endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
diff --git a/src/Bottleneck_distance/utilities/README b/src/Bottleneck_distance/utilities/README
new file mode 100644
index 00000000..d9fdd252
--- /dev/null
+++ b/src/Bottleneck_distance/utilities/README
@@ -0,0 +1,10 @@
+# Bottleneck_distance #
+
+## `bottleneck_read_file` ##
+This program computes the bottleneck distance between two persistence diagram files.
+
+Usage:
+`bottleneck_read_file <file_1.pers> <file_2.pers> [<tolerance>]`
+where
+`<file_1.pers>` and `<file_2.pers>` must be in the format described [here](http://gudhi.gforge.inria.fr/doc/latest/fileformats.html#FileFormatsPers).
+`<tolerance>` is an error bound on the bottleneck distance (set by default to the smallest positive double value).
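+
+For instance, with the persistence diagram files used by the CMake test of this utility (paths are illustrative):
+
+`bottleneck_read_file data/persistence_diagram/first.pers data/persistence_diagram/second.pers`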
diff --git a/src/Bottleneck_distance/example/bottleneck_read_file_example.cpp b/src/Bottleneck_distance/utilities/bottleneck_read_file.cpp
index 24d73c57..9dd52b31 100644
--- a/src/Bottleneck_distance/example/bottleneck_read_file_example.cpp
+++ b/src/Bottleneck_distance/utilities/bottleneck_read_file.cpp
@@ -31,7 +31,7 @@
int main(int argc, char** argv) {
if (argc < 3) {
std::cout << "To run this program please provide as an input two files with persistence diagrams. Each file" <<
- " should contain a birth-death pair per line. Third, optional parameter is an error bound on a bottleneck" <<
+ " should contain a birth-death pair per line. Third, optional parameter is an error bound on the bottleneck" <<
" distance (set by default to the smallest positive double value). If you set the error bound to 0, be" <<
" aware this version is exact but expensive. The program will now terminate \n";
return -1;
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index 795005b1..5c25eab5 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -24,6 +24,7 @@ add_gudhi_module(Spatial_searching)
add_gudhi_module(Subsampling)
add_gudhi_module(Tangential_complex)
add_gudhi_module(Witness_complex)
+add_gudhi_module(Nerve_GIC)
message("++ GUDHI_MODULES list is:\"${GUDHI_MODULES}\"")
diff --git a/src/Contraction/example/CMakeLists.txt b/src/Contraction/example/CMakeLists.txt
index 83594c0e..a92d1685 100644
--- a/src/Contraction/example/CMakeLists.txt
+++ b/src/Contraction/example/CMakeLists.txt
@@ -1,9 +1,10 @@
cmake_minimum_required(VERSION 2.6)
project(Contraction_examples)
-
add_executable(RipsContraction Rips_contraction.cpp)
+
add_executable(GarlandHeckbert Garland_heckbert.cpp)
+target_link_libraries(GarlandHeckbert ${Boost_TIMER_LIBRARY})
add_test(NAME Contraction_example_tore3D_0.2 COMMAND $<TARGET_FILE:RipsContraction>
"${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" "0.2")
diff --git a/src/Contraction/example/Garland_heckbert.cpp b/src/Contraction/example/Garland_heckbert.cpp
index f0cde95e..2b0dc973 100644
--- a/src/Contraction/example/Garland_heckbert.cpp
+++ b/src/Contraction/example/Garland_heckbert.cpp
@@ -29,7 +29,6 @@
#include <gudhi/Edge_contraction.h>
#include <gudhi/Skeleton_blocker.h>
#include <gudhi/Off_reader.h>
-#include <gudhi/Clock.h>
#include <iostream>
@@ -165,8 +164,6 @@ int main(int argc, char *argv[]) {
int num_contractions = atoi(argv[3]);
- Gudhi::Clock contraction_chrono("Time to simplify and enumerate simplices");
-
// constructs the contractor object with Garland Heckbert policies.
Complex_contractor contractor(complex,
new GH_cost(complex),
@@ -182,8 +179,6 @@ int main(int argc, char *argv[]) {
complex.num_edges() << " edges and " <<
complex.num_triangles() << " triangles." << std::endl;
- std::cout << contraction_chrono;
-
// write simplified complex
Gudhi::skeleton_blocker::Skeleton_blocker_off_writer<Complex> off_writer(argv[2], complex);
diff --git a/src/Doxyfile b/src/Doxyfile
index 0ef81e5c..39dcad7b 100644
--- a/src/Doxyfile
+++ b/src/Doxyfile
@@ -819,7 +819,8 @@ EXCLUDE_SYMBOLS =
# command).
EXAMPLE_PATH = biblio/ \
- example/
+ example/ \
+ utilities/
# If the value of the EXAMPLE_PATH tag contains directories, you can use the
# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and
@@ -851,7 +852,8 @@ IMAGE_PATH = doc/Skeleton_blocker/ \
doc/Subsampling/ \
doc/Spatial_searching/ \
doc/Tangential_complex/ \
- doc/Bottleneck_distance/
+ doc/Bottleneck_distance/ \
+ doc/Nerve_GIC/
# The INPUT_FILTER tag can be used to specify a program that doxygen should
# invoke to filter for each input file. Doxygen will invoke the filter program
diff --git a/src/Nerve_GIC/doc/COPYRIGHT b/src/Nerve_GIC/doc/COPYRIGHT
new file mode 100644
index 00000000..0c36a526
--- /dev/null
+++ b/src/Nerve_GIC/doc/COPYRIGHT
@@ -0,0 +1,19 @@
+The files of this directory are part of the Gudhi Library. The Gudhi library
+(Geometric Understanding in Higher Dimensions) is a generic C++ library for
+computational topology.
+
+Author(s): Mathieu Carrière
+
+Copyright (C) 2017 INRIA
+
+This program is free software: you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free Software
+Foundation, either version 3 of the License, or (at your option) any later
+version.
+
+This program is distributed in the hope that it will be useful, but WITHOUT
+ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License along with
+this program. If not, see <http://www.gnu.org/licenses/>.
diff --git a/src/Nerve_GIC/doc/GIC.jpg b/src/Nerve_GIC/doc/GIC.jpg
new file mode 100644
index 00000000..cb1b9b7f
--- /dev/null
+++ b/src/Nerve_GIC/doc/GIC.jpg
Binary files differ
diff --git a/src/Nerve_GIC/doc/GIC.pdf b/src/Nerve_GIC/doc/GIC.pdf
new file mode 100644
index 00000000..30525745
--- /dev/null
+++ b/src/Nerve_GIC/doc/GIC.pdf
Binary files differ
diff --git a/src/Nerve_GIC/doc/Intro_graph_induced_complex.h b/src/Nerve_GIC/doc/Intro_graph_induced_complex.h
new file mode 100644
index 00000000..3a0d8154
--- /dev/null
+++ b/src/Nerve_GIC/doc/Intro_graph_induced_complex.h
@@ -0,0 +1,216 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Mathieu Carriere
+ *
+ * Copyright (C) 2017 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef DOC_COVER_COMPLEX_INTRO_COVER_COMPLEX_H_
+#define DOC_COVER_COMPLEX_INTRO_COVER_COMPLEX_H_
+
+namespace Gudhi {
+
+namespace cover_complex {
+
+/** \defgroup cover_complex Cover complex
+ *
+ * \author Mathieu Carrière
+ *
+ * @{
+ *
+ * Visualizations of the simplicial complexes can be done with either
+ * neato (from <a target="_blank" href="http://www.graphviz.org/">graphviz</a>),
+ * <a target="_blank" href="http://www.geomview.org/">geomview</a>,
+ * <a target="_blank" href="https://github.com/MLWave/kepler-mapper">KeplerMapper</a>.
+ * Input point clouds are assumed to be
+ * <a target="_blank" href="http://www.geomview.org/docs/html/OFF.html">OFF files</a>.
+ *
+ * \section covers Covers
+ *
+ * Nerves and Graph Induced Complexes require a cover C of the input point cloud P,
+ * that is, a set of subsets of P whose union is P itself.
+ * Very often, this cover is obtained from the preimage of a family of intervals covering
+ * the image of some scalar-valued function f defined on P. This family is parameterized
+ * by its resolution, which can be either the number or the length of the intervals,
+ * and its gain, which is the overlap percentage between consecutive intervals (ordered by their first values).
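+ *
+ * As an illustration, with the Cover_complex class provided by this module, such a function cover
+ * can be set up as in the following minimal sketch (to be placed in a program that includes
+ * gudhi/GIC.h; the complete example programs are included in the next sections):
+ *
+ * \code
+ * Gudhi::cover_complex::Cover_complex<std::vector<float> > SC;
+ * SC.read_point_cloud("human.off");            // input point cloud P
+ * SC.set_function_from_coordinate(2);          // scalar-valued function f = height (coordinate 2)
+ * SC.set_resolution_with_interval_number(10);  // resolution: 10 intervals covering the image of f
+ * SC.set_gain(0.3);                            // gain: 30% overlap between consecutive intervals
+ * SC.set_cover_from_function();                // cover C = preimages of the intervals
+ * \endcode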
+ *
+ * \section nerves Nerves
+ *
+ * \subsection nervedefinition Nerve definition
+ *
+ * Assume you are given a cover C of your point cloud P. Then, the Nerve of this cover
+ * is the simplicial complex that has one k-simplex per non-empty intersection of k+1 cover elements.
+ * See also <a target="_blank" href="https://en.wikipedia.org/wiki/Nerve_of_a_covering"> Wikipedia </a>.
+ *
+ * \image html "nerve.png" "Nerve of a double torus"
+ *
+ * \subsection nerveexample Example
+ *
+ * This example builds the Nerve of a point cloud sampled on a 3D human shape (human.off).
+ * The cover C comes from the preimages of intervals (10 intervals with gain 0.3)
+ * covering the height function (coordinate 2),
+ * which are then refined into their connected components using the triangulation of the .OFF file.
+ *
+ * \include Nerve_GIC/Nerve.cpp
+ *
+ * When launching:
+ *
+ * \code $> ./Nerve ../../../../data/points/human.off 2 10 0.3 --v
+ * \endcode
+ *
+ * the program output is:
+ *
+ * \include Nerve_GIC/Nerve.txt
+ *
+ * The program also writes a file SC.txt. The first three lines in this file are the location of the input point cloud
+ * and the function used to compute the cover.
+ * The fourth line contains the number of vertices nv and edges ne of the Nerve.
+ * The next nv lines represent the vertices. Each line contains the vertex ID,
+ * the number of data points it contains, and their average color function value.
+ * Finally, the next ne lines represent the edges, characterized by the ID of their vertices.
+ *
+ * Using KeplerMapper, one can obtain the following visualization:
+ *
+ * \image html "nervevisu.jpg" "Visualization with KeplerMapper"
+ *
+ * \section gic Graph Induced Complexes (GIC)
+ *
+ * \subsection gicdefinition GIC definition
+ *
+ * Again, assume you are given a cover C of your point cloud P. Moreover, assume
+ * you are also given a graph G built on top of P. Then, for any clique in G
+ * whose nodes all belong to different elements of C, the GIC includes a corresponding
+ * simplex, whose dimension is the number of nodes in the clique minus one.
+ * See \cite Dey13 for more details.
+ *
+ * \image html "GIC.jpg" "GIC of a point cloud."
+ *
+ * \subsection gicexamplevor Example with cover from Voronoï
+ *
+ * This example builds the GIC of a point cloud sampled on a 3D human shape (human.off).
+ * We randomly subsampled 100 points in the point cloud, which act as seeds of
+ * a geodesic Voronoï diagram. Each cell of the diagram is then an element of C.
+ * The graph G (used to compute both the geodesics for Voronoï and the GIC)
+ * comes from the triangulation of the human shape. Note that the resulting simplicial complex is in dimension 3
+ * in this example.
+ *
+ * \include Nerve_GIC/VoronoiGIC.cpp
+ *
+ * When launching:
+ *
+ * \code $> ./VoronoiGIC ../../../../data/points/human.off 700 --v
+ * \endcode
+ *
+ * the program outputs SC.off. Using e.g.
+ *
+ * \code $> geomview SC.off
+ * \endcode
+ *
+ * one can obtain the following visualization:
+ *
+ * \image html "gicvoronoivisu.jpg" "Visualization with Geomview"
+ *
+ * \subsection functionalGICdefinition Functional GIC
+ *
+ * If one restricts to the cliques in G whose nodes all belong to preimages of consecutive
+ * intervals (assuming the cover of the height function is minimal, i.e. no more than
+ * two intervals can intersect at a time), the GIC is of dimension one, i.e. a graph.
+ * We call this graph the functional GIC. See \cite Carriere16 for more details.
+ *
+ * \subsection functionalGICexample Example
+ *
+ * The functional GIC comes with an automatic selection of the Rips threshold,
+ * as well as of the resolution and gain of the function cover. See \cite Carriere17c for more details. In this example,
+ * we compute the functional GIC of a Klein bottle embedded in R^5,
+ * where the graph G comes from a Rips complex with automatic threshold,
+ * and the cover C comes from the preimages of intervals covering the first coordinate,
+ * with automatic resolution and gain. Note that automatic threshold, resolution and gain
+ * can be computed as well for the Nerve.
+ *
+ * \include Nerve_GIC/CoordGIC.cpp
+ *
+ * When launching:
+ *
+ * \code $> ./CoordGIC ../../../../data/points/KleinBottle5D.off 0 --v
+ * \endcode
+ *
+ * the program outputs SC.dot. Using e.g.
+ *
+ * \code $> neato SC.dot -Tpdf -o SC.pdf
+ * \endcode
+ *
+ * one can obtain the following visualization:
+ *
+ * \image html "coordGICvisu2.jpg" "Visualization with Neato"
+ *
+ * where nodes are colored by the filter function values and, for each node, the first number is its ID
+ * and the second is the number of data points it contains.
+ *
+ * We also provide an example on a set of 72 pictures taken around the same object (lucky_cat.off).
+ * The function is now the first eigenfunction given by PCA, whose values
+ * are written in a file (lucky_cat_PCA1). Threshold, resolution and gain are automatically selected as before.
+ *
+ * \include Nerve_GIC/FuncGIC.cpp
+ *
+ * When launching:
+ *
+ * \code $> ./FuncGIC ../../data/points/COIL_database/lucky_cat.off ../../data/points/COIL_database/lucky_cat_PCA1 --v
+ * \endcode
+ *
+ * the program again outputs SC.dot, which gives the following visualization after running neato:
+ *
+ * \image html "funcGICvisu.jpg" "Visualization with neato"
+ *
+ * \copyright GNU General Public License v3.
+ * \verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim
+ */
+/** @} */ // end defgroup cover_complex
+
+} // namespace cover_complex
+
+} // namespace Gudhi
+
+#endif // DOC_COVER_COMPLEX_INTRO_COVER_COMPLEX_H_
+
+
+/* * \subsection gicexample Example with cover from function
+ *
+ * This example builds the GIC of a point cloud sampled on a 3D human shape (human.off).
+ * The cover C comes from the preimages of intervals (with length 0.075 and gain 0)
+ * covering the height function (coordinate 2),
+ * and the graph G comes from a Rips complex built with threshold 0.075.
+ * Note that if the gain is too big, the number of cliques increases a lot,
+ * which makes the computation time much longer.
+ *
+ * \include Nerve_GIC/GIC.cpp
+ *
+ * When launching:
+ *
+ * \code $> ./GIC ../../../../data/points/human.off 0.075 2 0.075 0 --v
+ * \endcode
+ *
+ * the program outputs SC.txt, which can be visualized with Python and Firefox as before:
+ *
+ * \image html "gicvisu.jpg" "Visualization with KeplerMapper"
+ * */
+
+
+/* * Using e.g.
+ *
+ * \code $> python KeplerMapperVisuFromTxtFile.py && firefox SC.html
+ * \endcode */
diff --git a/src/Nerve_GIC/doc/coordGICvisu.pdf b/src/Nerve_GIC/doc/coordGICvisu.pdf
new file mode 100644
index 00000000..313aa1b5
--- /dev/null
+++ b/src/Nerve_GIC/doc/coordGICvisu.pdf
Binary files differ
diff --git a/src/Nerve_GIC/doc/coordGICvisu2.jpg b/src/Nerve_GIC/doc/coordGICvisu2.jpg
new file mode 100644
index 00000000..046feb2a
--- /dev/null
+++ b/src/Nerve_GIC/doc/coordGICvisu2.jpg
Binary files differ
diff --git a/src/Nerve_GIC/doc/funcGICvisu.jpg b/src/Nerve_GIC/doc/funcGICvisu.jpg
new file mode 100644
index 00000000..f3da45ac
--- /dev/null
+++ b/src/Nerve_GIC/doc/funcGICvisu.jpg
Binary files differ
diff --git a/src/Nerve_GIC/doc/gicvisu.jpg b/src/Nerve_GIC/doc/gicvisu.jpg
new file mode 100644
index 00000000..576dae47
--- /dev/null
+++ b/src/Nerve_GIC/doc/gicvisu.jpg
Binary files differ
diff --git a/src/Nerve_GIC/doc/gicvoronoivisu.jpg b/src/Nerve_GIC/doc/gicvoronoivisu.jpg
new file mode 100644
index 00000000..cd86c411
--- /dev/null
+++ b/src/Nerve_GIC/doc/gicvoronoivisu.jpg
Binary files differ
diff --git a/src/Nerve_GIC/doc/nerve.png b/src/Nerve_GIC/doc/nerve.png
new file mode 100644
index 00000000..b66da4a4
--- /dev/null
+++ b/src/Nerve_GIC/doc/nerve.png
Binary files differ
diff --git a/src/Nerve_GIC/doc/nervevisu.jpg b/src/Nerve_GIC/doc/nervevisu.jpg
new file mode 100644
index 00000000..67ae1d7e
--- /dev/null
+++ b/src/Nerve_GIC/doc/nervevisu.jpg
Binary files differ
diff --git a/src/Nerve_GIC/example/CMakeLists.txt b/src/Nerve_GIC/example/CMakeLists.txt
new file mode 100644
index 00000000..461b6db2
--- /dev/null
+++ b/src/Nerve_GIC/example/CMakeLists.txt
@@ -0,0 +1,29 @@
+cmake_minimum_required(VERSION 2.6)
+project(Nerve_GIC_examples)
+
+add_executable ( Nerve Nerve.cpp )
+add_executable ( CoordGIC CoordGIC.cpp )
+add_executable ( FuncGIC FuncGIC.cpp )
+add_executable ( VoronoiGIC VoronoiGIC.cpp )
+
+if (TBB_FOUND)
+ target_link_libraries(Nerve ${TBB_LIBRARIES})
+ target_link_libraries(CoordGIC ${TBB_LIBRARIES})
+ target_link_libraries(FuncGIC ${TBB_LIBRARIES})
+ target_link_libraries(VoronoiGIC ${TBB_LIBRARIES})
+endif()
+
+file(COPY KeplerMapperVisuFromTxtFile.py km.py DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+
+add_test(NAME Nerve_GIC_example_nerve COMMAND $<TARGET_FILE:Nerve>
+ "${CMAKE_SOURCE_DIR}/data/points/human.off" "2" "10" "0.3")
+
+add_test(NAME Nerve_GIC_example_VoronoiGIC COMMAND $<TARGET_FILE:VoronoiGIC>
+ "${CMAKE_SOURCE_DIR}/data/points/human.off" "100")
+
+add_test(NAME Nerve_GIC_example_CoordGIC COMMAND $<TARGET_FILE:CoordGIC>
+ "${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" "0")
+
+add_test(NAME Nerve_GIC_example_FuncGIC COMMAND $<TARGET_FILE:FuncGIC>
+ "${CMAKE_SOURCE_DIR}/data/points/COIL_database/lucky_cat.off"
+ "${CMAKE_SOURCE_DIR}/data/points/COIL_database/lucky_cat_PCA1")
diff --git a/src/Nerve_GIC/example/CoordGIC.cpp b/src/Nerve_GIC/example/CoordGIC.cpp
new file mode 100644
index 00000000..c03fcbb3
--- /dev/null
+++ b/src/Nerve_GIC/example/CoordGIC.cpp
@@ -0,0 +1,93 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Mathieu Carrière
+ *
+ * Copyright (C) 2017 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <gudhi/GIC.h>
+
+#include <string>
+#include <vector>
+
+void usage(int nbArgs, char *const progName) {
+ std::cerr << "Error: Number of arguments (" << nbArgs << ") is not correct\n";
+ std::cerr << "Usage: " << progName << " filename.off coordinate [--v] \n";
+ std::cerr << " i.e.: " << progName << " ../../data/points/human.off 2 --v \n";
+ exit(-1); // ----- >>
+}
+
+int main(int argc, char **argv) {
+ if ((argc != 3) && (argc != 4)) usage(argc, argv[0]);
+
+ using Point = std::vector<float>;
+
+ std::string off_file_name(argv[1]);
+ int coord = atoi(argv[2]);
+ bool verb = 0;
+ if (argc == 4) verb = 1;
+
+ // -----------------------------------------
+ // Init of a functional GIC from an OFF file
+ // -----------------------------------------
+
+ Gudhi::cover_complex::Cover_complex<Point> GIC;
+ GIC.set_verbose(verb);
+
+ bool check = GIC.read_point_cloud(off_file_name);
+
+ if (!check) {
+ std::cout << "Incorrect OFF file." << std::endl;
+ } else {
+ GIC.set_type("GIC");
+
+ GIC.set_color_from_coordinate(coord);
+ GIC.set_function_from_coordinate(coord);
+
+ GIC.set_graph_from_automatic_rips(Gudhi::Euclidean_distance());
+ GIC.set_automatic_resolution();
+ GIC.set_gain();
+ GIC.set_cover_from_function();
+
+ GIC.find_simplices();
+
+ GIC.plot_DOT();
+
+ Gudhi::Simplex_tree<> stree;
+ GIC.create_complex(stree);
+
+ // --------------------------------------------
+ // Display information about the functional GIC
+ // --------------------------------------------
+
+ if (verb) {
+ std::cout << "Functional GIC is of dimension " << stree.dimension() << " - " << stree.num_simplices()
+ << " simplices - " << stree.num_vertices() << " vertices." << std::endl;
+
+ std::cout << "Iterator on functional GIC simplices" << std::endl;
+ for (auto f_simplex : stree.filtration_simplex_range()) {
+ for (auto vertex : stree.simplex_vertex_range(f_simplex)) {
+ std::cout << vertex << " ";
+ }
+ std::cout << std::endl;
+ }
+ }
+ }
+
+ return 0;
+}
diff --git a/src/Nerve_GIC/example/FuncGIC.cpp b/src/Nerve_GIC/example/FuncGIC.cpp
new file mode 100644
index 00000000..3762db4e
--- /dev/null
+++ b/src/Nerve_GIC/example/FuncGIC.cpp
@@ -0,0 +1,94 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Mathieu Carrière
+ *
+ * Copyright (C) 2017 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <gudhi/GIC.h>
+
+#include <string>
+#include <vector>
+
+void usage(int nbArgs, char *const progName) {
+ std::cerr << "Error: Number of arguments (" << nbArgs << ") is not correct\n";
+ std::cerr << "Usage: " << progName << " filename.off function [--v] \n";
+ std::cerr << " i.e.: " << progName << " ../../data/points/COIL_database/lucky_cat.off "
+ "../../data/points/COIL_database/lucky_cat_PCA1 --v \n";
+ exit(-1); // ----- >>
+}
+
+int main(int argc, char **argv) {
+ if ((argc != 3) && (argc != 4)) usage(argc, argv[0]);
+
+ using Point = std::vector<float>;
+
+ std::string off_file_name(argv[1]);
+ std::string func_file_name = argv[2];
+ bool verb = 0;
+ if (argc == 4) verb = 1;
+
+ // -----------------------------------------
+ // Init of a functional GIC from an OFF file
+ // -----------------------------------------
+
+ Gudhi::cover_complex::Cover_complex<Point> GIC;
+ GIC.set_verbose(verb);
+
+ bool check = GIC.read_point_cloud(off_file_name);
+
+ if (!check) {
+ std::cout << "Incorrect OFF file." << std::endl;
+ } else {
+ GIC.set_type("GIC");
+
+ GIC.set_color_from_file(func_file_name);
+ GIC.set_function_from_file(func_file_name);
+
+ GIC.set_graph_from_automatic_rips(Gudhi::Euclidean_distance());
+ GIC.set_automatic_resolution();
+ GIC.set_gain();
+ GIC.set_cover_from_function();
+
+ GIC.find_simplices();
+
+ GIC.plot_DOT();
+
+ Gudhi::Simplex_tree<> stree;
+ GIC.create_complex(stree);
+
+ // --------------------------------------------
+ // Display information about the functional GIC
+ // --------------------------------------------
+
+ if (verb) {
+ std::cout << "Functional GIC is of dimension " << stree.dimension() << " - " << stree.num_simplices()
+ << " simplices - " << stree.num_vertices() << " vertices." << std::endl;
+
+ std::cout << "Iterator on functional GIC simplices" << std::endl;
+ for (auto f_simplex : stree.filtration_simplex_range()) {
+ for (auto vertex : stree.simplex_vertex_range(f_simplex)) {
+ std::cout << vertex << " ";
+ }
+ std::cout << std::endl;
+ }
+ }
+ }
+
+ return 0;
+}
diff --git a/src/Nerve_GIC/example/GIC.cpp b/src/Nerve_GIC/example/GIC.cpp
new file mode 100644
index 00000000..2bc24a4d
--- /dev/null
+++ b/src/Nerve_GIC/example/GIC.cpp
@@ -0,0 +1,95 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Mathieu Carrière
+ *
+ * Copyright (C) 2017 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <gudhi/GIC.h>
+
+#include <string>
+#include <vector>
+
+void usage(int nbArgs, char *const progName) {
+ std::cerr << "Error: Number of arguments (" << nbArgs << ") is not correct\n";
+ std::cerr << "Usage: " << progName << " filename.off threshold coordinate resolution gain [--v] \n";
+ std::cerr << " i.e.: " << progName << " ../../data/points/human.off 0.075 2 0.075 0 --v \n";
+ exit(-1); // ----- >>
+}
+
+int main(int argc, char **argv) {
+ if ((argc != 6) && (argc != 7)) usage(argc, argv[0]);
+
+ using Point = std::vector<float>;
+
+ std::string off_file_name(argv[1]);
+ double threshold = atof(argv[2]);
+ int coord = atoi(argv[3]);
+ double resolution = atof(argv[4]);
+ double gain = atof(argv[5]);
+ bool verb = 0;
+ if (argc == 7) verb = 1;
+
+ // ----------------------------------------------------------------------------
+ // Init of a graph induced complex from an OFF file
+ // ----------------------------------------------------------------------------
+
+ Gudhi::graph_induced_complex::Graph_induced_complex<Point> GIC;
+ GIC.set_verbose(verb);
+
+ bool check = GIC.read_point_cloud(off_file_name);
+
+ if (!check) {
+ std::cout << "Incorrect OFF file." << std::endl;
+ } else {
+ GIC.set_color_from_coordinate(coord);
+ GIC.set_function_from_coordinate(coord);
+
+ GIC.set_graph_from_rips(threshold, Gudhi::Euclidean_distance());
+
+ GIC.set_resolution_with_interval_length(resolution);
+ GIC.set_gain(gain);
+ GIC.set_cover_from_function();
+
+ GIC.find_GIC_simplices();
+
+ GIC.plot_TXT_for_KeplerMapper();
+
+ Gudhi::Simplex_tree<> stree;
+ GIC.create_complex(stree);
+
+ // ----------------------------------------------------------------------------
+ // Display information about the graph induced complex
+ // ----------------------------------------------------------------------------
+
+ if (verb) {
+ std::cout << "Graph induced complex is of dimension " << stree.dimension() << " - " << stree.num_simplices()
+ << " simplices - " << stree.num_vertices() << " vertices." << std::endl;
+
+ std::cout << "Iterator on graph induced complex simplices" << std::endl;
+ for (auto f_simplex : stree.filtration_simplex_range()) {
+ for (auto vertex : stree.simplex_vertex_range(f_simplex)) {
+ std::cout << vertex << " ";
+ }
+ std::cout << std::endl;
+ }
+ }
+ }
+
+ return 0;
+}
diff --git a/src/Nerve_GIC/example/KeplerMapperVisuFromTxtFile.py b/src/Nerve_GIC/example/KeplerMapperVisuFromTxtFile.py
new file mode 100755
index 00000000..d2897774
--- /dev/null
+++ b/src/Nerve_GIC/example/KeplerMapperVisuFromTxtFile.py
@@ -0,0 +1,72 @@
+#!/usr/bin/env python
+
+import km
+import numpy as np
+from collections import defaultdict
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Mathieu Carriere
+
+ Copyright (C) 2017 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Mathieu Carriere"
+__copyright__ = "Copyright (C) 2017 INRIA"
+__license__ = "GPL v3"
+
+network = {}
+mapper = km.KeplerMapper(verbose=0)
+data = np.zeros((3,3))
+projected_data = mapper.fit_transform( data, projection="sum", scaler=None )
+
+f = open('SC.txt','r')
+nodes = defaultdict(list)
+links = defaultdict(list)
+custom = defaultdict(list)
+
+dat = f.readline()
+lens = f.readline()
+color = f.readline()
+param = [float(i) for i in f.readline().split(" ")]
+
+nums = [int(i) for i in f.readline().split(" ")]
+num_nodes = nums[0]
+num_edges = nums[1]
+
+for i in range(0,num_nodes):
+ point = [float(j) for j in f.readline().split(" ")]
+ nodes[ str(int(point[0])) ] = [ int(point[0]), point[1], int(point[2]) ]
+ links[ str(int(point[0])) ] = []
+ custom[ int(point[0]) ] = point[1]
+
+m = min([custom[i] for i in range(0,num_nodes)])
+M = max([custom[i] for i in range(0,num_nodes)])
+
+for i in range(0,num_edges):
+ edge = [int(j) for j in f.readline().split(" ")]
+ links[ str(edge[0]) ].append( str(edge[1]) )
+ links[ str(edge[1]) ].append( str(edge[0]) )
+
+network["nodes"] = nodes
+network["links"] = links
+network["meta"] = lens
+
+mapper.visualize(network, color_function = color, path_html="SC.html", title=dat,
+graph_link_distance=30, graph_gravity=0.1, graph_charge=-120, custom_tooltips=custom, width_html=0,
+height_html=0, show_tooltips=True, show_title=True, show_meta=True, res=param[0],gain=param[1], minimum=m,maximum=M)
diff --git a/src/Nerve_GIC/example/Nerve.cpp b/src/Nerve_GIC/example/Nerve.cpp
new file mode 100644
index 00000000..4d5b009b
--- /dev/null
+++ b/src/Nerve_GIC/example/Nerve.cpp
@@ -0,0 +1,95 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Mathieu Carrière
+ *
+ * Copyright (C) 2017 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <gudhi/GIC.h>
+
+#include <string>
+#include <vector>
+
+void usage(int nbArgs, char *const progName) {
+ std::cerr << "Error: Number of arguments (" << nbArgs << ") is not correct\n";
+ std::cerr << "Usage: " << progName << " filename.off coordinate resolution gain [--v] \n";
+ std::cerr << " i.e.: " << progName << " ../../data/points/human.off 2 10 0.3 --v \n";
+ exit(-1); // ----- >>
+}
+
+int main(int argc, char **argv) {
+ if ((argc != 5) && (argc != 6)) usage(argc, argv[0]);
+
+ using Point = std::vector<float>;
+
+ std::string off_file_name(argv[1]);
+ int coord = atoi(argv[2]);
+ int resolution = atoi(argv[3]);
+ double gain = atof(argv[4]);
+ bool verb = 0;
+ if (argc == 6) verb = 1;
+
+ // --------------------------------
+ // Init of a Nerve from an OFF file
+ // --------------------------------
+
+ Gudhi::cover_complex::Cover_complex<Point> SC;
+ SC.set_verbose(verb);
+
+ bool check = SC.read_point_cloud(off_file_name);
+
+ if (!check) {
+ std::cout << "Incorrect OFF file." << std::endl;
+ } else {
+ SC.set_type("Nerve");
+
+ SC.set_color_from_coordinate(coord);
+ SC.set_function_from_coordinate(coord);
+
+ SC.set_graph_from_OFF();
+ SC.set_resolution_with_interval_number(resolution);
+ SC.set_gain(gain);
+ SC.set_cover_from_function();
+
+ SC.find_simplices();
+
+ SC.write_info();
+
+ Gudhi::Simplex_tree<> stree;
+ SC.create_complex(stree);
+
+ // ----------------------------------------------------------------------------
+ // Display information about the graph induced complex
+ // ----------------------------------------------------------------------------
+
+ if (verb) {
+ std::cout << "Nerve is of dimension " << stree.dimension() << " - " << stree.num_simplices() << " simplices - "
+ << stree.num_vertices() << " vertices." << std::endl;
+
+ std::cout << "Iterator on Nerve simplices" << std::endl;
+ for (auto f_simplex : stree.filtration_simplex_range()) {
+ for (auto vertex : stree.simplex_vertex_range(f_simplex)) {
+ std::cout << vertex << " ";
+ }
+ std::cout << std::endl;
+ }
+ }
+ }
+
+ return 0;
+}
diff --git a/src/Nerve_GIC/example/Nerve.txt b/src/Nerve_GIC/example/Nerve.txt
new file mode 100644
index 00000000..2a861c5f
--- /dev/null
+++ b/src/Nerve_GIC/example/Nerve.txt
@@ -0,0 +1,43 @@
+Nerve is of dimension 1 - 41 simplices - 21 vertices.
+Iterator on Nerve simplices
+0
+1
+2
+2 0
+3
+3 1
+4
+4 3
+5
+5 2
+6
+6 4
+7
+7 5
+8
+9
+9 6
+10
+10 7
+11
+12
+12 8
+13
+13 9
+13 10
+14
+14 11
+15
+15 13
+16
+16 12
+17
+17 14
+18
+18 15
+18 16
+18 17
+19
+19 18
+20
+20 19
diff --git a/src/Nerve_GIC/example/VoronoiGIC.cpp b/src/Nerve_GIC/example/VoronoiGIC.cpp
new file mode 100644
index 00000000..32431cc2
--- /dev/null
+++ b/src/Nerve_GIC/example/VoronoiGIC.cpp
@@ -0,0 +1,90 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Mathieu Carrière
+ *
+ * Copyright (C) 2017 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <gudhi/GIC.h>
+
+#include <string>
+#include <vector>
+
+void usage(int nbArgs, char *const progName) {
+ std::cerr << "Error: Number of arguments (" << nbArgs << ") is not correct\n";
+ std::cerr << "Usage: " << progName << " filename.off N [--v] \n";
+ std::cerr << " i.e.: " << progName << " ../../data/points/human.off 100 --v \n";
+ exit(-1); // ----- >>
+}
+
+int main(int argc, char **argv) {
+ if ((argc != 3) && (argc != 4)) usage(argc, argv[0]);
+
+ using Point = std::vector<float>;
+
+ std::string off_file_name(argv[1]);
+ int m = atoi(argv[2]);
+ bool verb = 0;
+ if (argc == 4) verb = 1;
+
+ // ----------------------------------------------------------------------------
+ // Init of a graph induced complex from an OFF file
+ // ----------------------------------------------------------------------------
+
+ Gudhi::cover_complex::Cover_complex<Point> GIC;
+ GIC.set_verbose(verb);
+
+ bool check = GIC.read_point_cloud(off_file_name);
+
+ if (!check) {
+ std::cout << "Incorrect OFF file." << std::endl;
+ } else {
+ GIC.set_type("GIC");
+
+ GIC.set_color_from_coordinate();
+
+ GIC.set_graph_from_OFF();
+ GIC.set_cover_from_Voronoi(Gudhi::Euclidean_distance(), m);
+
+ GIC.find_simplices();
+
+ GIC.plot_OFF();
+
+ Gudhi::Simplex_tree<> stree;
+ GIC.create_complex(stree);
+
+ // ----------------------------------------------------------------------------
+ // Display information about the graph induced complex
+ // ----------------------------------------------------------------------------
+
+ if (verb) {
+ std::cout << "Graph induced complex is of dimension " << stree.dimension() << " - " << stree.num_simplices()
+ << " simplices - " << stree.num_vertices() << " vertices." << std::endl;
+
+ std::cout << "Iterator on graph induced complex simplices" << std::endl;
+ for (auto f_simplex : stree.filtration_simplex_range()) {
+ for (auto vertex : stree.simplex_vertex_range(f_simplex)) {
+ std::cout << vertex << " ";
+ }
+ std::cout << std::endl;
+ }
+ }
+ }
+
+ return 0;
+}
diff --git a/src/Nerve_GIC/example/km.py b/src/Nerve_GIC/example/km.py
new file mode 100755
index 00000000..53024aab
--- /dev/null
+++ b/src/Nerve_GIC/example/km.py
@@ -0,0 +1,390 @@
+from __future__ import division
+import numpy as np
+from collections import defaultdict
+import json
+import itertools
+from sklearn import cluster, preprocessing, manifold
+from datetime import datetime
+import sys
+
+class KeplerMapper(object):
+ # With this class you can build topological networks from (high-dimensional) data.
+ #
+ # 1) Fit a projection/lens/function to a dataset and transform it.
+ # For instance "mean_of_row(x) for x in X"
+ # 2) Map this projection with overlapping intervals/hypercubes.
+ # Cluster the points inside the interval
+ # (Note: we cluster on the inverse image/original data to lessen projection loss).
+ # If two clusters/nodes have the same members (due to the overlap), then:
+ # connect these with an edge.
+ # 3) Visualize the network using HTML and D3.js.
+ #
+ # functions
+ # ---------
+ # fit_transform: Create a projection (lens) from a dataset
+ # map: Apply Mapper algorithm on this projection and build a simplicial complex
+ # visualize: Turns the complex dictionary into a HTML/D3.js visualization
+
+ def __init__(self, verbose=2):
+ self.verbose = verbose
+
+ self.chunk_dist = []
+ self.overlap_dist = []
+ self.d = []
+ self.nr_cubes = 0
+ self.overlap_perc = 0
+ self.clusterer = False
+
+ def fit_transform(self, X, projection="sum", scaler=preprocessing.MinMaxScaler()):
+ # Creates the projection/lens from X.
+ #
+ # Input: X. Input features as a numpy array.
+ # Output: projected_X. original data transformed to a projection (lens).
+ #
+ # parameters
+ # ----------
+ # projection: Projection parameter is either a string,
+ # a scikit class with fit_transform, like manifold.TSNE(),
+ # or a list of dimension indices.
+ # scaler: if None, do no scaling, else apply scaling to the projection
+ # Default: Min-Max scaling
+
+ self.scaler = scaler
+ self.projection = str(projection)
+
+ # Detect if projection is a class (for scikit-learn)
+ #if str(type(projection))[1:6] == "class": #TODO: de-ugly-fy
+ # reducer = projection
+ # if self.verbose > 0:
+ # try:
+ # projection.set_params(**{"verbose":self.verbose})
+ # except:
+ # pass
+ # print("\n..Projecting data using: \n\t%s\n"%str(projection))
+ # X = reducer.fit_transform(X)
+
+ # Detect if projection is a string (for standard functions)
+ if isinstance(projection, str):
+ if self.verbose > 0:
+ print("\n..Projecting data using: %s"%(projection))
+ # Stats lenses
+ if projection == "sum": # sum of row
+ X = np.sum(X, axis=1).reshape((X.shape[0],1))
+ if projection == "mean": # mean of row
+ X = np.mean(X, axis=1).reshape((X.shape[0],1))
+ if projection == "median": # mean of row
+ X = np.median(X, axis=1).reshape((X.shape[0],1))
+ if projection == "max": # max of row
+ X = np.max(X, axis=1).reshape((X.shape[0],1))
+ if projection == "min": # min of row
+ X = np.min(X, axis=1).reshape((X.shape[0],1))
+ if projection == "std": # std of row
+ X = np.std(X, axis=1).reshape((X.shape[0],1))
+
+ if projection == "dist_mean": # Distance of x to mean of X
+ X_mean = np.mean(X, axis=0)
+ X = np.sum(np.sqrt((X - X_mean)**2), axis=1).reshape((X.shape[0],1))
+
+ # Detect if projection is a list (with dimension indices)
+ if isinstance(projection, list):
+ if self.verbose > 0:
+ print("\n..Projecting data using: %s"%(str(projection)))
+ X = X[:,np.array(projection)]
+
+ # Scaling
+ if scaler is not None:
+ if self.verbose > 0:
+ print("\n..Scaling with: %s\n"%str(scaler))
+ X = scaler.fit_transform(X)
+
+ return X
+
+ def map(self, projected_X, inverse_X=None, clusterer=cluster.DBSCAN(eps=0.5,min_samples=3), nr_cubes=10, overlap_perc=0.1):
+ # This maps the data to a simplicial complex. Returns a dictionary with nodes and links.
+ #
+ # Input: projected_X. A Numpy array with the projection/lens.
+ # Output: complex. A dictionary with "nodes", "links" and "meta information"
+ #
+ # parameters
+ # ----------
+ # projected_X projected_X. A Numpy array with the projection/lens. Required.
+ # inverse_X Numpy array or None. If None then the projection itself is used for clustering.
+ # clusterer Scikit-learn API compatible clustering algorithm. Default: DBSCAN
+ # nr_cubes Int. The number of intervals/hypercubes to create.
+ # overlap_perc Float. The percentage of overlap "between" the intervals/hypercubes.
+
+ start = datetime.now()
+
+ # Helper function
+ def cube_coordinates_all(nr_cubes, nr_dimensions):
+ # Helper function to get origin coordinates for our intervals/hypercubes
+ # Useful for looping no matter the number of cubes or dimensions
+ # Example: if there are 4 cubes per dimension and 3 dimensions
+ # return the bottom left (origin) coordinates of 64 hypercubes,
+ # as a sorted list of Numpy arrays
+ # TODO: elegance-ify...
+ l = []
+ for x in range(nr_cubes):
+ l += [x] * nr_dimensions
+ return [np.array(list(f)) for f in sorted(set(itertools.permutations(l,nr_dimensions)))]
+
+ nodes = defaultdict(list)
+ links = defaultdict(list)
+ complex = {}
+ self.nr_cubes = nr_cubes
+ self.clusterer = clusterer
+ self.overlap_perc = overlap_perc
+
+ if self.verbose > 0:
+ print("Mapping on data shaped %s using dimensions\n"%(str(projected_X.shape)))
+
+ # If inverse image is not provided, we use the projection as the inverse image (suffer projection loss)
+ if inverse_X is None:
+ inverse_X = projected_X
+
+ # We chop up the min-max column ranges into 'nr_cubes' parts
+ self.chunk_dist = (np.max(projected_X, axis=0) - np.min(projected_X, axis=0))/nr_cubes
+
+ # We calculate the overlapping windows distance
+ self.overlap_dist = self.overlap_perc * self.chunk_dist
+
+ # We find our starting point
+ self.d = np.min(projected_X, axis=0)
+
+ # Use a dimension index array on the projected X
+ # (For now this uses the entire dimensionality, but we keep for experimentation)
+ di = np.array([x for x in range(projected_X.shape[1])])
+
+ # Prefix'ing the data with ID's
+ ids = np.array([x for x in range(projected_X.shape[0])])
+ projected_X = np.c_[ids,projected_X]
+ inverse_X = np.c_[ids,inverse_X]
+
+ # Subdivide the projected data X in intervals/hypercubes with overlap
+ if self.verbose > 0:
+ total_cubes = len(cube_coordinates_all(nr_cubes,projected_X.shape[1]))
+ print("Creating %s hypercubes."%total_cubes)
+
+ for i, coor in enumerate(cube_coordinates_all(nr_cubes,di.shape[0])):
+ # Slice the hypercube
+ hypercube = projected_X[ np.invert(np.any((projected_X[:,di+1] >= self.d[di] + (coor * self.chunk_dist[di])) &
+ (projected_X[:,di+1] < self.d[di] + (coor * self.chunk_dist[di]) + self.chunk_dist[di] + self.overlap_dist[di]) == False, axis=1 )) ]
+
+ if self.verbose > 1:
+ print("There are %s points in cube_%s / %s with starting range %s"%
+ (hypercube.shape[0],i,total_cubes,self.d[di] + (coor * self.chunk_dist[di])))
+
+ # If at least one sample inside the hypercube
+ if hypercube.shape[0] > 0:
+ # Cluster the data point(s) in the cube, skipping the id-column
+ # Note that we apply clustering on the inverse image (original data samples) that fall inside the cube.
+ inverse_x = inverse_X[[int(nn) for nn in hypercube[:,0]]]
+
+ clusterer.fit(inverse_x[:,1:])
+
+ if self.verbose > 1:
+ print("Found %s clusters in cube_%s\n"%(np.unique(clusterer.labels_[clusterer.labels_ > -1]).shape[0],i))
+
+ #Now for every (sample id in cube, predicted cluster label)
+ for a in np.c_[hypercube[:,0],clusterer.labels_]:
+ if a[1] != -1: #if not predicted as noise
+ cluster_id = str(coor[0])+"_"+str(i)+"_"+str(a[1])+"_"+str(coor)+"_"+str(self.d[di] + (coor * self.chunk_dist[di])) # TODO: de-rudimentary-ify
+ nodes[cluster_id].append( int(a[0]) ) # Append the member id's as integers
+ else:
+ if self.verbose > 1:
+ print("Cube_%s is empty.\n"%(i))
+
+ # Create links when clusters from different hypercubes have members with the same sample id.
+ candidates = itertools.combinations(nodes.keys(),2)
+ for candidate in candidates:
+ # if there are non-unique members in the union
+ if len(nodes[candidate[0]]+nodes[candidate[1]]) != len(set(nodes[candidate[0]]+nodes[candidate[1]])):
+ links[candidate[0]].append( candidate[1] )
+
+ # Reporting
+ if self.verbose > 0:
+ nr_links = 0
+ for k in links:
+ nr_links += len(links[k])
+ print("\ncreated %s edges and %s nodes in %s."%(nr_links,len(nodes),str(datetime.now()-start)))
+
+ complex["nodes"] = nodes
+ complex["links"] = links
+ complex["meta"] = self.projection
+
+ return complex
+
+ def visualize(self, complex, color_function="", path_html="mapper_visualization_output.html", title="My Data",
+ graph_link_distance=30, graph_gravity=0.1, graph_charge=-120, custom_tooltips=None, width_html=0,
+ height_html=0, show_tooltips=True, show_title=True, show_meta=True, res=0,gain=0,minimum=0,maximum=0):
+ # Turns the dictionary 'complex' in a html file with d3.js
+ #
+ # Input: complex. Dictionary (output from calling .map())
+ # Output: a HTML page saved as a file in 'path_html'.
+ #
+ # parameters
+ # ----------
+ # color_function string. Not fully implemented. Default: "" (distance to origin)
+ # path_html file path as string. Where to save the HTML page.
+ # title string. HTML page document title and first heading.
+ # graph_link_distance int. Edge length.
+ # graph_gravity float. "Gravity" to center of layout.
+ # graph_charge int. charge between nodes.
+ # custom_tooltips None or Numpy Array. You could use "y"-label array for this.
+ # width_html int. Width of canvas. Default: 0 (full width)
+ # height_html int. Height of canvas. Default: 0 (full height)
+ # show_tooltips bool. default:True
+ # show_title bool. default:True
+ # show_meta bool. default:True
+
+ # Format JSON for D3 graph
+ json_s = {}
+ json_s["nodes"] = []
+ json_s["links"] = []
+ k2e = {} # a key to incremental int dict, used for id's when linking
+
+ for e, k in enumerate(complex["nodes"]):
+ # Tooltip and node color formatting, TODO: de-mess-ify
+ if custom_tooltips is not None:
+ tooltip_s = "<h2>Cluster %s</h2>"%k + " ".join(str(custom_tooltips[complex["nodes"][k][0]]).split(" "))
+ if maximum == minimum:
+ tooltip_i = 0
+ else:
+ tooltip_i = int(30*(custom_tooltips[complex["nodes"][k][0]]-minimum)/(maximum-minimum))
+ json_s["nodes"].append({"name": str(k), "tooltip": tooltip_s, "group": 2 * int(np.log(complex["nodes"][k][2])), "color": tooltip_i})
+ else:
+ tooltip_s = "<h2>Cluster %s</h2>Contains %s members."%(k,len(complex["nodes"][k]))
+ json_s["nodes"].append({"name": str(k), "tooltip": tooltip_s, "group": 2 * int(np.log(len(complex["nodes"][k]))), "color": str(k.split("_")[0])})
+ k2e[k] = e
+ for k in complex["links"]:
+ for link in complex["links"][k]:
+ json_s["links"].append({"source": k2e[k], "target":k2e[link],"value":1})
+
+ # Width and height of graph in HTML output
+ if width_html == 0:
+ width_css = "100%"
+ width_js = 'document.getElementById("holder").offsetWidth-20'
+ else:
+ width_css = "%spx" % width_html
+ width_js = "%s" % width_html
+ if height_html == 0:
+ height_css = "100%"
+ height_js = 'document.getElementById("holder").offsetHeight-20'
+ else:
+ height_css = "%spx" % height_html
+ height_js = "%s" % height_html
+
+ # Whether to show certain UI elements or not
+ if show_tooltips == False:
+ tooltips_display = "display: none;"
+ else:
+ tooltips_display = ""
+
+ if show_meta == False:
+ meta_display = "display: none;"
+ else:
+ meta_display = ""
+
+ if show_title == False:
+ title_display = "display: none;"
+ else:
+ title_display = ""
+
+ with open(path_html,"wb") as outfile:
+ html = """<!DOCTYPE html>
+ <meta charset="utf-8">
+ <meta name="generator" content="KeplerMapper">
+ <title>%s | KeplerMapper</title>
+ <link href='https://fonts.googleapis.com/css?family=Roboto:700,300' rel='stylesheet' type='text/css'>
+ <style>
+ * {margin: 0; padding: 0;}
+ html { height: 100%%;}
+ body {background: #111; height: 100%%; font: 100 16px Roboto, Sans-serif;}
+ .link { stroke: #999; stroke-opacity: .333; }
+ .divs div { border-radius: 50%%; background: red; position: absolute; }
+ .divs { position: absolute; top: 0; left: 0; }
+ #holder { position: relative; width: %s; height: %s; background: #111; display: block;}
+ h1 { %s padding: 20px; color: #fafafa; text-shadow: 0px 1px #000,0px -1px #000; position: absolute; font: 300 30px Roboto, Sans-serif;}
+ h2 { text-shadow: 0px 1px #000,0px -1px #000; font: 700 16px Roboto, Sans-serif;}
+ .meta { position: absolute; opacity: 0.9; width: 220px; top: 80px; left: 20px; display: block; %s background: #000; line-height: 25px; color: #fafafa; border: 20px solid #000; font: 100 16px Roboto, Sans-serif;}
+ div.tooltip { position: absolute; width: 380px; display: block; %s padding: 20px; background: #000; border: 0px; border-radius: 3px; pointer-events: none; z-index: 999; color: #FAFAFA;}
+ }
+ </style>
+ <body>
+ <div id="holder">
+ <h1>%s</h1>
+ <p class="meta">
+ <b>Lens</b><br>%s<br><br>
+ <b>Length of intervals</b><br>%s<br><br>
+ <b>Overlap percentage</b><br>%s%%<br><br>
+ <b>Color Function</b><br>%s
+ </p>
+ </div>
+ <script src="https://cdnjs.cloudflare.com/ajax/libs/d3/3.5.5/d3.min.js"></script>
+ <script>
+ var width = %s,
+ height = %s;
+ var color = d3.scale.ordinal()
+ .domain(["0","1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13","14","15","16","17","18","19","20","21","22","23","24","25","26","27","28","29","30"])
+ .range(["#FF0000","#FF1400","#FF2800","#FF3c00","#FF5000","#FF6400","#FF7800","#FF8c00","#FFa000","#FFb400","#FFc800","#FFdc00","#FFf000","#fdff00","#b0ff00","#65ff00","#17ff00","#00ff36","#00ff83","#00ffd0","#00e4ff","#00c4ff","#00a4ff","#00a4ff","#0084ff","#0064ff","#0044ff","#0022ff","#0002ff","#0100ff","#0300ff","#0500ff"]);
+ var force = d3.layout.force()
+ .charge(%s)
+ .linkDistance(%s)
+ .gravity(%s)
+ .size([width, height]);
+ var svg = d3.select("#holder").append("svg")
+ .attr("width", width)
+ .attr("height", height);
+
+ var div = d3.select("#holder").append("div")
+ .attr("class", "tooltip")
+ .style("opacity", 0.0);
+
+ var divs = d3.select('#holder').append('div')
+ .attr('class', 'divs')
+ .attr('style', function(d) { return 'overflow: hidden; width: ' + width + 'px; height: ' + height + 'px;'; });
+
+ graph = %s;
+ force
+ .nodes(graph.nodes)
+ .links(graph.links)
+ .start();
+ var link = svg.selectAll(".link")
+ .data(graph.links)
+ .enter().append("line")
+ .attr("class", "link")
+ .style("stroke-width", function(d) { return Math.sqrt(d.value); });
+ var node = divs.selectAll('div')
+ .data(graph.nodes)
+ .enter().append('div')
+ .on("mouseover", function(d) {
+ div.transition()
+ .duration(200)
+ .style("opacity", .9);
+ div .html(d.tooltip + "<br/>")
+ .style("left", (d3.event.pageX + 100) + "px")
+ .style("top", (d3.event.pageY - 28) + "px");
+ })
+ .on("mouseout", function(d) {
+ div.transition()
+ .duration(500)
+ .style("opacity", 0);
+ })
+ .call(force.drag);
+
+ node.append("title")
+ .text(function(d) { return d.name; });
+ force.on("tick", function() {
+ link.attr("x1", function(d) { return d.source.x; })
+ .attr("y1", function(d) { return d.source.y; })
+ .attr("x2", function(d) { return d.target.x; })
+ .attr("y2", function(d) { return d.target.y; });
+ node.attr("cx", function(d) { return d.x; })
+ .attr("cy", function(d) { return d.y; })
+ .attr('style', function(d) { return 'width: ' + (d.group * 2) + 'px; height: ' + (d.group * 2) + 'px; ' + 'left: '+(d.x-(d.group))+'px; ' + 'top: '+(d.y-(d.group))+'px; background: '+color(d.color)+'; box-shadow: 0px 0px 3px #111; box-shadow: 0px 0px 33px '+color(d.color)+', inset 0px 0px 5px rgba(0, 0, 0, 0.2);'})
+ ;
+ });
+ </script>"""%(title,width_css, height_css, title_display, meta_display, tooltips_display, title,complex["meta"],res,gain*100,color_function,width_js,height_js,graph_charge,graph_link_distance,graph_gravity,json.dumps(json_s))
+ outfile.write(html.encode("utf-8"))
+ if self.verbose > 0:
+ print("\nWrote d3.js graph to '%s'"%path_html)
diff --git a/src/Nerve_GIC/example/km.py.COPYRIGHT b/src/Nerve_GIC/example/km.py.COPYRIGHT
new file mode 100644
index 00000000..bef7b121
--- /dev/null
+++ b/src/Nerve_GIC/example/km.py.COPYRIGHT
@@ -0,0 +1,26 @@
+km.py is a fork of https://github.com/MLWave/kepler-mapper.
+Only the visualization part has been kept (Mapper part has been removed).
+
+This file has the following Copyright:
+
+The MIT License (MIT)
+
+Copyright (c) 2015 Triskelion - HJ van Veen - info@mlwave.com
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/src/Nerve_GIC/include/gudhi/GIC.h b/src/Nerve_GIC/include/gudhi/GIC.h
new file mode 100644
index 00000000..9f107a7e
--- /dev/null
+++ b/src/Nerve_GIC/include/gudhi/GIC.h
@@ -0,0 +1,1166 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author: Mathieu Carriere
+ *
+ * Copyright (C) 2017 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef GIC_H_
+#define GIC_H_
+
+#include <gudhi/Debug_utils.h>
+#include <gudhi/graph_simplicial_complex.h>
+#include <gudhi/reader_utils.h>
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Rips_complex.h>
+#include <gudhi/Points_off_io.h>
+#include <gudhi/distance_functions.h>
+
+#include <iostream>
+#include <vector>
+#include <map>
+#include <string>
+#include <limits> // for numeric_limits
+#include <utility> // for pair<>
+#include <algorithm> // for std::max
+#include <random>
+#include <cassert>
+
+namespace Gudhi {
+
+namespace cover_complex {
+
+using Simplex_tree = Gudhi::Simplex_tree<>;
+using Filtration_value = Simplex_tree::Filtration_value;
+using Rips_complex = Gudhi::rips_complex::Rips_complex<Filtration_value>;
+
+/**
+ * \class Cover_complex
+ * \brief Cover complex data structure.
+ *
+ * \ingroup cover_complex
+ *
+ * \details
+ * The data structure is a simplicial complex, representing either a
+ * Graph Induced simplicial Complex (GIC) or a Nerve,
+ * whose simplices are computed from a cover C of a point
+ * cloud P, which often comes from the preimages of intervals
+ * covering the image of a function f defined on P.
+ * These intervals are parameterized by their resolution
+ * (either their length or their number)
+ * and their gain (percentage of overlap).
+ * To compute a GIC, one also needs a graph G built on top of P,
+ * whose cliques with vertices belonging to different elements of C
+ * correspond to the simplices of the GIC.
+ *
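+ * A minimal usage sketch of the typical pipeline (set the type, read the points, define a
+ * function, build a graph, build a cover, compute the simplices, export to a Simplex_tree)
+ * is given below; the input file name and the parameter values are illustrative assumptions
+ * only, not defaults of the class:
+ *
+ * \code
+ * Gudhi::cover_complex::Cover_complex<std::vector<float> > GIC;
+ * GIC.set_type("GIC");
+ * GIC.read_point_cloud("data/cloud.off");       // hypothetical input file
+ * GIC.set_function_from_coordinate(0);          // use the first coordinate as f
+ * GIC.set_graph_from_automatic_rips(Gudhi::Euclidean_distance());
+ * GIC.set_resolution_with_interval_number(10);  // illustrative resolution
+ * GIC.set_gain(0.3);
+ * GIC.set_cover_from_function();
+ * GIC.find_simplices();
+ * Gudhi::Simplex_tree<> stree;
+ * GIC.create_complex(stree);
+ * \endcode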
+ */
+template <typename Point>
+class Cover_complex {
+ private:
+ // Graph_induced_complex(std::map<int, double> fun){func = fun;}
+ bool verbose = false; // whether to display information.
+ std::vector<Point> point_cloud;
+ std::vector<std::vector<int> > one_skeleton;
+ typedef int Cover_t; // elements of cover C are indexed by integers.
+ std::vector<std::vector<Cover_t> > simplices;
+ std::map<int, std::vector<Cover_t> > cover;
+ std::map<Cover_t, std::vector<int> > cover_back;
+ int maximal_dim; // maximal dimension of output simplicial complex.
+ int data_dimension; // dimension of input data.
+ int n; // number of points.
+ std::map<Cover_t, int>
+      cover_fct;  // integer-valued function used to determine whether two elements of the cover are consecutive.
+ std::map<Cover_t, std::pair<int, double> >
+ cover_color; // size and coloring of the vertices of the output simplicial complex.
+ Simplex_tree st;
+
+ std::map<int, std::vector<int> > adjacency_matrix;
+ std::vector<std::vector<double> > distances;
+
+ int resolution_int = -1;
+ double resolution_double = -1;
+ double gain = -1;
+ double rate_constant = 10; // Constant in the subsampling.
+ double rate_power = 0.001; // Power in the subsampling.
+  int mask = 0;             // Ignore nodes containing mask points or fewer.
+
+ std::map<int, double> func;
+ std::map<int, double> func_color;
+ std::vector<int> voronoi_subsamples;
+ std::string cover_name;
+ std::string point_cloud_name;
+ std::string color_name;
+ std::string type; // Nerve or GIC
+ bool functional_cover = false; // whether we use a cover with preimages of a function or not
+
+ // Point comparator
+ struct Less {
+ Less(std::map<int, double> func) { Fct = func; }
+ std::map<int, double> Fct;
+ bool operator()(int a, int b) {
+ if (Fct[a] == Fct[b])
+ return a < b;
+ else
+ return Fct[a] < Fct[b];
+ }
+ };
+
+ // DFS
+ private:
+ void dfs(std::map<int, std::vector<int> >& G, int p, std::vector<int>& cc, std::map<int, bool>& visit) {
+ cc.push_back(p);
+ visit[p] = true;
+ int neighb = G[p].size();
+ for (int i = 0; i < neighb; i++)
+ if (visit.find(G[p][i]) != visit.end())
+ if (!(visit[G[p][i]])) dfs(G, G[p][i], cc, visit);
+ }
+
+ // Find random number in [0,1].
+ double GetUniform() {
+ static std::default_random_engine re;
+ static std::uniform_real_distribution<double> Dist(0, 1);
+ return Dist(re);
+ }
+
+ // Subsample points.
+ void SampleWithoutReplacement(int populationSize, int sampleSize, std::vector<int>& samples) {
+ int t = 0;
+ int m = 0;
+ double u;
+ while (m < sampleSize) {
+ u = GetUniform();
+ if ((populationSize - t) * u >= sampleSize - m) {
+ t++;
+ } else {
+ samples[m] = t;
+ t++;
+ m++;
+ }
+ }
+ }
+
+ private:
+ void fill_adjacency_matrix_from_st() {
+ std::vector<int> empty;
+ for (int i = 0; i < n; i++) adjacency_matrix[i] = empty;
+ for (auto simplex : st.complex_simplex_range()) {
+ if (st.dimension(simplex) == 1) {
+ std::vector<int> vertices;
+ for (auto vertex : st.simplex_vertex_range(simplex)) vertices.push_back(vertex);
+ adjacency_matrix[vertices[0]].push_back(vertices[1]);
+ adjacency_matrix[vertices[1]].push_back(vertices[0]);
+ }
+ }
+ }
+
+ public:
+  /** \brief Specifies the type of the output simplicial complex.
+ *
+ * @param[in] t string (either "GIC" or "Nerve").
+ *
+ */
+ void set_type(const std::string& t) { type = t; }
+
+ public:
+ /** \brief Specifies whether the program should display information or not.
+ *
+ * @param[in] verb boolean (true = display info, false = do not display info).
+ *
+ */
+ void set_verbose(bool verb = false) { verbose = verb; }
+
+ public:
+ /** \brief Sets the constants used to subsample the data set. These constants are
+ * explained in \cite Carriere17c.
+ *
+ * @param[in] constant double.
+ * @param[in] power double.
+ *
+ */
+ void set_subsampling(double constant, double power) {
+ rate_constant = constant;
+ rate_power = power;
+ }
+
+ public:
+  /** \brief Sets the mask, i.e. a threshold such that nodes of the complex
+   * containing a number of data points less than or equal to
+   * this threshold are not displayed.
+ *
+ * @param[in] nodemask integer.
+ *
+ */
+ void set_mask(int nodemask) { mask = nodemask; }
+
+ public:
+ /** \brief Reads and stores the input point cloud.
+ *
+ * @param[in] off_file_name name of the input .OFF or .nOFF file.
+ *
+ */
+ bool read_point_cloud(const std::string& off_file_name) {
+ point_cloud_name = off_file_name;
+ std::ifstream input(off_file_name);
+ std::string line;
+
+ char comment = '#';
+ while (comment == '#') {
+ getline(input, line);
+ if (!line.empty() && !std::all_of(line.begin(), line.end(), isspace)) comment = line[line.find_first_not_of(' ')];
+ }
+ if (std::strcmp((char*)line.c_str(), "nOFF") == 0) {
+ comment = '#';
+ while (comment == '#') {
+ getline(input, line);
+ if (!line.empty() && !std::all_of(line.begin(), line.end(), isspace))
+ comment = line[line.find_first_not_of(' ')];
+ }
+ std::stringstream stream(line);
+ stream >> data_dimension;
+ } else {
+ data_dimension = 3;
+ }
+
+ comment = '#';
+ int numedges, numfaces, i, num;
+ while (comment == '#') {
+ getline(input, line);
+ if (!line.empty() && !std::all_of(line.begin(), line.end(), isspace)) comment = line[line.find_first_not_of(' ')];
+ }
+ std::stringstream stream(line);
+ stream >> n;
+ stream >> numfaces;
+ stream >> numedges;
+
+ i = 0;
+ while (i < n) {
+ getline(input, line);
+ if (!line.empty() && line[line.find_first_not_of(' ')] != '#' &&
+ !std::all_of(line.begin(), line.end(), isspace)) {
+ std::vector<double> point;
+ std::istringstream iss(line);
+ point.assign(std::istream_iterator<double>(iss), std::istream_iterator<double>());
+ point_cloud.emplace_back(point.begin(), point.begin() + data_dimension);
+ i++;
+ }
+ }
+
+ i = 0;
+ while (i < numfaces) {
+ getline(input, line);
+ if (!line.empty() && line[line.find_first_not_of(' ')] != '#' &&
+ !std::all_of(line.begin(), line.end(), isspace)) {
+ std::vector<int> simplex;
+ std::istringstream iss(line);
+ simplex.assign(std::istream_iterator<int>(iss), std::istream_iterator<int>());
+ num = simplex[0];
+ std::vector<int> edge(2);
+ for (int j = 1; j <= num; j++) {
+ for (int k = j + 1; k <= num; k++) {
+ edge[0] = simplex[j];
+ edge[1] = simplex[k];
+ one_skeleton.push_back(edge);
+ }
+ }
+ i++;
+ }
+ }
+
+ return input.is_open();
+ }
+
+ // *******************************************************************************************************************
+ // Graphs.
+ // *******************************************************************************************************************
+
+ public: // Set graph from file.
+ /** \brief Creates a graph G from a file containing the edges.
+ *
+ * @param[in] graph_file_name name of the input graph file.
+ * The graph file contains one edge per line,
+ * each edge being represented by the IDs of its two nodes.
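+   * For instance, the test file data/graph shipped with this module encodes a triangle
+   * on vertices 0, 1 and 2:
+   * \code
+   * 0 1
+   * 0 2
+   * 1 2
+   * \endcode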
+ *
+ */
+ void set_graph_from_file(const std::string& graph_file_name) {
+ int neighb;
+ std::ifstream input(graph_file_name);
+ std::string line;
+ int edge[2];
+ int n = 0;
+ while (std::getline(input, line)) {
+ std::stringstream stream(line);
+ stream >> edge[0];
+ while (stream >> neighb) {
+ edge[1] = neighb;
+ st.insert_simplex_and_subfaces(edge);
+ }
+ n++;
+ }
+
+ fill_adjacency_matrix_from_st();
+ }
+
+ public: // Set graph from OFF file.
+ /** \brief Creates a graph G from the triangulation given by the input .OFF file.
+ *
+ */
+ void set_graph_from_OFF() {
+ int num_edges = one_skeleton.size();
+ if (num_edges > 0) {
+ for (int i = 0; i < num_edges; i++) st.insert_simplex_and_subfaces(one_skeleton[i]);
+ fill_adjacency_matrix_from_st();
+ } else {
+ std::cout << "No triangulation read in OFF file!" << std::endl;
+ }
+ }
+
+ public: // Set graph from Rips complex.
+ /** \brief Creates a graph G from a Rips complex.
+ *
+ * @param[in] threshold threshold value for the Rips complex.
+ * @param[in] distance distance used to compute the Rips complex.
+ *
+ */
+ template <typename Distance>
+ void set_graph_from_rips(double threshold, Distance distance) {
+ Rips_complex rips_complex_from_points(point_cloud, threshold, distance);
+ rips_complex_from_points.create_complex(st, 1);
+ fill_adjacency_matrix_from_st();
+ }
+
+ public: // Pairwise distances.
+ /** \private \brief Computes all pairwise distances.
+ */
+ template <typename Distance>
+ void compute_pairwise_distances(Distance ref_distance) {
+ double d;
+ std::vector<double> zeros(n);
+ for (int i = 0; i < n; i++) distances.push_back(zeros);
+ std::string distance = point_cloud_name;
+ distance.append("_dist");
+ std::ifstream input(distance.c_str(), std::ios::out | std::ios::binary);
+
+ if (input.good()) {
+ if (verbose) std::cout << "Reading distances..." << std::endl;
+ for (int i = 0; i < n; i++) {
+ for (int j = i; j < n; j++) {
+ input.read((char*)&d, 8);
+ distances[i][j] = d;
+ distances[j][i] = d;
+ }
+ }
+ input.close();
+ } else {
+ if (verbose) std::cout << "Computing distances..." << std::endl;
+ input.close();
+ std::ofstream output(distance, std::ios::out | std::ios::binary);
+ for (int i = 0; i < n; i++) {
+ int state = (int)floor(100 * (i * 1.0 + 1) / n) % 10;
+ if (state == 0 && verbose) std::cout << "\r" << state << "%" << std::flush;
+ for (int j = i; j < n; j++) {
+ double dis = ref_distance(point_cloud[i], point_cloud[j]);
+ distances[i][j] = dis;
+ distances[j][i] = dis;
+ output.write((char*)&dis, 8);
+ }
+ }
+ output.close();
+ if (verbose) std::cout << std::endl;
+ }
+ }
+
+ public: // Automatic tuning of Rips complex.
+ /** \brief Creates a graph G from a Rips complex whose threshold value is automatically tuned with subsampling---see
+ * \cite Carriere17c.
+ *
+ * @param[in] distance distance between data points.
+   * @param[in] N number of subsampling iterations (100 is a reasonable default value, but there is no guarantee on
+   * how to choose it).
+ * @result delta threshold used for computing the Rips complex.
+ *
+ */
+ template <typename Distance>
+ double set_graph_from_automatic_rips(Distance distance, int N = 100) {
+ int m = floor(n / std::exp((1 + rate_power) * std::log(std::log(n) / std::log(rate_constant))));
+ m = std::min(m, n - 1);
+ std::vector<int> samples(m);
+ double delta = 0;
+
+ if (verbose) std::cout << n << " points in R^" << data_dimension << std::endl;
+ if (verbose) std::cout << "Subsampling " << m << " points" << std::endl;
+
+ if (distances.size() == 0) compute_pairwise_distances(distance);
+
+ // #pragma omp parallel for
+ for (int i = 0; i < N; i++) {
+ SampleWithoutReplacement(n, m, samples);
+ double hausdorff_dist = 0;
+ for (int j = 0; j < n; j++) {
+ double mj = distances[j][samples[0]];
+ for (int k = 1; k < m; k++) mj = std::min(mj, distances[j][samples[k]]);
+ hausdorff_dist = std::max(hausdorff_dist, mj);
+ }
+ delta += hausdorff_dist / N;
+ }
+
+ if (verbose) std::cout << "delta = " << delta << std::endl;
+ Rips_complex rips_complex_from_points(point_cloud, delta, distance);
+ rips_complex_from_points.create_complex(st, 1);
+ fill_adjacency_matrix_from_st();
+
+ return delta;
+ }
+
+ // *******************************************************************************************************************
+ // Functions.
+ // *******************************************************************************************************************
+
+ public: // Set function from file.
+ /** \brief Creates the function f from a file containing the function values.
+ *
+ * @param[in] func_file_name name of the input function file.
+ *
+ */
+ void set_function_from_file(const std::string& func_file_name) {
+ int vertex_id = 0;
+ std::ifstream input(func_file_name);
+ std::string line;
+ double f;
+ while (std::getline(input, line)) {
+ std::stringstream stream(line);
+ stream >> f;
+ func.emplace(vertex_id, f);
+ vertex_id++;
+ }
+ functional_cover = true;
+ cover_name = func_file_name;
+ }
+
+ public: // Set function from kth coordinate
+ /** \brief Creates the function f from the k-th coordinate of the point cloud P.
+ *
+   * @param[in] k coordinate to use (starting at 0).
+ *
+ */
+ void set_function_from_coordinate(int k) {
+ for (int i = 0; i < n; i++) func.emplace(i, point_cloud[i][k]);
+ char coordinate[100];
+ sprintf(coordinate, "coordinate %d", k);
+ functional_cover = true;
+ cover_name = coordinate;
+ }
+
+ public: // Set function from vector.
+ /** \brief Creates the function f from a vector stored in memory.
+ *
+ * @param[in] function input vector of values.
+ *
+ */
+ template <class InputRange>
+ void set_function_from_range(InputRange const& function) {
+ functional_cover = true;
+ int index = 0;
+ for (auto v : function) {
+ func.emplace(index, v);
+ index++;
+ }
+ }
+
+ // *******************************************************************************************************************
+ // Covers.
+ // *******************************************************************************************************************
+
+ public: // Automatic tuning of resolution.
+ /** \brief Computes the optimal length of intervals
+ * (i.e. the smallest interval length avoiding discretization artifacts---see \cite Carriere17c) for a functional
+ * cover.
+ *
+ * @result reso interval length used to compute the cover.
+ *
+ */
+ double set_automatic_resolution() {
+ if (!functional_cover) {
+ std::cout << "Cover needs to come from the preimages of a function." << std::endl;
+ return 0;
+ }
+ if (type != "Nerve" && type != "GIC") {
+ std::cout << "Type of complex needs to be specified." << std::endl;
+ return 0;
+ }
+
+ double reso = 0;
+
+ if (type == "GIC") {
+ for (auto simplex : st.complex_simplex_range()) {
+ if (st.dimension(simplex) == 1) {
+ std::vector<int> vertices;
+ for (auto vertex : st.simplex_vertex_range(simplex)) vertices.push_back(vertex);
+ reso = std::max(reso, std::abs(func[vertices[0]] - func[vertices[1]]));
+ }
+ }
+ if (verbose) std::cout << "resolution = " << reso << std::endl;
+ resolution_double = reso;
+ }
+
+ if (type == "Nerve") {
+ for (auto simplex : st.complex_simplex_range()) {
+ if (st.dimension(simplex) == 1) {
+ std::vector<int> vertices;
+ for (auto vertex : st.simplex_vertex_range(simplex)) vertices.push_back(vertex);
+ reso = std::max(reso, (std::abs(func[vertices[0]] - func[vertices[1]])) / gain);
+ }
+ }
+ if (verbose) std::cout << "resolution = " << reso << std::endl;
+ resolution_double = reso;
+ }
+
+ return reso;
+ }
+
+ public:
+  /** \brief Sets the length of the intervals from a value stored in memory.
+ *
+ * @param[in] reso length of intervals.
+ *
+ */
+ void set_resolution_with_interval_length(double reso) { resolution_double = reso; }
+  /** \brief Sets the number of intervals from a value stored in memory.
+ *
+ * @param[in] reso number of intervals.
+ *
+ */
+ void set_resolution_with_interval_number(int reso) { resolution_int = reso; }
+  /** \brief Sets the gain from a value stored in memory (default value 0.3).
+ *
+ * @param[in] g gain.
+ *
+ */
+ void set_gain(double g = 0.3) { gain = g; }
+
+ public: // Set cover with preimages of function.
+ /** \brief Creates a cover C from the preimages of the function f.
+ *
+ */
+ void set_cover_from_function() {
+ if (resolution_double == -1 && resolution_int == -1) {
+ std::cout << "Number and/or length of intervals not specified" << std::endl;
+ return;
+ }
+ if (gain == -1) {
+ std::cout << "Gain not specified" << std::endl;
+ return;
+ }
+
+ // Read function values and compute min and max
+ std::map<int, double>::iterator it;
+ double maxf, minf;
+    minf = std::numeric_limits<double>::max();
+    maxf = std::numeric_limits<double>::lowest();  // lowest(), not min(): min() is the smallest positive double
+ for (it = func.begin(); it != func.end(); it++) {
+ minf = std::min(minf, it->second);
+ maxf = std::max(maxf, it->second);
+ }
+ int n = func.size();
+ if (verbose) std::cout << "Min function value = " << minf << " and Max function value = " << maxf << std::endl;
+
+ // Compute cover of im(f)
+ std::vector<std::pair<double, double> > intervals;
+ int res;
+
+ if (resolution_double == -1) { // Case we use an integer for the number of intervals.
+ double incr = (maxf - minf) / resolution_int;
+ double x = minf;
+ double alpha = (incr * gain) / (2 - 2 * gain);
+ double y = minf + incr + alpha;
+ std::pair<double, double> interm(x, y);
+ intervals.push_back(interm);
+ for (int i = 1; i < resolution_int - 1; i++) {
+ x = minf + i * incr - alpha;
+ y = minf + (i + 1) * incr + alpha;
+ std::pair<double, double> inter(x, y);
+ intervals.push_back(inter);
+ }
+ x = minf + (resolution_int - 1) * incr - alpha;
+ y = maxf;
+ std::pair<double, double> interM(x, y);
+ intervals.push_back(interM);
+ res = intervals.size();
+ if (verbose) {
+ for (int i = 0; i < res; i++)
+ std::cout << "Interval " << i << " = [" << intervals[i].first << ", " << intervals[i].second << "]"
+ << std::endl;
+ }
+ } else {
+ if (resolution_int == -1) { // Case we use a double for the length of the intervals.
+ double x = minf;
+ double y = x + resolution_double;
+ while (y <= maxf && maxf - (y - gain * resolution_double) >= resolution_double) {
+ std::pair<double, double> inter(x, y);
+ intervals.push_back(inter);
+ x = y - gain * resolution_double;
+ y = x + resolution_double;
+ }
+ std::pair<double, double> interM(x, maxf);
+ intervals.push_back(interM);
+ res = intervals.size();
+ if (verbose) {
+ for (int i = 0; i < res; i++)
+ std::cout << "Interval " << i << " = [" << intervals[i].first << ", " << intervals[i].second << "]"
+ << std::endl;
+ }
+ } else { // Case we use an integer and a double for the length of the intervals.
+ double x = minf;
+ double y = x + resolution_double;
+ int count = 0;
+ while (count < resolution_int && y <= maxf && maxf - (y - gain * resolution_double) >= resolution_double) {
+ std::pair<double, double> inter(x, y);
+ intervals.push_back(inter);
+ count++;
+ x = y - gain * resolution_double;
+ y = x + resolution_double;
+ }
+ res = intervals.size();
+ if (verbose) {
+ for (int i = 0; i < res; i++)
+ std::cout << "Interval " << i << " = [" << intervals[i].first << ", " << intervals[i].second << "]"
+ << std::endl;
+ }
+ }
+ }
+
+ // Sort points according to function values
+ std::vector<int> points(n);
+ for (int i = 0; i < n; i++) points[i] = i;
+ std::sort(points.begin(), points.end(), Less(this->func));
+ int id = 0;
+ int pos = 0;
+
+ for (int i = 0; i < res; i++) {
+ // Find points in the preimage
+ std::map<int, std::vector<int> > prop;
+ std::pair<double, double> inter1 = intervals[i];
+ int tmp = pos;
+
+ if (i != res - 1) {
+ if (i != 0) {
+ std::pair<double, double> inter3 = intervals[i - 1];
+          while (tmp != n && func[points[tmp]] < inter3.second) {  // test tmp != n first to avoid reading past the end of points
+ prop[points[tmp]] = adjacency_matrix[points[tmp]];
+ tmp++;
+ }
+ }
+
+ std::pair<double, double> inter2 = intervals[i + 1];
+        while (tmp != n && func[points[tmp]] < inter2.first) {
+ prop[points[tmp]] = adjacency_matrix[points[tmp]];
+ tmp++;
+ }
+
+ pos = tmp;
+        while (tmp != n && func[points[tmp]] < inter1.second) {
+ prop[points[tmp]] = adjacency_matrix[points[tmp]];
+ tmp++;
+ }
+
+ } else {
+ std::pair<double, double> inter3 = intervals[i - 1];
+        while (tmp != n && func[points[tmp]] < inter3.second) {
+ prop[points[tmp]] = adjacency_matrix[points[tmp]];
+ tmp++;
+ }
+
+ while (tmp != n) {
+ prop[points[tmp]] = adjacency_matrix[points[tmp]];
+ tmp++;
+ }
+ }
+
+ // Compute the connected components with DFS
+ std::map<int, bool> visit;
+ if (verbose) std::cout << "Preimage of interval " << i << std::endl;
+ for (std::map<int, std::vector<int> >::iterator it = prop.begin(); it != prop.end(); it++)
+ visit[it->first] = false;
+ if (!(prop.empty())) {
+ for (std::map<int, std::vector<int> >::iterator it = prop.begin(); it != prop.end(); it++) {
+ if (!(visit[it->first])) {
+ std::vector<int> cc;
+ cc.clear();
+ dfs(prop, it->first, cc, visit);
+ int cci = cc.size();
+ if (verbose) std::cout << "one CC with " << cci << " points, ";
+ double average_col = 0;
+ for (int j = 0; j < cci; j++) {
+ cover[cc[j]].push_back(id);
+ cover_back[id].push_back(cc[j]);
+ average_col += func_color[cc[j]] / cci;
+ }
+ cover_fct[id] = i;
+ cover_color[id] = std::pair<int, double>(cci, average_col);
+ id++;
+ }
+ }
+ }
+ if (verbose) std::cout << std::endl;
+ }
+
+ maximal_dim = id - 1;
+ }
+
+ public: // Set cover from file.
+  /** \brief Creates the cover C from a file containing the cover elements of each point (one line per point, in the
+   * same order as in the input point cloud file!).
+ *
+ * @param[in] cover_file_name name of the input cover file.
+ *
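+   * Each line corresponds to one point and may contain several cover element IDs.
+   * For instance, the test file data/cover shipped with this module reads:
+   * \code
+   * 1
+   * 2
+   * 3
+   * \endcode
+   *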
+ */
+ void set_cover_from_file(const std::string& cover_file_name) {
+ int vertex_id = 0;
+ Cover_t cov;
+ std::vector<Cover_t> cov_elts, cov_number;
+ std::ifstream input(cover_file_name);
+ std::string line;
+ while (std::getline(input, line)) {
+ cov_elts.clear();
+ std::stringstream stream(line);
+ while (stream >> cov) {
+ cov_elts.push_back(cov);
+ cov_number.push_back(cov);
+ cover_fct[cov] = cov;
+ cover_color[cov].second += func_color[vertex_id];
+ cover_color[cov].first++;
+ cover_back[cov].push_back(vertex_id);
+ }
+ cover[vertex_id] = cov_elts;
+ vertex_id++;
+ }
+ std::vector<Cover_t>::iterator it;
+ std::sort(cov_number.begin(), cov_number.end());
+ it = std::unique(cov_number.begin(), cov_number.end());
+ cov_number.resize(std::distance(cov_number.begin(), it));
+ maximal_dim = cov_number.size() - 1;
+ for (int i = 0; i <= maximal_dim; i++) cover_color[i].second /= cover_color[i].first;
+ cover_name = cover_file_name;
+ }
+
+ public: // Set cover from Voronoi
+ /** \brief Creates the cover C from the Voronoï cells of a subsampling of the point cloud.
+ *
+ * @param[in] distance distance between the points.
+ * @param[in] m number of points in the subsample.
+ *
+ */
+ template <typename Distance>
+ void set_cover_from_Voronoi(Distance distance, int m = 100) {
+ voronoi_subsamples.resize(m);
+ SampleWithoutReplacement(n, m, voronoi_subsamples);
+ if (distances.size() == 0) compute_pairwise_distances(distance);
+ std::vector<double> mindist(n);
+ for (int j = 0; j < n; j++) mindist[j] = std::numeric_limits<double>::max();
+
+ // Compute the geodesic distances to subsamples with Dijkstra
+ for (int i = 0; i < m; i++) {
+ if (verbose) std::cout << "Computing geodesic distances to seed " << i << "..." << std::endl;
+ int seed = voronoi_subsamples[i];
+ std::vector<double> dist(n);
+ std::vector<int> process(n);
+ for (int j = 0; j < n; j++) {
+ dist[j] = std::numeric_limits<double>::max();
+ process[j] = j;
+ }
+ dist[seed] = 0;
+ int curr_size = process.size();
+ int min_point, min_index;
+ double min_dist;
+ std::vector<int> neighbors;
+ int num_neighbors;
+
+ while (curr_size > 0) {
+ min_dist = std::numeric_limits<double>::max();
+ min_index = -1;
+ min_point = -1;
+ for (int j = 0; j < curr_size; j++) {
+ if (dist[process[j]] < min_dist) {
+ min_point = process[j];
+ min_dist = dist[process[j]];
+ min_index = j;
+ }
+ }
+ assert(min_index != -1);
+ process.erase(process.begin() + min_index);
+ assert(min_point != -1);
+ neighbors = adjacency_matrix[min_point];
+ num_neighbors = neighbors.size();
+ for (int j = 0; j < num_neighbors; j++) {
+ double d = dist[min_point] + distances[min_point][neighbors[j]];
+ dist[neighbors[j]] = std::min(dist[neighbors[j]], d);
+ }
+ curr_size = process.size();
+ }
+
+ for (int j = 0; j < n; j++)
+ if (mindist[j] > dist[j]) {
+ mindist[j] = dist[j];
+ if (cover[j].size() == 0)
+ cover[j].push_back(i);
+ else
+ cover[j][0] = i;
+ }
+ }
+
+ for (int i = 0; i < n; i++) {
+ cover_back[cover[i][0]].push_back(i);
+ cover_color[cover[i][0]].second += func_color[i];
+ cover_color[cover[i][0]].first++;
+ }
+ for (int i = 0; i < m; i++) cover_color[i].second /= cover_color[i].first;
+ maximal_dim = m - 1;
+ cover_name = "Voronoi";
+ }
+
+ public: // return subset of data corresponding to a node
+ /** \brief Returns the data subset corresponding to a specific node of the created complex.
+ *
+ * @param[in] c ID of the node.
+ * @result cover_back(c) vector of IDs of data points.
+ *
+ */
+ const std::vector<int>& subpopulation(Cover_t c) { return cover_back[c]; }
+
+ // *******************************************************************************************************************
+ // Visualization.
+ // *******************************************************************************************************************
+
+ public: // Set color from file.
+ /** \brief Computes the function used to color the nodes of the simplicial complex from a file containing the function
+ * values.
+ *
+ * @param[in] color_file_name name of the input color file.
+ *
+ */
+ void set_color_from_file(const std::string& color_file_name) {
+ int vertex_id = 0;
+ std::ifstream input(color_file_name);
+ std::string line;
+ double f;
+ while (std::getline(input, line)) {
+ std::stringstream stream(line);
+ stream >> f;
+ func_color.emplace(vertex_id, f);
+ vertex_id++;
+ }
+ color_name = color_file_name;
+ }
+
+ public: // Set color from kth coordinate
+ /** \brief Computes the function used to color the nodes of the simplicial complex from the k-th coordinate.
+ *
+   * @param[in] k coordinate to use (starting at 0).
+ *
+ */
+ void set_color_from_coordinate(int k = 0) {
+ for (int i = 0; i < n; i++) func_color.emplace(i, point_cloud[i][k]);
+ color_name = "coordinate ";
+ color_name.append(std::to_string(k));
+ }
+
+ public: // Set color from vector.
+ /** \brief Computes the function used to color the nodes of the simplicial complex from a vector stored in memory.
+ *
+ * @param[in] color input vector of values.
+ *
+ */
+ void set_color_from_vector(std::vector<double> color) {
+ for (unsigned int i = 0; i < color.size(); i++) func_color.emplace(i, color[i]);
+ }
+
+ public: // Create a .dot file that can be compiled with neato to produce a .pdf file.
+  /** \brief Creates a file called SC.dot, to be processed with neato (part of the graphviz package)
+   * once the simplicial complex is computed, in order to obtain a .pdf visualization
+   * of its 1-skeleton.
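+   * A typical command to produce the .pdf (given only as an illustration, assuming graphviz
+   * is installed; any graphviz layout engine can be used) is:
+   * \code $> neato -Tpdf SC.dot -o SC.pdf \endcode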
+ */
+ void plot_DOT() {
+ char mapp[11] = "SC.dot";
+ std::ofstream graphic(mapp);
+ graphic << "graph GIC {" << std::endl;
+ double maxv, minv;
+    maxv = std::numeric_limits<double>::lowest();  // lowest(), not min(): min() is the smallest positive double
+ minv = std::numeric_limits<double>::max();
+ for (std::map<Cover_t, std::pair<int, double> >::iterator iit = cover_color.begin(); iit != cover_color.end();
+ iit++) {
+ maxv = std::max(maxv, iit->second.second);
+ minv = std::min(minv, iit->second.second);
+ }
+ int k = 0;
+ std::vector<int> nodes;
+ nodes.clear();
+ for (std::map<Cover_t, std::pair<int, double> >::iterator iit = cover_color.begin(); iit != cover_color.end();
+ iit++) {
+ if (iit->second.first > mask) {
+ nodes.push_back(iit->first);
+ graphic << iit->first << "[shape=circle fontcolor=black color=black label=\"" << iit->first << ":"
+ << iit->second.first << "\" style=filled fillcolor=\""
+ << (1 - (maxv - iit->second.second) / (maxv - minv)) * 0.6 << ", 1, 1\"]" << std::endl;
+ k++;
+ }
+ }
+ int ke = 0;
+ int num_simplices = simplices.size();
+ for (int i = 0; i < num_simplices; i++)
+ if (simplices[i].size() == 2) {
+ if (cover_color[simplices[i][0]].first > mask && cover_color[simplices[i][1]].first > mask) {
+ graphic << " " << simplices[i][0] << " -- " << simplices[i][1] << " [weight=15];" << std::endl;
+ ke++;
+ }
+ }
+ graphic << "}";
+ graphic.close();
+ std::cout << "SC.dot generated. It can be visualized with e.g. neato." << std::endl;
+ }
+
+ public: // Create a .txt file that can be compiled with KeplerMapper.
+ /** \brief Creates a .txt file called SC.txt describing the 1-skeleton, which can then be plotted with e.g.
+ * KeplerMapper.
+ */
+ void write_info() {
+ int num_simplices = simplices.size();
+ int num_edges = 0;
+ char mapp[11] = "SC.txt";
+ std::ofstream graphic(mapp);
+ for (int i = 0; i < num_simplices; i++)
+ if (simplices[i].size() == 2)
+ if (cover_color[simplices[i][0]].first > mask && cover_color[simplices[i][1]].first > mask) num_edges++;
+
+ graphic << point_cloud_name << std::endl;
+ graphic << cover_name << std::endl;
+ graphic << color_name << std::endl;
+ graphic << resolution_double << " " << gain << std::endl;
+ graphic << cover_color.size() << " " << num_edges << std::endl;
+
+ for (std::map<Cover_t, std::pair<int, double> >::iterator iit = cover_color.begin(); iit != cover_color.end();
+ iit++)
+ graphic << iit->first << " " << iit->second.second << " " << iit->second.first << std::endl;
+
+ for (int i = 0; i < num_simplices; i++)
+ if (simplices[i].size() == 2)
+ if (cover_color[simplices[i][0]].first > mask && cover_color[simplices[i][1]].first > mask)
+ graphic << simplices[i][0] << " " << simplices[i][1] << std::endl;
+ graphic.close();
+ std::cout << "SC.txt generated. It can be visualized with e.g. python KeplerMapperVisuFromTxtFile.py and firefox."
+ << std::endl;
+ }
+
+ public: // Create a .off file that can be visualized (e.g. with Geomview).
+ /** \brief Creates a .off file called SC.off for 3D visualization, which contains the 2-skeleton of the GIC.
+ * This function assumes that the cover has been computed with Voronoi. If data points are in 1D or 2D,
+ * the remaining coordinates of the points embedded in 3D are set to 0.
+ */
+ void plot_OFF() {
+ assert(cover_name == "Voronoi");
+ char gic[11] = "SC.off";
+ std::ofstream graphic(gic);
+ graphic << "OFF" << std::endl;
+ int m = voronoi_subsamples.size();
+ int numedges = 0;
+ int numfaces = 0;
+ std::vector<std::vector<int> > edges, faces;
+ int numsimplices = simplices.size();
+ for (int i = 0; i < numsimplices; i++) {
+ if (simplices[i].size() == 2) {
+ numedges++;
+ edges.push_back(simplices[i]);
+ }
+ if (simplices[i].size() == 3) {
+ numfaces++;
+ faces.push_back(simplices[i]);
+ }
+ }
+    graphic << m << " " << numedges + numfaces << " 0" << std::endl;  // vertex, face and (unused) edge counts of the OFF header
+ for (int i = 0; i < m; i++) {
+ if (data_dimension <= 3) {
+ for (int j = 0; j < data_dimension; j++) graphic << point_cloud[voronoi_subsamples[i]][j] << " ";
+ for (int j = data_dimension; j < 3; j++) graphic << 0 << " ";
+ graphic << std::endl;
+ } else {
+      for (int j = 0; j < 3; j++) graphic << point_cloud[voronoi_subsamples[i]][j] << " ";
+      graphic << std::endl;
+ }
+ }
+ for (int i = 0; i < numedges; i++) graphic << 2 << " " << edges[i][0] << " " << edges[i][1] << std::endl;
+ for (int i = 0; i < numfaces; i++)
+ graphic << 3 << " " << faces[i][0] << " " << faces[i][1] << " " << faces[i][2] << std::endl;
+ graphic.close();
+ std::cout << "SC.off generated. It can be visualized with e.g. geomview." << std::endl;
+ }
+
+ // *******************************************************************************************************************
+ // *******************************************************************************************************************
+
+ public:
+ /** \brief Creates the simplicial complex.
+ *
+ * @param[in] complex SimplicialComplexForRips to be created.
+ *
+ */
+ template <typename SimplicialComplexForRips>
+ void create_complex(SimplicialComplexForRips& complex) {
+ unsigned int dimension = 0;
+ for (auto const& simplex : simplices) {
+ complex.insert_simplex_and_subfaces(simplex);
+ if (dimension < simplex.size() - 1) dimension = simplex.size() - 1;
+ }
+ complex.set_dimension(dimension);
+ }
+
+ public:
+ /** \brief Computes the simplices of the simplicial complex.
+ */
+ void find_simplices() {
+ if (type != "Nerve" && type != "GIC") {
+ std::cout << "Type of complex needs to be specified." << std::endl;
+ return;
+ }
+
+ if (type == "Nerve") {
+ for (std::map<int, std::vector<Cover_t> >::iterator it = cover.begin(); it != cover.end(); it++)
+ simplices.push_back(it->second);
+ std::vector<std::vector<Cover_t> >::iterator it;
+ std::sort(simplices.begin(), simplices.end());
+ it = std::unique(simplices.begin(), simplices.end());
+ simplices.resize(std::distance(simplices.begin(), it));
+ }
+
+ if (type == "GIC") {
+ if (functional_cover) {
+ // Computes the simplices in the GIC by looking at all the edges of the graph and adding the
+ // corresponding edges in the GIC if the images of the endpoints belong to consecutive intervals.
+
+ if (gain >= 0.5)
+ throw std::invalid_argument(
+ "the output of this function is correct ONLY if the cover is minimal, i.e. the gain is less than 0.5.");
+
+ int v1, v2;
+
+ // Loop on all points.
+ for (std::map<int, std::vector<Cover_t> >::iterator it = cover.begin(); it != cover.end(); it++) {
+ int vid = it->first;
+ std::vector<int> neighbors = adjacency_matrix[vid];
+ int num_neighb = neighbors.size();
+
+ // Find cover of current point (vid).
+ if (cover[vid].size() == 2)
+ v1 = std::min(cover[vid][0], cover[vid][1]);
+ else
+ v1 = cover[vid][0];
+ std::vector<int> node(1);
+ node[0] = v1;
+ simplices.push_back(node);
+
+ // Loop on neighbors.
+ for (int i = 0; i < num_neighb; i++) {
+ int neighb = neighbors[i];
+
+ // Find cover of neighbor (neighb).
+ if (cover[neighb].size() == 2)
+ v2 = std::max(cover[neighb][0], cover[neighb][1]);
+ else
+ v2 = cover[neighb][0];
+
+ // If neighbor is in next interval, add edge.
+ if (cover_fct[v2] == cover_fct[v1] + 1) {
+ std::vector<int> edge(2);
+ edge[0] = v1;
+ edge[1] = v2;
+ simplices.push_back(edge);
+ }
+ }
+ }
+ std::vector<std::vector<Cover_t> >::iterator it;
+ std::sort(simplices.begin(), simplices.end());
+ it = std::unique(simplices.begin(), simplices.end());
+ simplices.resize(std::distance(simplices.begin(), it));
+
+ } else {
+ // Find IDs of edges to remove
+ std::vector<int> simplex_to_remove;
+ int simplex_id = 0;
+ for (auto simplex : st.complex_simplex_range()) {
+ if (st.dimension(simplex) == 1) {
+ std::vector<std::vector<Cover_t> > comp;
+ for (auto vertex : st.simplex_vertex_range(simplex)) comp.push_back(cover[vertex]);
+ if (comp[0].size() == 1 && comp[0] == comp[1]) simplex_to_remove.push_back(simplex_id);
+ }
+ simplex_id++;
+ }
+
+ // Remove edges
+ if (simplex_to_remove.size() > 1) {
+ int current_id = 1;
+ auto simplex = st.complex_simplex_range().begin();
+ int num_rem = 0;
+ for (int i = 0; i < simplex_id - 1; i++) {
+ int j = i + 1;
+ auto simplex_tmp = simplex;
+ simplex_tmp++;
+        if (current_id < static_cast<int>(simplex_to_remove.size()) && j == simplex_to_remove[current_id]) {
+ st.remove_maximal_simplex(*simplex_tmp);
+ current_id++;
+ num_rem++;
+ } else {
+ simplex++;
+ }
+ }
+ simplex = st.complex_simplex_range().begin();
+ for (int i = 0; i < simplex_to_remove[0]; i++) simplex++;
+ st.remove_maximal_simplex(*simplex);
+ }
+
+ // Build the Simplex Tree corresponding to the graph
+ st.expansion(maximal_dim);
+
+ // Find simplices of GIC
+ simplices.clear();
+ for (auto simplex : st.complex_simplex_range()) {
+ if (!st.has_children(simplex)) {
+ std::vector<Cover_t> simplx;
+ for (auto vertex : st.simplex_vertex_range(simplex)) {
+ unsigned int sz = cover[vertex].size();
+ for (unsigned int i = 0; i < sz; i++) {
+ simplx.push_back(cover[vertex][i]);
+ }
+ }
+
+ std::sort(simplx.begin(), simplx.end());
+ std::vector<Cover_t>::iterator it = std::unique(simplx.begin(), simplx.end());
+ simplx.resize(std::distance(simplx.begin(), it));
+ simplices.push_back(simplx);
+ }
+ }
+ std::vector<std::vector<Cover_t> >::iterator it;
+ std::sort(simplices.begin(), simplices.end());
+ it = std::unique(simplices.begin(), simplices.end());
+ simplices.resize(std::distance(simplices.begin(), it));
+ }
+ }
+ }
+};
+
+} // namespace cover_complex
+
+} // namespace Gudhi
+
+#endif // GIC_H_
diff --git a/src/Nerve_GIC/test/CMakeLists.txt b/src/Nerve_GIC/test/CMakeLists.txt
new file mode 100644
index 00000000..03fe47ca
--- /dev/null
+++ b/src/Nerve_GIC/test/CMakeLists.txt
@@ -0,0 +1,14 @@
+cmake_minimum_required(VERSION 2.6)
+project(Graph_induced_complex_tests)
+
+include(GUDHI_test_coverage)
+
+add_executable ( Nerve_GIC_test_unit test_GIC.cpp )
+target_link_libraries(Nerve_GIC_test_unit ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
+if (TBB_FOUND)
+ target_link_libraries(Nerve_GIC_test_unit ${TBB_LIBRARIES})
+endif()
+
+file(COPY data DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+
+gudhi_add_coverage_test(Nerve_GIC_test_unit)
diff --git a/src/Nerve_GIC/test/data/cloud b/src/Nerve_GIC/test/data/cloud
new file mode 100644
index 00000000..4a0c170d
--- /dev/null
+++ b/src/Nerve_GIC/test/data/cloud
@@ -0,0 +1,6 @@
+nOFF
+3
+3 0 0
+0 0 0
+2 1 0
+4 0 0 \ No newline at end of file
diff --git a/src/Nerve_GIC/test/data/cover b/src/Nerve_GIC/test/data/cover
new file mode 100644
index 00000000..5f5fbe75
--- /dev/null
+++ b/src/Nerve_GIC/test/data/cover
@@ -0,0 +1,3 @@
+1
+2
+3 \ No newline at end of file
diff --git a/src/Nerve_GIC/test/data/graph b/src/Nerve_GIC/test/data/graph
new file mode 100644
index 00000000..37142800
--- /dev/null
+++ b/src/Nerve_GIC/test/data/graph
@@ -0,0 +1,3 @@
+0 1
+0 2
+1 2 \ No newline at end of file
diff --git a/src/Nerve_GIC/test/test_GIC.cpp b/src/Nerve_GIC/test/test_GIC.cpp
new file mode 100644
index 00000000..a8b1e7f7
--- /dev/null
+++ b/src/Nerve_GIC/test/test_GIC.cpp
@@ -0,0 +1,90 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Mathieu Carrière
+ *
+ * Copyright (C) 2017 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#define BOOST_TEST_DYN_LINK
+#define BOOST_TEST_MODULE "graph_induced_complex"
+
+#include <boost/test/unit_test.hpp>
+#include <cmath> // float comparison
+#include <limits>
+#include <string>
+#include <vector>
+#include <algorithm> // std::max
+#include <gudhi/GIC.h>
+#include <gudhi/distance_functions.h>
+#include <gudhi/reader_utils.h>
+
+BOOST_AUTO_TEST_CASE(check_nerve) {
+ using Point = std::vector<float>;
+ Gudhi::cover_complex::Cover_complex<Point> N;
+ N.set_type("Nerve");
+ std::string cloud_file_name("data/cloud");
+ N.read_point_cloud(cloud_file_name);
+ std::string graph_file_name("data/graph");
+ N.set_graph_from_file(graph_file_name);
+ std::string cover_file_name("data/cover");
+ N.set_cover_from_file(cover_file_name);
+ N.find_simplices();
+ Gudhi::Simplex_tree<> stree;
+ N.create_complex(stree);
+
+ BOOST_CHECK(stree.num_vertices() == 3);
+ BOOST_CHECK((stree.num_simplices() - stree.num_vertices()) == 0);
+ BOOST_CHECK(stree.dimension() == 0);
+}
+
+BOOST_AUTO_TEST_CASE(check_GIC) {
+ using Point = std::vector<float>;
+ Gudhi::cover_complex::Cover_complex<Point> GIC;
+ GIC.set_type("GIC");
+ std::string cloud_file_name("data/cloud");
+ GIC.read_point_cloud(cloud_file_name);
+ std::string graph_file_name("data/graph");
+ GIC.set_graph_from_file(graph_file_name);
+ std::string cover_file_name("data/cover");
+ GIC.set_cover_from_file(cover_file_name);
+ GIC.find_simplices();
+ Gudhi::Simplex_tree<> stree;
+ GIC.create_complex(stree);
+
+ BOOST_CHECK(stree.num_vertices() == 3);
+ BOOST_CHECK((stree.num_simplices() - stree.num_vertices()) == 4);
+ BOOST_CHECK(stree.dimension() == 2);
+}
+
+BOOST_AUTO_TEST_CASE(check_voronoiGIC) {
+ using Point = std::vector<float>;
+ Gudhi::cover_complex::Cover_complex<Point> GIC;
+ GIC.set_type("GIC");
+ std::string cloud_file_name("data/cloud");
+ GIC.read_point_cloud(cloud_file_name);
+ std::string graph_file_name("data/graph");
+ GIC.set_graph_from_file(graph_file_name);
+ GIC.set_cover_from_Voronoi(Gudhi::Euclidean_distance(), 2);
+ GIC.find_simplices();
+ Gudhi::Simplex_tree<> stree;
+ GIC.create_complex(stree);
+
+ BOOST_CHECK(stree.num_vertices() == 2);
+ BOOST_CHECK((stree.num_simplices() - stree.num_vertices()) == 1);
+ BOOST_CHECK(stree.dimension() == 1);
+}
diff --git a/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h b/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h
index 6400116b..62bbbfc5 100644
--- a/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h
+++ b/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h
@@ -143,8 +143,8 @@ namespace persistent_cohomology {
We provide several example files: run these examples with -h for details on their use, and read the README file.
-\li <a href="_persistent_cohomology_2rips_persistence_8cpp-example.html">
-Persistent_cohomology/rips_persistence.cpp</a> computes the Rips complex of a point cloud and outputs its persistence
+\li <a href="_rips_complex_2rips_persistence_8cpp-example.html">
+Rips_complex/rips_persistence.cpp</a> computes the Rips complex of a point cloud and outputs its persistence
diagram.
\code $> ./rips_persistence ../../data/points/tore3D_1307.off -r 0.25 -m 0.5 -d 3 -p 3 \endcode
\code The complex contains 177838 simplices
@@ -158,12 +158,12 @@ diagram.
Persistent_cohomology/rips_multifield_persistence.cpp</a> computes the Rips complex of a point cloud and outputs its
persistence diagram with a family of field coefficients.
-\li <a href="_persistent_cohomology_2rips_distance_matrix_persistence_8cpp-example.html">
-Persistent_cohomology/rips_distance_matrix_persistence.cpp</a> computes the Rips complex of a distance matrix and
+\li <a href="_rips_complex_2rips_distance_matrix_persistence_8cpp-example.html">
+Rips_complex/rips_distance_matrix_persistence.cpp</a> computes the Rips complex of a distance matrix and
outputs its persistence diagram.
-\li <a href="_persistent_cohomology_2alpha_complex_3d_persistence_8cpp-example.html">
-Persistent_cohomology/alpha_complex_3d_persistence.cpp</a> computes the persistent homology with
+\li <a href="_alpha_complex_2alpha_complex_3d_persistence_8cpp-example.html">
+Alpha_complex/alpha_complex_3d_persistence.cpp</a> computes the persistent homology with
\f$\mathbb{Z}/2\mathbb{Z}\f$ coefficients of the alpha complex on points sampling from an OFF file.
\code $> ./alpha_complex_3d_persistence ../../data/points/tore3D_300.off 2 0.45 \endcode
\code Simplex_tree dim: 3
@@ -172,8 +172,8 @@ Persistent_cohomology/alpha_complex_3d_persistence.cpp</a> computes the persiste
2 1 0.0934117 1.00003
2 2 0.56444 1.03938 \endcode
-\li <a href="_persistent_cohomology_2exact_alpha_complex_3d_persistence_8cpp-example.html">
-Persistent_cohomology/exact_alpha_complex_3d_persistence.cpp</a> computes the persistent homology with
+\li <a href="_alpha_complex_2exact_alpha_complex_3d_persistence_8cpp-example.html">
+Alpha_complex/exact_alpha_complex_3d_persistence.cpp</a> computes the persistent homology with
\f$\mathbb{Z}/2\mathbb{Z}\f$ coefficients of the alpha complex on points sampling from an OFF file.
Here, as CGAL computes the exact values, it is slower, but it is necessary when points are on a grid
for instance.
@@ -182,8 +182,8 @@ for instance.
2 0 0 inf
2 2 0.0002 0.2028 \endcode
-\li <a href="_persistent_cohomology_2weighted_alpha_complex_3d_persistence_8cpp-example.html">
-Persistent_cohomology/weighted_alpha_complex_3d_persistence.cpp</a> computes the persistent homology with
+\li <a href="_alpha_complex_2weighted_alpha_complex_3d_persistence_8cpp-example.html">
+Alpha_complex/weighted_alpha_complex_3d_persistence.cpp</a> computes the persistent homology with
\f$\mathbb{Z}/2\mathbb{Z}\f$ coefficients of the weighted alpha complex on points sampling from an OFF file
and a weights file.
\code $> ./weighted_alpha_complex_3d_persistence ../../data/points/tore3D_300.off
@@ -194,8 +194,8 @@ and a weights file.
2 1 -0.906588 2.60165e-05
2 2 -0.43556 0.0393798 \endcode
-\li <a href="_persistent_cohomology_2alpha_complex_persistence_8cpp-example.html">
-Persistent_cohomology/alpha_complex_persistence.cpp</a> computes the persistent homology with
+\li <a href="_alpha_complex_2alpha_complex_persistence_8cpp-example.html">
+Alpha_complex/alpha_complex_persistence.cpp</a> computes the persistent homology with
\f$\mathbb{Z}/p\mathbb{Z}\f$ coefficients of the alpha complex on points sampling from an OFF file.
\code $> ./alpha_complex_persistence -r 32 -p 2 -m 0.45 ../../data/points/tore3D_300.off \endcode
\code Alpha complex is of dimension 3 - 9273 simplices - 300 vertices.
@@ -205,8 +205,8 @@ Simplex_tree dim: 3
2 1 0.0934117 1.00003
2 2 0.56444 1.03938 \endcode
-\li <a href="_persistent_cohomology_2periodic_alpha_complex_3d_persistence_8cpp-example.html">
-Persistent_cohomology/periodic_alpha_complex_3d_persistence.cpp</a> computes the persistent homology with
+\li <a href="_alpha_complex_2periodic_alpha_complex_3d_persistence_8cpp-example.html">
+Alpha_complex/periodic_alpha_complex_3d_persistence.cpp</a> computes the persistent homology with
\f$\mathbb{Z}/2\mathbb{Z}\f$ coefficients of the periodic alpha complex on points sampling from an OFF file.
\code $> ./periodic_alpha_complex_3d_persistence ../../data/points/grid_10_10_10_in_0_1.off
../../data/points/iso_cuboid_3_in_0_1.txt 3 1.0 \endcode
diff --git a/src/Persistent_cohomology/example/CMakeLists.txt b/src/Persistent_cohomology/example/CMakeLists.txt
index e82ef04c..18e2913b 100644
--- a/src/Persistent_cohomology/example/CMakeLists.txt
+++ b/src/Persistent_cohomology/example/CMakeLists.txt
@@ -5,12 +5,6 @@ add_executable(plain_homology plain_homology.cpp)
add_executable(persistence_from_simple_simplex_tree persistence_from_simple_simplex_tree.cpp)
-add_executable(rips_distance_matrix_persistence rips_distance_matrix_persistence.cpp)
-target_link_libraries(rips_distance_matrix_persistence ${Boost_PROGRAM_OPTIONS_LIBRARY})
-
-add_executable(rips_persistence rips_persistence.cpp)
-target_link_libraries(rips_persistence ${Boost_PROGRAM_OPTIONS_LIBRARY})
-
add_executable(rips_persistence_step_by_step rips_persistence_step_by_step.cpp)
target_link_libraries(rips_persistence_step_by_step ${Boost_PROGRAM_OPTIONS_LIBRARY})
@@ -23,8 +17,6 @@ target_link_libraries(persistence_from_file ${Boost_PROGRAM_OPTIONS_LIBRARY})
if (TBB_FOUND)
target_link_libraries(plain_homology ${TBB_LIBRARIES})
target_link_libraries(persistence_from_simple_simplex_tree ${TBB_LIBRARIES})
- target_link_libraries(rips_distance_matrix_persistence ${TBB_LIBRARIES})
- target_link_libraries(rips_persistence ${TBB_LIBRARIES})
target_link_libraries(rips_persistence_step_by_step ${TBB_LIBRARIES})
target_link_libraries(rips_persistence_via_boundary_matrix ${TBB_LIBRARIES})
target_link_libraries(persistence_from_file ${TBB_LIBRARIES})
@@ -33,10 +25,6 @@ endif()
add_test(NAME Persistent_cohomology_example_plain_homology COMMAND $<TARGET_FILE:plain_homology>)
add_test(NAME Persistent_cohomology_example_from_simple_simplex_tree COMMAND $<TARGET_FILE:persistence_from_simple_simplex_tree>
"1" "0")
-add_test(NAME Persistent_cohomology_example_from_rips_distance_matrix COMMAND $<TARGET_FILE:rips_distance_matrix_persistence>
- "${CMAKE_SOURCE_DIR}/data/distance_matrix/full_square_distance_matrix.csv" "-r" "1.0" "-d" "3" "-p" "3" "-m" "0")
-add_test(NAME Persistent_cohomology_example_from_rips_on_tore_3D COMMAND $<TARGET_FILE:rips_persistence>
- "${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" "-r" "0.25" "-m" "0.5" "-d" "3" "-p" "3")
add_test(NAME Persistent_cohomology_example_from_rips_step_by_step_on_tore_3D COMMAND $<TARGET_FILE:rips_persistence_step_by_step>
"${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" "-r" "0.25" "-m" "0.5" "-d" "3" "-p" "3")
add_test(NAME Persistent_cohomology_example_via_boundary_matrix COMMAND $<TARGET_FILE:rips_persistence_via_boundary_matrix>
@@ -48,8 +36,6 @@ add_test(NAME Persistent_cohomology_example_from_file_3_3_100 COMMAND $<TARGET_F
install(TARGETS plain_homology DESTINATION bin)
install(TARGETS persistence_from_simple_simplex_tree DESTINATION bin)
-install(TARGETS rips_distance_matrix_persistence DESTINATION bin)
-install(TARGETS rips_persistence DESTINATION bin)
install(TARGETS rips_persistence_step_by_step DESTINATION bin)
install(TARGETS rips_persistence_via_boundary_matrix DESTINATION bin)
install(TARGETS persistence_from_file DESTINATION bin)
@@ -69,75 +55,16 @@ if(GMP_FOUND)
endif(GMP_FOUND)
if(CGAL_FOUND)
- add_executable(alpha_complex_3d_persistence alpha_complex_3d_persistence.cpp)
- target_link_libraries(alpha_complex_3d_persistence ${CGAL_LIBRARY})
- add_executable(exact_alpha_complex_3d_persistence exact_alpha_complex_3d_persistence.cpp)
- target_link_libraries(exact_alpha_complex_3d_persistence ${CGAL_LIBRARY})
-
- if (TBB_FOUND)
- target_link_libraries(alpha_complex_3d_persistence ${TBB_LIBRARIES})
- target_link_libraries(exact_alpha_complex_3d_persistence ${TBB_LIBRARIES})
- endif(TBB_FOUND)
- add_test(NAME Persistent_cohomology_example_alpha_complex_3d COMMAND $<TARGET_FILE:alpha_complex_3d_persistence>
- "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off" "2" "0.45")
- add_test(NAME Persistent_cohomology_example_exact_alpha_complex_3d COMMAND $<TARGET_FILE:exact_alpha_complex_3d_persistence>
- "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off" "2" "0.45")
-
- install(TARGETS alpha_complex_3d_persistence DESTINATION bin)
- install(TARGETS exact_alpha_complex_3d_persistence DESTINATION bin)
-
if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
- add_executable (alpha_complex_persistence alpha_complex_persistence.cpp)
- target_link_libraries(alpha_complex_persistence
- ${CGAL_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY})
-
- add_executable(periodic_alpha_complex_3d_persistence periodic_alpha_complex_3d_persistence.cpp)
- target_link_libraries(periodic_alpha_complex_3d_persistence ${CGAL_LIBRARY})
-
add_executable(custom_persistence_sort custom_persistence_sort.cpp)
target_link_libraries(custom_persistence_sort ${CGAL_LIBRARY})
if (TBB_FOUND)
- target_link_libraries(alpha_complex_persistence ${TBB_LIBRARIES})
- target_link_libraries(periodic_alpha_complex_3d_persistence ${TBB_LIBRARIES})
target_link_libraries(custom_persistence_sort ${TBB_LIBRARIES})
endif(TBB_FOUND)
- add_test(NAME Persistent_cohomology_example_alpha_complex COMMAND $<TARGET_FILE:alpha_complex_persistence>
- "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off" "-p" "2" "-m" "0.45")
- add_test(NAME Persistent_cohomology_example_periodic_alpha_complex_3d COMMAND $<TARGET_FILE:periodic_alpha_complex_3d_persistence>
- "${CMAKE_SOURCE_DIR}/data/points/grid_10_10_10_in_0_1.off" "${CMAKE_SOURCE_DIR}/data/points/iso_cuboid_3_in_0_1.txt" "3" "1.0")
add_test(NAME Persistent_cohomology_example_custom_persistence_sort COMMAND $<TARGET_FILE:custom_persistence_sort>)
- install(TARGETS alpha_complex_persistence DESTINATION bin)
- install(TARGETS periodic_alpha_complex_3d_persistence DESTINATION bin)
install(TARGETS custom_persistence_sort DESTINATION bin)
endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.7.0)
-
- if (NOT CGAL_VERSION VERSION_LESS 4.11.0)
- add_executable(weighted_periodic_alpha_complex_3d_persistence weighted_periodic_alpha_complex_3d_persistence.cpp)
- target_link_libraries(weighted_periodic_alpha_complex_3d_persistence ${CGAL_LIBRARY})
- if (TBB_FOUND)
- target_link_libraries(weighted_periodic_alpha_complex_3d_persistence ${TBB_LIBRARIES})
- endif(TBB_FOUND)
-
- add_test(NAME Persistent_cohomology_example_weigted_periodic_alpha_complex_3d COMMAND $<TARGET_FILE:weighted_periodic_alpha_complex_3d_persistence>
- "${CMAKE_SOURCE_DIR}/data/points/grid_10_10_10_in_0_1.off" "${CMAKE_SOURCE_DIR}/data/points/grid_10_10_10_in_0_1.weights"
- "${CMAKE_SOURCE_DIR}/data/points/iso_cuboid_3_in_0_1.txt" "3" "1.0")
-
- install(TARGETS weighted_periodic_alpha_complex_3d_persistence DESTINATION bin)
-
- endif (NOT CGAL_VERSION VERSION_LESS 4.11.0)
-
- add_executable(weighted_alpha_complex_3d_persistence weighted_alpha_complex_3d_persistence.cpp)
- target_link_libraries(weighted_alpha_complex_3d_persistence ${CGAL_LIBRARY})
- if (TBB_FOUND)
- target_link_libraries(weighted_alpha_complex_3d_persistence ${TBB_LIBRARIES})
- endif(TBB_FOUND)
-
- install(TARGETS weighted_alpha_complex_3d_persistence DESTINATION bin)
-
- add_test(NAME Persistent_cohomology_example_weighted_alpha_complex_3d COMMAND $<TARGET_FILE:weighted_alpha_complex_3d_persistence>
- "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off" "${CMAKE_SOURCE_DIR}/data/points/tore3D_300.weights" "2" "0.45")
-
endif(CGAL_FOUND)
diff --git a/src/Persistent_cohomology/example/README b/src/Persistent_cohomology/example/README
index 794b94ae..f39d9584 100644
--- a/src/Persistent_cohomology/example/README
+++ b/src/Persistent_cohomology/example/README
@@ -1,43 +1,14 @@
-To build the example, run in a Terminal:
+To build the examples, run in a Terminal:
-cd /path-to-example/
+cd /path-to-examples/
cmake .
make
***********************************************************************************************************************
Example of use of RIPS:
-Computation of the persistent homology with Z/2Z coefficients of the Rips complex on points
-sampling a Klein bottle:
-
-./rips_persistence ../../data/points/tore3D_1307.off -r 0.25 -m 0.5 -d 3 -p 2
-
-output:
-2 0 0 inf
-2 1 0.0983494 inf
-2 1 0.104347 inf
-2 2 0.138335 inf
-
-
-Every line is of this format: p1*...*pr dim b d
-where
- p1*...*pr is the product of prime numbers pi such that the homology feature exists in homology with Z/piZ coefficients.
- dim is the dimension of the homological feature,
- b and d are respectively the birth and death of the feature and
-
-
-
-with Z/3Z coefficients:
-
-./rips_persistence ../../data/points/tore3D_1307.off -r 0.25 -m 0.5 -d 3 -p 3
-
-output:
-3 0 0 inf
-3 1 0.0983494 inf
-3 1 0.104347 inf
-3 2 0.138335 inf
-
-and the computation with Z/2Z and Z/3Z coefficients simultaneously:
+Computation of the persistent homology with Z/2Z and Z/3Z coefficients simultaneously of the Rips complex
+on points sampling a 3D torus:
./rips_multifield_persistence ../../data/points/tore3D_1307.off -r 0.25 -m 0.12 -d 3 -p 2 -q 3
@@ -53,7 +24,13 @@ output:
6 0 0 0.12047
6 0 0 0.120414
-and finally the computation with all Z/pZ for 2 <= p <= 71 (20 first prime numbers):
+Every line is of this format: p1*...*pr dim b d
+where
+ p1*...*pr is the product of prime numbers pi such that the homology feature exists in homology with Z/piZ coefficients.
+ dim is the dimension of the homological feature,
+ b and d are respectively the birth and death of the feature.
+
+and the computation with all Z/pZ for 2 <= p <= 71 (the first 20 prime numbers):
./rips_multifield_persistence ../../data/points/Kl.off -r 0.25 -m 0.5 -d 3 -p 2 -q 71
@@ -70,82 +47,6 @@ output:
557940830126698960967415390 0 0 0.120414
***********************************************************************************************************************
-Example of use of ALPHA:
-
-For a more verbose mode, please run cmake with option "DEBUG_TRACES=TRUE" and recompile the programs.
-
-1) 3D special case
-------------------
-Computation of the persistent homology with Z/2Z coefficients of the alpha complex on points
-sampling a torus 3D:
-
-./alpha_complex_3d_persistence ../../data/points/tore3D_300.off 2 0.45
-
-output:
-Simplex_tree dim: 3
-2 0 0 inf
-2 1 0.0682162 1.0001
-2 1 0.0934117 1.00003
-2 2 0.56444 1.03938
-
-Here we retrieve expected Betti numbers on a tore 3D:
-Betti numbers[0] = 1
-Betti numbers[1] = 2
-Betti numbers[2] = 1
-
-N.B.: - alpha_complex_3d_persistence accepts only OFF files in 3D dimension.
- - filtration values are alpha square values
-
-2) d-Dimension case
--------------------
-Computation of the persistent homology with Z/2Z coefficients of the alpha complex on points
-sampling a torus 3D:
-
-./alpha_complex_persistence -r 32 -p 2 -m 0.45 ../../data/points/tore3D_300.off
-
-output:
-Alpha complex is of dimension 3 - 9273 simplices - 300 vertices.
-Simplex_tree dim: 3
-2 0 0 inf
-2 1 0.0682162 1.0001
-2 1 0.0934117 1.00003
-2 2 0.56444 1.03938
-
-Here we retrieve expected Betti numbers on a tore 3D:
-Betti numbers[0] = 1
-Betti numbers[1] = 2
-Betti numbers[2] = 1
-
-N.B.: - alpha_complex_persistence accepts OFF files in d-Dimension.
- - filtration values are alpha square values
-
-3) 3D periodic special case
----------------------------
-./periodic_alpha_complex_3d_persistence ../../data/points/grid_10_10_10_in_0_1.off ../../data/points/iso_cuboid_3_in_0_1.txt 3 1.0
-
-output:
-Periodic Delaunay computed.
-Simplex_tree dim: 3
-3 0 0 inf
-3 1 0.0025 inf
-3 1 0.0025 inf
-3 1 0.0025 inf
-3 2 0.005 inf
-3 2 0.005 inf
-3 2 0.005 inf
-3 3 0.0075 inf
-
-Here we retrieve expected Betti numbers on a tore 3D:
-Betti numbers[0] = 1
-Betti numbers[1] = 3
-Betti numbers[2] = 3
-Betti numbers[3] = 1
-
-N.B.: - periodic_alpha_complex_3d_persistence accepts only OFF files in 3D dimension. In this example, the periodic cube
-is hard coded to { x = [0,1]; y = [0,1]; z = [0,1] }
- - filtration values are alpha square values
-
-***********************************************************************************************************************
Example of use of PLAIN HOMOLOGY:
This example computes the plain homology of the following simplicial complex without filtration values:
diff --git a/src/Rips_complex/utilities/CMakeLists.txt b/src/Rips_complex/utilities/CMakeLists.txt
new file mode 100644
index 00000000..baa571fa
--- /dev/null
+++ b/src/Rips_complex/utilities/CMakeLists.txt
@@ -0,0 +1,21 @@
+cmake_minimum_required(VERSION 2.6)
+project(Rips_complex_utilities)
+
+add_executable(rips_distance_matrix_persistence rips_distance_matrix_persistence.cpp)
+target_link_libraries(rips_distance_matrix_persistence ${Boost_PROGRAM_OPTIONS_LIBRARY})
+
+add_executable(rips_persistence rips_persistence.cpp)
+target_link_libraries(rips_persistence ${Boost_PROGRAM_OPTIONS_LIBRARY})
+
+if (TBB_FOUND)
+ target_link_libraries(rips_distance_matrix_persistence ${TBB_LIBRARIES})
+ target_link_libraries(rips_persistence ${TBB_LIBRARIES})
+endif()
+
+add_test(NAME Rips_complex_utility_from_rips_distance_matrix COMMAND $<TARGET_FILE:rips_distance_matrix_persistence>
+ "${CMAKE_SOURCE_DIR}/data/distance_matrix/full_square_distance_matrix.csv" "-r" "1.0" "-d" "3" "-p" "3" "-m" "0")
+add_test(NAME Rips_complex_utility_from_rips_on_tore_3D COMMAND $<TARGET_FILE:rips_persistence>
+ "${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" "-r" "0.25" "-m" "0.5" "-d" "3" "-p" "3")
+
+install(TARGETS rips_distance_matrix_persistence DESTINATION bin)
+install(TARGETS rips_persistence DESTINATION bin)
diff --git a/src/Rips_complex/utilities/README b/src/Rips_complex/utilities/README
new file mode 100644
index 00000000..ca10424d
--- /dev/null
+++ b/src/Rips_complex/utilities/README
@@ -0,0 +1,67 @@
+# Rips_complex #
+
+## `rips_persistence` ##
+This program computes the persistent homology with coefficient field *Z/pZ* of a Rips complex defined on a set of input points. The output diagram contains one bar per line, written with the convention:
+
+`p dim b d`
+
+where `dim` is the dimension of the homological feature, `b` and `d` are respectively the birth and death of the feature, and `p` is the characteristic of the field *Z/pZ* used for homology coefficients (`p` must be a prime number).
+
+**Usage**
+`rips_persistence [options] <OFF input file>`
+
+**Allowed options**
+
+* `-h [ --help ]` Produce help message
+* `-o [ --output-file ]` Name of file in which the persistence diagram is written. By default, the diagram is printed to the standard output.
+* `-r [ --max-edge-length ]` (default = inf) Maximal length of an edge for the Rips complex construction.
+* `-d [ --cpx-dimension ]` (default = 1) Maximal dimension of the Rips complex we want to compute.
+* `-p [ --field-charac ]` (default = 11) Characteristic p of the coefficient field Z/pZ for computing homology.
+* `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals.
+
+**Example 1 with Z/2Z coefficients**
+`rips_persistence ../../data/points/tore3D_1307.off -r 0.25 -m 0.5 -d 3 -p 2`
+
+outputs:
+```
+2 0 0 inf
+2 1 0.0983494 inf
+2 1 0.104347 inf
+2 2 0.138335 inf
+```
+
+**Example 2 with Z/3Z coefficients**
+
+`rips_persistence ../../data/points/tore3D_1307.off -r 0.25 -m 0.5 -d 3 -p 3`
+
+outputs:
+```
+3 0 0 inf
+3 1 0.0983494 inf
+3 1 0.104347 inf
+3 2 0.138335 inf
+```
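+
+For reference, here is a minimal C++ sketch of the kind of computation this utility performs, assuming the GUDHI Rips complex and persistent cohomology headers; the file name and parameter values simply mirror Example 1 and are illustrative only:
+
+```
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Rips_complex.h>
+#include <gudhi/Persistent_cohomology.h>
+#include <gudhi/Points_off_io.h>
+#include <gudhi/distance_functions.h>
+
+#include <vector>
+
+using Simplex_tree = Gudhi::Simplex_tree<>;
+using Filtration_value = Simplex_tree::Filtration_value;
+using Rips_complex = Gudhi::rips_complex::Rips_complex<Filtration_value>;
+using Field_Zp = Gudhi::persistent_cohomology::Field_Zp;
+using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology<Simplex_tree, Field_Zp>;
+using Point = std::vector<double>;
+
+int main() {
+  // Read the point cloud and build the Rips complex with edge length at most 0.25, up to dimension 3.
+  Gudhi::Points_off_reader<Point> off_reader("../../data/points/tore3D_1307.off");
+  Rips_complex rips_complex(off_reader.get_point_cloud(), 0.25, Gudhi::Euclidean_distance());
+  Simplex_tree simplex_tree;
+  rips_complex.create_complex(simplex_tree, 3);
+  simplex_tree.initialize_filtration();
+  // Persistent homology with Z/2Z coefficients, keeping features of persistence at least 0.5.
+  Persistent_cohomology pcoh(simplex_tree);
+  pcoh.init_coefficients(2);
+  pcoh.compute_persistent_cohomology(0.5);
+  pcoh.output_diagram();
+  return 0;
+}
+```
+
+The command-line options of the utility map directly onto the threshold, complex dimension, field characteristic and minimal persistence used above.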
+
+
+
+
+## `rips_distance_matrix_persistence` ##
+Same as `rips_persistence` but taking a distance matrix as input.
+
+**Example**
+`rips_distance_matrix_persistence data/distance_matrix/full_square_distance_matrix.csv -r 15 -d 3 -p 3 -m 0`
+
+outputs:
+```
+The complex contains 46 simplices
+ and has dimension 3
+3 0 0 inf
+3 0 0 8.94427
+3 0 0 7.28011
+3 0 0 6.08276
+3 0 0 5.83095
+3 0 0 5.38516
+3 0 0 5
+3 1 11 12.0416
+3 1 6.32456 6.7082
+```
diff --git a/src/Persistent_cohomology/example/rips_distance_matrix_persistence.cpp b/src/Rips_complex/utilities/rips_distance_matrix_persistence.cpp
index d38808c7..d38808c7 100644
--- a/src/Persistent_cohomology/example/rips_distance_matrix_persistence.cpp
+++ b/src/Rips_complex/utilities/rips_distance_matrix_persistence.cpp
diff --git a/src/Persistent_cohomology/example/rips_persistence.cpp b/src/Rips_complex/utilities/rips_persistence.cpp
index d504798b..d504798b 100644
--- a/src/Persistent_cohomology/example/rips_persistence.cpp
+++ b/src/Rips_complex/utilities/rips_persistence.cpp
diff --git a/src/Simplex_tree/example/example_alpha_shapes_3_simplex_tree_from_off_file.cpp b/src/Simplex_tree/example/example_alpha_shapes_3_simplex_tree_from_off_file.cpp
index ff2eebcb..d8289ba9 100644
--- a/src/Simplex_tree/example/example_alpha_shapes_3_simplex_tree_from_off_file.cpp
+++ b/src/Simplex_tree/example/example_alpha_shapes_3_simplex_tree_from_off_file.cpp
@@ -28,6 +28,8 @@
#include <CGAL/Exact_predicates_inexact_constructions_kernel.h>
#include <CGAL/Delaunay_triangulation_3.h>
#include <CGAL/Alpha_shape_3.h>
+#include <CGAL/Alpha_shape_vertex_base_3.h>
+#include <CGAL/Alpha_shape_cell_base_3.h>
#include <CGAL/iterator.h>
#include <fstream>
diff --git a/src/Witness_complex/doc/Witness_complex_doc.h b/src/Witness_complex/doc/Witness_complex_doc.h
index 171a185f..5d5c0735 100644
--- a/src/Witness_complex/doc/Witness_complex_doc.h
+++ b/src/Witness_complex/doc/Witness_complex_doc.h
@@ -90,8 +90,9 @@ int main(int argc, char * const argv[]) {
Gudhi::Points_off_reader<Point_d> off_reader(file_name);
point_vector = Point_vector(off_reader.get_point_cloud());
- // Choose landmarks
- Gudhi::subsampling::pick_n_random_points(point_vector, nbL, std::back_inserter(landmarks));
+ // Choose landmarks (one can choose either of the two methods below)
+ // Gudhi::subsampling::pick_n_random_points(point_vector, nbL, std::back_inserter(landmarks));
+ Gudhi::subsampling::choose_n_farthest_points(K(), point_vector, nbL, Gudhi::subsampling::random_starting_point, std::back_inserter(landmarks));
// Compute witness complex
Witness_complex witness_complex(landmarks,
@@ -107,7 +108,14 @@ int main(int argc, char * const argv[]) {
Here is an example of constructing a strong witness complex filtration and computing persistence on it:
- \include Witness_complex/example_strong_witness_persistence.cpp
+ \include Witness_complex/strong_witness_persistence.cpp
+
+ \section witnessexample3 Example 3: Computing relaxed witness complex persistence from a nearest landmark table
+
+ In this example, we compute the persistence of a relaxed witness complex built from a given table of the landmarks closest to each witness.
+ Each landmark is given as a pair (index, distance).
+
+ \include Witness_complex/example_nearest_landmark_table.cpp
\copyright GNU General Public License v3.
diff --git a/src/Witness_complex/example/CMakeLists.txt b/src/Witness_complex/example/CMakeLists.txt
index cbc53902..a8231392 100644
--- a/src/Witness_complex/example/CMakeLists.txt
+++ b/src/Witness_complex/example/CMakeLists.txt
@@ -13,39 +13,23 @@ install(TARGETS Witness_complex_example_nearest_landmark_table DESTINATION bin)
# CGAL and Eigen3 are required for Euclidean version of Witness
if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0)
add_executable( Witness_complex_example_off example_witness_complex_off.cpp )
- add_executable( Witness_complex_example_strong_off example_strong_witness_complex_off.cpp )
add_executable ( Witness_complex_example_sphere example_witness_complex_sphere.cpp )
-
- add_executable ( Witness_complex_example_witness_persistence example_witness_complex_persistence.cpp )
- target_link_libraries(Witness_complex_example_witness_persistence ${Boost_PROGRAM_OPTIONS_LIBRARY})
-
- add_executable ( Witness_complex_example_strong_witness_persistence example_strong_witness_persistence.cpp )
- target_link_libraries(Witness_complex_example_strong_witness_persistence ${Boost_PROGRAM_OPTIONS_LIBRARY})
-
- if (TBB_FOUND)
- target_link_libraries(Witness_complex_example_witness_persistence ${TBB_LIBRARIES})
- target_link_libraries(Witness_complex_example_strong_witness_persistence ${TBB_LIBRARIES})
- endif()
+
+ add_executable( Witness_complex_example_strong_off example_strong_witness_complex_off.cpp )
+ target_link_libraries(Witness_complex_example_strong_off)
add_test(NAME Witness_complex_example_off_test_torus
COMMAND $<TARGET_FILE:Witness_complex_example_off>
"${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" "20" "1.0" "3")
+ add_test(NAME Witness_complex_example_test_sphere_10
+ COMMAND $<TARGET_FILE:Witness_complex_example_sphere> "10")
add_test(NAME Witness_complex_example_strong_off_test_torus
COMMAND $<TARGET_FILE:Witness_complex_example_strong_off>
"${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" "20" "1.0" "3")
- add_test(NAME Witness_complex_example_test_sphere_10
- COMMAND $<TARGET_FILE:Witness_complex_example_sphere> "10")
- add_test(NAME Witness_complex_example_test_torus_persistence
- COMMAND $<TARGET_FILE:Witness_complex_example_witness_persistence>
- "${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" "-l" "20" "-a" "0.5")
- add_test(NAME Witness_complex_example_strong_test_torus_persistence
- COMMAND $<TARGET_FILE:Witness_complex_example_strong_witness_persistence>
- "${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" "-l" "20" "-a" "0.5")
-
+
install(TARGETS Witness_complex_example_off DESTINATION bin)
- install(TARGETS Witness_complex_example_strong_off DESTINATION bin)
install(TARGETS Witness_complex_example_sphere DESTINATION bin)
- install(TARGETS Witness_complex_example_witness_persistence DESTINATION bin)
- install(TARGETS Witness_complex_example_strong_witness_persistence DESTINATION bin)
+ install(TARGETS Witness_complex_example_strong_off DESTINATION bin)
+
endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0)
diff --git a/src/Witness_complex/example/example_strong_witness_complex_off.cpp b/src/Witness_complex/example/example_strong_witness_complex_off.cpp
index 0ee9ee90..bc069654 100644
--- a/src/Witness_complex/example/example_strong_witness_complex_off.cpp
+++ b/src/Witness_complex/example/example_strong_witness_complex_off.cpp
@@ -23,6 +23,7 @@
#include <gudhi/Simplex_tree.h>
#include <gudhi/Euclidean_strong_witness_complex.h>
#include <gudhi/pick_n_random_points.h>
+#include <gudhi/choose_n_farthest_points.h>
#include <gudhi/Points_off_io.h>
#include <CGAL/Epick_d.h>
@@ -63,9 +64,10 @@ int main(int argc, char * const argv[]) {
std::cout << "Successfully read " << point_vector.size() << " points.\n";
std::cout << "Ambient dimension is " << point_vector[0].dimension() << ".\n";
- // Choose landmarks
- Gudhi::subsampling::pick_n_random_points(point_vector, nbL, std::back_inserter(landmarks));
-
+ // Choose landmarks (uncomment one of the following two lines)
+ // Gudhi::subsampling::pick_n_random_points(point_vector, nbL, std::back_inserter(landmarks));
+ Gudhi::subsampling::choose_n_farthest_points(K(), point_vector, nbL, Gudhi::subsampling::random_starting_point, std::back_inserter(landmarks));
+
// Compute witness complex
start = clock();
Witness_complex witness_complex(landmarks,
diff --git a/src/Witness_complex/example/example_witness_complex_off.cpp b/src/Witness_complex/example/example_witness_complex_off.cpp
index b36dac0d..be11c955 100644
--- a/src/Witness_complex/example/example_witness_complex_off.cpp
+++ b/src/Witness_complex/example/example_witness_complex_off.cpp
@@ -4,6 +4,7 @@
#include <gudhi/Simplex_tree.h>
#include <gudhi/Euclidean_witness_complex.h>
#include <gudhi/pick_n_random_points.h>
+#include <gudhi/choose_n_farthest_points.h>
#include <gudhi/Points_off_io.h>
#include <CGAL/Epick_d.h>
@@ -44,8 +45,9 @@ int main(int argc, char * const argv[]) {
std::cout << "Successfully read " << point_vector.size() << " points.\n";
std::cout << "Ambient dimension is " << point_vector[0].dimension() << ".\n";
- // Choose landmarks
- Gudhi::subsampling::pick_n_random_points(point_vector, nbL, std::back_inserter(landmarks));
+ // Choose landmarks (uncomment one of the following two lines)
+ // Gudhi::subsampling::pick_n_random_points(point_vector, nbL, std::back_inserter(landmarks));
+ Gudhi::subsampling::choose_n_farthest_points(K(), point_vector, nbL, Gudhi::subsampling::random_starting_point, std::back_inserter(landmarks));
// Compute witness complex
start = clock();
diff --git a/src/Witness_complex/example/example_witness_complex_sphere.cpp b/src/Witness_complex/example/example_witness_complex_sphere.cpp
index 124fd99b..a66da3f9 100644
--- a/src/Witness_complex/example/example_witness_complex_sphere.cpp
+++ b/src/Witness_complex/example/example_witness_complex_sphere.cpp
@@ -25,6 +25,7 @@
#include <gudhi/Simplex_tree.h>
#include <gudhi/Euclidean_witness_complex.h>
#include <gudhi/pick_n_random_points.h>
+#include <gudhi/choose_n_farthest_points.h>
#include <gudhi/reader_utils.h>
#include <CGAL/Epick_d.h>
@@ -75,7 +76,8 @@ int main(int argc, char * const argv[]) {
// Choose landmarks
start = clock();
- Gudhi::subsampling::pick_n_random_points(point_vector, number_of_landmarks, std::back_inserter(landmarks));
+ // Gudhi::subsampling::pick_n_random_points(point_vector, number_of_landmarks, std::back_inserter(landmarks));
+ Gudhi::subsampling::choose_n_farthest_points(K(), point_vector, number_of_landmarks, Gudhi::subsampling::random_starting_point, std::back_inserter(landmarks));
// Compute witness complex
Witness_complex witness_complex(landmarks,
diff --git a/src/Witness_complex/include/gudhi/Strong_witness_complex.h b/src/Witness_complex/include/gudhi/Strong_witness_complex.h
index c18335d3..b3d00b11 100644
--- a/src/Witness_complex/include/gudhi/Strong_witness_complex.h
+++ b/src/Witness_complex/include/gudhi/Strong_witness_complex.h
@@ -34,7 +34,8 @@ namespace Gudhi {
namespace witness_complex {
-/* \private
+ /**
+ * \private
* \class Strong_witness_complex
* \brief Constructs strong witness complex for a given table of nearest landmarks with respect to witnesses.
* \ingroup witness_complex
@@ -130,6 +131,8 @@ class Strong_witness_complex {
return true;
}
+ //@}
+
private:
/* \brief Adds recursively all the faces of a certain dimension dim-1 witnessed by the same witness.
* Iterator is needed to know until how far we can take landmarks to form simplexes.
@@ -170,7 +173,6 @@ class Strong_witness_complex {
simplex.pop_back();
}
}
- //@}
};
} // namespace witness_complex
diff --git a/src/Witness_complex/utilities/CMakeLists.txt b/src/Witness_complex/utilities/CMakeLists.txt
new file mode 100644
index 00000000..125a41ff
--- /dev/null
+++ b/src/Witness_complex/utilities/CMakeLists.txt
@@ -0,0 +1,28 @@
+cmake_minimum_required(VERSION 2.6)
+project(Witness_complex_utilities)
+
+# CGAL and Eigen3 are required for Euclidean version of Witness
+if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0)
+
+ add_executable ( Witness_complex_strong_witness_persistence strong_witness_persistence.cpp )
+ target_link_libraries(Witness_complex_strong_witness_persistence ${Boost_PROGRAM_OPTIONS_LIBRARY})
+
+ add_executable ( Witness_complex_weak_witness_persistence weak_witness_persistence.cpp )
+ target_link_libraries(Witness_complex_weak_witness_persistence ${Boost_PROGRAM_OPTIONS_LIBRARY})
+
+ if (TBB_FOUND)
+ target_link_libraries(Witness_complex_strong_witness_persistence ${TBB_LIBRARIES})
+ target_link_libraries(Witness_complex_weak_witness_persistence ${TBB_LIBRARIES})
+ endif()
+
+ add_test(NAME Witness_complex_strong_test_torus_persistence
+ COMMAND $<TARGET_FILE:Witness_complex_strong_witness_persistence>
+ "${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" "-l" "20" "-a" "0.5")
+ add_test(NAME Witness_complex_weak_test_torus_persistence
+ COMMAND $<TARGET_FILE:Witness_complex_weak_witness_persistence>
+ "${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" "-l" "20" "-a" "0.5")
+
+ install(TARGETS Witness_complex_strong_witness_persistence DESTINATION bin)
+ install(TARGETS Witness_complex_weak_witness_persistence DESTINATION bin)
+
+endif (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.6.0)
diff --git a/src/Witness_complex/utilities/README b/src/Witness_complex/utilities/README
new file mode 100644
index 00000000..d8dc9ca7
--- /dev/null
+++ b/src/Witness_complex/utilities/README
@@ -0,0 +1,72 @@
+# Witness_complex #
+
+## `weak_witness_persistence` ##
+This program computes the persistent homology with coefficient field *Z/pZ* of a Weak witness complex defined on a set of input points. The output diagram contains one bar per line, written with the convention:
+
+`p dim b d`
+
+where `dim` is the dimension of the homological feature, `b` and `d` are respectively the birth and death of the feature, and `p` is the characteristic of the field *Z/pZ* used for homology coefficients.
+
+*Usage*
+`weak_witness_persistence [options] <OFF input file>`
+
+*Allowed options*
+
+* `-h [ --help ]` Produce help message
+* `-l [ --landmarks ]` Number of landmarks to choose from the point cloud.
+* `-o [ --output-file ]` Name of file in which the persistence diagram is written. By default, the diagram is printed to std::cout.
+* `-a [ --max-sq-alpha ]` (default = inf) Maximal squared relaxation parameter.
+* `-p [ --field-charac ]` (default = 11) Characteristic p of the coefficient field Z/pZ for computing homology.
+* `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals.
+* `-d [ --cpx-dimension ]` (default = 2147483647) Maximal dimension of the weak witness complex we want to compute.
+
+*Example*
+`weak_witness_persistence data/points/tore3D_1307.off -l 20 -a 0.5 -m 0.006`
+
+outputs:
+```
+Successfully read 1307 points.
+Ambient dimension is 3.
+The complex contains 732 simplices and has dimension 8
+11 0 0 inf
+11 1 0 inf
+11 2 0.0275251 0.0534586
+11 1 0 0.0239952
+```
+
+N.B.: output is random as the 20 landmarks are chosen randomly.
+
+## `strong_witness_persistence` ##
+This program computes the persistent homology with coefficient field *Z/pZ* of a Strong witness complex defined on a set of input points. The output diagram contains one bar per line, written with the convention:
+
+`p dim b d`
+
+where `dim` is the dimension of the homological feature, `b` and `d` are respectively the birth and death of the feature, and `p` is the characteristic of the field *Z/pZ* used for homology coefficients.
+
+*Usage*
+`strong_witness_persistence [options] <OFF input file>`
+
+*Allowed options*
+
+* `-h [ --help ]` Produce help message
+* `-l [ --landmarks ]` Number of landmarks to choose from the point cloud.
+* `-o [ --output-file ]` Name of file in which the persistence diagram is written. By default, the diagram is printed to std::cout.
+* `-a [ --max-sq-alpha ]` (default = inf) Maximal squared relaxation parameter.
+* `-p [ --field-charac ]` (default = 11) Characteristic p of the coefficient field Z/pZ for computing homology.
+* `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals.
+* `-d [ --cpx-dimension ]` (default = 2147483647) Maximal dimension of the strong witness complex we want to compute.
+
+*Example*
+`strong_witness_persistence data/points/tore3D_1307.off -l 20 -a 0.5 -m 0.06`
+
+outputs:
+```
+Successfully read 1307 points.
+Ambient dimension is 3.
+The complex contains 1836 simplices and has dimension 8
+11 0 0 inf
+11 1 0.00674748 inf
+11 2 0.0937751 0.235354
+```
+
+N.B.: output is random as the 20 landmarks are chosen randomly.
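+
+For reference, here is a minimal C++ sketch of the kind of computation `weak_witness_persistence` performs, assuming the Euclidean witness complex API used in the GUDHI examples; the file name and parameter values are illustrative only:
+
+```
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Euclidean_witness_complex.h>
+#include <gudhi/Persistent_cohomology.h>
+#include <gudhi/choose_n_farthest_points.h>
+#include <gudhi/Points_off_io.h>
+
+#include <CGAL/Epick_d.h>
+
+#include <iterator>
+#include <vector>
+
+using K = CGAL::Epick_d<CGAL::Dynamic_dimension_tag>;
+using Point_d = K::Point_d;
+using Witness_complex = Gudhi::witness_complex::Euclidean_witness_complex<K>;
+using Simplex_tree = Gudhi::Simplex_tree<>;
+using Field_Zp = Gudhi::persistent_cohomology::Field_Zp;
+using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology<Simplex_tree, Field_Zp>;
+
+int main() {
+  // Read the witnesses and choose 20 landmarks among them by farthest point sampling.
+  Gudhi::Points_off_reader<Point_d> off_reader("data/points/tore3D_1307.off");
+  std::vector<Point_d> witnesses = off_reader.get_point_cloud();
+  std::vector<Point_d> landmarks;
+  Gudhi::subsampling::choose_n_farthest_points(K(), witnesses, 20,
+      Gudhi::subsampling::random_starting_point, std::back_inserter(landmarks));
+  // Build the weak witness complex up to squared relaxation parameter 0.5.
+  Witness_complex witness_complex(landmarks, witnesses);
+  Simplex_tree simplex_tree;
+  witness_complex.create_complex(simplex_tree, 0.5);
+  simplex_tree.initialize_filtration();
+  // Persistent homology with Z/11Z coefficients (the default field of these utilities).
+  Persistent_cohomology pcoh(simplex_tree);
+  pcoh.init_coefficients(11);
+  pcoh.compute_persistent_cohomology(0.006);
+  pcoh.output_diagram();
+  return 0;
+}
+```
+
+The strong variant follows the same pattern with `Euclidean_strong_witness_complex` in place of `Euclidean_witness_complex`.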
diff --git a/src/Witness_complex/example/example_strong_witness_persistence.cpp b/src/Witness_complex/utilities/strong_witness_persistence.cpp
index f786fe7b..e3e0c1ee 100644
--- a/src/Witness_complex/example/example_strong_witness_persistence.cpp
+++ b/src/Witness_complex/utilities/strong_witness_persistence.cpp
@@ -25,6 +25,7 @@
#include <gudhi/Persistent_cohomology.h>
#include <gudhi/Points_off_io.h>
#include <gudhi/pick_n_random_points.h>
+#include <gudhi/choose_n_farthest_points.h>
#include <boost/program_options.hpp>
@@ -76,8 +77,9 @@ int main(int argc, char * argv[]) {
std::cout << "Successfully read " << witnesses.size() << " points.\n";
std::cout << "Ambient dimension is " << witnesses[0].dimension() << ".\n";
- // Choose landmarks from witnesses
- Gudhi::subsampling::pick_n_random_points(witnesses, nbL, std::back_inserter(landmarks));
+ // Choose landmarks (uncomment one of the following two lines)
+ // Gudhi::subsampling::pick_n_random_points(witnesses, nbL, std::back_inserter(landmarks));
+ Gudhi::subsampling::choose_n_farthest_points(K(), witnesses, nbL, Gudhi::subsampling::random_starting_point, std::back_inserter(landmarks));
// Compute witness complex
Strong_witness_complex strong_witness_complex(landmarks,
diff --git a/src/Witness_complex/example/example_witness_complex_persistence.cpp b/src/Witness_complex/utilities/weak_witness_persistence.cpp
index a1146922..a63b0837 100644
--- a/src/Witness_complex/example/example_witness_complex_persistence.cpp
+++ b/src/Witness_complex/utilities/weak_witness_persistence.cpp
@@ -25,6 +25,7 @@
#include <gudhi/Persistent_cohomology.h>
#include <gudhi/Points_off_io.h>
#include <gudhi/pick_n_random_points.h>
+#include <gudhi/choose_n_farthest_points.h>
#include <boost/program_options.hpp>
@@ -76,8 +77,9 @@ int main(int argc, char * argv[]) {
std::cout << "Successfully read " << witnesses.size() << " points.\n";
std::cout << "Ambient dimension is " << witnesses[0].dimension() << ".\n";
- // Choose landmarks from witnesses
- Gudhi::subsampling::pick_n_random_points(witnesses, nbL, std::back_inserter(landmarks));
+ // Choose landmarks (uncomment one of the following two lines)
+ // Gudhi::subsampling::pick_n_random_points(witnesses, nbL, std::back_inserter(landmarks));
+ Gudhi::subsampling::choose_n_farthest_points(K(), witnesses, nbL, Gudhi::subsampling::random_starting_point, std::back_inserter(landmarks));
// Compute witness complex
Witness_complex witness_complex(landmarks,
diff --git a/src/common/doc/file_formats.h b/src/common/doc/file_formats.h
index d715aa4d..3b3d9248 100644
--- a/src/common/doc/file_formats.h
+++ b/src/common/doc/file_formats.h
@@ -53,6 +53,21 @@ namespace Gudhi {
Such files can be generated with `Gudhi::persistent_cohomology::Persistent_cohomology::output_diagram()` and read with
`Gudhi::read_persistence_intervals_and_dimension()`, `Gudhi::read_persistence_intervals_grouped_by_dimension()` or
`Gudhi::read_persistence_intervals_in_dimension()`.
+
+
+ \section FileFormatsIsoCuboid Iso-cuboid
+
+ Such a file describes an iso-oriented cuboid with diagonally opposite vertices (min_hx, min_hy, min_hz,...) and (max_hx, max_hy, max_hz, ...). The format is:<br>
+ \verbatim
+ min_hx min_hy [min_hz ...]
+ max_hx max_hy [max_hz ...]
+ \endverbatim
+
+ Here is a simple sample file in the 3D case:
+ \verbatim
+ -1. -1. -1.
+ 1. 1. 1.
+ \endverbatim
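+
+ A minimal C++ sketch of reading such a file into two coordinate vectors (the helper name read_iso_cuboid is purely illustrative and is not part of the library):
+ \code{.cpp}
+ #include <fstream>
+ #include <sstream>
+ #include <string>
+ #include <vector>
+
+ // Reads the two lines of an iso-cuboid file into the min and max corner coordinates.
+ bool read_iso_cuboid(const std::string& path, std::vector<double>& min_corner, std::vector<double>& max_corner) {
+   std::ifstream in(path);
+   std::string line;
+   for (std::vector<double>* corner : {&min_corner, &max_corner}) {
+     if (!std::getline(in, line)) return false;
+     std::istringstream iss(line);
+     double coordinate;
+     while (iss >> coordinate) corner->push_back(coordinate);
+   }
+   // Both corners must have the same, non-zero dimension.
+   return !min_corner.empty() && min_corner.size() == max_corner.size();
+ }
+ \endcode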
*/
} // namespace Gudhi
diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h
index d6569f0c..34d3893d 100644
--- a/src/common/doc/main_page.h
+++ b/src/common/doc/main_page.h
@@ -93,6 +93,24 @@
</td>
</tr>
</table>
+ \subsection CoverComplexDataStructure Cover Complexes: Nerves and Graph Induced Complexes
+ \image html "gicvisu.jpg" "Graph Induced Complex of a point cloud."
+<table border="0">
+ <tr>
+ <td width="25%">
+ <b>Author:</b> Mathieu Carri&egrave;re<br>
+ <b>Introduced in:</b> GUDHI 2.0.1<br>
+ <b>Copyright:</b> GPL v3<br>
+ </td>
+ <td width="75%">
+ Nerves and Graph Induced Complexes are cover complexes, i.e. simplicial complexes that provably contain
+ topological information about the input data. They can be computed from a cover of the
+ data, which comes, e.g., from the preimage of a family of intervals covering the image
+ of a scalar-valued function defined on the data. <br>
+ <b>User manual:</b> \ref cover_complex - <b>Reference manual:</b> Gudhi::cover_complex::Cover_complex
+ </td>
+ </tr>
+</table>
\subsection SkeletonBlockerDataStructure Skeleton blocker
\image html "ds_representation.png" "Skeleton blocker representation"
<table border="0">
@@ -269,30 +287,38 @@ make doxygen
* Having CGAL version 4.4.0 or higher installed is recommended. The procedure to install this library according to
* your operating system is detailed here http://doc.cgal.org/latest/Manual/installation.html
*
- * The following examples require the <a target="_blank" href="http://www.cgal.org/">Computational Geometry Algorithms
+ * The following examples/utilities require the <a target="_blank" href="http://www.cgal.org/">Computational Geometry Algorithms
* Library</a> (CGAL \cite cgal:eb-15b) and will not be built if CGAL is not installed:
- * \li <a href="_persistent_cohomology_2alpha_complex_3d_persistence_8cpp-example.html">
- * Persistent_cohomology/alpha_complex_3d_persistence.cpp</a>
- * \li <a href="_persistent_cohomology_2exact_alpha_complex_3d_persistence_8cpp-example.html">
- * Persistent_cohomology/exact_alpha_complex_3d_persistence.cpp</a>
- * \li <a href="_persistent_cohomology_2weighted_alpha_complex_3d_persistence_8cpp-example.html">
- * Persistent_cohomology/weighted_alpha_complex_3d_persistence.cpp</a>
+ * \li <a href="_alpha_complex_2alpha_complex_3d_persistence_8cpp-example.html">
+ * Alpha_complex/alpha_complex_3d_persistence.cpp</a>
+ * \li <a href="_alpha_complex_2exact_alpha_complex_3d_persistence_8cpp-example.html">
+ * Alpha_complex/exact_alpha_complex_3d_persistence.cpp</a>
+ * \li <a href="_alpha_complex_2weighted_alpha_complex_3d_persistence_8cpp-example.html">
+ * Alpha_complex/weighted_alpha_complex_3d_persistence.cpp</a>
* \li <a href="_simplex_tree_2example_alpha_shapes_3_simplex_tree_from_off_file_8cpp-example.html">
* Simplex_tree/example_alpha_shapes_3_simplex_tree_from_off_file.cpp</a>
*
- * The following example requires CGAL version &ge; 4.6.0:
- * \li <a href="_witness_complex_2witness_complex_sphere_8cpp-example.html">
- * Witness_complex/witness_complex_sphere.cpp</a>
- *
+ * The following examples/utilities require CGAL version &ge; 4.6.0:
+ * \li <a href="_witness_complex_2strong_witness_persistence_8cpp-example.html">
+ * Witness_complex/strong_witness_persistence.cpp</a>
+ * \li <a href="_witness_complex_2weak_witness_persistence_8cpp-example.html">
+ * Witness_complex/weak_witness_persistence.cpp</a>
+ * \li <a href="_witness_complex_2example_strong_witness_complex_off_8cpp-example.html">
+ * Witness_complex/example_strong_witness_complex_off.cpp</a>
+ * \li <a href="_witness_complex_2example_witness_complex_off_8cpp-example.html">
+ * Witness_complex/example_witness_complex_off.cpp</a>
+ * \li <a href="_witness_complex_2example_witness_complex_sphere_8cpp-example.html">
+ * Witness_complex/example_witness_complex_sphere.cpp</a>
+ *
* The following example requires CGAL version &ge; 4.7.0:
* \li <a href="_alpha_complex_2_alpha_complex_from_off_8cpp-example.html">
* Alpha_complex/Alpha_complex_from_off.cpp</a>
* \li <a href="_alpha_complex_2_alpha_complex_from_points_8cpp-example.html">
* Alpha_complex/Alpha_complex_from_points.cpp</a>
- * \li <a href="_persistent_cohomology_2alpha_complex_persistence_8cpp-example.html">
- * Persistent_cohomology/alpha_complex_persistence.cpp</a>
- * \li <a href="_persistent_cohomology_2periodic_alpha_complex_3d_persistence_8cpp-example.html">
- * Persistent_cohomology/periodic_alpha_complex_3d_persistence.cpp</a>
+ * \li <a href="_alpha_complex_2alpha_complex_persistence_8cpp-example.html">
+ * Alpha_complex/alpha_complex_persistence.cpp</a>
+ * \li <a href="_alpha_complex_2periodic_alpha_complex_3d_persistence_8cpp-example.html">
+ * Alpha_complex/periodic_alpha_complex_3d_persistence.cpp</a>
* \li <a href="_persistent_cohomology_2custom_persistence_sort_8cpp-example.html">
* Persistent_cohomology/custom_persistence_sort.cpp</a>
*
@@ -301,8 +327,8 @@ make doxygen
* Bottleneck_distance/alpha_rips_persistence_bottleneck_distance.cpp.cpp</a>
* \li <a href="_bottleneck_distance_2bottleneck_basic_example_8cpp-example.html">
* Bottleneck_distance/bottleneck_basic_example.cpp</a>
- * \li <a href="_bottleneck_distance_2bottleneck_read_file_example_8cpp-example.html">
- * Bottleneck_distance/bottleneck_read_file_example.cpp</a>
+ * \li <a href="_bottleneck_distance_2bottleneck_read_file_8cpp-example.html">
+ * Bottleneck_distance/bottleneck_read_file.cpp</a>
* \li <a href="_spatial_searching_2example_spatial_searching_8cpp-example.html">
* Spatial_searching/example_spatial_searching.cpp</a>
* \li <a href="_subsampling_2example_choose_n_farthest_points_8cpp-example.html">
@@ -323,18 +349,18 @@ make doxygen
* <a target="_blank" href="http://eigen.tuxfamily.org/">Eigen3</a> is a C++ template library for linear algebra:
* matrices, vectors, numerical solvers, and related algorithms.
*
- * The following example requires the <a target="_blank" href="http://eigen.tuxfamily.org/">Eigen3</a> and will not be
+ * The following examples/utilities require the <a target="_blank" href="http://eigen.tuxfamily.org/">Eigen3</a> and will not be
* built if Eigen3 is not installed:
* \li <a href="_alpha_complex_2_alpha_complex_from_off_8cpp-example.html">
* Alpha_complex/Alpha_complex_from_off.cpp</a>
* \li <a href="_alpha_complex_2_alpha_complex_from_points_8cpp-example.html">
* Alpha_complex/Alpha_complex_from_points.cpp</a>
+ * \li <a href="_alpha_complex_2alpha_complex_persistence_8cpp-example.html">
+ * Alpha_complex/alpha_complex_persistence.cpp</a>
+ * \li <a href="_alpha_complex_2periodic_alpha_complex_3d_persistence_8cpp-example.html">
+ * Alpha_complex/periodic_alpha_complex_3d_persistence.cpp</a>
* \li <a href="_bottleneck_distance_2alpha_rips_persistence_bottleneck_distance_8cpp-example.html">
* Bottleneck_distance/alpha_rips_persistence_bottleneck_distance.cpp.cpp</a>
- * \li <a href="_persistent_cohomology_2alpha_complex_persistence_8cpp-example.html">
- * Persistent_cohomology/alpha_complex_persistence.cpp</a>
- * \li <a href="_persistent_cohomology_2periodic_alpha_complex_3d_persistence_8cpp-example.html">
- * Persistent_cohomology/periodic_alpha_complex_3d_persistence.cpp</a>
* \li <a href="_persistent_cohomology_2custom_persistence_sort_8cpp-example.html">
* Persistent_cohomology/custom_persistence_sort.cpp</a>
* \li <a href="_spatial_searching_2example_spatial_searching_8cpp-example.html">
@@ -351,6 +377,16 @@ make doxygen
* Tangential_complex/example_basic.cpp</a>
* \li <a href="_tangential_complex_2example_with_perturb_8cpp-example.html">
* Tangential_complex/example_with_perturb.cpp</a>
+ * \li <a href="_witness_complex_2strong_witness_persistence_8cpp-example.html">
+ * Witness_complex/strong_witness_persistence.cpp</a>
+ * \li <a href="_witness_complex_2weak_witness_persistence_8cpp-example.html">
+ * Witness_complex/weak_witness_persistence.cpp</a>
+ * \li <a href="_witness_complex_2example_strong_witness_complex_off_8cpp-example.html">
+ * Witness_complex/example_strong_witness_complex_off.cpp</a>
+ * \li <a href="_witness_complex_2example_witness_complex_off_8cpp-example.html">
+ * Witness_complex/example_witness_complex_off.cpp</a>
+ * \li <a href="_witness_complex_2example_witness_complex_sphere_8cpp-example.html">
+ * Witness_complex/example_witness_complex_sphere.cpp</a>
*
* \subsection tbb Threading Building Blocks
* <a target="_blank" href="https://www.threadingbuildingblocks.org/">Intel&reg; TBB</a> lets you easily write parallel
@@ -359,21 +395,27 @@ make doxygen
*
* Having Intel&reg; TBB installed is recommended to parallelize and accelerate some GUDHI computations.
*
- * The following examples are using Intel&reg; TBB if installed:
+ * The following examples/utilities are using Intel&reg; TBB if installed:
* \li <a href="_alpha_complex_2_alpha_complex_from_off_8cpp-example.html">
* Alpha_complex/Alpha_complex_from_off.cpp</a>
* \li <a href="_alpha_complex_2_alpha_complex_from_points_8cpp-example.html">
* Alpha_complex/Alpha_complex_from_points.cpp</a>
+ * \li <a href="_alpha_complex_2alpha_complex_3d_persistence_8cpp-example.html">
+ * Alpha_complex/alpha_complex_3d_persistence.cpp</a>
+ * \li <a href="_alpha_complex_2alpha_complex_persistence_8cpp-example.html">
+ * Alpha_complex/alpha_complex_persistence.cpp</a>
+ * \li <a href="_alpha_complex_2exact_alpha_complex_3d_persistence_8cpp-example.html">
+ * Alpha_complex/exact_alpha_complex_3d_persistence.cpp</a>
+ * \li <a href="_alpha_complex_2periodic_alpha_complex_3d_persistence_8cpp-example.html">
+ * Alpha_complex/periodic_alpha_complex_3d_persistence.cpp</a>
+ * \li <a href="_alpha_complex_2weighted_alpha_complex_3d_persistence_8cpp-example.html">
+ * Alpha_complex/weighted_alpha_complex_3d_persistence.cpp</a>
* \li <a href="_bitmap_cubical_complex_2_bitmap_cubical_complex_8cpp-example.html">
* Bitmap_cubical_complex/Bitmap_cubical_complex.cpp</a>
* \li <a href="_bitmap_cubical_complex_2_bitmap_cubical_complex_periodic_boundary_conditions_8cpp-example.html">
* Bitmap_cubical_complex/Bitmap_cubical_complex_periodic_boundary_conditions.cpp</a>
* \li <a href="_bitmap_cubical_complex_2_random_bitmap_cubical_complex_8cpp-example.html">
* Bitmap_cubical_complex/Random_bitmap_cubical_complex.cpp</a>
- * \li <a href="_persistent_cohomology_2alpha_complex_3d_persistence_8cpp-example.html">
- * Persistent_cohomology/alpha_complex_3d_persistence.cpp</a>
- * \li <a href="_persistent_cohomology_2alpha_complex_persistence_8cpp-example.html">
- * Persistent_cohomology/alpha_complex_persistence.cpp</a>
* \li <a href="_simplex_tree_2simple_simplex_tree_8cpp-example.html">
* Simplex_tree/simple_simplex_tree.cpp</a>
* \li <a href="_simplex_tree_2example_alpha_shapes_3_simplex_tree_from_off_file_8cpp-example.html">
@@ -394,27 +436,31 @@ make doxygen
* Persistent_cohomology/persistence_from_simple_simplex_tree.cpp</a>
* \li <a href="_persistent_cohomology_2plain_homology_8cpp-example.html">
* Persistent_cohomology/plain_homology.cpp</a>
- * \li <a href="_persistent_cohomology_2rips_distance_matrix_persistence_8cpp-example.html">
- * Persistent_cohomology/rips_distance_matrix_persistence.cpp</a>
* \li <a href="_persistent_cohomology_2rips_multifield_persistence_8cpp-example.html">
* Persistent_cohomology/rips_multifield_persistence.cpp</a>
- * \li <a href="_persistent_cohomology_2rips_persistence_8cpp-example.html">
- * Persistent_cohomology/rips_persistence.cpp</a>
* \li <a href="_persistent_cohomology_2rips_persistence_step_by_step_8cpp-example.html">
* Persistent_cohomology/rips_persistence_step_by_step.cpp</a>
* \li <a href="_persistent_cohomology_2exact_alpha_complex_3d_persistence_8cpp-example.html">
* Persistent_cohomology/exact_alpha_complex_3d_persistence.cpp</a>
* \li <a href="_persistent_cohomology_2weighted_alpha_complex_3d_persistence_8cpp-example.html">
* Persistent_cohomology/weighted_alpha_complex_3d_persistence.cpp</a>
- * \li <a href="_persistent_cohomology_2periodic_alpha_complex_3d_persistence_8cpp-example.html">
- * Persistent_cohomology/periodic_alpha_complex_3d_persistence.cpp</a>
* \li <a href="_persistent_cohomology_2custom_persistence_sort_8cpp-example.html">
* Persistent_cohomology/custom_persistence_sort.cpp</a>
* \li <a href="_rips_complex_2example_one_skeleton_rips_from_points_8cpp-example.html">
* Rips_complex/example_one_skeleton_rips_from_points.cpp</a>
* \li <a href="_rips_complex_2example_rips_complex_from_off_file_8cpp-example.html">
* Rips_complex/example_rips_complex_from_off_file.cpp</a>
- *
+ * \li <a href="_rips_complex_2rips_distance_matrix_persistence_8cpp-example.html">
+ * Rips_complex/rips_distance_matrix_persistence.cpp</a>
+ * \li <a href="_rips_complex_2rips_persistence_8cpp-example.html">
+ * Rips_complex/rips_persistence.cpp</a>
+ * \li <a href="_witness_complex_2strong_witness_persistence_8cpp-example.html">
+ * Witness_complex/strong_witness_persistence.cpp</a>
+ * \li <a href="_witness_complex_2weak_witness_persistence_8cpp-example.html">
+ * Witness_complex/weak_witness_persistence.cpp</a>
+ * \li <a href="_witness_complex_2example_nearest_landmark_table_8cpp-example.html">
+ * Witness_complex/example_nearest_landmark_table.cpp</a>
+ *
* \section Contributions Bug reports and contributions
* Please help us improving the quality of the GUDHI library. You may report bugs or suggestions to:
* \verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim
@@ -438,9 +484,14 @@ make doxygen
/*! @file Examples
* @example Alpha_complex/Alpha_complex_from_off.cpp
* @example Alpha_complex/Alpha_complex_from_points.cpp
+ * @example Alpha_complex/alpha_complex_3d_persistence.cpp
+ * @example Alpha_complex/alpha_complex_persistence.cpp
+ * @example Alpha_complex/exact_alpha_complex_3d_persistence.cpp
+ * @example Alpha_complex/periodic_alpha_complex_3d_persistence.cpp
+ * @example Alpha_complex/weighted_alpha_complex_3d_persistence.cpp
* @example Bottleneck_distance/alpha_rips_persistence_bottleneck_distance.cpp
* @example Bottleneck_distance/bottleneck_basic_example.cpp
- * @example Bottleneck_distance/bottleneck_read_file_example.cpp
+ * @example Bottleneck_distance/bottleneck_read_file.cpp
* @example Bitmap_cubical_complex/Bitmap_cubical_complex.cpp
* @example Bitmap_cubical_complex/Bitmap_cubical_complex_periodic_boundary_conditions.cpp
* @example Bitmap_cubical_complex/Random_bitmap_cubical_complex.cpp
@@ -448,22 +499,17 @@ make doxygen
* @example common/example_CGAL_points_off_reader.cpp
* @example Contraction/Garland_heckbert.cpp
* @example Contraction/Rips_contraction.cpp
- * @example Persistent_cohomology/alpha_complex_3d_persistence.cpp
- * @example Persistent_cohomology/alpha_complex_persistence.cpp
* @example Persistent_cohomology/rips_persistence_via_boundary_matrix.cpp
- * @example Persistent_cohomology/exact_alpha_complex_3d_persistence.cpp
- * @example Persistent_cohomology/weighted_alpha_complex_3d_persistence.cpp
- * @example Persistent_cohomology/periodic_alpha_complex_3d_persistence.cpp
* @example Persistent_cohomology/persistence_from_file.cpp
* @example Persistent_cohomology/persistence_from_simple_simplex_tree.cpp
* @example Persistent_cohomology/plain_homology.cpp
* @example Persistent_cohomology/rips_multifield_persistence.cpp
- * @example Persistent_cohomology/rips_distance_matrix_persistence.cpp
- * @example Persistent_cohomology/rips_persistence.cpp
* @example Persistent_cohomology/custom_persistence_sort.cpp
* @example Persistent_cohomology/rips_persistence_step_by_step.cpp
* @example Rips_complex/example_one_skeleton_rips_from_points.cpp
* @example Rips_complex/example_rips_complex_from_off_file.cpp
+ * @example Rips_complex/rips_persistence.cpp
+ * @example Rips_complex/rips_distance_matrix_persistence.cpp
* @example Simplex_tree/mini_simplex_tree.cpp
* @example Simplex_tree/simple_simplex_tree.cpp
* @example Simplex_tree/example_alpha_shapes_3_simplex_tree_from_off_file.cpp
@@ -481,9 +527,9 @@ make doxygen
* @example Tangential_complex/example_with_perturb.cpp
* @example Witness_complex/example_nearest_landmark_table.cpp
* @example Witness_complex/example_strong_witness_complex_off.cpp
- * @example Witness_complex/example_strong_witness_persistence.cpp
* @example Witness_complex/example_witness_complex_off.cpp
- * @example Witness_complex/example_witness_complex_persistence.cpp
* @example Witness_complex/example_witness_complex_sphere.cpp
+ * @example Witness_complex/weak_witness_persistence.cpp
+ * @example Witness_complex/strong_witness_persistence.cpp
*/
diff --git a/src/common/utilities/README b/src/common/utilities/README
index dc841521..18fa8cc4 100644
--- a/src/common/utilities/README
+++ b/src/common/utilities/README
@@ -1,19 +1,19 @@
-======================= off_file_from_shape_generator ==================================
+# Pointset generator #
-Example of use :
+## `off_file_from_shape_generator` ##
-*** on|in sphere|cube|curve|torus|klein generator
+Generates a point set and saves it in an OFF file. The command line is:
+`off_file_from_shape_generator on|in sphere|cube|curve|torus|klein <filename> <num_points> <dimension> <parameter1> <parameter2>...`
-./off_file_from_shape_generator on sphere onSphere.off 1000 3 15.2
+Warning: "on cube" generator is not available!
- => generates a onSphere.off file with 1000 points randomized on a sphere of dimension 3 and radius 15.2
+Examples:
-./off_file_from_shape_generator in sphere inSphere.off 100 2
+* Generate an onSphere.off file with 1000 points randomized on a sphere of dimension 3 and radius 15.2:
+`off_file_from_shape_generator on sphere onSphere.off 1000 3 15.2`
+
+* Generate an inSphere.off file with 100 points randomized in a sphere of dimension 2 (circle) and radius 1.0 (default):
+`off_file_from_shape_generator in sphere inSphere.off 100 2`
- => generates a inSphere.off file with 100 points randomized in a sphere of dimension 2 (circle) and radius 1.0 (default)
-
-./off_file_from_shape_generator in cube inCube.off 10000 3 5.8
-
- => generates a inCube.off file with 10000 points randomized in a cube of dimension 3 and side 5.8
-
-!! Warning: hypegenerator on cube is not available !!
+* Generate an inCube.off file with 10000 points randomized in a cube of dimension 3 and side 5.8:
+`off_file_from_shape_generator in cube inCube.off 10000 3 5.8`
diff --git a/src/cython/cython/simplex_tree.pyx b/src/cython/cython/simplex_tree.pyx
index ff12dd48..8a436619 100644
--- a/src/cython/cython/simplex_tree.pyx
+++ b/src/cython/cython/simplex_tree.pyx
@@ -133,9 +133,14 @@ cdef class SimplexTree:
.. note::
- This function must be launched before persistence, betti_numbers,
- persistent_betti_numbers or get_filtration after inserting or
- removing simplices.
+ This function must be launched before
+ :func:`persistence()<gudhi.SimplexTree.persistence>`,
+ :func:`betti_numbers()<gudhi.SimplexTree.betti_numbers>`,
+ :func:`persistent_betti_numbers()<gudhi.SimplexTree.persistent_betti_numbers>`,
+ or :func:`get_filtration()<gudhi.SimplexTree.get_filtration>`
+ after :func:`inserting<gudhi.SimplexTree.insert>` or
+ :func:`removing<gudhi.SimplexTree.remove_maximal_simplex>`
+ simplices.
"""
self.thisptr.initialize_filtration()
@@ -167,7 +172,10 @@ cdef class SimplexTree:
This function is not constant time because it can recompute
dimension if required (can be triggered by
- remove_maximal_simplex or prune_above_filtration methods).
+ :func:`remove_maximal_simplex()<gudhi.SimplexTree.remove_maximal_simplex>`
+ or
+ :func:`prune_above_filtration()<gudhi.SimplexTree.prune_above_filtration>`
+ methods).
"""
return self.thisptr.dimension()
@@ -190,8 +198,11 @@ cdef class SimplexTree:
This function must be used with caution because it disables
dimension recomputation when required
- (this recomputation can be triggered by remove_maximal_simplex
- or prune_above_filtration methods.
+ (this recomputation can be triggered by
+ :func:`remove_maximal_simplex()<gudhi.SimplexTree.remove_maximal_simplex>`
+ or
+ :func:`prune_above_filtration()<gudhi.SimplexTree.prune_above_filtration>`
+ ).
"""
self.thisptr.set_dimension(<int>dimension)
@@ -319,14 +330,16 @@ cdef class SimplexTree:
.. note::
Be aware that removing is shifting data in a flat_map
- (initialize_filtration to be done).
+ (:func:`initialize_filtration()<gudhi.SimplexTree.initialize_filtration>` to be done).
.. note::
The dimension of the simplicial complex may be lower after calling
remove_maximal_simplex than it was before. However,
- upper_bound_dimension method will return the old value, which
- remains a valid upper bound. If you care, you can call dimension
+ :func:`upper_bound_dimension()<gudhi.SimplexTree.upper_bound_dimension>`
+ method will return the old value, which
+ remains a valid upper bound. If you care, you can call
+ :func:`dimension()<gudhi.SimplexTree.dimension>`
to recompute the exact dimension.
"""
self.thisptr.remove_maximal_simplex(simplex)
@@ -344,17 +357,23 @@ cdef class SimplexTree:
Some simplex tree functions require the filtration to be valid.
prune_above_filtration function is not launching
- initialize_filtration but returns the filtration modification
+ :func:`initialize_filtration()<gudhi.SimplexTree.initialize_filtration>`
+ but returns the filtration modification
information. If the complex has changed, please call
- initialize_filtration to recompute it.
+ :func:`initialize_filtration()<gudhi.SimplexTree.initialize_filtration>`
+ to recompute it.
.. note::
Note that the dimension of the simplicial complex may be lower
- after calling prune_above_filtration than it was before. However,
- upper_bound_dimension will return the old value, which remains a
- valid upper bound. If you care, you can call dimension method to
- recompute the exact dimension.
+ after calling
+ :func:`prune_above_filtration()<gudhi.SimplexTree.prune_above_filtration>`
+ than it was before. However,
+ :func:`upper_bound_dimension()<gudhi.SimplexTree.upper_bound_dimension>`
+ will return the old value, which remains a
+ valid upper bound. If you care, you can call
+ :func:`dimension()<gudhi.SimplexTree.dimension>`
+ method to recompute the exact dimension.
"""
return self.thisptr.prune_above_filtration(filtration)
@@ -405,8 +424,9 @@ cdef class SimplexTree:
:returns: The Betti numbers ([B0, B1, ..., Bn]).
:rtype: list of int
- :note: betti_numbers function requires persistence function to be
- launched first.
+ :note: betti_numbers function requires
+ :func:`persistence()<gudhi.SimplexTree.persistence>`
+ function to be launched first.
"""
cdef vector[int] bn_result
if self.pcohptr != NULL:
@@ -430,7 +450,8 @@ cdef class SimplexTree:
:returns: The persistent Betti numbers ([B0, B1, ..., Bn]).
:rtype: list of int
- :note: persistent_betti_numbers function requires persistence
+ :note: persistent_betti_numbers function requires
+ :func:`persistence()<gudhi.SimplexTree.persistence>`
function to be launched first.
"""
cdef vector[int] pbn_result
@@ -450,8 +471,9 @@ cdef class SimplexTree:
:returns: The persistence intervals.
:rtype: list of pair of float
- :note: intervals_in_dim function requires persistence function to be
- launched first.
+ :note: intervals_in_dim function requires
+ :func:`persistence()<gudhi.SimplexTree.persistence>`
+ function to be launched first.
"""
cdef vector[pair[double,double]] intervals_result
if self.pcohptr != NULL:
@@ -468,8 +490,9 @@ cdef class SimplexTree:
:param persistence_file: The specific dimension.
:type persistence_file: string.
- :note: intervals_in_dim function requires persistence function to be
- launched first.
+ :note: intervals_in_dim function requires
+ :func:`persistence()<gudhi.SimplexTree.persistence>`
+ function to be launched first.
"""
if self.pcohptr != NULL:
if persistence_file != '':
diff --git a/src/cython/doc/python3-sphinx-build.py b/src/cython/doc/python3-sphinx-build.py
index 44b94169..84d158cf 100755
--- a/src/cython/doc/python3-sphinx-build.py
+++ b/src/cython/doc/python3-sphinx-build.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python3
+#!/usr/bin/env python3
"""
Emulate sphinx-build for python3