author     glisse <glisse@636b058d-ea47-450e-bf9e-a15bfbe3eedb>  2018-02-01 18:19:00 +0000
committer  glisse <glisse@636b058d-ea47-450e-bf9e-a15bfbe3eedb>  2018-02-01 18:19:00 +0000
commit     24386d494ba4b8bb19b4559f6d2b3e4ecc980571 (patch)
tree       f4c1034bd75d6f8a237a4ff7c5f30df6d6732803
parent     d3ff96460cbcd7de4d1f2d03c61e4227dd4c4767 (diff)
parent     e15408b4af5cba8966aa8773f6ee6884942c1d95 (diff)

Merge from trunk.

hand-merge README that moved to *.md.

git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/sparserips-glisse@3201 636b058d-ea47-450e-bf9e-a15bfbe3eedb
Former-commit-id: f1922a265c79937e507bbb963875d21fae88069e
-rw-r--r--  CMakeGUDHIVersion.txt  2
-rw-r--r--  biblio/how_to_cite_gudhi.bib  18
-rw-r--r--  src/Alpha_complex/doc/Intro_alpha_complex.h  4
-rw-r--r--  src/Alpha_complex/utilities/alpha_complex_3d_helper.h  2
-rw-r--r--  src/Alpha_complex/utilities/alpha_complex_3d_persistence.cpp  13
-rw-r--r--  src/Alpha_complex/utilities/alphacomplex.md (renamed from src/Alpha_complex/utilities/README)  173
-rw-r--r--  src/Alpha_complex/utilities/exact_alpha_complex_3d_persistence.cpp  10
-rw-r--r--  src/Alpha_complex/utilities/periodic_alpha_complex_3d_persistence.cpp  13
-rw-r--r--  src/Alpha_complex/utilities/weighted_alpha_complex_3d_persistence.cpp  18
-rw-r--r--  src/Alpha_complex/utilities/weighted_periodic_alpha_complex_3d_persistence.cpp  22
-rw-r--r--  src/Bitmap_cubical_complex/doc/Gudhi_Cubical_Complex_doc.h  1
-rw-r--r--  src/Bitmap_cubical_complex/utilities/README  18
-rw-r--r--  src/Bitmap_cubical_complex/utilities/cubicalcomplex.md  29
-rw-r--r--  src/Bottleneck_distance/include/gudhi/Bottleneck.h  4
-rw-r--r--  src/Bottleneck_distance/include/gudhi/Neighbors_finder.h  1
-rw-r--r--  src/Bottleneck_distance/utilities/README  10
-rw-r--r--  src/Bottleneck_distance/utilities/bottleneckdistance.md  18
-rw-r--r--  src/Contraction/include/gudhi/Edge_contraction.h  4
-rw-r--r--  src/Doxyfile  4
-rw-r--r--  src/GudhUI/CMakeLists.txt  66
-rw-r--r--  src/Nerve_GIC/doc/Intro_graph_induced_complex.h  44
-rw-r--r--  src/Nerve_GIC/doc/funcGICvisu.jpg  bin 71647 -> 68388 bytes
-rw-r--r--  src/Nerve_GIC/doc/funcGICvisu.pdf  bin 0 -> 11347 bytes
-rw-r--r--  src/Nerve_GIC/example/CMakeLists.txt  37
-rw-r--r--  src/Nerve_GIC/example/CoordGIC.cpp  4
-rw-r--r--  src/Nerve_GIC/example/FuncGIC.cpp  4
-rw-r--r--  src/Nerve_GIC/example/GIC.cpp  95
-rwxr-xr-x  src/Nerve_GIC/example/KeplerMapperVisuFromTxtFile.py  72
-rw-r--r--  src/Nerve_GIC/example/Nerve.txt  43
-rw-r--r--  src/Nerve_GIC/include/gudhi/GIC.h  768
-rw-r--r--  src/Nerve_GIC/test/CMakeLists.txt  19
-rw-r--r--  src/Nerve_GIC/test/test_GIC.cpp  4
-rw-r--r--  src/Nerve_GIC/utilities/CMakeLists.txt  24
-rwxr-xr-x  src/Nerve_GIC/utilities/KeplerMapperVisuFromTxtFile.py  89
-rw-r--r--  src/Nerve_GIC/utilities/Nerve.cpp (renamed from src/Nerve_GIC/example/Nerve.cpp)  5
-rw-r--r--  src/Nerve_GIC/utilities/Nerve.txt  63
-rw-r--r--  src/Nerve_GIC/utilities/VoronoiGIC.cpp (renamed from src/Nerve_GIC/example/VoronoiGIC.cpp)  4
-rw-r--r--  src/Nerve_GIC/utilities/covercomplex.md  62
-rwxr-xr-x  src/Nerve_GIC/utilities/km.py (renamed from src/Nerve_GIC/example/km.py)  0
-rw-r--r--  src/Nerve_GIC/utilities/km.py.COPYRIGHT (renamed from src/Nerve_GIC/example/km.py.COPYRIGHT)  0
-rw-r--r--  src/Persistence_representations/doc/Persistence_representations_doc.h  3
-rw-r--r--  src/Persistence_representations/example/CMakeLists.txt  5
-rw-r--r--  src/Persistence_representations/include/gudhi/read_persistence_from_file.h  15
-rw-r--r--  src/Persistence_representations/test/CMakeLists.txt  14
-rw-r--r--  src/Persistent_cohomology/doc/Intro_persistent_cohomology.h  1
-rw-r--r--  src/Rips_complex/doc/Intro_rips_complex.h  2
-rw-r--r--  src/Rips_complex/utilities/rips_distance_matrix_persistence.cpp  61
-rw-r--r--  src/Rips_complex/utilities/rips_persistence.cpp  60
-rw-r--r--  src/Rips_complex/utilities/ripscomplex.md  75
-rw-r--r--  src/Simplex_tree/doc/Intro_simplex_tree.h  1
-rw-r--r--  src/Simplex_tree/example/cech_complex_cgal_mini_sphere_3d.cpp  85
-rw-r--r--  src/Simplex_tree/example/graph_expansion_with_blocker.cpp  40
-rw-r--r--  src/Simplex_tree/example/simple_simplex_tree.cpp  84
-rw-r--r--  src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h  3
-rw-r--r--  src/Spatial_searching/doc/Intro_spatial_searching.h  2
-rw-r--r--  src/Spatial_searching/include/gudhi/Kd_tree_search.h  3
-rw-r--r--  src/Subsampling/doc/Intro_subsampling.h  2
-rw-r--r--  src/Tangential_complex/doc/Intro_tangential_complex.h  2
-rw-r--r--  src/Witness_complex/doc/Witness_complex_doc.h  3
-rw-r--r--  src/Witness_complex/example/example_strong_witness_complex_off.cpp  22
-rw-r--r--  src/Witness_complex/example/example_witness_complex_sphere.cpp  24
-rw-r--r--  src/Witness_complex/utilities/strong_witness_persistence.cpp  69
-rw-r--r--  src/Witness_complex/utilities/weak_witness_persistence.cpp  69
-rw-r--r--  src/Witness_complex/utilities/witnesscomplex.md (renamed from src/Witness_complex/utilities/README)  62
-rw-r--r--  src/common/doc/examples.h  99
-rw-r--r--  src/common/doc/footer.html  10
-rw-r--r--  src/common/doc/header.html  4
-rw-r--r--  src/common/doc/installation.h  263
-rw-r--r--  src/common/doc/main_page.h  305
-rw-r--r--  src/common/include/gudhi/Unitary_tests_utils.h  1
-rw-r--r--  src/common/utilities/README  19
-rw-r--r--  src/common/utilities/pointsetgenerator.md  33
-rw-r--r--  src/cython/doc/_templates/layout.html  6
-rwxr-xr-x  src/cython/doc/conf.py  4
-rw-r--r--  src/cython/setup.py.in  4
75 files changed, 1697 insertions, 1525 deletions
diff --git a/CMakeGUDHIVersion.txt b/CMakeGUDHIVersion.txt
index cb07e55a..5e71f7eb 100644
--- a/CMakeGUDHIVersion.txt
+++ b/CMakeGUDHIVersion.txt
@@ -1,6 +1,6 @@
set (GUDHI_MAJOR_VERSION 2)
set (GUDHI_MINOR_VERSION 1)
-set (GUDHI_PATCH_VERSION 0.rc1)
+set (GUDHI_PATCH_VERSION 0)
set(GUDHI_VERSION ${GUDHI_MAJOR_VERSION}.${GUDHI_MINOR_VERSION}.${GUDHI_PATCH_VERSION})
message(STATUS "GUDHI version : ${GUDHI_VERSION}")
diff --git a/biblio/how_to_cite_gudhi.bib b/biblio/how_to_cite_gudhi.bib
index 59c05a5b..5994124a 100644
--- a/biblio/how_to_cite_gudhi.bib
+++ b/biblio/how_to_cite_gudhi.bib
@@ -122,3 +122,21 @@
, url = "http://gudhi.gforge.inria.fr/python/latest/"
, year = 2016
}
+
+@incollection{gudhi:CoverComplex
+, author = "Mathieu Carri\`ere"
+, title = "Cover complex"
+, publisher = "{GUDHI Editorial Board}"
+, booktitle = "{GUDHI} User and Reference Manual"
+, url = "http://gudhi.gforge.inria.fr/doc/latest/group__cover__complex.html"
+, year = 2017
+}
+
+@incollection{gudhi:PersistenceRepresentations
+, author = "Pawel Dlotko"
+, title = "Persistence representations"
+, publisher = "{GUDHI Editorial Board}"
+, booktitle = "{GUDHI} User and Reference Manual"
+, url = "http://gudhi.gforge.inria.fr/doc/latest/group___persistence__representations.html"
+, year = 2017
+}
diff --git a/src/Alpha_complex/doc/Intro_alpha_complex.h b/src/Alpha_complex/doc/Intro_alpha_complex.h
index cf1a946a..a08663ca 100644
--- a/src/Alpha_complex/doc/Intro_alpha_complex.h
+++ b/src/Alpha_complex/doc/Intro_alpha_complex.h
@@ -31,7 +31,7 @@ namespace alpha_complex {
/** \defgroup alpha_complex Alpha complex
*
* \author Vincent Rouvreau
- *
+ *
* @{
*
* \section definition Definition
@@ -195,8 +195,6 @@ namespace alpha_complex {
*
* \include Alpha_complex/alphaoffreader_for_doc_32.txt
*
- * \copyright GNU General Public License v3.
- * \verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim
*/
/** @} */ // end defgroup alpha_complex
diff --git a/src/Alpha_complex/utilities/alpha_complex_3d_helper.h b/src/Alpha_complex/utilities/alpha_complex_3d_helper.h
index 3747923f..a59f0654 100644
--- a/src/Alpha_complex/utilities/alpha_complex_3d_helper.h
+++ b/src/Alpha_complex/utilities/alpha_complex_3d_helper.h
@@ -52,7 +52,7 @@ Vertex_list from_facet(const Facet& fct) {
template <class Vertex_list, class Edge_3>
Vertex_list from_edge(const Edge_3& edg) {
Vertex_list the_list;
- for (auto i : { edg.second, edg.third }) {
+ for (auto i : {edg.second, edg.third}) {
#ifdef DEBUG_TRACES
std::cout << "from edge[" << i << "]=" << edg.first->vertex(i)->point() << std::endl;
#endif // DEBUG_TRACES
diff --git a/src/Alpha_complex/utilities/alpha_complex_3d_persistence.cpp b/src/Alpha_complex/utilities/alpha_complex_3d_persistence.cpp
index 1070d17b..8ef5ffb2 100644
--- a/src/Alpha_complex/utilities/alpha_complex_3d_persistence.cpp
+++ b/src/Alpha_complex/utilities/alpha_complex_3d_persistence.cpp
@@ -138,16 +138,16 @@ int main(int argc, char **argv) {
std::vector<Alpha_value_type>::iterator the_alpha_value_iterator = the_alpha_values.begin();
for (auto object_iterator : the_objects) {
// Retrieve Alpha shape vertex list from object
- if (const Cell_handle* cell = CGAL::object_cast<Cell_handle>(&object_iterator)) {
+ if (const Cell_handle *cell = CGAL::object_cast<Cell_handle>(&object_iterator)) {
vertex_list = from_cell<Vertex_list, Cell_handle>(*cell);
count_cells++;
- } else if (const Facet* facet = CGAL::object_cast<Facet>(&object_iterator)) {
+ } else if (const Facet *facet = CGAL::object_cast<Facet>(&object_iterator)) {
vertex_list = from_facet<Vertex_list, Facet>(*facet);
count_facets++;
- } else if (const Edge_3* edge = CGAL::object_cast<Edge_3>(&object_iterator)) {
+ } else if (const Edge_3 *edge = CGAL::object_cast<Edge_3>(&object_iterator)) {
vertex_list = from_edge<Vertex_list, Edge_3>(*edge);
count_edges++;
- } else if (const Vertex_handle* vertex = CGAL::object_cast<Vertex_handle>(&object_iterator)) {
+ } else if (const Vertex_handle *vertex = CGAL::object_cast<Vertex_handle>(&object_iterator)) {
count_vertices++;
vertex_list = from_vertex<Vertex_list, Vertex_handle>(*vertex);
}
@@ -173,13 +173,12 @@ int main(int argc, char **argv) {
}
}
// Construction of the simplex_tree
- Filtration_value filtr = /*std::sqrt*/(*the_alpha_value_iterator);
+ Filtration_value filtr = /*std::sqrt*/ (*the_alpha_value_iterator);
#ifdef DEBUG_TRACES
std::cout << "filtration = " << filtr << std::endl;
#endif // DEBUG_TRACES
simplex_tree.insert_simplex(the_simplex, filtr);
- GUDHI_CHECK(the_alpha_value_iterator != the_alpha_values.end(),
- "CGAL provided more simplices than values");
+ GUDHI_CHECK(the_alpha_value_iterator != the_alpha_values.end(), "CGAL provided more simplices than values");
++the_alpha_value_iterator;
}
diff --git a/src/Alpha_complex/utilities/README b/src/Alpha_complex/utilities/alphacomplex.md
index 1cd2ca95..aace85d3 100644
--- a/src/Alpha_complex/utilities/README
+++ b/src/Alpha_complex/utilities/alphacomplex.md
@@ -1,14 +1,26 @@
-# Alpha_complex #
-## `alpha_complex_3d_persistence` ##
-This program computes the persistent homology with coefficient field Z/pZ of the 3D alpha complex built from a 3D point cloud. The output diagram contains one bar per line, written with the convention:
-`p dim birth death`
+# Alpha complex #
-where `dim` is the dimension of the homological feature, `birth` and `death` are respectively the birth and death of the feature, and `p` is the characteristic of the field *Z/pZ* used for homology coefficients (`p` must be a prime number).
+
+## alpha_complex_persistence ##
+
+This program computes the persistent homology with coefficient field Z/pZ of the dD alpha complex built from a dD point cloud.
+The output diagram contains one bar per line, written with the convention:
+
+```
+ p dim birth death
+```
+
+where `dim` is the dimension of the homological feature, `birth` and `death` are respectively the birth and death of the feature,
+and `p` is the characteristic of the field *Z/pZ* used for homology coefficients (`p` must be a prime number).
**Usage**
-`alpha_complex_3d_persistence [options] <input OFF file>`
+
+```
+ alpha_complex_persistence [options] <input OFF file>
+```
+
where
`<input OFF file>` is the path to the input point cloud in [nOFF ASCII format](http://www.geomview.org/docs/html/OFF.html).
@@ -16,47 +28,37 @@ where
* `-h [ --help ]` Produce help message
* `-o [ --output-file ]` Name of file in which the persistence diagram is written. Default print in standard output.
-* `-p [ --field-charac ]` (default=11) Characteristic p of the coefficient field Z/pZ for computing homology.
+* `-r [ --max-alpha-square-value ]` (default = inf) Maximal alpha square value for the Alpha complex construction.
+* `-p [ --field-charac ]` (default = 11) Characteristic p of the coefficient field Z/pZ for computing homology.
* `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals.
**Example**
-`alpha_complex_3d_persistence ../../data/points/tore3D_300.off -p 2 -m 0.45`
-outputs:
```
-Simplex_tree dim: 3
-2 0 0 inf
-2 1 0.0682162 1.0001
-2 1 0.0934117 1.00003
-2 2 0.56444 1.03938
+ alpha_complex_persistence -r 32 -p 2 -m 0.45 ../../data/points/tore3D_300.off
```
-Here we retrieve expected Betti numbers on a tore 3D:
-```
-Betti numbers[0] = 1
-Betti numbers[1] = 2
-Betti numbers[2] = 1
-```
+N.B.:
-N.B.:
-* `alpha_complex_3d_persistence` only accepts OFF files in dimension 3.
* Filtration values are alpha square values.
+## alpha_complex_3d_persistence ##
+This program computes the persistent homology with coefficient field Z/pZ of the 3D alpha complex built from a 3D point cloud. The output diagram contains one bar per line, written with the convention:
-## `exact_alpha_complex_3d_persistence` ##
-Same as `alpha_complex_3d_persistence`, but using exact computation. It is slower, but it is necessary when points are on a grid for instance.
+```
+p dim birth death
+```
+where `dim` is the dimension of the homological feature, `birth` and `death` are respectively the birth and death of the feature, and `p` is the characteristic of the field *Z/pZ* used for homology coefficients (`p` must be a prime number).
+**Usage**
-## `weighted_alpha_complex_3d_persistence` ##
-Same as `alpha_complex_3d_persistence`, but using weighted points.
+```
+ alpha_complex_3d_persistence [options] <input OFF file>
+```
-**Usage**
-`weighted_alpha_complex_3d_persistence [options] <input OFF file> <weights input file>`
-where
-`<input OFF file>` is the path to the input point cloud in [nOFF ASCII format](http://www.geomview.org/docs/html/OFF.html).
-`<input weights file>` is the path to the file containing the weights of the points (one value per line).
+where `<input OFF file>` is the path to the input point cloud in [nOFF ASCII format](http://www.geomview.org/docs/html/OFF.html).
**Allowed options**
@@ -66,112 +68,91 @@ where
* `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals.
**Example**
-`weighted_alpha_complex_3d_persistence ../../data/points/tore3D_300.off ../../data/points/tore3D_300.weights -p 2 -m 0.45`
-outputs:
```
-Simplex_tree dim: 3
-2 0 -1 inf
-2 1 -0.931784 0.000103311
-2 1 -0.906588 2.60165e-05
-2 2 -0.43556 0.0393798
+alpha_complex_3d_persistence ../../data/points/tore3D_300.off -p 2 -m 0.45
```
N.B.:
-* Weights values are explained on CGAL [Alpha shape](https://doc.cgal.org/latest/Alpha_shapes_3/index.html#title0)
-and [Regular triangulation](https://doc.cgal.org/latest/Triangulation_3/index.html#Triangulation3secclassRegulartriangulation) documentation.
+
+* `alpha_complex_3d_persistence` only accepts OFF files in dimension 3.
* Filtration values are alpha square values.
-## `periodic_alpha_complex_3d_persistence` ##
-Same as `alpha_complex_3d_persistence`, but using periodic alpha shape 3d.
-Refer to the [CGAL's 3D Periodic Triangulations User Manual](https://doc.cgal.org/latest/Periodic_3_triangulation_3/index.html) for more details.
+## exact_alpha_complex_3d_persistence ##
+
+Same as `alpha_complex_3d_persistence`, but using exact computation.
+It is slower, but it is necessary when points are on a grid for instance.
+
+
+
+## weighted_alpha_complex_3d_persistence ##
+
+Same as `alpha_complex_3d_persistence`, but using weighted points.
**Usage**
-`periodic_alpha_complex_3d_persistence [options] <input OFF file> <cuboid file>`
+
+```
+ weighted_alpha_complex_3d_persistence [options] <input OFF file> <weights input file>
+```
+
where
-`<input OFF file>` is the path to the input point cloud in [nOFF ASCII format](http://www.geomview.org/docs/html/OFF.html).
-`<cuboid file>` is the path to the file describing the periodic domain. It must be in the format described [here](http://gudhi.gforge.inria.fr/doc/latest/fileformats.html#FileFormatsIsoCuboid).
+
+* `<input OFF file>` is the path to the input point cloud in [nOFF ASCII format](http://www.geomview.org/docs/html/OFF.html).
+* `<input weights file>` is the path to the file containing the weights of the points (one value per line).
**Allowed options**
* `-h [ --help ]` Produce help message
* `-o [ --output-file ]` Name of file in which the persistence diagram is written. Default print in standard output.
* `-p [ --field-charac ]` (default=11) Characteristic p of the coefficient field Z/pZ for computing homology.
-* `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals
-
+* `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals.
**Example**
-`periodic_alpha_complex_3d_persistence ../../data/points/grid_10_10_10_in_0_1.off ../../data/points/iso_cuboid_3_in_0_1.txt -p 3 -m 1.0`
-
-outputs:
-```
-Periodic Delaunay computed.
-Simplex_tree dim: 3
-3 0 0 inf
-3 1 0.0025 inf
-3 1 0.0025 inf
-3 1 0.0025 inf
-3 2 0.005 inf
-3 2 0.005 inf
-3 2 0.005 inf
-3 3 0.0075 inf
-```
-Here we retrieve expected Betti numbers on an 3D iso-oriented cuboids:
```
-Betti numbers[0] = 1
-Betti numbers[1] = 3
-Betti numbers[2] = 3
-Betti numbers[3] = 1
+ weighted_alpha_complex_3d_persistence ../../data/points/tore3D_300.off ../../data/points/tore3D_300.weights -p 2 -m 0.45
```
-N.B.:
-* Cuboid file must be in the format described [here](http://gudhi.gforge.inria.fr/doc/latest/fileformats.html#FileFormatsIsoCuboid).
-* Filtration values are alpha square values.
+N.B.:
+* Weights values are explained on CGAL [Alpha shape](https://doc.cgal.org/latest/Alpha_shapes_3/index.html#title0)
+and [Regular triangulation](https://doc.cgal.org/latest/Triangulation_3/index.html#Triangulation3secclassRegulartriangulation) documentation.
+* Filtration values are alpha square values.
-## `alpha_complex_persistence` ##
-This program computes the persistent homology with coefficient field Z/pZ of the dD alpha complex built from a dD point cloud. The output diagram contains one bar per line, written with the convention:
+## periodic_alpha_complex_3d_persistence ##
+Same as `alpha_complex_3d_persistence`, but using periodic alpha shape 3d.
+Refer to the [CGAL's 3D Periodic Triangulations User Manual](https://doc.cgal.org/latest/Periodic_3_triangulation_3/index.html) for more details.
-`p dim birth death`
+**Usage**
-where `dim` is the dimension of the homological feature, `birth` and `death` are respectively the birth and death of the feature, and `p` is the characteristic of the field *Z/pZ* used for homology coefficients (`p` must be a prime number).
+```
+ periodic_alpha_complex_3d_persistence [options] <input OFF file> <cuboid file>
+```
-**Usage**
-`alpha_complex_persistence [options] <input OFF file>`
where
-`<input OFF file>` is the path to the input point cloud in [nOFF ASCII format](http://www.geomview.org/docs/html/OFF.html).
+
+* `<input OFF file>` is the path to the input point cloud in [nOFF ASCII format](http://www.geomview.org/docs/html/OFF.html).
+* `<cuboid file>` is the path to the file describing the periodic domain. It must be in the format described
+[here](/doc/latest/fileformats.html#FileFormatsIsoCuboid).
**Allowed options**
* `-h [ --help ]` Produce help message
* `-o [ --output-file ]` Name of file in which the persistence diagram is written. Default print in standard output.
-* `-r [ --max-alpha-square-value ]` (default = inf) Maximal alpha square value for the Alpha complex construction.
-* `-p [ --field-charac ]` (default = 11) Characteristic p of the coefficient field Z/pZ for computing homology.
-* `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals.
+* `-p [ --field-charac ]` (default=11) Characteristic p of the coefficient field Z/pZ for computing homology.
+* `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals
-**Example**
-`alpha_complex_persistence -r 32 -p 2 -m 0.45 ../../data/points/tore3D_300.off`
-outputs:
-```
-Alpha complex is of dimension 3 - 9273 simplices - 300 vertices.
-Simplex_tree dim: 3
-2 0 0 inf
-2 1 0.0682162 1.0001
-2 1 0.0934117 1.00003
-2 2 0.56444 1.03938
-```
+**Example**
-Here we retrieve expected Betti numbers on a tore 3D:
```
-Betti numbers[0] = 1
-Betti numbers[1] = 2
-Betti numbers[2] = 1
+periodic_alpha_complex_3d_persistence ../../data/points/grid_10_10_10_in_0_1.off ../../data/points/iso_cuboid_3_in_0_1.txt -p 3 -m 1.0
```
N.B.:
+
+* Cuboid file must be in the format described [here](/doc/latest/fileformats.html#FileFormatsIsoCuboid).
* Filtration values are alpha square values.
diff --git a/src/Alpha_complex/utilities/exact_alpha_complex_3d_persistence.cpp b/src/Alpha_complex/utilities/exact_alpha_complex_3d_persistence.cpp
index 3b9a2ae0..cceac46e 100644
--- a/src/Alpha_complex/utilities/exact_alpha_complex_3d_persistence.cpp
+++ b/src/Alpha_complex/utilities/exact_alpha_complex_3d_persistence.cpp
@@ -129,16 +129,16 @@ int main(int argc, char **argv) {
std::vector<Alpha_value_type>::iterator the_alpha_value_iterator = the_alpha_values.begin();
for (auto object_iterator : the_objects) {
// Retrieve Alpha shape vertex list from object
- if (const Cell_handle* cell = CGAL::object_cast<Cell_handle>(&object_iterator)) {
+ if (const Cell_handle *cell = CGAL::object_cast<Cell_handle>(&object_iterator)) {
vertex_list = from_cell<Vertex_list, Cell_handle>(*cell);
count_cells++;
- } else if (const Facet* facet = CGAL::object_cast<Facet>(&object_iterator)) {
+ } else if (const Facet *facet = CGAL::object_cast<Facet>(&object_iterator)) {
vertex_list = from_facet<Vertex_list, Facet>(*facet);
count_facets++;
- } else if (const Edge_3* edge = CGAL::object_cast<Edge_3>(&object_iterator)) {
+ } else if (const Edge_3 *edge = CGAL::object_cast<Edge_3>(&object_iterator)) {
vertex_list = from_edge<Vertex_list, Edge_3>(*edge);
count_edges++;
- } else if (const Vertex_handle* vertex = CGAL::object_cast<Vertex_handle>(&object_iterator)) {
+ } else if (const Vertex_handle *vertex = CGAL::object_cast<Vertex_handle>(&object_iterator)) {
count_vertices++;
vertex_list = from_vertex<Vertex_list, Vertex_handle>(*vertex);
}
@@ -165,7 +165,7 @@ int main(int argc, char **argv) {
}
// Construction of the simplex_tree
// you can also use the_alpha_value_iterator->exact()
- Filtration_value filtr = /*std::sqrt*/CGAL::to_double(the_alpha_value_iterator->exact());
+ Filtration_value filtr = /*std::sqrt*/ CGAL::to_double(the_alpha_value_iterator->exact());
#ifdef DEBUG_TRACES
std::cout << "filtration = " << filtr << std::endl;
#endif // DEBUG_TRACES
diff --git a/src/Alpha_complex/utilities/periodic_alpha_complex_3d_persistence.cpp b/src/Alpha_complex/utilities/periodic_alpha_complex_3d_persistence.cpp
index ac72dcc4..188cf604 100644
--- a/src/Alpha_complex/utilities/periodic_alpha_complex_3d_persistence.cpp
+++ b/src/Alpha_complex/utilities/periodic_alpha_complex_3d_persistence.cpp
@@ -113,9 +113,8 @@ int main(int argc, char **argv) {
std::cerr << "Unable to read file " << cuboid_file << std::endl;
exit(-1);
}
- //Checking if the cuboid is the same in x,y and z direction. If not, CGAL will not process it.
- if ((x_max-x_min != y_max-y_min) || (x_max-x_min != z_max-z_min) || (z_max-z_min != y_max-y_min))
- {
+ // Checking if the cuboid is the same in x,y and z direction. If not, CGAL will not process it.
+ if ((x_max - x_min != y_max - y_min) || (x_max - x_min != z_max - z_min) || (z_max - z_min != y_max - y_min)) {
std::cerr << "The size of the cuboid in every directions is not the same." << std::endl;
exit(-1);
}
@@ -164,16 +163,16 @@ int main(int argc, char **argv) {
std::vector<Alpha_value_type>::iterator the_alpha_value_iterator = the_alpha_values.begin();
for (auto object_iterator : the_objects) {
// Retrieve Alpha shape vertex list from object
- if (const Cell_handle* cell = CGAL::object_cast<Cell_handle>(&object_iterator)) {
+ if (const Cell_handle *cell = CGAL::object_cast<Cell_handle>(&object_iterator)) {
vertex_list = from_cell<Vertex_list, Cell_handle>(*cell);
count_cells++;
- } else if (const Facet* facet = CGAL::object_cast<Facet>(&object_iterator)) {
+ } else if (const Facet *facet = CGAL::object_cast<Facet>(&object_iterator)) {
vertex_list = from_facet<Vertex_list, Facet>(*facet);
count_facets++;
- } else if (const Edge_3* edge = CGAL::object_cast<Edge_3>(&object_iterator)) {
+ } else if (const Edge_3 *edge = CGAL::object_cast<Edge_3>(&object_iterator)) {
vertex_list = from_edge<Vertex_list, Edge_3>(*edge);
count_edges++;
- } else if (const Vertex_handle* vertex = CGAL::object_cast<Vertex_handle>(&object_iterator)) {
+ } else if (const Vertex_handle *vertex = CGAL::object_cast<Vertex_handle>(&object_iterator)) {
count_vertices++;
vertex_list = from_vertex<Vertex_list, Vertex_handle>(*vertex);
}
diff --git a/src/Alpha_complex/utilities/weighted_alpha_complex_3d_persistence.cpp b/src/Alpha_complex/utilities/weighted_alpha_complex_3d_persistence.cpp
index f46f1a58..93be8a05 100644
--- a/src/Alpha_complex/utilities/weighted_alpha_complex_3d_persistence.cpp
+++ b/src/Alpha_complex/utilities/weighted_alpha_complex_3d_persistence.cpp
@@ -65,13 +65,13 @@ using Point_3 = Gt::Bare_point;
using Weighted_point_3 = Gt::Weighted_point;
// For CGAL >= 4.11
-#else // CGAL_VERSION_NR < 1041100000
+#else // CGAL_VERSION_NR < 1041100000
using Rvb = CGAL::Regular_triangulation_vertex_base_3<Kernel>;
-using Vb = CGAL::Alpha_shape_vertex_base_3<Kernel,Rvb>;
+using Vb = CGAL::Alpha_shape_vertex_base_3<Kernel, Rvb>;
using Rcb = CGAL::Regular_triangulation_cell_base_3<Kernel>;
-using Cb = CGAL::Alpha_shape_cell_base_3<Kernel,Rcb>;
-using Tds = CGAL::Triangulation_data_structure_3<Vb,Cb>;
-using Triangulation_3 = CGAL::Regular_triangulation_3<Kernel,Tds>;
+using Cb = CGAL::Alpha_shape_cell_base_3<Kernel, Rcb>;
+using Tds = CGAL::Triangulation_data_structure_3<Vb, Cb>;
+using Triangulation_3 = CGAL::Regular_triangulation_3<Kernel, Tds>;
// From file type definition
using Point_3 = Triangulation_3::Bare_point;
@@ -177,16 +177,16 @@ int main(int argc, char **argv) {
std::vector<Alpha_value_type>::iterator the_alpha_value_iterator = the_alpha_values.begin();
for (auto object_iterator : the_objects) {
// Retrieve Alpha shape vertex list from object
- if (const Cell_handle* cell = CGAL::object_cast<Cell_handle>(&object_iterator)) {
+ if (const Cell_handle *cell = CGAL::object_cast<Cell_handle>(&object_iterator)) {
vertex_list = from_cell<Vertex_list, Cell_handle>(*cell);
count_cells++;
- } else if (const Facet* facet = CGAL::object_cast<Facet>(&object_iterator)) {
+ } else if (const Facet *facet = CGAL::object_cast<Facet>(&object_iterator)) {
vertex_list = from_facet<Vertex_list, Facet>(*facet);
count_facets++;
- } else if (const Edge_3* edge = CGAL::object_cast<Edge_3>(&object_iterator)) {
+ } else if (const Edge_3 *edge = CGAL::object_cast<Edge_3>(&object_iterator)) {
vertex_list = from_edge<Vertex_list, Edge_3>(*edge);
count_edges++;
- } else if (const Vertex_handle* vertex = CGAL::object_cast<Vertex_handle>(&object_iterator)) {
+ } else if (const Vertex_handle *vertex = CGAL::object_cast<Vertex_handle>(&object_iterator)) {
count_vertices++;
vertex_list = from_vertex<Vertex_list, Vertex_handle>(*vertex);
}
diff --git a/src/Alpha_complex/utilities/weighted_periodic_alpha_complex_3d_persistence.cpp b/src/Alpha_complex/utilities/weighted_periodic_alpha_complex_3d_persistence.cpp
index 0fe8931f..5321bb0a 100644
--- a/src/Alpha_complex/utilities/weighted_periodic_alpha_complex_3d_persistence.cpp
+++ b/src/Alpha_complex/utilities/weighted_periodic_alpha_complex_3d_persistence.cpp
@@ -50,14 +50,14 @@ using PK = CGAL::Periodic_3_regular_triangulation_traits_3<Kernel>;
// Vertex type
using DsVb = CGAL::Periodic_3_triangulation_ds_vertex_base_3<>;
-using Vb = CGAL::Regular_triangulation_vertex_base_3<PK,DsVb>;
-using AsVb = CGAL::Alpha_shape_vertex_base_3<PK,Vb>;
+using Vb = CGAL::Regular_triangulation_vertex_base_3<PK, DsVb>;
+using AsVb = CGAL::Alpha_shape_vertex_base_3<PK, Vb>;
// Cell type
using DsCb = CGAL::Periodic_3_triangulation_ds_cell_base_3<>;
-using Cb = CGAL::Regular_triangulation_cell_base_3<PK,DsCb>;
-using AsCb = CGAL::Alpha_shape_cell_base_3<PK,Cb>;
-using Tds = CGAL::Triangulation_data_structure_3<AsVb,AsCb>;
-using P3RT3 = CGAL::Periodic_3_regular_triangulation_3<PK,Tds>;
+using Cb = CGAL::Regular_triangulation_cell_base_3<PK, DsCb>;
+using AsCb = CGAL::Alpha_shape_cell_base_3<PK, Cb>;
+using Tds = CGAL::Triangulation_data_structure_3<AsVb, AsCb>;
+using P3RT3 = CGAL::Periodic_3_regular_triangulation_3<PK, Tds>;
using Alpha_shape_3 = CGAL::Alpha_shape_3<P3RT3>;
using Point_3 = P3RT3::Bare_point;
@@ -127,14 +127,13 @@ int main(int argc, char* const argv[]) {
std::cerr << "Unable to read file " << argv[3] << std::endl;
usage(argv[0]);
}
- //Checking if the cuboid is the same in x,y and z direction. If not, CGAL will not process it.
- if ((x_max-x_min != y_max-y_min) || (x_max-x_min != z_max-z_min) || (z_max-z_min != y_max-y_min))
- {
+ // Checking if the cuboid is the same in x,y and z direction. If not, CGAL will not process it.
+ if ((x_max - x_min != y_max - y_min) || (x_max - x_min != z_max - z_min) || (z_max - z_min != y_max - y_min)) {
std::cerr << "The size of the cuboid in every directions is not the same." << std::endl;
exit(-1);
}
- double maximal_possible_weight = 0.015625 * (x_max-x_min) * (x_max-x_min);
+ double maximal_possible_weight = 0.015625 * (x_max - x_min) * (x_max - x_min);
// Read weights information from file
std::ifstream weights_ifstr(argv[2]);
@@ -145,8 +144,7 @@ int main(int argc, char* const argv[]) {
wp.reserve(lp.size());
// Attempt read the weight in a double format, return false if it fails
while ((weights_ifstr >> weight) && (index < lp.size())) {
- if ((weight >= maximal_possible_weight) || (weight < 0))
- {
+ if ((weight >= maximal_possible_weight) || (weight < 0)) {
std::cerr << "At line " << (index + 1) << ", the weight (" << weight
<< ") is negative or more than or equal to maximal possible weight (" << maximal_possible_weight
<< ") = 1/64*cuboid length squared, which is not an acceptable input." << std::endl;
diff --git a/src/Bitmap_cubical_complex/doc/Gudhi_Cubical_Complex_doc.h b/src/Bitmap_cubical_complex/doc/Gudhi_Cubical_Complex_doc.h
index ee84e201..a5d7b60f 100644
--- a/src/Bitmap_cubical_complex/doc/Gudhi_Cubical_Complex_doc.h
+++ b/src/Bitmap_cubical_complex/doc/Gudhi_Cubical_Complex_doc.h
@@ -105,7 +105,6 @@ namespace cubical_complex {
* \section BitmapExamples Examples
* End user programs are available in example/Bitmap_cubical_complex and utilities/Bitmap_cubical_complex folders.
*
- * \copyright GNU General Public License v3.
*/
/** @} */ // end defgroup cubical_complex
diff --git a/src/Bitmap_cubical_complex/utilities/README b/src/Bitmap_cubical_complex/utilities/README
deleted file mode 100644
index ddff7034..00000000
--- a/src/Bitmap_cubical_complex/utilities/README
+++ /dev/null
@@ -1,18 +0,0 @@
-# Bitmap_cubical_complex #
-
-## `cubical_complex_persistence` ##
-This program computes persistent homology, by using the Bitmap_cubical_complex class, of cubical complexes provided in text files in Perseus style. See [here](http://gudhi.gforge.inria.fr/doc/latest/fileformats.html#FileFormatsPerseus) for a description of the file format.
-
-Example:
-
-* Create a Cubical Complex from the Perseus style file `CubicalTwoSphere.txt`, computes Persistence cohomology from it and writes the results in a persistence file `CubicalTwoSphere.txt_persistence`:
-`cubical_complex_persistence data/bitmap/CubicalTwoSphere.txt`
-
-## `periodic_cubical_complex_persistence` ##
-
-Same as above, but with periodic boundary conditions.
-
-Example:
-
-* Create a Periodical Cubical Complex from the Perseus style file `3d_torus.txt`, computes Persistence cohomology from it and writes the results in a persistence file `3d_torus.txt_persistence`:
-`periodic_cubical_complex_persistence data/bitmap/3d_torus.txt`
diff --git a/src/Bitmap_cubical_complex/utilities/cubicalcomplex.md b/src/Bitmap_cubical_complex/utilities/cubicalcomplex.md
new file mode 100644
index 00000000..6e1b2578
--- /dev/null
+++ b/src/Bitmap_cubical_complex/utilities/cubicalcomplex.md
@@ -0,0 +1,29 @@
+
+
+# Cubical complex#
+
+## cubical_complex_persistence ##
+This program computes persistent homology, by using the Bitmap_cubical_complex class, of cubical complexes provided in text files in Perseus style.
+See [here](/doc/latest/fileformats.html#FileFormatsPerseus) for a description of the file format.
+
+**Example**
+
+```
+ cubical_complex_persistence data/bitmap/CubicalTwoSphere.txt
+```
+
+* Creates a Cubical Complex from the Perseus style file `CubicalTwoSphere.txt`,
+computes Persistence cohomology from it and writes the results in a persistence file `CubicalTwoSphere.txt_persistence`.
+
+## periodic_cubical_complex_persistence ##
+
+Same as above, but with periodic boundary conditions.
+
+**Example**
+
+```
+ periodic_cubical_complex_persistence data/bitmap/3d_torus.txt
+```
+
+* Creates a Periodical Cubical Complex from the Perseus style file `3d_torus.txt`,
+computes Persistence cohomology from it and writes the results in a persistence file `3d_torus.txt_persistence`.
diff --git a/src/Bottleneck_distance/include/gudhi/Bottleneck.h b/src/Bottleneck_distance/include/gudhi/Bottleneck.h
index 8c97dce9..7aee07bb 100644
--- a/src/Bottleneck_distance/include/gudhi/Bottleneck.h
+++ b/src/Bottleneck_distance/include/gudhi/Bottleneck.h
@@ -46,7 +46,7 @@ double bottleneck_distance_approx(Persistence_graph& g, double e) {
if (step <= b_lower_bound || step >= b_upper_bound) // Avoid precision problem
break;
m.set_r(step);
- while (m.multi_augment()) {}; // compute a maximum matching (in the graph corresponding to the current r)
+ while (m.multi_augment()) {} // compute a maximum matching (in the graph corresponding to the current r)
if (m.perfect()) {
m = biggest_unperfect;
b_upper_bound = step;
@@ -68,7 +68,7 @@ double bottleneck_distance_exact(Persistence_graph& g) {
while (lower_bound_i != upper_bound_i) {
long step = lower_bound_i + static_cast<long> ((upper_bound_i - lower_bound_i - 1) / alpha);
m.set_r(sd.at(step));
- while (m.multi_augment()) {}; // compute a maximum matching (in the graph corresponding to the current r)
+ while (m.multi_augment()) {} // compute a maximum matching (in the graph corresponding to the current r)
if (m.perfect()) {
m = biggest_unperfect;
upper_bound_i = step;
diff --git a/src/Bottleneck_distance/include/gudhi/Neighbors_finder.h b/src/Bottleneck_distance/include/gudhi/Neighbors_finder.h
index dc804630..87c7cee5 100644
--- a/src/Bottleneck_distance/include/gudhi/Neighbors_finder.h
+++ b/src/Bottleneck_distance/include/gudhi/Neighbors_finder.h
@@ -32,6 +32,7 @@
#include <unordered_set>
#include <vector>
+#include <algorithm> // for std::max
namespace Gudhi {
diff --git a/src/Bottleneck_distance/utilities/README b/src/Bottleneck_distance/utilities/README
deleted file mode 100644
index d9fdd252..00000000
--- a/src/Bottleneck_distance/utilities/README
+++ /dev/null
@@ -1,10 +0,0 @@
-# Bottleneck_distance #
-
-## `bottleneck_read_file_example` ##
-This program computes the Bottleneck distance between two persistence diagram files.
-
-Usage:
-`bottleneck_read_file_example <file_1.pers> <file_2.pers> [<tolerance>]`
-where
-`<file_1.pers>` and `<file_2.pers>` must be in the format described [here](http://gudhi.gforge.inria.fr/doc/latest/fileformats.html#FileFormatsPers).
-`<tolerance>` is an error bound on the bottleneck distance (set by default to the smallest positive double value).
diff --git a/src/Bottleneck_distance/utilities/bottleneckdistance.md b/src/Bottleneck_distance/utilities/bottleneckdistance.md
new file mode 100644
index 00000000..526f5822
--- /dev/null
+++ b/src/Bottleneck_distance/utilities/bottleneckdistance.md
@@ -0,0 +1,18 @@
+
+
+# Bottleneck distance #
+
+## bottleneck_read_file_example ##
+
+This program computes the Bottleneck distance between two persistence diagram files.
+
+**Usage**
+
+```
+ bottleneck_read_file_example <file_1.pers> <file_2.pers> [<tolerance>]
+```
+
+where
+
+* `<file_1.pers>` and `<file_2.pers>` must be in the format described [here](/doc/latest/fileformats.html#FileFormatsPers).
+* `<tolerance>` is an error bound on the bottleneck distance (set by default to the smallest positive double value).
diff --git a/src/Contraction/include/gudhi/Edge_contraction.h b/src/Contraction/include/gudhi/Edge_contraction.h
index 61f2d945..cf9a2c27 100644
--- a/src/Contraction/include/gudhi/Edge_contraction.h
+++ b/src/Contraction/include/gudhi/Edge_contraction.h
@@ -210,7 +210,6 @@ int main (int argc, char *argv[])
}
\endcode
-
\verbatim
./example/Contraction/RipsContraction ../../data/SO3_10000.off 0.3
[ 50%] [100%] Built target SkeletonBlockerIteration
@@ -223,9 +222,6 @@ Time to simplify and enumerate simplices:
3.166621s wall, 3.150000s user + 0.010000s system = 3.160000s CPU (99.8%)
\endverbatim
-
-
-\copyright GNU General Public License v3.
*/
/** @} */ // end defgroup
} // namespace contraction
diff --git a/src/Doxyfile b/src/Doxyfile
index 2d5adea1..f1981e2e 100644
--- a/src/Doxyfile
+++ b/src/Doxyfile
@@ -38,7 +38,7 @@ PROJECT_NAME = "GUDHI"
# could be handy for archiving the generated documentation or if some version
# control system is used.
-PROJECT_NUMBER = "2.1.0.rc1"
+PROJECT_NUMBER = "2.1.0"
# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a
@@ -801,7 +801,7 @@ EXCLUDE_SYMLINKS = NO
# Note that the wildcards are matched against the file with absolute path, so to
# exclude all test directories for example use the pattern */test/*
-EXCLUDE_PATTERNS =
+EXCLUDE_PATTERNS = */utilities/*/*.md
# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names
# (namespaces, classes, functions, etc.) that should be excluded from the
diff --git a/src/GudhUI/CMakeLists.txt b/src/GudhUI/CMakeLists.txt
index 374195d0..2503a03e 100644
--- a/src/GudhUI/CMakeLists.txt
+++ b/src/GudhUI/CMakeLists.txt
@@ -1,37 +1,41 @@
cmake_minimum_required(VERSION 2.8)
project(GudhUI)
-find_package(Qt5 COMPONENTS Widgets Xml OpenGL)
-find_package(QGLViewer)
+# Need to find OpenGL first as find_package(Qt5) tries to #include"GL/gl.h" on some platforms
find_package(OpenGL)
-if ( CGAL_FOUND AND Qt5_FOUND AND OPENGL_FOUND AND QGLVIEWER_FOUND AND NOT CGAL_VERSION VERSION_EQUAL 4.8.0)
-
- set(CMAKE_AUTOMOC ON)
- set(CMAKE_AUTOUIC ON)
- set(CMAKE_INCLUDE_CURRENT_DIR ON)
-
- SET(Boost_USE_STATIC_LIBS ON)
- SET(Boost_USE_MULTITHREAD OFF)
- include_directories (${QGLVIEWER_INCLUDE_DIR})
-
- add_executable ( GudhUI
- gui/gudhui.cpp
- gui/MainWindow.cpp
- gui/Menu_k_nearest_neighbors.cpp
- gui/Menu_uniform_neighbors.cpp
- gui/Menu_edge_contraction.cpp
- gui/Menu_persistence.cpp
- view/Viewer_instructor.cpp
- view/Viewer.cpp
- )
- target_link_libraries( GudhUI Qt5::Widgets Qt5::Xml Qt5::OpenGL )
- target_link_libraries( GudhUI ${QGLVIEWER_LIBRARIES} )
- target_link_libraries( GudhUI ${OPENGL_gl_LIBRARY} ${OPENGL_glu_LIBRARY} )
- if (TBB_FOUND)
- target_link_libraries( GudhUI ${TBB_LIBRARIES})
+if (OPENGL_FOUND)
+ find_package(Qt5 COMPONENTS Widgets Xml OpenGL)
+ find_package(QGLViewer)
+
+ if ( CGAL_FOUND AND Qt5_FOUND AND QGLVIEWER_FOUND AND NOT CGAL_VERSION VERSION_EQUAL 4.8.0)
+
+ set(CMAKE_AUTOMOC ON)
+ set(CMAKE_AUTOUIC ON)
+ set(CMAKE_INCLUDE_CURRENT_DIR ON)
+
+ SET(Boost_USE_STATIC_LIBS ON)
+ SET(Boost_USE_MULTITHREAD OFF)
+ include_directories (${QGLVIEWER_INCLUDE_DIR})
+
+ add_executable ( GudhUI
+ gui/gudhui.cpp
+ gui/MainWindow.cpp
+ gui/Menu_k_nearest_neighbors.cpp
+ gui/Menu_uniform_neighbors.cpp
+ gui/Menu_edge_contraction.cpp
+ gui/Menu_persistence.cpp
+ view/Viewer_instructor.cpp
+ view/Viewer.cpp
+ )
+ target_link_libraries( GudhUI Qt5::Widgets Qt5::Xml Qt5::OpenGL )
+ target_link_libraries( GudhUI ${QGLVIEWER_LIBRARIES} )
+ target_link_libraries( GudhUI ${OPENGL_gl_LIBRARY} ${OPENGL_glu_LIBRARY} )
+ if (TBB_FOUND)
+ target_link_libraries( GudhUI ${TBB_LIBRARIES})
+ endif()
+
+ install(TARGETS GudhUI DESTINATION bin)
+
endif()
-
- install(TARGETS GudhUI DESTINATION bin)
-
-endif()
+endif(OPENGL_FOUND)
\ No newline at end of file
diff --git a/src/Nerve_GIC/doc/Intro_graph_induced_complex.h b/src/Nerve_GIC/doc/Intro_graph_induced_complex.h
index 3a0d8154..2b648425 100644
--- a/src/Nerve_GIC/doc/Intro_graph_induced_complex.h
+++ b/src/Nerve_GIC/doc/Intro_graph_induced_complex.h
@@ -70,15 +70,15 @@ namespace cover_complex {
*
* When launching:
*
- * \code $> ./Nerve ../../../../data/points/human.off 2 10 0.3 --v
+ * \code $> ./Nerve ../../data/points/human.off 2 10 0.3 -v
* \endcode
*
* the program output is:
*
* \include Nerve_GIC/Nerve.txt
*
- * The program also writes a file SC.txt. The first three lines in this file are the location of the input point cloud
- * and the function used to compute the cover.
+ * The program also writes a file ../../data/points/human_sc.txt. The first three lines in this file are the location
+ * of the input point cloud and the function used to compute the cover.
* The fourth line contains the number of vertices nv and edges ne of the Nerve.
* The next nv lines represent the vertices. Each line contains the vertex ID,
* the number of data points it contains, and their average color function value.
@@ -113,12 +113,12 @@ namespace cover_complex {
*
* When launching:
*
- * \code $> ./VoronoiGIC ../../../../data/points/human.off 700 --v
+ * \code $> ./VoronoiGIC ../../data/points/human.off 700 -v
* \endcode
*
* the program outputs SC.off. Using e.g.
*
- * \code $> geomview SC.off
+ * \code $> geomview ../../data/points/human_sc.off
* \endcode
*
* one can obtain the following visualization:
@@ -146,7 +146,7 @@ namespace cover_complex {
*
* When launching:
*
- * \code $> ./CoordGIC ../../../../data/points/KleinBottle5D.off 0 --v
+ * \code $> ./CoordGIC ../../data/points/KleinBottle5D.off 0 -v
* \endcode
*
* the program outputs SC.dot. Using e.g.
@@ -169,15 +169,13 @@ namespace cover_complex {
*
* When launching:
*
- * \code $> ./FuncGIC ../../data/points/COIL_database/lucky_cat.off ../../data/points/COIL_database/lucky_cat_PCA1 --v
+ * \code $> ./FuncGIC ../../data/points/COIL_database/lucky_cat.off ../../data/points/COIL_database/lucky_cat_PCA1 -v
* \endcode
*
* the program outputs again SC.dot which gives the following visualization after using neato:
*
* \image html "funcGICvisu.jpg" "Visualization with neato"
*
- * \copyright GNU General Public License v3.
- * \verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim
*/
/** @} */ // end defgroup cover_complex
@@ -186,31 +184,3 @@ namespace cover_complex {
} // namespace Gudhi
#endif // DOC_COVER_COMPLEX_INTRO_COVER_COMPLEX_H_
-
-
-/* * \subsection gicexample Example with cover from function
- *
- * This example builds the GIC of a point cloud sampled on a 3D human shape (human.off).
- * The cover C comes from the preimages of intervals (with length 0.075 and gain 0)
- * covering the height function (coordinate 2),
- * and the graph G comes from a Rips complex built with threshold 0.075.
- * Note that if the gain is too big, the number of cliques increases a lot,
- * which make the computation time much larger.
- *
- * \include Nerve_GIC/GIC.cpp
- *
- * When launching:
- *
- * \code $> ./GIC ../../../../data/points/human.off 0.075 2 0.075 0 --v
- * \endcode
- *
- * the program outputs SC.txt, which can be visualized with python and firefox as before:
- *
- * \image html "gicvisu.jpg" "Visualization with KeplerMapper"
- * */
-
-
-/* * Using e.g.
- *
- * \code $> python KeplerMapperVisuFromTxtFile.py && firefox SC.html
- * \endcode */
diff --git a/src/Nerve_GIC/doc/funcGICvisu.jpg b/src/Nerve_GIC/doc/funcGICvisu.jpg
index f3da45ac..36b64dde 100644
--- a/src/Nerve_GIC/doc/funcGICvisu.jpg
+++ b/src/Nerve_GIC/doc/funcGICvisu.jpg
Binary files differ
diff --git a/src/Nerve_GIC/doc/funcGICvisu.pdf b/src/Nerve_GIC/doc/funcGICvisu.pdf
new file mode 100644
index 00000000..d7456cd3
--- /dev/null
+++ b/src/Nerve_GIC/doc/funcGICvisu.pdf
Binary files differ
diff --git a/src/Nerve_GIC/example/CMakeLists.txt b/src/Nerve_GIC/example/CMakeLists.txt
index 461b6db2..f2626927 100644
--- a/src/Nerve_GIC/example/CMakeLists.txt
+++ b/src/Nerve_GIC/example/CMakeLists.txt
@@ -1,29 +1,26 @@
cmake_minimum_required(VERSION 2.6)
project(Nerve_GIC_examples)
-add_executable ( Nerve Nerve.cpp )
-add_executable ( CoordGIC CoordGIC.cpp )
-add_executable ( FuncGIC FuncGIC.cpp )
-add_executable ( VoronoiGIC VoronoiGIC.cpp )
+if (NOT CGAL_VERSION VERSION_LESS 4.8.1)
-if (TBB_FOUND)
- target_link_libraries(Nerve ${TBB_LIBRARIES})
- target_link_libraries(CoordGIC ${TBB_LIBRARIES})
- target_link_libraries(FuncGIC ${TBB_LIBRARIES})
- target_link_libraries(VoronoiGIC ${TBB_LIBRARIES})
-endif()
+ add_executable ( CoordGIC CoordGIC.cpp )
+ add_executable ( FuncGIC FuncGIC.cpp )
-file(COPY KeplerMapperVisuFromTxtFile.py km.py DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+ if (TBB_FOUND)
+ target_link_libraries(CoordGIC ${TBB_LIBRARIES})
+ target_link_libraries(FuncGIC ${TBB_LIBRARIES})
+ endif()
-add_test(NAME Nerve_GIC_example_nerve COMMAND $<TARGET_FILE:Nerve>
- "${CMAKE_SOURCE_DIR}/data/points/human.off" "2" "10" "0.3")
+ # Copy files for not to pollute sources when testing
+ file(COPY "${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+ file(COPY "${CMAKE_SOURCE_DIR}/data/points/COIL_database/lucky_cat.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+ file(COPY "${CMAKE_SOURCE_DIR}/data/points/COIL_database/lucky_cat_PCA1" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
-add_test(NAME Nerve_GIC_example_VoronoiGIC COMMAND $<TARGET_FILE:VoronoiGIC>
- "${CMAKE_SOURCE_DIR}/data/points/human.off" "100")
+ add_test(NAME Nerve_GIC_example_CoordGIC COMMAND $<TARGET_FILE:CoordGIC>
+ "tore3D_1307.off" "0")
-add_test(NAME Nerve_GIC_example_CoordGIC COMMAND $<TARGET_FILE:CoordGIC>
- "${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off" "0")
+ add_test(NAME Nerve_GIC_example_FuncGIC COMMAND $<TARGET_FILE:FuncGIC>
+ "lucky_cat.off"
+ "lucky_cat_PCA1")
-add_test(NAME Nerve_GIC_example_FuncGIC COMMAND $<TARGET_FILE:FuncGIC>
- "${CMAKE_SOURCE_DIR}/data/points/COIL_database/lucky_cat.off"
- "${CMAKE_SOURCE_DIR}/data/points/COIL_database/lucky_cat_PCA1")
+endif (NOT CGAL_VERSION VERSION_LESS 4.8.1)
diff --git a/src/Nerve_GIC/example/CoordGIC.cpp b/src/Nerve_GIC/example/CoordGIC.cpp
index c03fcbb3..c92cf235 100644
--- a/src/Nerve_GIC/example/CoordGIC.cpp
+++ b/src/Nerve_GIC/example/CoordGIC.cpp
@@ -27,8 +27,8 @@
void usage(int nbArgs, char *const progName) {
std::cerr << "Error: Number of arguments (" << nbArgs << ") is not correct\n";
- std::cerr << "Usage: " << progName << " filename.off coordinate [--v] \n";
- std::cerr << " i.e.: " << progName << " ../../data/points/human.off 2 --v \n";
+ std::cerr << "Usage: " << progName << " filename.off coordinate [-v] \n";
+ std::cerr << " i.e.: " << progName << " ../../data/points/human.off 2 -v \n";
exit(-1); // ----- >>
}
diff --git a/src/Nerve_GIC/example/FuncGIC.cpp b/src/Nerve_GIC/example/FuncGIC.cpp
index 3762db4e..cb0f0d63 100644
--- a/src/Nerve_GIC/example/FuncGIC.cpp
+++ b/src/Nerve_GIC/example/FuncGIC.cpp
@@ -27,9 +27,9 @@
void usage(int nbArgs, char *const progName) {
std::cerr << "Error: Number of arguments (" << nbArgs << ") is not correct\n";
- std::cerr << "Usage: " << progName << " filename.off function [--v] \n";
+ std::cerr << "Usage: " << progName << " filename.off function [-v] \n";
std::cerr << " i.e.: " << progName << " ../../data/points/COIL_database/lucky_cat.off "
- "../../data/points/COIL_database/lucky_cat_PCA1 --v \n";
+ "../../data/points/COIL_database/lucky_cat_PCA1 -v \n";
exit(-1); // ----- >>
}
diff --git a/src/Nerve_GIC/example/GIC.cpp b/src/Nerve_GIC/example/GIC.cpp
deleted file mode 100644
index 2bc24a4d..00000000
--- a/src/Nerve_GIC/example/GIC.cpp
+++ /dev/null
@@ -1,95 +0,0 @@
-/* This file is part of the Gudhi Library. The Gudhi library
- * (Geometric Understanding in Higher Dimensions) is a generic C++
- * library for computational topology.
- *
- * Author(s): Mathieu Carrière
- *
- * Copyright (C) 2017 INRIA
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#include <gudhi/GIC.h>
-
-#include <string>
-#include <vector>
-
-void usage(int nbArgs, char *const progName) {
- std::cerr << "Error: Number of arguments (" << nbArgs << ") is not correct\n";
- std::cerr << "Usage: " << progName << " filename.off threshold coordinate resolution gain [--v] \n";
- std::cerr << " i.e.: " << progName << " ../../data/points/human.off 0.075 2 0.075 0 --v \n";
- exit(-1); // ----- >>
-}
-
-int main(int argc, char **argv) {
- if ((argc != 6) && (argc != 7)) usage(argc, argv[0]);
-
- using Point = std::vector<float>;
-
- std::string off_file_name(argv[1]);
- double threshold = atof(argv[2]);
- int coord = atoi(argv[3]);
- double resolution = atof(argv[4]);
- double gain = atof(argv[5]);
- bool verb = 0;
- if (argc == 7) verb = 1;
-
- // ----------------------------------------------------------------------------
- // Init of a graph induced complex from an OFF file
- // ----------------------------------------------------------------------------
-
- Gudhi::graph_induced_complex::Graph_induced_complex<Point> GIC;
- GIC.set_verbose(verb);
-
- bool check = GIC.read_point_cloud(off_file_name);
-
- if (!check) {
- std::cout << "Incorrect OFF file." << std::endl;
- } else {
- GIC.set_color_from_coordinate(coord);
- GIC.set_function_from_coordinate(coord);
-
- GIC.set_graph_from_rips(threshold, Gudhi::Euclidean_distance());
-
- GIC.set_resolution_with_interval_length(resolution);
- GIC.set_gain(gain);
- GIC.set_cover_from_function();
-
- GIC.find_GIC_simplices();
-
- GIC.plot_TXT_for_KeplerMapper();
-
- Gudhi::Simplex_tree<> stree;
- GIC.create_complex(stree);
-
- // ----------------------------------------------------------------------------
- // Display information about the graph induced complex
- // ----------------------------------------------------------------------------
-
- if (verb) {
- std::cout << "Graph induced complex is of dimension " << stree.dimension() << " - " << stree.num_simplices()
- << " simplices - " << stree.num_vertices() << " vertices." << std::endl;
-
- std::cout << "Iterator on graph induced complex simplices" << std::endl;
- for (auto f_simplex : stree.filtration_simplex_range()) {
- for (auto vertex : stree.simplex_vertex_range(f_simplex)) {
- std::cout << vertex << " ";
- }
- std::cout << std::endl;
- }
- }
- }
-
- return 0;
-}
diff --git a/src/Nerve_GIC/example/KeplerMapperVisuFromTxtFile.py b/src/Nerve_GIC/example/KeplerMapperVisuFromTxtFile.py
deleted file mode 100755
index d2897774..00000000
--- a/src/Nerve_GIC/example/KeplerMapperVisuFromTxtFile.py
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/usr/bin/env python
-
-import km
-import numpy as np
-from collections import defaultdict
-
-"""This file is part of the Gudhi Library. The Gudhi library
- (Geometric Understanding in Higher Dimensions) is a generic C++
- library for computational topology.
-
- Author(s): Mathieu Carriere
-
- Copyright (C) 2017 INRIA
-
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <http://www.gnu.org/licenses/>.
-"""
-
-__author__ = "Mathieu Carriere"
-__copyright__ = "Copyright (C) 2017 INRIA"
-__license__ = "GPL v3"
-
-network = {}
-mapper = km.KeplerMapper(verbose=0)
-data = np.zeros((3,3))
-projected_data = mapper.fit_transform( data, projection="sum", scaler=None )
-
-f = open('SC.txt','r')
-nodes = defaultdict(list)
-links = defaultdict(list)
-custom = defaultdict(list)
-
-dat = f.readline()
-lens = f.readline()
-color = f.readline();
-param = [float(i) for i in f.readline().split(" ")]
-
-nums = [int(i) for i in f.readline().split(" ")]
-num_nodes = nums[0]
-num_edges = nums[1]
-
-for i in range(0,num_nodes):
- point = [float(j) for j in f.readline().split(" ")]
- nodes[ str(int(point[0])) ] = [ int(point[0]), point[1], int(point[2]) ]
- links[ str(int(point[0])) ] = []
- custom[ int(point[0]) ] = point[1]
-
-m = min([custom[i] for i in range(0,num_nodes)])
-M = max([custom[i] for i in range(0,num_nodes)])
-
-for i in range(0,num_edges):
- edge = [int(j) for j in f.readline().split(" ")]
- links[ str(edge[0]) ].append( str(edge[1]) )
- links[ str(edge[1]) ].append( str(edge[0]) )
-
-network["nodes"] = nodes
-network["links"] = links
-network["meta"] = lens
-
-mapper.visualize(network, color_function = color, path_html="SC.html", title=dat,
-graph_link_distance=30, graph_gravity=0.1, graph_charge=-120, custom_tooltips=custom, width_html=0,
-height_html=0, show_tooltips=True, show_title=True, show_meta=True, res=param[0],gain=param[1], minimum=m,maximum=M)
diff --git a/src/Nerve_GIC/example/Nerve.txt b/src/Nerve_GIC/example/Nerve.txt
deleted file mode 100644
index 2a861c5f..00000000
--- a/src/Nerve_GIC/example/Nerve.txt
+++ /dev/null
@@ -1,43 +0,0 @@
-Nerve is of dimension 1 - 41 simplices - 21 vertices.
-Iterator on Nerve simplices
-0
-1
-2
-2 0
-3
-3 1
-4
-4 3
-5
-5 2
-6
-6 4
-7
-7 5
-8
-9
-9 6
-10
-10 7
-11
-12
-12 8
-13
-13 9
-13 10
-14
-14 11
-15
-15 13
-16
-16 12
-17
-17 14
-18
-18 15
-18 16
-18 17
-19
-19 18
-20
-20 19
diff --git a/src/Nerve_GIC/include/gudhi/GIC.h b/src/Nerve_GIC/include/gudhi/GIC.h
index 9f107a7e..40ff7a4a 100644
--- a/src/Nerve_GIC/include/gudhi/GIC.h
+++ b/src/Nerve_GIC/include/gudhi/GIC.h
@@ -30,16 +30,27 @@
#include <gudhi/Rips_complex.h>
#include <gudhi/Points_off_io.h>
#include <gudhi/distance_functions.h>
+#include <gudhi/Persistent_cohomology.h>
+#include <gudhi/Bottleneck.h>
+
+#include <boost/config.hpp>
+#include <boost/graph/graph_traits.hpp>
+#include <boost/graph/adjacency_list.hpp>
+#include <boost/graph/connected_components.hpp>
+#include <boost/graph/dijkstra_shortest_paths.hpp>
+#include <boost/graph/subgraph.hpp>
+#include <boost/graph/graph_utility.hpp>
#include <iostream>
#include <vector>
#include <map>
#include <string>
#include <limits> // for numeric_limits
-#include <utility> // for pair<>
+#include <utility> // for std::pair<>
#include <algorithm> // for std::max
#include <random>
#include <cassert>
+#include <cmath>
namespace Gudhi {
@@ -48,6 +59,13 @@ namespace cover_complex {
using Simplex_tree = Gudhi::Simplex_tree<>;
using Filtration_value = Simplex_tree::Filtration_value;
using Rips_complex = Gudhi::rips_complex::Rips_complex<Filtration_value>;
+using Persistence_diagram = std::vector<std::pair<double, double> >;
+using Graph = boost::subgraph<
+ boost::adjacency_list<boost::setS, boost::vecS, boost::undirectedS, boost::no_property,
+ boost::property<boost::edge_index_t, int, boost::property<boost::edge_weight_t, double> > > >;
+using Vertex_t = boost::graph_traits<Graph>::vertex_descriptor;
+using Index_map = boost::property_map<Graph, boost::vertex_index_t>::type;
+using Weight_map = boost::property_map<Graph, boost::edge_weight_t>::type;
/**
* \class Cover_complex
@@ -72,25 +90,40 @@ using Rips_complex = Gudhi::rips_complex::Rips_complex<Filtration_value>;
template <typename Point>
class Cover_complex {
private:
- // Graph_induced_complex(std::map<int, double> fun){func = fun;}
bool verbose = false; // whether to display information.
- std::vector<Point> point_cloud;
- std::vector<std::vector<int> > one_skeleton;
- typedef int Cover_t; // elements of cover C are indexed by integers.
- std::vector<std::vector<Cover_t> > simplices;
- std::map<int, std::vector<Cover_t> > cover;
- std::map<Cover_t, std::vector<int> > cover_back;
- int maximal_dim; // maximal dimension of output simplicial complex.
- int data_dimension; // dimension of input data.
- int n; // number of points.
- std::map<Cover_t, int>
+ std::string type; // Nerve or GIC
+
+ std::vector<Point> point_cloud; // input point cloud.
+ std::vector<std::vector<double> > distances; // all pairwise distances.
+ int maximal_dim; // maximal dimension of output simplicial complex.
+ int data_dimension; // dimension of input data.
+ int n; // number of points.
+
+ std::map<int, double> func; // function used to compute the output simplicial complex.
+ std::map<int, double>
+ func_color; // function used to compute the colors of the nodes of the output simplicial complex.
+ bool functional_cover = false; // whether we use a cover with preimages of a function or not.
+
+ Graph one_skeleton_OFF; // one-skeleton given by the input OFF file (if it exists).
+ Graph one_skeleton; // one-skeleton used to compute the connected components.
+ std::vector<Vertex_t> vertices; // vertices of one_skeleton.
+
+ std::vector<std::vector<int> > simplices; // simplices of output simplicial complex.
+  std::vector<int> voronoi_subsamples;  // Voronoi seeds (in case of a Voronoi cover).
+
+ Persistence_diagram PD;
+ std::vector<double> distribution;
+
+ std::map<int, std::vector<int> >
+      cover;  // function associating to each data point the vector of cover elements to which it belongs.
+ std::map<int, std::vector<int> >
+ cover_back; // inverse of cover, in order to get the data points associated to a specific cover element.
+ std::map<int, double> cover_std; // standard function (induced by func) used to compute the extended persistence
+ // diagram of the output simplicial complex.
+ std::map<int, int>
      cover_fct;  // integer-valued function used to tell whether two elements of the cover are consecutive or not.
- std::map<Cover_t, std::pair<int, double> >
- cover_color; // size and coloring of the vertices of the output simplicial complex.
- Simplex_tree st;
-
- std::map<int, std::vector<int> > adjacency_matrix;
- std::vector<std::vector<double> > distances;
+ std::map<int, std::pair<int, double> >
+ cover_color; // size and coloring (induced by func_color) of the vertices of the output simplicial complex.
int resolution_int = -1;
double resolution_double = -1;
@@ -99,14 +132,11 @@ class Cover_complex {
double rate_power = 0.001; // Power in the subsampling.
int mask = 0; // Ignore nodes containing less than mask points.
- std::map<int, double> func;
- std::map<int, double> func_color;
- std::vector<int> voronoi_subsamples;
+ std::map<int, int> name2id, name2idinv;
+
std::string cover_name;
std::string point_cloud_name;
std::string color_name;
- std::string type; // Nerve or GIC
- bool functional_cover = false; // whether we use a cover with preimages of a function or not
// Point comparator
struct Less {
@@ -120,21 +150,16 @@ class Cover_complex {
}
};
- // DFS
- private:
- void dfs(std::map<int, std::vector<int> >& G, int p, std::vector<int>& cc, std::map<int, bool>& visit) {
- cc.push_back(p);
- visit[p] = true;
- int neighb = G[p].size();
- for (int i = 0; i < neighb; i++)
- if (visit.find(G[p][i]) != visit.end())
- if (!(visit[G[p][i]])) dfs(G, G[p][i], cc, visit);
+ // Remove all edges of a graph.
+ void remove_edges(Graph& G) {
+ boost::graph_traits<Graph>::edge_iterator ei, ei_end;
+ for (boost::tie(ei, ei_end) = boost::edges(G); ei != ei_end; ++ei) boost::remove_edge(*ei, G);
}
// Find random number in [0,1].
double GetUniform() {
- static std::default_random_engine re;
- static std::uniform_real_distribution<double> Dist(0, 1);
+ thread_local std::default_random_engine re;
+ thread_local std::uniform_real_distribution<double> Dist(0, 1);
return Dist(re);
}
@@ -155,24 +180,14 @@ class Cover_complex {
}
}
- private:
- void fill_adjacency_matrix_from_st() {
- std::vector<int> empty;
- for (int i = 0; i < n; i++) adjacency_matrix[i] = empty;
- for (auto simplex : st.complex_simplex_range()) {
- if (st.dimension(simplex) == 1) {
- std::vector<int> vertices;
- for (auto vertex : st.simplex_vertex_range(simplex)) vertices.push_back(vertex);
- adjacency_matrix[vertices[0]].push_back(vertices[1]);
- adjacency_matrix[vertices[1]].push_back(vertices[0]);
- }
- }
- }
+ // *******************************************************************************************************************
+ // Utils.
+ // *******************************************************************************************************************
public:
  /** \brief Specifies the type of the output simplicial complex.
*
- * @param[in] t string (either "GIC" or "Nerve").
+ * @param[in] t std::string (either "GIC" or "Nerve").
*
*/
void set_type(const std::string& t) { type = t; }
@@ -221,14 +236,15 @@ class Cover_complex {
char comment = '#';
while (comment == '#') {
- getline(input, line);
- if (!line.empty() && !std::all_of(line.begin(), line.end(), isspace)) comment = line[line.find_first_not_of(' ')];
+ std::getline(input, line);
+ if (!line.empty() && !all_of(line.begin(), line.end(), (int (*)(int))isspace))
+ comment = line[line.find_first_not_of(' ')];
}
- if (std::strcmp((char*)line.c_str(), "nOFF") == 0) {
+ if (strcmp((char*)line.c_str(), "nOFF") == 0) {
comment = '#';
while (comment == '#') {
- getline(input, line);
- if (!line.empty() && !std::all_of(line.begin(), line.end(), isspace))
+ std::getline(input, line);
+ if (!line.empty() && !all_of(line.begin(), line.end(), (int (*)(int))isspace))
comment = line[line.find_first_not_of(' ')];
}
std::stringstream stream(line);
@@ -238,10 +254,11 @@ class Cover_complex {
}
comment = '#';
- int numedges, numfaces, i, num;
+ int numedges, numfaces, i, dim;
while (comment == '#') {
- getline(input, line);
- if (!line.empty() && !std::all_of(line.begin(), line.end(), isspace)) comment = line[line.find_first_not_of(' ')];
+ std::getline(input, line);
+ if (!line.empty() && !all_of(line.begin(), line.end(), (int (*)(int))isspace))
+ comment = line[line.find_first_not_of(' ')];
}
std::stringstream stream(line);
stream >> n;
@@ -250,34 +267,31 @@ class Cover_complex {
i = 0;
while (i < n) {
- getline(input, line);
+ std::getline(input, line);
if (!line.empty() && line[line.find_first_not_of(' ')] != '#' &&
- !std::all_of(line.begin(), line.end(), isspace)) {
+ !all_of(line.begin(), line.end(), (int (*)(int))isspace)) {
+ std::stringstream iss(line);
std::vector<double> point;
- std::istringstream iss(line);
point.assign(std::istream_iterator<double>(iss), std::istream_iterator<double>());
point_cloud.emplace_back(point.begin(), point.begin() + data_dimension);
+ boost::add_vertex(one_skeleton_OFF);
+ vertices.push_back(boost::add_vertex(one_skeleton));
i++;
}
}
i = 0;
while (i < numfaces) {
- getline(input, line);
+ std::getline(input, line);
if (!line.empty() && line[line.find_first_not_of(' ')] != '#' &&
- !std::all_of(line.begin(), line.end(), isspace)) {
+ !all_of(line.begin(), line.end(), (int (*)(int))isspace)) {
std::vector<int> simplex;
- std::istringstream iss(line);
+ std::stringstream iss(line);
simplex.assign(std::istream_iterator<int>(iss), std::istream_iterator<int>());
- num = simplex[0];
- std::vector<int> edge(2);
- for (int j = 1; j <= num; j++) {
- for (int k = j + 1; k <= num; k++) {
- edge[0] = simplex[j];
- edge[1] = simplex[k];
- one_skeleton.push_back(edge);
- }
- }
+ dim = simplex[0];
+ for (int j = 1; j <= dim; j++)
+ for (int k = j + 1; k <= dim; k++)
+ boost::add_edge(vertices[simplex[j]], vertices[simplex[k]], one_skeleton_OFF);
i++;
}
}
@@ -298,22 +312,16 @@ class Cover_complex {
*
*/
void set_graph_from_file(const std::string& graph_file_name) {
+ remove_edges(one_skeleton);
int neighb;
std::ifstream input(graph_file_name);
std::string line;
- int edge[2];
- int n = 0;
+ int source;
while (std::getline(input, line)) {
std::stringstream stream(line);
- stream >> edge[0];
- while (stream >> neighb) {
- edge[1] = neighb;
- st.insert_simplex_and_subfaces(edge);
- }
- n++;
+ stream >> source;
+ while (stream >> neighb) boost::add_edge(vertices[source], vertices[neighb], one_skeleton);
}
-
- fill_adjacency_matrix_from_st();
}
public: // Set graph from OFF file.
@@ -321,13 +329,11 @@ class Cover_complex {
*
*/
void set_graph_from_OFF() {
- int num_edges = one_skeleton.size();
- if (num_edges > 0) {
- for (int i = 0; i < num_edges; i++) st.insert_simplex_and_subfaces(one_skeleton[i]);
- fill_adjacency_matrix_from_st();
- } else {
+ remove_edges(one_skeleton);
+ if (num_edges(one_skeleton_OFF))
+ one_skeleton = one_skeleton_OFF;
+ else
std::cout << "No triangulation read in OFF file!" << std::endl;
- }
}
public: // Set graph from Rips complex.
@@ -339,9 +345,27 @@ class Cover_complex {
*/
template <typename Distance>
void set_graph_from_rips(double threshold, Distance distance) {
- Rips_complex rips_complex_from_points(point_cloud, threshold, distance);
- rips_complex_from_points.create_complex(st, 1);
- fill_adjacency_matrix_from_st();
+ remove_edges(one_skeleton);
+ if (distances.size() == 0) compute_pairwise_distances(distance);
+ for (int i = 0; i < n; i++) {
+ for (int j = i + 1; j < n; j++) {
+ if (distances[i][j] <= threshold) {
+ boost::add_edge(vertices[i], vertices[j], one_skeleton);
+ boost::put(boost::edge_weight, one_skeleton, boost::edge(vertices[i], vertices[j], one_skeleton).first,
+ distances[i][j]);
+ }
+ }
+ }
+ }
+
+ public:
+ void set_graph_weights() {
+ Index_map index = boost::get(boost::vertex_index, one_skeleton);
+ Weight_map weight = boost::get(boost::edge_weight, one_skeleton);
+ boost::graph_traits<Graph>::edge_iterator ei, ei_end;
+ for (boost::tie(ei, ei_end) = boost::edges(one_skeleton); ei != ei_end; ++ei)
+ boost::put(weight, *ei,
+ distances[index[boost::source(*ei, one_skeleton)]][index[boost::target(*ei, one_skeleton)]]);
}
public: // Pairwise distances.
@@ -352,9 +376,8 @@ class Cover_complex {
double d;
std::vector<double> zeros(n);
for (int i = 0; i < n; i++) distances.push_back(zeros);
- std::string distance = point_cloud_name;
- distance.append("_dist");
- std::ifstream input(distance.c_str(), std::ios::out | std::ios::binary);
+ std::string distance = point_cloud_name + "_dist";
+ std::ifstream input(distance, std::ios::out | std::ios::binary);
if (input.good()) {
if (verbose) std::cout << "Reading distances..." << std::endl;
@@ -420,10 +443,7 @@ class Cover_complex {
}
if (verbose) std::cout << "delta = " << delta << std::endl;
- Rips_complex rips_complex_from_points(point_cloud, delta, distance);
- rips_complex_from_points.create_complex(st, 1);
- fill_adjacency_matrix_from_st();
-
+ set_graph_from_rips(delta, distance);
return delta;
}
@@ -438,15 +458,15 @@ class Cover_complex {
*
*/
void set_function_from_file(const std::string& func_file_name) {
- int vertex_id = 0;
+ int i = 0;
std::ifstream input(func_file_name);
std::string line;
double f;
while (std::getline(input, line)) {
std::stringstream stream(line);
stream >> f;
- func.emplace(vertex_id, f);
- vertex_id++;
+ func.emplace(i, f);
+ i++;
}
functional_cover = true;
cover_name = func_file_name;
@@ -460,10 +480,8 @@ class Cover_complex {
*/
void set_function_from_coordinate(int k) {
for (int i = 0; i < n; i++) func.emplace(i, point_cloud[i][k]);
- char coordinate[100];
- sprintf(coordinate, "coordinate %d", k);
functional_cover = true;
- cover_name = coordinate;
+ cover_name = "coordinate " + std::to_string(k);
}
public: // Set function from vector.
@@ -474,12 +492,8 @@ class Cover_complex {
*/
template <class InputRange>
void set_function_from_range(InputRange const& function) {
+ for (int i = 0; i < n; i++) func.emplace(i, function[i]);
functional_cover = true;
- int index = 0;
- for (auto v : function) {
- func.emplace(index, v);
- index++;
- }
}
// *******************************************************************************************************************
@@ -505,27 +519,23 @@ class Cover_complex {
}
double reso = 0;
+ Index_map index = boost::get(boost::vertex_index, one_skeleton);
if (type == "GIC") {
- for (auto simplex : st.complex_simplex_range()) {
- if (st.dimension(simplex) == 1) {
- std::vector<int> vertices;
- for (auto vertex : st.simplex_vertex_range(simplex)) vertices.push_back(vertex);
- reso = std::max(reso, std::abs(func[vertices[0]] - func[vertices[1]]));
- }
- }
+ boost::graph_traits<Graph>::edge_iterator ei, ei_end;
+ for (boost::tie(ei, ei_end) = boost::edges(one_skeleton); ei != ei_end; ++ei)
+ reso = std::max(reso, std::abs(func[index[boost::source(*ei, one_skeleton)]] -
+ func[index[boost::target(*ei, one_skeleton)]]));
if (verbose) std::cout << "resolution = " << reso << std::endl;
resolution_double = reso;
}
if (type == "Nerve") {
- for (auto simplex : st.complex_simplex_range()) {
- if (st.dimension(simplex) == 1) {
- std::vector<int> vertices;
- for (auto vertex : st.simplex_vertex_range(simplex)) vertices.push_back(vertex);
- reso = std::max(reso, (std::abs(func[vertices[0]] - func[vertices[1]])) / gain);
- }
- }
+ boost::graph_traits<Graph>::edge_iterator ei, ei_end;
+ for (boost::tie(ei, ei_end) = boost::edges(one_skeleton); ei != ei_end; ++ei)
+ reso = std::max(reso, std::abs(func[index[boost::source(*ei, one_skeleton)]] -
+ func[index[boost::target(*ei, one_skeleton)]]) /
+ gain);
if (verbose) std::cout << "resolution = " << reso << std::endl;
resolution_double = reso;
}
@@ -568,15 +578,12 @@ class Cover_complex {
}
// Read function values and compute min and max
- std::map<int, double>::iterator it;
- double maxf, minf;
- minf = std::numeric_limits<float>::max();
- maxf = std::numeric_limits<float>::min();
- for (it = func.begin(); it != func.end(); it++) {
- minf = std::min(minf, it->second);
- maxf = std::max(maxf, it->second);
+ double minf = std::numeric_limits<float>::max();
+ double maxf = std::numeric_limits<float>::lowest();
+ for (int i = 0; i < n; i++) {
+ minf = std::min(minf, func[i]);
+ maxf = std::max(maxf, func[i]);
}
- int n = func.size();
if (verbose) std::cout << "Min function value = " << minf << " and Max function value = " << maxf << std::endl;
// Compute cover of im(f)
@@ -648,78 +655,96 @@ class Cover_complex {
std::vector<int> points(n);
for (int i = 0; i < n; i++) points[i] = i;
std::sort(points.begin(), points.end(), Less(this->func));
+
int id = 0;
int pos = 0;
+ Index_map index = boost::get(boost::vertex_index, one_skeleton); // int maxc = -1;
+ std::map<int, std::vector<int> > preimages;
+ std::map<int, double> funcstd;
+ if (verbose) std::cout << "Computing preimages..." << std::endl;
for (int i = 0; i < res; i++) {
// Find points in the preimage
- std::map<int, std::vector<int> > prop;
std::pair<double, double> inter1 = intervals[i];
int tmp = pos;
+ double u, v;
if (i != res - 1) {
if (i != 0) {
std::pair<double, double> inter3 = intervals[i - 1];
while (func[points[tmp]] < inter3.second && tmp != n) {
- prop[points[tmp]] = adjacency_matrix[points[tmp]];
+ preimages[i].push_back(points[tmp]);
tmp++;
}
+ u = inter3.second;
+ } else {
+ u = inter1.first;
}
std::pair<double, double> inter2 = intervals[i + 1];
while (func[points[tmp]] < inter2.first && tmp != n) {
- prop[points[tmp]] = adjacency_matrix[points[tmp]];
+ preimages[i].push_back(points[tmp]);
tmp++;
}
-
+ v = inter2.first;
pos = tmp;
while (func[points[tmp]] < inter1.second && tmp != n) {
- prop[points[tmp]] = adjacency_matrix[points[tmp]];
+ preimages[i].push_back(points[tmp]);
tmp++;
}
} else {
std::pair<double, double> inter3 = intervals[i - 1];
while (func[points[tmp]] < inter3.second && tmp != n) {
- prop[points[tmp]] = adjacency_matrix[points[tmp]];
+ preimages[i].push_back(points[tmp]);
tmp++;
}
-
while (tmp != n) {
- prop[points[tmp]] = adjacency_matrix[points[tmp]];
+ preimages[i].push_back(points[tmp]);
tmp++;
}
+ u = inter3.second;
+ v = inter1.second;
}
- // Compute the connected components with DFS
- std::map<int, bool> visit;
- if (verbose) std::cout << "Preimage of interval " << i << std::endl;
- for (std::map<int, std::vector<int> >::iterator it = prop.begin(); it != prop.end(); it++)
- visit[it->first] = false;
- if (!(prop.empty())) {
- for (std::map<int, std::vector<int> >::iterator it = prop.begin(); it != prop.end(); it++) {
- if (!(visit[it->first])) {
- std::vector<int> cc;
- cc.clear();
- dfs(prop, it->first, cc, visit);
- int cci = cc.size();
- if (verbose) std::cout << "one CC with " << cci << " points, ";
- double average_col = 0;
- for (int j = 0; j < cci; j++) {
- cover[cc[j]].push_back(id);
- cover_back[id].push_back(cc[j]);
- average_col += func_color[cc[j]] / cci;
- }
- cover_fct[id] = i;
- cover_color[id] = std::pair<int, double>(cci, average_col);
- id++;
- }
- }
+ funcstd[i] = 0.5 * (u + v);
+ }
+
+ if (verbose) std::cout << "Computing connected components..." << std::endl;
+ // #pragma omp parallel for
+ for (int i = 0; i < res; i++) {
+ // Compute connected components
+ Graph G = one_skeleton.create_subgraph();
+ int num = preimages[i].size();
+ std::vector<int> component(num);
+ for (int j = 0; j < num; j++) boost::add_vertex(index[vertices[preimages[i][j]]], G);
+ boost::connected_components(G, &component[0]);
+ int max = 0;
+
+ // For each point in preimage
+ for (int j = 0; j < num; j++) {
+ // Update number of components in preimage
+ if (component[j] > max) max = component[j];
+
+ // Identify component with Cantor polynomial N^2 -> N
+ int identifier = (std::pow(i + component[j], 2) + 3 * i + component[j]) / 2;
+
+ // Update covers
+ cover[preimages[i][j]].push_back(identifier);
+ cover_back[identifier].push_back(preimages[i][j]);
+ cover_fct[identifier] = i;
+ cover_std[identifier] = funcstd[i];
+ cover_color[identifier].second += func_color[preimages[i][j]];
+ cover_color[identifier].first += 1;
}
- if (verbose) std::cout << std::endl;
+
+ // Maximal dimension is total number of connected components
+ id += max + 1;
}
maximal_dim = id - 1;
+ for (std::map<int, std::pair<int, double> >::iterator iit = cover_color.begin(); iit != cover_color.end(); iit++)
+ iit->second.second /= iit->second.first;
}
public: // Set cover from file.
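The `identifier` computed in the hunk above is the classical Cantor pairing N^2 -> N applied to the pair (connected component, interval index), so every component of every preimage gets a distinct vertex ID in the cover. A small standalone check of that formula (editorial sketch, not part of the patch):

    #include <cassert>
    #include <cmath>

    // Cantor pairing used above: id = ((i + c)^2 + 3*i + c) / 2, with i = interval index, c = component index.
    int cover_id(int c, int i) {
      return static_cast<int>((std::pow(i + c, 2) + 3 * i + c) / 2);
    }

    int main() {
      assert(cover_id(1, 2) == 8);  // component 1 of interval 2
      assert(cover_id(2, 1) == 7);  // swapping the roles yields a different ID
      assert(cover_id(0, 0) == 0);
      return 0;
    }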
@@ -730,9 +755,9 @@ class Cover_complex {
*
*/
void set_cover_from_file(const std::string& cover_file_name) {
- int vertex_id = 0;
- Cover_t cov;
- std::vector<Cover_t> cov_elts, cov_number;
+ int i = 0;
+ int cov;
+ std::vector<int> cov_elts, cov_number;
std::ifstream input(cover_file_name);
std::string line;
while (std::getline(input, line)) {
@@ -742,17 +767,18 @@ class Cover_complex {
cov_elts.push_back(cov);
cov_number.push_back(cov);
cover_fct[cov] = cov;
- cover_color[cov].second += func_color[vertex_id];
+ cover_color[cov].second += func_color[i];
cover_color[cov].first++;
- cover_back[cov].push_back(vertex_id);
+ cover_back[cov].push_back(i);
}
- cover[vertex_id] = cov_elts;
- vertex_id++;
+ cover[i] = cov_elts;
+ i++;
}
- std::vector<Cover_t>::iterator it;
+
std::sort(cov_number.begin(), cov_number.end());
- it = std::unique(cov_number.begin(), cov_number.end());
+ std::vector<int>::iterator it = std::unique(cov_number.begin(), cov_number.end());
cov_number.resize(std::distance(cov_number.begin(), it));
+
maximal_dim = cov_number.size() - 1;
for (int i = 0; i <= maximal_dim; i++) cover_color[i].second /= cover_color[i].first;
cover_name = cover_file_name;
@@ -770,52 +796,25 @@ class Cover_complex {
voronoi_subsamples.resize(m);
SampleWithoutReplacement(n, m, voronoi_subsamples);
if (distances.size() == 0) compute_pairwise_distances(distance);
+ set_graph_weights();
+ Weight_map weight = boost::get(boost::edge_weight, one_skeleton);
+ Index_map index = boost::get(boost::vertex_index, one_skeleton);
std::vector<double> mindist(n);
for (int j = 0; j < n; j++) mindist[j] = std::numeric_limits<double>::max();
// Compute the geodesic distances to subsamples with Dijkstra
+ // #pragma omp parallel for
for (int i = 0; i < m; i++) {
if (verbose) std::cout << "Computing geodesic distances to seed " << i << "..." << std::endl;
int seed = voronoi_subsamples[i];
- std::vector<double> dist(n);
- std::vector<int> process(n);
- for (int j = 0; j < n; j++) {
- dist[j] = std::numeric_limits<double>::max();
- process[j] = j;
- }
- dist[seed] = 0;
- int curr_size = process.size();
- int min_point, min_index;
- double min_dist;
- std::vector<int> neighbors;
- int num_neighbors;
-
- while (curr_size > 0) {
- min_dist = std::numeric_limits<double>::max();
- min_index = -1;
- min_point = -1;
- for (int j = 0; j < curr_size; j++) {
- if (dist[process[j]] < min_dist) {
- min_point = process[j];
- min_dist = dist[process[j]];
- min_index = j;
- }
- }
- assert(min_index != -1);
- process.erase(process.begin() + min_index);
- assert(min_point != -1);
- neighbors = adjacency_matrix[min_point];
- num_neighbors = neighbors.size();
- for (int j = 0; j < num_neighbors; j++) {
- double d = dist[min_point] + distances[min_point][neighbors[j]];
- dist[neighbors[j]] = std::min(dist[neighbors[j]], d);
- }
- curr_size = process.size();
- }
+ std::vector<double> dmap(n);
+ boost::dijkstra_shortest_paths(
+ one_skeleton, vertices[seed],
+ boost::weight_map(weight).distance_map(boost::make_iterator_property_map(dmap.begin(), index)));
for (int j = 0; j < n; j++)
- if (mindist[j] > dist[j]) {
- mindist[j] = dist[j];
+ if (mindist[j] > dmap[j]) {
+ mindist[j] = dmap[j];
if (cover[j].size() == 0)
cover[j].push_back(i);
else
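In the Voronoi cover above, the per-seed hand-written Dijkstra loop is replaced by boost::dijkstra_shortest_paths driven by the edge weights filled in by set_graph_weights(). A minimal standalone sketch of that call pattern (illustrative only, not taken from the patch):

    #include <boost/graph/adjacency_list.hpp>
    #include <boost/graph/dijkstra_shortest_paths.hpp>
    #include <iostream>
    #include <vector>

    using G = boost::adjacency_list<boost::listS, boost::vecS, boost::undirectedS, boost::no_property,
                                    boost::property<boost::edge_weight_t, double> >;

    int main() {
      G g(3);
      boost::add_edge(0, 1, 1.0, g);   // weighted edges
      boost::add_edge(1, 2, 2.5, g);

      std::vector<double> dmap(boost::num_vertices(g));   // shortest-path distances from the seed
      auto index = boost::get(boost::vertex_index, g);
      auto weight = boost::get(boost::edge_weight, g);
      boost::dijkstra_shortest_paths(
          g, 0 /* seed */,
          boost::weight_map(weight).distance_map(boost::make_iterator_property_map(dmap.begin(), index)));

      std::cout << "distance from 0 to 2: " << dmap[2] << std::endl;  // prints 3.5
      return 0;
    }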
@@ -840,7 +839,7 @@ class Cover_complex {
* @result cover_back(c) vector of IDs of data points.
*
*/
- const std::vector<int>& subpopulation(Cover_t c) { return cover_back[c]; }
+ const std::vector<int>& subpopulation(int c) { return cover_back[name2idinv[c]]; }
// *******************************************************************************************************************
// Visualization.
@@ -854,15 +853,15 @@ class Cover_complex {
*
*/
void set_color_from_file(const std::string& color_file_name) {
- int vertex_id = 0;
+ int i = 0;
std::ifstream input(color_file_name);
std::string line;
double f;
while (std::getline(input, line)) {
std::stringstream stream(line);
stream >> f;
- func_color.emplace(vertex_id, f);
- vertex_id++;
+ func_color.emplace(i, f);
+ i++;
}
color_name = color_file_name;
}
@@ -874,7 +873,7 @@ class Cover_complex {
*
*/
void set_color_from_coordinate(int k = 0) {
- for (int i = 0; i < n; i++) func_color.emplace(i, point_cloud[i][k]);
+ for (int i = 0; i < n; i++) func_color[i] = point_cloud[i][k];
color_name = "coordinate ";
color_name.append(std::to_string(k));
}
@@ -886,7 +885,7 @@ class Cover_complex {
*
*/
void set_color_from_vector(std::vector<double> color) {
- for (unsigned int i = 0; i < color.size(); i++) func_color.emplace(i, color[i]);
+ for (unsigned int i = 0; i < color.size(); i++) func_color[i] = color[i];
}
public: // Create a .dot file that can be compiled with neato to produce a .pdf file.
@@ -895,26 +894,30 @@ class Cover_complex {
* of its 1-skeleton in a .pdf file.
*/
void plot_DOT() {
- char mapp[11] = "SC.dot";
+ std::string mapp = point_cloud_name + "_sc.dot";
std::ofstream graphic(mapp);
- graphic << "graph GIC {" << std::endl;
- double maxv, minv;
- maxv = std::numeric_limits<double>::min();
- minv = std::numeric_limits<double>::max();
- for (std::map<Cover_t, std::pair<int, double> >::iterator iit = cover_color.begin(); iit != cover_color.end();
- iit++) {
+
+ double maxv = std::numeric_limits<double>::lowest();
+ double minv = std::numeric_limits<double>::max();
+ for (std::map<int, std::pair<int, double> >::iterator iit = cover_color.begin(); iit != cover_color.end(); iit++) {
maxv = std::max(maxv, iit->second.second);
minv = std::min(minv, iit->second.second);
}
+
int k = 0;
std::vector<int> nodes;
nodes.clear();
- for (std::map<Cover_t, std::pair<int, double> >::iterator iit = cover_color.begin(); iit != cover_color.end();
- iit++) {
+
+ graphic << "graph GIC {" << std::endl;
+ int id = 0;
+ for (std::map<int, std::pair<int, double> >::iterator iit = cover_color.begin(); iit != cover_color.end(); iit++) {
if (iit->second.first > mask) {
nodes.push_back(iit->first);
- graphic << iit->first << "[shape=circle fontcolor=black color=black label=\"" << iit->first << ":"
- << iit->second.first << "\" style=filled fillcolor=\""
+ name2id[iit->first] = id;
+ name2idinv[id] = iit->first;
+ id++;
+ graphic << name2id[iit->first] << "[shape=circle fontcolor=black color=black label=\"" << name2id[iit->first]
+ << ":" << iit->second.first << "\" style=filled fillcolor=\""
<< (1 - (maxv - iit->second.second) / (maxv - minv)) * 0.6 << ", 1, 1\"]" << std::endl;
k++;
}
@@ -924,13 +927,14 @@ class Cover_complex {
for (int i = 0; i < num_simplices; i++)
if (simplices[i].size() == 2) {
if (cover_color[simplices[i][0]].first > mask && cover_color[simplices[i][1]].first > mask) {
- graphic << " " << simplices[i][0] << " -- " << simplices[i][1] << " [weight=15];" << std::endl;
+ graphic << " " << name2id[simplices[i][0]] << " -- " << name2id[simplices[i][1]] << " [weight=15];"
+ << std::endl;
ke++;
}
}
graphic << "}";
graphic.close();
- std::cout << "SC.dot generated. It can be visualized with e.g. neato." << std::endl;
+ std::cout << mapp << " file generated. It can be visualized with e.g. neato." << std::endl;
}
public: // Create a .txt file that can be compiled with KeplerMapper.
@@ -940,8 +944,9 @@ class Cover_complex {
void write_info() {
int num_simplices = simplices.size();
int num_edges = 0;
- char mapp[11] = "SC.txt";
+ std::string mapp = point_cloud_name + "_sc.txt";
std::ofstream graphic(mapp);
+
for (int i = 0; i < num_simplices; i++)
if (simplices[i].size() == 2)
if (cover_color[simplices[i][0]].first > mask && cover_color[simplices[i][1]].first > mask) num_edges++;
@@ -952,16 +957,21 @@ class Cover_complex {
graphic << resolution_double << " " << gain << std::endl;
graphic << cover_color.size() << " " << num_edges << std::endl;
- for (std::map<Cover_t, std::pair<int, double> >::iterator iit = cover_color.begin(); iit != cover_color.end();
- iit++)
- graphic << iit->first << " " << iit->second.second << " " << iit->second.first << std::endl;
+ int id = 0;
+ for (std::map<int, std::pair<int, double> >::iterator iit = cover_color.begin(); iit != cover_color.end(); iit++) {
+ graphic << id << " " << iit->second.second << " " << iit->second.first << std::endl;
+ name2id[iit->first] = id;
+ name2idinv[id] = iit->first;
+ id++;
+ }
for (int i = 0; i < num_simplices; i++)
if (simplices[i].size() == 2)
if (cover_color[simplices[i][0]].first > mask && cover_color[simplices[i][1]].first > mask)
- graphic << simplices[i][0] << " " << simplices[i][1] << std::endl;
+ graphic << name2id[simplices[i][0]] << " " << name2id[simplices[i][1]] << std::endl;
graphic.close();
- std::cout << "SC.txt generated. It can be visualized with e.g. python KeplerMapperVisuFromTxtFile.py and firefox."
+ std::cout << mapp
+ << " generated. It can be visualized with e.g. python KeplerMapperVisuFromTxtFile.py and firefox."
<< std::endl;
}
@@ -972,14 +982,17 @@ class Cover_complex {
*/
void plot_OFF() {
assert(cover_name == "Voronoi");
- char gic[11] = "SC.off";
- std::ofstream graphic(gic);
- graphic << "OFF" << std::endl;
+
int m = voronoi_subsamples.size();
int numedges = 0;
int numfaces = 0;
std::vector<std::vector<int> > edges, faces;
int numsimplices = simplices.size();
+
+ std::string mapp = point_cloud_name + "_sc.off";
+ std::ofstream graphic(mapp);
+
+ graphic << "OFF" << std::endl;
for (int i = 0; i < numsimplices; i++) {
if (simplices[i].size() == 2) {
numedges++;
@@ -1004,26 +1017,183 @@ class Cover_complex {
for (int i = 0; i < numfaces; i++)
graphic << 3 << " " << faces[i][0] << " " << faces[i][1] << " " << faces[i][2] << std::endl;
graphic.close();
- std::cout << "SC.off generated. It can be visualized with e.g. geomview." << std::endl;
+ std::cout << mapp << " generated. It can be visualized with e.g. geomview." << std::endl;
}
// *******************************************************************************************************************
+ // Extended Persistence Diagrams.
+ // *******************************************************************************************************************
+
+ public:
+ /** \brief Computes the extended persistence diagram of the complex.
+ *
+ */
+ void compute_PD() {
+ Simplex_tree st;
+
+ // Compute max and min
+ double maxf = std::numeric_limits<double>::lowest();
+ double minf = std::numeric_limits<double>::max();
+ for (std::map<int, double>::iterator it = cover_std.begin(); it != cover_std.end(); it++) {
+ maxf = std::max(maxf, it->second);
+ minf = std::min(minf, it->second);
+ }
+
+ for (auto const& simplex : simplices) {
+ // Add a simplex and a cone on it
+ std::vector<int> splx = simplex;
+ splx.push_back(-2);
+ st.insert_simplex_and_subfaces(splx);
+ }
+
+ // Build filtration
+ for (auto simplex : st.complex_simplex_range()) {
+ double filta = std::numeric_limits<double>::lowest();
+ double filts = filta;
+ bool ascending = true;
+ for (auto vertex : st.simplex_vertex_range(simplex)) {
+ if (vertex == -2) {
+ ascending = false;
+ continue;
+ }
+ filta = std::max(-2 + (cover_std[vertex] - minf) / (maxf - minf), filta);
+ filts = std::max(2 - (cover_std[vertex] - minf) / (maxf - minf), filts);
+ }
+ if (ascending)
+ st.assign_filtration(simplex, filta);
+ else
+ st.assign_filtration(simplex, filts);
+ }
+ int magic[] = {-2};
+ st.assign_filtration(st.find(magic), -3);
+
+ // Compute PD
+ st.initialize_filtration();
+ Gudhi::persistent_cohomology::Persistent_cohomology<Simplex_tree, Gudhi::persistent_cohomology::Field_Zp> pcoh(st);
+ pcoh.init_coefficients(2);
+ pcoh.compute_persistent_cohomology();
+
+ // Output PD
+ int max_dim = st.dimension();
+ for (int i = 0; i < max_dim; i++) {
+ std::vector<std::pair<double, double> > bars = pcoh.intervals_in_dimension(i);
+ int num_bars = bars.size();
+      if (verbose) std::cout << num_bars << " interval(s) in dimension " << i << ":" << std::endl;
+ for (int j = 0; j < num_bars; j++) {
+ double birth = bars[j].first;
+ double death = bars[j].second;
+ if (i == 0 && std::isinf(death)) continue;
+ if (birth < 0)
+ birth = minf + (birth + 2) * (maxf - minf);
+ else
+ birth = minf + (2 - birth) * (maxf - minf);
+ if (death < 0)
+ death = minf + (death + 2) * (maxf - minf);
+ else
+ death = minf + (2 - death) * (maxf - minf);
+ PD.push_back(std::pair<double, double>(birth, death));
+ if (verbose) std::cout << " [" << birth << ", " << death << "]" << std::endl;
+ }
+ }
+ }
+
+ public:
+ /** \brief Computes bootstrapped distances distribution.
+ *
+ * @param[in] N number of bootstrap iterations.
+ *
+ */
+ template <typename SimplicialComplex>
+ void compute_distribution(int N = 100) {
+ if (distribution.size() >= N) {
+ std::cout << "Already done!" << std::endl;
+ } else {
+ for (int i = 0; i < N - distribution.size(); i++) {
+ Cover_complex Cboot;
+ Cboot.n = this->n;
+ std::vector<int> boot(this->n);
+ for (int j = 0; j < this->n; j++) {
+ double u = GetUniform();
+ int id = std::floor(u * (this->n));
+ boot[j] = id;
+        Cboot.point_cloud.push_back(this->point_cloud[id]);  // Cboot.point_cloud starts empty, so append
+ Cboot.func.emplace(j, this->func[id]);
+ }
+ for (int j = 0; j < n; j++) {
+ std::vector<double> dist(n);
+ for (int k = 0; k < n; k++) dist[k] = distances[boot[j]][boot[k]];
+ Cboot.distances.push_back(dist);
+ }
+
+ Cboot.set_graph_from_automatic_rips(Gudhi::Euclidean_distance());
+ Cboot.set_automatic_resolution();
+ Cboot.set_gain();
+ Cboot.set_cover_from_function();
+ Cboot.find_simplices();
+ Cboot.compute_PD();
+
+ distribution.push_back(Gudhi::persistence_diagram::bottleneck_distance(this->PD, Cboot.PD));
+ }
+
+ std::sort(distribution.begin(), distribution.end());
+ }
+ }
+
+ public:
+ /** \brief Computes the bottleneck distance threshold corresponding to a specific confidence level.
+ *
+ * @param[in] alpha Confidence level.
+ *
+ */
+ double compute_distance_from_confidence_level(double alpha) {
+ int N = distribution.size();
+ return distribution[std::floor(alpha * N)];
+ }
+
+ public:
+ /** \brief Computes the confidence level of a specific bottleneck distance threshold.
+ *
+ * @param[in] d Bottleneck distance.
+ *
+ */
+ double compute_confidence_level_from_distance(double d) {
+ int N = distribution.size();
+ for (int i = 0; i < N; i++)
+      if (distribution[i] > d) return i * 1.0 / N;
+    return 1.0;  // d is at least as large as every bootstrapped distance
+  }
+
+ public:
+  /** \brief Computes the p-value, i.e. 1 minus the confidence level of the largest bottleneck
+   * distance preserving the points in the persistence diagram of the output simplicial complex.
+ *
+ */
+ double compute_p_value() {
+    double distancemin = std::numeric_limits<double>::max();
+ int N = PD.size();
+ for (int i = 0; i < N; i++) distancemin = std::min(distancemin, 0.5 * (PD[i].second - PD[i].first));
+ return 1 - compute_confidence_level_from_distance(distancemin);
+ }
+
+ // *******************************************************************************************************************
+ // Computation of simplices.
// *******************************************************************************************************************
public:
/** \brief Creates the simplicial complex.
*
- * @param[in] complex SimplicialComplexForRips to be created.
+ * @param[in] complex SimplicialComplex to be created.
*
*/
- template <typename SimplicialComplexForRips>
- void create_complex(SimplicialComplexForRips& complex) {
+ template <typename SimplicialComplex>
+ void create_complex(SimplicialComplex& complex) {
unsigned int dimension = 0;
for (auto const& simplex : simplices) {
- complex.insert_simplex_and_subfaces(simplex);
+ int numvert = simplex.size();
+ double filt = std::numeric_limits<double>::lowest();
+ for (int i = 0; i < numvert; i++) filt = std::max(cover_color[simplex[i]].second, filt);
+ complex.insert_simplex_and_subfaces(simplex, filt);
if (dimension < simplex.size() - 1) dimension = simplex.size() - 1;
}
- complex.set_dimension(dimension);
}
public:
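The compute_PD() function in the hunk above implements extended persistence by rescaling the cover values: simplices of the original complex receive filtration values in [-2, -1] (ascending pass), the coned-off copies receive values in [1, 2] (descending pass), and births and deaths are mapped back to the original function range afterwards. A quick round-trip check of that rescaling (editorial sketch with made-up minf/maxf, not part of the patch):

    #include <cassert>
    #include <cmath>

    int main() {
      // Made-up function range and sample value; minf/maxf play the role of the min/max of cover_std.
      double minf = -1.0, maxf = 3.0, x = 0.5;

      double asc = -2 + (x - minf) / (maxf - minf);   // ascending copy, lands in [-2, -1]
      double desc = 2 - (x - minf) / (maxf - minf);   // descending (coned) copy, lands in [1, 2]

      // Inverse maps used when the persistence pairs are pushed into PD.
      double back_asc = minf + (asc + 2) * (maxf - minf);
      double back_desc = minf + (2 - desc) * (maxf - minf);

      assert(std::abs(back_asc - x) < 1e-12);
      assert(std::abs(back_desc - x) < 1e-12);
      return 0;
    }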
@@ -1036,15 +1206,16 @@ class Cover_complex {
}
if (type == "Nerve") {
- for (std::map<int, std::vector<Cover_t> >::iterator it = cover.begin(); it != cover.end(); it++)
- simplices.push_back(it->second);
- std::vector<std::vector<Cover_t> >::iterator it;
+ for(auto& simplex : cover)
+ simplices.push_back(simplex.second);
std::sort(simplices.begin(), simplices.end());
- it = std::unique(simplices.begin(), simplices.end());
+ std::vector<std::vector<int> >::iterator it = std::unique(simplices.begin(), simplices.end());
simplices.resize(std::distance(simplices.begin(), it));
}
if (type == "GIC") {
+ Index_map index = boost::get(boost::vertex_index, one_skeleton);
+
if (functional_cover) {
// Computes the simplices in the GIC by looking at all the edges of the graph and adding the
// corresponding edges in the GIC if the images of the endpoints belong to consecutive intervals.
@@ -1053,81 +1224,44 @@ class Cover_complex {
throw std::invalid_argument(
"the output of this function is correct ONLY if the cover is minimal, i.e. the gain is less than 0.5.");
- int v1, v2;
-
- // Loop on all points.
- for (std::map<int, std::vector<Cover_t> >::iterator it = cover.begin(); it != cover.end(); it++) {
- int vid = it->first;
- std::vector<int> neighbors = adjacency_matrix[vid];
- int num_neighb = neighbors.size();
-
- // Find cover of current point (vid).
- if (cover[vid].size() == 2)
- v1 = std::min(cover[vid][0], cover[vid][1]);
- else
- v1 = cover[vid][0];
- std::vector<int> node(1);
- node[0] = v1;
- simplices.push_back(node);
-
- // Loop on neighbors.
- for (int i = 0; i < num_neighb; i++) {
- int neighb = neighbors[i];
-
- // Find cover of neighbor (neighb).
- if (cover[neighb].size() == 2)
- v2 = std::max(cover[neighb][0], cover[neighb][1]);
- else
- v2 = cover[neighb][0];
-
- // If neighbor is in next interval, add edge.
- if (cover_fct[v2] == cover_fct[v1] + 1) {
- std::vector<int> edge(2);
- edge[0] = v1;
- edge[1] = v2;
- simplices.push_back(edge);
+ // Loop on all edges.
+ boost::graph_traits<Graph>::edge_iterator ei, ei_end;
+ for (boost::tie(ei, ei_end) = boost::edges(one_skeleton); ei != ei_end; ++ei) {
+ int nums = cover[index[boost::source(*ei, one_skeleton)]].size();
+ for (int i = 0; i < nums; i++) {
+ int vs = cover[index[boost::source(*ei, one_skeleton)]][i];
+ int numt = cover[index[boost::target(*ei, one_skeleton)]].size();
+ for (int j = 0; j < numt; j++) {
+ int vt = cover[index[boost::target(*ei, one_skeleton)]][j];
+ if (cover_fct[vs] == cover_fct[vt] + 1 || cover_fct[vt] == cover_fct[vs] + 1) {
+ std::vector<int> edge(2);
+ edge[0] = std::min(vs, vt);
+ edge[1] = std::max(vs, vt);
+ simplices.push_back(edge);
+ goto afterLoop;
+ }
}
}
+ afterLoop:;
}
- std::vector<std::vector<Cover_t> >::iterator it;
std::sort(simplices.begin(), simplices.end());
- it = std::unique(simplices.begin(), simplices.end());
+ std::vector<std::vector<int> >::iterator it = std::unique(simplices.begin(), simplices.end());
simplices.resize(std::distance(simplices.begin(), it));
} else {
- // Find IDs of edges to remove
- std::vector<int> simplex_to_remove;
- int simplex_id = 0;
- for (auto simplex : st.complex_simplex_range()) {
- if (st.dimension(simplex) == 1) {
- std::vector<std::vector<Cover_t> > comp;
- for (auto vertex : st.simplex_vertex_range(simplex)) comp.push_back(cover[vertex]);
- if (comp[0].size() == 1 && comp[0] == comp[1]) simplex_to_remove.push_back(simplex_id);
+ // Find edges to keep
+ Simplex_tree st;
+ boost::graph_traits<Graph>::edge_iterator ei, ei_end;
+ for (boost::tie(ei, ei_end) = boost::edges(one_skeleton); ei != ei_end; ++ei)
+ if (!(cover[index[boost::target(*ei, one_skeleton)]].size() == 1 &&
+ cover[index[boost::target(*ei, one_skeleton)]] == cover[index[boost::source(*ei, one_skeleton)]])) {
+ std::vector<int> edge(2);
+ edge[0] = index[boost::source(*ei, one_skeleton)];
+ edge[1] = index[boost::target(*ei, one_skeleton)];
+ st.insert_simplex_and_subfaces(edge);
}
- simplex_id++;
- }
- // Remove edges
- if (simplex_to_remove.size() > 1) {
- int current_id = 1;
- auto simplex = st.complex_simplex_range().begin();
- int num_rem = 0;
- for (int i = 0; i < simplex_id - 1; i++) {
- int j = i + 1;
- auto simplex_tmp = simplex;
- simplex_tmp++;
- if (j == simplex_to_remove[current_id]) {
- st.remove_maximal_simplex(*simplex_tmp);
- current_id++;
- num_rem++;
- } else {
- simplex++;
- }
- }
- simplex = st.complex_simplex_range().begin();
- for (int i = 0; i < simplex_to_remove[0]; i++) simplex++;
- st.remove_maximal_simplex(*simplex);
- }
+ // st.insert_graph(one_skeleton);
// Build the Simplex Tree corresponding to the graph
st.expansion(maximal_dim);
@@ -1136,23 +1270,21 @@ class Cover_complex {
simplices.clear();
for (auto simplex : st.complex_simplex_range()) {
if (!st.has_children(simplex)) {
- std::vector<Cover_t> simplx;
+ std::vector<int> simplx;
for (auto vertex : st.simplex_vertex_range(simplex)) {
unsigned int sz = cover[vertex].size();
for (unsigned int i = 0; i < sz; i++) {
simplx.push_back(cover[vertex][i]);
}
}
-
std::sort(simplx.begin(), simplx.end());
- std::vector<Cover_t>::iterator it = std::unique(simplx.begin(), simplx.end());
+ std::vector<int>::iterator it = std::unique(simplx.begin(), simplx.end());
simplx.resize(std::distance(simplx.begin(), it));
simplices.push_back(simplx);
}
}
- std::vector<std::vector<Cover_t> >::iterator it;
std::sort(simplices.begin(), simplices.end());
- it = std::unique(simplices.begin(), simplices.end());
+ std::vector<std::vector<int> >::iterator it = std::unique(simplices.begin(), simplices.end());
simplices.resize(std::distance(simplices.begin(), it));
}
}
diff --git a/src/Nerve_GIC/test/CMakeLists.txt b/src/Nerve_GIC/test/CMakeLists.txt
index 03fe47ca..c35cdff7 100644
--- a/src/Nerve_GIC/test/CMakeLists.txt
+++ b/src/Nerve_GIC/test/CMakeLists.txt
@@ -1,14 +1,17 @@
cmake_minimum_required(VERSION 2.6)
project(Graph_induced_complex_tests)
-include(GUDHI_test_coverage)
+if (NOT CGAL_VERSION VERSION_LESS 4.8.1)
+ include(GUDHI_test_coverage)
-add_executable ( Nerve_GIC_test_unit test_GIC.cpp )
-target_link_libraries(Nerve_GIC_test_unit ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
-if (TBB_FOUND)
- target_link_libraries(Nerve_GIC_test_unit ${TBB_LIBRARIES})
-endif()
+ add_executable ( Nerve_GIC_test_unit test_GIC.cpp )
+ target_link_libraries(Nerve_GIC_test_unit ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
+ if (TBB_FOUND)
+ target_link_libraries(Nerve_GIC_test_unit ${TBB_LIBRARIES})
+ endif()
-file(COPY data DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+ file(COPY data DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
-gudhi_add_coverage_test(Nerve_GIC_test_unit)
+ gudhi_add_coverage_test(Nerve_GIC_test_unit)
+
+endif (NOT CGAL_VERSION VERSION_LESS 4.8.1)
diff --git a/src/Nerve_GIC/test/test_GIC.cpp b/src/Nerve_GIC/test/test_GIC.cpp
index a8b1e7f7..d633753c 100644
--- a/src/Nerve_GIC/test/test_GIC.cpp
+++ b/src/Nerve_GIC/test/test_GIC.cpp
@@ -24,11 +24,11 @@
#define BOOST_TEST_MODULE "graph_induced_complex"
#include <boost/test/unit_test.hpp>
-#include <cmath> // float comparison
+
#include <limits>
#include <string>
#include <vector>
-#include <algorithm> // std::max
+
#include <gudhi/GIC.h>
#include <gudhi/distance_functions.h>
#include <gudhi/reader_utils.h>
diff --git a/src/Nerve_GIC/utilities/CMakeLists.txt b/src/Nerve_GIC/utilities/CMakeLists.txt
new file mode 100644
index 00000000..7762c8a0
--- /dev/null
+++ b/src/Nerve_GIC/utilities/CMakeLists.txt
@@ -0,0 +1,24 @@
+cmake_minimum_required(VERSION 2.6)
+project(Nerve_GIC_examples)
+
+if (NOT CGAL_VERSION VERSION_LESS 4.8.1)
+
+ add_executable ( Nerve Nerve.cpp )
+ add_executable ( VoronoiGIC VoronoiGIC.cpp )
+
+ if (TBB_FOUND)
+ target_link_libraries(Nerve ${TBB_LIBRARIES})
+ target_link_libraries(VoronoiGIC ${TBB_LIBRARIES})
+ endif()
+
+ file(COPY KeplerMapperVisuFromTxtFile.py km.py DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+  # Copy files so as not to pollute the source tree when testing
+ file(COPY "${CMAKE_SOURCE_DIR}/data/points/human.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+
+ add_test(NAME Nerve_GIC_utilities_nerve COMMAND $<TARGET_FILE:Nerve>
+ "human.off" "2" "10" "0.3")
+
+ add_test(NAME Nerve_GIC_utilities_VoronoiGIC COMMAND $<TARGET_FILE:VoronoiGIC>
+ "human.off" "100")
+
+endif (NOT CGAL_VERSION VERSION_LESS 4.8.1)
diff --git a/src/Nerve_GIC/utilities/KeplerMapperVisuFromTxtFile.py b/src/Nerve_GIC/utilities/KeplerMapperVisuFromTxtFile.py
new file mode 100755
index 00000000..c811f610
--- /dev/null
+++ b/src/Nerve_GIC/utilities/KeplerMapperVisuFromTxtFile.py
@@ -0,0 +1,89 @@
+#!/usr/bin/env python
+
+import km
+import numpy as np
+from collections import defaultdict
+import argparse
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Mathieu Carriere
+
+ Copyright (C) 2017 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Mathieu Carriere"
+__copyright__ = "Copyright (C) 2017 INRIA"
+__license__ = "GPL v3"
+
+parser = argparse.ArgumentParser(description='Creates an HTML Kepler Mapper '
+                                 'file to visualize a *_sc.txt file.',
+                                 epilog='Example: '
+                                 './KeplerMapperVisuFromTxtFile.py '
+                                 '-f ../../data/points/human.off_sc.txt '
+                                 '- Constructs a human.off_sc.html file.')
+parser.add_argument("-f", "--file", type=str, required=True)
+
+args = parser.parse_args()
+
+with open(args.file, 'r') as f:
+ network = {}
+ mapper = km.KeplerMapper(verbose=0)
+ data = np.zeros((3,3))
+ projected_data = mapper.fit_transform( data, projection="sum", scaler=None )
+
+ nodes = defaultdict(list)
+ links = defaultdict(list)
+ custom = defaultdict(list)
+
+ dat = f.readline()
+ lens = f.readline()
+    color = f.readline()
+ param = [float(i) for i in f.readline().split(" ")]
+
+ nums = [int(i) for i in f.readline().split(" ")]
+ num_nodes = nums[0]
+ num_edges = nums[1]
+
+ for i in range(0,num_nodes):
+ point = [float(j) for j in f.readline().split(" ")]
+ nodes[ str(int(point[0])) ] = [ int(point[0]), point[1], int(point[2]) ]
+ links[ str(int(point[0])) ] = []
+ custom[ int(point[0]) ] = point[1]
+
+ m = min([custom[i] for i in range(0,num_nodes)])
+ M = max([custom[i] for i in range(0,num_nodes)])
+
+ for i in range(0,num_edges):
+ edge = [int(j) for j in f.readline().split(" ")]
+ links[ str(edge[0]) ].append( str(edge[1]) )
+ links[ str(edge[1]) ].append( str(edge[0]) )
+
+ network["nodes"] = nodes
+ network["links"] = links
+ network["meta"] = lens
+
+ html_output_filename = args.file.rsplit('.', 1)[0] + '.html'
+ mapper.visualize(network, color_function = color, path_html=html_output_filename, title=dat,
+ graph_link_distance=30, graph_gravity=0.1, graph_charge=-120, custom_tooltips=custom, width_html=0,
+ height_html=0, show_tooltips=True, show_title=True, show_meta=True, res=param[0],gain=param[1], minimum=m,maximum=M)
+ message = repr(html_output_filename) + " is generated. You can now use your favorite web browser to visualize it."
+ print(message)
+
+
+ f.close()
diff --git a/src/Nerve_GIC/example/Nerve.cpp b/src/Nerve_GIC/utilities/Nerve.cpp
index 4d5b009b..aefc3874 100644
--- a/src/Nerve_GIC/example/Nerve.cpp
+++ b/src/Nerve_GIC/utilities/Nerve.cpp
@@ -27,8 +27,8 @@
void usage(int nbArgs, char *const progName) {
std::cerr << "Error: Number of arguments (" << nbArgs << ") is not correct\n";
- std::cerr << "Usage: " << progName << " filename.off coordinate resolution gain [--v] \n";
- std::cerr << " i.e.: " << progName << " ../../data/points/human.off 2 10 0.3 --v \n";
+ std::cerr << "Usage: " << progName << " filename.off coordinate resolution gain [-v] \n";
+ std::cerr << " i.e.: " << progName << " ../../data/points/human.off 2 10 0.3 -v \n";
exit(-1); // ----- >>
}
@@ -72,6 +72,7 @@ int main(int argc, char **argv) {
Gudhi::Simplex_tree<> stree;
SC.create_complex(stree);
+ SC.compute_PD();
// ----------------------------------------------------------------------------
// Display information about the graph induced complex
diff --git a/src/Nerve_GIC/utilities/Nerve.txt b/src/Nerve_GIC/utilities/Nerve.txt
new file mode 100644
index 00000000..839ff45e
--- /dev/null
+++ b/src/Nerve_GIC/utilities/Nerve.txt
@@ -0,0 +1,63 @@
+Min function value = -0.979672 and Max function value = 0.816414
+Interval 0 = [-0.979672, -0.761576]
+Interval 1 = [-0.838551, -0.581967]
+Interval 2 = [-0.658942, -0.402359]
+Interval 3 = [-0.479334, -0.22275]
+Interval 4 = [-0.299725, -0.0431415]
+Interval 5 = [-0.120117, 0.136467]
+Interval 6 = [0.059492, 0.316076]
+Interval 7 = [0.239101, 0.495684]
+Interval 8 = [0.418709, 0.675293]
+Interval 9 = [0.598318, 0.816414]
+Computing preimages...
+Computing connected components...
+.txt generated. It can be visualized with e.g. python KeplerMapperVisuFromTxtFile.py and firefox.
+5 interval(s) in dimension 0:
+ [-0.909111, 0.00817529]
+ [-0.171433, 0.367392]
+ [-0.171433, 0.367392]
+ [-0.909111, 0.745853]
+0 interval(s) in dimension 1:
+Nerve is of dimension 1 - 41 simplices - 21 vertices.
+Iterator on Nerve simplices
+1
+0
+4
+4 0
+2
+2 1
+8
+8 2
+5
+5 4
+9
+9 8
+13
+13 5
+14
+14 9
+19
+19 13
+25
+32
+20
+32 20
+33
+33 25
+26
+26 14
+26 19
+42
+42 26
+34
+34 33
+27
+27 20
+35
+35 27
+35 34
+42 35
+44
+44 35
+54
+54 44 \ No newline at end of file
diff --git a/src/Nerve_GIC/example/VoronoiGIC.cpp b/src/Nerve_GIC/utilities/VoronoiGIC.cpp
index 32431cc2..54bb871e 100644
--- a/src/Nerve_GIC/example/VoronoiGIC.cpp
+++ b/src/Nerve_GIC/utilities/VoronoiGIC.cpp
@@ -27,8 +27,8 @@
void usage(int nbArgs, char *const progName) {
std::cerr << "Error: Number of arguments (" << nbArgs << ") is not correct\n";
- std::cerr << "Usage: " << progName << " filename.off N [--v] \n";
- std::cerr << " i.e.: " << progName << " ../../data/points/human.off 100 --v \n";
+ std::cerr << "Usage: " << progName << " filename.off N [-v] \n";
+ std::cerr << " i.e.: " << progName << " ../../data/points/human.off 100 -v \n";
exit(-1); // ----- >>
}
diff --git a/src/Nerve_GIC/utilities/covercomplex.md b/src/Nerve_GIC/utilities/covercomplex.md
new file mode 100644
index 00000000..f33cb2e0
--- /dev/null
+++ b/src/Nerve_GIC/utilities/covercomplex.md
@@ -0,0 +1,62 @@
+
+
+# Cover complex #
+
+
+## Nerve ##
+This program builds the Nerve of a point cloud read from an OFF file.
+The cover C comes from the preimages of intervals covering a coordinate function;
+these preimages are then refined into their connected components using the triangulation of the OFF file.
+
+The program also writes a `*_sc.txt` file (for instance `human.off_sc.txt` for an input `human.off`).
+The first three lines of this file are the location of the input point cloud, the function used to compute the cover, and the function used to color the nodes.
+The fourth line contains the resolution and the gain, and the fifth line contains the number of vertices nv and edges ne of the Nerve.
+The next nv lines represent the vertices: each line contains the vertex ID, the average color function value of its data points, and the number of data points it contains.
+Finally, the last ne lines represent the edges, characterized by the IDs of their vertices (an illustrative layout is sketched below).
+
+**Usage**
+
+`Nerve <OFF input file> coordinate resolution gain [-v]`
+
+where
+
+* `coordinate` is the coordinate function to cover
+* `resolution` is the number of intervals
+* `gain` is the gain for each interval
+* `-v` is optional; it activates verbose mode.
+
+**Example**
+
+`Nerve ../../data/points/human.off 2 10 0.3`
+
+* Builds the Nerve of a point cloud sampled on a 3D human shape (human.off).
+The cover C comes from the preimages of intervals (10 intervals with gain 0.3) covering the height function (coordinate 2).
+
+`python KeplerMapperVisuFromTxtFile.py -f ../../data/points/human.off_sc.txt`
+
+* Constructs `human.off_sc.html` file. You can now use your favorite web browser to visualize it.
+
+## VoronoiGIC ##
+
+This utility builds the Graph Induced Complex (GIC) of a point cloud.
+It subsamples *N* points in the point cloud, which act as seeds of a geodesic Voronoï diagram.
+Each cell of the diagram is then an element of C.
+
+The program also writes a `*_sc.off` file, an OFF file that can be visualized with Geomview.
+
+**Usage**
+
+`VoronoiGIC <OFF input file> samples_number [-v]`
+
+where
+
+* `samples_number` is the number of samples to take from the point cloud
+* `-v` is optional; it activates verbose mode.
+
+**Example**
+
+`VoronoiGIC ../../data/points/human.off 700`
+
+* Builds the Voronoi Graph Induced Complex with 700 subsamples of the `human.off` point cloud.
+The resulting `../../data/points/human.off_sc.off` file can be visualized with Geomview.
+
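As referenced in the Nerve section of covercomplex.md above, here is an illustrative layout of a `*_sc.txt` file produced by write_info() and consumed by KeplerMapperVisuFromTxtFile.py. The file names and all numbers are invented for the example and are not taken from the patch:

    ../../data/points/human.off   <- location of the input point cloud
    coordinate 2                  <- function used to compute the cover
    coordinate 2                  <- function used to color the nodes
    10 0.3                        <- resolution and gain
    3 2                           <- nv vertices and ne edges
    0 -0.87 143                   <- vertex ID, average color value, number of data points
    1 -0.61 218
    2 -0.35 84
    0 1                           <- edges, given by the IDs of their endpoints
    1 2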
diff --git a/src/Nerve_GIC/example/km.py b/src/Nerve_GIC/utilities/km.py
index 53024aab..53024aab 100755
--- a/src/Nerve_GIC/example/km.py
+++ b/src/Nerve_GIC/utilities/km.py
diff --git a/src/Nerve_GIC/example/km.py.COPYRIGHT b/src/Nerve_GIC/utilities/km.py.COPYRIGHT
index bef7b121..bef7b121 100644
--- a/src/Nerve_GIC/example/km.py.COPYRIGHT
+++ b/src/Nerve_GIC/utilities/km.py.COPYRIGHT
diff --git a/src/Persistence_representations/doc/Persistence_representations_doc.h b/src/Persistence_representations/doc/Persistence_representations_doc.h
index 978fb5bd..38bd3a21 100644
--- a/src/Persistence_representations/doc/Persistence_representations_doc.h
+++ b/src/Persistence_representations/doc/Persistence_representations_doc.h
@@ -67,7 +67,7 @@ namespace Persistence_representations {
\li Concept of representation of persistence that allows computations of distances.
\li Concept of representation of persistence that allows computations of scalar products.
\li Concept of representation of persistence that allows vectorization.
- \li Concept of representation of persistence that allows computations of real--valued characteristics of objects.
+ \li Concept of representation of persistence that allows computations of real-valued characteristics of objects.
At the moment an implementation of the following representations of persistence are available (further details of
@@ -250,7 +250,6 @@ namespace Persistence_representations {
absolute value of differences between coordinates. A scalar product is a sum of products of
values at the corresponding positions of two vectors.
- \copyright GNU General Public License v3.
*/
/** @} */ // end defgroup Persistence_representations
diff --git a/src/Persistence_representations/example/CMakeLists.txt b/src/Persistence_representations/example/CMakeLists.txt
index b8ce8ea6..eb3258f8 100644
--- a/src/Persistence_representations/example/CMakeLists.txt
+++ b/src/Persistence_representations/example/CMakeLists.txt
@@ -2,28 +2,23 @@ cmake_minimum_required(VERSION 2.6)
project(Persistence_representations_example)
add_executable ( Persistence_representations_example_landscape_on_grid persistence_landscape_on_grid.cpp )
-target_link_libraries(Persistence_representations_example_landscape_on_grid ${Boost_SYSTEM_LIBRARY})
add_test(NAME Persistence_representations_example_landscape_on_grid
COMMAND $<TARGET_FILE:Persistence_representations_example_landscape_on_grid>)
add_executable ( Persistence_representations_example_landscape persistence_landscape.cpp )
-target_link_libraries(Persistence_representations_example_landscape ${Boost_SYSTEM_LIBRARY})
add_test(NAME Persistence_representations_example_landscape
COMMAND $<TARGET_FILE:Persistence_representations_example_landscape>)
add_executable ( Persistence_representations_example_intervals persistence_intervals.cpp )
-target_link_libraries(Persistence_representations_example_intervals ${Boost_SYSTEM_LIBRARY})
add_test(NAME Persistence_representations_example_intervals
COMMAND $<TARGET_FILE:Persistence_representations_example_intervals>
"${CMAKE_SOURCE_DIR}/data/persistence_diagram/first.pers")
add_executable ( Persistence_representations_example_vectors persistence_vectors.cpp )
-target_link_libraries(Persistence_representations_example_vectors ${Boost_SYSTEM_LIBRARY})
add_test(NAME Persistence_representations_example_vectors
COMMAND $<TARGET_FILE:Persistence_representations_example_vectors>)
add_executable ( Persistence_representations_example_heat_maps persistence_heat_maps.cpp )
-target_link_libraries(Persistence_representations_example_heat_maps ${Boost_SYSTEM_LIBRARY})
add_test(NAME Persistence_representations_example_heat_maps
COMMAND $<TARGET_FILE:Persistence_representations_example_heat_maps>)
diff --git a/src/Persistence_representations/include/gudhi/read_persistence_from_file.h b/src/Persistence_representations/include/gudhi/read_persistence_from_file.h
index 450c223c..83b89d0e 100644
--- a/src/Persistence_representations/include/gudhi/read_persistence_from_file.h
+++ b/src/Persistence_representations/include/gudhi/read_persistence_from_file.h
@@ -23,6 +23,8 @@
#ifndef READ_PERSISTENCE_FROM_FILE_H_
#define READ_PERSISTENCE_FROM_FILE_H_
+#include <gudhi/reader_utils.h>
+
#include <iostream>
#include <fstream>
#include <sstream>
@@ -30,7 +32,7 @@
#include <algorithm>
#include <string>
#include <utility>
-#include <gudhi/reader_utils.h>
+#include <limits> // for std::numeric_limits<>
namespace Gudhi {
namespace Persistence_representations {
@@ -72,16 +74,9 @@ std::vector<std::pair<double, double> > read_persistence_intervals_in_one_dimens
std::cout << "COnsidering interval : " << barcode_initial[i].first << " " << barcode_initial[i].second
<< std::endl;
}
- // if ( barcode_initial[i].first == barcode_initial[i].second )
- //{
- // if ( dbg )std::cout << "It has zero length \n";
- // continue;//zero length intervals are not relevant, so we skip all of them.
- //}
- if (barcode_initial[i].first >
- barcode_initial[i]
- .second) // note that in this case barcode_initial[i].second != std::numeric_limits<double>::infinity()
- {
+ if (barcode_initial[i].first > barcode_initial[i].second) {
+ // note that in this case barcode_initial[i].second != std::numeric_limits<double>::infinity()
if (dbg) std::cout << "Swap and enter \n";
// swap them to make sure that birth < death
final_barcode.push_back(std::pair<double, double>(barcode_initial[i].second, barcode_initial[i].first));
diff --git a/src/Persistence_representations/test/CMakeLists.txt b/src/Persistence_representations/test/CMakeLists.txt
index 4483de07..335a71ef 100644
--- a/src/Persistence_representations/test/CMakeLists.txt
+++ b/src/Persistence_representations/test/CMakeLists.txt
@@ -6,38 +6,38 @@ include(GUDHI_test_coverage)
# copy data directory for tests purpose.
file(COPY data DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
add_executable ( Persistence_intervals_test_unit persistence_intervals_test.cpp )
-target_link_libraries(Persistence_intervals_test_unit ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
+target_link_libraries(Persistence_intervals_test_unit ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
gudhi_add_coverage_test(Persistence_intervals_test_unit)
add_executable (Vector_representation_test_unit vector_representation_test.cpp )
-target_link_libraries(Vector_representation_test_unit ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
+target_link_libraries(Vector_representation_test_unit ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
gudhi_add_coverage_test(Vector_representation_test_unit)
add_executable (Persistence_lanscapes_test_unit persistence_lanscapes_test.cpp )
-target_link_libraries(Persistence_lanscapes_test_unit ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
+target_link_libraries(Persistence_lanscapes_test_unit ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
gudhi_add_coverage_test(Persistence_lanscapes_test_unit)
add_executable ( Persistence_lanscapes_on_grid_test_unit persistence_lanscapes_on_grid_test.cpp )
-target_link_libraries(Persistence_lanscapes_on_grid_test_unit ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
+target_link_libraries(Persistence_lanscapes_on_grid_test_unit ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
gudhi_add_coverage_test(Persistence_lanscapes_on_grid_test_unit)
add_executable (Persistence_heat_maps_test_unit persistence_heat_maps_test.cpp )
-target_link_libraries(Persistence_heat_maps_test_unit ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
+target_link_libraries(Persistence_heat_maps_test_unit ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
gudhi_add_coverage_test(Persistence_heat_maps_test_unit)
add_executable ( Read_persistence_from_file_test_unit read_persistence_from_file_test.cpp )
-target_link_libraries(Read_persistence_from_file_test_unit ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
+target_link_libraries(Read_persistence_from_file_test_unit ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
gudhi_add_coverage_test(Read_persistence_from_file_test_unit)
if (NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.8.1)
add_executable (Persistence_intervals_with_distances_test_unit persistence_intervals_with_distances_test.cpp )
- target_link_libraries(Persistence_intervals_with_distances_test_unit ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
+ target_link_libraries(Persistence_intervals_with_distances_test_unit ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
if (TBB_FOUND)
target_link_libraries(Persistence_intervals_with_distances_test_unit ${TBB_LIBRARIES})
endif(TBB_FOUND)
diff --git a/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h b/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h
index ceaea505..4dbe82c7 100644
--- a/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h
+++ b/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h
@@ -248,7 +248,6 @@ Simplex_tree dim: 3
Persistent_cohomology/plain_homology.cpp</a> computes the plain homology of a simple simplicial complex without
filtration values.
- \copyright GNU General Public License v3.
*/
} // namespace persistent_cohomology
diff --git a/src/Rips_complex/doc/Intro_rips_complex.h b/src/Rips_complex/doc/Intro_rips_complex.h
index 81f24c71..d09c92a4 100644
--- a/src/Rips_complex/doc/Intro_rips_complex.h
+++ b/src/Rips_complex/doc/Intro_rips_complex.h
@@ -200,8 +200,6 @@ namespace rips_complex {
* \code Sparse Rips complex is of dimension 2 - 19 simplices - 7 vertices.
* \endcode
*
- * \copyright GNU General Public License v3.
- * \verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim
*/
/** @} */ // end defgroup rips_complex
diff --git a/src/Rips_complex/utilities/rips_distance_matrix_persistence.cpp b/src/Rips_complex/utilities/rips_distance_matrix_persistence.cpp
index d38808c7..ca3c0327 100644
--- a/src/Rips_complex/utilities/rips_distance_matrix_persistence.cpp
+++ b/src/Rips_complex/utilities/rips_distance_matrix_persistence.cpp
@@ -1,5 +1,5 @@
-/* This file is part of the Gudhi Library. The Gudhi library
- * (Geometric Understanding in Higher Dimensions) is a generic C++
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
* library for computational topology.
*
* Author(s): Pawel Dlotko, Vincent Rouvreau
@@ -36,18 +36,13 @@ using Simplex_tree = Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_fast_persis
using Filtration_value = Simplex_tree::Filtration_value;
using Rips_complex = Gudhi::rips_complex::Rips_complex<Filtration_value>;
using Field_Zp = Gudhi::persistent_cohomology::Field_Zp;
-using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology<Simplex_tree, Field_Zp >;
+using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology<Simplex_tree, Field_Zp>;
using Distance_matrix = std::vector<std::vector<Filtration_value>>;
-void program_options(int argc, char * argv[]
- , std::string & csv_matrix_file
- , std::string & filediag
- , Filtration_value & threshold
- , int & dim_max
- , int & p
- , Filtration_value & min_persistence);
+void program_options(int argc, char* argv[], std::string& csv_matrix_file, std::string& filediag,
+ Filtration_value& threshold, int& dim_max, int& p, Filtration_value& min_persistence);
-int main(int argc, char * argv[]) {
+int main(int argc, char* argv[]) {
std::string csv_matrix_file;
std::string filediag;
Filtration_value threshold;
@@ -88,33 +83,28 @@ int main(int argc, char * argv[]) {
return 0;
}
-void program_options(int argc, char * argv[]
- , std::string & csv_matrix_file
- , std::string & filediag
- , Filtration_value & threshold
- , int & dim_max
- , int & p
- , Filtration_value & min_persistence) {
+void program_options(int argc, char* argv[], std::string& csv_matrix_file, std::string& filediag,
+ Filtration_value& threshold, int& dim_max, int& p, Filtration_value& min_persistence) {
namespace po = boost::program_options;
po::options_description hidden("Hidden options");
- hidden.add_options()
- ("input-file", po::value<std::string>(&csv_matrix_file),
- "Name of file containing a distance matrix. Can be square or lower triangular matrix. Separator is ';'.");
+ hidden.add_options()(
+ "input-file", po::value<std::string>(&csv_matrix_file),
+ "Name of file containing a distance matrix. Can be square or lower triangular matrix. Separator is ';'.");
po::options_description visible("Allowed options", 100);
- visible.add_options()
- ("help,h", "produce help message")
- ("output-file,o", po::value<std::string>(&filediag)->default_value(std::string()),
- "Name of file in which the persistence diagram is written. Default print in std::cout")
- ("max-edge-length,r",
- po::value<Filtration_value>(&threshold)->default_value(std::numeric_limits<Filtration_value>::infinity()),
- "Maximal length of an edge for the Rips complex construction.")
- ("cpx-dimension,d", po::value<int>(&dim_max)->default_value(1),
- "Maximal dimension of the Rips complex we want to compute.")
- ("field-charac,p", po::value<int>(&p)->default_value(11),
- "Characteristic p of the coefficient field Z/pZ for computing homology.")
- ("min-persistence,m", po::value<Filtration_value>(&min_persistence),
- "Minimal lifetime of homology feature to be recorded. Default is 0. Enter a negative value to see zero length intervals");
+ visible.add_options()("help,h", "produce help message")(
+ "output-file,o", po::value<std::string>(&filediag)->default_value(std::string()),
+ "Name of file in which the persistence diagram is written. Default print in std::cout")(
+ "max-edge-length,r",
+ po::value<Filtration_value>(&threshold)->default_value(std::numeric_limits<Filtration_value>::infinity()),
+ "Maximal length of an edge for the Rips complex construction.")(
+ "cpx-dimension,d", po::value<int>(&dim_max)->default_value(1),
+ "Maximal dimension of the Rips complex we want to compute.")(
+ "field-charac,p", po::value<int>(&p)->default_value(11),
+ "Characteristic p of the coefficient field Z/pZ for computing homology.")(
+ "min-persistence,m", po::value<Filtration_value>(&min_persistence),
+ "Minimal lifetime of homology feature to be recorded. Default is 0. Enter a negative value to see zero length "
+ "intervals");
po::positional_options_description pos;
pos.add("input-file", 1);
@@ -123,8 +113,7 @@ void program_options(int argc, char * argv[]
all.add(visible).add(hidden);
po::variables_map vm;
- po::store(po::command_line_parser(argc, argv).
- options(all).positional(pos).run(), vm);
+ po::store(po::command_line_parser(argc, argv).options(all).positional(pos).run(), vm);
po::notify(vm);
if (vm.count("help") || !vm.count("input-file")) {
diff --git a/src/Rips_complex/utilities/rips_persistence.cpp b/src/Rips_complex/utilities/rips_persistence.cpp
index d504798b..8405c014 100644
--- a/src/Rips_complex/utilities/rips_persistence.cpp
+++ b/src/Rips_complex/utilities/rips_persistence.cpp
@@ -1,5 +1,5 @@
-/* This file is part of the Gudhi Library. The Gudhi library
- * (Geometric Understanding in Higher Dimensions) is a generic C++
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
* library for computational topology.
*
* Author(s): Clément Maria
@@ -37,19 +37,14 @@ using Simplex_tree = Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_fast_persis
using Filtration_value = Simplex_tree::Filtration_value;
using Rips_complex = Gudhi::rips_complex::Rips_complex<Filtration_value>;
using Field_Zp = Gudhi::persistent_cohomology::Field_Zp;
-using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology<Simplex_tree, Field_Zp >;
+using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology<Simplex_tree, Field_Zp>;
using Point = std::vector<double>;
using Points_off_reader = Gudhi::Points_off_reader<Point>;
-void program_options(int argc, char * argv[]
- , std::string & off_file_points
- , std::string & filediag
- , Filtration_value & threshold
- , int & dim_max
- , int & p
- , Filtration_value & min_persistence);
+void program_options(int argc, char* argv[], std::string& off_file_points, std::string& filediag,
+ Filtration_value& threshold, int& dim_max, int& p, Filtration_value& min_persistence);
-int main(int argc, char * argv[]) {
+int main(int argc, char* argv[]) {
std::string off_file_points;
std::string filediag;
Filtration_value threshold;
@@ -91,33 +86,27 @@ int main(int argc, char * argv[]) {
return 0;
}
-void program_options(int argc, char * argv[]
- , std::string & off_file_points
- , std::string & filediag
- , Filtration_value & threshold
- , int & dim_max
- , int & p
- , Filtration_value & min_persistence) {
+void program_options(int argc, char* argv[], std::string& off_file_points, std::string& filediag,
+ Filtration_value& threshold, int& dim_max, int& p, Filtration_value& min_persistence) {
namespace po = boost::program_options;
po::options_description hidden("Hidden options");
- hidden.add_options()
- ("input-file", po::value<std::string>(&off_file_points),
- "Name of an OFF file containing a point set.\n");
+ hidden.add_options()("input-file", po::value<std::string>(&off_file_points),
+ "Name of an OFF file containing a point set.\n");
po::options_description visible("Allowed options", 100);
- visible.add_options()
- ("help,h", "produce help message")
- ("output-file,o", po::value<std::string>(&filediag)->default_value(std::string()),
- "Name of file in which the persistence diagram is written. Default print in std::cout")
- ("max-edge-length,r",
- po::value<Filtration_value>(&threshold)->default_value(std::numeric_limits<Filtration_value>::infinity()),
- "Maximal length of an edge for the Rips complex construction.")
- ("cpx-dimension,d", po::value<int>(&dim_max)->default_value(1),
- "Maximal dimension of the Rips complex we want to compute.")
- ("field-charac,p", po::value<int>(&p)->default_value(11),
- "Characteristic p of the coefficient field Z/pZ for computing homology.")
- ("min-persistence,m", po::value<Filtration_value>(&min_persistence),
- "Minimal lifetime of homology feature to be recorded. Default is 0. Enter a negative value to see zero length intervals");
+ visible.add_options()("help,h", "produce help message")(
+ "output-file,o", po::value<std::string>(&filediag)->default_value(std::string()),
+ "Name of file in which the persistence diagram is written. Default print in std::cout")(
+ "max-edge-length,r",
+ po::value<Filtration_value>(&threshold)->default_value(std::numeric_limits<Filtration_value>::infinity()),
+ "Maximal length of an edge for the Rips complex construction.")(
+ "cpx-dimension,d", po::value<int>(&dim_max)->default_value(1),
+ "Maximal dimension of the Rips complex we want to compute.")(
+ "field-charac,p", po::value<int>(&p)->default_value(11),
+ "Characteristic p of the coefficient field Z/pZ for computing homology.")(
+ "min-persistence,m", po::value<Filtration_value>(&min_persistence),
+ "Minimal lifetime of homology feature to be recorded. Default is 0. Enter a negative value to see zero length "
+ "intervals");
po::positional_options_description pos;
pos.add("input-file", 1);
@@ -126,8 +115,7 @@ void program_options(int argc, char * argv[]
all.add(visible).add(hidden);
po::variables_map vm;
- po::store(po::command_line_parser(argc, argv).
- options(all).positional(pos).run(), vm);
+ po::store(po::command_line_parser(argc, argv).options(all).positional(pos).run(), vm);
po::notify(vm);
if (vm.count("help") || !vm.count("input-file")) {
diff --git a/src/Rips_complex/utilities/ripscomplex.md b/src/Rips_complex/utilities/ripscomplex.md
new file mode 100644
index 00000000..3bf10aee
--- /dev/null
+++ b/src/Rips_complex/utilities/ripscomplex.md
@@ -0,0 +1,75 @@
+
+
+# Rips complex #
+
+## rips_persistence ##
+This program computes the persistent homology with coefficient field *Z/pZ* of a Rips complex defined on a set of input points, using Euclidean distance. The output diagram contains one bar per line, written with the convention:
+
+`p dim birth death`
+
+where `dim` is the dimension of the homological feature, `birth` and `death` are respectively the birth and death of the feature, and `p` is the characteristic of the field *Z/pZ* used for homology coefficients (`p` must be a prime number).
+
+**Usage**
+
+`rips_persistence [options] <OFF input file>`
+
+**Allowed options**
+
+* `-h [ --help ]` Produce help message
+* `-h [ --help ]` Produce help message
+* `-o [ --output-file ]` Name of the file in which the persistence diagram is written. By default, the diagram is printed to standard output.
+* `-r [ --max-edge-length ]` (default = inf) Maximal length of an edge for the Rips complex construction.
+* `-d [ --cpx-dimension ]` (default = 1) Maximal dimension of the Rips complex we want to compute.
+* `-p [ --field-charac ]` (default = 11) Characteristic p of the coefficient field Z/pZ for computing homology.
+* `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals.
+
+Beware: this program may use a lot of RAM and take a lot of time if `max-edge-length` is set to a large value.
+
+**Example 1 with Z/2Z coefficients**
+
+`rips_persistence ../../data/points/tore3D_1307.off -r 0.25 -m 0.5 -d 3 -p 2`
+
+**Example 2 with Z/3Z coefficients**
+
+`rips_persistence ../../data/points/tore3D_1307.off -r 0.25 -m 0.5 -d 3 -p 3`
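+
+The same computation is available through the C++ API. Below is a minimal sketch modelled on the `rips_persistence.cpp` utility modified in this patch; the OFF file path and the numeric values are placeholders standing in for the command-line options above.
+
+```cpp
+#include <gudhi/Rips_complex.h>
+#include <gudhi/distance_functions.h>
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Persistent_cohomology.h>
+#include <gudhi/Points_off_reader.h>
+
+#include <vector>
+
+using Simplex_tree = Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_fast_persistence>;
+using Filtration_value = Simplex_tree::Filtration_value;
+using Rips_complex = Gudhi::rips_complex::Rips_complex<Filtration_value>;
+using Field_Zp = Gudhi::persistent_cohomology::Field_Zp;
+using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology<Simplex_tree, Field_Zp>;
+using Point = std::vector<double>;
+
+int main() {
+  // Read the point cloud from an OFF file (placeholder path).
+  Gudhi::Points_off_reader<Point> off_reader("../../data/points/tore3D_1307.off");
+  // Build the Rips complex with maximal edge length 0.25 (the -r option).
+  Rips_complex rips_complex(off_reader.get_point_cloud(), 0.25, Gudhi::Euclidean_distance());
+  // Expand it up to dimension 3 (the -d option) in a simplex tree.
+  Simplex_tree stree;
+  rips_complex.create_complex(stree, 3);
+  stree.initialize_filtration();
+  // Persistent cohomology over Z/2Z (the -p option), keeping features with
+  // lifetime above 0.5 (the -m option); prints "p dim birth death" lines.
+  Persistent_cohomology pcoh(stree);
+  pcoh.init_coefficients(2);
+  pcoh.compute_persistent_cohomology(0.5);
+  pcoh.output_diagram();
+  return 0;
+}
+```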
+
+
+## rips_distance_matrix_persistence ##
+
+Same as `rips_persistence` but taking a distance matrix as input.
+
+**Usage**
+
+`rips_distance_matrix_persistence [options] <CSV input file>`
+
+where
+`<CSV input file>` is the path to the file containing the distance matrix, which can be a square or a lower triangular matrix. The separator is ';'.
+
+**Example**
+
+`rips_distance_matrix_persistence data/distance_matrix/full_square_distance_matrix.csv -r 15 -d 3 -p 3 -m 0`
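+
+As an illustration only (this is not a file shipped with the library), a square distance matrix for three points could look as follows; a lower triangular file simply omits the redundant upper part of each row:
+
+```
+0;1.2;0.8
+1.2;0;0.5
+0.8;0.5;0
+```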
+
+
+## sparse_rips_persistence ##
+This program computes the persistent homology with coefficient field *Z/pZ*
+of a sparse (1+epsilon)-approximation of the Rips complex defined on a set of input points. The output diagram contains one bar per line, written with the convention:
+
+`p dim birth death`
+
+where `dim` is the dimension of the homological feature, `birth` and `death` are respectively the birth and death of the feature, and `p` is the characteristic of the field *Z/pZ* used for homology coefficients (`p` must be a prime number).
+
+**Usage**
+
+`sparse_rips_persistence [options] <OFF input file>`
+
+**Allowed options**
+
+* `-h [ --help ]` Produce help message
+* `-o [ --output-file ]` Name of the file in which the persistence diagram is written. By default, the diagram is printed to standard output.
+* `-e [ --approximation ]` (default = .5) Epsilon, where the sparse Rips complex is a (1+epsilon)-approximation of the Rips complex.
+* `-d [ --cpx-dimension ]` (default = 1) Maximal dimension of the Rips complex we want to compute.
+* `-p [ --field-charac ]` (default = 11) Characteristic p of the coefficient field Z/pZ for computing homology.
+* `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals.
+
+**Example with Z/2Z coefficients**
+
+`sparse_rips_persistence ../../data/points/tore3D_1307.off -e .5 -m .2 -d 3 -p 2`
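+
+In C++, only the construction of the complex changes with respect to `rips_persistence`. The sketch below assumes that the `Sparse_rips_complex` class wrapped by this utility follows the same construction and `create_complex` pattern as `Rips_complex`; the path, the epsilon value and the other numbers mirror the example above and are placeholders.
+
+```cpp
+// Sketch only: assumes Sparse_rips_complex mirrors the Rips_complex interface.
+#include <gudhi/Sparse_rips_complex.h>
+#include <gudhi/distance_functions.h>
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Persistent_cohomology.h>
+#include <gudhi/Points_off_reader.h>
+
+#include <vector>
+
+using Simplex_tree = Gudhi::Simplex_tree<Gudhi::Simplex_tree_options_fast_persistence>;
+using Filtration_value = Simplex_tree::Filtration_value;
+using Sparse_rips = Gudhi::rips_complex::Sparse_rips_complex<Filtration_value>;
+using Field_Zp = Gudhi::persistent_cohomology::Field_Zp;
+using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology<Simplex_tree, Field_Zp>;
+using Point = std::vector<double>;
+
+int main() {
+  Gudhi::Points_off_reader<Point> off_reader("../../data/points/tore3D_1307.off");
+  // epsilon = 0.5 plays the role of the -e option.
+  Sparse_rips sparse_rips(off_reader.get_point_cloud(), Gudhi::Euclidean_distance(), 0.5);
+  Simplex_tree stree;
+  sparse_rips.create_complex(stree, 3);  // 3 is the -d option
+  stree.initialize_filtration();
+  // Persistence over Z/2Z (-p 2), reporting features with lifetime above 0.2 (-m .2).
+  Persistent_cohomology pcoh(stree);
+  pcoh.init_coefficients(2);
+  pcoh.compute_persistent_cohomology(0.2);
+  pcoh.output_diagram();
+  return 0;
+}
+```
+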
diff --git a/src/Simplex_tree/doc/Intro_simplex_tree.h b/src/Simplex_tree/doc/Intro_simplex_tree.h
index 769491d9..6b80d1c9 100644
--- a/src/Simplex_tree/doc/Intro_simplex_tree.h
+++ b/src/Simplex_tree/doc/Intro_simplex_tree.h
@@ -79,7 +79,6 @@ Number of vertices = 10 Number of simplices = 98 \endcode
* 1 incidence relations in a complex. It is consequently faster when accessing the boundary of a simplex, but is less
* compact and harder to construct from scratch.
*
- * \copyright GNU General Public License v3.
* @}
*/
diff --git a/src/Simplex_tree/example/cech_complex_cgal_mini_sphere_3d.cpp b/src/Simplex_tree/example/cech_complex_cgal_mini_sphere_3d.cpp
index 217e251f..9bd51106 100644
--- a/src/Simplex_tree/example/cech_complex_cgal_mini_sphere_3d.cpp
+++ b/src/Simplex_tree/example/cech_complex_cgal_mini_sphere_3d.cpp
@@ -1,5 +1,5 @@
-/* This file is part of the Gudhi Library. The Gudhi library
- * (Geometric Understanding in Higher Dimensions) is a generic C++
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
* library for computational topology.
*
* Author(s): Clément Maria
@@ -33,7 +33,7 @@
#include <string>
#include <vector>
-#include <limits> // infinity
+#include <limits> // infinity
#include <utility> // for pair
#include <map>
@@ -50,15 +50,14 @@ using Vertex_handle = Simplex_tree::Vertex_handle;
using Simplex_handle = Simplex_tree::Simplex_handle;
using Filtration_value = Simplex_tree::Filtration_value;
using Siblings = Simplex_tree::Siblings;
-using Graph_t = boost::adjacency_list < boost::vecS, boost::vecS, boost::undirectedS
-, boost::property < Gudhi::vertex_filtration_t, Filtration_value >
-, boost::property < Gudhi::edge_filtration_t, Filtration_value >
->;
-using Edge_t = std::pair< Vertex_handle, Vertex_handle >;
+using Graph_t = boost::adjacency_list<boost::vecS, boost::vecS, boost::undirectedS,
+ boost::property<Gudhi::vertex_filtration_t, Filtration_value>,
+ boost::property<Gudhi::edge_filtration_t, Filtration_value> >;
+using Edge_t = std::pair<Vertex_handle, Vertex_handle>;
-using Kernel = CGAL::Epick_d< CGAL::Dimension_tag<3> >;
+using Kernel = CGAL::Epick_d<CGAL::Dimension_tag<3> >;
using Point = Kernel::Point_d;
-using Traits = CGAL::Min_sphere_of_points_d_traits_d<Kernel,Filtration_value,3>;
+using Traits = CGAL::Min_sphere_of_points_d_traits_d<Kernel, Filtration_value, 3>;
using Min_sphere = CGAL::Min_sphere_of_spheres_d<Traits>;
using Points_off_reader = Gudhi::Points_off_reader<Point>;
@@ -76,7 +75,7 @@ class Cech_blocker {
std::cout << vertex << ", ";
#endif // DEBUG_TRACES
}
- Min_sphere ms(points.begin(),points.end());
+ Min_sphere ms(points.begin(), points.end());
Filtration_value radius = ms.radius();
#if DEBUG_TRACES
std::cout << "] - radius = " << radius << " - returns " << (radius > threshold_) << std::endl;
@@ -85,24 +84,20 @@ class Cech_blocker {
return (radius > threshold_);
}
Cech_blocker(Simplex_tree& simplex_tree, Filtration_value threshold, const std::vector<Point>& point_cloud)
- : simplex_tree_(simplex_tree),
- threshold_(threshold),
- point_cloud_(point_cloud) { }
+ : simplex_tree_(simplex_tree), threshold_(threshold), point_cloud_(point_cloud) {}
+
private:
Simplex_tree simplex_tree_;
Filtration_value threshold_;
std::vector<Point> point_cloud_;
};
-template< typename InputPointRange>
-Graph_t compute_proximity_graph(InputPointRange &points, Filtration_value threshold);
+template <typename InputPointRange>
+Graph_t compute_proximity_graph(InputPointRange& points, Filtration_value threshold);
-void program_options(int argc, char * argv[]
- , std::string & off_file_points
- , Filtration_value & threshold
- , int & dim_max);
+void program_options(int argc, char* argv[], std::string& off_file_points, Filtration_value& threshold, int& dim_max);
-int main(int argc, char * argv[]) {
+int main(int argc, char* argv[]) {
std::string off_file_points;
Filtration_value threshold;
int dim_max;
@@ -115,7 +110,7 @@ int main(int argc, char * argv[]) {
// Compute the proximity graph of the points
Graph_t prox_graph = compute_proximity_graph(off_reader.get_point_cloud(), threshold);
- //Min_sphere sph1(off_reader.get_point_cloud()[0], off_reader.get_point_cloud()[1], off_reader.get_point_cloud()[2]);
+ // Min_sphere sph1(off_reader.get_point_cloud()[0], off_reader.get_point_cloud()[1], off_reader.get_point_cloud()[2]);
// Construct the Rips complex in a Simplex Tree
Simplex_tree st;
// insert the proximity graph in the simplex tree
@@ -135,7 +130,8 @@ int main(int argc, char * argv[]) {
std::cout << "* The complex contains " << st.num_simplices() << " simplices - dimension=" << st.dimension() << "\n";
std::cout << "* Iterator on Simplices in the filtration, with [filtration value]:\n";
for (auto f_simplex : st.filtration_simplex_range()) {
- std::cout << " " << "[" << st.filtration(f_simplex) << "] ";
+ std::cout << " "
+ << "[" << st.filtration(f_simplex) << "] ";
for (auto vertex : st.simplex_vertex_range(f_simplex)) {
std::cout << static_cast<int>(vertex) << " ";
}
@@ -145,24 +141,19 @@ int main(int argc, char * argv[]) {
return 0;
}
-void program_options(int argc, char * argv[]
- , std::string & off_file_points
- , Filtration_value & threshold
- , int & dim_max) {
+void program_options(int argc, char* argv[], std::string& off_file_points, Filtration_value& threshold, int& dim_max) {
namespace po = boost::program_options;
po::options_description hidden("Hidden options");
- hidden.add_options()
- ("input-file", po::value<std::string>(&off_file_points),
- "Name of an OFF file containing a 3d point set.\n");
+ hidden.add_options()("input-file", po::value<std::string>(&off_file_points),
+ "Name of an OFF file containing a 3d point set.\n");
po::options_description visible("Allowed options", 100);
- visible.add_options()
- ("help,h", "produce help message")
- ("max-edge-length,r",
- po::value<Filtration_value>(&threshold)->default_value(std::numeric_limits<Filtration_value>::infinity()),
- "Maximal length of an edge for the Cech complex construction.")
- ("cpx-dimension,d", po::value<int>(&dim_max)->default_value(1),
- "Maximal dimension of the Cech complex we want to compute.");
+ visible.add_options()("help,h", "produce help message")(
+ "max-edge-length,r",
+ po::value<Filtration_value>(&threshold)->default_value(std::numeric_limits<Filtration_value>::infinity()),
+ "Maximal length of an edge for the Cech complex construction.")(
+ "cpx-dimension,d", po::value<int>(&dim_max)->default_value(1),
+ "Maximal dimension of the Cech complex we want to compute.");
po::positional_options_description pos;
pos.add("input-file", 1);
@@ -171,8 +162,7 @@ void program_options(int argc, char * argv[]
all.add(visible).add(hidden);
po::variables_map vm;
- po::store(po::command_line_parser(argc, argv).
- options(all).positional(pos).run(), vm);
+ po::store(po::command_line_parser(argc, argv).options(all).positional(pos).run(), vm);
po::notify(vm);
if (vm.count("help") || !vm.count("input-file")) {
@@ -194,10 +184,10 @@ void program_options(int argc, char * argv[]
* The type PointCloud furnishes .begin() and .end() methods, that return
* iterators with value_type Point.
*/
-template< typename InputPointRange>
-Graph_t compute_proximity_graph(InputPointRange &points, Filtration_value threshold) {
- std::vector< Edge_t > edges;
- std::vector< Filtration_value > edges_fil;
+template <typename InputPointRange>
+Graph_t compute_proximity_graph(InputPointRange& points, Filtration_value threshold) {
+ std::vector<Edge_t> edges;
+ std::vector<Filtration_value> edges_fil;
Kernel k;
Vertex_handle idx_u, idx_v;
@@ -217,16 +207,13 @@ Graph_t compute_proximity_graph(InputPointRange &points, Filtration_value thresh
++idx_u;
}
- Graph_t skel_graph(edges.begin()
- , edges.end()
- , edges_fil.begin()
- , idx_u); // number of points labeled from 0 to idx_u-1
+ Graph_t skel_graph(edges.begin(), edges.end(), edges_fil.begin(),
+ idx_u); // number of points labeled from 0 to idx_u-1
auto vertex_prop = boost::get(Gudhi::vertex_filtration_t(), skel_graph);
boost::graph_traits<Graph_t>::vertex_iterator vi, vi_end;
- for (std::tie(vi, vi_end) = boost::vertices(skel_graph);
- vi != vi_end; ++vi) {
+ for (std::tie(vi, vi_end) = boost::vertices(skel_graph); vi != vi_end; ++vi) {
boost::put(vertex_prop, *vi, 0.);
}
diff --git a/src/Simplex_tree/example/graph_expansion_with_blocker.cpp b/src/Simplex_tree/example/graph_expansion_with_blocker.cpp
index 86bfb8cb..0d458cbd 100644
--- a/src/Simplex_tree/example/graph_expansion_with_blocker.cpp
+++ b/src/Simplex_tree/example/graph_expansion_with_blocker.cpp
@@ -27,8 +27,7 @@
using Simplex_tree = Gudhi::Simplex_tree<>;
using Simplex_handle = Simplex_tree::Simplex_handle;
-int main(int argc, char * const argv[]) {
-
+int main(int argc, char* const argv[]) {
// Construct the Simplex Tree with a 1-skeleton graph example
Simplex_tree simplexTree;
@@ -45,33 +44,32 @@ int main(int argc, char * const argv[]) {
simplexTree.insert_simplex({5, 6}, 10.);
simplexTree.insert_simplex({6}, 10.);
- simplexTree.expansion_with_blockers(3, [&](Simplex_handle sh){
- bool result = false;
- std::cout << "Blocker on [";
- // User can loop on the vertices from the given simplex_handle i.e.
- for (auto vertex : simplexTree.simplex_vertex_range(sh)) {
- // We block the expansion, if the vertex '6' is in the given list of vertices
- if (vertex == 6)
- result = true;
- std::cout << vertex << ", ";
- }
- std::cout << "] ( " << simplexTree.filtration(sh);
- // User can re-assign a new filtration value directly in the blocker (default is the maximal value of boudaries)
- simplexTree.assign_filtration(sh, simplexTree.filtration(sh) + 1.);
+ simplexTree.expansion_with_blockers(3, [&](Simplex_handle sh) {
+ bool result = false;
+ std::cout << "Blocker on [";
+    // The user can loop over the vertices of the given simplex_handle as follows:
+ for (auto vertex : simplexTree.simplex_vertex_range(sh)) {
+      // We block the expansion if the vertex '6' is in the given list of vertices
+ if (vertex == 6) result = true;
+ std::cout << vertex << ", ";
+ }
+ std::cout << "] ( " << simplexTree.filtration(sh);
+    // The user can re-assign a new filtration value directly in the blocker (default is the maximal value of the boundaries)
+ simplexTree.assign_filtration(sh, simplexTree.filtration(sh) + 1.);
- std::cout << " + 1. ) = " << result << std::endl;
+ std::cout << " + 1. ) = " << result << std::endl;
- return result;
- });
+ return result;
+ });
std::cout << "********************************************************************\n";
std::cout << "* The complex contains " << simplexTree.num_simplices() << " simplices";
std::cout << " - dimension " << simplexTree.dimension() << "\n";
std::cout << "* Iterator on Simplices in the filtration, with [filtration value]:\n";
for (auto f_simplex : simplexTree.filtration_simplex_range()) {
- std::cout << " " << "[" << simplexTree.filtration(f_simplex) << "] ";
- for (auto vertex : simplexTree.simplex_vertex_range(f_simplex))
- std::cout << "(" << vertex << ")";
+ std::cout << " "
+ << "[" << simplexTree.filtration(f_simplex) << "] ";
+ for (auto vertex : simplexTree.simplex_vertex_range(f_simplex)) std::cout << "(" << vertex << ")";
std::cout << std::endl;
}
diff --git a/src/Simplex_tree/example/simple_simplex_tree.cpp b/src/Simplex_tree/example/simple_simplex_tree.cpp
index b6b65b88..828977c2 100644
--- a/src/Simplex_tree/example/simple_simplex_tree.cpp
+++ b/src/Simplex_tree/example/simple_simplex_tree.cpp
@@ -30,10 +30,10 @@
using Simplex_tree = Gudhi::Simplex_tree<>;
using Vertex_handle = Simplex_tree::Vertex_handle;
using Filtration_value = Simplex_tree::Filtration_value;
-using typeVectorVertex = std::vector< Vertex_handle >;
-using typePairSimplexBool = std::pair< Simplex_tree::Simplex_handle, bool >;
+using typeVectorVertex = std::vector<Vertex_handle>;
+using typePairSimplexBool = std::pair<Simplex_tree::Simplex_handle, bool>;
-int main(int argc, char * const argv[]) {
+int main(int argc, char* const argv[]) {
const Filtration_value FIRST_FILTRATION_VALUE = 0.1;
const Filtration_value SECOND_FILTRATION_VALUE = 0.2;
const Filtration_value THIRD_FILTRATION_VALUE = 0.3;
@@ -54,7 +54,7 @@ int main(int argc, char * const argv[]) {
// ++ FIRST
std::cout << " * INSERT 0" << std::endl;
- typeVectorVertex firstSimplexVector = { 0 };
+ typeVectorVertex firstSimplexVector = {0};
typePairSimplexBool returnValue =
simplexTree.insert_simplex(firstSimplexVector, Filtration_value(FIRST_FILTRATION_VALUE));
@@ -66,9 +66,8 @@ int main(int argc, char * const argv[]) {
// ++ SECOND
std::cout << " * INSERT 1" << std::endl;
- typeVectorVertex secondSimplexVector = { 1 };
- returnValue =
- simplexTree.insert_simplex(secondSimplexVector, Filtration_value(FIRST_FILTRATION_VALUE));
+ typeVectorVertex secondSimplexVector = {1};
+ returnValue = simplexTree.insert_simplex(secondSimplexVector, Filtration_value(FIRST_FILTRATION_VALUE));
if (returnValue.second == true) {
std::cout << " + 1 INSERTED" << std::endl;
@@ -78,9 +77,8 @@ int main(int argc, char * const argv[]) {
// ++ THIRD
std::cout << " * INSERT (0,1)" << std::endl;
- typeVectorVertex thirdSimplexVector = { 0, 1 };
- returnValue =
- simplexTree.insert_simplex(thirdSimplexVector, Filtration_value(SECOND_FILTRATION_VALUE));
+ typeVectorVertex thirdSimplexVector = {0, 1};
+ returnValue = simplexTree.insert_simplex(thirdSimplexVector, Filtration_value(SECOND_FILTRATION_VALUE));
if (returnValue.second == true) {
std::cout << " + (0,1) INSERTED" << std::endl;
@@ -90,9 +88,8 @@ int main(int argc, char * const argv[]) {
// ++ FOURTH
std::cout << " * INSERT 2" << std::endl;
- typeVectorVertex fourthSimplexVector = { 2 };
- returnValue =
- simplexTree.insert_simplex(fourthSimplexVector, Filtration_value(FIRST_FILTRATION_VALUE));
+ typeVectorVertex fourthSimplexVector = {2};
+ returnValue = simplexTree.insert_simplex(fourthSimplexVector, Filtration_value(FIRST_FILTRATION_VALUE));
if (returnValue.second == true) {
std::cout << " + 2 INSERTED" << std::endl;
@@ -102,9 +99,8 @@ int main(int argc, char * const argv[]) {
// ++ FIFTH
std::cout << " * INSERT (2,0)" << std::endl;
- typeVectorVertex fifthSimplexVector = { 2, 0 };
- returnValue =
- simplexTree.insert_simplex(fifthSimplexVector, Filtration_value(SECOND_FILTRATION_VALUE));
+ typeVectorVertex fifthSimplexVector = {2, 0};
+ returnValue = simplexTree.insert_simplex(fifthSimplexVector, Filtration_value(SECOND_FILTRATION_VALUE));
if (returnValue.second == true) {
std::cout << " + (2,0) INSERTED" << std::endl;
@@ -114,9 +110,8 @@ int main(int argc, char * const argv[]) {
// ++ SIXTH
std::cout << " * INSERT (2,1)" << std::endl;
- typeVectorVertex sixthSimplexVector = { 2, 1 };
- returnValue =
- simplexTree.insert_simplex(sixthSimplexVector, Filtration_value(SECOND_FILTRATION_VALUE));
+ typeVectorVertex sixthSimplexVector = {2, 1};
+ returnValue = simplexTree.insert_simplex(sixthSimplexVector, Filtration_value(SECOND_FILTRATION_VALUE));
if (returnValue.second == true) {
std::cout << " + (2,1) INSERTED" << std::endl;
@@ -126,9 +121,8 @@ int main(int argc, char * const argv[]) {
// ++ SEVENTH
std::cout << " * INSERT (2,1,0)" << std::endl;
- typeVectorVertex seventhSimplexVector = { 2, 1, 0 };
- returnValue =
- simplexTree.insert_simplex(seventhSimplexVector, Filtration_value(THIRD_FILTRATION_VALUE));
+ typeVectorVertex seventhSimplexVector = {2, 1, 0};
+ returnValue = simplexTree.insert_simplex(seventhSimplexVector, Filtration_value(THIRD_FILTRATION_VALUE));
if (returnValue.second == true) {
std::cout << " + (2,1,0) INSERTED" << std::endl;
@@ -138,9 +132,8 @@ int main(int argc, char * const argv[]) {
// ++ EIGHTH
std::cout << " * INSERT 3" << std::endl;
- typeVectorVertex eighthSimplexVector = { 3 };
- returnValue =
- simplexTree.insert_simplex(eighthSimplexVector, Filtration_value(FIRST_FILTRATION_VALUE));
+ typeVectorVertex eighthSimplexVector = {3};
+ returnValue = simplexTree.insert_simplex(eighthSimplexVector, Filtration_value(FIRST_FILTRATION_VALUE));
if (returnValue.second == true) {
std::cout << " + 3 INSERTED" << std::endl;
@@ -150,9 +143,8 @@ int main(int argc, char * const argv[]) {
// ++ NINETH
std::cout << " * INSERT (3,0)" << std::endl;
- typeVectorVertex ninethSimplexVector = { 3, 0 };
- returnValue =
- simplexTree.insert_simplex(ninethSimplexVector, Filtration_value(SECOND_FILTRATION_VALUE));
+ typeVectorVertex ninethSimplexVector = {3, 0};
+ returnValue = simplexTree.insert_simplex(ninethSimplexVector, Filtration_value(SECOND_FILTRATION_VALUE));
if (returnValue.second == true) {
std::cout << " + (3,0) INSERTED" << std::endl;
@@ -162,7 +154,7 @@ int main(int argc, char * const argv[]) {
// ++ TENTH
std::cout << " * INSERT 0 (already inserted)" << std::endl;
- typeVectorVertex tenthSimplexVector = { 0 };
+ typeVectorVertex tenthSimplexVector = {0};
// With a different filtration value
returnValue = simplexTree.insert_simplex(tenthSimplexVector, Filtration_value(FOURTH_FILTRATION_VALUE));
@@ -174,9 +166,8 @@ int main(int argc, char * const argv[]) {
// ++ ELEVENTH
std::cout << " * INSERT (2,1,0) (already inserted)" << std::endl;
- typeVectorVertex eleventhSimplexVector = { 2, 1, 0 };
- returnValue =
- simplexTree.insert_simplex(eleventhSimplexVector, Filtration_value(FOURTH_FILTRATION_VALUE));
+ typeVectorVertex eleventhSimplexVector = {2, 1, 0};
+ returnValue = simplexTree.insert_simplex(eleventhSimplexVector, Filtration_value(FOURTH_FILTRATION_VALUE));
if (returnValue.second == true) {
std::cout << " + (2,1,0) INSERTED" << std::endl;
@@ -192,9 +183,9 @@ int main(int argc, char * const argv[]) {
std::cout << " - dimension " << simplexTree.dimension() << "\n";
std::cout << "* Iterator on Simplices in the filtration, with [filtration value]:\n";
for (auto f_simplex : simplexTree.filtration_simplex_range()) {
- std::cout << " " << "[" << simplexTree.filtration(f_simplex) << "] ";
- for (auto vertex : simplexTree.simplex_vertex_range(f_simplex))
- std::cout << "(" << vertex << ")";
+ std::cout << " "
+ << "[" << simplexTree.filtration(f_simplex) << "] ";
+ for (auto vertex : simplexTree.simplex_vertex_range(f_simplex)) std::cout << "(" << vertex << ")";
std::cout << std::endl;
}
// [0.1] 0
@@ -217,7 +208,7 @@ int main(int argc, char * const argv[]) {
else
std::cout << "***- NO IT ISN'T\n";
- typeVectorVertex unknownSimplexVector = { 15 };
+ typeVectorVertex unknownSimplexVector = {15};
simplexFound = simplexTree.find(unknownSimplexVector);
std::cout << "**************IS THE SIMPLEX {15} IN THE SIMPLEX TREE ?\n";
if (simplexFound != simplexTree.null_simplex())
@@ -232,7 +223,7 @@ int main(int argc, char * const argv[]) {
else
std::cout << "***- NO IT ISN'T\n";
- typeVectorVertex otherSimplexVector = { 1, 15 };
+ typeVectorVertex otherSimplexVector = {1, 15};
simplexFound = simplexTree.find(otherSimplexVector);
std::cout << "**************IS THE SIMPLEX {15,1} IN THE SIMPLEX TREE ?\n";
if (simplexFound != simplexTree.null_simplex())
@@ -240,7 +231,7 @@ int main(int argc, char * const argv[]) {
else
std::cout << "***- NO IT ISN'T\n";
- typeVectorVertex invSimplexVector = { 1, 2, 0 };
+ typeVectorVertex invSimplexVector = {1, 2, 0};
simplexFound = simplexTree.find(invSimplexVector);
std::cout << "**************IS THE SIMPLEX {1,2,0} IN THE SIMPLEX TREE ?\n";
if (simplexFound != simplexTree.null_simplex())
@@ -248,7 +239,7 @@ int main(int argc, char * const argv[]) {
else
std::cout << "***- NO IT ISN'T\n";
- simplexFound = simplexTree.find({ 0, 1 });
+ simplexFound = simplexTree.find({0, 1});
std::cout << "**************IS THE SIMPLEX {0,1} IN THE SIMPLEX TREE ?\n";
if (simplexFound != simplexTree.null_simplex())
std::cout << "***+ YES IT IS!\n";
@@ -256,23 +247,20 @@ int main(int argc, char * const argv[]) {
std::cout << "***- NO IT ISN'T\n";
std::cout << "**************COFACES OF {0,1} IN CODIMENSION 1 ARE\n";
- for (auto& simplex : simplexTree.cofaces_simplex_range(simplexTree.find({0,1}), 1)) {
- for (auto vertex : simplexTree.simplex_vertex_range(simplex))
- std::cout << "(" << vertex << ")";
+ for (auto& simplex : simplexTree.cofaces_simplex_range(simplexTree.find({0, 1}), 1)) {
+ for (auto vertex : simplexTree.simplex_vertex_range(simplex)) std::cout << "(" << vertex << ")";
std::cout << std::endl;
}
std::cout << "**************STARS OF {0,1} ARE\n";
- for (auto& simplex : simplexTree.star_simplex_range(simplexTree.find({0,1}))) {
- for (auto vertex : simplexTree.simplex_vertex_range(simplex))
- std::cout << "(" << vertex << ")";
+ for (auto& simplex : simplexTree.star_simplex_range(simplexTree.find({0, 1}))) {
+ for (auto vertex : simplexTree.simplex_vertex_range(simplex)) std::cout << "(" << vertex << ")";
std::cout << std::endl;
}
std::cout << "**************BOUNDARIES OF {0,1,2} ARE\n";
- for (auto& simplex : simplexTree.boundary_simplex_range(simplexTree.find({0,1,2}))) {
- for (auto vertex : simplexTree.simplex_vertex_range(simplex))
- std::cout << "(" << vertex << ")";
+ for (auto& simplex : simplexTree.boundary_simplex_range(simplexTree.find({0, 1, 2}))) {
+ for (auto vertex : simplexTree.simplex_vertex_range(simplex)) std::cout << "(" << vertex << ")";
std::cout << std::endl;
}
diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h
index 32fe411c..aca2aa57 100644
--- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h
+++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h
@@ -239,9 +239,6 @@ their collaboration to write the two initial papers
about this data-structure
and also Dominique for letting him use a prototype.
-
-\copyright GNU General Public License v3.
-
@} */
} // namespace skeleton_blocker
diff --git a/src/Spatial_searching/doc/Intro_spatial_searching.h b/src/Spatial_searching/doc/Intro_spatial_searching.h
index 1ee5e92e..52ed65e4 100644
--- a/src/Spatial_searching/doc/Intro_spatial_searching.h
+++ b/src/Spatial_searching/doc/Intro_spatial_searching.h
@@ -50,8 +50,6 @@ namespace spatial_searching {
*
* \include Spatial_searching/example_spatial_searching.cpp
*
- * \copyright GNU General Public License v3.
- * \verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim
*/
/** @} */ // end defgroup spatial_searching
diff --git a/src/Spatial_searching/include/gudhi/Kd_tree_search.h b/src/Spatial_searching/include/gudhi/Kd_tree_search.h
index ef428002..96bbeb36 100644
--- a/src/Spatial_searching/include/gudhi/Kd_tree_search.h
+++ b/src/Spatial_searching/include/gudhi/Kd_tree_search.h
@@ -271,8 +271,7 @@ class Kd_tree_search {
m_tree.search(it, Fuzzy_sphere(p, radius, eps, m_tree.traits()));
}
- int tree_depth() const
- {
+ int tree_depth() const {
return m_tree.root()->depth();
}
diff --git a/src/Subsampling/doc/Intro_subsampling.h b/src/Subsampling/doc/Intro_subsampling.h
index c84616dd..ab9cdc37 100644
--- a/src/Subsampling/doc/Intro_subsampling.h
+++ b/src/Subsampling/doc/Intro_subsampling.h
@@ -58,8 +58,6 @@ namespace subsampling {
* This example outputs a subset of 100 points picked randomly.
*
* \include Subsampling/example_pick_n_random_points.cpp
- * \copyright GNU General Public License v3.
- * \verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim
*/
/** @} */ // end defgroup subsampling
diff --git a/src/Tangential_complex/doc/Intro_tangential_complex.h b/src/Tangential_complex/doc/Intro_tangential_complex.h
index 3d687c1d..00e00c52 100644
--- a/src/Tangential_complex/doc/Intro_tangential_complex.h
+++ b/src/Tangential_complex/doc/Intro_tangential_complex.h
@@ -107,8 +107,6 @@ dimensions are known at compile-time.
\include Tangential_complex/example_with_perturb.cpp
-\copyright GNU General Public License v3.
-\verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim
*/
/** @} */ // end defgroup tangential_complex
diff --git a/src/Witness_complex/doc/Witness_complex_doc.h b/src/Witness_complex/doc/Witness_complex_doc.h
index 5d5c0735..62203054 100644
--- a/src/Witness_complex/doc/Witness_complex_doc.h
+++ b/src/Witness_complex/doc/Witness_complex_doc.h
@@ -117,9 +117,6 @@ int main(int argc, char * const argv[]) {
\include Witness_complex/example_nearest_landmark_table.cpp
- \copyright GNU General Public License v3.
-
-
*/
#endif // WITNESS_COMPLEX_DOC_H_
diff --git a/src/Witness_complex/example/example_strong_witness_complex_off.cpp b/src/Witness_complex/example/example_strong_witness_complex_off.cpp
index bc069654..346bef6d 100644
--- a/src/Witness_complex/example/example_strong_witness_complex_off.cpp
+++ b/src/Witness_complex/example/example_strong_witness_complex_off.cpp
@@ -39,10 +39,9 @@ using Point_d = typename K::Point_d;
using Witness_complex = Gudhi::witness_complex::Euclidean_strong_witness_complex<K>;
using Point_vector = std::vector<Point_d>;
-int main(int argc, char * const argv[]) {
+int main(int argc, char* const argv[]) {
if (argc != 5) {
- std::cerr << "Usage: " << argv[0]
- << " path_to_point_file number_of_landmarks max_squared_alpha limit_dimension\n";
+ std::cerr << "Usage: " << argv[0] << " path_to_point_file number_of_landmarks max_squared_alpha limit_dimension\n";
return 0;
}
@@ -56,9 +55,9 @@ int main(int argc, char * const argv[]) {
Point_vector point_vector, landmarks;
Gudhi::Points_off_reader<Point_d> off_reader(file_name);
if (!off_reader.is_valid()) {
- std::cerr << "Strong witness complex - Unable to read file " << file_name << "\n";
- exit(-1); // ----- >>
- }
+ std::cerr << "Strong witness complex - Unable to read file " << file_name << "\n";
+ exit(-1); // ----- >>
+ }
point_vector = Point_vector(off_reader.get_point_cloud());
std::cout << "Successfully read " << point_vector.size() << " points.\n";
@@ -66,16 +65,15 @@ int main(int argc, char * const argv[]) {
// Choose landmarks (decomment one of the following two lines)
// Gudhi::subsampling::pick_n_random_points(point_vector, nbL, std::back_inserter(landmarks));
- Gudhi::subsampling::choose_n_farthest_points(K(), point_vector, nbL, Gudhi::subsampling::random_starting_point, std::back_inserter(landmarks));
-
+ Gudhi::subsampling::choose_n_farthest_points(K(), point_vector, nbL, Gudhi::subsampling::random_starting_point,
+ std::back_inserter(landmarks));
+
// Compute witness complex
start = clock();
- Witness_complex witness_complex(landmarks,
- point_vector);
+ Witness_complex witness_complex(landmarks, point_vector);
witness_complex.create_complex(simplex_tree, alpha2, lim_dim);
end = clock();
- std::cout << "Strong witness complex took "
- << static_cast<double>(end - start) / CLOCKS_PER_SEC << " s. \n";
+ std::cout << "Strong witness complex took " << static_cast<double>(end - start) / CLOCKS_PER_SEC << " s. \n";
std::cout << "Number of simplices is: " << simplex_tree.num_simplices() << "\n";
}
diff --git a/src/Witness_complex/example/example_witness_complex_sphere.cpp b/src/Witness_complex/example/example_witness_complex_sphere.cpp
index a66da3f9..a6e9b11a 100644
--- a/src/Witness_complex/example/example_witness_complex_sphere.cpp
+++ b/src/Witness_complex/example/example_witness_complex_sphere.cpp
@@ -42,27 +42,25 @@
/** Write a gnuplot readable file.
* Data range is a random access range of pairs (arg, value)
*/
-template < typename Data_range >
-void write_data(Data_range & data, std::string filename) {
+template <typename Data_range>
+void write_data(Data_range& data, std::string filename) {
std::ofstream ofs(filename, std::ofstream::out);
- for (auto entry : data)
- ofs << entry.first << ", " << entry.second << "\n";
+ for (auto entry : data) ofs << entry.first << ", " << entry.second << "\n";
ofs.close();
}
-int main(int argc, char * const argv[]) {
+int main(int argc, char* const argv[]) {
using Kernel = CGAL::Epick_d<CGAL::Dynamic_dimension_tag>;
using Witness_complex = Gudhi::witness_complex::Euclidean_witness_complex<Kernel>;
if (argc != 2) {
- std::cerr << "Usage: " << argv[0]
- << " number_of_landmarks \n";
+ std::cerr << "Usage: " << argv[0] << " number_of_landmarks \n";
return 0;
}
int number_of_landmarks = atoi(argv[1]);
- std::vector< std::pair<int, double> > l_time;
+ std::vector<std::pair<int, double> > l_time;
// Generate points
for (int nbP = 500; nbP < 10000; nbP += 500) {
@@ -77,16 +75,16 @@ int main(int argc, char * const argv[]) {
// Choose landmarks
start = clock();
// Gudhi::subsampling::pick_n_random_points(point_vector, number_of_landmarks, std::back_inserter(landmarks));
- Gudhi::subsampling::choose_n_farthest_points(K(), point_vector, number_of_landmarks, Gudhi::subsampling::random_starting_point, std::back_inserter(landmarks));
+ Gudhi::subsampling::choose_n_farthest_points(K(), point_vector, number_of_landmarks,
+ Gudhi::subsampling::random_starting_point,
+ std::back_inserter(landmarks));
// Compute witness complex
- Witness_complex witness_complex(landmarks,
- point_vector);
+ Witness_complex witness_complex(landmarks, point_vector);
witness_complex.create_complex(simplex_tree, 0);
end = clock();
double time = static_cast<double>(end - start) / CLOCKS_PER_SEC;
- std::cout << "Witness complex for " << number_of_landmarks << " landmarks took "
- << time << " s. \n";
+ std::cout << "Witness complex for " << number_of_landmarks << " landmarks took " << time << " s. \n";
std::cout << "Number of simplices is: " << simplex_tree.num_simplices() << "\n";
l_time.push_back(std::make_pair(nbP, time));
}
diff --git a/src/Witness_complex/utilities/strong_witness_persistence.cpp b/src/Witness_complex/utilities/strong_witness_persistence.cpp
index e3e0c1ee..2fba631b 100644
--- a/src/Witness_complex/utilities/strong_witness_persistence.cpp
+++ b/src/Witness_complex/utilities/strong_witness_persistence.cpp
@@ -47,16 +47,10 @@ using Filtration_value = SimplexTree::Filtration_value;
using Field_Zp = Gudhi::persistent_cohomology::Field_Zp;
using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology<SimplexTree, Field_Zp>;
-void program_options(int argc, char * argv[]
- , int & nbL
- , std::string & file_name
- , std::string & filediag
- , Filtration_value & max_squared_alpha
- , int & p
- , int & dim_max
- , Filtration_value & min_persistence);
-
-int main(int argc, char * argv[]) {
+void program_options(int argc, char* argv[], int& nbL, std::string& file_name, std::string& filediag,
+ Filtration_value& max_squared_alpha, int& p, int& dim_max, Filtration_value& min_persistence);
+
+int main(int argc, char* argv[]) {
std::string file_name;
std::string filediag;
Filtration_value max_squared_alpha;
@@ -70,8 +64,8 @@ int main(int argc, char * argv[]) {
Point_vector witnesses, landmarks;
Gudhi::Points_off_reader<Point_d> off_reader(file_name);
if (!off_reader.is_valid()) {
- std::cerr << "Witness complex - Unable to read file " << file_name << "\n";
- exit(-1); // ----- >>
+ std::cerr << "Witness complex - Unable to read file " << file_name << "\n";
+ exit(-1); // ----- >>
}
witnesses = Point_vector(off_reader.get_point_cloud());
std::cout << "Successfully read " << witnesses.size() << " points.\n";
@@ -79,11 +73,11 @@ int main(int argc, char * argv[]) {
// Choose landmarks (decomment one of the following two lines)
// Gudhi::subsampling::pick_n_random_points(point_vector, nbL, std::back_inserter(landmarks));
- Gudhi::subsampling::choose_n_farthest_points(K(), witnesses, nbL, Gudhi::subsampling::random_starting_point, std::back_inserter(landmarks));
+ Gudhi::subsampling::choose_n_farthest_points(K(), witnesses, nbL, Gudhi::subsampling::random_starting_point,
+ std::back_inserter(landmarks));
// Compute witness complex
- Strong_witness_complex strong_witness_complex(landmarks,
- witnesses);
+ Strong_witness_complex strong_witness_complex(landmarks, witnesses);
strong_witness_complex.create_complex(simplex_tree, max_squared_alpha, lim_d);
@@ -112,37 +106,28 @@ int main(int argc, char * argv[]) {
return 0;
}
-void program_options(int argc, char * argv[]
- , int & nbL
- , std::string & file_name
- , std::string & filediag
- , Filtration_value & max_squared_alpha
- , int & p
- , int & dim_max
- , Filtration_value & min_persistence) {
+void program_options(int argc, char* argv[], int& nbL, std::string& file_name, std::string& filediag,
+ Filtration_value& max_squared_alpha, int& p, int& dim_max, Filtration_value& min_persistence) {
namespace po = boost::program_options;
po::options_description hidden("Hidden options");
- hidden.add_options()
- ("input-file", po::value<std::string>(&file_name),
- "Name of file containing a point set in off format.");
+ hidden.add_options()("input-file", po::value<std::string>(&file_name),
+ "Name of file containing a point set in off format.");
po::options_description visible("Allowed options", 100);
Filtration_value default_alpha = std::numeric_limits<Filtration_value>::infinity();
- visible.add_options()
- ("help,h", "produce help message")
- ("landmarks,l", po::value<int>(&nbL),
- "Number of landmarks to choose from the point cloud.")
- ("output-file,o", po::value<std::string>(&filediag)->default_value(std::string()),
- "Name of file in which the persistence diagram is written. Default print in std::cout")
- ("max-sq-alpha,a", po::value<Filtration_value>(&max_squared_alpha)->default_value(default_alpha),
- "Maximal squared relaxation parameter.")
- ("field-charac,p", po::value<int>(&p)->default_value(11),
- "Characteristic p of the coefficient field Z/pZ for computing homology.")
- ("min-persistence,m", po::value<Filtration_value>(&min_persistence)->default_value(0),
- "Minimal lifetime of homology feature to be recorded. Default is 0. Enter a negative value to see zero length intervals")
- ("cpx-dimension,d", po::value<int>(&dim_max)->default_value(std::numeric_limits<int>::max()),
- "Maximal dimension of the strong witness complex we want to compute.");
+ visible.add_options()("help,h", "produce help message")("landmarks,l", po::value<int>(&nbL),
+ "Number of landmarks to choose from the point cloud.")(
+ "output-file,o", po::value<std::string>(&filediag)->default_value(std::string()),
+ "Name of file in which the persistence diagram is written. Default print in std::cout")(
+ "max-sq-alpha,a", po::value<Filtration_value>(&max_squared_alpha)->default_value(default_alpha),
+ "Maximal squared relaxation parameter.")(
+ "field-charac,p", po::value<int>(&p)->default_value(11),
+ "Characteristic p of the coefficient field Z/pZ for computing homology.")(
+ "min-persistence,m", po::value<Filtration_value>(&min_persistence)->default_value(0),
+ "Minimal lifetime of homology feature to be recorded. Default is 0. Enter a negative value to see zero length "
+ "intervals")("cpx-dimension,d", po::value<int>(&dim_max)->default_value(std::numeric_limits<int>::max()),
+ "Maximal dimension of the strong witness complex we want to compute.");
po::positional_options_description pos;
pos.add("input-file", 1);
@@ -151,8 +136,7 @@ void program_options(int argc, char * argv[]
all.add(visible).add(hidden);
po::variables_map vm;
- po::store(po::command_line_parser(argc, argv).
- options(all).positional(pos).run(), vm);
+ po::store(po::command_line_parser(argc, argv).options(all).positional(pos).run(), vm);
po::notify(vm);
if (vm.count("help") || !vm.count("input-file")) {
@@ -170,4 +154,3 @@ void program_options(int argc, char * argv[]
std::abort();
}
}
-
diff --git a/src/Witness_complex/utilities/weak_witness_persistence.cpp b/src/Witness_complex/utilities/weak_witness_persistence.cpp
index a63b0837..23fa93aa 100644
--- a/src/Witness_complex/utilities/weak_witness_persistence.cpp
+++ b/src/Witness_complex/utilities/weak_witness_persistence.cpp
@@ -47,16 +47,10 @@ using Filtration_value = SimplexTree::Filtration_value;
using Field_Zp = Gudhi::persistent_cohomology::Field_Zp;
using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology<SimplexTree, Field_Zp>;
-void program_options(int argc, char * argv[]
- , int & nbL
- , std::string & file_name
- , std::string & filediag
- , Filtration_value & max_squared_alpha
- , int & p
- , int & dim_max
- , Filtration_value & min_persistence);
-
-int main(int argc, char * argv[]) {
+void program_options(int argc, char* argv[], int& nbL, std::string& file_name, std::string& filediag,
+ Filtration_value& max_squared_alpha, int& p, int& dim_max, Filtration_value& min_persistence);
+
+int main(int argc, char* argv[]) {
std::string file_name;
std::string filediag;
Filtration_value max_squared_alpha;
@@ -70,8 +64,8 @@ int main(int argc, char * argv[]) {
Point_vector witnesses, landmarks;
Gudhi::Points_off_reader<Point_d> off_reader(file_name);
if (!off_reader.is_valid()) {
- std::cerr << "Witness complex - Unable to read file " << file_name << "\n";
- exit(-1); // ----- >>
+ std::cerr << "Witness complex - Unable to read file " << file_name << "\n";
+ exit(-1); // ----- >>
}
witnesses = Point_vector(off_reader.get_point_cloud());
std::cout << "Successfully read " << witnesses.size() << " points.\n";
@@ -79,11 +73,11 @@ int main(int argc, char * argv[]) {
// Choose landmarks (decomment one of the following two lines)
// Gudhi::subsampling::pick_n_random_points(point_vector, nbL, std::back_inserter(landmarks));
- Gudhi::subsampling::choose_n_farthest_points(K(), witnesses, nbL, Gudhi::subsampling::random_starting_point, std::back_inserter(landmarks));
+ Gudhi::subsampling::choose_n_farthest_points(K(), witnesses, nbL, Gudhi::subsampling::random_starting_point,
+ std::back_inserter(landmarks));
// Compute witness complex
- Witness_complex witness_complex(landmarks,
- witnesses);
+ Witness_complex witness_complex(landmarks, witnesses);
witness_complex.create_complex(simplex_tree, max_squared_alpha, lim_d);
@@ -112,38 +106,28 @@ int main(int argc, char * argv[]) {
return 0;
}
-
-void program_options(int argc, char * argv[]
- , int & nbL
- , std::string & file_name
- , std::string & filediag
- , Filtration_value & max_squared_alpha
- , int & p
- , int & dim_max
- , Filtration_value & min_persistence) {
+void program_options(int argc, char* argv[], int& nbL, std::string& file_name, std::string& filediag,
+ Filtration_value& max_squared_alpha, int& p, int& dim_max, Filtration_value& min_persistence) {
namespace po = boost::program_options;
po::options_description hidden("Hidden options");
- hidden.add_options()
- ("input-file", po::value<std::string>(&file_name),
- "Name of file containing a point set in off format.");
+ hidden.add_options()("input-file", po::value<std::string>(&file_name),
+ "Name of file containing a point set in off format.");
Filtration_value default_alpha = std::numeric_limits<Filtration_value>::infinity();
po::options_description visible("Allowed options", 100);
- visible.add_options()
- ("help,h", "produce help message")
- ("landmarks,l", po::value<int>(&nbL),
- "Number of landmarks to choose from the point cloud.")
- ("output-file,o", po::value<std::string>(&filediag)->default_value(std::string()),
- "Name of file in which the persistence diagram is written. Default print in std::cout")
- ("max-sq-alpha,a", po::value<Filtration_value>(&max_squared_alpha)->default_value(default_alpha),
- "Maximal squared relaxation parameter.")
- ("field-charac,p", po::value<int>(&p)->default_value(11),
- "Characteristic p of the coefficient field Z/pZ for computing homology.")
- ("min-persistence,m", po::value<Filtration_value>(&min_persistence)->default_value(0),
- "Minimal lifetime of homology feature to be recorded. Default is 0. Enter a negative value to see zero length intervals")
- ("cpx-dimension,d", po::value<int>(&dim_max)->default_value(std::numeric_limits<int>::max()),
- "Maximal dimension of the weak witness complex we want to compute.");
+ visible.add_options()("help,h", "produce help message")("landmarks,l", po::value<int>(&nbL),
+ "Number of landmarks to choose from the point cloud.")(
+ "output-file,o", po::value<std::string>(&filediag)->default_value(std::string()),
+ "Name of file in which the persistence diagram is written. Default print in std::cout")(
+ "max-sq-alpha,a", po::value<Filtration_value>(&max_squared_alpha)->default_value(default_alpha),
+ "Maximal squared relaxation parameter.")(
+ "field-charac,p", po::value<int>(&p)->default_value(11),
+ "Characteristic p of the coefficient field Z/pZ for computing homology.")(
+ "min-persistence,m", po::value<Filtration_value>(&min_persistence)->default_value(0),
+ "Minimal lifetime of homology feature to be recorded. Default is 0. Enter a negative value to see zero length "
+ "intervals")("cpx-dimension,d", po::value<int>(&dim_max)->default_value(std::numeric_limits<int>::max()),
+ "Maximal dimension of the weak witness complex we want to compute.");
po::positional_options_description pos;
pos.add("input-file", 1);
@@ -152,8 +136,7 @@ void program_options(int argc, char * argv[]
all.add(visible).add(hidden);
po::variables_map vm;
- po::store(po::command_line_parser(argc, argv).
- options(all).positional(pos).run(), vm);
+ po::store(po::command_line_parser(argc, argv).options(all).positional(pos).run(), vm);
po::notify(vm);
if (vm.count("help") || !vm.count("input-file")) {
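
The reformatted block above relies on Boost.Program_options' chained `add_options()` calls, where each `(...)` group registers one option. A minimal, self-contained sketch of the same pattern, with illustrative option names and defaults rather than the utility's actual interface:

```
#include <boost/program_options.hpp>

#include <iostream>
#include <string>

// Minimal sketch of the chained add_options() style; option names are illustrative only.
int main(int argc, char* argv[]) {
  namespace po = boost::program_options;
  std::string input_file;
  int nb_landmarks = 0;
  po::options_description visible("Allowed options", 100);
  visible.add_options()("help,h", "produce help message")(
      "input-file,i", po::value<std::string>(&input_file), "Name of an OFF input file.")(
      "landmarks,l", po::value<int>(&nb_landmarks)->default_value(100), "Number of landmarks.");
  po::variables_map vm;
  po::store(po::command_line_parser(argc, argv).options(visible).run(), vm);
  po::notify(vm);
  if (vm.count("help")) {
    std::cout << visible << std::endl;
    return 0;
  }
  std::cout << "input-file = " << input_file << ", landmarks = " << nb_landmarks << "\n";
  return 0;
}
```

Each `(name, value_semantic, description)` group adds one option, so the chain reads as a declarative table of the command line.
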
diff --git a/src/Witness_complex/utilities/README b/src/Witness_complex/utilities/witnesscomplex.md
index 1141033e..2341759b 100644
--- a/src/Witness_complex/utilities/README
+++ b/src/Witness_complex/utilities/witnesscomplex.md
@@ -1,18 +1,24 @@
-# Witness_complex #
-For more details about the witness complex, please read the [user manual of the package](http://gudhi.gforge.inria.fr/doc/latest/group__witness__complex.html).
-## `weak_witness_persistence` ##
-This program computes the persistent homology with coefficient field *Z/pZ* of a Weak witness complex defined on a set of input points. The output diagram contains one bar per line, written with the convention:
+# Witness complex #
+
+
+For more details about the witness complex, please read the [user manual of the package](/doc/latest/group__witness__complex.html).
+
+## weak_witness_persistence ##
+This program computes the persistent homology with coefficient field *Z/pZ* of a Weak witness complex defined on a set of input points.
+The output diagram contains one bar per line, written with the convention:
`p dim birth death`
-where `dim` is the dimension of the homological feature, `birth` and `death` are respectively the birth and death of the feature, and `p` is the characteristic of the field *Z/pZ* used for homology coefficients.
+where `dim` is the dimension of the homological feature, `birth` and `death` are respectively the birth and death of the feature,
+and `p` is the characteristic of the field *Z/pZ* used for homology coefficients.
+
+**Usage**
-*Usage*
`weak_witness_persistence [options] <OFF input file>`
-*Allowed options*
+**Allowed options**
* `-h [ --help ]` Produce help message
* `-l [ --landmarks ]` Number of landmarks to choose from the point cloud.
@@ -22,33 +28,28 @@ where `dim` is the dimension of the homological feature, `birth` and `death` are
* `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals.
* `-d [ --cpx-dimension ]` (default = 2147483647) Maximal dimension of the weak witness complex we want to compute.
-*Example*
-`weak_witness_persistence data/points/tore3D_1307.off -l 20 -a 0.5 -m 0.006`
+**Example**
-outputs:
-```
-Successfully read 1307 points.
-Ambient dimension is 3.
-The complex contains 732 simplices and has dimension 8
-11 0 0 inf
-11 1 0 inf
-11 2 0.0275251 0.0534586
-11 1 0 0.0239952
-```
+`weak_witness_persistence data/points/tore3D_1307.off -l 20 -a 0.5 -m 0.006`
N.B.: output is random as the 20 landmarks are chosen randomly.
-## `strong_witness_persistence` ##
-This program computes the persistent homology with coefficient field *Z/pZ* of a Strong witness complex defined on a set of input points. The output diagram contains one bar per line, written with the convention:
+
+## strong_witness_persistence ##
+
+This program computes the persistent homology with coefficient field *Z/pZ* of a Strong witness complex defined on a set of input points.
+The output diagram contains one bar per line, written with the convention:
`p dim birth death`
-where `dim` is the dimension of the homological feature, `birth` and `death` are respectively the birth and death of the feature, and `p` is the characteristic of the field *Z/pZ* used for homology coefficients.
+where `dim` is the dimension of the homological feature, `birth` and `death` are respectively the birth and death of the feature,
+and `p` is the characteristic of the field *Z/pZ* used for homology coefficients.
+
+**Usage**
-*Usage*
`strong_witness_persistence [options] <OFF input file>`
-*Allowed options*
+**Allowed options**
* `-h [ --help ]` Produce help message
* `-l [ --landmarks ]` Number of landmarks to choose from the point cloud.
@@ -58,17 +59,8 @@ where `dim` is the dimension of the homological feature, `birth` and `death` are
* `-m [ --min-persistence ]` (default = 0) Minimal lifetime of homology feature to be recorded. Enter a negative value to see zero length intervals.
* `-d [ --cpx-dimension ]` (default = 2147483647) Maximal dimension of the weak witness complex we want to compute.
-*Example*
-`strong_witness_persistence data/points/tore3D_1307.off -l 20 -a 0.5 -m 0.06`
+**Example**
-outputs:
-```
-Successfully read 1307 points.
-Ambient dimension is 3.
-The complex contains 1836 simplices and has dimension 8
-11 0 0 inf
-11 1 0.00674748 inf
-11 2 0.0937751 0.235354
-```
+`strong_witness_persistence data/points/tore3D_1307.off -l 20 -a 0.5 -m 0.06`
N.B.: output is random as the 20 landmarks are chosen randomly.
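
Because each diagram line follows the `p dim birth death` convention, with `inf` marking essential classes, the output is straightforward to post-process. A minimal sketch, assuming the diagram was saved to a hypothetical file `diagram.pers` via the `-o` option (the `Bar` struct is illustrative, not part of GUDHI):

```
#include <fstream>
#include <iostream>
#include <limits>
#include <sstream>
#include <string>
#include <vector>

// Illustrative record for one "p dim birth death" line of a persistence diagram file.
struct Bar {
  int p;
  int dim;
  double birth;
  double death;
};

int main() {
  std::ifstream in("diagram.pers");  // hypothetical file produced with the -o option
  std::vector<Bar> bars;
  std::string line, death_token;
  while (std::getline(in, line)) {
    std::istringstream iss(line);
    Bar b;
    if (iss >> b.p >> b.dim >> b.birth >> death_token) {
      // Essential classes are written with an "inf" death time.
      b.death = (death_token == "inf") ? std::numeric_limits<double>::infinity()
                                       : std::stod(death_token);
      bars.push_back(b);
    }
  }
  std::cout << "Read " << bars.size() << " bars\n";
  return 0;
}
```
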
diff --git a/src/common/doc/examples.h b/src/common/doc/examples.h
new file mode 100644
index 00000000..40f202c7
--- /dev/null
+++ b/src/common/doc/examples.h
@@ -0,0 +1,99 @@
+// List of GUDHI examples - Doxygen needs at least a file tag to analyse comments
+// In user_version, `find . -name "*.cpp"` in example and utilities folders
+/*! @file Examples
+ * @example Alpha_complex/Alpha_complex_from_off.cpp
+ * @example Alpha_complex/Alpha_complex_from_points.cpp
+ * @example Bottleneck_distance/bottleneck_basic_example.cpp
+ * @example Bottleneck_distance/alpha_rips_persistence_bottleneck_distance.cpp
+ * @example Witness_complex/example_nearest_landmark_table.cpp
+ * @example Witness_complex/example_witness_complex_off.cpp
+ * @example Witness_complex/example_witness_complex_sphere.cpp
+ * @example Witness_complex/example_strong_witness_complex_off.cpp
+ * @example Simplex_tree/mini_simplex_tree.cpp
+ * @example Simplex_tree/graph_expansion_with_blocker.cpp
+ * @example Simplex_tree/simple_simplex_tree.cpp
+ * @example Simplex_tree/simplex_tree_from_cliques_of_graph.cpp
+ * @example Simplex_tree/example_alpha_shapes_3_simplex_tree_from_off_file.cpp
+ * @example Simplex_tree/cech_complex_cgal_mini_sphere_3d.cpp
+ * @example Persistent_cohomology/plain_homology.cpp
+ * @example Persistent_cohomology/persistence_from_file.cpp
+ * @example Persistent_cohomology/rips_persistence_step_by_step.cpp
+ * @example Persistent_cohomology/rips_persistence_via_boundary_matrix.cpp
+ * @example Persistent_cohomology/custom_persistence_sort.cpp
+ * @example Persistent_cohomology/persistence_from_simple_simplex_tree.cpp
+ * @example Persistent_cohomology/rips_multifield_persistence.cpp
+ * @example Skeleton_blocker/Skeleton_blocker_from_simplices.cpp
+ * @example Skeleton_blocker/Skeleton_blocker_iteration.cpp
+ * @example Skeleton_blocker/Skeleton_blocker_link.cpp
+ * @example Contraction/Garland_heckbert.cpp
+ * @example Contraction/Rips_contraction.cpp
+ * @example Bitmap_cubical_complex/Random_bitmap_cubical_complex.cpp
+ * @example common/example_CGAL_3D_points_off_reader.cpp
+ * @example common/example_vector_double_points_off_reader.cpp
+ * @example common/example_CGAL_points_off_reader.cpp
+ * @example Rips_complex/example_one_skeleton_rips_from_distance_matrix.cpp
+ * @example Rips_complex/example_one_skeleton_rips_from_points.cpp
+ * @example Rips_complex/example_rips_complex_from_csv_distance_matrix_file.cpp
+ * @example Rips_complex/example_rips_complex_from_off_file.cpp
+ * @example Persistence_representations/persistence_intervals.cpp
+ * @example Persistence_representations/persistence_vectors.cpp
+ * @example Persistence_representations/persistence_heat_maps.cpp
+ * @example Persistence_representations/persistence_landscape_on_grid.cpp
+ * @example Persistence_representations/persistence_landscape.cpp
+ * @example Tangential_complex/example_basic.cpp
+ * @example Tangential_complex/example_with_perturb.cpp
+ * @example Subsampling/example_custom_kernel.cpp
+ * @example Subsampling/example_choose_n_farthest_points.cpp
+ * @example Subsampling/example_sparsify_point_set.cpp
+ * @example Subsampling/example_pick_n_random_points.cpp
+ * @example Nerve_GIC/CoordGIC.cpp
+ * @example Nerve_GIC/Nerve.cpp
+ * @example Nerve_GIC/FuncGIC.cpp
+ * @example Nerve_GIC/VoronoiGIC.cpp
+ * @example Spatial_searching/example_spatial_searching.cpp
+ * @example Alpha_complex/alpha_complex_3d_persistence.cpp
+ * @example Alpha_complex/alpha_complex_persistence.cpp
+ * @example Alpha_complex/weighted_periodic_alpha_complex_3d_persistence.cpp
+ * @example Alpha_complex/weighted_alpha_complex_3d_persistence.cpp
+ * @example Alpha_complex/periodic_alpha_complex_3d_persistence.cpp
+ * @example Alpha_complex/exact_alpha_complex_3d_persistence.cpp
+ * @example Bottleneck_distance/bottleneck_distance.cpp
+ * @example Witness_complex/weak_witness_persistence.cpp
+ * @example Witness_complex/strong_witness_persistence.cpp
+ * @example Bitmap_cubical_complex/cubical_complex_persistence.cpp
+ * @example Bitmap_cubical_complex/periodic_cubical_complex_persistence.cpp
+ * @example common/off_file_from_shape_generator.cpp
+ * @example Rips_complex/rips_distance_matrix_persistence.cpp
+ * @example Rips_complex/rips_persistence.cpp
+ * @example Persistence_representations/persistence_landscapes_on_grid/create_landscapes_on_grid.cpp
+ * @example Persistence_representations/persistence_landscapes_on_grid/plot_landscapes_on_grid.cpp
+ * @example Persistence_representations/persistence_landscapes_on_grid/compute_scalar_product_of_landscapes_on_grid.cpp
+ * @example Persistence_representations/persistence_landscapes_on_grid/compute_distance_of_landscapes_on_grid.cpp
+ * @example Persistence_representations/persistence_landscapes_on_grid/average_landscapes_on_grid.cpp
+ * @example Persistence_representations/persistence_intervals/compute_birth_death_range_in_persistence_diagram.cpp
+ * @example Persistence_representations/persistence_intervals/compute_number_of_dominant_intervals.cpp
+ * @example Persistence_representations/persistence_intervals/plot_persistence_Betti_numbers.cpp
+ * @example Persistence_representations/persistence_intervals/plot_persistence_intervals.cpp
+ * @example Persistence_representations/persistence_intervals/plot_histogram_of_intervals_lengths.cpp
+ * @example Persistence_representations/persistence_intervals/compute_bottleneck_distance.cpp
+ * @example Persistence_representations/persistence_heat_maps/create_pssk.cpp
+ * @example Persistence_representations/persistence_heat_maps/create_p_h_m_weighted_by_arctan_of_their_persistence.cpp
+ * @example Persistence_representations/persistence_heat_maps/create_p_h_m_weighted_by_squared_diag_distance.cpp
+ * @example Persistence_representations/persistence_heat_maps/compute_distance_of_persistence_heat_maps.cpp
+ * @example Persistence_representations/persistence_heat_maps/compute_scalar_product_of_persistence_heat_maps.cpp
+ * @example Persistence_representations/persistence_heat_maps/create_p_h_m_weighted_by_distance_from_diagonal.cpp
+ * @example Persistence_representations/persistence_heat_maps/average_persistence_heat_maps.cpp
+ * @example Persistence_representations/persistence_heat_maps/plot_persistence_heat_map.cpp
+ * @example Persistence_representations/persistence_heat_maps/create_persistence_heat_maps.cpp
+ * @example Persistence_representations/persistence_vectors/plot_persistence_vectors.cpp
+ * @example Persistence_representations/persistence_vectors/compute_distance_of_persistence_vectors.cpp
+ * @example Persistence_representations/persistence_vectors/average_persistence_vectors.cpp
+ * @example Persistence_representations/persistence_vectors/create_persistence_vectors.cpp
+ * @example Persistence_representations/persistence_vectors/compute_scalar_product_of_persistence_vectors.cpp
+ * @example Persistence_representations/persistence_landscapes/average_landscapes.cpp
+ * @example Persistence_representations/persistence_landscapes/compute_scalar_product_of_landscapes.cpp
+ * @example Persistence_representations/persistence_landscapes/create_landscapes.cpp
+ * @example Persistence_representations/persistence_landscapes/compute_distance_of_landscapes.cpp
+ * @example Persistence_representations/persistence_landscapes/plot_landscapes.cpp
+ */
+
diff --git a/src/common/doc/footer.html b/src/common/doc/footer.html
index 7b4cdc5c..a557922b 100644
--- a/src/common/doc/footer.html
+++ b/src/common/doc/footer.html
@@ -6,24 +6,18 @@
<!--BEGIN PROJECT_NAME--> $projectname
<!--BEGIN PROJECT_NUMBER-->&#160;Version $projectnumber<!--END PROJECT_NUMBER-->
<!--BEGIN PROJECT_BRIEF-->&#160;-&#160;$projectbrief<!--END PROJECT_BRIEF-->
+<!--BEGIN PROJECT_BRIEF-->&#160;-&#160;Copyright: GPL v3<!--END PROJECT_BRIEF-->
<!--END PROJECT_NAME-->
</td>
<td class="network-entypo">
<!--BEGIN GENERATE_TREEVIEW-->
$generatedby
<a href="http://www.doxygen.org/index.html">
- <img class="footer" src="$relpath^doxygen.png" alt="doxygen"/></a> $doxygenversion
+ Doxygen</a> $doxygenversion
<!--END GENERATE_TREEVIEW-->
</td>
</tr>
</table>
-<!--BEGIN !GENERATE_TREEVIEW-->
-<hr class="footer"/><address class="footer"><small> tralala
-$generatedby &#160;<a href="http://www.doxygen.org/index.html">
-<img class="footer" src="$relpath^doxygen.png" alt="doxygen"/>
-</a> $doxygenversion
-</small></address>
-<!--END !GENERATE_TREEVIEW-->
</body>
</html>
diff --git a/src/common/doc/header.html b/src/common/doc/header.html
index 53b5c0a2..9c514381 100644
--- a/src/common/doc/header.html
+++ b/src/common/doc/header.html
@@ -56,6 +56,8 @@ $extrastylesheet
<ul class="dropdown">
<li><a href="http://gudhi.gforge.inria.fr/licensing/">Licensing</a></li>
<li><a href="https://gforge.inria.fr/frs/?group_id=3865" target="_blank">Get the sources</a></li>
+ <li><a href="https://gforge.inria.fr/frs/download.php/file/37113/GUDHI_2.0.0_OSX_UTILS.beta.tar.gz" target="_blank">Utils for Mac OSx</a></li>
+ <li><a href="https://gforge.inria.fr/frs/download.php/file/37112/GUDHI_2.0.0_WIN64_UTILS.beta.zip" target="_blank">Utils for Win x64</a></li>
</ul>
</li>
<li class="divider"></li>
@@ -66,6 +68,8 @@ $extrastylesheet
<li><a href="http://gudhi.gforge.inria.fr/doc/latest/installation.html">C++ installation manual</a></li>
<li><a href="http://gudhi.gforge.inria.fr/python/latest/">Python documentation</a></li>
<li><a href="http://gudhi.gforge.inria.fr/python/latest/installation.html">Python installation manual</a></li>
+ <li><a href="http://gudhi.gforge.inria.fr/utils/">Utilities</a></li>
+ <li><a href="http://bertrand.michel.perso.math.cnrs.fr/Enseignements/TDA-Gudhi-Python.html" target="_blank">Tutorial</a></li>
</ul>
</li>
<li class="divider"></li>
diff --git a/src/common/doc/installation.h b/src/common/doc/installation.h
new file mode 100644
index 00000000..25675cc5
--- /dev/null
+++ b/src/common/doc/installation.h
@@ -0,0 +1,263 @@
+/*! \page installation GUDHI installation
+ * \tableofcontents
+ * As GUDHI is a header-only library, there is no need to install it.
+ *
+ * Examples of GUDHI headers inclusion can be found in \ref demos.
+ *
+ * \section compiling Compiling
+ * The library uses C++11 and requires <a target="_blank" href="http://www.boost.org/">Boost</a> version 1.48.0 or
+ * more recent. It is a multi-platform library and compiles on Linux, Mac OS X, and Windows (Visual Studio 2015).
+ *
+ * \subsection demos Demos and examples
+ * To build the demos and examples, run the following commands in a terminal:
+\verbatim cd /path-to-gudhi/
+mkdir build
+cd build/
+cmake ..
+make \endverbatim
+ * A list of examples is available <a href="examples.html">here</a>.
+ *
+ * \subsection testsuites Test suites
+ * To test your build, run the following command in a terminal:
+ * \verbatim make test \endverbatim
+ *
+ * \subsection documentationgeneration Documentation
+ * To generate the documentation, <a target="_blank" href="http://www.doxygen.org/">Doxygen</a> is required.
+ * Run the following command in a terminal:
+\verbatim
+make doxygen
+# Documentation will be generated in the folder YYYY-MM-DD-hh-mm-ss_GUDHI_X.Y.Z/doc/html/
+# You can customize the directory name by calling `cmake -DUSER_VERSION_DIR=/my/custom/folder`
+\endverbatim
+ *
+ * \section optionallibrary Optional third-party library
+ * \subsection gmp GMP
+ * The multi-field persistent homology algorithm requires GMP, which is a free library for arbitrary-precision
+ * arithmetic, operating on signed integers, rational numbers, and floating-point numbers.
+ *
+ * The following example requires the <a target="_blank" href="http://gmplib.org/">GNU Multiple Precision Arithmetic
+ * Library</a> (GMP) and will not be built if GMP is not installed:
+ * \li <a href="_persistent_cohomology_2rips_multifield_persistence_8cpp-example.html">
+ * Persistent_cohomology/rips_multifield_persistence.cpp</a>
+ *
+ * Having GMP version 4.2 or higher installed is recommended.
+ *
+ * \subsection cgal CGAL
+ * The \ref alpha_complex data structure, \ref bottleneck_distance, and a few examples require CGAL, a C++
+ * library that provides easy access to efficient and reliable geometric algorithms.
+ *
+ * \note There is no need to install CGAL: you can just <CODE>cmake . && make</CODE> CGAL (or even
+ * <CODE>cmake -DCGAL_HEADER_ONLY=ON .</CODE> for CGAL version &ge; 4.8.0); thereafter you will be able to compile
+ * GUDHI by calling <CODE>cmake -DCGAL_DIR=/your/path/to/CGAL-X.Y .. && make</CODE>.
+ *
+ * Having CGAL version 4.4.0 or higher installed is recommended. The procedure to install this library according to
+ * your operating system is detailed at http://doc.cgal.org/latest/Manual/installation.html
+ *
+ * The following examples/utilities require the <a target="_blank" href="http://www.cgal.org/">Computational Geometry Algorithms
+ * Library</a> (CGAL \cite cgal:eb-15b) and will not be built if CGAL is not installed:
+ * \li <a href="_alpha_complex_2alpha_complex_3d_persistence_8cpp-example.html">
+ * Alpha_complex/alpha_complex_3d_persistence.cpp</a>
+ * \li <a href="_alpha_complex_2exact_alpha_complex_3d_persistence_8cpp-example.html">
+ * Alpha_complex/exact_alpha_complex_3d_persistence.cpp</a>
+ * \li <a href="_alpha_complex_2weighted_alpha_complex_3d_persistence_8cpp-example.html">
+ * Alpha_complex/weighted_alpha_complex_3d_persistence.cpp</a>
+ * \li <a href="_simplex_tree_2example_alpha_shapes_3_simplex_tree_from_off_file_8cpp-example.html">
+ * Simplex_tree/example_alpha_shapes_3_simplex_tree_from_off_file.cpp</a>
+ *
+ * The following examples/utilities require CGAL version &ge; 4.6.0:
+ * \li <a href="_witness_complex_2strong_witness_persistence_8cpp-example.html">
+ * Witness_complex/strong_witness_persistence.cpp</a>
+ * \li <a href="_witness_complex_2weak_witness_persistence_8cpp-example.html">
+ * Witness_complex/weak_witness_persistence.cpp</a>
+ * \li <a href="_witness_complex_2example_strong_witness_complex_off_8cpp-example.html">
+ * Witness_complex/example_strong_witness_complex_off.cpp</a>
+ * \li <a href="_witness_complex_2example_witness_complex_off_8cpp-example.html">
+ * Witness_complex/example_witness_complex_off.cpp</a>
+ * \li <a href="_witness_complex_2example_witness_complex_sphere_8cpp-example.html">
+ * Witness_complex/example_witness_complex_sphere.cpp</a>
+ *
+ * The following examples require CGAL version &ge; 4.7.0:
+ * \li <a href="_alpha_complex_2_alpha_complex_from_off_8cpp-example.html">
+ * Alpha_complex/Alpha_complex_from_off.cpp</a>
+ * \li <a href="_alpha_complex_2_alpha_complex_from_points_8cpp-example.html">
+ * Alpha_complex/Alpha_complex_from_points.cpp</a>
+ * \li <a href="_alpha_complex_2alpha_complex_persistence_8cpp-example.html">
+ * Alpha_complex/alpha_complex_persistence.cpp</a>
+ * \li <a href="_alpha_complex_2periodic_alpha_complex_3d_persistence_8cpp-example.html">
+ * Alpha_complex/periodic_alpha_complex_3d_persistence.cpp</a>
+ * \li <a href="_persistent_cohomology_2custom_persistence_sort_8cpp-example.html">
+ * Persistent_cohomology/custom_persistence_sort.cpp</a>
+ *
+ * The following examples require CGAL version &ge; 4.8.1:
+ * \li <a href="_bottleneck_distance_2alpha_rips_persistence_bottleneck_distance_8cpp-example.html">
+ * Bottleneck_distance/alpha_rips_persistence_bottleneck_distance.cpp</a>
+ * \li <a href="_bottleneck_distance_2bottleneck_basic_example_8cpp-example.html">
+ * Bottleneck_distance/bottleneck_basic_example.cpp</a>
+ * \li <a href="_bottleneck_distance_2bottleneck_read_file_8cpp-example.html">
+ * Bottleneck_distance/bottleneck_distance.cpp</a>
+ * \li <a href="_nerve__g_i_c_2_coord_g_i_c_8cpp-example.html">
+ * Nerve_GIC/CoordGIC.cpp</a>
+ * \li <a href="_nerve__g_i_c_2_func_g_i_c_8cpp-example.html">
+ * Nerve_GIC/FuncGIC.cpp</a>
+ * \li <a href="_nerve__g_i_c_2_nerve_8cpp-example.html">
+ * Nerve_GIC/Nerve.cpp</a>
+ * \li <a href="_nerve__g_i_c_2_voronoi_g_i_c_8cpp-example.html">
+ * Nerve_GIC/VoronoiGIC.cpp</a>
+ * \li <a href="_spatial_searching_2example_spatial_searching_8cpp-example.html">
+ * Spatial_searching/example_spatial_searching.cpp</a>
+ * \li <a href="_subsampling_2example_choose_n_farthest_points_8cpp-example.html">
+ * Subsampling/example_choose_n_farthest_points.cpp</a>
+ * \li <a href="_subsampling_2example_custom_kernel_8cpp-example.html">
+ * Subsampling/example_custom_kernel.cpp</a>
+ * \li <a href="_subsampling_2example_pick_n_random_points_8cpp-example.html">
+ * Subsampling/example_pick_n_random_points.cpp</a>
+ * \li <a href="_subsampling_2example_sparsify_point_set_8cpp-example.html">
+ * Subsampling/example_sparsify_point_set.cpp</a>
+ * \li <a href="_tangential_complex_2example_basic_8cpp-example.html">
+ * Tangential_complex/example_basic.cpp</a>
+ * \li <a href="_tangential_complex_2example_with_perturb_8cpp-example.html">
+ * Tangential_complex/example_with_perturb.cpp</a>
+ *
+ * \subsection eigen3 Eigen3
+ * The \ref alpha_complex data structure and a few examples require
+ * <a target="_blank" href="http://eigen.tuxfamily.org/">Eigen3</a>, a C++ template library for linear algebra:
+ * matrices, vectors, numerical solvers, and related algorithms.
+ *
+ * The following examples/utilities require <a target="_blank" href="http://eigen.tuxfamily.org/">Eigen3</a> and will not be
+ * built if Eigen3 is not installed:
+ * \li <a href="_alpha_complex_2_alpha_complex_from_off_8cpp-example.html">
+ * Alpha_complex/Alpha_complex_from_off.cpp</a>
+ * \li <a href="_alpha_complex_2_alpha_complex_from_points_8cpp-example.html">
+ * Alpha_complex/Alpha_complex_from_points.cpp</a>
+ * \li <a href="_alpha_complex_2alpha_complex_persistence_8cpp-example.html">
+ * Alpha_complex/alpha_complex_persistence.cpp</a>
+ * \li <a href="_alpha_complex_2periodic_alpha_complex_3d_persistence_8cpp-example.html">
+ * Alpha_complex/periodic_alpha_complex_3d_persistence.cpp</a>
+ * \li <a href="_bottleneck_distance_2alpha_rips_persistence_bottleneck_distance_8cpp-example.html">
+ * Bottleneck_distance/alpha_rips_persistence_bottleneck_distance.cpp</a>
+ * \li <a href="_persistent_cohomology_2custom_persistence_sort_8cpp-example.html">
+ * Persistent_cohomology/custom_persistence_sort.cpp</a>
+ * \li <a href="_spatial_searching_2example_spatial_searching_8cpp-example.html">
+ * Spatial_searching/example_spatial_searching.cpp</a>
+ * \li <a href="_subsampling_2example_choose_n_farthest_points_8cpp-example.html">
+ * Subsampling/example_choose_n_farthest_points.cpp</a>
+ * \li <a href="_subsampling_2example_custom_kernel_8cpp-example.html">
+ * Subsampling/example_custom_kernel.cpp</a>
+ * \li <a href="_subsampling_2example_pick_n_random_points_8cpp-example.html">
+ * Subsampling/example_pick_n_random_points.cpp</a>
+ * \li <a href="_subsampling_2example_sparsify_point_set_8cpp-example.html">
+ * Subsampling/example_sparsify_point_set.cpp</a>
+ * \li <a href="_tangential_complex_2example_basic_8cpp-example.html">
+ * Tangential_complex/example_basic.cpp</a>
+ * \li <a href="_tangential_complex_2example_with_perturb_8cpp-example.html">
+ * Tangential_complex/example_with_perturb.cpp</a>
+ * \li <a href="_witness_complex_2strong_witness_persistence_8cpp-example.html">
+ * Witness_complex/strong_witness_persistence.cpp</a>
+ * \li <a href="_witness_complex_2weak_witness_persistence_8cpp-example.html">
+ * Witness_complex/weak_witness_persistence.cpp</a>
+ * \li <a href="_witness_complex_2example_strong_witness_complex_off_8cpp-example.html">
+ * Witness_complex/example_strong_witness_complex_off.cpp</a>
+ * \li <a href="_witness_complex_2example_witness_complex_off_8cpp-example.html">
+ * Witness_complex/example_witness_complex_off.cpp</a>
+ * \li <a href="_witness_complex_2example_witness_complex_sphere_8cpp-example.html">
+ * Witness_complex/example_witness_complex_sphere.cpp</a>
+ *
+ * \subsection tbb Threading Building Blocks
+ * <a target="_blank" href="https://www.threadingbuildingblocks.org/">Intel&reg; TBB</a> lets you easily write parallel
+ * C++ programs that take full advantage of multicore performance, that are portable and composable, and that have
+ * future-proof scalability.
+ *
+ * Having Intel&reg; TBB installed is recommended to parallelize and accelerate some GUDHI computations.
+ *
+ * The following examples/utilities use Intel&reg; TBB if installed:
+ * \li <a href="_alpha_complex_2_alpha_complex_from_off_8cpp-example.html">
+ * Alpha_complex/Alpha_complex_from_off.cpp</a>
+ * \li <a href="_alpha_complex_2_alpha_complex_from_points_8cpp-example.html">
+ * Alpha_complex/Alpha_complex_from_points.cpp</a>
+ * \li <a href="_alpha_complex_2alpha_complex_3d_persistence_8cpp-example.html">
+ * Alpha_complex/alpha_complex_3d_persistence.cpp</a>
+ * \li <a href="_alpha_complex_2alpha_complex_persistence_8cpp-example.html">
+ * Alpha_complex/alpha_complex_persistence.cpp</a>
+ * \li <a href="_alpha_complex_2exact_alpha_complex_3d_persistence_8cpp-example.html">
+ * Alpha_complex/exact_alpha_complex_3d_persistence.cpp</a>
+ * \li <a href="_alpha_complex_2periodic_alpha_complex_3d_persistence_8cpp-example.html">
+ * Alpha_complex/periodic_alpha_complex_3d_persistence.cpp</a>
+ * \li <a href="_alpha_complex_2weighted_alpha_complex_3d_persistence_8cpp-example.html">
+ * Alpha_complex/weighted_alpha_complex_3d_persistence.cpp</a>
+ * \li <a href="_bitmap_cubical_complex_2_bitmap_cubical_complex_8cpp-example.html">
+ * Bitmap_cubical_complex/cubical_complex_persistence.cpp</a>
+ * \li <a href="_bitmap_cubical_complex_2_bitmap_cubical_complex_periodic_boundary_conditions_8cpp-example.html">
+ * Bitmap_cubical_complex/periodic_cubical_complex_persistence.cpp</a>
+ * \li <a href="_bitmap_cubical_complex_2_random_bitmap_cubical_complex_8cpp-example.html">
+ * Bitmap_cubical_complex/Random_bitmap_cubical_complex.cpp</a>
+ * \li <a href="_nerve__g_i_c_2_coord_g_i_c_8cpp-example.html">
+ * Nerve_GIC/CoordGIC.cpp</a>
+ * \li <a href="_nerve__g_i_c_2_func_g_i_c_8cpp-example.html">
+ * Nerve_GIC/FuncGIC.cpp</a>
+ * \li <a href="_nerve__g_i_c_2_nerve_8cpp-example.html">
+ * Nerve_GIC/Nerve.cpp</a>
+ * \li <a href="_nerve__g_i_c_2_voronoi_g_i_c_8cpp-example.html">
+ * Nerve_GIC/VoronoiGIC.cpp</a>
+ * \li <a href="_simplex_tree_2simple_simplex_tree_8cpp-example.html">
+ * Simplex_tree/simple_simplex_tree.cpp</a>
+ * \li <a href="_simplex_tree_2example_alpha_shapes_3_simplex_tree_from_off_file_8cpp-example.html">
+ * Simplex_tree/example_alpha_shapes_3_simplex_tree_from_off_file.cpp</a>
+ * \li <a href="_simplex_tree_2simplex_tree_from_cliques_of_graph_8cpp-example.html">
+ * Simplex_tree/simplex_tree_from_cliques_of_graph.cpp</a>
+ * \li <a href="_simplex_tree_2graph_expansion_with_blocker_8cpp-example.html">
+ * Simplex_tree/graph_expansion_with_blocker.cpp</a>
+ * \li <a href="_persistent_cohomology_2alpha_complex_3d_persistence_8cpp-example.html">
+ * Persistent_cohomology/alpha_complex_3d_persistence.cpp</a>
+ * \li <a href="_persistent_cohomology_2alpha_complex_persistence_8cpp-example.html">
+ * Persistent_cohomology/alpha_complex_persistence.cpp</a>
+ * \li <a href="_persistent_cohomology_2rips_persistence_via_boundary_matrix_8cpp-example.html">
+ * Persistent_cohomology/rips_persistence_via_boundary_matrix.cpp</a>
+ * \li <a href="_persistent_cohomology_2persistence_from_file_8cpp-example.html">
+ * Persistent_cohomology/persistence_from_file.cpp</a>
+ * \li <a href="_persistent_cohomology_2persistence_from_simple_simplex_tree_8cpp-example.html">
+ * Persistent_cohomology/persistence_from_simple_simplex_tree.cpp</a>
+ * \li <a href="_persistent_cohomology_2plain_homology_8cpp-example.html">
+ * Persistent_cohomology/plain_homology.cpp</a>
+ * \li <a href="_persistent_cohomology_2rips_multifield_persistence_8cpp-example.html">
+ * Persistent_cohomology/rips_multifield_persistence.cpp</a>
+ * \li <a href="_persistent_cohomology_2rips_persistence_step_by_step_8cpp-example.html">
+ * Persistent_cohomology/rips_persistence_step_by_step.cpp</a>
+ * \li <a href="_persistent_cohomology_2exact_alpha_complex_3d_persistence_8cpp-example.html">
+ * Persistent_cohomology/exact_alpha_complex_3d_persistence.cpp</a>
+ * \li <a href="_persistent_cohomology_2weighted_alpha_complex_3d_persistence_8cpp-example.html">
+ * Persistent_cohomology/weighted_alpha_complex_3d_persistence.cpp</a>
+ * \li <a href="_persistent_cohomology_2custom_persistence_sort_8cpp-example.html">
+ * Persistent_cohomology/custom_persistence_sort.cpp</a>
+ * \li <a href="_rips_complex_2example_one_skeleton_rips_from_points_8cpp-example.html">
+ * Rips_complex/example_one_skeleton_rips_from_points.cpp</a>
+ * \li <a href="_rips_complex_2example_rips_complex_from_off_file_8cpp-example.html">
+ * Rips_complex/example_rips_complex_from_off_file.cpp</a>
+ * \li <a href="_rips_complex_2rips_distance_matrix_persistence_8cpp-example.html">
+ * Rips_complex/rips_distance_matrix_persistence.cpp</a>
+ * \li <a href="_rips_complex_2rips_persistence_8cpp-example.html">
+ * Rips_complex/rips_persistence.cpp</a>
+ * \li <a href="_witness_complex_2strong_witness_persistence_8cpp-example.html">
+ * Witness_complex/strong_witness_persistence.cpp</a>
+ * \li <a href="_witness_complex_2weak_witness_persistence_8cpp-example.html">
+ * Witness_complex/weak_witness_persistence.cpp</a>
+ * \li <a href="_witness_complex_2example_nearest_landmark_table_8cpp-example.html">
+ * Witness_complex/example_nearest_landmark_table.cpp</a>
+ *
+ * \section Contributions Bug reports and contributions
+ * Please help us improve the quality of the GUDHI library. You may report bugs or suggestions to:
+ * \verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim
+ *
+ * GUDHI is open to external contributions. If you want to join our development team, please contact us.
+ *
+*/
+
+/*! \page Citation Acknowledging the GUDHI library
+ * We kindly ask users to cite the GUDHI library as appropriately as possible in their papers, and to mention the use
+ * of GUDHI on the web pages of their projects and to provide us with links to these web pages.
+ * Feel free to contact us in case you have any question or remark on this topic.
+ *
+ * We provide \ref GudhiBibtex entries for the modules of the User and Reference Manual, as well as for publications
+ * directly related to the GUDHI library.
+ * \section GudhiBibtex GUDHI bibtex
+ * \verbinclude biblio/how_to_cite_gudhi.bib
+*/
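
As a sketch of the header-only workflow described in this installation page, the following toy program only needs the GUDHI headers on the include path; the compile command in the comment is an assumption to adapt to your setup:

```
// Compile line is an assumption; adapt the include path to where the GUDHI sources live, e.g.
//   g++ -std=c++11 -I/path-to-gudhi/include simplex_tree_hello.cpp
#include <gudhi/Simplex_tree.h>

#include <iostream>

int main() {
  Gudhi::Simplex_tree<> st;
  st.insert_simplex_and_subfaces({0, 1, 2}, 0.0);  // a filled triangle and all its faces
  std::cout << "Complex contains " << st.num_simplices() << " simplices, dimension "
            << st.dimension() << "\n";
  return 0;
}
```
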
diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h
index 148ee670..b3e9ea03 100644
--- a/src/common/doc/main_page.h
+++ b/src/common/doc/main_page.h
@@ -93,7 +93,7 @@
</td>
</tr>
</table>
- \subsection CoverComplexDataStructure Cover Complexes: Nerves and Graph Induced Complexes
+ \subsection CoverComplexDataStructure Cover Complexes
\image html "gicvisu.jpg" "Graph Induced Complex of a point cloud."
<table border="0">
<tr>
@@ -101,6 +101,7 @@
<b>Author:</b> Mathieu Carri&egrave;re<br>
<b>Introduced in:</b> GUDHI 2.1.0<br>
<b>Copyright:</b> GPL v3<br>
+ <b>Requires:</b> \ref cgal &ge; 4.8.1
</td>
<td width="75%">
Nerves and Graph Induced Complexes are cover complexes, i.e. simplicial complexes that provably contain
@@ -250,305 +251,3 @@
</table>
*/
-
-/*! \page installation GUDHI installation
- * \tableofcontents
- * As GUDHI is a header only library, there is no need to install the library.
- *
- * Examples of GUDHI headers inclusion can be found in \ref demos.
- *
- * \section compiling Compiling
- * The library uses c++11 and requires <a target="_blank" href="http://www.boost.org/">Boost</a> with version 1.48.0 or
- * more recent. It is a multi-platform library and compiles on Linux, Mac OSX and Visual Studio 2015.
- *
- * \subsection demos Demos and examples
- * To build the demos and examples, run the following commands in a terminal:
-\verbatim cd /path-to-gudhi/
-mkdir build
-cd build/
-cmake ..
-make \endverbatim
- * A list of examples is available <a href="examples.html">here</a>.
- *
- * \subsection testsuites Test suites
- * To test your build, run the following command in a terminal:
- * \verbatim make test \endverbatim
- *
- * \subsection documentationgeneration Documentation
- * To generate the documentation, <a target="_blank" href="http://www.doxygen.org/">Doxygen</a> is required.
- * Run the following command in a terminal:
-\verbatim
-make doxygen
-# Documentation will be generated in the folder YYYY-MM-DD-hh-mm-ss_GUDHI_X.Y.Z/doc/html/
-# You can customize the directory name by calling `cmake -DUSER_VERSION_DIR=/my/custom/folder`
-\endverbatim
- *
- * \section optionallibrary Optional third-party library
- * \subsection gmp GMP
- * The multi-field persistent homology algorithm requires GMP which is a free library for arbitrary-precision
- * arithmetic, operating on signed integers, rational numbers, and floating point numbers.
- *
- * The following example requires the <a target="_blank" href="http://gmplib.org/">GNU Multiple Precision Arithmetic
- * Library</a> (GMP) and will not be built if GMP is not installed:
- * \li <a href="_persistent_cohomology_2rips_multifield_persistence_8cpp-example.html">
- * Persistent_cohomology/rips_multifield_persistence.cpp</a>
- *
- * Having GMP version 4.2 or higher installed is recommended.
- *
- * \subsection cgal CGAL
- * The \ref alpha_complex data structure, \ref bottleneck_distance, and few examples requires CGAL, which is a C++
- * library which provides easy access to efficient and reliable geometric algorithms.
- *
- * \note There is no need to install CGAL, you can just <CODE>cmake . && make</CODE> CGAL (or even
- * <CODE>cmake -DCGAL_HEADER_ONLY=ON .</CODE> for CGAL version &ge; 4.8.0), thereafter you will be able to compile
- * GUDHI by calling <CODE>cmake -DCGAL_DIR=/your/path/to/CGAL-X.Y .. && make</CODE>
- *
- * Having CGAL version 4.4.0 or higher installed is recommended. The procedure to install this library according to
- * your operating system is detailed here http://doc.cgal.org/latest/Manual/installation.html
- *
- * The following examples/utilities require the <a target="_blank" href="http://www.cgal.org/">Computational Geometry Algorithms
- * Library</a> (CGAL \cite cgal:eb-15b) and will not be built if CGAL is not installed:
- * \li <a href="_alpha_complex_2alpha_complex_3d_persistence_8cpp-example.html">
- * Alpha_complex/alpha_complex_3d_persistence.cpp</a>
- * \li <a href="_alpha_complex_2exact_alpha_complex_3d_persistence_8cpp-example.html">
- * Alpha_complex/exact_alpha_complex_3d_persistence.cpp</a>
- * \li <a href="_alpha_complex_2weighted_alpha_complex_3d_persistence_8cpp-example.html">
- * Alpha_complex/weighted_alpha_complex_3d_persistence.cpp</a>
- * \li <a href="_simplex_tree_2example_alpha_shapes_3_simplex_tree_from_off_file_8cpp-example.html">
- * Simplex_tree/example_alpha_shapes_3_simplex_tree_from_off_file.cpp</a>
- *
- * The following examples/utilities require CGAL version &ge; 4.6.0:
- * \li <a href="_witness_complex_2strong_witness_persistence_8cpp-example.html">
- * Witness_complex/strong_witness_persistence.cpp</a>
- * \li <a href="_witness_complex_2weak_witness_persistence_8cpp-example.html">
- * Witness_complex/weak_witness_persistence.cpp</a>
- * \li <a href="_witness_complex_2example_strong_witness_complex_off_8cpp-example.html">
- * Witness_complex/example_strong_witness_complex_off.cpp</a>
- * \li <a href="_witness_complex_2example_witness_complex_off_8cpp-example.html">
- * Witness_complex/example_witness_complex_off.cpp</a>
- * \li <a href="_witness_complex_2example_witness_complex_sphere_8cpp-example.html">
- * Witness_complex/example_witness_complex_sphere.cpp</a>
- *
- * The following example requires CGAL version &ge; 4.7.0:
- * \li <a href="_alpha_complex_2_alpha_complex_from_off_8cpp-example.html">
- * Alpha_complex/Alpha_complex_from_off.cpp</a>
- * \li <a href="_alpha_complex_2_alpha_complex_from_points_8cpp-example.html">
- * Alpha_complex/Alpha_complex_from_points.cpp</a>
- * \li <a href="_alpha_complex_2alpha_complex_persistence_8cpp-example.html">
- * Alpha_complex/alpha_complex_persistence.cpp</a>
- * \li <a href="_alpha_complex_2periodic_alpha_complex_3d_persistence_8cpp-example.html">
- * Alpha_complex/periodic_alpha_complex_3d_persistence.cpp</a>
- * \li <a href="_persistent_cohomology_2custom_persistence_sort_8cpp-example.html">
- * Persistent_cohomology/custom_persistence_sort.cpp</a>
- *
- * The following example requires CGAL version &ge; 4.8.1:
- * \li <a href="_bottleneck_distance_2alpha_rips_persistence_bottleneck_distance_8cpp-example.html">
- * Bottleneck_distance/alpha_rips_persistence_bottleneck_distance.cpp.cpp</a>
- * \li <a href="_bottleneck_distance_2bottleneck_basic_example_8cpp-example.html">
- * Bottleneck_distance/bottleneck_basic_example.cpp</a>
- * \li <a href="_bottleneck_distance_2bottleneck_read_file_8cpp-example.html">
- * Bottleneck_distance/bottleneck_distance.cpp</a>
- * \li <a href="_spatial_searching_2example_spatial_searching_8cpp-example.html">
- * Spatial_searching/example_spatial_searching.cpp</a>
- * \li <a href="_subsampling_2example_choose_n_farthest_points_8cpp-example.html">
- * Subsampling/example_choose_n_farthest_points.cpp</a>
- * \li <a href="_subsampling_2example_custom_kernel_8cpp-example.html">
- * Subsampling/example_custom_kernel.cpp</a>
- * \li <a href="_subsampling_2example_pick_n_random_points_8cpp-example.html">
- * Subsampling/example_pick_n_random_points.cpp</a>
- * \li <a href="_subsampling_2example_sparsify_point_set_8cpp-example.html">
- * Subsampling/example_sparsify_point_set.cpp</a>
- * \li <a href="_tangential_complex_2example_basic_8cpp-example.html">
- * Tangential_complex/example_basic.cpp</a>
- * \li <a href="_tangential_complex_2example_with_perturb_8cpp-example.html">
- * Tangential_complex/example_with_perturb.cpp</a>
- *
- * \subsection eigen3 Eigen3
- * The \ref alpha_complex data structure and few examples requires
- * <a target="_blank" href="http://eigen.tuxfamily.org/">Eigen3</a> is a C++ template library for linear algebra:
- * matrices, vectors, numerical solvers, and related algorithms.
- *
- * The following examples/utilities require the <a target="_blank" href="http://eigen.tuxfamily.org/">Eigen3</a> and will not be
- * built if Eigen3 is not installed:
- * \li <a href="_alpha_complex_2_alpha_complex_from_off_8cpp-example.html">
- * Alpha_complex/Alpha_complex_from_off.cpp</a>
- * \li <a href="_alpha_complex_2_alpha_complex_from_points_8cpp-example.html">
- * Alpha_complex/Alpha_complex_from_points.cpp</a>
- * \li <a href="_alpha_complex_2alpha_complex_persistence_8cpp-example.html">
- * Alpha_complex/alpha_complex_persistence.cpp</a>
- * \li <a href="_alpha_complex_2periodic_alpha_complex_3d_persistence_8cpp-example.html">
- * Alpha_complex/periodic_alpha_complex_3d_persistence.cpp</a>
- * \li <a href="_bottleneck_distance_2alpha_rips_persistence_bottleneck_distance_8cpp-example.html">
- * Bottleneck_distance/alpha_rips_persistence_bottleneck_distance.cpp.cpp</a>
- * \li <a href="_persistent_cohomology_2custom_persistence_sort_8cpp-example.html">
- * Persistent_cohomology/custom_persistence_sort.cpp</a>
- * \li <a href="_spatial_searching_2example_spatial_searching_8cpp-example.html">
- * Spatial_searching/example_spatial_searching.cpp</a>
- * \li <a href="_subsampling_2example_choose_n_farthest_points_8cpp-example.html">
- * Subsampling/example_choose_n_farthest_points.cpp</a>
- * \li <a href="_subsampling_2example_custom_kernel_8cpp-example.html">
- * Subsampling/example_custom_kernel.cpp</a>
- * \li <a href="_subsampling_2example_pick_n_random_points_8cpp-example.html">
- * Subsampling/example_pick_n_random_points.cpp</a>
- * \li <a href="_subsampling_2example_sparsify_point_set_8cpp-example.html">
- * Subsampling/example_sparsify_point_set.cpp</a>
- * \li <a href="_tangential_complex_2example_basic_8cpp-example.html">
- * Tangential_complex/example_basic.cpp</a>
- * \li <a href="_tangential_complex_2example_with_perturb_8cpp-example.html">
- * Tangential_complex/example_with_perturb.cpp</a>
- * \li <a href="_witness_complex_2strong_witness_persistence_8cpp-example.html">
- * Witness_complex/strong_witness_persistence.cpp</a>
- * \li <a href="_witness_complex_2weak_witness_persistence_8cpp-example.html">
- * Witness_complex/weak_witness_persistence.cpp</a>
- * \li <a href="_witness_complex_2example_strong_witness_complex_off_8cpp-example.html">
- * Witness_complex/example_strong_witness_complex_off.cpp</a>
- * \li <a href="_witness_complex_2example_witness_complex_off_8cpp-example.html">
- * Witness_complex/example_witness_complex_off.cpp</a>
- * \li <a href="_witness_complex_2example_witness_complex_sphere_8cpp-example.html">
- * Witness_complex/example_witness_complex_sphere.cpp</a>
- *
- * \subsection tbb Threading Building Blocks
- * <a target="_blank" href="https://www.threadingbuildingblocks.org/">Intel&reg; TBB</a> lets you easily write parallel
- * C++ programs that take full advantage of multicore performance, that are portable and composable, and that have
- * future-proof scalability.
- *
- * Having Intel&reg; TBB installed is recommended to parallelize and accelerate some GUDHI computations.
- *
- * The following examples/utilities are using Intel&reg; TBB if installed:
- * \li <a href="_alpha_complex_2_alpha_complex_from_off_8cpp-example.html">
- * Alpha_complex/Alpha_complex_from_off.cpp</a>
- * \li <a href="_alpha_complex_2_alpha_complex_from_points_8cpp-example.html">
- * Alpha_complex/Alpha_complex_from_points.cpp</a>
- * \li <a href="_alpha_complex_2alpha_complex_3d_persistence_8cpp-example.html">
- * Alpha_complex/alpha_complex_3d_persistence.cpp</a>
- * \li <a href="_alpha_complex_2alpha_complex_persistence_8cpp-example.html">
- * Alpha_complex/alpha_complex_persistence.cpp</a>
- * \li <a href="_alpha_complex_2exact_alpha_complex_3d_persistence_8cpp-example.html">
- * Alpha_complex/exact_alpha_complex_3d_persistence.cpp</a>
- * \li <a href="_alpha_complex_2periodic_alpha_complex_3d_persistence_8cpp-example.html">
- * Alpha_complex/periodic_alpha_complex_3d_persistence.cpp</a>
- * \li <a href="_alpha_complex_2weighted_alpha_complex_3d_persistence_8cpp-example.html">
- * Alpha_complex/weighted_alpha_complex_3d_persistence.cpp</a>
- * \li <a href="_bitmap_cubical_complex_2_bitmap_cubical_complex_8cpp-example.html">
- * Bitmap_cubical_complex/cubical_complex_persistence.cpp</a>
- * \li <a href="_bitmap_cubical_complex_2_bitmap_cubical_complex_periodic_boundary_conditions_8cpp-example.html">
- * Bitmap_cubical_complex/periodic_cubical_complex_persistence.cpp</a>
- * \li <a href="_bitmap_cubical_complex_2_random_bitmap_cubical_complex_8cpp-example.html">
- * Bitmap_cubical_complex/Random_bitmap_cubical_complex.cpp</a>
- * \li <a href="_simplex_tree_2simple_simplex_tree_8cpp-example.html">
- * Simplex_tree/simple_simplex_tree.cpp</a>
- * \li <a href="_simplex_tree_2example_alpha_shapes_3_simplex_tree_from_off_file_8cpp-example.html">
- * Simplex_tree/example_alpha_shapes_3_simplex_tree_from_off_file.cpp</a>
- * \li <a href="_simplex_tree_2simplex_tree_from_cliques_of_graph_8cpp-example.html">
- * Simplex_tree/simplex_tree_from_cliques_of_graph.cpp</a>
- * \li <a href="_simplex_tree_2graph_expansion_with_blocker_8cpp-example.html">
- * Simplex_tree/graph_expansion_with_blocker.cpp</a>
- * \li <a href="_persistent_cohomology_2alpha_complex_3d_persistence_8cpp-example.html">
- * Persistent_cohomology/alpha_complex_3d_persistence.cpp</a>
- * \li <a href="_persistent_cohomology_2alpha_complex_persistence_8cpp-example.html">
- * Persistent_cohomology/alpha_complex_persistence.cpp</a>
- * \li <a href="_persistent_cohomology_2rips_persistence_via_boundary_matrix_8cpp-example.html">
- * Persistent_cohomology/rips_persistence_via_boundary_matrix.cpp</a>
- * \li <a href="_persistent_cohomology_2persistence_from_file_8cpp-example.html">
- * Persistent_cohomology/persistence_from_file.cpp</a>
- * \li <a href="_persistent_cohomology_2persistence_from_simple_simplex_tree_8cpp-example.html">
- * Persistent_cohomology/persistence_from_simple_simplex_tree.cpp</a>
- * \li <a href="_persistent_cohomology_2plain_homology_8cpp-example.html">
- * Persistent_cohomology/plain_homology.cpp</a>
- * \li <a href="_persistent_cohomology_2rips_multifield_persistence_8cpp-example.html">
- * Persistent_cohomology/rips_multifield_persistence.cpp</a>
- * \li <a href="_persistent_cohomology_2rips_persistence_step_by_step_8cpp-example.html">
- * Persistent_cohomology/rips_persistence_step_by_step.cpp</a>
- * \li <a href="_persistent_cohomology_2exact_alpha_complex_3d_persistence_8cpp-example.html">
- * Persistent_cohomology/exact_alpha_complex_3d_persistence.cpp</a>
- * \li <a href="_persistent_cohomology_2weighted_alpha_complex_3d_persistence_8cpp-example.html">
- * Persistent_cohomology/weighted_alpha_complex_3d_persistence.cpp</a>
- * \li <a href="_persistent_cohomology_2custom_persistence_sort_8cpp-example.html">
- * Persistent_cohomology/custom_persistence_sort.cpp</a>
- * \li <a href="_rips_complex_2example_one_skeleton_rips_from_points_8cpp-example.html">
- * Rips_complex/example_one_skeleton_rips_from_points.cpp</a>
- * \li <a href="_rips_complex_2example_rips_complex_from_off_file_8cpp-example.html">
- * Rips_complex/example_rips_complex_from_off_file.cpp</a>
- * \li <a href="_rips_complex_2rips_distance_matrix_persistence_8cpp-example.html">
- * Rips_complex/rips_distance_matrix_persistence.cpp</a>
- * \li <a href="_rips_complex_2rips_persistence_8cpp-example.html">
- * Rips_complex/rips_persistence.cpp</a>
- * \li <a href="_witness_complex_2strong_witness_persistence_8cpp-example.html">
- * Witness_complex/strong_witness_persistence.cpp</a>
- * \li <a href="_witness_complex_2weak_witness_persistence_8cpp-example.html">
- * Witness_complex/weak_witness_persistence.cpp</a>
- * \li <a href="_witness_complex_2example_nearest_landmark_table_8cpp-example.html">
- * Witness_complex/example_nearest_landmark_table.cpp</a>
- *
- * \section Contributions Bug reports and contributions
- * Please help us improving the quality of the GUDHI library. You may report bugs or suggestions to:
- * \verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim
- *
- * GUDHI is open to external contributions. If you want to join our development team, please contact us.
- *
-*/
-
-/*! \page Citation Acknowledging the GUDHI library
- * We kindly ask users to cite the GUDHI library as appropriately as possible in their papers, and to mention the use
- * of the GUDHI library on the web pages of their projects using GUDHI and provide us with links to these web pages.
- * Feel free to contact us in case you have any question or remark on this topic.
- *
- * We provide \ref GudhiBibtex entries for the modules of the User and Reference Manual, as well as for publications
- * directly related to the GUDHI library.
- * \section GudhiBibtex GUDHI bibtex
- * \verbinclude biblio/how_to_cite_gudhi.bib
-*/
-
-// List of GUDHI examples - Doxygen needs at least a file tag to analyse comments
-/*! @file Examples
- * @example Alpha_complex/Alpha_complex_from_off.cpp
- * @example Alpha_complex/Alpha_complex_from_points.cpp
- * @example Alpha_complex/alpha_complex_3d_persistence.cpp
- * @example Alpha_complex/alpha_complex_persistence.cpp
- * @example Alpha_complex/exact_alpha_complex_3d_persistence.cpp
- * @example Alpha_complex/periodic_alpha_complex_3d_persistence.cpp
- * @example Alpha_complex/weighted_alpha_complex_3d_persistence.cpp
- * @example Bottleneck_distance/alpha_rips_persistence_bottleneck_distance.cpp
- * @example Bottleneck_distance/bottleneck_basic_example.cpp
- * @example Bottleneck_distance/bottleneck_distance.cpp
- * @example Bitmap_cubical_complex/cubical_complex_persistence.cpp
- * @example Bitmap_cubical_complex/periodic_cubical_complex_persistence.cpp
- * @example Bitmap_cubical_complex/Random_bitmap_cubical_complex.cpp
- * @example common/example_CGAL_3D_points_off_reader.cpp
- * @example common/example_CGAL_points_off_reader.cpp
- * @example Contraction/Garland_heckbert.cpp
- * @example Contraction/Rips_contraction.cpp
- * @example Persistent_cohomology/rips_persistence_via_boundary_matrix.cpp
- * @example Persistent_cohomology/persistence_from_file.cpp
- * @example Persistent_cohomology/persistence_from_simple_simplex_tree.cpp
- * @example Persistent_cohomology/plain_homology.cpp
- * @example Persistent_cohomology/rips_multifield_persistence.cpp
- * @example Persistent_cohomology/custom_persistence_sort.cpp
- * @example Persistent_cohomology/rips_persistence_step_by_step.cpp
- * @example Rips_complex/example_one_skeleton_rips_from_points.cpp
- * @example Rips_complex/example_rips_complex_from_off_file.cpp
- * @example Rips_complex/rips_persistence.cpp
- * @example Rips_complex/rips_distance_matrix_persistence.cpp
- * @example Simplex_tree/mini_simplex_tree.cpp
- * @example Simplex_tree/simple_simplex_tree.cpp
- * @example Simplex_tree/example_alpha_shapes_3_simplex_tree_from_off_file.cpp
- * @example Simplex_tree/simplex_tree_from_cliques_of_graph.cpp
- * @example Simplex_tree/graph_expansion_with_blocker.cpp
- * @example Skeleton_blocker/Skeleton_blocker_from_simplices.cpp
- * @example Skeleton_blocker/Skeleton_blocker_iteration.cpp
- * @example Skeleton_blocker/Skeleton_blocker_link.cpp
- * @example Spatial_searching/example_spatial_searching.cpp
- * @example Subsampling/example_choose_n_farthest_points.cpp
- * @example Subsampling/example_custom_kernel.cpp
- * @example Subsampling/example_pick_n_random_points.cpp
- * @example Subsampling/example_sparsify_point_set.cpp
- * @example Tangential_complex/example_basic.cpp
- * @example Tangential_complex/example_with_perturb.cpp
- * @example Witness_complex/example_nearest_landmark_table.cpp
- * @example Witness_complex/example_strong_witness_complex_off.cpp
- * @example Witness_complex/example_witness_complex_off.cpp
- * @example Witness_complex/example_witness_complex_sphere.cpp
- * @example Witness_complex/weak_witness_persistence.cpp
- * @example Witness_complex/strong_witness_persistence.cpp
- */
-
diff --git a/src/common/include/gudhi/Unitary_tests_utils.h b/src/common/include/gudhi/Unitary_tests_utils.h
index 7ae5d356..8394a062 100644
--- a/src/common/include/gudhi/Unitary_tests_utils.h
+++ b/src/common/include/gudhi/Unitary_tests_utils.h
@@ -25,6 +25,7 @@
#include <boost/test/unit_test.hpp>
#include <iostream>
+#include <limits> // for std::numeric_limits<>
template<typename FloatingType >
void GUDHI_TEST_FLOAT_EQUALITY_CHECK(FloatingType a, FloatingType b,
diff --git a/src/common/utilities/README b/src/common/utilities/README
deleted file mode 100644
index 18fa8cc4..00000000
--- a/src/common/utilities/README
+++ /dev/null
@@ -1,19 +0,0 @@
-# Pointset generator #
-
-## `off_file_from_shape_generator` ##
-
-Generates a pointset and save it in an OFF file. Command-line is:
-`off_file_from_shape_generator on|in sphere|cube|curve|torus|klein <filename> <num_points> <dimension> <parameter1> <parameter2>...`
-
-Warning: "on cube" generator is not available!
-
-Examples:
-
-* Generate an onSphere.off file with 1000 points randomized on a sphere of dimension 3 and radius 15.2:
-`off_file_from_shape_generator on sphere onSphere.off 1000 3 15.2`
-
-* Generate an inSphere.off file with 100 points randomized in a sphere of dimension 2 (circle) and radius 1.0 (default):
-`off_file_from_shape_generator in sphere inSphere.off 100 2`
-
-* Generates a inCube.off file with 10000 points randomized in a cube of dimension 3 and side 5.8:
-`off_file_from_shape_generator in cube inCube.off 10000 3 5.8`
diff --git a/src/common/utilities/pointsetgenerator.md b/src/common/utilities/pointsetgenerator.md
new file mode 100644
index 00000000..284715d4
--- /dev/null
+++ b/src/common/utilities/pointsetgenerator.md
@@ -0,0 +1,33 @@
+
+
+# common #
+
+## off_file_from_shape_generator ##
+
+Generates a point set and saves it in an OFF file. The command line is:
+
+```
+off_file_from_shape_generator on|in sphere|cube|curve|torus|klein <filename> <num_points> <dimension> <parameter1> <parameter2>...
+```
+
+Warning: the "on cube" generator is not available!
+
+**Examples**
+
+```
+off_file_from_shape_generator on sphere onSphere.off 1000 3 15.2
+```
+
+* Generates an onSphere.off file with 1000 points randomized on a sphere of dimension 3 and radius 15.2.
+
+```
+off_file_from_shape_generator in sphere inSphere.off 100 2
+```
+
+* Generates an inSphere.off file with 100 points randomized in a sphere of dimension 2 (circle) and radius 1.0 (default).
+
+```
+off_file_from_shape_generator in cube inCube.off 10000 3 5.8
+```
+
+* Generates an inCube.off file with 10000 points randomized in a cube of dimension 3 and side 5.8.
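
A file produced by the generator can be read back with GUDHI's `Points_off_reader`, as done by the utilities above. A minimal sketch, assuming the hypothetical `onSphere.off` output of the first example and using `std::vector<double>` as the point type (as in `common/example_vector_double_points_off_reader.cpp`):

```
#include <gudhi/Points_off_io.h>

#include <iostream>
#include <vector>

using Point_d = std::vector<double>;

int main() {
  // "onSphere.off" is the hypothetical output of the first generator example above.
  Gudhi::Points_off_reader<Point_d> off_reader("onSphere.off");
  if (!off_reader.is_valid()) {
    std::cerr << "Unable to read file onSphere.off\n";
    return 1;
  }
  std::vector<Point_d> points = off_reader.get_point_cloud();
  std::cout << "Read " << points.size() << " points of dimension "
            << (points.empty() ? 0 : points[0].size()) << "\n";
  return 0;
}
```
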
diff --git a/src/cython/doc/_templates/layout.html b/src/cython/doc/_templates/layout.html
index 243f33c6..8e4eba40 100644
--- a/src/cython/doc/_templates/layout.html
+++ b/src/cython/doc/_templates/layout.html
@@ -198,6 +198,8 @@
<ul class="dropdown">
<li><a href="http://gudhi.gforge.inria.fr/licensing/">Licensing</a></li>
<li><a href="https://gforge.inria.fr/frs/?group_id=3865" target="_blank">Get the sources</a></li>
+ <li><a href="https://gforge.inria.fr/frs/download.php/file/37113/GUDHI_2.0.0_OSX_UTILS.beta.tar.gz" target="_blank">Utils for Mac OSx</a></li>
+ <li><a href="https://gforge.inria.fr/frs/download.php/file/37112/GUDHI_2.0.0_WIN64_UTILS.beta.zip" target="_blank">Utils for Win x64</a></li>
</ul>
</li>
<li class="divider"></li>
@@ -208,6 +210,8 @@
<li><a href="http://gudhi.gforge.inria.fr/doc/latest/installation.html">C++ installation manual</a></li>
<li><a href="http://gudhi.gforge.inria.fr/python/latest/">Python documentation</a></li>
<li><a href="http://gudhi.gforge.inria.fr/python/latest/installation.html">Python installation manual</a></li>
+ <li><a href="http://gudhi.gforge.inria.fr/utils/">Utilities</a></li>
+ <li><a href="http://bertrand.michel.perso.math.cnrs.fr/Enseignements/TDA-Gudhi-Python.html" target="_blank">Tutorial</a></li>
</ul>
</li>
<li class="divider"></li>
@@ -255,7 +259,7 @@
{%- if hasdoc('copyright') %}
{% trans path=pathto('copyright'), copyright=copyright|e %}&copy; <a href="{{ path }}">Copyright</a> {{ copyright }}.{% endtrans %}
{%- else %}
- {% trans copyright=copyright|e %}&copy; Copyright {{ copyright }}.{% endtrans %}
+ {% trans copyright=copyright|e %} {{ copyright }}.{% endtrans %}
{%- endif %}
{%- endif %}
{%- if last_updated %}
diff --git a/src/cython/doc/conf.py b/src/cython/doc/conf.py
index 19a880d4..a13c9751 100755
--- a/src/cython/doc/conf.py
+++ b/src/cython/doc/conf.py
@@ -62,7 +62,7 @@ import gudhi
# General information about the project.
project = gudhi.__name__
-copyright = gudhi.__copyright__
+copyright = gudhi.__copyright__ + ' - ' + gudhi.__license__
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
@@ -158,7 +158,7 @@ html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
-#html_last_updated_fmt = '%b %d, %Y'
+html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
diff --git a/src/cython/setup.py.in b/src/cython/setup.py.in
index fefa36bb..c767e93d 100644
--- a/src/cython/setup.py.in
+++ b/src/cython/setup.py.in
@@ -23,7 +23,7 @@ from Cython.Build import cythonize
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
-__author__ = "Vincent Rouvreau"
+__author__ = "GUDHI Editorial Board"
__copyright__ = "Copyright (C) 2016 INRIA"
__license__ = "GPL v3"
@@ -41,7 +41,7 @@ gudhi = Extension(
setup(
name = 'gudhi',
- author='Vincent Rouvreau',
+ author='GUDHI Editorial Board',
author_email='gudhi-contact@lists.gforge.inria.fr',
version='@GUDHI_VERSION@',
url='http://gudhi.gforge.inria.fr/',