-rw-r--r--  biblio/how_to_cite_gudhi.bib                                         |  44
-rwxr-xr-x  scripts/generate_version.sh                                          |  10
-rw-r--r--  src/Alpha_complex/example/Alpha_complex_from_points.cpp              |   5
-rw-r--r--  src/Alpha_complex/include/gudhi/Alpha_complex.h                      |  33
-rw-r--r--  src/Alpha_complex/test/Alpha_complex_unit_test.cpp                   |   3
-rw-r--r--  src/CMakeLists.txt                                                    |   2
-rw-r--r--  src/Contraction/include/gudhi/Edge_contraction.h                     |   4
-rw-r--r--  src/Doxyfile                                                          |  18
-rw-r--r--  src/Persistent_cohomology/example/CMakeLists.txt                     |   4
-rw-r--r--  src/Persistent_cohomology/example/performance_rips_persistence.cpp   |  81
-rw-r--r--  src/Persistent_cohomology/example/plain_homology.cpp                 |  80
-rw-r--r--  src/Simplex_tree/concept/SimplexTreeOptions.h                        |   6
-rw-r--r--  src/Simplex_tree/include/gudhi/Simplex_tree.h                        |  16
-rw-r--r--  src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h                |   2
-rw-r--r--  src/Skeleton_blocker/include/gudhi/Skeleton_blocker_complex.h        |   4
-rw-r--r--  src/common/doc/main_page.h                                           |  186
-rw-r--r--  src/common/test/dtoffrw_unit_test.cpp                                |   3
17 files changed, 363 insertions, 138 deletions
diff --git a/biblio/how_to_cite_gudhi.bib b/biblio/how_to_cite_gudhi.bib
new file mode 100644
index 00000000..851dd5d9
--- /dev/null
+++ b/biblio/how_to_cite_gudhi.bib
@@ -0,0 +1,44 @@
+@book{gudhi:urm
+, title = "{GUDHI} User and Reference Manual"
+, author = "{The GUDHI Project}"
+, publisher = "{GUDHI Editorial Board}"
+, year = 2015
+, url = "http://gudhi.gforge.inria.fr/doc/latest/"
+}
+
+@incollection{gudhi:FilteredComplexes
+, author = "Cl\'ement Maria"
+, title = "Filtered Complexes"
+, publisher = "{GUDHI Editorial Board}"
+, booktitle = "{GUDHI} User and Reference Manual"
+, url = "http://gudhi.gforge.inria.fr/doc/latest/group__simplex__tree.html"
+, year = 2015
+}
+
+@incollection{gudhi:PersistentCohomology
+, author = "Cl\'ement Maria"
+, title = "Persistent Cohomology"
+, publisher = "{GUDHI Editorial Board}"
+, booktitle = "{GUDHI} User and Reference Manual"
+, url = "http://gudhi.gforge.inria.fr/doc/latest/group__persistent__cohomology.html"
+, year = 2015
+}
+
+@incollection{gudhi:Contraction
+, author = "David Salinas"
+, title = "Contraction"
+, publisher = "{GUDHI Editorial Board}"
+, booktitle = "{GUDHI} User and Reference Manual"
+, url = "http://gudhi.gforge.inria.fr/doc/latest/group__contr.html"
+, year = 2015
+}
+
+@incollection{gudhi:Skeleton-Blocker
+, author = "David Salinas"
+, title = "Skeleton-Blocker"
+, publisher = "{GUDHI Editorial Board}"
+, booktitle = "{GUDHI} User and Reference Manual"
+, url = "http://gudhi.gforge.inria.fr/doc/latest/group__skbl.html"
+, year = 2015
+}
+
diff --git a/scripts/generate_version.sh b/scripts/generate_version.sh
index 43a54c1c..323396dc 100755
--- a/scripts/generate_version.sh
+++ b/scripts/generate_version.sh
@@ -61,12 +61,13 @@ cp $ROOT_DIR/GUDHIVersion.cmake.in $VERSION_DIR
PACKAGE_INC_DIR="/include"
#PACKAGE_SRC_DIR="/source"
PACKAGE_EX_DIR="/example"
+PACKAGE_CONCEPT_DIR="/concept"
PACKAGE_DOC_DIR="/doc"
for package in `ls $ROOT_DIR/src/`
do
- echo $package
- if [ -d "$ROOT_DIR/src/$package" ]
+ if [ -d "$ROOT_DIR/src/$package" ] && [ $package != "Bottleneck" ]
then
+ echo $package
if [ "$package" == "cmake" ]
then
# SPECIFIC FOR CMAKE MODULES
@@ -91,6 +92,11 @@ do
mkdir -p $VERSION_DIR$PACKAGE_EX_DIR/$package
cp -R $ROOT_DIR/src/$package$PACKAGE_EX_DIR/* $VERSION_DIR$PACKAGE_EX_DIR/$package
fi
+ if [ -d "$ROOT_DIR/src/$package$PACKAGE_CONCEPT_DIR" ]
+ then
+ mkdir -p $VERSION_DIR$PACKAGE_CONCEPT_DIR/$package
+ cp -R $ROOT_DIR/src/$package$PACKAGE_CONCEPT_DIR/* $VERSION_DIR$PACKAGE_CONCEPT_DIR/$package
+ fi
if [ -d "$ROOT_DIR/src/$package$PACKAGE_DOC_DIR" ]
then
mkdir -p $VERSION_DIR$PACKAGE_DOC_DIR/$package
diff --git a/src/Alpha_complex/example/Alpha_complex_from_points.cpp b/src/Alpha_complex/example/Alpha_complex_from_points.cpp
index b160d702..e2610dbd 100644
--- a/src/Alpha_complex/example/Alpha_complex_from_points.cpp
+++ b/src/Alpha_complex/example/Alpha_complex_from_points.cpp
@@ -1,8 +1,8 @@
-#include <stdio.h>
#include <stdlib.h>
#include <CGAL/Delaunay_triangulation.h>
#include <CGAL/Epick_d.h>
+#include <iostream>
#include <string>
#include <vector>
@@ -33,8 +33,7 @@ int main(int argc, char **argv) {
// Init of an alpha complex from the list of points
// ----------------------------------------------------------------------------
double max_alpha_square_value = 1e10;
- Gudhi::alphacomplex::Alpha_complex<Kernel> alpha_complex_from_points(3, points.size(), points.begin(), points.end(),
- max_alpha_square_value);
+ Gudhi::alphacomplex::Alpha_complex<Kernel> alpha_complex_from_points(3, points, max_alpha_square_value);
// ----------------------------------------------------------------------------
// Display information about the alpha complex
diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex.h b/src/Alpha_complex/include/gudhi/Alpha_complex.h
index d8568be0..6b47ace7 100644
--- a/src/Alpha_complex/include/gudhi/Alpha_complex.h
+++ b/src/Alpha_complex/include/gudhi/Alpha_complex.h
@@ -27,7 +27,6 @@
#include <gudhi/graph_simplicial_complex.h>
#include <gudhi/Simplex_tree.h>
-#include <stdio.h>
#include <stdlib.h>
#include <math.h> // isnan, fmax
@@ -107,7 +106,8 @@ class Alpha_complex : public Simplex_tree<> {
*
* @param[in] off_file_name OFF file [path and] name.
*/
- Alpha_complex(const std::string& off_file_name, Filtration_value max_alpha_square)
+ Alpha_complex(const std::string& off_file_name,
+ Filtration_value max_alpha_square = std::numeric_limits<Filtration_value>::infinity())
: triangulation_(nullptr) {
Gudhi::Delaunay_triangulation_off_reader<Delaunay_triangulation> off_reader(off_file_name);
if (!off_reader.is_valid()) {
@@ -123,7 +123,8 @@ class Alpha_complex : public Simplex_tree<> {
*
* @param[in] triangulation_ptr Pointer on a Delaunay triangulation.
*/
- Alpha_complex(Delaunay_triangulation* triangulation_ptr, Filtration_value max_alpha_square)
+ Alpha_complex(Delaunay_triangulation* triangulation_ptr,
+ Filtration_value max_alpha_square = std::numeric_limits<Filtration_value>::infinity())
: triangulation_(triangulation_ptr) {
set_filtration(max_alpha_square);
init();
@@ -134,18 +135,22 @@ class Alpha_complex : public Simplex_tree<> {
* the Alpha_complex.
*
* @param[in] dimension Dimension of points to be inserted.
- * @param[in] size Number of points to be inserted.
- * @param[in] firstPoint Iterator on the first point to be inserted.
- * @param[in] last Point Iterator on the last point to be inserted.
+ * @param[in] points Range of points to triangulate. Points must be in Kernel::Point_d
+ *
+ * The type InputPointRange must be a range for which std::begin and
+ * std::end return input iterators on a Kernel::Point_d.
*/
- template<typename ForwardIterator >
- Alpha_complex(int dimension, size_type size, ForwardIterator firstPoint, ForwardIterator lastPoint,
- Filtration_value max_alpha_square)
+ template<typename InputPointRange >
+ Alpha_complex(int dimension, const InputPointRange& points,
+ Filtration_value max_alpha_square = std::numeric_limits<Filtration_value>::infinity())
: triangulation_(nullptr) {
triangulation_ = new Delaunay_triangulation(dimension);
- size_type inserted = triangulation_->insert(firstPoint, lastPoint);
- if (inserted != size) {
- std::cerr << "Alpha_complex - insertion failed " << inserted << " != " << size << std::endl;
+ auto first = std::begin(points);
+ auto last = std::end(points);
+
+ size_type inserted = triangulation_->insert(first, last);
+ if (inserted != (last -first)) {
+ std::cerr << "Alpha_complex - insertion failed " << inserted << " != " << (last -first) << std::endl;
exit(-1); // ----- >>
}
set_filtration(max_alpha_square);
@@ -292,12 +297,12 @@ class Alpha_complex : public Simplex_tree<> {
// ### Foreach Tau face of Sigma
for (auto f_boundary : boundary_simplex_range(f_simplex)) {
#ifdef DEBUG_TRACES
- std::cout << " | --------------------------------------------------" << std::endl;
+ std::cout << " | --------------------------------------------------\n";
std::cout << " | Tau ";
for (auto vertex : simplex_vertex_range(f_boundary)) {
std::cout << vertex << " ";
}
- std::cout << "is a face of Sigma" << std::endl;
+ std::cout << "is a face of Sigma\n";
std::cout << " | isnan(filtration(Tau)=" << isnan(filtration(f_boundary)) << std::endl;
#endif // DEBUG_TRACES
// ### If filt(Tau) is not NaN
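For reference, a minimal usage sketch of the new range-based Alpha_complex constructor introduced above. It mirrors the Alpha_complex_from_points example updated in this changeset; the point coordinates and the Epick_d kernel typedef are illustrative, not part of this diff.

#include <gudhi/Alpha_complex.h>
#include <CGAL/Delaunay_triangulation.h>
#include <CGAL/Epick_d.h>
#include <vector>

typedef CGAL::Epick_d< CGAL::Dimension_tag<3> > Kernel;
typedef Kernel::Point_d Point;

int main() {
  // Any range whose std::begin/std::end return input iterators on Kernel::Point_d works here.
  std::vector<Point> points;
  std::vector<double> coords = {1.0, 1.0, 0.0};
  points.push_back(Point(coords.begin(), coords.end()));
  coords = {0.0, 1.0, 2.0};
  points.push_back(Point(coords.begin(), coords.end()));
  coords = {1.0, 0.0, 1.0};
  points.push_back(Point(coords.begin(), coords.end()));

  double max_alpha_square_value = 1e10;
  // Dimension, point range, and (now optional) maximal squared alpha value.
  Gudhi::alphacomplex::Alpha_complex<Kernel> alpha_complex_from_points(3, points, max_alpha_square_value);
  return 0;
}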
diff --git a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp
index 5633d2c7..3aa835ec 100644
--- a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp
+++ b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp
@@ -141,8 +141,7 @@ BOOST_AUTO_TEST_CASE(Alpha_complex_from_points) {
// Init of an alpha complex from the list of points
// ----------------------------------------------------------------------------
double max_alpha_square_value = 1e10;
- Gudhi::alphacomplex::Alpha_complex<Kernel_s> alpha_complex_from_points(3, points.size(), points.begin(), points.end(),
- max_alpha_square_value);
+ Gudhi::alphacomplex::Alpha_complex<Kernel_s> alpha_complex_from_points(3, points, max_alpha_square_value);
std::cout << "========== Alpha_complex_from_points ==========" << std::endl;
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index 590ae373..cd7f4991 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -77,7 +77,7 @@ else()
# Install the GUDHIConfig.cmake and GUDHIConfigVersion.cmake
install(FILES
- "${PROJECT_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/GUDHIConfig.cmake"
+ "${PROJECT_BINARY_DIR}/GUDHIConfig.cmake"
"${PROJECT_BINARY_DIR}/GUDHIConfigVersion.cmake"
DESTINATION share/gudhi)
diff --git a/src/Contraction/include/gudhi/Edge_contraction.h b/src/Contraction/include/gudhi/Edge_contraction.h
index dfce8d1b..349bb7d8 100644
--- a/src/Contraction/include/gudhi/Edge_contraction.h
+++ b/src/Contraction/include/gudhi/Edge_contraction.h
@@ -37,7 +37,7 @@ namespace Gudhi {
namespace contraction {
-/** \defgroup contr Contraction
+/** \defgroup contr Edge contraction
\author David Salinas
@@ -226,7 +226,7 @@ Time to simplify and enumerate simplices:
\copyright GNU General Public License v3.
-\verbatim Contact: David Salinas, david.salinas@inria.fr \endverbatim
+\verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim
*/
/** @} */ // end defgroup
} // namespace contraction
diff --git a/src/Doxyfile b/src/Doxyfile
index 90884779..81f55e29 100644
--- a/src/Doxyfile
+++ b/src/Doxyfile
@@ -38,7 +38,7 @@ PROJECT_NAME = "Gudhi"
# could be handy for archiving the generated documentation or if some version
# control system is used.
-PROJECT_NUMBER = "1.1.0"
+PROJECT_NUMBER = "1.2.0"
# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a
@@ -672,7 +672,8 @@ LAYOUT_FILE =
# search path. Do not use file names with spaces, bibtex cannot handle them. See
# also \cite for info how to create references.
-CITE_BIB_FILES = biblio/bibliography.bib
+CITE_BIB_FILES = biblio/bibliography.bib \
+ biblio/how_to_cite_gudhi.bib
#---------------------------------------------------------------------------
# Configuration options related to warning and progress messages
@@ -811,7 +812,8 @@ EXCLUDE_SYMBOLS =
# that contain example code fragments that are included (see the \include
# command).
-EXAMPLE_PATH = example/common/ \
+EXAMPLE_PATH = biblio/ \
+ example/common/ \
example/Alpha_complex/
# If the value of the EXAMPLE_PATH tag contains directories, you can use the
@@ -832,16 +834,10 @@ EXAMPLE_RECURSIVE = NO
# that contain images that are to be included in the documentation (see the
# \image command).
-<<<<<<< .working
IMAGE_PATH = doc/Skeleton_blocker/ \
doc/Alpha_complex/ \
doc/common/ \
doc/Contraction/
-=======
-IMAGE_PATH = doc/Skeleton_blocker/ \
- doc/common/ \
- doc/Contraction/
->>>>>>> .merge-right.r851
# The INPUT_FILTER tag can be used to specify a program that doxygen should
@@ -1346,7 +1342,7 @@ ECLIPSE_DOC_ID = org.doxygen.Project
# The default value is: NO.
# This tag requires that the tag GENERATE_HTML is set to YES.
-DISABLE_INDEX = NO
+DISABLE_INDEX = YES
# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index
# structure should be generated to display hierarchical information. If the tag
@@ -1363,7 +1359,7 @@ DISABLE_INDEX = NO
# The default value is: NO.
# This tag requires that the tag GENERATE_HTML is set to YES.
-GENERATE_TREEVIEW = NO
+GENERATE_TREEVIEW = YES
# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that
# doxygen will group on one line in the generated HTML documentation.
diff --git a/src/Persistent_cohomology/example/CMakeLists.txt b/src/Persistent_cohomology/example/CMakeLists.txt
index 74c2e778..8caf7d8b 100644
--- a/src/Persistent_cohomology/example/CMakeLists.txt
+++ b/src/Persistent_cohomology/example/CMakeLists.txt
@@ -5,6 +5,10 @@ project(GUDHIExPersCohom)
add_definitions( -DBOOST_ALL_NO_LIB )
add_definitions( -DBOOST_ALL_DYN_LINK )
+add_executable(plain_homology plain_homology.cpp)
+target_link_libraries(plain_homology ${Boost_SYSTEM_LIBRARY})
+add_test(plain_homology ${CMAKE_CURRENT_BINARY_DIR}/plain_homology)
+
add_executable(persistence_from_simple_simplex_tree persistence_from_simple_simplex_tree.cpp)
target_link_libraries(persistence_from_simple_simplex_tree ${Boost_SYSTEM_LIBRARY})
add_test(persistence_from_simple_simplex_tree ${CMAKE_CURRENT_BINARY_DIR}/persistence_from_simple_simplex_tree 1 0)
diff --git a/src/Persistent_cohomology/example/performance_rips_persistence.cpp b/src/Persistent_cohomology/example/performance_rips_persistence.cpp
index 0e912d57..fc48d6b1 100644
--- a/src/Persistent_cohomology/example/performance_rips_persistence.cpp
+++ b/src/Persistent_cohomology/example/performance_rips_persistence.cpp
@@ -63,10 +63,11 @@ void timing_persistence(FilteredComplex & cpx
*/
int main(int argc, char * argv[]) {
std::chrono::time_point<std::chrono::system_clock> start, end;
- int enlapsed_sec;
+ int elapsed_sec;
+ {
- std::string filepoints = "../examples/Kl.txt";
- Filtration_value threshold = 0.3;
+ std::string filepoints = "../../../data/points/Kl.txt";
+ Filtration_value threshold = 0.27;
int dim_max = 3;
int p = 2;
int q = 1223;
@@ -81,8 +82,8 @@ int main(int argc, char * argv[]) {
Graph_t prox_graph = compute_proximity_graph(points, threshold
, euclidean_distance<Point_t>);
end = std::chrono::system_clock::now();
- enlapsed_sec = std::chrono::duration_cast<std::chrono::seconds>(end - start).count();
- std::cout << "Compute Rips graph in " << enlapsed_sec << " sec.\n";
+ elapsed_sec = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
+ std::cout << "Compute Rips graph in " << elapsed_sec << " ms.\n";
// Construct the Rips complex in a Simplex Tree
Simplex_tree<> st;
@@ -94,8 +95,8 @@ int main(int argc, char * argv[]) {
st.expansion(dim_max);
end = std::chrono::system_clock::now();
- enlapsed_sec = std::chrono::duration_cast<std::chrono::seconds>(end - start).count();
- std::cout << "Compute Rips complex in " << enlapsed_sec << " sec.\n";
+ elapsed_sec = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
+ std::cout << "Compute Rips complex in " << elapsed_sec << " ms.\n";
std::cout << " - dimension = " << st.dimension() << std::endl;
std::cout << " - number of simplices = " << st.num_simplices() << std::endl;
@@ -103,15 +104,26 @@ int main(int argc, char * argv[]) {
start = std::chrono::system_clock::now();
st.initialize_filtration();
end = std::chrono::system_clock::now();
- enlapsed_sec = std::chrono::duration_cast<std::chrono::seconds>(end - start).count();
- std::cout << "Order the simplices of the filtration in " << enlapsed_sec << " sec.\n";
+ elapsed_sec = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
+ std::cout << "Order the simplices of the filtration in " << elapsed_sec << " ms.\n";
+
+ // Copy the keys inside the simplices
+ start = std::chrono::system_clock::now();
+ {
+ int count = 0;
+ for (auto sh : st.filtration_simplex_range())
+ st.assign_key(sh, count++);
+ }
+ end = std::chrono::system_clock::now();
+ elapsed_sec = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
+ std::cout << "Copied the keys inside the simplices in " << elapsed_sec << " ms.\n";
// Convert the simplex tree into a hasse diagram
start = std::chrono::system_clock::now();
Hasse_complex<> hcpx(st);
end = std::chrono::system_clock::now();
- enlapsed_sec = std::chrono::duration_cast<std::chrono::seconds>(end - start).count();
- std::cout << "Convert the simplex tree into a Hasse diagram in " << enlapsed_sec << " sec.\n";
+ elapsed_sec = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
+ std::cout << "Convert the simplex tree into a Hasse diagram in " << elapsed_sec << " ms.\n";
std::cout << "Timings when using a simplex tree: \n";
@@ -124,6 +136,11 @@ int main(int argc, char * argv[]) {
timing_persistence(hcpx, q);
timing_persistence(hcpx, p, q);
+ start = std::chrono::system_clock::now();
+ }
+ end = std::chrono::system_clock::now();
+ elapsed_sec = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
+ std::cout << "Running the complex destructors in " << elapsed_sec << " ms.\n";
return 0;
}
@@ -132,19 +149,32 @@ void
timing_persistence(FilteredComplex & cpx
, int p) {
std::chrono::time_point<std::chrono::system_clock> start, end;
- int enlapsed_sec;
-
+ int elapsed_sec;
+ {
+ start = std::chrono::system_clock::now();
Persistent_cohomology< FilteredComplex, Field_Zp > pcoh(cpx);
+ end = std::chrono::system_clock::now();
+ elapsed_sec = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
+ std::cout << " Initialize pcoh in " << elapsed_sec << " ms.\n";
// initializes the coefficient field for homology
+ start = std::chrono::system_clock::now();
pcoh.init_coefficients(p);
+ end = std::chrono::system_clock::now();
+ elapsed_sec = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
+ std::cout << " Initialize the coefficient field in " << elapsed_sec << " ms.\n";
start = std::chrono::system_clock::now();
pcoh.compute_persistent_cohomology(INFINITY);
end = std::chrono::system_clock::now();
- enlapsed_sec = std::chrono::duration_cast<std::chrono::seconds>(end - start).count();
- std::cout << " Compute persistent homology in Z/" << p << "Z in " << enlapsed_sec << " sec.\n";
+ elapsed_sec = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
+ std::cout << " Compute persistent homology in Z/" << p << "Z in " << elapsed_sec << " ms.\n";
+ start = std::chrono::system_clock::now();
+ }
+ end = std::chrono::system_clock::now();
+ elapsed_sec = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
+ std::cout << " Run the persistence destructors in " << elapsed_sec << " ms.\n";
}
template< typename FilteredComplex>
@@ -153,11 +183,19 @@ timing_persistence(FilteredComplex & cpx
, int p
, int q) {
std::chrono::time_point<std::chrono::system_clock> start, end;
- int enlapsed_sec;
-
+ int elapsed_sec;
+ {
+ start = std::chrono::system_clock::now();
Persistent_cohomology< FilteredComplex, Multi_field > pcoh(cpx);
+ end = std::chrono::system_clock::now();
+ elapsed_sec = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
+ std::cout << " Initialize pcoh in " << elapsed_sec << " ms.\n";
// initializes the coefficient field for homology
+ start = std::chrono::system_clock::now();
pcoh.init_coefficients(p, q);
+ end = std::chrono::system_clock::now();
+ elapsed_sec = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
+ std::cout << " Initialize the coefficient field in " << elapsed_sec << " ms.\n";
// compute persistent homology, disgarding persistent features of life shorter than min_persistence
start = std::chrono::system_clock::now();
@@ -165,7 +203,12 @@ timing_persistence(FilteredComplex & cpx
pcoh.compute_persistent_cohomology(INFINITY);
end = std::chrono::system_clock::now();
- enlapsed_sec = std::chrono::duration_cast<std::chrono::seconds>(end - start).count();
+ elapsed_sec = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
std::cout << " Compute multi-field persistent homology in all coefficient fields Z/pZ "
- << "with p in [" << p << ";" << q << "] in " << enlapsed_sec << " sec.\n";
+ << "with p in [" << p << ";" << q << "] in " << elapsed_sec << " ms.\n";
+ start = std::chrono::system_clock::now();
+ }
+ end = std::chrono::system_clock::now();
+ elapsed_sec = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
+ std::cout << " Run the persistence destructors in " << elapsed_sec << " ms.\n";
}
diff --git a/src/Persistent_cohomology/example/plain_homology.cpp b/src/Persistent_cohomology/example/plain_homology.cpp
new file mode 100644
index 00000000..e293e013
--- /dev/null
+++ b/src/Persistent_cohomology/example/plain_homology.cpp
@@ -0,0 +1,80 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Marc Glisse
+ *
+ * Copyright (C) 2015 INRIA Saclay - Ile-de-France (France)
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Persistent_cohomology.h>
+
+#include <iostream>
+
+using namespace Gudhi;
+
+struct MyOptions : Simplex_tree_options_full_featured {
+ // Implicitly use 0 as filtration value for all simplices
+ static const bool store_filtration = false;
+ // The persistence algorithm needs this
+ static const bool store_key = true;
+ // I have few vertices
+ typedef short Vertex_handle;
+};
+typedef Simplex_tree<MyOptions> ST;
+
+int main() {
+ ST st;
+
+ /* Complex to build. */
+ /* 1 3 */
+ /* o---o */
+ /* /X\ / */
+ /* o---o o */
+ /* 2 0 4 */
+
+ const short triangle012[] = {0, 1, 2};
+ const short edge03[] = {0, 3};
+ const short edge13[] = {1, 3};
+ const short vertex4[] = {4};
+ st.insert_simplex_and_subfaces(triangle012);
+ st.insert_simplex_and_subfaces(edge03);
+ st.insert_simplex(edge13);
+ st.insert_simplex(vertex4);
+ // FIXME: Remove this line
+ st.set_dimension(2);
+
+ // Sort the simplices in the order of the filtration
+ st.initialize_filtration();
+
+ // Class for homology computation
+ persistent_cohomology::Persistent_cohomology<ST, persistent_cohomology::Field_Zp> pcoh(st);
+
+ // Initialize the coefficient field Z/2Z for homology
+ pcoh.init_coefficients(2);
+
+ // Compute the persistence diagram of the complex
+ pcoh.compute_persistent_cohomology();
+
+ // Print the result. The format is, on each line: 2 dim 0 inf
+ // where 2 represents the field, dim the dimension of the feature.
+ // 2 0 0 inf
+ // 2 0 0 inf
+ // 2 1 0 inf
+ // means that in Z/2Z-homology, the Betti numbers are b0=2 and b1=1.
+ pcoh.output_diagram();
+}
diff --git a/src/Simplex_tree/concept/SimplexTreeOptions.h b/src/Simplex_tree/concept/SimplexTreeOptions.h
index a50a2bf1..add3ebdd 100644
--- a/src/Simplex_tree/concept/SimplexTreeOptions.h
+++ b/src/Simplex_tree/concept/SimplexTreeOptions.h
@@ -34,8 +34,8 @@ struct SimplexTreeOptions {
/// Must be a signed integer type.
typedef SimplexKey Simplex_key;
/// If true, each simplex has extra storage for one `Simplex_key`. Necessary for `Persistent_cohomology`.
- static constexpr bool store_key;
- /// If true, each simplex has extra storage for one `Filtration_value`, and this value is propagated by operations like `Gudhi::Simplex_tree<SimplexTreeOptions>::expansion`. Without it, `Persistent_cohomology` degenerates to computing usual (non-persistent) cohomology.
- static constexpr bool store_filtration;
+ static const bool store_key;
+ /// If true, each simplex has extra storage for one `Filtration_value`, and this value is propagated by operations like `Gudhi::Simplex_tree::expansion`. Without it, `Persistent_cohomology` degenerates to computing usual (non-persistent) cohomology.
+ static const bool store_filtration;
};
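As an aside, a minimal sketch of a struct modelling this concept. It mirrors the MyOptions struct from the plain_homology example added earlier in this changeset; the names Minimal_options and Simplex_tree_plain are illustrative.

#include <gudhi/Simplex_tree.h>

// Illustrative model of SimplexTreeOptions: reuse the full-featured defaults,
// drop per-simplex filtration storage, and keep the key that Persistent_cohomology needs.
struct Minimal_options : Gudhi::Simplex_tree_options_full_featured {
  typedef int Simplex_key;                     // must be a signed integer type
  static const bool store_key = true;          // required by Persistent_cohomology
  static const bool store_filtration = false;  // all filtration values implicitly 0
};

typedef Gudhi::Simplex_tree<Minimal_options> Simplex_tree_plain;

int main() {
  Simplex_tree_plain st;
  const int edge01[] = {0, 1};
  // Inserts the edge {0,1} together with its vertices.
  st.insert_simplex_and_subfaces(edge01);
  return 0;
}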
diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree.h b/src/Simplex_tree/include/gudhi/Simplex_tree.h
index 1ca43ff9..35d839e2 100644
--- a/src/Simplex_tree/include/gudhi/Simplex_tree.h
+++ b/src/Simplex_tree/include/gudhi/Simplex_tree.h
@@ -778,12 +778,6 @@ class Simplex_tree {
* assigned a Simplex_key corresponding to its order in the filtration (from 0 to m-1 for a
* simplicial complex with m simplices).
*
- * The use of a depth-first traversal of the simplex tree, provided by
- * complex_simplex_range(), combined with
- * a stable sort is meant to optimize the order of simplices with same
- * filtration value. The heuristic consists in inserting the cofaces of a
- * simplex as soon as possible.
- *
* Will be automatically called when calling filtration_simplex_range()
* if the filtration has never been initialized yet. */
void initialize_filtration() {
@@ -792,7 +786,15 @@ class Simplex_tree {
for (Simplex_handle sh : complex_simplex_range())
filtration_vect_.push_back(sh);
- // the stable sort ensures the ordering heuristic
+ /* We use stable_sort here because with libstdc++ it is faster than sort.
+ * is_before_in_filtration is now a total order, but we used to call
+ * stable_sort for the following heuristic:
+ * The use of a depth-first traversal of the simplex tree, provided by
+ * complex_simplex_range(), combined with a stable sort is meant to
+ * optimize the order of simplices with same filtration value. The
+ * heuristic consists in inserting the cofaces of a simplex as soon as
+ * possible.
+ */
std::stable_sort(filtration_vect_.begin(), filtration_vect_.end(),
is_before_in_filtration(this));
}
diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h
index 792a7994..3be480fd 100644
--- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h
+++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h
@@ -241,7 +241,7 @@ their collaboration to write the two initial papers
\copyright GNU General Public License v3.
-\verbatim Contact: David Salinas, david.salinas@inria.fr \endverbatim
+\verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim
*/
/** @} */ // end defgroup
diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_complex.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_complex.h
index 07f371a2..d26d12b0 100644
--- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_complex.h
+++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_complex.h
@@ -1018,7 +1018,7 @@ class Skeleton_blocker_complex {
}
//@}
- /** @Simplification operations
+ /** @name Simplification operations
*/
//@{
@@ -1131,7 +1131,7 @@ class Skeleton_blocker_complex {
}
//@}
- /** @Edge contraction operations
+ /** @name Edge contraction operations
*/
//@{
diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h
index 770d2216..0fd9a93e 100644
--- a/src/common/doc/main_page.h
+++ b/src/common/doc/main_page.h
@@ -1,74 +1,120 @@
-/**
-\mainpage
-
-\image html "Gudhi_banner.jpg" "" width=20cm
-
-The Gudhi library (Geometric Understanding in Higher Dimensions) is a generic C++ library for
-topological analysis of high-dimensional data whose goal is to provide robust, efficient, flexible and easy to use
-implementations of
-state-of-the-art algorithms and data structures for computational topology.
-This library is part of the <a href="https://project.inria.fr/gudhi/">Gudhi project</a>.
-
-The current release of the library allows to use several data-structures for simplicial complexes :
-simplex tree, Hasse diagram or skeleton-blocker. Several operations can then be done on top of these
-representations such as persistent homology computation or simplification.
-All data-structures are generic and several of their aspects (such as stored elements, policies)
-can be parameterized via template classes.
-We refer to
-\cite gudhilibrary_ICMS14
-for a detailed description of the design of the library.
-
-\section installation Gudhi installation
-
-As Gudhi is a header only library, there is no need to install the library.
-
-Examples of Gudhi headers inclusion can be found in \ref demos.
-
-
-\section compiling Compiling
-
-The library uses c++11 and requires <a href="http://www.boost.org/">Boost</a> with version 1.48.0 or more recent.
-It is a multi-platform library and compiles on Linux, Mac OSX and Visual Studio 2013.
-
-
-\subsection gmp GMP:
-The multi-field persistent homology algorithm requires GMP which is a free library for arbitrary-precision
-arithmetic, operating on signed integers, rational numbers, and floating point numbers.
-
-The following examples require the <a href="http://gmplib.org/">GNU Multiple Precision Arithmetic Library</a> (GMP)
-and will not be built if GMP is not installed:
- - Persistent_cohomology/rips_multifield_persistence
- - Simplex_tree/simplex_tree_from_alpha_shapes_3
-
-Having GMP version 4.2 or higher installed is recommended.
-
-\subsection cgal CGAL:
-CGAL is a C++ library which provides easy access to efficient and reliable geometric algorithms.
-
-The following example requires the <a href="http://www.cgal.org/">Computational Geometry Algorithms Library</a> (CGAL)
-and will not be built if CGAL is not installed:
- - Simplex_tree/simplex_tree_from_alpha_shapes_3
- - Alpha_complex/Alpha_complex_from_off
- - Alpha_complex/Alpha_complex_from_points
-
-Having CGAL version 4.7 or higher installed is recommended. The procedure to install this library according to
-your operating system is detailed here http://doc.cgal.org/latest/Manual/installation.html
-
-\subsection demos Demos and examples
-
-To build the demos and libraries, run the following commands in a terminal:
-
-\verbatim
-cd /path-to-gudhi/
-mkdir build
-cd build/
-cmake ..
-make
-\endverbatim
+/*! \mainpage
+ * \image html "Gudhi_banner.jpg" "" width=20cm
+ *
+ * \section Introduction Introduction
+ * The Gudhi library (Geometric Understanding in Higher Dimensions) is a generic open source C++ library for
+ * Computational Topology and Topological Data Analysis
+ * (<a class="el" target="_blank" href="https://en.wikipedia.org/wiki/Topological_data_analysis">TDA</a>).
+ * The GUDHI library is developed as part of the
+ * <a class="el" target="_blank" href="https://project.inria.fr/gudhi/">GUDHI project</a> supported by the European
+ * Research Council. The GUDHI library intends to help the development of new algorithmic solutions in TDA and their
+ * transfer to applications. It provides robust, efficient, flexible and easy to use implementations of
+ * state-of-the-art algorithms and data structures.
+ *
+ * The current release of the GUDHI library includes:
+ *
+ * \li Data structures to represent, construct and manipulate simplicial complexes.
+ * \li Algorithms to compute persistent homology and multi-field persistent homology.
 + * \li Simplification of simplicial complexes by edge contraction.
+ *
+ * All data-structures are generic and several of their aspects can be parameterized via template classes.
+ * We refer to \cite gudhilibrary_ICMS14 for a detailed description of the design of the library.
+ *
+ * The library is available <a class="el" target="_blank" href="https://gforge.inria.fr/frs/?group_id=3865">here</a>
+ * and the documentation is available at this <a class="el" href="http://gudhi.gforge.inria.fr/doc/latest/">
+ * webpage</a>.
+ *
+ * The library comes with data sets, \ref demos and \ref testsuites.
+ *
 + * Gudhi is also accessible through the
+ * <a class="el" target="_blank" href="https://cran.r-project.org/web/packages/TDA/index.html">R package TDA</a>
+ * (Statistical Tools for Topological Data Analysis).
+ *
+ * The development of the GUDHI library is steered by an Editorial Board composed of:
+ *
+ * \li <a class="el" target="_blank" href="http://www-sop.inria.fr/members/Jean-Daniel.Boissonnat/">
+ * Jean-Daniel Boissonnat</a> | INRIA Sophia Antipolis - Méditerranée
+ * \li <a class="el" target="_blank" href="http://geometrica.saclay.inria.fr/team/Marc.Glisse/">Marc Glisse</a> | INRIA Saclay - Ile de France
+ * \li Clément Jamin | INRIA Sophia Antipolis - Méditerranée
+ * \li Vincent Rouvreau | INRIA Saclay - Ile de France
+ *
+*/
-\details
+/*! \page installation Gudhi installation
+ * As Gudhi is a header only library, there is no need to install the library.
+ *
+ * Examples of Gudhi headers inclusion can be found in \ref demos.
+ *
+ * \section compiling Compiling
+ * The library uses c++11 and requires <a target="_blank" href="http://www.boost.org/">Boost</a> with version 1.48.0 or
+ * more recent. It is a multi-platform library and compiles on Linux, Mac OSX and Visual Studio 2013.
+ *
+ * \subsection gmp GMP:
+ * The multi-field persistent homology algorithm requires GMP which is a free library for arbitrary-precision
+ * arithmetic, operating on signed integers, rational numbers, and floating point numbers.
+ *
+ * The following example requires the <a target="_blank" href="http://gmplib.org/">GNU Multiple Precision Arithmetic
+ * Library</a> (GMP) and will not be built if GMP is not installed:
+ * \li Persistent_cohomology/rips_multifield_persistence
+ * Having GMP version 4.2 or higher installed is recommended.
+ *
+ * \subsection cgal CGAL:
+ * CGAL is a C++ library which provides easy access to efficient and reliable geometric algorithms.
+ *
+ * The following examples require the <a target="_blank" href="http://www.cgal.org/">Computational Geometry Algorithms
+ * Library</a> (CGAL) and will not be built if CGAL is not installed:
+ * \li GudhUI
+ * \li Persistent_cohomology/alpha_shapes_persistence
+ * \li Simplex_tree/simplex_tree_from_alpha_shapes_3
+ * \li Alpha_complex/Alpha_complex_from_off
+ * \li Alpha_complex/Alpha_complex_from_points
+ *
+ * Having CGAL version 4.4 or higher installed is recommended. The procedure to install this library according to
+ * your operating system is detailed here http://doc.cgal.org/latest/Manual/installation.html
+ *
+ * \subsection demos Demos and examples
+ * To build the demos and libraries, run the following commands in a terminal:
+ * \verbatim
+ * cd /path-to-gudhi/
+ * mkdir build
+ * cd build/
+ * cmake ..
+ * make
+ * \endverbatim
+ *
+ * \subsection testsuites Test suites
+ * To test your build, run the following command in a terminal:
+ * \verbatim
+ * make test
+ * \endverbatim
+ *
+ * \section Contributions Bug reports and contributions
+ * Please help us improving the quality of the GUDHI library. You may report bugs or suggestions to:
+ * \verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim
+ *
+ * Gudhi is open to external contributions. If you want to join our development team, please contact us.
+ *
+*/
-\copyright GNU General Public License v3.
-\verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim
+/*! \page Upcoming Upcoming
+ *
+ * The library is under active development. New packages to be released next include:
+ * \li Alpha complex.
+ * \li Bottleneck distance.
+ * \li Zig zag persistence.
+ * \li Witness complex.
+ * \li Tangential complex.
+ * \li Clustering.
+*/
+/*! \page Citation Acknowledging the GUDHI library
+ * We kindly ask users to cite the GUDHI library as appropriately as possible in their papers, and to mention the use
+ * of the GUDHI library on the web pages of their projects using GUDHI and provide us with links to these web pages.
+ * Feel free to contact us in case you have any question or remark on this topic.
+ *
+ * We provide \ref GudhiBibtex entries for the modules of the User and Reference Manual, as well as for publications
+ * directly related to the GUDHI library.
+ * \section GudhiBibtex GUDHI bibtex
+ * \verbinclude biblio/how_to_cite_gudhi.bib
*/
+
diff --git a/src/common/test/dtoffrw_unit_test.cpp b/src/common/test/dtoffrw_unit_test.cpp
index cc6f283e..20094229 100644
--- a/src/common/test/dtoffrw_unit_test.cpp
+++ b/src/common/test/dtoffrw_unit_test.cpp
@@ -26,8 +26,9 @@
#include <CGAL/Delaunay_triangulation.h>
#include <CGAL/Epick_d.h>
-#include <stdio.h>
#include <stdlib.h>
+
+#include <iostream>
#include <string>
#define BOOST_TEST_DYN_LINK