-rw-r--r--  CMakeLists.txt | 34
-rw-r--r--  biblio/how_to_cite_cgal.bib | 947
-rw-r--r--  biblio/how_to_cite_gudhi.bib | 9
-rwxr-xr-x  data/points/alphacomplexdoc.off | 10
-rw-r--r--  src/Alpha_complex/doc/Intro_alpha_complex.h | 167
-rw-r--r--  src/Alpha_complex/doc/alpha_complex_doc.ipe | 438
-rw-r--r--  src/Alpha_complex/doc/alpha_complex_doc.png | bin 0 -> 49973 bytes
-rw-r--r--  src/Alpha_complex/doc/alpha_complex_doc_135.ipe | 514
-rw-r--r--  src/Alpha_complex/doc/alpha_complex_doc_135.png | bin 0 -> 80794 bytes
-rw-r--r--  src/Alpha_complex/doc/alpha_complex_representation.ipe | 321
-rw-r--r--  src/Alpha_complex/doc/alpha_complex_representation.png | bin 0 -> 16737 bytes
-rw-r--r--  src/Alpha_complex/example/Alpha_complex_from_off.cpp | 44
-rw-r--r--  src/Alpha_complex/example/Alpha_complex_from_points.cpp | 63
-rw-r--r--  src/Alpha_complex/example/CMakeLists.txt | 42
-rw-r--r--  src/Alpha_complex/example/alphaoffreader_for_doc_32.txt | 22
-rw-r--r--  src/Alpha_complex/example/alphaoffreader_for_doc_60.txt | 27
-rw-r--r--  src/Alpha_complex/include/gudhi/Alpha_complex.h | 386
-rw-r--r--  src/Alpha_complex/test/Alpha_complex_unit_test.cpp | 239
-rw-r--r--  src/Alpha_complex/test/CMakeLists.txt | 35
-rw-r--r--  src/Alpha_complex/test/README | 12
-rw-r--r--  src/CMakeLists.txt | 16
-rw-r--r--  src/Contraction/include/gudhi/Contraction/policies/Link_condition_valid_contraction.h | 2
-rw-r--r--  src/Contraction/include/gudhi/Edge_contraction.h | 2
-rw-r--r--  src/Contraction/include/gudhi/Skeleton_blocker_contractor.h | 2
-rw-r--r--  src/Doxyfile | 8
-rw-r--r--  src/GudhUI/CMakeLists.txt | 2
-rw-r--r--  src/GudhUI/model/Model.h | 2
-rw-r--r--  src/GudhUI/utils/Bar_code_persistence.h | 85
-rw-r--r--  src/GudhUI/utils/Persistence_compute.h | 15
-rw-r--r--  src/GudhUI/view/FirstCoordProjector.h | 5
-rw-r--r--  src/Persistent_cohomology/example/CMakeLists.txt | 76
-rw-r--r--  src/Persistent_cohomology/example/alpha_complex_persistence.cpp | 115
-rw-r--r--  src/Persistent_cohomology/example/alpha_shapes_persistence.cpp | 14
-rw-r--r--  src/Persistent_cohomology/example/rips_persistence.cpp | 3
-rw-r--r--  src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h | 8
-rw-r--r--  src/Simplex_tree/include/gudhi/Simplex_tree.h | 129
-rw-r--r--  src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_siblings.h | 4
-rw-r--r--  src/Simplex_tree/test/simplex_tree_unit_test.cpp | 267
-rw-r--r--  src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h | 2
-rw-r--r--  src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_sub_complex.h | 2
-rw-r--r--  src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_simplices_iterators.h | 2
-rw-r--r--  src/Skeleton_blocker/include/gudhi/Skeleton_blocker_complex.h | 2
-rw-r--r--  src/Skeleton_blocker/include/gudhi/Skeleton_blocker_geometric_complex.h | 2
-rw-r--r--  src/Skeleton_blocker/include/gudhi/Skeleton_blocker_link_complex.h | 2
-rw-r--r--  src/Skeleton_blocker/test/TestSkeletonBlockerComplex.cpp | 2
-rw-r--r--  src/common/doc/main_page.h | 4
-rw-r--r--  src/common/example/CMakeLists.txt | 40
-rw-r--r--  src/common/example/Delaunay_triangulation_off_rw.cpp | 54
-rw-r--r--  src/common/example/dtoffrw_alphashapedoc_result.off | 15
-rw-r--r--  src/common/example/dtoffrw_alphashapedoc_result.txt | 2
-rw-r--r--  src/common/include/gudhi/Debug_utils.h (renamed from src/common/include/gudhi/Utils.h) | 41
-rw-r--r--  src/common/include/gudhi/Delaunay_triangulation_off_io.h | 308
-rw-r--r--  src/common/include/gudhi/Off_reader.h | 48
-rw-r--r--  src/common/include/gudhi/distance_functions.h | 4
-rw-r--r--  src/common/include/gudhi/reader_utils.h | 4
-rw-r--r--  src/common/test/CMakeLists.txt | 46
-rw-r--r--  src/common/test/README | 14
-rw-r--r--  src/common/test/dtoffrw_alphashapedoc_result.off | 15
-rw-r--r--  src/common/test/dtoffrw_unit_test.cpp | 90
59 files changed, 4648 insertions, 116 deletions
diff --git a/CMakeLists.txt b/CMakeLists.txt
index edf7e63f..93e72dd2 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -2,24 +2,28 @@ cmake_minimum_required(VERSION 2.6)
project(GUDHIdev)
include(CMakeGUDHIVersion.txt)
-# Generate GUDHI official version file
-configure_file(GUDHIVersion.cmake.in "${PROJECT_BINARY_DIR}/GUDHIVersion.cmake" @ONLY)
-find_package(Boost REQUIRED COMPONENTS system filesystem unit_test_framework chrono timer program_options thread REQUIRED)
+if (NOT CMAKE_BUILD_TYPE)
+ # Set default build type to Release
+ set(CMAKE_BUILD_TYPE "Release")
+endif()
+
+enable_testing()
-set(CMAKE_PREFIX_PATH "${CMAKE_SOURCE_DIR}/src/cmake/modules/")
set(CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/src/cmake/modules/")
-message("CMAKE_PREFIX_PATH = ${CMAKE_PREFIX_PATH}")
message("CMAKE_MODULE_PATH = ${CMAKE_MODULE_PATH}")
-enable_testing()
+# Generate GUDHI official version file
+configure_file(GUDHIVersion.cmake.in "${PROJECT_BINARY_DIR}/GUDHIVersion.cmake" @ONLY)
+
+find_package(Boost REQUIRED COMPONENTS system filesystem unit_test_framework chrono timer program_options thread REQUIRED)
if(MSVC)
# Turn off some VC++ warnings
SET (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4267 /wd4668 /wd4311 /wd4800 /wd4820 /wd4503 /wd4244 /wd4345 /wd4996 /wd4396 /wd4018")
else()
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O2 -std=c++11 -Wall -Wpedantic -Wsign-compare")
- set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -ggdb -O0")
+ set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -ggdb -O1")
set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE}")
endif()
@@ -49,6 +53,11 @@ if (GPROF_PATH)
message("gprof found in ${GPROF_PATH}")
endif()
+FIND_PROGRAM( DIFF_PATH diff )
+if (DIFF_PATH)
+ message("diff found in ${DIFF_PATH}")
+endif()
+
if(NOT Boost_FOUND)
message(FATAL_ERROR "NOTICE: This demo requires Boost and will not be compiled.")
@@ -64,7 +73,14 @@ else()
message(STATUS "boost include dirs:" ${Boost_INCLUDE_DIRS})
message(STATUS "boost library dirs:" ${Boost_LIBRARY_DIRS})
+ if (DEBUG_TRACES)
+ # For programs to be more verbose
+ message(STATUS "DEBUG_TRACES are activated")
+ add_definitions(-DDEBUG_TRACES)
+ endif()
+
include_directories(src/common/include/)
+ include_directories(src/Alpha_complex/include/)
include_directories(src/Bottleneck/include/)
include_directories(src/Contraction/include/)
include_directories(src/Hasse_complex/include/)
@@ -72,6 +88,8 @@ else()
include_directories(src/Simplex_tree/include/)
include_directories(src/Skeleton_blocker/include/)
+ add_subdirectory(src/common/example)
+ add_subdirectory(src/common/test)
add_subdirectory(src/Simplex_tree/test)
add_subdirectory(src/Simplex_tree/example)
add_subdirectory(src/Persistent_cohomology/test)
@@ -79,6 +97,8 @@ else()
add_subdirectory(src/Skeleton_blocker/test)
add_subdirectory(src/Skeleton_blocker/example)
add_subdirectory(src/Contraction/example)
+ add_subdirectory(src/Alpha_complex/example)
+ add_subdirectory(src/Alpha_complex/test)
# data points generator
add_subdirectory(data/points/generator)
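
A word on the DEBUG_TRACES option introduced above: the definition is only injected at configure time (for instance by running cmake with -DDEBUG_TRACES=ON), so the extra verbosity has to be guarded in the sources themselves. A minimal sketch of such a guard, using a hypothetical helper function for illustration; this is not code from the GUDHI sources:

    #include <iostream>

    // Hypothetical helper: when cmake is run with -DDEBUG_TRACES=ON, the DEBUG_TRACES
    // definition is added and the message is printed; in a default build the block
    // compiles away entirely.
    void trace_insertion(int cell_index) {
    #ifdef DEBUG_TRACES
      std::cout << "inserting cell #" << cell_index << std::endl;
    #else
      (void)cell_index;  // avoid an unused-parameter warning in non-verbose builds
    #endif
    }

    int main() {
      trace_insertion(42);  // silent unless DEBUG_TRACES was activated at configure time
      return 0;
    }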
diff --git a/biblio/how_to_cite_cgal.bib b/biblio/how_to_cite_cgal.bib
new file mode 100644
index 00000000..7336ee81
--- /dev/null
+++ b/biblio/how_to_cite_cgal.bib
@@ -0,0 +1,947 @@
+@book{ cgal:eb-15b
+, title = "{CGAL} User and Reference Manual"
+, author = "{The CGAL Project}"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, year = 2015
+, url = "http://doc.cgal.org/4.7/Manual/packages.html"
+}
+@incollection{cgal:h-af-15b
+, author = "Michael Hemmer"
+, title = "Algebraic Foundations"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgAlgebraicFoundationsSummary"
+, year = 2015
+}
+
+@incollection{cgal:hhkps-nt-15b
+, author = "Michael Hemmer and Susan Hert and Sylvain Pion and Stefan Schirra"
+, title = "Number Types"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgNumberTypesSummary"
+, year = 2015
+}
+
+@incollection{cgal:h-ma-15b
+, author = "Michael Hemmer and Sylvain Pion"
+, title = "Modular Arithmetic"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgModularArithmeticSummary"
+, year = 2015
+}
+
+@incollection{cgal:h-p-15b
+, author = "Michael Hemmer"
+, title = "Polynomial"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgPolynomialSummary"
+, year = 2015
+}
+
+@incollection{cgal:bht-ak-15b
+, author = "Eric Berberich and Michael Hemmer and Michael Kerber and Sylvain Lazard and Luis Pe{\~n}aranda and Monique Teillaud"
+, title = "Algebraic Kernel"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgAlgebraicKerneldSummary"
+, year = 2015
+}
+
+@incollection{cgal:h-msms-15b
+, author = "Michael Hoffmann"
+, title = "Monotone and Sorted Matrix Search"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgMatrixSearchSummary"
+, year = 2015
+}
+
+@incollection{cgal:fgsw-lqps-15b
+, author = "Kaspar Fischer and Bernd G{\"a}rtner and Sven Sch{\"o}nherr and Frans Wessendorp"
+, title = "Linear and Quadratic Programming Solver"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgQPSolverSummary"
+, year = 2015
+}
+
+@incollection{cgal:bfghhkps-lgk23-15b
+, author = "Herv{\'e} Br{\"o}nnimann and Andreas Fabri and Geert-Jan Giezeman and Susan Hert and Michael Hoffmann and Lutz Kettner and Sylvain Pion and Stefan Schirra"
+, title = "{2D} and {3D} Linear Geometry Kernel"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgKernel23Summary"
+, year = 2015
+}
+
+@incollection{cgal:s-gkd-15b
+, author = "Michael Seel"
+, title = "{dD} Geometry Kernel"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgKernelDSummary"
+, year = 2015
+}
+
+@incollection{cgal:cpt-cgk2-15b
+, author = "Pedro Machado Manh{\~a}es de Castro and Sylvain Pion and Monique Teillaud"
+, title = "{2D} Circular Geometry Kernel"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgCircularKernel2Summary"
+, year = 2015
+}
+
+@incollection{cgal:cclt-sgk3-15b
+, author = "Pedro Machado Manh{\~a}es de Castro and Fr{\'e}d{\'e}ric Cazals and S{\'e}bastien Loriot and Monique Teillaud"
+, title = "{3D} Spherical Geometry Kernel"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgSphericalKernel3Summary"
+, year = 2015
+}
+
+@incollection{cgal:hs-chep2-15b
+, author = "Susan Hert and Stefan Schirra"
+, title = "{2D} Convex Hulls and Extreme Points"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgConvexHull2Summary"
+, year = 2015
+}
+
+@incollection{cgal:hs-ch3-15b
+, author = "Susan Hert and Stefan Schirra"
+, title = "{3D} Convex Hulls"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgConvexHull3Summary"
+, year = 2015
+}
+
+@incollection{cgal:hs-chdt3-15b
+, author = "Susan Hert and Michael Seel"
+, title = "{dD} Convex Hulls and Delaunay Triangulations"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgConvexHullDSummary"
+, year = 2015
+}
+
+@incollection{cgal:gw-p2-15b
+, author = "Geert-Jan Giezeman and Wieger Wesselink"
+, title = "{2D} Polygons"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgPolygon2Summary"
+, year = 2015
+}
+
+@incollection{cgal:fwzh-rbso2-15b
+, author = "Efi Fogel and Ophir Setter and Ron Wein and Guy Zucker and Baruch Zukerman and Dan Halperin"
+, title = "{2D} Regularized Boolean Set-Operations"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgBooleanSetOperations2Summary"
+, year = 2015
+}
+
+@incollection{cgal:s-bonp2-15b
+, author = "Michael Seel"
+, title = "{2D} Boolean Operations on Nef Polygons"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgNef2Summary"
+, year = 2015
+}
+
+@incollection{cgal:hk-bonpes2-15b
+, author = "Peter Hachenberger and Lutz Kettner"
+, title = "{2D} Boolean Operations on Nef Polygons Embedded on the Sphere"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgNefS2Summary"
+, year = 2015
+}
+
+@incollection{cgal:h-pp2-15b
+, author = "Susan Hert"
+, title = "{2D} Polygon Partitioning"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgPolygonPartitioning2Summary"
+, year = 2015
+}
+
+@incollection{cgal:c-sspo2-15b
+, author = "Fernando Cacciola"
+, title = "{2D} Straight Skeleton and Polygon Offsetting"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgStraightSkeleton2Summary"
+, year = 2015
+}
+
+@incollection{cgal:w-rms2-15b
+, author = "Ron Wein and Alon Baram and Eyal Flato and Efi Fogel and Michael Hemmer and Sebastian Morr"
+, title = "{2D} Minkowski Sums"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgMinkowskiSum2Summary"
+, year = 2015
+}
+
+@incollection{cgal:f-ps2-15b
+, author = "Andreas Fabri"
+, title = "{2D} Polyline Simplification"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgPolylineSimplification2Summary"
+, year = 2015
+}
+
+@incollection{hhb-visibility-2-15b
+, author = "Michael Hemmer and Kan Huang and Francisc Bungiu and Ning Xu"
+, title = "{2D} Visibility Computation"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgVisibility_2Summary"
+, year = 2015
+}
+
+@incollection{cgal:k-ps-15b
+, author = "Lutz Kettner"
+, title = "{3D} Polyhedral Surface"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgPolyhedronSummary"
+, year = 2015
+}
+
+@incollection{cgal:k-hds-15b
+, author = "Lutz Kettner"
+, title = "Halfedge Data Structures"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgHDSSummary"
+, year = 2015
+}
+
+@incollection{cgal:bsmf-sm-15b
+, author = "Mario Botsch and Daniel Sieger and Philipp Moeller and Andreas Fabri"
+, title = "Surface Mesh"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgSurfaceMeshSummary"
+, year = 2015
+}
+
+@incollection{cgal:d-cm-15b
+, author = "Guillaume Damiand"
+, title = "Combinatorial Maps"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgCombinatorialMapsSummary"
+, year = 2015
+}
+
+@incollection{cgal:d-lcc-12-15b
+, author = "Guillaume Damiand"
+, title = "Linear Cell Complex"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgLinearCellComplexSummary"
+, year = 2015
+}
+
+@incollection{cgal:hk-bonp3-15b
+, author = "Peter Hachenberger and Lutz Kettner"
+, title = "{3D} Boolean Operations on Nef Polyhedra"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgNef3Summary"
+, year = 2015
+}
+
+@incollection{cgal:h-emspe-15b
+, author = "Peter Hachenberger"
+, title = "Convex Decomposition of Polyhedra"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgConvexDecomposition3Summary"
+, year = 2015
+}
+
+@incollection{cgal:h-msp3-15b
+, author = "Peter Hachenberger"
+, title = "{3D} Minkowski Sum of Polyhedra"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgMinkowskiSum3Summary"
+, year = 2015
+}
+
+@incollection{cgal:wfzh-a2-15b
+, author = "Ron Wein and Eric Berberich and Efi Fogel and Dan Halperin and Michael Hemmer and Oren Salzman and Baruch Zukerman"
+, title = "{2D} Arrangements"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgArrangement2Summary"
+, year = 2015
+}
+
+@incollection{cgal:wfz-ic2-15b
+, author = "Baruch Zukerman and Ron Wein and Efi Fogel"
+, title = "{2D} Intersection of Curves"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgIntersectionOfCurves2Summary"
+, year = 2015
+}
+
+@incollection{cgal:p-sr2-15b
+, author = "Eli Packer"
+, title = "{2D} Snap Rounding"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgSnapRounding2Summary"
+, year = 2015
+}
+
+@incollection{cgal:w-e2-15b
+, author = "Ron Wein"
+, title = "{2D} Envelopes"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgEnvelope2Summary"
+, year = 2015
+}
+
+@incollection{cgal:mwz-e3-15b
+, author = "Dan Halperin and Michal Meyerovitch and Ron Wein and Baruch Zukerman"
+, title = "{3D} Envelopes"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgEnvelope3Summary"
+, year = 2015
+}
+
+@incollection{cgal:y-t2-15b
+, author = "Mariette Yvinec"
+, title = "{2D} Triangulation"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgTriangulation2Summary"
+, year = 2015
+}
+
+@incollection{cgal:py-tds2-15b
+, author = "Sylvain Pion and Mariette Yvinec"
+, title = "{2D} Triangulation Data Structure"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgTDS2Summary"
+, year = 2015
+}
+
+@incollection{cgal:k-pt2-13-15b
+, author = "Nico Kruithof"
+, title = "{2D} Periodic Triangulations"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgPeriodic2Triangulation2Summary"
+, year = 2015
+}
+
+@incollection{cgal:pt-t3-15b
+, author = "Cl{\'e}ment Jamin and Sylvain Pion and Monique Teillaud"
+, title = "{3D} Triangulations"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgTriangulation3Summary"
+, year = 2015
+}
+
+@incollection{cgal:pt-tds3-15b
+, author = "Cl{\'e}ment Jamin and Sylvain Pion and Monique Teillaud"
+, title = "{3D} Triangulation Data Structure"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgTDS3Summary"
+, year = 2015
+}
+
+@incollection{cgal:ct-pt3-15b
+, author = "Manuel Caroli and Monique Teillaud"
+, title = "{3D} Periodic Triangulations"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgPeriodic3Triangulation3Summary"
+, year = 2015
+}
+
+@incollection{cgal:hdj-t-15b
+, author = "Samuel Hornus and Olivier Devillers and Cl{\'e}ment Jamin"
+, title = "{dD} Triangulations"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgTriangulationsSummary"
+, year = 2015
+}
+
+@incollection{cgal:d-as2-15b
+, author = "Tran Kai Frank Da"
+, title = "{2D} Alpha Shapes"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgAlphaShape2Summary"
+, year = 2015
+}
+
+@incollection{cgal:dy-as3-15b
+, author = "Tran Kai Frank Da and S{\'e}bastien Loriot and Mariette Yvinec"
+, title = "{3D} Alpha Shapes"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgAlphaShapes3Summary"
+, year = 2015
+}
+
+@incollection{cgal:k-sdg2-15b
+, author = "Menelaos Karavelas"
+, title = "{2D} Segment Delaunay Graphs"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgSegmentDelaunayGraph2Summary"
+, year = 2015
+}
+
+@incollection{cgal:cdp-sdglinf2-15b
+, author = "Panagiotis Cheilaris and Sandeep Kumar Dey and Evanthia Papadopoulou"
+, title = "L Infinity Segment Delaunay Graphs"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgSDGLinfSummary"
+, year = 2015
+}
+
+@incollection{cgal:ky-ag2-15b
+, author = "Menelaos Karavelas and Mariette Yvinec"
+, title = "{2D} Apollonius Graphs (Delaunay Graphs of Disks)"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgApolloniusGraph2Summary"
+, year = 2015
+}
+
+@incollection{cgal:k-vda2-15b
+, author = "Menelaos Karavelas"
+, title = "{2D} Voronoi Diagram Adaptor"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgVoronoiDiagramAdaptor2Summary"
+, year = 2015
+}
+
+@incollection{cgal:r-ctm2-15b
+, author = "Laurent Rineau"
+, title = "{2D} Conforming Triangulations and Meshes"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgMesh2Summary"
+, year = 2015
+}
+
+@incollection{cgal:ry-smg-15b
+, author = "Laurent Rineau and Mariette Yvinec"
+, title = "{3D} Surface Mesh Generation"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgSurfaceMesher3Summary"
+, year = 2015
+}
+
+@incollection{cgal:asg-srps-15b
+, author = "Pierre Alliez and Laurent Saboret and Ga{\"e}l Guennebaud"
+, title = "Surface Reconstruction from Point Sets"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgSurfaceReconstructionFromPointSetsSummary"
+, year = 2015
+}
+
+@incollection{cgal:ssr3-15b
+, author = "Thijs van Lankveld"
+, title = "Scale-Space Surface Reconstruction"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgScaleSpaceReconstruction3Summary"
+, year = 2015
+}
+
+@incollection{cgal:dc-afsr-15b
+, author = "Tran Kai Frank Da and David Cohen-Steiner"
+, title = "Advancing Front Surface Reconstruction"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgAdvancingFrontSurfaceReconstructionSummary"
+, year = 2015
+}
+
+@incollection{cgal:k-ssm3-15b
+, author = "Nico Kruithof"
+, title = "{3D} Skin Surface Meshing"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgSkinSurface3Summary"
+, year = 2015
+}
+
+@incollection{cgal:rty-m3-15b
+, author = "Pierre Alliez and Cl{\'e}ment Jamin and Laurent Rineau and St{\'e}phane Tayeb and Jane Tournois and Mariette Yvinec"
+, title = "{3D} Mesh Generation"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgMesh_3Summary"
+, year = 2015
+}
+
+@incollection{cgal:lty-pmp-15b
+, author = "S{\'e}bastien Loriot and Jane Tournois and Ilker O. Yaz"
+, title = "Polygon Mesh Processing"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgPolygonMeshProcessingSummary"
+, year = 2015
+}
+
+@incollection{cgal:s-ssm2-15b
+, author = "Le-Jeng Andy Shiue"
+, title = "{3D} Surface Subdivision Methods"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgSurfaceSubdivisionMethods3Summary"
+, year = 2015
+}
+
+@incollection{cgal:y-smsimpl-15b
+, author = "Ilker O. Yaz and S{\'e}bastien Loriot"
+, title = "Triangulated Surface Mesh Segmentation"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgSurfaceSegmentationSummary"
+, year = 2015
+}
+
+@incollection{cgal:c-tsms-12-15b
+, author = "Fernando Cacciola"
+, title = "Triangulated Surface Mesh Simplification"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgSurfaceMeshSimplificationSummary"
+, year = 2015
+}
+
+@incollection{cgal:lsxy-tsmd-15b
+, author = "S{\'e}bastien Loriot and Olga Sorkine-Hornung and Yin Xu and Ilker O. Yaz"
+, title = "Triangulated Surface Mesh Deformation"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgSurfaceModelingSummary"
+, year = 2015
+}
+
+@incollection{cgal:sal-pptsm2-15b
+, author = "Laurent Saboret and Pierre Alliez and Bruno L{\'e}vy"
+, title = "Planar Parameterization of Triangulated Surface Meshes"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgSurfaceParameterizationSummary"
+, year = 2015
+}
+
+@incollection{cgal:klcdv-tsmsp-15b
+, author = "Stephen Kiazyk and S{\'e}bastien Loriot and {\'E}ric Colin de Verdi{\`e}re"
+, title = "Triangulated Surface Mesh Shortest Paths"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgSurfaceMeshShortestPathSummary"
+, year = 2015
+}
+
+@incollection{cgal:glt-tsms-15b
+, author = "Xiang Gao and S{\'e}bastien Loriot and Andrea Tagliasacchi"
+, title = "Triangulated Surface Mesh Skeletonization"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgMeanCurvatureSkeleton3Summary"
+, year = 2015
+}
+
+@incollection{cgal:cp-arutsm-15b
+, author = "Marc Pouget and Fr{\'e}d{\'e}ric Cazals"
+, title = "Approximation of Ridges and Umbilics on Triangulated Surface Meshes"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgRidges_3Summary"
+, year = 2015
+}
+
+@incollection{cgal:pc-eldp-15b
+, author = "Marc Pouget and Fr{\'e}d{\'e}ric Cazals"
+, title = "Estimation of Local Differential Properties of Point-Sampled Surfaces"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgJet_fitting_3Summary"
+, year = 2015
+}
+
+@incollection{cgal:ass-psp-15b
+, author = "Pierre Alliez and Cl{\'e}ment Jamin and Quentin M{\'e}rigot and Jocelyn Meyron and Laurent Saboret and Nader Salman and Shihao Wu"
+, title = "Point Set Processing"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgPointSetProcessingSummary"
+, year = 2015
+}
+
+@incollection{cgal:ovja-pssd-15b
+, author = "Sven Oesau and Yannick Verdie and Cl{\'e}ment Jamin and Pierre Alliez"
+, title = "Point Set Shape Detection"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgPointSetShapeDetection3Summary"
+, year = 2015
+}
+
+@incollection{cgal:m-ps-15b
+, author = "Abdelkrim Mebarki"
+, title = "{2D} Placement of Streamlines"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgPlacementOfStreamlines2Summary"
+, year = 2015
+}
+
+@incollection{cgal:b-ss2-15b
+, author = "Matthias B{\"a}sken"
+, title = "{2D} Range and Neighbor Search"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgPointSet2Summary"
+, year = 2015
+}
+
+@incollection{cgal:f-isl-15b
+, author = "Andreas Fabri"
+, title = "Interval Skip List"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgIntervalSkipListSummary"
+, year = 2015
+}
+
+@incollection{cgal:tf-ssd-15b
+, author = "Hans Tangelder and Andreas Fabri"
+, title = "{dD} Spatial Searching"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgSpatialSearchingDSummary"
+, year = 2015
+}
+
+@incollection{cgal:n-rstd-15b
+, author = "Gabriele Neyer"
+, title = "{dD} Range and Segment Trees"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgRangeSegmentTreesDSummary"
+, year = 2015
+}
+
+@incollection{cgal:kmz-isiobd-15b
+, author = "Lutz Kettner and Andreas Meyer and Afra Zomorodian"
+, title = "Intersecting Sequences of {dD} Iso-oriented Boxes"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgBoxIntersectionDSummary"
+, year = 2015
+}
+
+@incollection{cgal:atw-aabb-15b
+, author = "Pierre Alliez and St{\'e}phane Tayeb and Camille Wormser"
+, title = "{3D} Fast Intersection and Distance Computation"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgAABB_treeSummary"
+, year = 2015
+}
+
+@incollection{cgal:dd-ss-15b
+, author = "Christophe Delage and Olivier Devillers"
+, title = "Spatial Sorting"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgSpatialSortingSummary"
+, year = 2015
+}
+
+@incollection{cgal:fghhs-bv-15b
+, author = "Kaspar Fischer and Bernd G{\"a}rtner and Thomas Herrmann and Michael Hoffmann and Sven Sch{\"o}nherr"
+, title = "Bounding Volumes"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgBoundingVolumesSummary"
+, year = 2015
+}
+
+@incollection{cgal:hp-ia-15b
+, author = "Michael Hoffmann and Eli Packer"
+, title = "Inscribed Areas"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgInscribedAreasSummary"
+, year = 2015
+}
+
+@incollection{cgal:fghhs-od-15b
+, author = "Kaspar Fischer and Bernd G{\"a}rtner and Thomas Herrmann and Michael Hoffmann and Sven Sch{\"o}nherr"
+, title = "Optimal Distances"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgOptimalDistancesSummary"
+, year = 2015
+}
+
+@incollection{cgal:ap-pcad-15b
+, author = "Pierre Alliez and Sylvain Pion and Ankit Gupta"
+, title = "Principal Component Analysis"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgPrincipalComponentAnalysisDSummary"
+, year = 2015
+}
+
+@incollection{cgal:f-i-15b
+, author = "Julia Fl{\"o}totto"
+, title = "{2D} and Surface Function Interpolation"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgInterpolation2Summary"
+, year = 2015
+}
+
+@incollection{cgal:abha-gbc-15b
+, author = "Dmitry Anisimov and David Bommes and Kai Hormann and Pierre Alliez"
+, title = "{2D} Generalized Barycentric Coordinates"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgBarycentric_coordinates_2Summary"
+, year = 2015
+}
+
+@incollection{cgal:r-kds-15b
+, author = "Daniel Russel"
+, title = "Kinetic Data Structures"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgKdsSummary"
+, year = 2015
+}
+
+@incollection{cgal:r-kdsf-15b
+, author = "Daniel Russel"
+, title = "Kinetic Framework"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgKdsFrameworkSummary"
+, year = 2015
+}
+
+@incollection{cgal:hkpw-se-15b
+, author = "Michael Hoffmann and Lutz Kettner and Sylvain Pion and Ron Wein"
+, title = "STL Extensions for {CGAL}"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgStlExtensionSummary"
+, year = 2015
+}
+
+@incollection{cgal:cfw-cbgl-15b
+, author = "Andreas Fabri and Fernando Cacciola and Philipp Moeller and Ron Wein"
+, title = "{CGAL} and the {Boost} Graph Library"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgBGLSummary"
+, year = 2015
+}
+
+@incollection{cgal:fs-cbpm-15b
+, author = "Andreas Fabri and Laurent Saboret"
+, title = "{CGAL} and {Boost} Property Maps"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgProperty_mapSummary"
+, year = 2015
+}
+
+@incollection{cgal:dksy-hc-15b
+, author = "Olivier Devillers and Lutz Kettner and Sylvain Pion and Michael Seel and Mariette Yvinec"
+, title = "Handles and Circulators"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgHandlesAndCirculatorsSummary"
+, year = 2015
+}
+
+@incollection{cgal:dhhk-gog-15b
+, author = "Pedro M. M. de Castro and Olivier Devillers and Susan Hert and Michael Hoffmann and Lutz Kettner and Sven Sch{\"o}nherr and Alexandru Tifrea"
+, title = "Geometric Object Generators"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgGeneratorsSummary"
+, year = 2015
+}
+
+@incollection{cgal:kps-pthum-15b
+, author = "Lutz Kettner and Sylvain Pion and Michael Seel"
+, title = "Profiling tools, Hash Map, Union-find, Modifiers"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgProfilingToolsSummary"
+, year = 2015
+}
+
+@incollection{cgal:fgk-ios-12-15b
+, author = "Andreas Fabri and Geert-Jan Giezeman and Lutz Kettner"
+, title = "IO Streams"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgIOstreamsSummary"
+, year = 2015
+}
+
+@incollection{cgal:fp-gv-15b
+, author = "Andreas Fabri and Sylvain Pion"
+, title = "Geomview"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgGeomviewSummary"
+, year = 2015
+}
+
+@incollection{cgal:fr-cqgvf-15b
+, author = "Andreas Fabri and Laurent Rineau"
+, title = "{CGAL} and the {Qt} Graphics View Framework"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgGraphicsViewSummary"
+, year = 2015
+}
+
+@incollection{cgal:lp-gi-15b
+, author = "Olivier Devillers and S{\'e}bastien Loriot and Sylvain Pion"
+, title = "{CGAL} Ipelets"
+, publisher = "{CGAL Editorial Board}"
+, edition = "{4.7}"
+, booktitle = "{CGAL} User and Reference Manual"
+, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgCGALIpeletsSummary"
+, year = 2015
+}
diff --git a/biblio/how_to_cite_gudhi.bib b/biblio/how_to_cite_gudhi.bib
index 851dd5d9..c899d9fc 100644
--- a/biblio/how_to_cite_gudhi.bib
+++ b/biblio/how_to_cite_gudhi.bib
@@ -42,3 +42,12 @@
, year = 2015
}
+@incollection{gudhi:Alpha complex
+, author = "Vincent Rouvreau"
+, title = "Alpha complex"
+, publisher = "{GUDHI Editorial Board}"
+, booktitle = "{GUDHI} User and Reference Manual"
+, url = "http://gudhi.gforge.inria.fr/doc/latest/group__alpha__complex.html"
+, year = 2015
+}
+
diff --git a/data/points/alphacomplexdoc.off b/data/points/alphacomplexdoc.off
new file mode 100755
index 00000000..bb790193
--- /dev/null
+++ b/data/points/alphacomplexdoc.off
@@ -0,0 +1,10 @@
+nOFF
+2 7 0 0
+1.0 1.0
+7.0 0.0
+4.0 6.0
+9.0 6.0
+0.0 14.0
+2.0 19.0
+9.0 17.0
+
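
The nOFF header above reads: ambient dimension 2, 7 vertices, 0 faces, 0 edges, followed by one line of coordinates per vertex. A minimal standalone sketch of parsing this layout into a vector of points; the read_noff_points helper is an assumption of this sketch, while the actual reader added by this commit lives in src/common/include/gudhi/Off_reader.h:

    #include <fstream>
    #include <iostream>
    #include <string>
    #include <vector>

    // Reads the header and the vertex coordinates of an (n)OFF file such as alphacomplexdoc.off.
    // Illustration only: no error handling, and the face/edge counts ("0 0" here) are ignored.
    std::vector<std::vector<double>> read_noff_points(const std::string& file_name) {
      std::ifstream in(file_name);
      std::string magic;
      in >> magic;                                // "nOFF" (plain "OFF" means dimension 3)
      int dimension = 3, nb_vertices = 0, nb_faces = 0, nb_edges = 0;
      if (magic == "nOFF") in >> dimension;       // "2" in the file above
      in >> nb_vertices >> nb_faces >> nb_edges;  // "7 0 0" in the file above
      std::vector<std::vector<double>> points(nb_vertices, std::vector<double>(dimension));
      for (std::vector<double>& point : points)
        for (double& coordinate : point) in >> coordinate;
      return points;
    }

    int main() {
      // With the file above, this prints "7 points of dimension 2".
      std::vector<std::vector<double>> points = read_noff_points("data/points/alphacomplexdoc.off");
      if (!points.empty())
        std::cout << points.size() << " points of dimension " << points[0].size() << std::endl;
      return 0;
    }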
diff --git a/src/Alpha_complex/doc/Intro_alpha_complex.h b/src/Alpha_complex/doc/Intro_alpha_complex.h
new file mode 100644
index 00000000..12d62ac0
--- /dev/null
+++ b/src/Alpha_complex/doc/Intro_alpha_complex.h
@@ -0,0 +1,167 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2015 INRIA Saclay (France)
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef INTRO_ALPHA_COMPLEX_H_
+#define INTRO_ALPHA_COMPLEX_H_
+
+// needs namespace for Doxygen to link on classes
+namespace Gudhi {
+// needs namespace for Doxygen to link on classes
+namespace alphacomplex {
+
+/** \defgroup alpha_complex Alpha complex
+ *
+ * \author Vincent Rouvreau
+ *
+ * @{
+ *
+ * \section definition Definition
+ *
+ * Alpha_complex is a <a target="_blank" href="https://en.wikipedia.org/wiki/Simplicial_complex">simplicial complex</a>
+ * constructed from the finite cells of a Delaunay Triangulation.
+ *
+ * The filtration value of each simplex is computed from the circumradius of the simplex if the simplex is Gabriel,
+ * or from the alpha value of the cofaces that make it not Gabriel.
+ *
+ * All simplices that have a filtration value strictly greater than a given alpha square value are not inserted into
+ * the simplex tree.
+ *
+ * \image html "alpha_complex_representation.png" "Alpha simplicial complex representation"
+ *
 + * Alpha_complex constructs a `Simplex_tree` using <a target="_blank"
+ * href="http://doc.cgal.org/latest/Triangulation/index.html#Chapter_Triangulations">Delaunay Triangulation</a>
+ * \cite cgal:hdj-t-15b from <a target="_blank" href="http://www.cgal.org/">CGAL</a> (the Computational Geometry
+ * Algorithms Library \cite cgal:eb-15b).
+ *
+ * The complex is a template class requiring a <a target="_blank"
+ * href="http://doc.cgal.org/latest/Kernel_d/index.html#Chapter_dD_Geometry_Kernel">dD Geometry Kernel</a>
 + * \cite cgal:s-gkd-15b from CGAL as its template parameter.
+ *
+ * \section pointsexample Example from points
+ *
+ * This example builds the Delaunay triangulation from the given points in a 2D static kernel, and initializes the
+ * alpha complex with it.
+ *
 + * The program then displays information about the alpha complex.
+ *
+ * \include Alpha_complex_from_points.cpp
+ *
+ * When launching:
+ *
+ * \code $> ./alphapoints 60.0
+ * \endcode
+ *
+ * the program output is:
+ *
+ * \include alphaoffreader_for_doc_60.txt
+ *
+ * \section algorithm Algorithm
+ *
+ * \subsection datastructure Data structure
+ *
 + * In order to build the alpha complex, a Simplex tree is first built from the cells of a Delaunay Triangulation
 + * (the filtration value of each simplex is set to NaN, which stands for an unknown value):
+ * \image html "alpha_complex_doc.png" "Simplex tree structure construction example"
+ *
+ * \subsection filtrationcomputation Filtration value computation algorithm
+ *
+ * \f{algorithm}{
+ * \caption{Filtration value computation algorithm}\label{alpha}
+ * \begin{algorithmic}
+ * \For{i : dimension $\rightarrow$ 0}
+ * \ForAll{$\sigma$ of dimension i}
+ * \If {filtration($\sigma$) is NaN}
+ * \State filtration($\sigma$) = $\alpha^2(\sigma)$
+ * \EndIf
+ * \ForAll{$\tau$ face of $\sigma$} \Comment{propagate alpha filtration value}
+ * \If {filtration($\tau$) is not NaN}
+ * \State filtration($\tau$) = min (filtration($\tau$), filtration($\sigma$))
+ * \Else
+ * \If {$\tau$ is not Gabriel for $\sigma$}
+ * \State filtration($\tau$) = filtration($\sigma$)
+ * \EndIf
+ * \EndIf
+ * \EndFor
+ * \EndFor
+ * \EndFor
+ * \State make\_filtration\_non\_decreasing()
+ * \State prune\_above\_filtration()
+ * \end{algorithmic}
+ * \f}
+ *
+ * \subsubsection dimension2 Dimension 2
+ *
 + * In the example above, the algorithm looks at each triangle ([1,2,3], [2,3,4], [1,3,5], ...),
 + * computes the filtration value of the triangle, and then propagates the filtration value as described
 + * here:
+ * \image html "alpha_complex_doc_135.png" "Filtration value propagation example"
+ *
+ * \subsubsection dimension1 Dimension 1
+ *
 + * Then, the algorithm looks at each edge ([1,2], [2,3], [1,3], ...) and
 + * computes the filtration value of the edge (in this case, propagation will have no effect).
+ *
+ * \subsubsection dimension0 Dimension 0
+ *
 + * Finally, the algorithm looks at each vertex ([1], [2], [3], [4], [5], [6] and [7]) and
 + * sets its filtration value (0 in the case of a vertex; propagation will have no effect).
+ *
+ * \subsubsection nondecreasing Non decreasing filtration values
+ *
 + * As the alpha square values computed by CGAL are approximations, the filtration has to be made non-decreasing
 + * with respect to dimension for our simplicial complex to be valid (cf.
 + * `Simplex_tree::make_filtration_non_decreasing()`).
+ *
+ * \subsubsection pruneabove Prune above given filtration value
+ *
 + * The simplex tree is pruned above the given maximum alpha square value (cf. `Simplex_tree::prune_above_filtration()`).
 + * In this example, this value is given by the user as an argument of the program.
+ *
+ *
+ * \section offexample Example from OFF file
+ *
 + * This example reads the points from an OFF file, builds the Delaunay triangulation in a dynamic kernel, and initializes the alpha complex with it.
 + *
 + *
 + * The program then displays information about the alpha complex.
+ *
+ * \include Alpha_complex_from_off.cpp
+ *
+ * When launching:
+ *
+ * \code $> ./alphaoffreader ../../data/points/alphacomplexdoc.off 32.0
+ * \endcode
+ *
+ * the program output is:
+ *
+ * \include alphaoffreader_for_doc_32.txt
+ *
+ * \copyright GNU General Public License v3.
+ * \verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim
+ */
+/** @} */ // end defgroup alpha_complex
+
+} // namespace alphacomplex
+
+} // namespace Gudhi
+
+#endif // INTRO_ALPHA_COMPLEX_H_
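
The \f{algorithm} block in Intro_alpha_complex.h above only renders once Doxygen processes it; as a plain-text companion, here is a rough C++ transcription of that filtration value computation loop. The Simplex struct and the alpha_square / is_gabriel placeholders are assumptions of this sketch, not the API of the Alpha_complex class added by this commit (where the real predicates come from the CGAL dD kernel):

    #include <algorithm>
    #include <cmath>
    #include <iostream>
    #include <limits>
    #include <vector>

    // Hypothetical flat representation of the simplices of a Delaunay triangulation,
    // grouped by dimension: simplices_by_dim[d] holds all d-simplices.
    struct Simplex {
      std::vector<int> vertices;
      double filtration = std::numeric_limits<double>::quiet_NaN();  // NaN stands for "not computed yet"
      std::vector<Simplex*> facets;  // codimension-1 faces, assumed to be filled beforehand
    };

    // Placeholder geometric predicates: stubs so that the sketch compiles.
    double alpha_square(const Simplex& /*sigma*/) { return 0.0; }
    bool is_gabriel(const Simplex& /*tau*/, const Simplex& /*sigma*/) { return true; }

    // Transcription of the documented algorithm: walk the simplices from the top
    // dimension down to 0 and propagate alpha values to the faces.
    void compute_filtration(std::vector<std::vector<Simplex>>& simplices_by_dim) {
      for (int dim = static_cast<int>(simplices_by_dim.size()) - 1; dim >= 0; --dim) {
        for (Simplex& sigma : simplices_by_dim[dim]) {
          if (std::isnan(sigma.filtration))
            sigma.filtration = alpha_square(sigma);            // filtration(sigma) = alpha^2(sigma)
          for (Simplex* tau : sigma.facets) {                  // propagate to the faces tau of sigma
            if (!std::isnan(tau->filtration))
              tau->filtration = std::min(tau->filtration, sigma.filtration);
            else if (!is_gabriel(*tau, sigma))
              tau->filtration = sigma.filtration;
          }
        }
      }
      // The documented algorithm then calls Simplex_tree::make_filtration_non_decreasing()
      // and Simplex_tree::prune_above_filtration() on the resulting simplex tree.
    }

    int main() {
      std::vector<std::vector<Simplex>> simplices(1);  // dimension 0 only: a single vertex
      Simplex vertex;
      vertex.vertices = {0};
      simplices[0].push_back(vertex);
      compute_filtration(simplices);
      std::cout << "filtration of the vertex: " << simplices[0][0].filtration << std::endl;
      return 0;
    }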
diff --git a/src/Alpha_complex/doc/alpha_complex_doc.ipe b/src/Alpha_complex/doc/alpha_complex_doc.ipe
new file mode 100644
index 00000000..e74f9bc4
--- /dev/null
+++ b/src/Alpha_complex/doc/alpha_complex_doc.ipe
@@ -0,0 +1,438 @@
+<?xml version="1.0"?>
+<!DOCTYPE ipe SYSTEM "ipe.dtd">
+<ipe version="70107" creator="Ipe 7.1.10">
+<info created="D:20150603143945" modified="D:20151130095407"/>
+<ipestyle name="basic">
+<symbol name="arrow/arc(spx)">
+<path stroke="sym-stroke" fill="sym-stroke" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/farc(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="mark/circle(sx)" transformations="translations">
+<path fill="sym-stroke">
+0.6 0 0 0.6 0 0 e
+0.4 0 0 0.4 0 0 e
+</path>
+</symbol>
+<symbol name="mark/disk(sx)" transformations="translations">
+<path fill="sym-stroke">
+0.6 0 0 0.6 0 0 e
+</path>
+</symbol>
+<symbol name="mark/fdisk(sfx)" transformations="translations">
+<group>
+<path fill="sym-fill">
+0.5 0 0 0.5 0 0 e
+</path>
+<path fill="sym-stroke" fillrule="eofill">
+0.6 0 0 0.6 0 0 e
+0.4 0 0 0.4 0 0 e
+</path>
+</group>
+</symbol>
+<symbol name="mark/box(sx)" transformations="translations">
+<path fill="sym-stroke" fillrule="eofill">
+-0.6 -0.6 m
+0.6 -0.6 l
+0.6 0.6 l
+-0.6 0.6 l
+h
+-0.4 -0.4 m
+0.4 -0.4 l
+0.4 0.4 l
+-0.4 0.4 l
+h
+</path>
+</symbol>
+<symbol name="mark/square(sx)" transformations="translations">
+<path fill="sym-stroke">
+-0.6 -0.6 m
+0.6 -0.6 l
+0.6 0.6 l
+-0.6 0.6 l
+h
+</path>
+</symbol>
+<symbol name="mark/fsquare(sfx)" transformations="translations">
+<group>
+<path fill="sym-fill">
+-0.5 -0.5 m
+0.5 -0.5 l
+0.5 0.5 l
+-0.5 0.5 l
+h
+</path>
+<path fill="sym-stroke" fillrule="eofill">
+-0.6 -0.6 m
+0.6 -0.6 l
+0.6 0.6 l
+-0.6 0.6 l
+h
+-0.4 -0.4 m
+0.4 -0.4 l
+0.4 0.4 l
+-0.4 0.4 l
+h
+</path>
+</group>
+</symbol>
+<symbol name="mark/cross(sx)" transformations="translations">
+<group>
+<path fill="sym-stroke">
+-0.43 -0.57 m
+0.57 0.43 l
+0.43 0.57 l
+-0.57 -0.43 l
+h
+</path>
+<path fill="sym-stroke">
+-0.43 0.57 m
+0.57 -0.43 l
+0.43 -0.57 l
+-0.57 0.43 l
+h
+</path>
+</group>
+</symbol>
+<symbol name="arrow/fnormal(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/pointed(spx)">
+<path stroke="sym-stroke" fill="sym-stroke" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-0.8 0 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/fpointed(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-0.8 0 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/linear(spx)">
+<path stroke="sym-stroke" pen="sym-pen">
+-1 0.333 m
+0 0 l
+-1 -0.333 l
+</path>
+</symbol>
+<symbol name="arrow/fdouble(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+-1 0 m
+-2 0.333 l
+-2 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/double(spx)">
+<path stroke="sym-stroke" fill="sym-stroke" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+-1 0 m
+-2 0.333 l
+-2 -0.333 l
+h
+</path>
+</symbol>
+<pen name="heavier" value="0.8"/>
+<pen name="fat" value="1.2"/>
+<pen name="ultrafat" value="2"/>
+<symbolsize name="large" value="5"/>
+<symbolsize name="small" value="2"/>
+<symbolsize name="tiny" value="1.1"/>
+<arrowsize name="large" value="10"/>
+<arrowsize name="small" value="5"/>
+<arrowsize name="tiny" value="3"/>
+<color name="red" value="1 0 0"/>
+<color name="green" value="0 1 0"/>
+<color name="blue" value="0 0 1"/>
+<color name="yellow" value="1 1 0"/>
+<color name="orange" value="1 0.647 0"/>
+<color name="gold" value="1 0.843 0"/>
+<color name="purple" value="0.627 0.125 0.941"/>
+<color name="gray" value="0.745"/>
+<color name="brown" value="0.647 0.165 0.165"/>
+<color name="navy" value="0 0 0.502"/>
+<color name="pink" value="1 0.753 0.796"/>
+<color name="seagreen" value="0.18 0.545 0.341"/>
+<color name="turquoise" value="0.251 0.878 0.816"/>
+<color name="violet" value="0.933 0.51 0.933"/>
+<color name="darkblue" value="0 0 0.545"/>
+<color name="darkcyan" value="0 0.545 0.545"/>
+<color name="darkgray" value="0.663"/>
+<color name="darkgreen" value="0 0.392 0"/>
+<color name="darkmagenta" value="0.545 0 0.545"/>
+<color name="darkorange" value="1 0.549 0"/>
+<color name="darkred" value="0.545 0 0"/>
+<color name="lightblue" value="0.678 0.847 0.902"/>
+<color name="lightcyan" value="0.878 1 1"/>
+<color name="lightgray" value="0.827"/>
+<color name="lightgreen" value="0.565 0.933 0.565"/>
+<color name="lightyellow" value="1 1 0.878"/>
+<dashstyle name="dashed" value="[4] 0"/>
+<dashstyle name="dotted" value="[1 3] 0"/>
+<dashstyle name="dash dotted" value="[4 2 1 2] 0"/>
+<dashstyle name="dash dot dotted" value="[4 2 1 2 1 2] 0"/>
+<textsize name="large" value="\large"/>
+<textsize name="small" value="\small"/>
+<textsize name="tiny" value="\tiny"/>
+<textsize name="Large" value="\Large"/>
+<textsize name="LARGE" value="\LARGE"/>
+<textsize name="huge" value="\huge"/>
+<textsize name="Huge" value="\Huge"/>
+<textsize name="footnote" value="\footnotesize"/>
+<textstyle name="center" begin="\begin{center}" end="\end{center}"/>
+<textstyle name="itemize" begin="\begin{itemize}" end="\end{itemize}"/>
+<textstyle name="item" begin="\begin{itemize}\item{}" end="\end{itemize}"/>
+<gridsize name="4 pts" value="4"/>
+<gridsize name="8 pts (~3 mm)" value="8"/>
+<gridsize name="16 pts (~6 mm)" value="16"/>
+<gridsize name="32 pts (~12 mm)" value="32"/>
+<gridsize name="10 pts (~3.5 mm)" value="10"/>
+<gridsize name="20 pts (~7 mm)" value="20"/>
+<gridsize name="14 pts (~5 mm)" value="14"/>
+<gridsize name="28 pts (~10 mm)" value="28"/>
+<gridsize name="56 pts (~20 mm)" value="56"/>
+<anglesize name="90 deg" value="90"/>
+<anglesize name="60 deg" value="60"/>
+<anglesize name="45 deg" value="45"/>
+<anglesize name="30 deg" value="30"/>
+<anglesize name="22.5 deg" value="22.5"/>
+<tiling name="falling" angle="-60" step="4" width="1"/>
+<tiling name="rising" angle="30" step="4" width="1"/>
+</ipestyle>
+<page>
+<layer name="alpha"/>
+<view layers="alpha" active="alpha"/>
+<path layer="alpha" matrix="1 0 0 1 -240 0" stroke="darkcyan">
+320 580 m
+350 520 l
+290 530 l
+320 580 l
+320 580 l
+</path>
+<path matrix="1 0 0 1 -240 0" stroke="darkcyan">
+320 580 m
+280 660 l
+290 530 l
+320 580 l
+320 580 l
+</path>
+<path matrix="1 0 0 1 -240 0" stroke="darkcyan">
+320 580 m
+370 580 l
+350 520 l
+320 580 l
+</path>
+<text matrix="1 0 0 1 -260 0" transformations="translations" pos="380 530" stroke="darkcyan" type="label" width="118.196" height="8.307" depth="2.32" valign="baseline" size="large">Delaunay triangulation</text>
+<text matrix="1 0 0 1 -242.155 -3.50128" transformations="translations" pos="282.952 524.893" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">0</text>
+<text matrix="1 0 0 1 -240 0" transformations="translations" pos="352.708 510.349" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">1</text>
+<text matrix="1 0 0 1 -240 0" transformations="translations" pos="310.693 578.759" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">2</text>
+<text matrix="1 0 0 1 -240 0" transformations="translations" pos="375.332 578.49" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">3</text>
+<text matrix="1 0 0 1 -240 0" transformations="translations" pos="272.179 660.635" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">4</text>
+<text matrix="1 0 0 1 -239.3 -10.1537" transformations="translations" pos="296.419 724.197" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">5</text>
+<text matrix="1 0 0 1 -240 0" transformations="translations" pos="375.332 689.453" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
+<path matrix="1 0 0 1 -240 0" stroke="darkcyan">
+280 660 m
+300 710 l
+370 690 l
+280 660 l
+</path>
+<path matrix="1 0 0 1 -240 0" stroke="darkcyan">
+320 580 m
+370 690 l
+370 580 l
+320 580 l
+</path>
+<path matrix="1 0 0 1 -240 0" stroke="darkcyan">
+280 660 m
+370 690 l
+320 580 l
+280 660 l
+</path>
+<text matrix="1 0 0 1 -40 -40" transformations="translations" pos="360 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">0</text>
+<text matrix="1 0 0 1 -50 -40" transformations="translations" pos="360 660" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">1</text>
+<text matrix="1 0 0 1 -50 -40" transformations="translations" pos="360 640" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">2</text>
+<text matrix="1 0 0 1 -50 -40" transformations="translations" pos="370 660" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">2</text>
+<text matrix="1 0 0 1 -30 -40" transformations="translations" pos="380 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">1</text>
+<text matrix="1 0 0 1 -30 -40" transformations="translations" pos="380 660" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">2</text>
+<text matrix="1 0 0 1 -10 -40" transformations="translations" pos="400 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">2</text>
+<text matrix="1 0 0 1 -40 -40" transformations="translations" pos="390 640" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">3</text>
+<text matrix="1 0 0 1 -40 -40" transformations="translations" pos="400 660" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">3</text>
+<text matrix="1 0 0 1 -30 -40" transformations="translations" pos="410 660" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">3</text>
+<text matrix="1 0 0 1 -10 -40" transformations="translations" pos="430 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">3</text>
+<text matrix="1 0 0 1 -50 -40" transformations="translations" pos="370 640" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">4</text>
+<text matrix="1 0 0 1 -50 -40" transformations="translations" pos="380 660" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">4</text>
+<text matrix="1 0 0 1 -40 -40" transformations="translations" pos="430 660" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">4</text>
+<text matrix="1 0 0 1 -20 -40" transformations="translations" pos="460 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">4</text>
+<text matrix="1 0 0 1 -50 -40" transformations="translations" pos="430 640" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
+<text matrix="1 0 0 1 -50 -40" transformations="translations" pos="450 660" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
+<text matrix="1 0 0 1 -40 -40" transformations="translations" pos="460 660" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
+<text matrix="1 0 0 1 -30 -40" transformations="translations" pos="520 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
+<text matrix="1 0 0 1 90 -40" transformations="translations" pos="300 640" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
+<text matrix="1 0 0 1 100 -40" transformations="translations" pos="350 660" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
+<text matrix="1 0 0 1 90 -40" transformations="translations" pos="350 660" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">5</text>
+<text matrix="1 0 0 1 90 -40" transformations="translations" pos="350 640" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
+<text matrix="1 0 0 1 90 -40" transformations="translations" pos="380 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">5</text>
+<path matrix="1 0 0 1 90 -40" stroke="black">
+4 0 0 4 320 704 e
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black">
+322.919 706.788 m
+317.189 701.058 l
+317.189 701.203 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black">
+317.551 706.934 m
+322.629 701.058 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+230 680 m
+240 670 l
+</path>
+<path matrix="1 0 0 1 120 -40" stroke="black" arrow="normal/tiny">
+230 680 m
+240 670 l
+</path>
+<path matrix="1 0 0 1 160 -40" stroke="black" arrow="normal/tiny">
+230 680 m
+240 670 l
+</path>
+<path matrix="1 0 0 1 210 -40" stroke="black" arrow="normal/tiny">
+230 680 m
+240 670 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+230 680 m
+220 670 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+230 680 m
+230 670 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+220 660 m
+220 650 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+230 660 m
+230 650 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+260 680 m
+260 670 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+260 660 m
+260 650 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+300 680 m
+300 670 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+300 680 m
+290 670 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+290 660 m
+290 650 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+300 660 m
+300 650 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+330 680 m
+330 670 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+350 680 m
+350 670 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+350 660 m
+350 650 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+320 700 m
+240 690 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+320 700 m
+270 690 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+320 700 m
+310 690 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+320 700 m
+330 690 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+320 700 m
+350 690 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+320 700 m
+380 690 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+320 700 m
+400 690 l
+</path>
+<path matrix="1 0 0 1 50 0" stroke="black">
+240 620 m
+220 600 l
+</path>
+<path matrix="1 0 0 1 50 0" stroke="black">
+240 620 m
+220 640 l
+</path>
+<text transformations="translations" pos="180 620" stroke="black" type="label" width="97.274" height="6.926" depth="1.93" valign="baseline">Simplex tree structure</text>
+<path stroke="black">
+280 630 m
+170 630 l
+</path>
+<path stroke="black">
+280 610 m
+170 610 l
+</path>
+<use matrix="1 0 0 1 -239.3 -10.1537" name="mark/fdisk(sfx)" pos="300 720" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -240 0" name="mark/fdisk(sfx)" pos="370 690" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -240 0" name="mark/fdisk(sfx)" pos="280 660" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -240 0" name="mark/fdisk(sfx)" pos="320 580" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -240 0" name="mark/fdisk(sfx)" pos="370 580" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -240 0" name="mark/fdisk(sfx)" pos="350 520" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -240 0" name="mark/fdisk(sfx)" pos="290 530" size="normal" stroke="black" fill="white"/>
+</page>
+</ipe>
diff --git a/src/Alpha_complex/doc/alpha_complex_doc.png b/src/Alpha_complex/doc/alpha_complex_doc.png
new file mode 100644
index 00000000..c9eab275
--- /dev/null
+++ b/src/Alpha_complex/doc/alpha_complex_doc.png
Binary files differ
diff --git a/src/Alpha_complex/doc/alpha_complex_doc_135.ipe b/src/Alpha_complex/doc/alpha_complex_doc_135.ipe
new file mode 100644
index 00000000..5d1d29d4
--- /dev/null
+++ b/src/Alpha_complex/doc/alpha_complex_doc_135.ipe
@@ -0,0 +1,514 @@
+<?xml version="1.0"?>
+<!DOCTYPE ipe SYSTEM "ipe.dtd">
+<ipe version="70107" creator="Ipe 7.1.10">
+<info created="D:20150603143945" modified="D:20151130095019"/>
+<ipestyle name="basic">
+<symbol name="arrow/arc(spx)">
+<path stroke="sym-stroke" fill="sym-stroke" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/farc(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="mark/circle(sx)" transformations="translations">
+<path fill="sym-stroke">
+0.6 0 0 0.6 0 0 e
+0.4 0 0 0.4 0 0 e
+</path>
+</symbol>
+<symbol name="mark/disk(sx)" transformations="translations">
+<path fill="sym-stroke">
+0.6 0 0 0.6 0 0 e
+</path>
+</symbol>
+<symbol name="mark/fdisk(sfx)" transformations="translations">
+<group>
+<path fill="sym-fill">
+0.5 0 0 0.5 0 0 e
+</path>
+<path fill="sym-stroke" fillrule="eofill">
+0.6 0 0 0.6 0 0 e
+0.4 0 0 0.4 0 0 e
+</path>
+</group>
+</symbol>
+<symbol name="mark/box(sx)" transformations="translations">
+<path fill="sym-stroke" fillrule="eofill">
+-0.6 -0.6 m
+0.6 -0.6 l
+0.6 0.6 l
+-0.6 0.6 l
+h
+-0.4 -0.4 m
+0.4 -0.4 l
+0.4 0.4 l
+-0.4 0.4 l
+h
+</path>
+</symbol>
+<symbol name="mark/square(sx)" transformations="translations">
+<path fill="sym-stroke">
+-0.6 -0.6 m
+0.6 -0.6 l
+0.6 0.6 l
+-0.6 0.6 l
+h
+</path>
+</symbol>
+<symbol name="mark/fsquare(sfx)" transformations="translations">
+<group>
+<path fill="sym-fill">
+-0.5 -0.5 m
+0.5 -0.5 l
+0.5 0.5 l
+-0.5 0.5 l
+h
+</path>
+<path fill="sym-stroke" fillrule="eofill">
+-0.6 -0.6 m
+0.6 -0.6 l
+0.6 0.6 l
+-0.6 0.6 l
+h
+-0.4 -0.4 m
+0.4 -0.4 l
+0.4 0.4 l
+-0.4 0.4 l
+h
+</path>
+</group>
+</symbol>
+<symbol name="mark/cross(sx)" transformations="translations">
+<group>
+<path fill="sym-stroke">
+-0.43 -0.57 m
+0.57 0.43 l
+0.43 0.57 l
+-0.57 -0.43 l
+h
+</path>
+<path fill="sym-stroke">
+-0.43 0.57 m
+0.57 -0.43 l
+0.43 -0.57 l
+-0.57 0.43 l
+h
+</path>
+</group>
+</symbol>
+<symbol name="arrow/fnormal(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/pointed(spx)">
+<path stroke="sym-stroke" fill="sym-stroke" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-0.8 0 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/fpointed(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-0.8 0 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/linear(spx)">
+<path stroke="sym-stroke" pen="sym-pen">
+-1 0.333 m
+0 0 l
+-1 -0.333 l
+</path>
+</symbol>
+<symbol name="arrow/fdouble(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+-1 0 m
+-2 0.333 l
+-2 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/double(spx)">
+<path stroke="sym-stroke" fill="sym-stroke" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+-1 0 m
+-2 0.333 l
+-2 -0.333 l
+h
+</path>
+</symbol>
+<pen name="heavier" value="0.8"/>
+<pen name="fat" value="1.2"/>
+<pen name="ultrafat" value="2"/>
+<symbolsize name="large" value="5"/>
+<symbolsize name="small" value="2"/>
+<symbolsize name="tiny" value="1.1"/>
+<arrowsize name="large" value="10"/>
+<arrowsize name="small" value="5"/>
+<arrowsize name="tiny" value="3"/>
+<color name="red" value="1 0 0"/>
+<color name="green" value="0 1 0"/>
+<color name="blue" value="0 0 1"/>
+<color name="yellow" value="1 1 0"/>
+<color name="orange" value="1 0.647 0"/>
+<color name="gold" value="1 0.843 0"/>
+<color name="purple" value="0.627 0.125 0.941"/>
+<color name="gray" value="0.745"/>
+<color name="brown" value="0.647 0.165 0.165"/>
+<color name="navy" value="0 0 0.502"/>
+<color name="pink" value="1 0.753 0.796"/>
+<color name="seagreen" value="0.18 0.545 0.341"/>
+<color name="turquoise" value="0.251 0.878 0.816"/>
+<color name="violet" value="0.933 0.51 0.933"/>
+<color name="darkblue" value="0 0 0.545"/>
+<color name="darkcyan" value="0 0.545 0.545"/>
+<color name="darkgray" value="0.663"/>
+<color name="darkgreen" value="0 0.392 0"/>
+<color name="darkmagenta" value="0.545 0 0.545"/>
+<color name="darkorange" value="1 0.549 0"/>
+<color name="darkred" value="0.545 0 0"/>
+<color name="lightblue" value="0.678 0.847 0.902"/>
+<color name="lightcyan" value="0.878 1 1"/>
+<color name="lightgray" value="0.827"/>
+<color name="lightgreen" value="0.565 0.933 0.565"/>
+<color name="lightyellow" value="1 1 0.878"/>
+<dashstyle name="dashed" value="[4] 0"/>
+<dashstyle name="dotted" value="[1 3] 0"/>
+<dashstyle name="dash dotted" value="[4 2 1 2] 0"/>
+<dashstyle name="dash dot dotted" value="[4 2 1 2 1 2] 0"/>
+<textsize name="large" value="\large"/>
+<textsize name="small" value="\small"/>
+<textsize name="tiny" value="\tiny"/>
+<textsize name="Large" value="\Large"/>
+<textsize name="LARGE" value="\LARGE"/>
+<textsize name="huge" value="\huge"/>
+<textsize name="Huge" value="\Huge"/>
+<textsize name="footnote" value="\footnotesize"/>
+<textstyle name="center" begin="\begin{center}" end="\end{center}"/>
+<textstyle name="itemize" begin="\begin{itemize}" end="\end{itemize}"/>
+<textstyle name="item" begin="\begin{itemize}\item{}" end="\end{itemize}"/>
+<gridsize name="4 pts" value="4"/>
+<gridsize name="8 pts (~3 mm)" value="8"/>
+<gridsize name="16 pts (~6 mm)" value="16"/>
+<gridsize name="32 pts (~12 mm)" value="32"/>
+<gridsize name="10 pts (~3.5 mm)" value="10"/>
+<gridsize name="20 pts (~7 mm)" value="20"/>
+<gridsize name="14 pts (~5 mm)" value="14"/>
+<gridsize name="28 pts (~10 mm)" value="28"/>
+<gridsize name="56 pts (~20 mm)" value="56"/>
+<anglesize name="90 deg" value="90"/>
+<anglesize name="60 deg" value="60"/>
+<anglesize name="45 deg" value="45"/>
+<anglesize name="30 deg" value="30"/>
+<anglesize name="22.5 deg" value="22.5"/>
+<tiling name="falling" angle="-60" step="4" width="1"/>
+<tiling name="rising" angle="30" step="4" width="1"/>
+</ipestyle>
+<page>
+<layer name="alpha"/>
+<view layers="alpha" active="alpha"/>
+<path layer="alpha" matrix="1 0 0 1 0 80" stroke="lightgray">
+320 580 m
+350 520 l
+290 530 l
+320 580 l
+320 580 l
+</path>
+<path matrix="1 0 0 1 0 80" stroke="darkcyan" pen="heavier">
+320 580 m
+280 660 l
+290 530 l
+320 580 l
+320 580 l
+</path>
+<path matrix="1 0 0 1 0 80" stroke="lightgray">
+320 580 m
+370 580 l
+350 520 l
+320 580 l
+</path>
+<text matrix="1 0 0 1 0 80" transformations="translations" pos="380 530" stroke="darkcyan" type="label" width="54.628" height="8.965" depth="2.99" valign="baseline" size="large">Cell [4,2,0]</text>
+<text matrix="1 0 0 1 -2.15463 76.4987" transformations="translations" pos="282.952 524.893" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">0</text>
+<text matrix="1 0 0 1 0 80" transformations="translations" pos="352.708 510.349" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">1</text>
+<text matrix="1 0 0 1 0 80" transformations="translations" pos="310.693 578.759" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">2</text>
+<text matrix="1 0 0 1 0 80" transformations="translations" pos="375.332 578.49" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">3</text>
+<text matrix="1 0 0 1 0 80" transformations="translations" pos="272.179 660.635" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">4</text>
+<text matrix="1 0 0 1 0.700256 69.8463" transformations="translations" pos="296.419 724.197" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">5</text>
+<text matrix="1 0 0 1 0 80" transformations="translations" pos="375.332 689.453" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
+<path matrix="1 0 0 1 0 80" stroke="lightgray">
+280 660 m
+300 710 l
+370 690 l
+280 660 l
+</path>
+<path matrix="1 0 0 1 0 80" stroke="lightgray">
+320 580 m
+370 690 l
+370 580 l
+320 580 l
+</path>
+<path matrix="1 0 0 1 0 80" stroke="lightgray">
+280 660 m
+370 690 l
+320 580 l
+280 660 l
+</path>
+<path matrix="1 0 0 1 0 80" stroke="darkcyan">
+77.2727 0 0 77.2727 243.636 591.818 e
+</path>
+<path matrix="1 0 0 1 0 80" stroke="darkcyan">
+243.428 591.569 m
+186.061 643.28 l
+</path>
+<text matrix="1 0 0 1 0 80" transformations="translations" pos="212.724 627.389" stroke="darkcyan" type="label" width="18.785" height="4.294" depth="1.49" valign="baseline">$\alpha_{420}$</text>
+<path matrix="1 0 0 1 -264 -162" stroke="lightgray">
+320 580 m
+350 520 l
+290 530 l
+320 580 l
+320 580 l
+</path>
+<path matrix="1 0 0 1 -264 -162" stroke="lightgray">
+320 580 m
+280 660 l
+290 530 l
+320 580 l
+320 580 l
+</path>
+<path matrix="1 0 0 1 -264 -162" stroke="lightgray">
+320 580 m
+370 580 l
+350 520 l
+320 580 l
+</path>
+<text matrix="0.582962 0 0 1 -211.265 -209.555" transformations="translations" pos="380 530" stroke="darkcyan" type="label" width="231.798" height="8.965" depth="2.99" valign="baseline" size="large">[2,0] is Gabriel $\rightarrow$ $\alpha_{20}$ is not$\\$
+modified (NaN)
+</text>
+<text matrix="1 0 0 1 -266.155 -165.501" transformations="translations" pos="282.952 524.893" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">0</text>
+<text matrix="1 0 0 1 -264 -162" transformations="translations" pos="310.693 578.759" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">2</text>
+<text matrix="1 0 0 1 -264 -162" transformations="translations" pos="375.332 578.49" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">3</text>
+<text matrix="1 0 0 1 -264 -172" transformations="translations" pos="272.179 660.635" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">4</text>
+<text matrix="1 0 0 1 -263.3 -172.154" transformations="translations" pos="296.419 724.197" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">5</text>
+<text matrix="1 0 0 1 -264 -162" transformations="translations" pos="375.332 689.453" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
+<path matrix="1 0 0 1 -264 -162" stroke="lightgray">
+280 660 m
+300 710 l
+370 690 l
+280 660 l
+</path>
+<path matrix="1 0 0 1 -264 -162" stroke="lightgray">
+320 580 m
+370 690 l
+370 580 l
+320 580 l
+</path>
+<path matrix="1 0 0 1 -264 -162" stroke="lightgray">
+280 660 m
+370 690 l
+320 580 l
+280 660 l
+</path>
+<text matrix="1 0 0 1 -166.834 -240.52" transformations="translations" pos="212.724 627.389" stroke="darkcyan" type="label" width="14.814" height="4.294" depth="1.49" valign="baseline">$\alpha_{20}$</text>
+<path matrix="1 0 0 1 -264 -162" stroke="darkcyan" pen="heavier">
+290 530 m
+320 580 l
+</path>
+<path matrix="1 0 0 1 -264 -162" stroke="darkcyan">
+29.1548 0 0 29.1548 305 555 e
+</path>
+<path matrix="1 0 0 1 -264 -162" stroke="darkcyan">
+304.883 555.015 m
+334.509 555.015 l
+</path>
+<path matrix="1 0 0 1 -37.2997 -163.65" stroke="lightgray">
+320 580 m
+350 520 l
+290 530 l
+320 580 l
+320 580 l
+</path>
+<path matrix="1 0 0 1 -38 -164" stroke="lightgray">
+320 580 m
+280 660 l
+290 530 l
+320 580 l
+320 580 l
+</path>
+<path matrix="1 0 0 1 -38 -164" stroke="lightgray">
+320 580 m
+370 580 l
+350 520 l
+320 580 l
+</path>
+<text matrix="1 0 0 1 -199.21 -189.117" transformations="translations" pos="380 530" stroke="darkred" type="label" width="168.308" height="8.965" depth="2.99" valign="baseline" size="large">[0,4] is not Gabriel $\rightarrow$ $\alpha_{40} = \alpha_{420}$</text>
+<text matrix="1 0 0 1 -40.1546 -167.501" transformations="translations" pos="282.952 524.893" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">0</text>
+<text matrix="1 0 0 1 -38 -164" transformations="translations" pos="375.332 578.49" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">3</text>
+<text matrix="1 0 0 1 -37.2997 -174.154" transformations="translations" pos="296.419 724.197" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">5</text>
+<text matrix="1 0 0 1 -38 -164" transformations="translations" pos="375.332 689.453" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
+<path matrix="1 0 0 1 -38 -164" stroke="lightgray">
+280 660 m
+300 710 l
+370 690 l
+280 660 l
+</path>
+<path matrix="1 0 0 1 -38 -164" stroke="lightgray">
+320 580 m
+370 690 l
+370 580 l
+320 580 l
+</path>
+<path matrix="1 0 0 1 -38 -164" stroke="lightgray">
+280 660 m
+370 690 l
+320 580 l
+280 660 l
+</path>
+<text matrix="1 0 0 1 52.4654 -193.97" transformations="translations" pos="212.724 627.389" stroke="darkcyan" type="label" width="14.814" height="4.294" depth="1.49" valign="baseline">$\alpha_{40}$</text>
+<path matrix="1 0 0 1 -38 -164" stroke="darkcyan" pen="heavier">
+290 530 m
+280 660 l
+</path>
+<path matrix="1 0 0 1 126 -162" stroke="lightgray">
+320 580 m
+350 520 l
+290 530 l
+320 580 l
+320 580 l
+</path>
+<path matrix="1 0 0 1 126 -162" stroke="lightgray">
+320 580 m
+280 660 l
+290 530 l
+320 580 l
+320 580 l
+</path>
+<path matrix="1 0 0 1 126 -162" stroke="lightgray">
+320 580 m
+370 580 l
+350 520 l
+320 580 l
+</path>
+<text matrix="1 0 0 1 123.845 -165.501" transformations="translations" pos="282.952 524.893" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">0</text>
+<text matrix="1 0 0 1 126 -162" transformations="translations" pos="352.708 510.349" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">1</text>
+<text matrix="1 0 0 1 126 -162" transformations="translations" pos="310.693 578.759" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">2</text>
+<text matrix="1 0 0 1 126 -162" transformations="translations" pos="375.332 578.49" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">3</text>
+<text matrix="1 0 0 1 126.7 -172.154" transformations="translations" pos="296.419 724.197" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">5</text>
+<text matrix="1 0 0 1 126 -162" transformations="translations" pos="375.332 689.453" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
+<path matrix="1 0 0 1 126 -162" stroke="lightgray">
+280 660 m
+300 710 l
+370 690 l
+280 660 l
+</path>
+<path matrix="1 0 0 1 126 -162" stroke="lightgray">
+320 580 m
+370 690 l
+370 580 l
+320 580 l
+</path>
+<path matrix="1 0 0 1 126 -162" stroke="lightgray">
+280 660 m
+370 690 l
+320 580 l
+280 660 l
+</path>
+<text matrix="1 0 0 1 225.859 -165.729" transformations="translations" pos="212.724 627.389" stroke="darkcyan" type="label" width="14.814" height="4.294" depth="1.49" valign="baseline">$\alpha_{42}$</text>
+<text matrix="1 0 0 1 122 -164" transformations="translations" pos="272.179 660.635" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">4</text>
+<path stroke="darkcyan" pen="heavier">
+406.093 497.775 m
+446.094 418.092 l
+</path>
+<path stroke="darkcyan">
+44.5799 0 0 44.5799 425.934 457.774 e
+</path>
+<path stroke="darkcyan">
+425.854 457.774 m
+470.795 457.774 l
+</path>
+<text matrix="1 0 0 1 -48.9756 -209.799" transformations="translations" pos="380 530" stroke="darkcyan" type="label" width="231.798" height="8.965" depth="2.99" valign="baseline" size="large">[2,4] is Gabriel $\rightarrow$ $\alpha_{42}$ is not modified (NaN)
+</text>
+<path stroke="darkblue" arrow="normal/normal">
+205.028 596.091 m
+110.946 544.02 l
+</path>
+<path stroke="darkblue" arrow="normal/normal">
+280.768 588.99 m
+280.768 547.57 l
+</path>
+<path stroke="darkblue" arrow="normal/normal">
+341.123 594.316 m
+413.904 554.079 l
+</path>
+<text matrix="1 0 0 1 39.645 -2.36686" transformations="translations" pos="199.703 569.464" stroke="darkblue" type="label" width="93.206" height="7.473" depth="2.49" valign="baseline">For all faces of [4,2,0]</text>
+<text matrix="1 0 0 1 -93.391 2.68003" transformations="translations" pos="104.437 300.174" stroke="black" type="label" width="208.621" height="6.926" depth="1.93" valign="baseline">N.B. : is Gabriel on a single point has no sense.</text>
+<text matrix="1 0 0 1 -36.9231 10" transformations="translations" pos="48 784" stroke="black" type="label" width="118.324" height="7.473" depth="2.49" valign="baseline">Dimension =2 - $\sigma$ = [4,2,0]</text>
+<path stroke="darkcyan">
+247.333 430.892 m
+311.764 430.892 l
+</path>
+<use matrix="1 0 0 1 0.700256 69.8463" name="mark/fdisk(sfx)" pos="300 720" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 0 80" name="mark/fdisk(sfx)" pos="370 690" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 0 80" name="mark/fdisk(sfx)" pos="280 660" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 0 80" name="mark/fdisk(sfx)" pos="243.636 591.818" size="normal" stroke="darkcyan" fill="white"/>
+<use matrix="1 0 0 1 0 80" name="mark/fdisk(sfx)" pos="370 580" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 0 80" name="mark/fdisk(sfx)" pos="350 520" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 0 80" name="mark/fdisk(sfx)" pos="320 580" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 0 80" name="mark/fdisk(sfx)" pos="290 530" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -263.3 -172.154" name="mark/fdisk(sfx)" pos="300 720" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -264 -162" name="mark/fdisk(sfx)" pos="280 660" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -264 -162" name="mark/fdisk(sfx)" pos="370 690" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -264 -162" name="mark/fdisk(sfx)" pos="370 580" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -264 -162" name="mark/fdisk(sfx)" pos="350 520" size="normal" stroke="black" fill="white"/>
+<text matrix="1 0 0 1 -264 -162" transformations="translations" pos="352.708 510.349" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">1</text>
+<use matrix="1 0 0 1 -264 -162" name="mark/fdisk(sfx)" pos="305 555" size="normal" stroke="darkcyan" fill="white"/>
+<use matrix="1 0 0 1 -264 -162" name="mark/fdisk(sfx)" pos="290 530" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -264 -162" name="mark/fdisk(sfx)" pos="320 580" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -37.2997 -174.154" name="mark/fdisk(sfx)" pos="300 720" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -38 -164" name="mark/fdisk(sfx)" pos="370 690" size="normal" stroke="black" fill="white"/>
+<text matrix="1 0 0 1 -38 -164" transformations="translations" pos="272.179 660.635" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">4</text>
+<use name="mark/fdisk(sfx)" pos="247 431" size="normal" stroke="darkcyan" fill="white"/>
+<use matrix="1 0 0 1 -38 -164" name="mark/fdisk(sfx)" pos="350 520" size="normal" stroke="black" fill="white"/>
+<text matrix="1 0 0 1 -38 -164" transformations="translations" pos="352.708 510.349" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">1</text>
+<use matrix="1 0 0 1 -38 -164" name="mark/fdisk(sfx)" pos="370 580" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -38 -164" name="mark/fdisk(sfx)" pos="320 580" size="normal" stroke="darkred" fill="white"/>
+<text matrix="1 0 0 1 -38 -164" transformations="translations" pos="310.693 578.759" stroke="darkred" type="label" width="4.981" height="6.42" depth="0" valign="baseline">2</text>
+<path matrix="1 0 0 1 -38 -164" stroke="darkred" pen="heavier">
+65.192 0 0 65.192 285 595 e
+</path>
+<use matrix="1 0 0 1 -38 -164" name="mark/fdisk(sfx)" pos="290 530" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 126 -162" name="mark/fdisk(sfx)" pos="290 530" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 126.7 -172.154" name="mark/fdisk(sfx)" pos="300 720" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 126 -162" name="mark/fdisk(sfx)" pos="370 690" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 126 -162" name="mark/fdisk(sfx)" pos="280 660" size="normal" stroke="black" fill="white"/>
+<use name="mark/fdisk(sfx)" pos="425.934 457.774" size="normal" stroke="darkcyan" fill="white"/>
+<use matrix="1 0 0 1 126 -162" name="mark/fdisk(sfx)" pos="320 580" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 126 -162" name="mark/fdisk(sfx)" pos="370 580" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 126 -162" name="mark/fdisk(sfx)" pos="350 520" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -38 -164" name="mark/fdisk(sfx)" pos="280 660" size="normal" stroke="black" fill="white"/>
+</page>
+</ipe>
diff --git a/src/Alpha_complex/doc/alpha_complex_doc_135.png b/src/Alpha_complex/doc/alpha_complex_doc_135.png
new file mode 100644
index 00000000..ef7187f7
--- /dev/null
+++ b/src/Alpha_complex/doc/alpha_complex_doc_135.png
Binary files differ
diff --git a/src/Alpha_complex/doc/alpha_complex_representation.ipe b/src/Alpha_complex/doc/alpha_complex_representation.ipe
new file mode 100644
index 00000000..8687d694
--- /dev/null
+++ b/src/Alpha_complex/doc/alpha_complex_representation.ipe
@@ -0,0 +1,321 @@
+<?xml version="1.0"?>
+<!DOCTYPE ipe SYSTEM "ipe.dtd">
+<ipe version="70107" creator="Ipe 7.1.10">
+<info created="D:20150603143945" modified="D:20151127174742"/>
+<ipestyle name="basic">
+<symbol name="arrow/arc(spx)">
+<path stroke="sym-stroke" fill="sym-stroke" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/farc(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="mark/circle(sx)" transformations="translations">
+<path fill="sym-stroke">
+0.6 0 0 0.6 0 0 e
+0.4 0 0 0.4 0 0 e
+</path>
+</symbol>
+<symbol name="mark/disk(sx)" transformations="translations">
+<path fill="sym-stroke">
+0.6 0 0 0.6 0 0 e
+</path>
+</symbol>
+<symbol name="mark/fdisk(sfx)" transformations="translations">
+<group>
+<path fill="sym-fill">
+0.5 0 0 0.5 0 0 e
+</path>
+<path fill="sym-stroke" fillrule="eofill">
+0.6 0 0 0.6 0 0 e
+0.4 0 0 0.4 0 0 e
+</path>
+</group>
+</symbol>
+<symbol name="mark/box(sx)" transformations="translations">
+<path fill="sym-stroke" fillrule="eofill">
+-0.6 -0.6 m
+0.6 -0.6 l
+0.6 0.6 l
+-0.6 0.6 l
+h
+-0.4 -0.4 m
+0.4 -0.4 l
+0.4 0.4 l
+-0.4 0.4 l
+h
+</path>
+</symbol>
+<symbol name="mark/square(sx)" transformations="translations">
+<path fill="sym-stroke">
+-0.6 -0.6 m
+0.6 -0.6 l
+0.6 0.6 l
+-0.6 0.6 l
+h
+</path>
+</symbol>
+<symbol name="mark/fsquare(sfx)" transformations="translations">
+<group>
+<path fill="sym-fill">
+-0.5 -0.5 m
+0.5 -0.5 l
+0.5 0.5 l
+-0.5 0.5 l
+h
+</path>
+<path fill="sym-stroke" fillrule="eofill">
+-0.6 -0.6 m
+0.6 -0.6 l
+0.6 0.6 l
+-0.6 0.6 l
+h
+-0.4 -0.4 m
+0.4 -0.4 l
+0.4 0.4 l
+-0.4 0.4 l
+h
+</path>
+</group>
+</symbol>
+<symbol name="mark/cross(sx)" transformations="translations">
+<group>
+<path fill="sym-stroke">
+-0.43 -0.57 m
+0.57 0.43 l
+0.43 0.57 l
+-0.57 -0.43 l
+h
+</path>
+<path fill="sym-stroke">
+-0.43 0.57 m
+0.57 -0.43 l
+0.43 -0.57 l
+-0.57 0.43 l
+h
+</path>
+</group>
+</symbol>
+<symbol name="arrow/fnormal(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/pointed(spx)">
+<path stroke="sym-stroke" fill="sym-stroke" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-0.8 0 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/fpointed(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-0.8 0 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/linear(spx)">
+<path stroke="sym-stroke" pen="sym-pen">
+-1 0.333 m
+0 0 l
+-1 -0.333 l
+</path>
+</symbol>
+<symbol name="arrow/fdouble(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+-1 0 m
+-2 0.333 l
+-2 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/double(spx)">
+<path stroke="sym-stroke" fill="sym-stroke" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+-1 0 m
+-2 0.333 l
+-2 -0.333 l
+h
+</path>
+</symbol>
+<pen name="heavier" value="0.8"/>
+<pen name="fat" value="1.2"/>
+<pen name="ultrafat" value="2"/>
+<symbolsize name="large" value="5"/>
+<symbolsize name="small" value="2"/>
+<symbolsize name="tiny" value="1.1"/>
+<arrowsize name="large" value="10"/>
+<arrowsize name="small" value="5"/>
+<arrowsize name="tiny" value="3"/>
+<color name="red" value="1 0 0"/>
+<color name="green" value="0 1 0"/>
+<color name="blue" value="0 0 1"/>
+<color name="yellow" value="1 1 0"/>
+<color name="orange" value="1 0.647 0"/>
+<color name="gold" value="1 0.843 0"/>
+<color name="purple" value="0.627 0.125 0.941"/>
+<color name="gray" value="0.745"/>
+<color name="brown" value="0.647 0.165 0.165"/>
+<color name="navy" value="0 0 0.502"/>
+<color name="pink" value="1 0.753 0.796"/>
+<color name="seagreen" value="0.18 0.545 0.341"/>
+<color name="turquoise" value="0.251 0.878 0.816"/>
+<color name="violet" value="0.933 0.51 0.933"/>
+<color name="darkblue" value="0 0 0.545"/>
+<color name="darkcyan" value="0 0.545 0.545"/>
+<color name="darkgray" value="0.663"/>
+<color name="darkgreen" value="0 0.392 0"/>
+<color name="darkmagenta" value="0.545 0 0.545"/>
+<color name="darkorange" value="1 0.549 0"/>
+<color name="darkred" value="0.545 0 0"/>
+<color name="lightblue" value="0.678 0.847 0.902"/>
+<color name="lightcyan" value="0.878 1 1"/>
+<color name="lightgray" value="0.827"/>
+<color name="lightgreen" value="0.565 0.933 0.565"/>
+<color name="lightyellow" value="1 1 0.878"/>
+<dashstyle name="dashed" value="[4] 0"/>
+<dashstyle name="dotted" value="[1 3] 0"/>
+<dashstyle name="dash dotted" value="[4 2 1 2] 0"/>
+<dashstyle name="dash dot dotted" value="[4 2 1 2 1 2] 0"/>
+<textsize name="large" value="\large"/>
+<textsize name="small" value="\small"/>
+<textsize name="tiny" value="\tiny"/>
+<textsize name="Large" value="\Large"/>
+<textsize name="LARGE" value="\LARGE"/>
+<textsize name="huge" value="\huge"/>
+<textsize name="Huge" value="\Huge"/>
+<textsize name="footnote" value="\footnotesize"/>
+<textstyle name="center" begin="\begin{center}" end="\end{center}"/>
+<textstyle name="itemize" begin="\begin{itemize}" end="\end{itemize}"/>
+<textstyle name="item" begin="\begin{itemize}\item{}" end="\end{itemize}"/>
+<gridsize name="4 pts" value="4"/>
+<gridsize name="8 pts (~3 mm)" value="8"/>
+<gridsize name="16 pts (~6 mm)" value="16"/>
+<gridsize name="32 pts (~12 mm)" value="32"/>
+<gridsize name="10 pts (~3.5 mm)" value="10"/>
+<gridsize name="20 pts (~7 mm)" value="20"/>
+<gridsize name="14 pts (~5 mm)" value="14"/>
+<gridsize name="28 pts (~10 mm)" value="28"/>
+<gridsize name="56 pts (~20 mm)" value="56"/>
+<anglesize name="90 deg" value="90"/>
+<anglesize name="60 deg" value="60"/>
+<anglesize name="45 deg" value="45"/>
+<anglesize name="30 deg" value="30"/>
+<anglesize name="22.5 deg" value="22.5"/>
+<tiling name="falling" angle="-60" step="4" width="1"/>
+<tiling name="rising" angle="30" step="4" width="1"/>
+</ipestyle>
+<page>
+<layer name="alpha"/>
+<view layers="alpha" active="alpha"/>
+<path layer="alpha" fill="lightblue">
+109.771 601.912 m
+159.595 601.797 l
+140.058 541.915 l
+h
+</path>
+<path fill="lightblue">
+79.8776 552.169 m
+109.756 601.699 l
+139.812 542.209 l
+h
+</path>
+<path fill="lightblue">
+69.8453 682.419 m
+159.925 712.208 l
+90.12 732.039 l
+h
+</path>
+<text matrix="1 0 0 1 -230.178 22.1775" transformations="translations" pos="380 530" stroke="seagreen" type="label" width="76.735" height="8.307" depth="2.32" valign="baseline" size="large">Alpha complex</text>
+<text matrix="1 0 0 1 -212.333 18.6762" transformations="translations" pos="282.952 524.893" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">0</text>
+<text matrix="1 0 0 1 -210.178 22.1775" transformations="translations" pos="352.708 510.349" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">1</text>
+<text matrix="1 0 0 1 -210.178 22.1775" transformations="translations" pos="310.693 578.759" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">2</text>
+<text matrix="1 0 0 1 -210.178 22.1775" transformations="translations" pos="375.332 578.49" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">3</text>
+<text matrix="1 0 0 1 -210.178 22.1775" transformations="translations" pos="272.179 660.635" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">4</text>
+<text matrix="1 0 0 1 -209.478 12.0238" transformations="translations" pos="296.419 724.197" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">5</text>
+<text matrix="1 0 0 1 -210.178 22.1775" transformations="translations" pos="375.332 689.453" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
+<path matrix="1 0 0 1 31.9779 -58.7483" stroke="darkgray">
+58.1341 0 0 58.1341 218.925 692.601 e
+</path>
+<path matrix="1 0 0 1 29.8225 22.1775" stroke="black" pen="heavier">
+60 710 m
+40 660 l
+</path>
+<path matrix="1 0 0 1 29.8225 22.1775" stroke="black" pen="heavier">
+40 660 m
+130 690 l
+</path>
+<path matrix="1 0 0 1 29.8225 22.1775" stroke="black" pen="heavier">
+130 690 m
+60 710 l
+</path>
+<path matrix="1 0 0 1 29.8225 22.1775" stroke="black" pen="heavier">
+40 660 m
+80 580 l
+</path>
+<path matrix="1 0 0 1 29.8225 22.1775" stroke="black" pen="heavier">
+80 580 m
+130 580 l
+130 580 l
+</path>
+<path matrix="1 0 0 1 29.8225 22.1775" stroke="black" pen="heavier">
+130 580 m
+110 520 l
+</path>
+<path matrix="1 0 0 1 29.8225 22.1775" stroke="black" pen="heavier">
+110 520 m
+50 530 l
+50 530 l
+50 530 l
+</path>
+<path matrix="1 0 0 1 29.8225 22.1775" stroke="black" pen="heavier">
+50 530 m
+80 580 l
+</path>
+<path matrix="1 0 0 1 29.8225 22.1775" stroke="black" pen="heavier">
+130 580 m
+130 690 l
+</path>
+<use matrix="1 0 0 1 142.618 -109.867" name="mark/fdisk(sfx)" pos="108.285 743.72" size="normal" stroke="darkgray" fill="white"/>
+<path matrix="1 0 0 1 142.618 -109.867" stroke="darkgray">
+108.275 743.531 m
+166.45 743.531 l
+</path>
+<text matrix="1 0 0 1 142.618 -109.867" transformations="translations" pos="127.397 746.763" stroke="darkgray" type="label" width="6.41" height="4.289" depth="0" valign="baseline">$\alpha$</text>
+<use matrix="1 0 0 1 -209.478 12.0238" name="mark/fdisk(sfx)" pos="300 720" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -210.178 22.1775" name="mark/fdisk(sfx)" pos="280 660" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -210.178 22.1775" name="mark/fdisk(sfx)" pos="370 690" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -210.178 22.1775" name="mark/fdisk(sfx)" pos="370 580" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -210.178 22.1775" name="mark/fdisk(sfx)" pos="290 530" size="normal" stroke="black" fill="white"/>
+<path matrix="1 0 0 1 -40 -8" stroke="black" pen="heavier">
+150.038 609.9 m
+179.929 549.727 l
+</path>
+<use matrix="1 0 0 1 -210.178 22.1775" name="mark/fdisk(sfx)" pos="320 580" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -210.178 22.1775" name="mark/fdisk(sfx)" pos="350 520" size="normal" stroke="black" fill="white"/>
+</page>
+</ipe>
diff --git a/src/Alpha_complex/doc/alpha_complex_representation.png b/src/Alpha_complex/doc/alpha_complex_representation.png
new file mode 100644
index 00000000..06e54c06
--- /dev/null
+++ b/src/Alpha_complex/doc/alpha_complex_representation.png
Binary files differ
diff --git a/src/Alpha_complex/example/Alpha_complex_from_off.cpp b/src/Alpha_complex/example/Alpha_complex_from_off.cpp
new file mode 100644
index 00000000..4f381892
--- /dev/null
+++ b/src/Alpha_complex/example/Alpha_complex_from_off.cpp
@@ -0,0 +1,44 @@
+#include <stdlib.h>  // for atof and exit
+
+#include <CGAL/Epick_d.h>
+
+#include <iostream>
+#include <string>
+
+#include <gudhi/Alpha_complex.h>
+
+void usage(char * const progName) {
+ std::cerr << "Usage: " << progName << " filename.off alpha_square_max_value" << std::endl;
+ std::cerr << " i.e.: " << progName << " ../../data/points/alphacomplexdoc.off 60.0" << std::endl;
+ exit(-1); // ----- >>
+}
+
+int main(int argc, char **argv) {
+ if (argc != 3) {
+ std::cerr << "Error: Number of arguments (" << argc << ") is not correct" << std::endl;
+ usage(argv[0]);
+ }
+
+ std::string off_file_name(argv[1]);
+ double alpha_square_max_value = atof(argv[2]);
+
+ // ----------------------------------------------------------------------------
+ // Init of an alpha complex from an OFF file
+ // ----------------------------------------------------------------------------
+ typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Kernel;
+ Gudhi::alphacomplex::Alpha_complex<Kernel> alpha_complex_from_file(off_file_name, alpha_square_max_value);
+
+ // ----------------------------------------------------------------------------
+ // Display information about the alpha complex
+ // ----------------------------------------------------------------------------
+ std::cout << "Alpha complex is of dimension " << alpha_complex_from_file.dimension() <<
+ " - " << alpha_complex_from_file.num_simplices() << " simplices - " <<
+ alpha_complex_from_file.num_vertices() << " vertices." << std::endl;
+
+ std::cout << "Iterator on alpha complex simplices in the filtration order, with [filtration value]:" << std::endl;
+ for (auto f_simplex : alpha_complex_from_file.filtration_simplex_range()) {
+ std::cout << " ( ";
+ for (auto vertex : alpha_complex_from_file.simplex_vertex_range(f_simplex)) {
+ std::cout << vertex << " ";
+ }
+ std::cout << ") -> " << "[" << alpha_complex_from_file.filtration(f_simplex) << "] ";
+ std::cout << std::endl;
+ }
+ return 0;
+}
diff --git a/src/Alpha_complex/example/Alpha_complex_from_points.cpp b/src/Alpha_complex/example/Alpha_complex_from_points.cpp
new file mode 100644
index 00000000..00e988a6
--- /dev/null
+++ b/src/Alpha_complex/example/Alpha_complex_from_points.cpp
@@ -0,0 +1,63 @@
+#include <stdlib.h>
+#include <CGAL/Delaunay_triangulation.h>
+#include <CGAL/Epick_d.h>
+
+#include <iostream>
+#include <string>
+#include <vector>
+
+#include <gudhi/Alpha_complex.h>
+
+typedef CGAL::Epick_d< CGAL::Dimension_tag<2> > Kernel;
+typedef Kernel::Point_d Point;
+typedef std::vector<Point> Vector_of_points;
+
+void usage(char * const progName) {
+ std::cerr << "Usage: " << progName << " alpha_square_max_value" << std::endl;
+ std::cerr << " i.e.: " << progName << " 32.0" << std::endl;
+ exit(-1); // ----- >>
+}
+
+int main(int argc, char **argv) {
+ if (argc != 2) {
+ std::cerr << "Error: Number of arguments (" << argc << ") is not correct" << std::endl;
+ usage(argv[0]);
+ }
+
+ double alpha_square_max_value = atof(argv[1]);
+
+ // ----------------------------------------------------------------------------
+ // Init of a list of points
+ // ----------------------------------------------------------------------------
+ Vector_of_points points;
+ points.push_back(Point(1.0, 1.0));
+ points.push_back(Point(7.0, 0.0));
+ points.push_back(Point(4.0, 6.0));
+ points.push_back(Point(9.0, 6.0));
+ points.push_back(Point(0.0, 14.0));
+ points.push_back(Point(2.0, 19.0));
+ points.push_back(Point(9.0, 17.0));
+
+ // ----------------------------------------------------------------------------
+ // Init of an alpha complex from the list of points
+ // ----------------------------------------------------------------------------
+ Gudhi::alphacomplex::Alpha_complex<Kernel> alpha_complex_from_points(points, alpha_square_max_value);
+
+ // ----------------------------------------------------------------------------
+ // Display information about the alpha complex
+ // ----------------------------------------------------------------------------
+ std::cout << "Alpha complex is of dimension " << alpha_complex_from_points.dimension() <<
+ " - " << alpha_complex_from_points.num_simplices() << " simplices - " <<
+ alpha_complex_from_points.num_vertices() << " vertices." << std::endl;
+
+ std::cout << "Iterator on alpha complex simplices in the filtration order, with [filtration value]:" << std::endl;
+ for (auto f_simplex : alpha_complex_from_points.filtration_simplex_range()) {
+ std::cout << " ( ";
+ for (auto vertex : alpha_complex_from_points.simplex_vertex_range(f_simplex)) {
+ std::cout << vertex << " ";
+ }
+ std::cout << ") -> " << "[" << alpha_complex_from_points.filtration(f_simplex) << "] ";
+ std::cout << std::endl;
+ }
+ return 0;
+}
diff --git a/src/Alpha_complex/example/CMakeLists.txt b/src/Alpha_complex/example/CMakeLists.txt
new file mode 100644
index 00000000..33ff6805
--- /dev/null
+++ b/src/Alpha_complex/example/CMakeLists.txt
@@ -0,0 +1,42 @@
+cmake_minimum_required(VERSION 2.6)
+project(GUDHIAlphaShapesExample)
+
+# need CGAL 4.7
+# cmake -DCGAL_DIR=~/workspace/CGAL-4.7-Ic-41 ../../..
+if(CGAL_FOUND)
+ if (NOT CGAL_VERSION VERSION_LESS 4.7.0)
+ message(STATUS "CGAL version: ${CGAL_VERSION}.")
+
+ include( ${CGAL_USE_FILE} )
+  # When ${CGAL_USE_FILE} is included, CMAKE_CXX_FLAGS are overwritten.
+  # cf. http://doc.cgal.org/latest/Manual/installation.html#title40
+  # A workaround is to add "-std=c++11" again afterwards.
+  # A cleaner fix would be to use https://cmake.org/cmake/help/v3.1/prop_gbl/CMAKE_CXX_KNOWN_FEATURES.html
+  # or, even better, https://cmake.org/cmake/help/v3.1/variable/CMAKE_CXX_STANDARD.html
+  # but that requires at least CMake 3.1.
+ if(NOT MSVC)
+ include(CheckCXXCompilerFlag)
+ CHECK_CXX_COMPILER_FLAG(-std=c++11 COMPILER_SUPPORTS_CXX11)
+ if(COMPILER_SUPPORTS_CXX11)
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11")
+ endif()
+ endif()
+ # - End of workaround
+
+ find_package(Eigen3 3.1.0)
+ if (EIGEN3_FOUND)
+ message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.")
+ include( ${EIGEN3_USE_FILE} )
+
+ add_executable ( alphaoffreader Alpha_complex_from_off.cpp )
+ target_link_libraries(alphaoffreader ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY})
+
+ add_executable ( alphapoints Alpha_complex_from_points.cpp )
+ target_link_libraries(alphapoints ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY})
+ else()
+ message(WARNING "Eigen3 not found. Version 3.1.0 is required for Alpha shapes feature.")
+ endif()
+ else()
+ message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile Alpha shapes feature. Version 4.6.0 is required.")
+ endif ()
+endif()
diff --git a/src/Alpha_complex/example/alphaoffreader_for_doc_32.txt b/src/Alpha_complex/example/alphaoffreader_for_doc_32.txt
new file mode 100644
index 00000000..553431a9
--- /dev/null
+++ b/src/Alpha_complex/example/alphaoffreader_for_doc_32.txt
@@ -0,0 +1,22 @@
+Alpha complex is of dimension 2 - 20 simplices - 7 vertices.
+Iterator on alpha complex simplices in the filtration order, with [filtration value]:
+ ( 0 ) -> [0]
+ ( 1 ) -> [0]
+ ( 2 ) -> [0]
+ ( 3 ) -> [0]
+ ( 4 ) -> [0]
+ ( 5 ) -> [0]
+ ( 6 ) -> [0]
+ ( 5 4 ) -> [6.25]
+ ( 4 1 ) -> [20]
+ ( 4 2 ) -> [8.5]
+ ( 6 2 ) -> [9.25]
+ ( 6 5 ) -> [10]
+ ( 6 4 ) -> [11.25]
+ ( 6 5 4 ) -> [12.5]
+ ( 6 4 2 ) -> [12.9959]
+ ( 3 0 ) -> [13.25]
+ ( 4 1 ) -> [20]
+ ( 1 0 ) -> [22.7367]
+ ( 3 1 0 ) -> [22.7367]
+ ( 5 0 ) -> [30.25]
diff --git a/src/Alpha_complex/example/alphaoffreader_for_doc_60.txt b/src/Alpha_complex/example/alphaoffreader_for_doc_60.txt
new file mode 100644
index 00000000..71f29a00
--- /dev/null
+++ b/src/Alpha_complex/example/alphaoffreader_for_doc_60.txt
@@ -0,0 +1,27 @@
+Alpha complex is of dimension 2 - 25 simplices - 7 vertices.
+Iterator on alpha complex simplices in the filtration order, with [filtration value]:
+ ( 0 ) -> [0]
+ ( 1 ) -> [0]
+ ( 2 ) -> [0]
+ ( 3 ) -> [0]
+ ( 4 ) -> [0]
+ ( 5 ) -> [0]
+ ( 6 ) -> [0]
+ ( 3 2 ) -> [6.25]
+ ( 5 4 ) -> [7.25]
+ ( 2 0 ) -> [8.5]
+ ( 1 0 ) -> [9.25]
+ ( 3 1 ) -> [10]
+ ( 2 1 ) -> [11.25]
+ ( 3 2 1 ) -> [12.5]
+ ( 2 1 0 ) -> [12.9959]
+ ( 6 5 ) -> [13.25]
+ ( 4 2 ) -> [20]
+ ( 6 4 ) -> [22.7367]
+ ( 6 5 4 ) -> [22.7367]
+ ( 6 3 ) -> [30.25]
+ ( 6 2 ) -> [36.5]
+ ( 6 3 2 ) -> [36.5]
+ ( 6 4 2 ) -> [37.2449]
+ ( 4 0 ) -> [59.7107]
+ ( 4 2 0 ) -> [59.7107]
diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex.h b/src/Alpha_complex/include/gudhi/Alpha_complex.h
new file mode 100644
index 00000000..9f931066
--- /dev/null
+++ b/src/Alpha_complex/include/gudhi/Alpha_complex.h
@@ -0,0 +1,386 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2015 INRIA Saclay (France)
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef ALPHA_COMPLEX_H_
+#define ALPHA_COMPLEX_H_
+
+// to construct a simplex_tree from Delaunay_triangulation
+#include <gudhi/graph_simplicial_complex.h>
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Debug_utils.h>
+// to construct a Delaunay_triangulation from a OFF file
+#include <gudhi/Delaunay_triangulation_off_io.h>
+
+#include <stdlib.h>
+#include <math.h> // isnan, fmax
+
+#include <CGAL/Delaunay_triangulation.h>
+#include <CGAL/Epick_d.h>
+
+#include <iostream>
+#include <vector>
+#include <string>
+#include <limits> // NaN
+#include <map>
+#include <utility> // std::pair
+#include <stdexcept>
+
+namespace Gudhi {
+
+namespace alphacomplex {
+
+/**
+ * \brief Alpha complex data structure.
+ *
+ * \details
+ * The data structure can be constructed from a CGAL Delaunay triangulation (for more information on CGAL Delaunay
+ * triangulations, please refer to the corresponding chapter of the CGAL manual: http://doc.cgal.org/latest/Triangulation/)
+ * or from an OFF file (cf. Delaunay_triangulation_off_reader).
+ *
+ * Please refer to \ref alpha_complex for examples.
+ *
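+ * The sketch below illustrates one way to build the complex from an OFF file and to iterate over its filtration.
+ * It is illustrative only: the file name "pointset.off" and the squared alpha threshold 60.0 are placeholder
+ * values, and the kernel choice simply mirrors the bundled examples.
+ *
+ * \code
+ * typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Kernel;
+ * // "pointset.off" and 60.0 are placeholder values.
+ * Gudhi::alphacomplex::Alpha_complex<Kernel> alpha_complex("pointset.off", 60.0);
+ * for (auto simplex : alpha_complex.filtration_simplex_range()) {
+ *   // Simplices are returned in filtration order; the filtration value is the squared alpha value.
+ *   std::cout << alpha_complex.filtration(simplex) << std::endl;
+ * }
+ * \endcode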
+ */
+template<class Kernel>
+class Alpha_complex : public Simplex_tree<> {
+ private:
+ // From Simplex_tree
+ // Type required to insert into a simplex_tree (with or without subfaces).
+ typedef std::vector<Vertex_handle> Vector_vertex;
+
+ // Simplex_result is the type returned from simplex_tree insert function.
+ typedef typename std::pair<Simplex_handle, bool> Simplex_result;
+
+ // Delaunay_triangulation type required to create an alpha-complex.
+ typedef typename CGAL::Delaunay_triangulation<Kernel> Delaunay_triangulation;
+
+ typedef typename Kernel::Compute_squared_radius_d Squared_Radius;
+ typedef typename Kernel::Side_of_bounded_sphere_d Is_Gabriel;
+ typedef typename Kernel::Point_dimension_d Point_Dimension;
+
+ typedef typename Kernel::Point_d Point_d;
+
+ // Type required to compute squared radius, or side of bounded sphere on a vector of points.
+ typedef typename std::vector<Point_d> Vector_of_CGAL_points;
+
+ // Vertex_iterator type from CGAL.
+ typedef typename Delaunay_triangulation::Vertex_iterator CGAL_vertex_iterator;
+
+ // size_type type from CGAL.
+ typedef typename Delaunay_triangulation::size_type size_type;
+
+ // Double map type to switch from CGAL vertex iterator to simplex tree vertex handle and vice versa.
+ typedef typename std::map< CGAL_vertex_iterator, Vertex_handle > Map_vertex_iterator_to_handle;
+ //typedef typename std::map< Vertex_handle, CGAL_vertex_iterator > Map_vertex_handle_to_iterator;
+ typedef typename std::vector< CGAL_vertex_iterator > Vector_vertex_iterator;
+
+ private:
+ /** \brief Map to switch from CGAL vertex iterator to simplex tree vertex handle.*/
+ Map_vertex_iterator_to_handle vertex_iterator_to_handle_;
+ /** \brief Vertex iterator vector to switch from simplex tree vertex handle to CGAL vertex iterator.
+ * Vertex handles are inserted sequentially, starting at 0.*/
+ Vector_vertex_iterator vertex_handle_to_iterator_;
+  /** \brief Pointer to the CGAL Delaunay triangulation.*/
+  Delaunay_triangulation* triangulation_;
+  /** \brief Kernel used to access the triangulation_ functions.*/
+ Kernel kernel_;
+
+ public:
+ /** \brief Alpha_complex constructor from an OFF file name.
+ * Uses the Delaunay_triangulation_off_reader to construct the Delaunay triangulation required to initialize
+ * the Alpha_complex.
+ *
+ * @param[in] off_file_name OFF file [path and] name.
+ * @param[in] max_alpha_square maximum for alpha square value. Default value is +\f$\infty\f$.
+ */
+ Alpha_complex(const std::string& off_file_name,
+ Filtration_value max_alpha_square = std::numeric_limits<Filtration_value>::infinity())
+ : triangulation_(nullptr) {
+ Gudhi::Delaunay_triangulation_off_reader<Delaunay_triangulation> off_reader(off_file_name);
+ if (!off_reader.is_valid()) {
+ std::cerr << "Alpha_complex - Unable to read file " << off_file_name << "\n";
+ exit(-1); // ----- >>
+ }
+ triangulation_ = off_reader.get_complex();
+ init(max_alpha_square);
+ }
+
+ /** \brief Alpha_complex constructor from a Delaunay triangulation.
+ *
+   * @param[in] triangulation_ptr Pointer to a <a target="_blank"
+ * href="http://doc.cgal.org/latest/Triangulation/index.html#Chapter_Triangulations">
+ * CGAL::Delaunay_triangulation<Kernel></a> \cite cgal:hdj-t-15b.
+ * @param[in] max_alpha_square maximum for alpha square value. Default value is +\f$\infty\f$.
+ */
+ Alpha_complex(Delaunay_triangulation* triangulation_ptr,
+ Filtration_value max_alpha_square = std::numeric_limits<Filtration_value>::infinity())
+ : triangulation_(triangulation_ptr) {
+ init(max_alpha_square);
+ }
+
+ /** \brief Alpha_complex constructor from a list of points.
+ *
+   * @param[in] points Range of points to triangulate. Points must be of type Kernel::Point_d.
+ * @param[in] max_alpha_square maximum for alpha square value. Default value is +\f$\infty\f$.
+ *
+ * The type InputPointRange must be a range for which std::begin and
+ * std::end return input iterators on a Kernel::Point_d.
+ * \warning In debug mode, the exception std::invalid_argument is thrown if an empty input point range is passed as
+ * argument.
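+   *
+   * A minimal sketch, assuming a 2-dimensional CGAL::Epick_d kernel as in the bundled example; the coordinates and
+   * the 32.0 threshold are arbitrary illustrative values:
+   *
+   * \code
+   * typedef CGAL::Epick_d< CGAL::Dimension_tag<2> > Kernel;
+   * std::vector<Kernel::Point_d> points;
+   * // Arbitrary illustrative coordinates.
+   * points.push_back(Kernel::Point_d(1.0, 1.0));
+   * points.push_back(Kernel::Point_d(7.0, 0.0));
+   * points.push_back(Kernel::Point_d(4.0, 6.0));
+   * Gudhi::alphacomplex::Alpha_complex<Kernel> alpha_complex(points, 32.0);
+   * \endcode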
+ */
+ template<typename InputPointRange >
+ Alpha_complex(const InputPointRange& points,
+ Filtration_value max_alpha_square = std::numeric_limits<Filtration_value>::infinity())
+ : triangulation_(nullptr) {
+ auto first = std::begin(points);
+ auto last = std::end(points);
+
+ GUDHI_CHECK((first == last),
+ std::invalid_argument ("Alpha_complex::Alpha_complex(InputPointRange) - Empty input point range"));
+
+ if (first != last) {
+ // point_dimension function initialization
+ Point_Dimension point_dimension = kernel_.point_dimension_d_object();
+
+      // The Delaunay triangulation ambient dimension is the dimension of the input points.
+      triangulation_ = new Delaunay_triangulation(point_dimension(*first));
+
+ size_type inserted = triangulation_->insert(first, last);
+ if (inserted != (last -first)) {
+ std::cerr << "Alpha_complex - insertion failed " << inserted << " != " << (last -first) << "\n";
+ exit(-1); // ----- >>
+ }
+ init(max_alpha_square);
+ }
+ }
+
+ /** \brief Alpha_complex destructor.
+ *
+ * @warning Deletes the Delaunay triangulation.
+ */
+ ~Alpha_complex() {
+ delete triangulation_;
+ }
+
+ /** \brief get_point returns the point corresponding to the vertex given as parameter.
+ *
+ * @param[in] vertex Vertex handle of the point to retrieve.
+ * @return The point found.
+ * @warning Exception std::out_of_range is thrown in case vertex is not found.
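+   *
+   * For illustration only (alpha_complex names a hypothetical, already constructed instance, and vertex handle 0 is
+   * assumed to exist):
+   * \code
+   * Kernel::Point_d first_point = alpha_complex.get_point(0);  // 0 is an assumed existing vertex handle
+   * \endcode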
+ */
+ Point_d get_point(Vertex_handle vertex) const {
+ return vertex_handle_to_iterator_.at(vertex)->point();
+ }
+
+ private:
+ /** \brief Initialize the Alpha_complex from the Delaunay triangulation.
+ *
+ * @param[in] max_alpha_square maximum for alpha square value.
+ *
+   * @warning The Delaunay triangulation must already be constructed, with at least one vertex, and its dimension
+   * must be positive.
+   *
+   * Initialization can only be performed once.
+ */
+ void init(Filtration_value max_alpha_square) {
+ if (triangulation_ == nullptr) {
+ std::cerr << "Alpha_complex init - Cannot init from a NULL triangulation\n";
+ return; // ----- >>
+ }
+ if (triangulation_->number_of_vertices() < 1) {
+ std::cerr << "Alpha_complex init - Cannot init from a triangulation without vertices\n";
+ return; // ----- >>
+ }
+ if (triangulation_->maximal_dimension() < 1) {
+ std::cerr << "Alpha_complex init - Cannot init from a zero-dimension triangulation\n";
+ return; // ----- >>
+ }
+ if (num_vertices() > 0) {
+ std::cerr << "Alpha_complex init - Cannot init twice\n";
+ return; // ----- >>
+ }
+
+ set_dimension(triangulation_->maximal_dimension());
+    // set_filtration to +inf so that prune_above_filtration can be applied afterwards (if necessary)
+ set_filtration(std::numeric_limits<Filtration_value>::infinity());
+
+ // --------------------------------------------------------------------------------------------
+ // double map to retrieve simplex tree vertex handles from CGAL vertex iterator and vice versa
+ // Start to insert at handle = 0 - default integer value
+ Vertex_handle vertex_handle = Vertex_handle();
+ // Loop on triangulation vertices list
+ for (CGAL_vertex_iterator vit = triangulation_->vertices_begin(); vit != triangulation_->vertices_end(); ++vit) {
+ if (!triangulation_->is_infinite(*vit)) {
+#ifdef DEBUG_TRACES
+ std::cout << "Vertex insertion - " << vertex_handle << " -> " << vit->point() << std::endl;
+#endif // DEBUG_TRACES
+
+ vertex_iterator_to_handle_.emplace(vit, vertex_handle);
+ vertex_handle_to_iterator_.push_back(vit);
+ vertex_handle++;
+ }
+ }
+ // --------------------------------------------------------------------------------------------
+
+ // --------------------------------------------------------------------------------------------
+ // Simplex_tree construction from loop on triangulation finite full cells list
+ for (auto cit = triangulation_->finite_full_cells_begin(); cit != triangulation_->finite_full_cells_end(); ++cit) {
+ Vector_vertex vertexVector;
+#ifdef DEBUG_TRACES
+ std::cout << "Simplex_tree insertion ";
+#endif // DEBUG_TRACES
+ for (auto vit = cit->vertices_begin(); vit != cit->vertices_end(); ++vit) {
+ if (*vit != nullptr) {
+#ifdef DEBUG_TRACES
+ std::cout << " " << vertex_iterator_to_handle_[*vit];
+#endif // DEBUG_TRACES
+ // Vector of vertex construction for simplex_tree structure
+ vertexVector.push_back(vertex_iterator_to_handle_[*vit]);
+ }
+ }
+#ifdef DEBUG_TRACES
+ std::cout << std::endl;
+#endif // DEBUG_TRACES
+ // Insert each simplex and its subfaces in the simplex tree - filtration is NaN
+ Simplex_result insert_result = insert_simplex_and_subfaces(vertexVector,
+ std::numeric_limits<double>::quiet_NaN());
+ }
+ // --------------------------------------------------------------------------------------------
+
+ // --------------------------------------------------------------------------------------------
+ // ### For i : d -> 0
+ for (int decr_dim = dimension(); decr_dim >= 0; decr_dim--) {
+ // ### Foreach Sigma of dim i
+ for (auto f_simplex : skeleton_simplex_range(decr_dim)) {
+ int f_simplex_dim = dimension(f_simplex);
+ if (decr_dim == f_simplex_dim) {
+ Vector_of_CGAL_points pointVector;
+#ifdef DEBUG_TRACES
+ std::cout << "Sigma of dim " << decr_dim << " is";
+#endif // DEBUG_TRACES
+ for (auto vertex : simplex_vertex_range(f_simplex)) {
+ pointVector.push_back(get_point(vertex));
+#ifdef DEBUG_TRACES
+ std::cout << " " << vertex;
+#endif // DEBUG_TRACES
+ }
+#ifdef DEBUG_TRACES
+ std::cout << std::endl;
+#endif // DEBUG_TRACES
+ // ### If filt(Sigma) is NaN : filt(Sigma) = alpha(Sigma)
+ if (isnan(filtration(f_simplex))) {
+ Filtration_value alpha_complex_filtration = 0.0;
+ // No need to compute squared_radius on a single point - alpha is 0.0
+ if (f_simplex_dim > 0) {
+ // squared_radius function initialization
+ Squared_Radius squared_radius = kernel_.compute_squared_radius_d_object();
+
+ alpha_complex_filtration = squared_radius(pointVector.begin(), pointVector.end());
+ }
+ assign_filtration(f_simplex, alpha_complex_filtration);
+#ifdef DEBUG_TRACES
+ std::cout << "filt(Sigma) is NaN : filt(Sigma) =" << filtration(f_simplex) << std::endl;
+#endif // DEBUG_TRACES
+ }
+ propagate_alpha_filtration(f_simplex, decr_dim);
+ }
+ }
+ }
+ // --------------------------------------------------------------------------------------------
+
+ // --------------------------------------------------------------------------------------------
+    // Alpha values are computed approximately, so make the filtration non-decreasing when going up in dimension
+ make_filtration_non_decreasing();
+ // Remove all simplices that have a filtration value greater than max_alpha_square
+ prune_above_filtration(max_alpha_square);
+ // --------------------------------------------------------------------------------------------
+ }
+
+ template<typename Simplex_handle>
+ void propagate_alpha_filtration(Simplex_handle f_simplex, int decr_dim) {
+ // ### Foreach Tau face of Sigma
+ for (auto f_boundary : boundary_simplex_range(f_simplex)) {
+#ifdef DEBUG_TRACES
+ std::cout << " | --------------------------------------------------\n";
+ std::cout << " | Tau ";
+ for (auto vertex : simplex_vertex_range(f_boundary)) {
+ std::cout << vertex << " ";
+ }
+ std::cout << "is a face of Sigma\n";
+      std::cout << " | isnan(filtration(Tau))=" << isnan(filtration(f_boundary)) << std::endl;
+#endif // DEBUG_TRACES
+ // ### If filt(Tau) is not NaN
+ if (!isnan(filtration(f_boundary))) {
+ // ### filt(Tau) = fmin(filt(Tau), filt(Sigma))
+ Filtration_value alpha_complex_filtration = fmin(filtration(f_boundary), filtration(f_simplex));
+ assign_filtration(f_boundary, alpha_complex_filtration);
+#ifdef DEBUG_TRACES
+ std::cout << " | filt(Tau) = fmin(filt(Tau), filt(Sigma)) = " << filtration(f_boundary) << std::endl;
+#endif // DEBUG_TRACES
+ // ### Else
+ } else {
+        // No need to compute is_gabriel when Sigma is an edge: Tau is then a single vertex
+        // e.g. Sigma = (3,1) => Tau = (1)
+ if (decr_dim > 1) {
+ // insert the Tau points in a vector for is_gabriel function
+ Vector_of_CGAL_points pointVector;
+ Vertex_handle vertexForGabriel = Vertex_handle();
+ for (auto vertex : simplex_vertex_range(f_boundary)) {
+ pointVector.push_back(get_point(vertex));
+ }
+ // Retrieve the Sigma point that is not part of Tau - parameter for is_gabriel function
+ Point_d point_for_gabriel;
+ for (auto vertex : simplex_vertex_range(f_simplex)) {
+ point_for_gabriel = get_point(vertex);
+ if (std::find(pointVector.begin(), pointVector.end(), point_for_gabriel) == pointVector.end()) {
+ // vertex is not found in Tau
+ vertexForGabriel = vertex;
+ // No need to continue loop
+ break;
+ }
+ }
+ // is_gabriel function initialization
+ Is_Gabriel is_gabriel = kernel_.side_of_bounded_sphere_d_object();
+ bool is_gab = is_gabriel(pointVector.begin(), pointVector.end(), point_for_gabriel)
+ != CGAL::ON_BOUNDED_SIDE;
+#ifdef DEBUG_TRACES
+ std::cout << " | Tau is_gabriel(Sigma)=" << is_gab << " - vertexForGabriel=" << vertexForGabriel << std::endl;
+#endif // DEBUG_TRACES
+ // ### If Tau is not Gabriel of Sigma
+ if (false == is_gab) {
+ // ### filt(Tau) = filt(Sigma)
+ Filtration_value alpha_complex_filtration = filtration(f_simplex);
+ assign_filtration(f_boundary, alpha_complex_filtration);
+#ifdef DEBUG_TRACES
+ std::cout << " | filt(Tau) = filt(Sigma) = " << filtration(f_boundary) << std::endl;
+#endif // DEBUG_TRACES
+ }
+ }
+ }
+ }
+ }
+};
+
+} // namespace alphacomplex
+
+} // namespace Gudhi
+
+#endif // ALPHA_COMPLEX_H_
diff --git a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp
new file mode 100644
index 00000000..2912019d
--- /dev/null
+++ b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp
@@ -0,0 +1,239 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2015 INRIA Saclay (France)
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#define BOOST_TEST_DYN_LINK
+#define BOOST_TEST_MODULE "alpha_complex"
+#include <boost/test/unit_test.hpp>
+
+#include <CGAL/Delaunay_triangulation.h>
+#include <CGAL/Epick_d.h>
+
+#include <cmath> // float comparison
+#include <limits>
+#include <string>
+#include <vector>
+
+// to construct a Delaunay_triangulation from an OFF file
+#include <gudhi/Delaunay_triangulation_off_io.h>
+#include <gudhi/Alpha_complex.h>
+
+// Use dynamic_dimension_tag for the user to be able to set dimension
+typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Kernel_d;
+// The triangulation uses the default instantiation of the TriangulationDataStructure template parameter
+
+BOOST_AUTO_TEST_CASE(ALPHA_DOC_OFF_file) {
+ // ----------------------------------------------------------------------------
+ //
+  //      Init of an alpha-complex from an OFF file
+ //
+ // ----------------------------------------------------------------------------
+ std::string off_file_name("alphacomplexdoc.off");
+ double max_alpha_square_value = 60.0;
+ std::cout << "========== OFF FILE NAME = " << off_file_name << " - alpha²=" <<
+ max_alpha_square_value << "==========" << std::endl;
+
+ Gudhi::alphacomplex::Alpha_complex<Kernel_d> alpha_complex_from_file(off_file_name, max_alpha_square_value);
+
+ const int DIMENSION = 2;
+ std::cout << "alpha_complex_from_file.dimension()=" << alpha_complex_from_file.dimension() << std::endl;
+ BOOST_CHECK(alpha_complex_from_file.dimension() == DIMENSION);
+
+ const int NUMBER_OF_VERTICES = 7;
+ std::cout << "alpha_complex_from_file.num_vertices()=" << alpha_complex_from_file.num_vertices() << std::endl;
+ BOOST_CHECK(alpha_complex_from_file.num_vertices() == NUMBER_OF_VERTICES);
+
+ const int NUMBER_OF_SIMPLICES = 25;
+ std::cout << "alpha_complex_from_file.num_simplices()=" << alpha_complex_from_file.num_simplices() << std::endl;
+ BOOST_CHECK(alpha_complex_from_file.num_simplices() == NUMBER_OF_SIMPLICES);
+
+}
+
+BOOST_AUTO_TEST_CASE(ALPHA_DOC_OFF_file_filtered) {
+ // ----------------------------------------------------------------------------
+ //
+  //      Init of an alpha-complex from an OFF file
+ //
+ // ----------------------------------------------------------------------------
+ std::string off_file_name("alphacomplexdoc.off");
+ double max_alpha_square_value = 59.0;
+ std::cout << "========== OFF FILE NAME = " << off_file_name << " - alpha²=" <<
+ max_alpha_square_value << "==========" << std::endl;
+
+ Gudhi::alphacomplex::Alpha_complex<Kernel_d> alpha_complex_from_file(off_file_name, max_alpha_square_value);
+
+ const int DIMENSION = 2;
+ std::cout << "alpha_complex_from_file.dimension()=" << alpha_complex_from_file.dimension() << std::endl;
+ BOOST_CHECK(alpha_complex_from_file.dimension() == DIMENSION);
+
+ const int NUMBER_OF_VERTICES = 7;
+ std::cout << "alpha_complex_from_file.num_vertices()=" << alpha_complex_from_file.num_vertices() << std::endl;
+ BOOST_CHECK(alpha_complex_from_file.num_vertices() == NUMBER_OF_VERTICES);
+
+ const int NUMBER_OF_SIMPLICES = 23;
+ std::cout << "alpha_complex_from_file.num_simplices()=" << alpha_complex_from_file.num_simplices() << std::endl;
+ BOOST_CHECK(alpha_complex_from_file.num_simplices() == NUMBER_OF_SIMPLICES);
+}
+
+bool are_almost_the_same(float a, float b) {
+ return std::fabs(a - b) < std::numeric_limits<float>::epsilon();
+}
+
+// Use a static dimension tag (dimension 4) for the following tests
+typedef CGAL::Epick_d< CGAL::Dimension_tag<4> > Kernel_s;
+typedef Kernel_s::Point_d Point;
+typedef std::vector<Point> Vector_of_points;
+
+
+bool is_point_in_list(Vector_of_points points_list, Point point) {
+ for (auto& point_in_list : points_list) {
+ if (point_in_list == point) {
+ return true; // point found
+ }
+ }
+ return false; // point not found
+}
+
+BOOST_AUTO_TEST_CASE(Alpha_complex_from_points) {
+ // ----------------------------------------------------------------------------
+ // Init of a list of points
+ // ----------------------------------------------------------------------------
+ Vector_of_points points;
+ std::vector<double> coords = { 0.0, 0.0, 0.0, 1.0 };
+ points.push_back(Point(coords.begin(), coords.end()));
+ coords = { 0.0, 0.0, 1.0, 0.0 };
+ points.push_back(Point(coords.begin(), coords.end()));
+ coords = { 0.0, 1.0, 0.0, 0.0 };
+ points.push_back(Point(coords.begin(), coords.end()));
+ coords = { 1.0, 0.0, 0.0, 0.0 };
+ points.push_back(Point(coords.begin(), coords.end()));
+
+ // ----------------------------------------------------------------------------
+ // Init of an alpha complex from the list of points
+ // ----------------------------------------------------------------------------
+ Gudhi::alphacomplex::Alpha_complex<Kernel_s> alpha_complex_from_points(points);
+
+ std::cout << "========== Alpha_complex_from_points ==========" << std::endl;
+
+ // Another way to check num_simplices
+ std::cout << "Iterator on alpha complex simplices in the filtration order, with [filtration value]:" << std::endl;
+ int num_simplices = 0;
+ for (auto f_simplex : alpha_complex_from_points.filtration_simplex_range()) {
+ num_simplices++;
+ std::cout << " ( ";
+ for (auto vertex : alpha_complex_from_points.simplex_vertex_range(f_simplex)) {
+ std::cout << vertex << " ";
+ }
+ std::cout << ") -> " << "[" << alpha_complex_from_points.filtration(f_simplex) << "] ";
+ std::cout << std::endl;
+ }
+ BOOST_CHECK(num_simplices == 15);
+ std::cout << "alpha_complex_from_points.num_simplices()=" << alpha_complex_from_points.num_simplices() << std::endl;
+ BOOST_CHECK(alpha_complex_from_points.num_simplices() == 15);
+
+ std::cout << "alpha_complex_from_points.dimension()=" << alpha_complex_from_points.dimension() << std::endl;
+ BOOST_CHECK(alpha_complex_from_points.dimension() == 4);
+ std::cout << "alpha_complex_from_points.num_vertices()=" << alpha_complex_from_points.num_vertices() << std::endl;
+ BOOST_CHECK(alpha_complex_from_points.num_vertices() == 4);
+
+ for (auto f_simplex : alpha_complex_from_points.filtration_simplex_range()) {
+ switch (alpha_complex_from_points.dimension(f_simplex)) {
+ case 0:
+ BOOST_CHECK(are_almost_the_same(alpha_complex_from_points.filtration(f_simplex), 0.0));
+ break;
+ case 1:
+ BOOST_CHECK(are_almost_the_same(alpha_complex_from_points.filtration(f_simplex), 1.0/2.0));
+ break;
+ case 2:
+ BOOST_CHECK(are_almost_the_same(alpha_complex_from_points.filtration(f_simplex), 2.0/3.0));
+ break;
+ case 3:
+ BOOST_CHECK(are_almost_the_same(alpha_complex_from_points.filtration(f_simplex), 3.0/4.0));
+ break;
+ default:
+ BOOST_CHECK(false); // Shall not happen
+ break;
+ }
+ }
+
+ Point p0 = alpha_complex_from_points.get_point(0);
+ std::cout << "alpha_complex_from_points.get_point(0)=" << p0 << std::endl;
+ BOOST_CHECK(4 == p0.dimension());
+ BOOST_CHECK(is_point_in_list(points, p0));
+
+ Point p1 = alpha_complex_from_points.get_point(1);
+ std::cout << "alpha_complex_from_points.get_point(1)=" << p1 << std::endl;
+ BOOST_CHECK(4 == p1.dimension());
+ BOOST_CHECK(is_point_in_list(points, p1));
+
+ Point p2 = alpha_complex_from_points.get_point(2);
+ std::cout << "alpha_complex_from_points.get_point(2)=" << p2 << std::endl;
+ BOOST_CHECK(4 == p2.dimension());
+ BOOST_CHECK(is_point_in_list(points, p2));
+
+ Point p3 = alpha_complex_from_points.get_point(3);
+ std::cout << "alpha_complex_from_points.get_point(3)=" << p3 << std::endl;
+ BOOST_CHECK(4 == p3.dimension());
+ BOOST_CHECK(is_point_in_list(points, p3));
+
+ // Test to the limit
+ BOOST_CHECK_THROW (alpha_complex_from_points.get_point(4), std::out_of_range);
+ BOOST_CHECK_THROW (alpha_complex_from_points.get_point(-1), std::out_of_range);
+ BOOST_CHECK_THROW (alpha_complex_from_points.get_point(1234), std::out_of_range);
+
+ // Test after prune_above_filtration
+ alpha_complex_from_points.prune_above_filtration(0.6);
+ // Another way to check num_simplices
+ std::cout << "Iterator on alpha complex simplices in the filtration order, with [filtration value]:" << std::endl;
+ num_simplices = 0;
+ for (auto f_simplex : alpha_complex_from_points.filtration_simplex_range()) {
+ num_simplices++;
+ std::cout << " ( ";
+ for (auto vertex : alpha_complex_from_points.simplex_vertex_range(f_simplex)) {
+ std::cout << vertex << " ";
+ }
+ std::cout << ") -> " << "[" << alpha_complex_from_points.filtration(f_simplex) << "] ";
+ std::cout << std::endl;
+ }
+ BOOST_CHECK(num_simplices == 10);
+ std::cout << "alpha_complex_from_points.num_simplices()=" << alpha_complex_from_points.num_simplices() << std::endl;
+ BOOST_CHECK(alpha_complex_from_points.num_simplices() == 10);
+
+ std::cout << "alpha_complex_from_points.dimension()=" << alpha_complex_from_points.dimension() << std::endl;
+ BOOST_CHECK(alpha_complex_from_points.dimension() == 4);
+ std::cout << "alpha_complex_from_points.num_vertices()=" << alpha_complex_from_points.num_vertices() << std::endl;
+ BOOST_CHECK(alpha_complex_from_points.num_vertices() == 4);
+
+ for (auto f_simplex : alpha_complex_from_points.filtration_simplex_range()) {
+ switch (alpha_complex_from_points.dimension(f_simplex)) {
+ case 0:
+ BOOST_CHECK(are_almost_the_same(alpha_complex_from_points.filtration(f_simplex), 0.0));
+ break;
+ case 1:
+ BOOST_CHECK(are_almost_the_same(alpha_complex_from_points.filtration(f_simplex), 1.0/2.0));
+ break;
+ default:
+ BOOST_CHECK(false); // Shall not happen
+ break;
+ }
+ }
+
+}
diff --git a/src/Alpha_complex/test/CMakeLists.txt b/src/Alpha_complex/test/CMakeLists.txt
new file mode 100644
index 00000000..fa24e1b1
--- /dev/null
+++ b/src/Alpha_complex/test/CMakeLists.txt
@@ -0,0 +1,35 @@
+cmake_minimum_required(VERSION 2.6)
+project(GUDHIAlphaComplexTest)
+
+# need CGAL 4.7
+# cmake -DCGAL_DIR=~/workspace/CGAL-4.7-Ic-41 ../../..
+if(CGAL_FOUND)
+ if (NOT CGAL_VERSION VERSION_LESS 4.7.0)
+ message(STATUS "CGAL version: ${CGAL_VERSION}.")
+
+ include( ${CGAL_USE_FILE} )
+
+ find_package(Eigen3 3.1.0)
+ if (EIGEN3_FOUND)
+ message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.")
+ include( ${EIGEN3_USE_FILE} )
+ include_directories (BEFORE "../../include")
+
+ add_executable ( AlphaComplexUT Alpha_complex_unit_test.cpp )
+ target_link_libraries(AlphaComplexUT ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
+
+ # Do not forget to copy test files in current binary dir
+ file(COPY "${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+
+ add_test(AlphaComplexUT ${CMAKE_CURRENT_BINARY_DIR}/AlphaComplexUT
+ # XML format for Jenkins xUnit plugin
+ --log_format=XML --log_sink=${CMAKE_SOURCE_DIR}/AlphaComplexUT.xml --log_level=test_suite --report_level=no)
+
+ else()
+ message(WARNING "Eigen3 not found. Version 3.1.0 is required for Alpha complex feature.")
+ endif()
+ else()
+    message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile Alpha complex feature. Version 4.7.0 is required.")
+ endif ()
+endif()
+
diff --git a/src/Alpha_complex/test/README b/src/Alpha_complex/test/README
new file mode 100644
index 00000000..45b87d91
--- /dev/null
+++ b/src/Alpha_complex/test/README
@@ -0,0 +1,12 @@
+To compile:
+***********
+
+cmake .
+make
+
+To launch with details:
+***********************
+
+./AlphaComplexUT --report_level=detailed --log_level=all
+
+ ==> echo $? returns 0 in case of success (non-zero otherwise)
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index 3c38483c..962afeb2 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -3,15 +3,17 @@ project(GUDHI)
include("CMakeGUDHIVersion.txt")
+if (NOT CMAKE_BUILD_TYPE)
+ # Set default build type to Release
+ set(CMAKE_BUILD_TYPE "Release")
+endif()
+
enable_testing()
list(APPEND CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake/modules/")
find_package(Boost REQUIRED COMPONENTS system filesystem program_options chrono timer REQUIRED)
-if (NOT CMAKE_BUILD_TYPE)
- set(CMAKE_BUILD_TYPE "Release")
-endif()
if(MSVC)
SET (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4267 /wd4668 /wd4311 /wd4800 /wd4820 /wd4503 /wd4244 /wd4345 /wd4996 /wd4396 /wd4018")
else()
@@ -44,14 +46,22 @@ else()
INCLUDE_DIRECTORIES(${Boost_INCLUDE_DIRS})
LINK_DIRECTORIES(${Boost_LIBRARY_DIRS})
+ if (DEBUG_TRACES)
+ message(STATUS "DEBUG_TRACES are activated")
+ # For programs to be more verbose
+ add_definitions(-DDEBUG_TRACES)
+ endif()
+
#---------------------------------------------------------------------------------------
# Gudhi compilation part
include_directories(include)
+ add_subdirectory(example/common)
add_subdirectory(example/Simplex_tree)
add_subdirectory(example/Persistent_cohomology)
add_subdirectory(example/Skeleton_blocker)
add_subdirectory(example/Contraction)
+ add_subdirectory(example/Alpha_complex)
# data points generator
add_subdirectory(data/points/generator)
diff --git a/src/Contraction/include/gudhi/Contraction/policies/Link_condition_valid_contraction.h b/src/Contraction/include/gudhi/Contraction/policies/Link_condition_valid_contraction.h
index 919df243..250bba27 100644
--- a/src/Contraction/include/gudhi/Contraction/policies/Link_condition_valid_contraction.h
+++ b/src/Contraction/include/gudhi/Contraction/policies/Link_condition_valid_contraction.h
@@ -23,8 +23,8 @@
#ifndef CONTRACTION_POLICIES_LINK_CONDITION_VALID_CONTRACTION_H_
#define CONTRACTION_POLICIES_LINK_CONDITION_VALID_CONTRACTION_H_
-#include <gudhi/Utils.h>
#include <gudhi/Contraction/policies/Valid_contraction_policy.h>
+#include <gudhi/Debug_utils.h>
namespace Gudhi {
diff --git a/src/Contraction/include/gudhi/Edge_contraction.h b/src/Contraction/include/gudhi/Edge_contraction.h
index ee3e3de1..a4c786be 100644
--- a/src/Contraction/include/gudhi/Edge_contraction.h
+++ b/src/Contraction/include/gudhi/Edge_contraction.h
@@ -30,7 +30,7 @@
#include <gudhi/Contraction/policies/Valid_contraction_policy.h>
#include <gudhi/Contraction/policies/Dummy_valid_contraction.h>
#include <gudhi/Contraction/policies/Link_condition_valid_contraction.h>
-#include <gudhi/Utils.h>
+#include <gudhi/Debug_utils.h>
namespace Gudhi {
diff --git a/src/Contraction/include/gudhi/Skeleton_blocker_contractor.h b/src/Contraction/include/gudhi/Skeleton_blocker_contractor.h
index d6350a2c..f650473a 100644
--- a/src/Contraction/include/gudhi/Skeleton_blocker_contractor.h
+++ b/src/Contraction/include/gudhi/Skeleton_blocker_contractor.h
@@ -37,7 +37,7 @@
#include <gudhi/Contraction/policies/Contraction_visitor.h>
#include <gudhi/Skeleton_blocker/Skeleton_blocker_complex_visitor.h>
-#include <gudhi/Utils.h>
+#include <gudhi/Debug_utils.h>
#include <boost/scoped_array.hpp>
diff --git a/src/Doxyfile b/src/Doxyfile
index faa0d3fe..9cd73444 100644
--- a/src/Doxyfile
+++ b/src/Doxyfile
@@ -673,6 +673,7 @@ LAYOUT_FILE =
# also \cite for info how to create references.
CITE_BIB_FILES = biblio/bibliography.bib \
+ biblio/how_to_cite_cgal.bib \
biblio/how_to_cite_gudhi.bib
#---------------------------------------------------------------------------
@@ -812,7 +813,9 @@ EXCLUDE_SYMBOLS =
# that contain example code fragments that are included (see the \include
# command).
-EXAMPLE_PATH = biblio/
+EXAMPLE_PATH = biblio/ \
+ example/common/ \
+ example/Alpha_complex/
# If the value of the EXAMPLE_PATH tag contains directories, you can use the
# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and
@@ -833,6 +836,7 @@ EXAMPLE_RECURSIVE = NO
# \image command).
IMAGE_PATH = doc/Skeleton_blocker/ \
+ doc/Alpha_complex/ \
doc/common/ \
doc/Contraction/
@@ -1596,7 +1600,7 @@ PAPER_TYPE = a4
# If left blank no extra packages will be included.
# This tag requires that the tag GENERATE_LATEX is set to YES.
-EXTRA_PACKAGES = amsfonts amsmath amssymb
+EXTRA_PACKAGES = amsfonts amsmath amssymb algorithm algpseudocode
# The LATEX_HEADER tag can be used to specify a personal LaTeX header for the
# generated LaTeX document. The header should contain everything until the first
diff --git a/src/GudhUI/CMakeLists.txt b/src/GudhUI/CMakeLists.txt
index 71f4fd1a..24cb0be1 100644
--- a/src/GudhUI/CMakeLists.txt
+++ b/src/GudhUI/CMakeLists.txt
@@ -78,6 +78,8 @@ if ( CGAL_FOUND AND QT4_FOUND AND OPENGL_FOUND AND QGLVIEWER_FOUND )
target_link_libraries( GudhUI ${QT_LIBRARIES} ${QGLVIEWER_LIBRARIES} )
target_link_libraries( GudhUI ${OPENGL_gl_LIBRARY} ${OPENGL_glu_LIBRARY} )
+###############################################################################
+
else()
message(STATUS "NOTICE: GudhUI requires CGAL, the QGLViewer, OpenGL and Qt4, and will not be compiled.")
endif()
diff --git a/src/GudhUI/model/Model.h b/src/GudhUI/model/Model.h
index 07a67a0c..99a82eba 100644
--- a/src/GudhUI/model/Model.h
+++ b/src/GudhUI/model/Model.h
@@ -71,7 +71,7 @@ class CGAL_geometric_flag_complex_wrapper {
void maximal_face(std::vector<int> vertices) {
if (!load_only_points_) {
- std::cout << "size:" << vertices.size() << std::endl;
+ //std::cout << "size:" << vertices.size() << std::endl;
for (int i = 0; i < vertices.size(); ++i)
for (int j = i + 1; j < vertices.size(); ++j)
complex_.add_edge(Vertex_handle(vertices[i]), Vertex_handle(vertices[j]));
diff --git a/src/GudhUI/utils/Bar_code_persistence.h b/src/GudhUI/utils/Bar_code_persistence.h
new file mode 100644
index 00000000..a4cd8156
--- /dev/null
+++ b/src/GudhUI/utils/Bar_code_persistence.h
@@ -0,0 +1,85 @@
+#include <cmath>  // std::isfinite, std::isnan
+
+#include <QtGui/QApplication>
+
+#include <QGraphicsView>
+#include <QGraphicsScene>
+#include <QPointF>
+#include <QVector>
+#include <QGraphicsTextItem>
+
+#include <iostream>
+#include <vector>
+#include <limits> // NaN, infinity
+#include <utility> // for pair
+
+class Bar_code_persistence {
+ private:
+ typedef std::vector<std::pair<double, double>> Persistence;
+ Persistence persistence_vector;
+ double min_birth;
+ double max_death;
+
+ public:
+
+ Bar_code_persistence()
+ : min_birth(std::numeric_limits<double>::quiet_NaN()),
+ max_death(std::numeric_limits<double>::quiet_NaN()) { }
+
+ void insert(double birth, double death) {
+ persistence_vector.push_back(std::make_pair(birth, death));
+ if (std::isfinite(birth)) {
+ if ((birth < min_birth) || (std::isnan(min_birth)))
+ min_birth = birth;
+ if ((birth > max_death) || (std::isnan(max_death)))
+ max_death = birth;
+ }
+ if (std::isfinite(death))
+ if ((death > max_death) || (std::isnan(max_death)))
+ max_death = death;
+ }
+
+ void show(const std::string& window_title) {
+ // Create a view, put a scene in it
+ QGraphicsView * view = new QGraphicsView();
+ QGraphicsScene * scene = new QGraphicsScene();
+ view->setScene(scene);
+ double ratio = 600.0 / (max_death - min_birth);
+ //std::cout << "min_birth=" << min_birth << " - max_death=" << max_death << " - ratio=" << ratio << std::endl;
+
+ double height = 0.0, birth = 0.0, death = 0.0;
+ int pers_num = 1;
+ for (auto& persistence : persistence_vector) {
+ height = 5.0 * pers_num;
+ //std::cout << "[" << pers_num << "] birth=" << persistence.first << " - death=" << persistence.second << std::endl;
+ if (std::isfinite(persistence.first))
+ birth = ((persistence.first - min_birth) * ratio) + 50.0;
+ else
+ birth = 0.0;
+
+ if (std::isfinite(persistence.second))
+ death = ((persistence.second - min_birth) * ratio) + 50.0;
+ else
+ death = 700.0;
+
+ scene->addLine(birth, height, death, height, QPen(Qt::blue, 2));
+ pers_num++;
+ }
+ height += 10.0;
+ // scale line
+ scene->addLine(0, height, 700.0, height, QPen(Qt::black, 1));
+ int modulo = 0;
+ for (double scale = 50.0; scale < 700.0; scale += 50.0) {
+ modulo++;
+ // scale small dash
+ scene->addLine(scale, height - 3.0, scale, height + 3.0, QPen(Qt::black, 1));
+ // scale text
+ QString scale_value = QString::number(((scale - 50.0) / ratio) + min_birth);
+ QGraphicsTextItem* dimText = scene->addText(scale_value, QFont("Helvetica", 8));
+ dimText->setPos(scale - (3.0 * scale_value.size()), height + 9.0 * (modulo % 2));
+ }
+ view->setWindowTitle(window_title.c_str());
+ // Show the view
+ view->show();
+ }
+};
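
A minimal sketch (not part of this change) of how the widget above is meant to be driven; the interval values and the include path are illustrative assumptions:

    #include <limits>
    #include <QtGui/QApplication>
    #include "utils/Bar_code_persistence.h"  // illustrative path, relative to src/GudhUI

    int main(int argc, char** argv) {
      QApplication app(argc, argv);  // a QApplication must exist before show() builds its scene

      Bar_code_persistence bar_code;
      // Illustrative (birth, death) intervals; a non-finite death is drawn up to the right-hand edge
      bar_code.insert(0.0, 0.25);
      bar_code.insert(0.1, 1.5);
      bar_code.insert(0.0, std::numeric_limits<double>::infinity());

      bar_code.show("Persistence bar code");
      return app.exec();
    }
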
diff --git a/src/GudhUI/utils/Persistence_compute.h b/src/GudhUI/utils/Persistence_compute.h
index 0b9961d3..1f04cc6b 100644
--- a/src/GudhUI/utils/Persistence_compute.h
+++ b/src/GudhUI/utils/Persistence_compute.h
@@ -46,10 +46,6 @@ struct Persistence_params {
* Show persistence into output stream
*/
template<typename SkBlComplex> class Persistence_compute {
- private:
- SkBlComplex& complex_;
- std::ostream& stream_;
-
public:
typedef typename SkBlComplex::Vertex_handle Vertex_handle;
typedef typename SkBlComplex::Edge_handle Edge_handle;
@@ -61,9 +57,7 @@ template<typename SkBlComplex> class Persistence_compute {
* double threshold
* int p for coefficient Z_p
*/
- Persistence_compute(SkBlComplex& complex, std::ostream& stream, const Persistence_params& params) :
- // double threshold = 0.5,unsigned dim_max = 8):
- complex_(complex), stream_(stream) {
+ Persistence_compute(SkBlComplex& complex, std::ostream& stream, const Persistence_params& params) {
// for now everything is copied, todo boost adapt iterators to points of SkBlComplex instead of copying to an
// initial vector
typedef std::vector<double> Point_t;
@@ -87,10 +81,11 @@ template<typename SkBlComplex> class Persistence_compute {
pcoh.init_coefficients(params.p);
// put params.min_pers
pcoh.compute_persistent_cohomology(params.min_pers);
- stream_ << "persistence: \n";
- stream_ << "p dimension birth death: \n";
+ stream << "persistence: \n";
+ stream << "p dimension birth death: \n";
- pcoh.output_diagram(stream_);
+ pcoh.output_diagram(stream);
+
}
};
diff --git a/src/GudhUI/view/FirstCoordProjector.h b/src/GudhUI/view/FirstCoordProjector.h
index 529d2d42..3ceda3f5 100644
--- a/src/GudhUI/view/FirstCoordProjector.h
+++ b/src/GudhUI/view/FirstCoordProjector.h
@@ -32,8 +32,11 @@ class FirstCoordProjector3D : public Projector3D {
typedef Projector3D::Point_3 Point_3;
Point_3 operator()(const Point& p) const {
- assert(p.dimension() >= 3);
+ if (p.dimension() >= 3)
return Point_3(p.x(), p.y(), p.z());
+    else if (p.dimension() >= 2)
+      return Point_3(p.x(), p.y(), 0.0);
+    // Fallback for degenerate points (dimension < 2), to guarantee a return value on every path
+    return Point_3(0.0, 0.0, 0.0);
+
}
};
diff --git a/src/Persistent_cohomology/example/CMakeLists.txt b/src/Persistent_cohomology/example/CMakeLists.txt
index 95506631..5fec8ad4 100644
--- a/src/Persistent_cohomology/example/CMakeLists.txt
+++ b/src/Persistent_cohomology/example/CMakeLists.txt
@@ -16,7 +16,7 @@ add_test(persistence_from_simple_simplex_tree ${CMAKE_CURRENT_BINARY_DIR}/persis
add_executable(rips_persistence rips_persistence.cpp)
target_link_libraries(rips_persistence ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY})
-add_test(rips_persistence_3 ${CMAKE_CURRENT_BINARY_DIR}/rips_persistence ${CMAKE_SOURCE_DIR}/data/points/Kl.txt -r 0.2 -d 3 -p 3 -m 100)
+add_test(rips_persistence_3 ${CMAKE_CURRENT_BINARY_DIR}/rips_persistence ${CMAKE_SOURCE_DIR}/data/points/Kl.txt -r 0.25 -d 3 -p 3 -m 100)
add_executable(parallel_rips_persistence parallel_rips_persistence.cpp)
target_link_libraries(parallel_rips_persistence ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY})
@@ -27,27 +27,59 @@ add_executable(persistence_from_file persistence_from_file.cpp)
target_link_libraries(persistence_from_file ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY})
add_test(persistence_from_file_3_2_0 ${CMAKE_CURRENT_BINARY_DIR}/persistence_from_file ${CMAKE_SOURCE_DIR}/data/points/bunny_5000.st -p 2 -m 0)
add_test(persistence_from_file_3_3_100 ${CMAKE_CURRENT_BINARY_DIR}/persistence_from_file ${CMAKE_SOURCE_DIR}/data/points/bunny_5000.st -p 3 -m 100)
-
+
if(GMPXX_FOUND AND GMP_FOUND)
- message("GMPXX_LIBRARIES = ${GMPXX_LIBRARIES}")
- message("GMP_LIBRARIES = ${GMP_LIBRARIES}")
-
- add_executable(rips_multifield_persistence rips_multifield_persistence.cpp )
- target_link_libraries(rips_multifield_persistence ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY} ${GMPXX_LIBRARIES} ${GMP_LIBRARIES})
- add_test(rips_multifield_persistence_2_71 ${CMAKE_CURRENT_BINARY_DIR}/rips_multifield_persistence ${CMAKE_SOURCE_DIR}/data/points/Kl.txt -r 0.2 -d 3 -p 2 -q 71 -m 100)
-
- add_executable ( performance_rips_persistence performance_rips_persistence.cpp )
- target_link_libraries(performance_rips_persistence ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY} ${GMPXX_LIBRARIES} ${GMP_LIBRARIES})
-
- if(CGAL_FOUND)
- if (CMAKE_BUILD_TYPE MATCHES Debug)
- # For programs to be more verbose
- add_definitions(-DDEBUG_TRACES)
- endif()
- add_executable(alpha_shapes_persistence alpha_shapes_persistence.cpp)
- target_link_libraries(alpha_shapes_persistence ${Boost_SYSTEM_LIBRARY} ${GMPXX_LIBRARIES} ${GMP_LIBRARIES} ${CGAL_LIBRARY})
- add_test(alpha_shapes_persistence_2_0_5 ${CMAKE_CURRENT_BINARY_DIR}/alpha_shapes_persistence ${CMAKE_SOURCE_DIR}/data/points/bunny_5000 2 0.5)
- #add_test(alpha_shapes_persistence_3_3_100 ${CMAKE_CURRENT_BINARY_DIR}/alpha_shapes_persistence ${CMAKE_SOURCE_DIR}/data/points/bunny_5000.st -p 3 -m 100)
- endif()
+ message("GMPXX_LIBRARIES = ${GMPXX_LIBRARIES}")
+ message("GMP_LIBRARIES = ${GMP_LIBRARIES}")
+
+ add_executable(rips_multifield_persistence rips_multifield_persistence.cpp )
+ target_link_libraries(rips_multifield_persistence ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY} ${GMPXX_LIBRARIES} ${GMP_LIBRARIES})
+ add_test(rips_multifield_persistence_2_71 ${CMAKE_CURRENT_BINARY_DIR}/rips_multifield_persistence ${CMAKE_SOURCE_DIR}/data/points/Kl.txt -r 0.25 -d 3 -p 2 -q 71 -m 100)
+
+ add_executable ( performance_rips_persistence performance_rips_persistence.cpp )
+ target_link_libraries(performance_rips_persistence ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY} ${GMPXX_LIBRARIES} ${GMP_LIBRARIES})
+
+ if(CGAL_FOUND)
+ add_executable(alpha_shapes_persistence alpha_shapes_persistence.cpp)
+ target_link_libraries(alpha_shapes_persistence ${Boost_SYSTEM_LIBRARY} ${GMPXX_LIBRARIES} ${GMP_LIBRARIES} ${CGAL_LIBRARY})
+ add_test(alpha_shapes_persistence_2_0_5 ${CMAKE_CURRENT_BINARY_DIR}/alpha_shapes_persistence ${CMAKE_SOURCE_DIR}/data/points/bunny_5000 2 0.5)
+ #add_test(alpha_shapes_persistence_3_3_100 ${CMAKE_CURRENT_BINARY_DIR}/alpha_shapes_persistence ${CMAKE_SOURCE_DIR}/data/points/bunny_5000.st -p 3 -m 100)
+
+ if (NOT CGAL_VERSION VERSION_LESS 4.7.0)
+ message(STATUS "CGAL version: ${CGAL_VERSION}.")
+
+ include( ${CGAL_USE_FILE} )
+ # In CMakeLists.txt, when include(${CGAL_USE_FILE}), CXX_FLAGS are overwritten.
+ # cf. http://doc.cgal.org/latest/Manual/installation.html#title40
+ # A workaround is to add "-std=c++11" again.
+ # A fix would be to use https://cmake.org/cmake/help/v3.1/prop_gbl/CMAKE_CXX_KNOWN_FEATURES.html
+ # or even better https://cmake.org/cmake/help/v3.1/variable/CMAKE_CXX_STANDARD.html
+ # but it implies to use cmake version 3.1 at least.
+ if(NOT MSVC)
+ include(CheckCXXCompilerFlag)
+ CHECK_CXX_COMPILER_FLAG(-std=c++11 COMPILER_SUPPORTS_CXX11)
+ if(COMPILER_SUPPORTS_CXX11)
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11")
+ endif()
+ endif()
+ # - End of workaround
+
+ find_package(Eigen3 3.1.0)
+ if (EIGEN3_FOUND)
+ message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.")
+ include( ${EIGEN3_USE_FILE} )
+
+ add_executable (alpha_complex_persistence alpha_complex_persistence.cpp)
+ target_link_libraries(alpha_complex_persistence ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY})
+
+ else()
+        message(WARNING "Eigen3 not found. Version 3.1.0 is required for the Alpha complex feature.")
+ endif()
+ else()
+      message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile the Alpha complex feature. Version 4.7.0 is required.")
+ endif ()
+ else()
+ # message(WARNING "CGAL not found.")
+ endif()
endif()
diff --git a/src/Persistent_cohomology/example/alpha_complex_persistence.cpp b/src/Persistent_cohomology/example/alpha_complex_persistence.cpp
new file mode 100644
index 00000000..0dabdeac
--- /dev/null
+++ b/src/Persistent_cohomology/example/alpha_complex_persistence.cpp
@@ -0,0 +1,115 @@
+#include <iostream>
+#include <string>
+#include <fstream>  // for std::ofstream
+#include <limits>  // for std::numeric_limits
+
+#include <boost/program_options.hpp>
+
+#include <CGAL/Epick_d.h>
+
+// to construct a Delaunay_triangulation from an OFF file
+#include <gudhi/Delaunay_triangulation_off_io.h>
+#include <gudhi/Alpha_complex.h>
+#include <gudhi/Persistent_cohomology.h>
+
+void program_options(int argc, char * argv[]
+ , std::string & off_file_points
+ , std::string & output_file_diag
+ , Filtration_value & alpha_square_max_value
+ , int & coeff_field_characteristic
+ , Filtration_value & min_persistence);
+
+int main(int argc, char **argv) {
+ std::string off_file_points;
+ std::string output_file_diag;
+ Filtration_value alpha_square_max_value;
+ int coeff_field_characteristic;
+ Filtration_value min_persistence;
+
+ program_options(argc, argv, off_file_points, output_file_diag, alpha_square_max_value, coeff_field_characteristic, min_persistence);
+
+
+ // ----------------------------------------------------------------------------
+ // Init of an alpha complex from an OFF file
+ // ----------------------------------------------------------------------------
+ typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Kernel;
+ Gudhi::alphacomplex::Alpha_complex<Kernel> alpha_complex_from_file(off_file_points, alpha_square_max_value);
+
+ // ----------------------------------------------------------------------------
+ // Display information about the alpha complex
+ // ----------------------------------------------------------------------------
+ std::cout << "Alpha complex is of dimension " << alpha_complex_from_file.dimension() <<
+ " - " << alpha_complex_from_file.num_simplices() << " simplices - " <<
+ alpha_complex_from_file.num_vertices() << " vertices." << std::endl;
+
+ // Sort the simplices in the order of the filtration
+ alpha_complex_from_file.initialize_filtration();
+
+ std::cout << "Simplex_tree dim: " << alpha_complex_from_file.dimension() << std::endl;
+ // Compute the persistence diagram of the complex
+ Gudhi::persistent_cohomology::Persistent_cohomology< Gudhi::alphacomplex::Alpha_complex<Kernel>,
+ Gudhi::persistent_cohomology::Field_Zp > pcoh(alpha_complex_from_file);
+ // initializes the coefficient field for homology
+ pcoh.init_coefficients(coeff_field_characteristic);
+
+ pcoh.compute_persistent_cohomology(min_persistence);
+
+ // Output the diagram in filediag
+ if (output_file_diag.empty()) {
+ pcoh.output_diagram();
+ } else {
+ std::cout << "Result in file: " << output_file_diag << std::endl;
+ std::ofstream out(output_file_diag);
+ pcoh.output_diagram(out);
+ out.close();
+ }
+
+ return 0;
+}
+
+void program_options(int argc, char * argv[]
+ , std::string & off_file_points
+ , std::string & output_file_diag
+ , Filtration_value & alpha_square_max_value
+ , int & coeff_field_characteristic
+ , Filtration_value & min_persistence) {
+ namespace po = boost::program_options;
+ po::options_description hidden("Hidden options");
+ hidden.add_options()
+ ("input-file", po::value<std::string>(&off_file_points),
+ "Name of file containing a point set. Format is one point per line: X1 ... Xd ");
+
+ po::options_description visible("Allowed options", 100);
+ visible.add_options()
+ ("help,h", "produce help message")
+ ("output-file,o", po::value<std::string>(&output_file_diag)->default_value(std::string()),
+ "Name of file in which the persistence diagram is written. Default print in std::cout")
+ ("max-alpha-square-value,r", po::value<Filtration_value>(&alpha_square_max_value)->default_value(std::numeric_limits<Filtration_value>::infinity()),
+ "Maximal alpha square value for the Alpha complex construction.")
+ ("field-charac,p", po::value<int>(&coeff_field_characteristic)->default_value(11),
+ "Characteristic p of the coefficient field Z/pZ for computing homology.")
+      ("min-persistence,m", po::value<Filtration_value>(&min_persistence)->default_value(0.),
+ "Minimal lifetime of homology feature to be recorded. Default is 0. Enter a negative value to see zero length intervals");
+
+ po::positional_options_description pos;
+ pos.add("input-file", 1);
+
+ po::options_description all;
+ all.add(visible).add(hidden);
+
+ po::variables_map vm;
+ po::store(po::command_line_parser(argc, argv).
+ options(all).positional(pos).run(), vm);
+ po::notify(vm);
+
+ if (vm.count("help") || !vm.count("input-file")) {
+ std::cout << std::endl;
+ std::cout << "Compute the persistent homology with coefficient field Z/pZ \n";
+ std::cout << "of an Alpha complex defined on a set of input points.\n \n";
+ std::cout << "The output diagram contains one bar per line, written with the convention: \n";
+ std::cout << " p dim b d \n";
+ std::cout << "where dim is the dimension of the homological feature,\n";
+ std::cout << "b and d are respectively the birth and death of the feature and \n";
+ std::cout << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl;
+
+ std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl;
+ std::cout << visible << std::endl;
+ std::abort();
+ }
+}
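
Once built, the example takes an OFF point file and the options declared in program_options() above; the following invocation is illustrative only:

    ./alpha_complex_persistence path/to/alphacomplexdoc.off -r 60.0 -p 2 -m 0.45 -o diagram.pers

Without -o the diagram is printed on std::cout; -r, -p and -m default to +infinity, 11 and 0 respectively.
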
diff --git a/src/Persistent_cohomology/example/alpha_shapes_persistence.cpp b/src/Persistent_cohomology/example/alpha_shapes_persistence.cpp
index 92c0b065..ac208957 100644
--- a/src/Persistent_cohomology/example/alpha_shapes_persistence.cpp
+++ b/src/Persistent_cohomology/example/alpha_shapes_persistence.cpp
@@ -125,6 +125,12 @@ void usage(char * const progName) {
}
int main(int argc, char * const argv[]) {
+ // program args management
+ if (argc != 4) {
+ std::cerr << "Error: Number of arguments (" << argc << ") is not correct\n";
+ usage(argv[0]);
+ }
+
int coeff_field_characteristic = 0;
int returnedScanValue = sscanf(argv[2], "%d", &coeff_field_characteristic);
if ((returnedScanValue == EOF) || (coeff_field_characteristic <= 0)) {
@@ -139,12 +145,6 @@ int main(int argc, char * const argv[]) {
usage(argv[0]);
}
- // program args management
- if (argc != 4) {
- std::cerr << "Error: Number of arguments (" << argc << ") is not correct\n";
- usage(argv[0]);
- }
-
// Read points from file
std::string filegraph = argv[1];
std::list<Point_3> lp;
@@ -240,7 +240,7 @@ int main(int argc, char * const argv[]) {
}
}
// Construction of the simplex_tree
- Filtration_value filtr = std::sqrt(*the_alpha_value_iterator);
+ Filtration_value filtr = /*std::sqrt*/(*the_alpha_value_iterator);
#ifdef DEBUG_TRACES
std::cout << "filtration = " << filtr << std::endl;
#endif // DEBUG_TRACES
diff --git a/src/Persistent_cohomology/example/rips_persistence.cpp b/src/Persistent_cohomology/example/rips_persistence.cpp
index 2d926a0d..cab49395 100644
--- a/src/Persistent_cohomology/example/rips_persistence.cpp
+++ b/src/Persistent_cohomology/example/rips_persistence.cpp
@@ -30,6 +30,7 @@
#include <string>
#include <vector>
+#include <limits> // infinity
using namespace Gudhi;
using namespace Gudhi::persistent_cohomology;
@@ -115,7 +116,7 @@ void program_options(int argc, char * argv[]
("help,h", "produce help message")
("output-file,o", po::value<std::string>(&filediag)->default_value(std::string()),
"Name of file in which the persistence diagram is written. Default print in std::cout")
- ("max-edge-length,r", po::value<Filtration_value>(&threshold)->default_value(0),
+ ("max-edge-length,r", po::value<Filtration_value>(&threshold)->default_value(std::numeric_limits<Filtration_value>::infinity()),
"Maximal length of an edge for the Rips complex construction.")
("cpx-dimension,d", po::value<int>(&dim_max)->default_value(1),
"Maximal dimension of the Rips complex we want to compute.")
diff --git a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h
index 643b810c..19417ace 100644
--- a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h
+++ b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h
@@ -722,6 +722,14 @@ class Persistent_cohomology {
}
}
+ void get_persistence(std::vector<std::pair<double, double>>& persistence) {
+ cmp_intervals_by_length cmp(cpx_);
+ std::sort(std::begin(persistent_pairs_), std::end(persistent_pairs_), cmp);
+ for (auto pair : persistent_pairs_) {
+ persistence.push_back(std::make_pair(cpx_->filtration(get<0>(pair)), cpx_->filtration(get<1>(pair))));
+ }
+ }
+
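
A short sketch (illustrative, not part of this patch) of how the new accessor can be consumed; Pcoh stands for any Persistent_cohomology instantiation on which compute_persistent_cohomology() has already been called:

    #include <iostream>
    #include <utility>
    #include <vector>

    template <class Pcoh>
    void print_persistence_pairs(Pcoh& pcoh) {
      std::vector<std::pair<double, double>> persistence;
      pcoh.get_persistence(persistence);
      // Intervals arrive sorted by decreasing length, as in output_diagram()
      for (const auto& interval : persistence)
        std::cout << interval.first << " " << interval.second << "\n";
    }
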
void write_output_diagram(std::string diagram_name) {
std::ofstream diagram_out(diagram_name.c_str());
cmp_intervals_by_length cmp(cpx_);
diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree.h b/src/Simplex_tree/include/gudhi/Simplex_tree.h
index 708cdef9..3ba838a7 100644
--- a/src/Simplex_tree/include/gudhi/Simplex_tree.h
+++ b/src/Simplex_tree/include/gudhi/Simplex_tree.h
@@ -30,6 +30,7 @@
#include <gudhi/reader_utils.h>
#include <gudhi/graph_simplicial_complex.h>
+#include <gudhi/Debug_utils.h>
#include <boost/container/flat_map.hpp>
#include <boost/iterator/transform_iterator.hpp>
@@ -43,10 +44,11 @@
#include <utility>
#include <vector>
#include <functional> // for greater<>
+#include <stdexcept>
+#include <limits> // Inf
#include <initializer_list>
namespace Gudhi {
-
/** \defgroup simplex_tree Filtered Complexes
*
* A simplicial complex \f$\mathbf{K}\f$
@@ -446,6 +448,15 @@ class Simplex_tree {
}
}
+ /** \brief Sets the filtration value of a simplex.
+ *
+ * No action if called on the null_simplex*/
+ void assign_filtration(Simplex_handle sh, Filtration_value fv) {
+ if (sh != null_simplex()) {
+ sh->second.assign_filtration(fv);
+ }
+ }
+
/** \brief Returns an upper bound of the filtration values of the simplices. */
Filtration_value filtration() const {
return threshold_;
@@ -515,7 +526,7 @@ class Simplex_tree {
return dimension_;
}
- /** \brief Returns true iff the node in the simplex tree pointed by
+ /** \brief Returns true if the node in the simplex tree pointed by
* sh has children.*/
bool has_children(Simplex_handle sh) const {
return (sh->second.children()->parent() == sh->first);
@@ -718,7 +729,7 @@ class Simplex_tree {
} else if (the_simplex.size() == 1) {
// When reaching the end of recursivity, vector of simplices shall be empty and filled on back recursive
if ((to_be_inserted.size() != 0) || (to_be_propagated.size() != 0)) {
- std::cerr << "Simplex_tree::rec_insert_simplex_and_subfaces - Error vector not empty" << std::endl;
+ std::cerr << "Simplex_tree::rec_insert_simplex_and_subfaces - Error vector not empty\n";
exit(-1);
}
std::vector<Vertex_handle> first_simplex(1, the_simplex.back());
@@ -727,7 +738,7 @@ class Simplex_tree {
insert_result = insert_vertex_vector(first_simplex, filtration);
} else {
- std::cerr << "Simplex_tree::rec_insert_simplex_and_subfaces - Recursivity error" << std::endl;
+ std::cerr << "Simplex_tree::rec_insert_simplex_and_subfaces - Recursivity error\n";
exit(-1);
}
return insert_result;
@@ -740,7 +751,6 @@ class Simplex_tree {
sh->second.assign_key(key);
}
- public:
/** Returns the two Simplex_handle corresponding to the endpoints of
* and edge. sh must point to a 1-dimensional simplex. This is an
* optimized version of the boundary computation. */
@@ -1105,6 +1115,114 @@ class Simplex_tree {
os << filtration(sh) << " \n";
}
}
+
+ public:
+  /** \brief Browse the simplex tree to ensure the filtration is not decreasing.
+   * The simplex tree is browsed from the root to the leaves; any simplex whose filtration value is lower than its
+   * parent's value is raised to the parent value.
+   * @return True if and only if at least one filtration value was modified.
+   * \warning Some simplex tree functions require the filtration to be valid. `make_filtration_non_decreasing()`
+   * does not call `initialize_filtration()`; it only reports whether the filtration was modified. If the
+   * complex has changed, please call `initialize_filtration()` to recompute it.
+   */
+ bool make_filtration_non_decreasing() {
+ bool modified = false;
+ for (auto sh = root_.members().begin(); sh != root_.members().end(); ++sh) {
+ if (has_children(sh)) {
+ modified |= rec_make_filtration_non_decreasing(sh->second.children(), sh->second.filtration());
+ }
+ }
+ return modified;
+ }
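
A small sketch (not part of this patch) combining assign_filtration() and make_filtration_non_decreasing(); the simplex and the values are illustrative:

    #include <gudhi/Simplex_tree.h>

    int main() {
      Gudhi::Simplex_tree<> st;
      st.insert_simplex_and_subfaces({0, 1, 2}, 4.0);
      // Force a value on the edge {0, 1} that is lower than the value of its vertices
      st.assign_filtration(st.find({0, 1}), 0.5);
      // Returns true: the edge is raised back to 4.0, so the filtration order must be recomputed
      if (st.make_filtration_non_decreasing())
        st.initialize_filtration();
      return 0;
    }
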
+
+ private:
+ /** \brief Recursively Browse the simplex tree to ensure the filtration is not decreasing.
+ * @param[in] sib Siblings to be parsed.
+ * @param[in] upper_filtration Upper level filtration value in the simplex tree.
+ * @return The filtration modification information in order to trigger initialize_filtration.
+ */
+ bool rec_make_filtration_non_decreasing(Siblings * sib, Filtration_value upper_filtration) {
+ bool modified = false;
+ for (auto sh = sib->members().begin(); sh != sib->members().end(); ++sh) {
+ if (sh->second.filtration() < upper_filtration) {
+ // Store the filtration modification information
+ modified = true;
+ sh->second.assign_filtration(upper_filtration);
+ }
+ if (has_children(sh)) {
+ modified |= rec_make_filtration_non_decreasing(sh->second.children(), sh->second.filtration());
+ }
+ }
+ // Make the modified information to be traced by upper call
+ return modified;
+ }
+
+ public:
+  /** \brief Prune above the filtration value given as parameter.
+   * @param[in] filtration Maximum threshold value.
+   * \warning threshold_ is lowered to the given filtration value when it is smaller than the current threshold.
+   * \warning The filtration must be valid. If the filtration has not been initialized yet, the method initializes it
+   * (i.e. orders the simplices). If the complex has changed since the last time the filtration was initialized,
+   * please call `initialize_filtration()` to recompute it.
+   */
+ void prune_above_filtration(Filtration_value filtration) {
+#ifdef DEBUG_TRACES
+    std::cout << "prune_above_filtration - filtration=" << filtration << std::endl;
+#endif  // DEBUG_TRACES
+ // No action if filtration is not stored
+ if (Options::store_filtration) {
+ if (filtration < threshold_) {
+ threshold_ = filtration;
+ // Initialize filtration_vect_ if required
+ if (filtration_vect_.empty()) {
+ initialize_filtration();
+ }
+
+ std::vector<std::vector<Vertex_handle>> simplex_list_to_removed;
+ // Loop in reverse mode until threshold is reached
+ // Do not erase while looping, because removing is shifting data in a flat_map
+ for (auto f_simplex = filtration_vect_.rbegin();
+ (f_simplex != filtration_vect_.rend()) && ((*f_simplex)->second.filtration() > threshold_);
+ f_simplex++) {
+ std::vector<Vertex_handle> simplex_to_remove;
+ for (auto vertex : simplex_vertex_range(*f_simplex))
+ simplex_to_remove.insert(simplex_to_remove.begin(), vertex);
+ simplex_list_to_removed.push_back(simplex_to_remove);
+ }
+ for (auto simplex_to_remove : simplex_list_to_removed) {
+ Simplex_handle sh = find_simplex(simplex_to_remove);
+ if (sh != null_simplex())
+ remove_maximal_simplex(sh);
+ }
+        // Re-initialize filtration_vect_ if data were removed, because removing shifts data in a flat_map
+ if (simplex_list_to_removed.size() > 0)
+ initialize_filtration();
+ }
+ }
+ }
+
+ /** \brief Remove a maximal simplex.
+ * @param[in] sh Simplex handle on the maximal simplex to remove.
+ * \pre Please check the simplex has no coface before removing it.
+ * \warning In debug mode, the exception std::invalid_argument is thrown if sh has children.
+ * \warning Be aware that removing is shifting data in a flat_map (initialize_filtration to be done).
+ */
+ void remove_maximal_simplex(Simplex_handle sh) {
+ // Guarantee the simplex has no children
+ GUDHI_CHECK(has_children(sh),
+ std::invalid_argument ("Simplex_tree::remove_maximal_simplex - argument has children"));
+
+ // Simplex is a leaf, it means the child is the Siblings owning the leaf
+ Siblings* child = sh->second.children();
+
+ if ((child->size() > 1) || (child == root())) {
+ // Not alone, just remove it from members
+ // Special case when child is the root of the simplex tree, just remove it from members
+ child->erase(sh->first);
+ } else {
+      // The Siblings node is now empty: delete it, and make its parent point to its own Siblings
+ child->oncles()->members().at(child->parent()).assign_children(child->oncles());
+ delete child;
+ }
+ }
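
A companion sketch (again illustrative) for prune_above_filtration() and remove_maximal_simplex(); the set_filtration() call matters because pruning only acts below the current threshold_:

    #include <gudhi/Simplex_tree.h>

    int main() {
      Gudhi::Simplex_tree<> st;
      st.insert_simplex_and_subfaces({0, 1, 2}, 2.0);
      st.insert_simplex_and_subfaces({0, 3}, 3.0);
      st.set_filtration(4.0);  // upper bound of the filtration; pruning at or above it does nothing

      // Drop every simplex whose filtration value exceeds 2.5 ({0, 3} here)
      st.prune_above_filtration(2.5);

      // Remove a maximal simplex by hand, then recompute the filtration order
      auto sh = st.find({0, 1, 2});
      if (sh != st.null_simplex())
        st.remove_maximal_simplex(sh);
      st.initialize_filtration();
      return 0;
    }
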
private:
Vertex_handle null_vertex_;
@@ -1120,7 +1238,6 @@ class Simplex_tree {
};
// Print a Simplex_tree in os.
-
template<typename...T>
std::ostream& operator<<(std::ostream & os, Simplex_tree<T...> & st) {
for (auto sh : st.filtration_simplex_range()) {
diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_siblings.h b/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_siblings.h
index 072afc8d..8942d92c 100644
--- a/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_siblings.h
+++ b/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_siblings.h
@@ -116,6 +116,10 @@ class Simplex_tree_siblings {
return members_.size();
}
+ void erase(const Vertex_handle vh) {
+ members_.erase(vh);
+ }
+
Simplex_tree_siblings * oncles_;
Vertex_handle parent_;
Dictionary members_;
diff --git a/src/Simplex_tree/test/simplex_tree_unit_test.cpp b/src/Simplex_tree/test/simplex_tree_unit_test.cpp
index 874c3363..1a050a25 100644
--- a/src/Simplex_tree/test/simplex_tree_unit_test.cpp
+++ b/src/Simplex_tree/test/simplex_tree_unit_test.cpp
@@ -795,3 +795,270 @@ BOOST_AUTO_TEST_CASE(non_contiguous) {
test_simplex_is_vertex(st, *++i, 3);
BOOST_CHECK(++i == std::end(b));
}
+
+BOOST_AUTO_TEST_CASE(make_filtration_non_decreasing) {
+ std::cout << "********************************************************************" << std::endl;
+ std::cout << "MAKE FILTRATION NON DECREASING" << std::endl;
+ typedef Simplex_tree<> typeST;
+ typeST st;
+
+ st.insert_simplex_and_subfaces({2, 1, 0}, 4.0);
+ st.insert_simplex_and_subfaces({3, 0}, 3.0);
+ st.insert_simplex_and_subfaces({3, 4, 5}, 2.0);
+ // Because of non decreasing property of simplex tree, { 0 } , { 1 } and { 0, 1 } are going to be set from value 4.0
+ // to 1.0
+ st.insert_simplex_and_subfaces({0, 1, 6, 7}, 1.0);
+
+ /* Inserted simplex: */
+ /* 1 6 */
+ /* o---o */
+ /* /X\7/ */
+ /* o---o---o---o */
+ /* 2 0 3\X/4 */
+ /* o */
+ /* 5 */
+
+ // FIXME
+ st.set_dimension(3);
+
+ // Copy constructor
+ typeST st_copy = st;
+
+ // Check default insertion ensures the filtration values are non decreasing
+ BOOST_CHECK(!st.make_filtration_non_decreasing());
+ // Check the simplex tree is not modified by the function
+ BOOST_CHECK(st == st_copy);
+
+ // Make { 0, 1 } decreasing
+ st.assign_filtration(st.find({0, 1}), 0.5);
+ // Make { 1, 6, 7 } decreasing
+ st.assign_filtration(st.find({1, 6, 7}), 0.4);
+ // Make { 3, 4 } decreasing
+ st.assign_filtration(st.find({3, 4}), 0.3);
+ // Make { 4, 5 } decreasing
+ st.assign_filtration(st.find({4, 5}), 0.1);
+
+ // Check the filtration values were decreasing
+ BOOST_CHECK(st.make_filtration_non_decreasing());
+ // Check the simplex tree has been modified by the function to retrieve the initial simplex tree
+ BOOST_CHECK(st == st_copy);
+
+ // Change { 0, 3 }, but still non decreasing
+ st.assign_filtration(st.find({0, 3}), 1.01);
+ // Change { 0, 1, 6, 7 }, but still non decreasing
+ st.assign_filtration(st.find({0, 1, 6, 7}), 1.201);
+ // Change { 1, 2 }, but still non decreasing
+ st.assign_filtration(st.find({1, 2}), 1.05);
+ // Change { 4 }, but still non decreasing
+ st.assign_filtration(st.find({4}), 1.123);
+
+ // Check the filtration values are non decreasing
+ BOOST_CHECK(!st.make_filtration_non_decreasing());
+ // Check the simplex tree has been modified from the original
+ BOOST_CHECK(st != st_copy);
+
+}
+
+struct MyOptions : Simplex_tree_options_full_featured {
+ // Not doing persistence, so we don't need those
+ static const bool store_key = false;
+ static const bool store_filtration = false;
+ // I have few vertices
+ typedef short Vertex_handle;
+};
+
+BOOST_AUTO_TEST_CASE(remove_maximal_simplex) {
+ std::cout << "********************************************************************" << std::endl;
+ std::cout << "REMOVE MAXIMAL SIMPLEX" << std::endl;
+
+
+ typedef Simplex_tree<MyOptions> miniST;
+ miniST st;
+
+ // FIXME
+ st.set_dimension(3);
+
+ st.insert_simplex_and_subfaces({0, 1, 6, 7});
+ st.insert_simplex_and_subfaces({3, 4, 5});
+
+ // Constructs a copy at this state for further test purpose
+ miniST st_pruned = st;
+
+ st.insert_simplex_and_subfaces({3, 0});
+ st.insert_simplex_and_subfaces({2, 1, 0});
+
+ // Constructs a copy at this state for further test purpose
+ miniST st_complete = st;
+ // st_complete and st:
+ // 1 6
+ // o---o
+ // /X\7/
+ // o---o---o---o
+ // 2 0 3\X/4
+ // o
+ // 5
+ // st_pruned:
+ // 1 6
+ // o---o
+ // \7/
+ // o o---o
+ // 0 3\X/4
+ // o
+ // 5
+
+#ifdef GUDHI_DEBUG
+ std::cout << "Check exception throw in debug mode" << std::endl;
+ // throw excpt because sh has children
+  // throws an exception because sh has children
+ BOOST_CHECK_THROW (st.remove_maximal_simplex(st.find({3})), std::invalid_argument);
+ BOOST_CHECK(st == st_complete);
+#endif
+
+ st.remove_maximal_simplex(st.find({0, 2}));
+ st.remove_maximal_simplex(st.find({0, 1, 2}));
+ st.remove_maximal_simplex(st.find({1, 2}));
+ st.remove_maximal_simplex(st.find({2}));
+ st.remove_maximal_simplex(st.find({0, 3}));
+
+ BOOST_CHECK(st == st_pruned);
+ // Remove all, but as the simplex tree is not storing filtration, there is no modification
+ st.prune_above_filtration(0.0);
+ BOOST_CHECK(st == st_pruned);
+
+ miniST st_wo_seven;
+ // FIXME
+ st_wo_seven.set_dimension(3);
+
+ st_wo_seven.insert_simplex_and_subfaces({0, 1, 6});
+ st_wo_seven.insert_simplex_and_subfaces({3, 4, 5});
+ // st_wo_seven:
+ // 1 6
+ // o---o
+ // \X/
+ // o o---o
+ // 0 3\X/4
+ // o
+ // 5
+
+  // Remove all simplices containing vertex 7, to test both remove_maximal_simplex cases (members emptied or not)
+ st.remove_maximal_simplex(st.find({0, 1, 6, 7}));
+ st.remove_maximal_simplex(st.find({0, 1, 7}));
+ st.remove_maximal_simplex(st.find({0, 6, 7}));
+ st.remove_maximal_simplex(st.find({0, 7}));
+ st.remove_maximal_simplex(st.find({1, 6, 7}));
+ st.remove_maximal_simplex(st.find({1, 7}));
+ st.remove_maximal_simplex(st.find({6, 7}));
+ st.remove_maximal_simplex(st.find({7}));
+
+ BOOST_CHECK(st == st_wo_seven);
+}
+
+BOOST_AUTO_TEST_CASE(prune_above_filtration) {
+ std::cout << "********************************************************************" << std::endl;
+ std::cout << "PRUNE ABOVE FILTRATION" << std::endl;
+ typedef Simplex_tree<> typeST;
+ typeST st;
+
+ // FIXME
+ st.set_dimension(3);
+
+ st.insert_simplex_and_subfaces({0, 1, 6, 7}, 1.0);
+ st.insert_simplex_and_subfaces({3, 4, 5}, 2.0);
+ st.set_filtration(6.0);
+
+ // Constructs a copy at this state for further test purposes
+ typeST st_pruned = st;
+ st_pruned.initialize_filtration(); // reset
+
+ st.insert_simplex_and_subfaces({3, 0}, 3.0);
+ st.insert_simplex_and_subfaces({2, 1, 0}, 4.0);
+
+ // Constructs a copy at this state for further test purposes
+ typeST st_complete = st;
+ // st_complete and st:
+ // 1 6
+ // o---o
+ // /X\7/
+ // o---o---o---o
+ // 2 0 3\X/4
+ // o
+ // 5
+ // st_pruned:
+ // 1 6
+ // o---o
+ // \7/
+ // o o---o
+ // 0 3\X/4
+ // o
+ // 5
+
+ // Check the cases where pruning has no effect
+ // greater than initial filtration value
+ st.prune_above_filtration(10.0);
+ BOOST_CHECK(st == st_complete);
+ // equal to initial filtration value
+ st.prune_above_filtration(6.0);
+ BOOST_CHECK(st == st_complete);
+ // lower than initial filtration value, but still greater than the maximum filtration value
+ st_complete.set_filtration(5.0);
+ st.prune_above_filtration(5.0);
+ BOOST_CHECK(st == st_complete);
+
+ // Display the Simplex_tree
+ std::cout << "The complex contains " << st.num_simplices() << " simplices" << std::endl;
+ std::cout << " - dimension " << st.dimension() << " - filtration " << st.filtration() << std::endl;
+ std::cout << "Iterator on Simplices in the filtration, with [filtration value]:" << std::endl;
+ for (auto f_simplex : st.filtration_simplex_range()) {
+ std::cout << " " << "[" << st.filtration(f_simplex) << "] ";
+ for (auto vertex : st.simplex_vertex_range(f_simplex)) {
+ std::cout << (int) vertex << " ";
+ }
+ std::cout << std::endl;
+ }
+
+ // Check the pruned cases
+ // Set the st_pruned filtration for operator==
+ st_pruned.set_filtration(2.5);
+ st.prune_above_filtration(2.5);
+ BOOST_CHECK(st == st_pruned);
+
+ // Display the Simplex_tree
+ std::cout << "The complex pruned at 2.5 contains " << st.num_simplices() << " simplices" << std::endl;
+ std::cout << " - dimension " << st.dimension() << " - filtration " << st.filtration() << std::endl;
+ std::cout << "Iterator on Simplices in the filtration, with [filtration value]:" << std::endl;
+ for (auto f_simplex : st.filtration_simplex_range()) {
+ std::cout << " " << "[" << st.filtration(f_simplex) << "] ";
+ for (auto vertex : st.simplex_vertex_range(f_simplex)) {
+ std::cout << (int) vertex << " ";
+ }
+ std::cout << std::endl;
+ }
+
+ st_pruned.set_filtration(2.0);
+ st.prune_above_filtration(2.0);
+ BOOST_CHECK(st == st_pruned);
+
+ typeST st_empty;
+ // FIXME
+ st_empty.set_dimension(3);
+ st.prune_above_filtration(0.0);
+
+ // Display the Simplex_tree
+ std::cout << "The complex pruned at 0.0 contains " << st.num_simplices() << " simplices" << std::endl;
+ std::cout << " - dimension " << st.dimension() << " - filtration " << st.filtration() << std::endl;
+ std::cout << "Iterator on Simplices in the filtration, with [filtration value]:" << std::endl;
+ for (auto f_simplex : st.filtration_simplex_range()) {
+ std::cout << " " << "[" << st.filtration(f_simplex) << "] ";
+ for (auto vertex : st.simplex_vertex_range(f_simplex)) {
+ std::cout << (int) vertex << " ";
+ }
+ std::cout << std::endl;
+ }
+
+ BOOST_CHECK(st == st_empty);
+
+ // Limit case: prune below every filtration value
+ st.prune_above_filtration(-1.0);
+ st_empty.set_filtration(-1.0);
+ BOOST_CHECK(st == st_empty);
+}
diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h
index 615b3a81..1f495a7a 100644
--- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h
+++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h
@@ -31,7 +31,7 @@
#include <gudhi/Skeleton_blocker/Skeleton_blocker_simple_traits.h>
#include <gudhi/Skeleton_blocker/Skeleton_blocker_simple_geometric_traits.h>
-#include <gudhi/Utils.h> // xxx
+#include <gudhi/Debug_utils.h>
namespace Gudhi {
diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_sub_complex.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_sub_complex.h
index 50a83345..0dd5dd54 100644
--- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_sub_complex.h
+++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_sub_complex.h
@@ -25,7 +25,7 @@
#include <gudhi/Skeleton_blocker_complex.h>
#include <gudhi/Skeleton_blocker/Skeleton_blocker_simplex.h>
-#include <gudhi/Utils.h>
+#include <gudhi/Debug_utils.h>
#include <map>
#include <vector>
diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_simplices_iterators.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_simplices_iterators.h
index ce565166..b8d99201 100644
--- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_simplices_iterators.h
+++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_simplices_iterators.h
@@ -25,7 +25,7 @@
#include <gudhi/Skeleton_blocker_link_complex.h>
#include <gudhi/Skeleton_blocker/Skeleton_blocker_link_superior.h>
#include <gudhi/Skeleton_blocker/internal/Trie.h>
-#include <gudhi/Utils.h>
+#include <gudhi/Debug_utils.h>
#include <boost/iterator/iterator_facade.hpp>
diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_complex.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_complex.h
index 5b774f25..fd85f0c2 100644
--- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_complex.h
+++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_complex.h
@@ -31,7 +31,7 @@
#include <gudhi/Skeleton_blocker/Skeleton_blocker_complex_visitor.h>
#include <gudhi/Skeleton_blocker/internal/Top_faces.h>
#include <gudhi/Skeleton_blocker/internal/Trie.h>
-#include <gudhi/Utils.h>
+#include <gudhi/Debug_utils.h>
#include <boost/graph/adjacency_list.hpp>
#include <boost/graph/connected_components.hpp>
diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_geometric_complex.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_geometric_complex.h
index 5c898152..040f7950 100644
--- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_geometric_complex.h
+++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_geometric_complex.h
@@ -22,9 +22,9 @@
#ifndef SKELETON_BLOCKER_GEOMETRIC_COMPLEX_H_
#define SKELETON_BLOCKER_GEOMETRIC_COMPLEX_H_
-#include <gudhi/Utils.h>
#include <gudhi/Skeleton_blocker_complex.h>
#include <gudhi/Skeleton_blocker/Skeleton_blocker_sub_complex.h>
+#include <gudhi/Debug_utils.h>
namespace Gudhi {
diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_link_complex.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_link_complex.h
index 85a6b0b5..e3c16d88 100644
--- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_link_complex.h
+++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_link_complex.h
@@ -22,8 +22,8 @@
#ifndef SKELETON_BLOCKER_LINK_COMPLEX_H_
#define SKELETON_BLOCKER_LINK_COMPLEX_H_
-#include <gudhi/Utils.h>
#include <gudhi/Skeleton_blocker_complex.h>
+#include <gudhi/Debug_utils.h>
namespace Gudhi {
diff --git a/src/Skeleton_blocker/test/TestSkeletonBlockerComplex.cpp b/src/Skeleton_blocker/test/TestSkeletonBlockerComplex.cpp
index 42482e23..b8b1a0d1 100644
--- a/src/Skeleton_blocker/test/TestSkeletonBlockerComplex.cpp
+++ b/src/Skeleton_blocker/test/TestSkeletonBlockerComplex.cpp
@@ -24,7 +24,7 @@
#include <string>
#include <fstream>
#include <sstream>
-#include "gudhi/Utils.h"
+#include "gudhi/Debug_utils.h"
#include "gudhi/Test.h"
#include "gudhi/Skeleton_blocker.h"
//#include "gudhi/Skeleton_blocker_link_complex.h"
diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h
index 41b8ba1e..3c42f72d 100644
--- a/src/common/doc/main_page.h
+++ b/src/common/doc/main_page.h
@@ -63,10 +63,12 @@
* CGAL is a C++ library which provides easy access to efficient and reliable geometric algorithms.
*
* The following examples require the <a target="_blank" href="http://www.cgal.org/">Computational Geometry Algorithms
- * Library</a> (CGAL) and will not be built if CGAL is not installed:
+ * Library</a> (CGAL \cite cgal:eb-15b) and will not be built if CGAL is not installed:
* \li GudhUI
* \li Persistent_cohomology/alpha_shapes_persistence
* \li Simplex_tree/simplex_tree_from_alpha_shapes_3
+ * \li Alpha_complex/Alpha_complex_from_off
+ * \li Alpha_complex/Alpha_complex_from_points
*
* Having CGAL version 4.4 or higher installed is recommended. The procedure to install this library according to
* your operating system is detailed here http://doc.cgal.org/latest/Manual/installation.html
diff --git a/src/common/example/CMakeLists.txt b/src/common/example/CMakeLists.txt
new file mode 100644
index 00000000..d29e31e7
--- /dev/null
+++ b/src/common/example/CMakeLists.txt
@@ -0,0 +1,40 @@
+cmake_minimum_required(VERSION 2.6)
+project(GUDHIDelaunayTriangulationOffFileReadWrite)
+
+# need CGAL 4.6
+if(CGAL_FOUND)
+ if (NOT CGAL_VERSION VERSION_LESS 4.6.0)
+ message(STATUS "CGAL version: ${CGAL_VERSION}.")
+
+ include( ${CGAL_USE_FILE} )
+ # When include(${CGAL_USE_FILE}) is called, CXX_FLAGS are overwritten.
+ # cf. http://doc.cgal.org/latest/Manual/installation.html#title40
+ # A workaround is to add "-std=c++11" again.
+ # A fix would be to use https://cmake.org/cmake/help/v3.1/prop_gbl/CMAKE_CXX_KNOWN_FEATURES.html
+ # or even better https://cmake.org/cmake/help/v3.1/variable/CMAKE_CXX_STANDARD.html
+ # but it requires CMake version 3.1 at least.
+ if(NOT MSVC)
+ include(CheckCXXCompilerFlag)
+ CHECK_CXX_COMPILER_FLAG(-std=c++11 COMPILER_SUPPORTS_CXX11)
+ if(COMPILER_SUPPORTS_CXX11)
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11")
+ endif()
+ endif()
+ # - End of workaround
+
+ find_package(Eigen3 3.1.0)
+ if (EIGEN3_FOUND)
+ message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.")
+ include( ${EIGEN3_USE_FILE} )
+
+ add_executable ( dtoffrw Delaunay_triangulation_off_rw.cpp )
+ target_link_libraries(dtoffrw ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY})
+ add_test(dtoffrw ${CMAKE_CURRENT_BINARY_DIR}/dtoffrw ${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off ${CMAKE_CURRENT_BINARY_DIR}/result.off)
+
+ else()
+ message(WARNING "Eigen3 not found. Version 3.1.0 is required for Alpha shapes feature.")
+ endif()
+ else()
+ message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile Alpha shapes feature. Version 4.6.0 is required.")
+ endif ()
+endif()
diff --git a/src/common/example/Delaunay_triangulation_off_rw.cpp b/src/common/example/Delaunay_triangulation_off_rw.cpp
new file mode 100644
index 00000000..75e4fafb
--- /dev/null
+++ b/src/common/example/Delaunay_triangulation_off_rw.cpp
@@ -0,0 +1,54 @@
+// to construct a Delaunay_triangulation from an OFF file
+#include <gudhi/Delaunay_triangulation_off_io.h>
+
+#include <CGAL/Delaunay_triangulation.h>
+#include <CGAL/Epick_d.h>
+
+#include <iostream>
+#include <string>
+
+// Use Dynamic_dimension_tag so the user can set the dimension at run time
+typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > K;
+typedef CGAL::Delaunay_triangulation<K> T;
+// The triangulation uses the default instantiation of the
+// TriangulationDataStructure template parameter
+
+void usage(char * const progName) {
+ std::cerr << "Usage: " << progName << " inputFile.off outputFile.off" << std::endl;
+ exit(-1); // ----- >>
+}
+
+int main(int argc, char **argv) {
+ if (argc != 3) {
+ std::cerr << "Error: Number of arguments (" << argc << ") is not correct" << std::endl;
+ usage(argv[0]);
+ }
+
+ std::string offInputFile(argv[1]);
+ // Read the OFF file (input file name given as parameter) and triangulate the points
+ Gudhi::Delaunay_triangulation_off_reader<T> off_reader(offInputFile);
+ // Check the read operation was correct
+ if (!off_reader.is_valid()) {
+ std::cerr << "Unable to read file " << offInputFile << std::endl;
+ exit(-1); // ----- >>
+ }
+
+ // Retrieve the triangulation
+ T* triangulation = off_reader.get_complex();
+ // Operations on triangulation
+ std::cout << "Number of vertices= " << triangulation->number_of_vertices() << std::endl;
+ std::cout << "Number of finite full cells= " << triangulation->number_of_finite_full_cells() << std::endl;
+
+ std::string outFileName(argv[2]);
+ std::string offOutputFile(outFileName);
+ // Write the OFF file (output file name given as parameter) with the points and triangulated cells as faces
+ Gudhi::Delaunay_triangulation_off_writer<T> off_writer(offOutputFile, triangulation);
+
+ // Check the write operation was correct
+ if (!off_writer.is_valid()) {
+ std::cerr << "Unable to write file " << offOutputFile << std::endl;
+ exit(-1); // ----- >>
+ }
+
+ return 0;
+}
\ No newline at end of file
diff --git a/src/common/example/dtoffrw_alphashapedoc_result.off b/src/common/example/dtoffrw_alphashapedoc_result.off
new file mode 100644
index 00000000..03b7ca75
--- /dev/null
+++ b/src/common/example/dtoffrw_alphashapedoc_result.off
@@ -0,0 +1,15 @@
+nOFF
+2 7 6 0
+1 1
+7 0
+4 6
+9 6
+0 14
+2 19
+9 17
+3 0 1 2
+3 3 2 1
+3 4 0 2
+3 4 2 6
+3 6 2 3
+3 5 4 6
diff --git a/src/common/example/dtoffrw_alphashapedoc_result.txt b/src/common/example/dtoffrw_alphashapedoc_result.txt
new file mode 100644
index 00000000..8e659740
--- /dev/null
+++ b/src/common/example/dtoffrw_alphashapedoc_result.txt
@@ -0,0 +1,2 @@
+Number of vertices= 7
+Number of finite full cells= 6
diff --git a/src/common/include/gudhi/Utils.h b/src/common/include/gudhi/Debug_utils.h
index 43916f11..48d61fef 100644
--- a/src/common/include/gudhi/Utils.h
+++ b/src/common/include/gudhi/Debug_utils.h
@@ -19,28 +19,37 @@
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-#ifndef UTILS_H_
-#define UTILS_H_
+#ifndef DEBUG_UTILS_H_
+#define DEBUG_UTILS_H_
+#include <iostream>
+
+#ifndef NDEBUG
+ // GUDHI_DEBUG is the Gudhi official flag for debug mode.
+ #define GUDHI_DEBUG
+#endif
+
+// GUDHI_CHECK throws an exception when the condition holds, in debug mode only; it does nothing in release mode.
+// It could assert in release mode, but cmake sets NDEBUG (for "NO DEBUG") in that mode, which means assert does nothing.
+#ifdef GUDHI_DEBUG
+ #define GUDHI_CHECK(cond, excpt) if (cond) throw excpt
+#else
+ #define GUDHI_CHECK(cond, excpt) (void) 0
+#endif
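+
+// Illustrative usage sketch (comment only, not part of this header). The condition passed to
+// GUDHI_CHECK is the *error* case: it throws in debug mode and compiles to a no-op in release
+// mode. The function and message below are hypothetical:
+//
+//   void set_dimension(int dimension) {
+//     GUDHI_CHECK(dimension < 0, std::invalid_argument("dimension must be non-negative"));
+//     // ... rest of the function
+//   }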
#define PRINT(a) std::cerr << #a << ": " << (a) << " (DISP)" << std::endl
// #define DBG_VERBOSE
#ifdef DBG_VERBOSE
-#define DBG(a) std::cerr << "DBG: " << (a) << std::endl
-#define DBGMSG(a, b) std::cerr << "DBG: " << a << b << std::endl
-#define DBGVALUE(a) std::cerr << "DBG: " << #a << ": " << a << std::endl
-#define DBGCONT(a) std::cerr << "DBG: container " << #a << " -> "; for (auto x : a) std::cerr << x << ","; std::cerr <<
-std::endl
+ #define DBG(a) std::cout << "DBG: " << (a) << std::endl
+ #define DBGMSG(a, b) std::cout << "DBG: " << a << b << std::endl
+ #define DBGVALUE(a) std::cout << "DBG: " << #a << ": " << a << std::endl
+ #define DBGCONT(a) std::cout << "DBG: container " << #a << " -> "; for (auto x : a) std::cout << x << ","; std::cout << std::endl
#else
-// #define DBG(a) a
-// #define DBGMSG(a,b) b
-// #define DBGVALUE(a) a
-// #define DBGCONT(a) a
-#define DBG(a)
-#define DBGMSG(a, b)
-#define DBGVALUE(a)
-#define DBGCONT(a)
+ #define DBG(a) (void) 0
+ #define DBGMSG(a, b) (void) 0
+ #define DBGVALUE(a) (void) 0
+ #define DBGCONT(a) (void) 0
#endif
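+
+// Illustrative usage sketch (comment only, not part of this header). The DBG* macros print on
+// std::cout only when DBG_VERBOSE is defined; otherwise they compile to no-ops. The variable
+// names below are hypothetical:
+//
+//   int num_simplices = 12;
+//   std::vector<int> vertices = {0, 1, 6, 7};
+//   DBG("entering remove_maximal_simplex");  // prints "DBG: entering remove_maximal_simplex"
+//   DBGVALUE(num_simplices);                 // prints "DBG: num_simplices: 12"
+//   DBGCONT(vertices);                       // prints "DBG: container vertices -> 0,1,6,7,"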
-#endif // UTILS_H_
+#endif // DEBUG_UTILS_H_
diff --git a/src/common/include/gudhi/Delaunay_triangulation_off_io.h b/src/common/include/gudhi/Delaunay_triangulation_off_io.h
new file mode 100644
index 00000000..b3f4a299
--- /dev/null
+++ b/src/common/include/gudhi/Delaunay_triangulation_off_io.h
@@ -0,0 +1,308 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2015 INRIA Saclay (France)
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+#ifndef DELAUNAY_TRIANGULATION_OFF_IO_H_
+#define DELAUNAY_TRIANGULATION_OFF_IO_H_
+
+#include <string>
+#include <vector>
+#include <fstream>
+#include <map>
+
+#include "gudhi/Off_reader.h"
+
+namespace Gudhi {
+
+/** \brief OFF file visitor implementation, designed for Off_reader, that constructs a CGAL Delaunay triangulation.
+ *
+ * For more information on CGAL Delaunay triangulations, please refer to the corresponding chapter at
+ * http://doc.cgal.org/latest/Triangulation/
+ */
+template<typename Complex>
+class Delaunay_triangulation_off_visitor_reader {
+ private:
+ Complex* complex_;
+ typedef typename Complex::Point Point;
+
+ public:
+
+ // TODO(VR) : Passing a Complex as a parameter is required, even if it is not used. Otherwise, compilation fails.
+
+ /** \brief Delaunay_triangulation_off_visitor_reader constructor
+ *
+ * @param[in] complex_ptr_ pointer to a Delaunay triangulation (currently unused, see the TODO above).
+ */
+ Delaunay_triangulation_off_visitor_reader(Complex* complex_ptr_)
+ : complex_(nullptr) { }
+
+ /** \brief Off_reader visitor init implementation.
+ *
+ * The init parameters are set from the OFF file header.
+ * The dimension value is required in order to construct the Delaunay triangulation.
+ *
+ * @param[in] dim space dimension of vertices.
+ * @param[in] num_vertices number of vertices in the OFF file (not used).
+ * @param[in] num_faces number of faces in the OFF file (not used).
+ * @param[in] num_edges number of edges in the OFF file (not used).
+ */
+ void init(int dim, int num_vertices, int num_faces, int num_edges) {
+#ifdef DEBUG_TRACES
+ std::cout << "Delaunay_triangulation_off_visitor_reader::init - dim=" << dim << " - num_vertices=" <<
+ num_vertices << " - num_faces=" << num_faces << " - num_edges=" << num_edges << std::endl;
+#endif // DEBUG_TRACES
+ if (num_faces > 0) {
+ std::cerr << "Delaunay_triangulation_off_visitor_reader::init faces are not taken into account from OFF " <<
+ "file for Delaunay triangulation - faces are computed.\n";
+ }
+ if (num_edges > 0) {
+ std::cerr << "Delaunay_triangulation_off_visitor_reader::init edges are not taken into account from OFF " <<
+ "file for Delaunay triangulation - edges are computed.\n";
+ }
+ // Complex construction with dimension from file
+ complex_ = new Complex(dim);
+ }
+
+ /** \brief Off_reader visitor point implementation.
+ *
+ * The point function is called for each vertex line of the OFF file.
+ * It inserts the vertex in the Delaunay triangulation.
+ *
+ * @param[in] point vector of vertex coordinates.
+ */
+ void point(const std::vector<double>& point) {
+#ifdef DEBUG_TRACES
+ std::cout << "Delaunay_triangulation_off_visitor_reader::point ";
+ for (auto coordinate : point) {
+ std::cout << coordinate << " | ";
+ }
+ std::cout << std::endl;
+#endif // DEBUG_TRACES
+ complex_->insert(Point(point.size(), point.begin(), point.end()));
+ }
+
+ // Off_reader visitor maximal_face implementation - not used
+ void maximal_face(const std::vector<int>& face) {
+ // For Delaunay Triangulation, only points are read
+ }
+
+ // Off_reader visitor done implementation - not used
+ void done() {
+ // Nothing to be done on end of OFF file read
+ }
+
+ /** \brief Returns the constructed Delaunay triangulation.
+ *
+ * @return A pointer to the Delaunay triangulation.
+ *
+ * @warning The returned pointer can be nullptr.
+ */
+ Complex* get_complex() const {
+ return complex_;
+ }
+};
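+
+// Illustrative sketch (comment only): driving this visitor directly with Off_reader, which is
+// essentially what Delaunay_triangulation_off_reader below automates. The file name and the
+// triangulation typedef T are hypothetical; T must be a CGAL Delaunay triangulation type:
+//
+//   std::ifstream stream("my_points.off");
+//   Gudhi::Delaunay_triangulation_off_visitor_reader<T> visitor(nullptr);
+//   Gudhi::Off_reader off_reader(stream);
+//   if (off_reader.read(visitor)) {
+//     T* triangulation = visitor.get_complex();  // allocated with new in init()
+//     // ... use the triangulation ...
+//     delete triangulation;
+//   }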
+
+/** \brief OFF file reader that constructs a Delaunay triangulation.
+ *
+ * This class uses the Delaunay_triangulation_off_visitor_reader to visit the OFF file through Off_reader.
+ *
+ * For more information on CGAL Delaunay triangulations, please refer to the corresponding chapter at
+ * http://doc.cgal.org/latest/Triangulation/
+ *
+ * \section Example
+ *
+ * This example loads points from an OFF file and builds the Delaunay triangulation.
+ * Then, it displays the number of vertices and finite full cells of the Delaunay triangulation.
+ *
+ * \include Delaunay_triangulation_off_rw.cpp
+ *
+ * When launching:
+ *
+ * \code $> ./dtoffrw ../../data/points/alphacomplexdoc.off triangulated.off
+ * \endcode
+ *
+ * the program output is:
+ *
+ * \include dtoffrw_alphashapedoc_result.txt
+ */
+template<typename Complex>
+class Delaunay_triangulation_off_reader {
+ public:
+
+ /** \brief Reads the OFF file and constructs the Delaunay triangulation from the points
+ * that are in the OFF file.
+ *
+ * @param[in] name_file OFF file to read.
+ *
+ * @warning Check with the is_valid() function whether the read operation was successful.
+ */
+ Delaunay_triangulation_off_reader(const std::string & name_file)
+ : valid_(false) {
+ std::ifstream stream(name_file);
+ if (stream.is_open()) {
+ Delaunay_triangulation_off_visitor_reader<Complex> off_visitor(complex_);
+ Off_reader off_reader(stream);
+ valid_ = off_reader.read(off_visitor);
+ if (valid_) {
+ complex_ = off_visitor.get_complex();
+ if (complex_ == nullptr) {
+ std::cerr << "Delaunay_triangulation_off_reader::Delaunay_triangulation_off_reader off_visitor returns " <<
+ "an empty pointer\n";
+ valid_ = false;
+ }
+ }
+ } else {
+ std::cerr << "Delaunay_triangulation_off_reader::Delaunay_triangulation_off_reader could not open file " <<
+ name_file << "\n";
+ }
+
+ }
+
+ /** \brief Returns whether the OFF file read operation was successful.
+ *
+ * @return OFF file read status.
+ */
+ bool is_valid() const {
+ return valid_;
+ }
+
+ /** \brief Returns the constructed Delaunay triangulation.
+ *
+ * @return A pointer to the Delaunay triangulation.
+ *
+ * @warning The returned pointer can be nullptr.
+ */
+ Complex* get_complex() const {
+ if (valid_)
+ return complex_;
+ return nullptr;
+
+ }
+
+ private:
+ /** \brief OFF file read status.*/
+ bool valid_;
+ /** \brief A pointer to the Delaunay triangulation.*/
+ Complex* complex_;
+};
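+
+// Illustrative sketch (comment only): minimal client use of the reader. The file name and the
+// triangulation typedef T are hypothetical. The triangulation returned by get_complex() was
+// allocated with new by the visitor, so the caller is responsible for deleting it:
+//
+//   Gudhi::Delaunay_triangulation_off_reader<T> off_reader("my_points.off");
+//   if (off_reader.is_valid()) {
+//     T* triangulation = off_reader.get_complex();
+//     std::cout << triangulation->number_of_vertices() << std::endl;
+//     delete triangulation;
+//   }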
+
+/** \brief OFF file writer from a Delaunay triangulation.
+ *
+ * This class constructs the OFF file header according to http://www.geomview.org/docs/html/OFF.html
+ *
+ * The header is followed by the list of point coordinates (the Delaunay triangulation vertices),
+ * and finally by the list of faces (the Delaunay triangulation finite full cells).
+ *
+ * For more information on CGAL Delaunay triangulations, please refer to the corresponding chapter at
+ * http://doc.cgal.org/latest/Triangulation/
+ *
+ * \section Example
+ *
+ * This example loads points from an OFF file and builds the Delaunay triangulation.
+ * Then, the Delaunay triangulation is saved in a new file including the triangulation as a list of faces.
+ *
+ * \include Delaunay_triangulation_off_rw.cpp
+ *
+ * When launching:
+ *
+ * \code $> ./dtoffrw ../../data/points/alphacomplexdoc.off triangulated.off
+ * \endcode
+ *
+ * The result will be an nOFF file of dimension 2 with the 7 points from alphacomplexdoc.off, followed by the 6
+ * triangular faces (the first value on each face line is its number of vertices, i.e. 3):
+ * \include dtoffrw_alphashapedoc_result.off
+ */
+template<typename Complex>
+class Delaunay_triangulation_off_writer {
+ public:
+ typedef typename Complex::Point Point;
+
+ /** \brief Writes the OFF file from the Delaunay triangulation
+ *
+ * @param[in] name_file OFF file to write.
+ * @param[in] complex_ptr pointer to a Delaunay triangulation.
+ *
+ * @warning Check with the is_valid() function whether the write operation was successful.
+ */
+ Delaunay_triangulation_off_writer(const std::string & name_file, Complex* complex_ptr)
+ : valid_(false) {
+ std::ofstream stream(name_file);
+ if (stream.is_open()) {
+ if (complex_ptr->current_dimension() == 3) {
+ // OFF header
+ stream << "OFF" << std::endl;
+ // no endl on next line - don't know why...
+ stream << complex_ptr->number_of_vertices() << " " << complex_ptr->number_of_finite_full_cells() << " 0";
+ } else {
+ // nOFF header
+ stream << "nOFF" << std::endl;
+ // no endl on next line - don't know why...
+ stream << complex_ptr->current_dimension() << " " << complex_ptr->number_of_vertices() << " " <<
+ complex_ptr->number_of_finite_full_cells() << " 0";
+ }
+
+ // Map to retrieve a vertex handle from its point
+ std::map< Point, int > points_to_vh;
+ // Start to insert at default handle value
+ int vertex_handle = int();
+
+ // Points list
+ for (auto vit = complex_ptr->vertices_begin(); vit != complex_ptr->vertices_end(); ++vit) {
+ for (auto Coord = vit->point().cartesian_begin(); Coord != vit->point().cartesian_end(); ++Coord) {
+ stream << *Coord << " ";
+ }
+ stream << std::endl;
+ points_to_vh[vit->point()] = vertex_handle;
+ vertex_handle++;
+ }
+
+ for (auto cit = complex_ptr->finite_full_cells_begin(); cit != complex_ptr->finite_full_cells_end(); ++cit) {
+ std::vector<int> vertexVector;
+ stream << std::distance(cit->vertices_begin(), cit->vertices_end()) << " ";
+ for (auto vit = cit->vertices_begin(); vit != cit->vertices_end(); ++vit) {
+ stream << points_to_vh[(*vit)->point()] - 1 << " ";
+ }
+ stream << std::endl;
+ }
+ stream.close();
+ valid_ = true;
+ } else {
+ std::cerr << "Delaunay_triangulation_off_writer::Delaunay_triangulation_off_writer could not open file " <<
+ name_file << "\n";
+ }
+ }
+
+ /** \brief Returns whether the OFF file write operation was successful.
+ *
+ * @return OFF file write status.
+ */
+ bool is_valid() const {
+ return valid_;
+ }
+
+ private:
+ /* \brief OFF file write status. */
+ bool valid_;
+};
+
+} // namespace Gudhi
+
+#endif // DELAUNAY_TRIANGULATION_OFF_IO_H_
diff --git a/src/common/include/gudhi/Off_reader.h b/src/common/include/gudhi/Off_reader.h
index 81b9e634..e45a7600 100644
--- a/src/common/include/gudhi/Off_reader.h
+++ b/src/common/include/gudhi/Off_reader.h
@@ -34,37 +34,31 @@
namespace Gudhi {
-/**
- * Read an off file and calls a visitor methods while reading it.
- * An off file must have its first/snd line in this format :
- * OFF
- * num_vert num_faces num_edges
- *
- * A noff file must have its first/snd line in this format :
- * nOFF
- * dim num_vert num_faces num_edges
- *
- * The number of edges num_edges is optional and can be left to zero.
+/** \brief OFF file reader, calling a user-provided visitor while reading.
+ *
+ * The OFF file must conform to the format described at
+ * http://www.geomview.org/docs/html/OFF.html
*/
class Off_reader {
public:
Off_reader(std::ifstream& stream) : stream_(stream) { }
- // Off_reader(const std::string& name):stream_(name){
- // if(!stream_.is_open())
- // std::cerr <<"could not open file \n";
- // }
~Off_reader() {
stream_.close();
}
- /**
- * read an off file and calls the following methods :
- * void init(int dim,int num_vertices,int num_faces,int num_edges); //num_edges may not be set
- * void point(const std::vector<double>& point);
- * void maximal_face(const std::list<int>& face);
- * void done();
- * of the visitor when reading a point or a maximal face.
+ /** \brief
+ * Reads an OFF file and calls the following methods of the visitor:
+ *
+ * <CODE>void init(int dim,int num_vertices,int num_faces,int num_edges); // from the file header - num_edges may not be set
+ *
+ * void point(const std::vector<double>& point); // for each point read
+ *
+ * void maximal_face(const std::vector<int>& face); // for each face read
+ *
+ * void done(); // called once the whole file has been read</CODE>
+ *
+ * Edges are not taken into account.
*/
template<typename OffVisitor>
bool read(OffVisitor& off_visitor) {
@@ -118,7 +112,7 @@ class Off_reader {
if (!goto_next_uncomment_line(line)) return false;
std::istringstream iss(line);
- if (is_off_file) {
+ if ((is_off_file) && (!is_noff_file)) {
off_info_.dim = 3;
if (!(iss >> off_info_.num_vertices >> off_info_.num_faces >> off_info_.num_edges)) {
std::cerr << "incorrect number of vertices/faces/edges\n";
@@ -126,8 +120,8 @@ class Off_reader {
}
} else {
if (!(iss >> off_info_.dim >> off_info_.num_vertices >> off_info_.num_faces >> off_info_.num_edges)) {
- std::cerr << "incorrect number of vertices/faces/edges\n";
- return false;
+ std::cerr << "incorrect number of vertices/faces/edges\n";
+ return false;
}
}
off_visitor.init(off_info_.dim, off_info_.num_vertices, off_info_.num_faces, off_info_.num_edges);
@@ -138,7 +132,7 @@ class Off_reader {
bool goto_next_uncomment_line(std::string& uncomment_line) {
uncomment_line.clear();
do
- std::getline(stream_, uncomment_line); while (uncomment_line[0] == '%'); // || uncomment_line.empty());
+ std::getline(stream_, uncomment_line); while (uncomment_line[0] == '%');
return (uncomment_line.size() > 0 && uncomment_line[0] != '%');
}
@@ -166,7 +160,7 @@ class Off_reader {
iss >> num_face_vertices;
std::vector<int> face;
face.assign(std::istream_iterator<int>(iss), std::istream_iterator<int>());
- if (face.size() != off_info_.dim) return false;
+ //if (face.size() != (off_info_.dim + 1)) return false;
visitor.maximal_face(face);
}
return true;
diff --git a/src/common/include/gudhi/distance_functions.h b/src/common/include/gudhi/distance_functions.h
index e5c79ded..cd518581 100644
--- a/src/common/include/gudhi/distance_functions.h
+++ b/src/common/include/gudhi/distance_functions.h
@@ -23,6 +23,8 @@
#ifndef DISTANCE_FUNCTIONS_H_
#define DISTANCE_FUNCTIONS_H_
+#include <cmath> // for std::sqrt
+
/* Compute the Euclidean distance between two Points given
* by a range of coordinates. The points are assumed to have
* the same dimension. */
@@ -35,7 +37,7 @@ double euclidean_distance(Point &p1, Point &p2) {
double tmp = *it1 - *it2;
dist += tmp*tmp;
}
- return sqrt(dist);
+ return std::sqrt(dist);
}
#endif // DISTANCE_FUNCTIONS_H_
diff --git a/src/common/include/gudhi/reader_utils.h b/src/common/include/gudhi/reader_utils.h
index e05714c7..da2c2c36 100644
--- a/src/common/include/gudhi/reader_utils.h
+++ b/src/common/include/gudhi/reader_utils.h
@@ -58,7 +58,9 @@ inline void read_points(std::string file_name, std::vector< std::vector< double
while (iss >> x) {
point.push_back(x);
}
- points.push_back(point);
+ // Check for empty lines
+ if (!point.empty())
+ points.push_back(point);
}
in_file.close();
}
diff --git a/src/common/test/CMakeLists.txt b/src/common/test/CMakeLists.txt
new file mode 100644
index 00000000..a395338e
--- /dev/null
+++ b/src/common/test/CMakeLists.txt
@@ -0,0 +1,46 @@
+cmake_minimum_required(VERSION 2.6)
+project(GUDHIDelaunayTriangulationOffFileReadWriteUT)
+
+if(NOT MSVC)
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} --coverage")
+ set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} --coverage")
+ set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} --coverage")
+endif()
+
+# need CGAL 4.6
+if(CGAL_FOUND)
+ if (NOT CGAL_VERSION VERSION_LESS 4.6.0)
+ message(STATUS "CGAL version: ${CGAL_VERSION}.")
+
+ include( ${CGAL_USE_FILE} )
+
+ find_package(Eigen3 3.1.0)
+ if (EIGEN3_FOUND)
+ message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.")
+ include( ${EIGEN3_USE_FILE} )
+
+ add_executable ( dtoffrw_UT dtoffrw_unit_test.cpp )
+ target_link_libraries(dtoffrw_UT ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
+
+ # Do not forget to copy test files in current binary dir
+ file(COPY "dtoffrw_alphashapedoc_result.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+ # Do not forget to copy test files in current binary dir
+ file(COPY "${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+
+ # Unitary tests
+ add_test(dtoffrw_UT ${CMAKE_CURRENT_BINARY_DIR}/dtoffrw_UT
+ # XML format for Jenkins xUnit plugin
+ --log_format=XML --log_sink=${CMAKE_SOURCE_DIR}/dtoffrw_UT.xml --log_level=test_suite --report_level=no)
+
+ if (DIFF_PATH)
+ add_test(dtoffrw_diff_files_UT ${DIFF_PATH} ${CMAKE_CURRENT_BINARY_DIR}/UT.off ${CMAKE_CURRENT_BINARY_DIR}/dtoffrw_alphashapedoc_result.off)
+ endif()
+
+ else()
+ message(WARNING "Eigen3 not found. Version 3.1.0 is required for Alpha shapes feature.")
+ endif()
+ else()
+ message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile Alpha shapes feature. Version 4.6.0 is required.")
+ endif ()
+endif()
+
diff --git a/src/common/test/README b/src/common/test/README
new file mode 100644
index 00000000..f2a7eb5a
--- /dev/null
+++ b/src/common/test/README
@@ -0,0 +1,14 @@
+To compile:
+***********
+
+cmake .
+make
+
+To launch with details:
+***********************
+
+./dtoffrw_UT --report_level=detailed --log_level=all
+
+ ==> echo $? returns 0 in case of success (non-zero otherwise)
+
+
diff --git a/src/common/test/dtoffrw_alphashapedoc_result.off b/src/common/test/dtoffrw_alphashapedoc_result.off
new file mode 100644
index 00000000..03b7ca75
--- /dev/null
+++ b/src/common/test/dtoffrw_alphashapedoc_result.off
@@ -0,0 +1,15 @@
+nOFF
+2 7 6 0
+1 1
+7 0
+4 6
+9 6
+0 14
+2 19
+9 17
+3 0 1 2
+3 3 2 1
+3 4 0 2
+3 4 2 6
+3 6 2 3
+3 5 4 6
diff --git a/src/common/test/dtoffrw_unit_test.cpp b/src/common/test/dtoffrw_unit_test.cpp
new file mode 100644
index 00000000..f682df1a
--- /dev/null
+++ b/src/common/test/dtoffrw_unit_test.cpp
@@ -0,0 +1,90 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2015 INRIA Saclay (France)
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+// to construct a Delaunay_triangulation from an OFF file
+#include "gudhi/Delaunay_triangulation_off_io.h"
+
+#include <CGAL/Delaunay_triangulation.h>
+#include <CGAL/Epick_d.h>
+
+#include <stdlib.h>
+
+#include <iostream>
+#include <string>
+
+#define BOOST_TEST_DYN_LINK
+#define BOOST_TEST_MODULE "delaunay_triangulation_off_read_write"
+#include <boost/test/unit_test.hpp>
+
+// Use Dynamic_dimension_tag so the user can set the dimension at run time
+typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > K;
+typedef CGAL::Delaunay_triangulation<K> T;
+
+BOOST_AUTO_TEST_CASE( Delaunay_triangulation_doc_test )
+{
+ // Read the OFF file (input file name given as parameter) and triangulate the points
+ Gudhi::Delaunay_triangulation_off_reader<T> off_reader("alphacomplexdoc.off");
+ // Check the read operation was correct
+ BOOST_CHECK(off_reader.is_valid());
+
+ // Retrieve the triangulation
+ T* triangulation = off_reader.get_complex();
+ BOOST_CHECK(triangulation != nullptr);
+ // Operations on triangulation
+ BOOST_CHECK(triangulation->number_of_vertices() == 7);
+ BOOST_CHECK(triangulation->number_of_finite_full_cells() == 6);
+
+ // Write the OFF file (output file name given as parameter) with the points and triangulated cells as faces
+ Gudhi::Delaunay_triangulation_off_writer<T> off_writer("UT.off", triangulation);
+
+ // Check the write operation was correct
+ BOOST_CHECK(off_writer.is_valid());
+
+ delete triangulation;
+}
+
+BOOST_AUTO_TEST_CASE( Delaunay_triangulation_unexisting_file_read_test )
+{
+ Gudhi::Delaunay_triangulation_off_reader<T> off_reader("some_impossible_weird_file_name.off");
+ // Check the read operation was correct
+ BOOST_CHECK(!off_reader.is_valid());
+ T* triangulation = off_reader.get_complex();
+ BOOST_CHECK(triangulation == nullptr);
+}
+
+BOOST_AUTO_TEST_CASE( Delaunay_triangulation_unexisting_file_write_test )
+{
+ // Read the OFF file (input file name given as parameter) and triangulate the points
+ Gudhi::Delaunay_triangulation_off_reader<T> off_reader("alphacomplexdoc.off");
+
+ // Retrieve the triangulation
+ T* triangulation = off_reader.get_complex();
+
+ // Write the OFF file (output file name given as parameter) with the points and triangulated cells as faces
+ Gudhi::Delaunay_triangulation_off_writer<T> off_writer("/some_impossible_weird_directory_name/another_weird_directory_name/some_impossible_weird_file_name.off", triangulation);
+
+ // Check the write operation was correct
+ BOOST_CHECK(!off_writer.is_valid());
+
+ delete triangulation;
+}
+