-rw-r--r--  CMakeLists.txt | 10
-rw-r--r--  biblio/bibliography.bib | 16
-rwxr-xr-x  data/points/alphacomplexdoc.off | 10
-rw-r--r--  src/Alpha_complex/doc/Intro_alpha_complex.h | 119
-rw-r--r--  src/Alpha_complex/doc/alpha_complex_doc.ipe | 438
-rw-r--r--  src/Alpha_complex/doc/alpha_complex_doc.png | Bin 0 -> 46746 bytes
-rw-r--r--  src/Alpha_complex/doc/alpha_complex_doc_135.ipe | 514
-rw-r--r--  src/Alpha_complex/doc/alpha_complex_doc_135.png | Bin 0 -> 127130 bytes
-rw-r--r--  src/Alpha_complex/doc/alpha_complex_doc_alpha_shape.ipe | 482
-rw-r--r--  src/Alpha_complex/doc/alpha_complex_doc_alpha_shape.png | Bin 0 -> 62759 bytes
-rw-r--r--  src/Alpha_complex/example/Alpha_complex_from_off.cpp | 48
-rw-r--r--  src/Alpha_complex/example/Alpha_complex_from_points.cpp | 55
-rw-r--r--  src/Alpha_complex/example/CMakeLists.txt | 46
-rw-r--r--  src/Alpha_complex/example/alphaoffreader_for_doc.txt | 27
-rw-r--r--  src/Alpha_complex/include/gudhi/Alpha_complex.h | 364
-rw-r--r--  src/Alpha_complex/test/Alpha_complex_unit_test.cpp | 213
-rw-r--r--  src/Alpha_complex/test/CMakeLists.txt | 39
-rw-r--r--  src/Alpha_complex/test/README | 12
-rw-r--r--  src/CMakeLists.txt | 2
-rw-r--r--  src/Doxyfile | 5
-rw-r--r--  src/GudhUI/CMakeLists.txt | 40
-rw-r--r--  src/GudhUI/alpha_complex_persistence.cpp | 78
-rw-r--r--  src/GudhUI/model/Model.h | 2
-rw-r--r--  src/GudhUI/utils/Bar_code_persistence.h | 84
-rw-r--r--  src/GudhUI/view/FirstCoordProjector.h | 5
-rw-r--r--  src/Persistent_cohomology/example/CMakeLists.txt | 44
-rw-r--r--  src/Persistent_cohomology/example/alpha_complex_persistence.cpp | 55
-rw-r--r--  src/Persistent_cohomology/example/alpha_shapes_persistence.cpp | 14
-rw-r--r--  src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h | 8
-rw-r--r--  src/Simplex_tree/include/gudhi/Simplex_tree.h | 14
-rw-r--r--  src/Simplex_tree/test/simplex_tree_unit_test.cpp | 13
-rw-r--r--  src/common/doc/main_page.h | 2
-rw-r--r--  src/common/example/CMakeLists.txt | 40
-rw-r--r--  src/common/example/Delaunay_triangulation_off_rw.cpp | 54
-rw-r--r--  src/common/example/dtoffrw_alphashapedoc_result.txt | 3
-rw-r--r--  src/common/include/gudhi/Delaunay_triangulation_off_io.h | 308
-rw-r--r--  src/common/include/gudhi/Off_reader.h | 48
-rw-r--r--  src/common/test/CMakeLists.txt | 46
-rw-r--r--  src/common/test/README | 14
-rw-r--r--  src/common/test/dtoffrw_alphashapedoc_result.off | 15
-rw-r--r--  src/common/test/dtoffrw_unit_test.cpp | 90
41 files changed, 3323 insertions, 54 deletions
diff --git a/CMakeLists.txt b/CMakeLists.txt
index ae089562..460196d7 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -45,6 +45,11 @@ if (GPROF_PATH)
message("gprof found in ${GPROF_PATH}")
endif()
+FIND_PROGRAM( DIFF_PATH diff )
+if (DIFF_PATH)
+ message("diff found in ${DIFF_PATH}")
+endif()
+
if(NOT Boost_FOUND)
message(FATAL_ERROR "NOTICE: This demo requires Boost and will not be compiled.")
@@ -61,6 +66,7 @@ else()
message(STATUS "boost library dirs:" ${Boost_LIBRARY_DIRS})
include_directories(src/common/include/)
+ include_directories(src/Alpha_complex/include/)
include_directories(src/Bottleneck/include/)
include_directories(src/Contraction/include/)
include_directories(src/Hasse_complex/include/)
@@ -68,6 +74,8 @@ else()
include_directories(src/Simplex_tree/include/)
include_directories(src/Skeleton_blocker/include/)
+ add_subdirectory(src/common/example)
+ add_subdirectory(src/common/test)
add_subdirectory(src/Simplex_tree/test)
add_subdirectory(src/Simplex_tree/example)
add_subdirectory(src/Persistent_cohomology/test)
@@ -75,6 +83,8 @@ else()
add_subdirectory(src/Skeleton_blocker/test)
add_subdirectory(src/Skeleton_blocker/example)
add_subdirectory(src/Contraction/example)
+ add_subdirectory(src/Alpha_complex/example)
+ add_subdirectory(src/Alpha_complex/test)
# data points generator
add_subdirectory(data/points/generator)
diff --git a/biblio/bibliography.bib b/biblio/bibliography.bib
index 3fd1c10a..859696b4 100644
--- a/biblio/bibliography.bib
+++ b/biblio/bibliography.bib
@@ -897,6 +897,22 @@ language={English}
bibsource = {DBLP, http://dblp.uni-trier.de}
}
+@ARTICLE{AlphaShapesDefinition,
+ author = {N. Akkiraju and H. Edelsbrunner and M. Facello and P. Fu and E. P. Mucke and C. Varela},
+ title = {\href{http://pub.ist.ac.at/~edels/Papers/1995-P-06-AlphaShapesSoftware.pdf}{Alpha shapes: definition and software}},
+ journal = {Proc. Internat. Comput. Geom. Software Workshop 1995},
+ year = {1995},
+ bibsource = {http://pub.ist.ac.at}
+}
+
+@ARTICLE{AlphaShapesIntroduction,
+ author = {Kaspar Fischer},
+ title = {\href{http://www.cs.uu.nl/docs/vakken/ddm/texts/Delaunay/alphashapes.pdf}{Introduction to Alpha Shapes}},
+ journal = {Unknown},
+ year = {Unknown},
+ bibsource = {http://www.cs.uu.nl}
+}
+
misc{buddha_stanford_scan,
author = "",
title = "The Stanford 3D Scanning Repository",
diff --git a/data/points/alphacomplexdoc.off b/data/points/alphacomplexdoc.off
new file mode 100755
index 00000000..bb790193
--- /dev/null
+++ b/data/points/alphacomplexdoc.off
@@ -0,0 +1,10 @@
+nOFF
+2 7 0 0
+1.0 1.0
+7.0 0.0
+4.0 6.0
+9.0 6.0
+0.0 14.0
+2.0 19.0
+9.0 17.0
+
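Note: the new data/points/alphacomplexdoc.off above follows the nOFF convention: a header keyword, then the dimension followed by the numbers of vertices, faces and edges (here 2 7 0 0), then one point per line. As a rough, stand-alone illustration of that layout only (the patch's actual readers are src/common/include/gudhi/Off_reader.h and Delaunay_triangulation_off_io.h), such a file can be parsed with plain C++:

// Rough illustration only (not the Gudhi::Off_reader added elsewhere in this
// patch): read a 2D nOFF point file such as data/points/alphacomplexdoc.off.
#include <fstream>
#include <iostream>
#include <string>
#include <vector>

int main(int argc, char* argv[]) {
  if (argc != 2) {
    std::cerr << "Usage: " << argv[0] << " file.off\n";
    return 1;
  }
  std::ifstream in(argv[1]);
  std::string header;
  in >> header;                           // "nOFF" here; a plain "OFF" file would be 3D
  int dimension = 3, nb_points = 0, nb_faces = 0, nb_edges = 0;
  if (header == "nOFF") in >> dimension;  // the dimension is explicit only for nOFF
  in >> nb_points >> nb_faces >> nb_edges;
  std::vector<std::vector<double>> points(nb_points, std::vector<double>(dimension));
  for (auto& point : points)
    for (auto& coordinate : point) in >> coordinate;
  std::cout << "Read " << points.size() << " points of dimension " << dimension << "\n";
  return 0;
}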
diff --git a/src/Alpha_complex/doc/Intro_alpha_complex.h b/src/Alpha_complex/doc/Intro_alpha_complex.h
new file mode 100644
index 00000000..2cb37578
--- /dev/null
+++ b/src/Alpha_complex/doc/Intro_alpha_complex.h
@@ -0,0 +1,119 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2015 INRIA Saclay (France)
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+// needs namespace for Doxygen to link on classes
+namespace Gudhi {
+// needs namespace for Doxygen to link on classes
+namespace alphacomplex {
+
+/** \defgroup alpha_complex Alpha complex
+ *
+ * \author Vincent Rouvreau
+ *
+ * @{
+ *
+ * \section definition Definition
+ *
+ * Alpha_complex is a Simplex_tree constructed from the finite cells of a Delaunay triangulation.
+ *
+ * The filtration value of each simplex is its own alpha square value when the simplex is Gabriel, or
+ * the alpha square value of the coface that makes the simplex not Gabriel otherwise.
+ *
+ * Please refer to \cite AlphaShapesDefinition for a more complete definition of the alpha complex.
+ *
+ * The alpha complex is interesting because it resembles the \ref alpha-shape "Alpha shape" described in
+ * \cite AlphaShapesIntroduction (an introductory presentation of the alpha complex concept).
+ *
+ * \section example Example
+ *
+ * This example loads points from an OFF file, builds the Delaunay triangulation from the points, and finally
+ * initializes the alpha complex with it.
+ *
+ * It then displays information about the alpha complex.
+ *
+ * \include Alpha_complex_from_off.cpp
+ *
+ * When launching:
+ *
+ * \code $> ./alphaoffreader ../../data/points/alphacomplexdoc.off 60.0
+ * \endcode
+ *
+ * the program output is:
+ *
+ * \include alphaoffreader_for_doc.txt
+ *
+ * \section algorithm Algorithm
+ *
+ * <b>Data structure</b>
+ *
+ * In order to build the alpha complex, a Simplex tree is first built from the cells of a Delaunay triangulation
+ * (the filtration value of each simplex is set to NaN, which stands for unknown value):
+ * \image html "alpha_complex_doc.png" "Simplex tree structure construction example"
+ *
+ * <b>Filtration value computation algorithm</b>
+ *
+ * \f{algorithm}{
+ * \caption{Filtration value computation algorithm}\label{alpha}
+ * \begin{algorithmic}
+ * \For{i : dimension $\rightarrow$ 1}
+ * \ForAll{$\sigma$ of dimension i}
+ * \If {filtration($\sigma$) is NaN}
+ * \State filtration($\sigma$) = $\alpha^2(\sigma)$
+ * \EndIf
+ * \ForAll{$\tau$ face of $\sigma$} \Comment{propagate alpha filtration value}
+ * \If {filtration($\tau$) is not NaN}
+ * \State filtration($\tau$) = min (filtration($\tau$), filtration($\sigma$))
+ * \Else
+ * \If {$\tau$ is not Gabriel for $\sigma$}
+ * \State filtration($\tau$) = filtration($\sigma$)
+ * \EndIf
+ * \EndIf
+ * \EndFor
+ * \EndFor
+ * \EndFor
+ * \end{algorithmic}
+ * \f}
+ *
+ * In the example above, this means the algorithm first looks at each triangle ([1,2,3], [2,3,4], [1,3,5], ...),
+ * computes its filtration value, and then propagates that value to its faces as described
+ * here (a stand-alone sketch of this propagation loop is given after this file's diff):
+ * \image html "alpha_complex_doc_135.png" "Filtration value propagation example"
+ * Then, the algorithm looks at each edge ([1,2], [2,3], [1,3], ...) and
+ * computes its filtration value (in this case, propagation has no effect).
+ *
+ * Finally, the algorithm looks at each vertex ([1], [2], [3], [4], [5], [6] and [7])
+ * and sets its filtration value to 0 (for a vertex, propagation has no effect).
+ *
+ * \section alpha-shape Alpha shape
+ *
+ * In the example above, the alpha shape for \f$\alpha^2_{63} < \alpha^2 < \alpha^2_{62}\f$ is the alpha complex
+ * whose simplices satisfy \f$\alpha^2_{63} <\f$ filtration value \f$< \alpha^2_{62}\f$, as described in
+ * \cite AlphaShapesIntroduction.
+ *
+ * \image html "alpha_complex_doc_alpha_shape.png" "Alpha shape example"
+ * \copyright GNU General Public License v3.
+ * \verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim
+ */
+/** @} */ // end defgroup alpha_complex
+
+} // namespace alphacomplex
+
+} // namespace Gudhi
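The filtration-value computation described in the algorithm block of Intro_alpha_complex.h can be read as the following stand-alone sketch. The Complex container, alpha_squared() and is_gabriel() below are placeholders invented for the illustration; the real Alpha_complex.h added by this patch works on a Gudhi::Simplex_tree and relies on CGAL for the squared circumradius and Gabriel computations.

// Schematic sketch (illustration only) of the filtration-value propagation
// described in the algorithm block of Intro_alpha_complex.h above.
#include <algorithm>   // std::min
#include <cmath>       // std::isnan
#include <cstddef>
#include <iostream>
#include <limits>
#include <map>
#include <vector>

using Simplex = std::vector<int>;           // sorted vertex ids
using Complex = std::map<Simplex, double>;  // simplex -> filtration value (NaN = unknown)

double alpha_squared(const Simplex&) { return 1.0; }              // placeholder: squared circumradius
bool is_gabriel(const Simplex&, const Simplex&) { return true; }  // placeholder predicate

// All codimension-1 faces of sigma.
std::vector<Simplex> facets(const Simplex& sigma) {
  std::vector<Simplex> result;
  for (std::size_t i = 0; i < sigma.size(); ++i) {
    Simplex tau = sigma;
    tau.erase(tau.begin() + static_cast<long>(i));
    result.push_back(tau);
  }
  return result;
}

// Assumes every face of every simplex is already present in the complex.
void propagate_filtration(Complex& complex, int max_dimension) {
  for (int dim = max_dimension; dim >= 1; --dim) {
    for (auto& [sigma, filtration] : complex) {
      if (static_cast<int>(sigma.size()) != dim + 1) continue;
      if (std::isnan(filtration)) filtration = alpha_squared(sigma);
      for (const Simplex& tau : facets(sigma)) {
        double& tau_filtration = complex.at(tau);
        if (!std::isnan(tau_filtration))
          tau_filtration = std::min(tau_filtration, filtration);  // propagate the minimum
        else if (!is_gabriel(tau, sigma))
          tau_filtration = filtration;  // a non-Gabriel face inherits its coface's value
      }
    }
  }
  for (auto& [sigma, filtration] : complex)  // vertices end up with filtration value 0
    if (sigma.size() == 1) filtration = 0.0;
}

int main() {
  const double nan = std::numeric_limits<double>::quiet_NaN();
  Complex complex;  // a single triangle [0,1,2] with all its faces, values unknown
  for (const Simplex& s : {Simplex{0, 1, 2}, Simplex{0, 1}, Simplex{0, 2}, Simplex{1, 2},
                           Simplex{0}, Simplex{1}, Simplex{2}})
    complex[s] = nan;
  propagate_filtration(complex, 2);
  for (const auto& [s, f] : complex)
    std::cout << s.size() - 1 << "-simplex, filtration = " << f << "\n";
  return 0;
}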
diff --git a/src/Alpha_complex/doc/alpha_complex_doc.ipe b/src/Alpha_complex/doc/alpha_complex_doc.ipe
new file mode 100644
index 00000000..b5601143
--- /dev/null
+++ b/src/Alpha_complex/doc/alpha_complex_doc.ipe
@@ -0,0 +1,438 @@
+<?xml version="1.0"?>
+<!DOCTYPE ipe SYSTEM "ipe.dtd">
+<ipe version="70005" creator="Ipe 7.1.4">
+<info created="D:20150603143945" modified="D:20150831170130"/>
+<ipestyle name="basic">
+<symbol name="arrow/arc(spx)">
+<path stroke="sym-stroke" fill="sym-stroke" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/farc(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="mark/circle(sx)" transformations="translations">
+<path fill="sym-stroke">
+0.6 0 0 0.6 0 0 e
+0.4 0 0 0.4 0 0 e
+</path>
+</symbol>
+<symbol name="mark/disk(sx)" transformations="translations">
+<path fill="sym-stroke">
+0.6 0 0 0.6 0 0 e
+</path>
+</symbol>
+<symbol name="mark/fdisk(sfx)" transformations="translations">
+<group>
+<path fill="sym-fill">
+0.5 0 0 0.5 0 0 e
+</path>
+<path fill="sym-stroke" fillrule="eofill">
+0.6 0 0 0.6 0 0 e
+0.4 0 0 0.4 0 0 e
+</path>
+</group>
+</symbol>
+<symbol name="mark/box(sx)" transformations="translations">
+<path fill="sym-stroke" fillrule="eofill">
+-0.6 -0.6 m
+0.6 -0.6 l
+0.6 0.6 l
+-0.6 0.6 l
+h
+-0.4 -0.4 m
+0.4 -0.4 l
+0.4 0.4 l
+-0.4 0.4 l
+h
+</path>
+</symbol>
+<symbol name="mark/square(sx)" transformations="translations">
+<path fill="sym-stroke">
+-0.6 -0.6 m
+0.6 -0.6 l
+0.6 0.6 l
+-0.6 0.6 l
+h
+</path>
+</symbol>
+<symbol name="mark/fsquare(sfx)" transformations="translations">
+<group>
+<path fill="sym-fill">
+-0.5 -0.5 m
+0.5 -0.5 l
+0.5 0.5 l
+-0.5 0.5 l
+h
+</path>
+<path fill="sym-stroke" fillrule="eofill">
+-0.6 -0.6 m
+0.6 -0.6 l
+0.6 0.6 l
+-0.6 0.6 l
+h
+-0.4 -0.4 m
+0.4 -0.4 l
+0.4 0.4 l
+-0.4 0.4 l
+h
+</path>
+</group>
+</symbol>
+<symbol name="mark/cross(sx)" transformations="translations">
+<group>
+<path fill="sym-stroke">
+-0.43 -0.57 m
+0.57 0.43 l
+0.43 0.57 l
+-0.57 -0.43 l
+h
+</path>
+<path fill="sym-stroke">
+-0.43 0.57 m
+0.57 -0.43 l
+0.43 -0.57 l
+-0.57 0.43 l
+h
+</path>
+</group>
+</symbol>
+<symbol name="arrow/fnormal(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/pointed(spx)">
+<path stroke="sym-stroke" fill="sym-stroke" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-0.8 0 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/fpointed(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-0.8 0 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/linear(spx)">
+<path stroke="sym-stroke" pen="sym-pen">
+-1 0.333 m
+0 0 l
+-1 -0.333 l
+</path>
+</symbol>
+<symbol name="arrow/fdouble(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+-1 0 m
+-2 0.333 l
+-2 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/double(spx)">
+<path stroke="sym-stroke" fill="sym-stroke" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+-1 0 m
+-2 0.333 l
+-2 -0.333 l
+h
+</path>
+</symbol>
+<pen name="heavier" value="0.8"/>
+<pen name="fat" value="1.2"/>
+<pen name="ultrafat" value="2"/>
+<symbolsize name="large" value="5"/>
+<symbolsize name="small" value="2"/>
+<symbolsize name="tiny" value="1.1"/>
+<arrowsize name="large" value="10"/>
+<arrowsize name="small" value="5"/>
+<arrowsize name="tiny" value="3"/>
+<color name="red" value="1 0 0"/>
+<color name="green" value="0 1 0"/>
+<color name="blue" value="0 0 1"/>
+<color name="yellow" value="1 1 0"/>
+<color name="orange" value="1 0.647 0"/>
+<color name="gold" value="1 0.843 0"/>
+<color name="purple" value="0.627 0.125 0.941"/>
+<color name="gray" value="0.745"/>
+<color name="brown" value="0.647 0.165 0.165"/>
+<color name="navy" value="0 0 0.502"/>
+<color name="pink" value="1 0.753 0.796"/>
+<color name="seagreen" value="0.18 0.545 0.341"/>
+<color name="turquoise" value="0.251 0.878 0.816"/>
+<color name="violet" value="0.933 0.51 0.933"/>
+<color name="darkblue" value="0 0 0.545"/>
+<color name="darkcyan" value="0 0.545 0.545"/>
+<color name="darkgray" value="0.663"/>
+<color name="darkgreen" value="0 0.392 0"/>
+<color name="darkmagenta" value="0.545 0 0.545"/>
+<color name="darkorange" value="1 0.549 0"/>
+<color name="darkred" value="0.545 0 0"/>
+<color name="lightblue" value="0.678 0.847 0.902"/>
+<color name="lightcyan" value="0.878 1 1"/>
+<color name="lightgray" value="0.827"/>
+<color name="lightgreen" value="0.565 0.933 0.565"/>
+<color name="lightyellow" value="1 1 0.878"/>
+<dashstyle name="dashed" value="[4] 0"/>
+<dashstyle name="dotted" value="[1 3] 0"/>
+<dashstyle name="dash dotted" value="[4 2 1 2] 0"/>
+<dashstyle name="dash dot dotted" value="[4 2 1 2 1 2] 0"/>
+<textsize name="large" value="\large"/>
+<textsize name="Large" value="\Large"/>
+<textsize name="LARGE" value="\LARGE"/>
+<textsize name="huge" value="\huge"/>
+<textsize name="Huge" value="\Huge"/>
+<textsize name="small" value="\small"/>
+<textsize name="footnote" value="\footnotesize"/>
+<textsize name="tiny" value="\tiny"/>
+<textstyle name="center" begin="\begin{center}" end="\end{center}"/>
+<textstyle name="itemize" begin="\begin{itemize}" end="\end{itemize}"/>
+<textstyle name="item" begin="\begin{itemize}\item{}" end="\end{itemize}"/>
+<gridsize name="4 pts" value="4"/>
+<gridsize name="8 pts (~3 mm)" value="8"/>
+<gridsize name="16 pts (~6 mm)" value="16"/>
+<gridsize name="32 pts (~12 mm)" value="32"/>
+<gridsize name="10 pts (~3.5 mm)" value="10"/>
+<gridsize name="20 pts (~7 mm)" value="20"/>
+<gridsize name="14 pts (~5 mm)" value="14"/>
+<gridsize name="28 pts (~10 mm)" value="28"/>
+<gridsize name="56 pts (~20 mm)" value="56"/>
+<anglesize name="90 deg" value="90"/>
+<anglesize name="60 deg" value="60"/>
+<anglesize name="45 deg" value="45"/>
+<anglesize name="30 deg" value="30"/>
+<anglesize name="22.5 deg" value="22.5"/>
+<tiling name="falling" angle="-60" step="4" width="1"/>
+<tiling name="rising" angle="30" step="4" width="1"/>
+</ipestyle>
+<page>
+<layer name="alpha"/>
+<view layers="alpha" active="alpha"/>
+<use layer="alpha" matrix="1 0 0 1 -240 0" name="mark/fdisk(sfx)" pos="280 660" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -239.3 -10.1537" name="mark/fdisk(sfx)" pos="300 720" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -240 0" name="mark/fdisk(sfx)" pos="370 690" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -240 0" name="mark/fdisk(sfx)" pos="290 530" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -240 0" name="mark/fdisk(sfx)" pos="350 520" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -240 0" name="mark/fdisk(sfx)" pos="320 580" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -240 0" name="mark/fdisk(sfx)" pos="370 580" size="normal" stroke="black" fill="white"/>
+<path matrix="1 0 0 1 -240 0" stroke="darkcyan">
+320 580 m
+350 520 l
+290 530 l
+320 580 l
+320 580 l
+</path>
+<path matrix="1 0 0 1 -240 0" stroke="darkcyan">
+320 580 m
+280 660 l
+290 530 l
+320 580 l
+320 580 l
+</path>
+<path matrix="1 0 0 1 -240 0" stroke="darkcyan">
+320 580 m
+370 580 l
+350 520 l
+320 580 l
+</path>
+<text matrix="1 0 0 1 -260 0" transformations="translations" pos="380 530" stroke="darkcyan" type="label" width="118.196" height="8.307" depth="2.32" valign="baseline" size="large">Delaunay triangulation</text>
+<text matrix="1 0 0 1 -242.155 -3.50128" transformations="translations" pos="282.952 524.893" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">0</text>
+<text matrix="1 0 0 1 -240 0" transformations="translations" pos="352.708 510.349" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">1</text>
+<text matrix="1 0 0 1 -240 0" transformations="translations" pos="310.693 578.759" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">2</text>
+<text matrix="1 0 0 1 -240 0" transformations="translations" pos="375.332 578.49" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">3</text>
+<text matrix="1 0 0 1 -240 0" transformations="translations" pos="272.179 660.635" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">4</text>
+<text matrix="1 0 0 1 -239.3 -10.1537" transformations="translations" pos="296.419 724.197" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">5</text>
+<text matrix="1 0 0 1 -240 0" transformations="translations" pos="375.332 689.453" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
+<path matrix="1 0 0 1 -240 0" stroke="darkcyan">
+280 660 m
+300 710 l
+370 690 l
+280 660 l
+</path>
+<path matrix="1 0 0 1 -240 0" stroke="darkcyan">
+320 580 m
+370 690 l
+370 580 l
+320 580 l
+</path>
+<path matrix="1 0 0 1 -240 0" stroke="darkcyan">
+280 660 m
+370 690 l
+320 580 l
+280 660 l
+</path>
+<text matrix="1 0 0 1 -40 -40" transformations="translations" pos="360 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">0</text>
+<text matrix="1 0 0 1 -50 -40" transformations="translations" pos="360 660" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">1</text>
+<text matrix="1 0 0 1 -50 -40" transformations="translations" pos="360 640" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">2</text>
+<text matrix="1 0 0 1 -50 -40" transformations="translations" pos="370 660" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">2</text>
+<text matrix="1 0 0 1 -30 -40" transformations="translations" pos="380 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">1</text>
+<text matrix="1 0 0 1 -30 -40" transformations="translations" pos="380 660" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">2</text>
+<text matrix="1 0 0 1 -10 -40" transformations="translations" pos="400 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">2</text>
+<text matrix="1 0 0 1 -40 -40" transformations="translations" pos="390 640" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">3</text>
+<text matrix="1 0 0 1 -40 -40" transformations="translations" pos="400 660" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">3</text>
+<text matrix="1 0 0 1 -30 -40" transformations="translations" pos="410 660" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">3</text>
+<text matrix="1 0 0 1 -10 -40" transformations="translations" pos="430 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">3</text>
+<text matrix="1 0 0 1 -50 -40" transformations="translations" pos="370 640" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">4</text>
+<text matrix="1 0 0 1 -50 -40" transformations="translations" pos="380 660" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">4</text>
+<text matrix="1 0 0 1 -40 -40" transformations="translations" pos="430 660" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">4</text>
+<text matrix="1 0 0 1 -20 -40" transformations="translations" pos="460 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">4</text>
+<text matrix="1 0 0 1 -50 -40" transformations="translations" pos="430 640" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
+<text matrix="1 0 0 1 -50 -40" transformations="translations" pos="450 660" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
+<text matrix="1 0 0 1 -40 -40" transformations="translations" pos="460 660" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
+<text matrix="1 0 0 1 -30 -40" transformations="translations" pos="520 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
+<text matrix="1 0 0 1 90 -40" transformations="translations" pos="300 640" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
+<text matrix="1 0 0 1 100 -40" transformations="translations" pos="350 660" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
+<text matrix="1 0 0 1 90 -40" transformations="translations" pos="350 660" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">5</text>
+<text matrix="1 0 0 1 90 -40" transformations="translations" pos="350 640" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
+<text matrix="1 0 0 1 90 -40" transformations="translations" pos="380 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">5</text>
+<path matrix="1 0 0 1 90 -40" stroke="black">
+4 0 0 4 320 704 e
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black">
+322.919 706.788 m
+317.189 701.058 l
+317.189 701.203 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black">
+317.551 706.934 m
+322.629 701.058 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+230 680 m
+240 670 l
+</path>
+<path matrix="1 0 0 1 120 -40" stroke="black" arrow="normal/tiny">
+230 680 m
+240 670 l
+</path>
+<path matrix="1 0 0 1 160 -40" stroke="black" arrow="normal/tiny">
+230 680 m
+240 670 l
+</path>
+<path matrix="1 0 0 1 210 -40" stroke="black" arrow="normal/tiny">
+230 680 m
+240 670 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+230 680 m
+220 670 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+230 680 m
+230 670 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+220 660 m
+220 650 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+230 660 m
+230 650 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+260 680 m
+260 670 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+260 660 m
+260 650 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+300 680 m
+300 670 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+300 680 m
+290 670 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+290 660 m
+290 650 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+300 660 m
+300 650 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+330 680 m
+330 670 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+350 680 m
+350 670 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+350 660 m
+350 650 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+320 700 m
+240 690 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+320 700 m
+270 690 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+320 700 m
+310 690 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+320 700 m
+330 690 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+320 700 m
+350 690 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+320 700 m
+380 690 l
+</path>
+<path matrix="1 0 0 1 90 -40" stroke="black" arrow="normal/tiny">
+320 700 m
+400 690 l
+</path>
+<path matrix="1 0 0 1 50 0" stroke="black">
+240 620 m
+220 600 l
+</path>
+<path matrix="1 0 0 1 50 0" stroke="black">
+240 620 m
+220 640 l
+</path>
+<text transformations="translations" pos="180 620" stroke="black" type="label" width="97.274" height="6.926" depth="1.93" valign="baseline">Simplex tree structure</text>
+<path stroke="black">
+280 630 m
+170 630 l
+</path>
+<path stroke="black">
+280 610 m
+170 610 l
+</path>
+</page>
+</ipe>
diff --git a/src/Alpha_complex/doc/alpha_complex_doc.png b/src/Alpha_complex/doc/alpha_complex_doc.png
new file mode 100644
index 00000000..601ac051
--- /dev/null
+++ b/src/Alpha_complex/doc/alpha_complex_doc.png
Binary files differ
diff --git a/src/Alpha_complex/doc/alpha_complex_doc_135.ipe b/src/Alpha_complex/doc/alpha_complex_doc_135.ipe
new file mode 100644
index 00000000..28b893b8
--- /dev/null
+++ b/src/Alpha_complex/doc/alpha_complex_doc_135.ipe
@@ -0,0 +1,514 @@
+<?xml version="1.0"?>
+<!DOCTYPE ipe SYSTEM "ipe.dtd">
+<ipe version="70005" creator="Ipe 7.1.4">
+<info created="D:20150603143945" modified="D:20150901093044"/>
+<ipestyle name="basic">
+<symbol name="arrow/arc(spx)">
+<path stroke="sym-stroke" fill="sym-stroke" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/farc(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="mark/circle(sx)" transformations="translations">
+<path fill="sym-stroke">
+0.6 0 0 0.6 0 0 e
+0.4 0 0 0.4 0 0 e
+</path>
+</symbol>
+<symbol name="mark/disk(sx)" transformations="translations">
+<path fill="sym-stroke">
+0.6 0 0 0.6 0 0 e
+</path>
+</symbol>
+<symbol name="mark/fdisk(sfx)" transformations="translations">
+<group>
+<path fill="sym-fill">
+0.5 0 0 0.5 0 0 e
+</path>
+<path fill="sym-stroke" fillrule="eofill">
+0.6 0 0 0.6 0 0 e
+0.4 0 0 0.4 0 0 e
+</path>
+</group>
+</symbol>
+<symbol name="mark/box(sx)" transformations="translations">
+<path fill="sym-stroke" fillrule="eofill">
+-0.6 -0.6 m
+0.6 -0.6 l
+0.6 0.6 l
+-0.6 0.6 l
+h
+-0.4 -0.4 m
+0.4 -0.4 l
+0.4 0.4 l
+-0.4 0.4 l
+h
+</path>
+</symbol>
+<symbol name="mark/square(sx)" transformations="translations">
+<path fill="sym-stroke">
+-0.6 -0.6 m
+0.6 -0.6 l
+0.6 0.6 l
+-0.6 0.6 l
+h
+</path>
+</symbol>
+<symbol name="mark/fsquare(sfx)" transformations="translations">
+<group>
+<path fill="sym-fill">
+-0.5 -0.5 m
+0.5 -0.5 l
+0.5 0.5 l
+-0.5 0.5 l
+h
+</path>
+<path fill="sym-stroke" fillrule="eofill">
+-0.6 -0.6 m
+0.6 -0.6 l
+0.6 0.6 l
+-0.6 0.6 l
+h
+-0.4 -0.4 m
+0.4 -0.4 l
+0.4 0.4 l
+-0.4 0.4 l
+h
+</path>
+</group>
+</symbol>
+<symbol name="mark/cross(sx)" transformations="translations">
+<group>
+<path fill="sym-stroke">
+-0.43 -0.57 m
+0.57 0.43 l
+0.43 0.57 l
+-0.57 -0.43 l
+h
+</path>
+<path fill="sym-stroke">
+-0.43 0.57 m
+0.57 -0.43 l
+0.43 -0.57 l
+-0.57 0.43 l
+h
+</path>
+</group>
+</symbol>
+<symbol name="arrow/fnormal(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/pointed(spx)">
+<path stroke="sym-stroke" fill="sym-stroke" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-0.8 0 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/fpointed(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-0.8 0 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/linear(spx)">
+<path stroke="sym-stroke" pen="sym-pen">
+-1 0.333 m
+0 0 l
+-1 -0.333 l
+</path>
+</symbol>
+<symbol name="arrow/fdouble(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+-1 0 m
+-2 0.333 l
+-2 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/double(spx)">
+<path stroke="sym-stroke" fill="sym-stroke" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+-1 0 m
+-2 0.333 l
+-2 -0.333 l
+h
+</path>
+</symbol>
+<pen name="heavier" value="0.8"/>
+<pen name="fat" value="1.2"/>
+<pen name="ultrafat" value="2"/>
+<symbolsize name="large" value="5"/>
+<symbolsize name="small" value="2"/>
+<symbolsize name="tiny" value="1.1"/>
+<arrowsize name="large" value="10"/>
+<arrowsize name="small" value="5"/>
+<arrowsize name="tiny" value="3"/>
+<color name="red" value="1 0 0"/>
+<color name="green" value="0 1 0"/>
+<color name="blue" value="0 0 1"/>
+<color name="yellow" value="1 1 0"/>
+<color name="orange" value="1 0.647 0"/>
+<color name="gold" value="1 0.843 0"/>
+<color name="purple" value="0.627 0.125 0.941"/>
+<color name="gray" value="0.745"/>
+<color name="brown" value="0.647 0.165 0.165"/>
+<color name="navy" value="0 0 0.502"/>
+<color name="pink" value="1 0.753 0.796"/>
+<color name="seagreen" value="0.18 0.545 0.341"/>
+<color name="turquoise" value="0.251 0.878 0.816"/>
+<color name="violet" value="0.933 0.51 0.933"/>
+<color name="darkblue" value="0 0 0.545"/>
+<color name="darkcyan" value="0 0.545 0.545"/>
+<color name="darkgray" value="0.663"/>
+<color name="darkgreen" value="0 0.392 0"/>
+<color name="darkmagenta" value="0.545 0 0.545"/>
+<color name="darkorange" value="1 0.549 0"/>
+<color name="darkred" value="0.545 0 0"/>
+<color name="lightblue" value="0.678 0.847 0.902"/>
+<color name="lightcyan" value="0.878 1 1"/>
+<color name="lightgray" value="0.827"/>
+<color name="lightgreen" value="0.565 0.933 0.565"/>
+<color name="lightyellow" value="1 1 0.878"/>
+<dashstyle name="dashed" value="[4] 0"/>
+<dashstyle name="dotted" value="[1 3] 0"/>
+<dashstyle name="dash dotted" value="[4 2 1 2] 0"/>
+<dashstyle name="dash dot dotted" value="[4 2 1 2 1 2] 0"/>
+<textsize name="large" value="\large"/>
+<textsize name="Large" value="\Large"/>
+<textsize name="LARGE" value="\LARGE"/>
+<textsize name="huge" value="\huge"/>
+<textsize name="Huge" value="\Huge"/>
+<textsize name="small" value="\small"/>
+<textsize name="footnote" value="\footnotesize"/>
+<textsize name="tiny" value="\tiny"/>
+<textstyle name="center" begin="\begin{center}" end="\end{center}"/>
+<textstyle name="itemize" begin="\begin{itemize}" end="\end{itemize}"/>
+<textstyle name="item" begin="\begin{itemize}\item{}" end="\end{itemize}"/>
+<gridsize name="4 pts" value="4"/>
+<gridsize name="8 pts (~3 mm)" value="8"/>
+<gridsize name="16 pts (~6 mm)" value="16"/>
+<gridsize name="32 pts (~12 mm)" value="32"/>
+<gridsize name="10 pts (~3.5 mm)" value="10"/>
+<gridsize name="20 pts (~7 mm)" value="20"/>
+<gridsize name="14 pts (~5 mm)" value="14"/>
+<gridsize name="28 pts (~10 mm)" value="28"/>
+<gridsize name="56 pts (~20 mm)" value="56"/>
+<anglesize name="90 deg" value="90"/>
+<anglesize name="60 deg" value="60"/>
+<anglesize name="45 deg" value="45"/>
+<anglesize name="30 deg" value="30"/>
+<anglesize name="22.5 deg" value="22.5"/>
+<tiling name="falling" angle="-60" step="4" width="1"/>
+<tiling name="rising" angle="30" step="4" width="1"/>
+</ipestyle>
+<page>
+<layer name="alpha"/>
+<view layers="alpha" active="alpha"/>
+<use layer="alpha" matrix="1 0 0 1 0 80" name="mark/fdisk(sfx)" pos="280 660" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 0.700256 69.8463" name="mark/fdisk(sfx)" pos="300 720" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 0 80" name="mark/fdisk(sfx)" pos="370 690" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 0 80" name="mark/fdisk(sfx)" pos="290 530" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 0 80" name="mark/fdisk(sfx)" pos="350 520" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 0 80" name="mark/fdisk(sfx)" pos="320 580" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 0 80" name="mark/fdisk(sfx)" pos="370 580" size="normal" stroke="black" fill="white"/>
+<path matrix="1 0 0 1 0 80" stroke="lightgray">
+320 580 m
+350 520 l
+290 530 l
+320 580 l
+320 580 l
+</path>
+<path matrix="1 0 0 1 0 80" stroke="darkcyan" pen="heavier">
+320 580 m
+280 660 l
+290 530 l
+320 580 l
+320 580 l
+</path>
+<path matrix="1 0 0 1 0 80" stroke="lightgray">
+320 580 m
+370 580 l
+350 520 l
+320 580 l
+</path>
+<text matrix="1 0 0 1 0 80" transformations="translations" pos="380 530" stroke="darkcyan" type="label" width="54.628" height="8.965" depth="2.99" valign="baseline" size="large">Cell [4,2,0]</text>
+<text matrix="1 0 0 1 -2.15463 76.4987" transformations="translations" pos="282.952 524.893" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">0</text>
+<text matrix="1 0 0 1 0 80" transformations="translations" pos="352.708 510.349" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">1</text>
+<text matrix="1 0 0 1 0 80" transformations="translations" pos="310.693 578.759" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">2</text>
+<text matrix="1 0 0 1 0 80" transformations="translations" pos="375.332 578.49" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">3</text>
+<text matrix="1 0 0 1 0 80" transformations="translations" pos="272.179 660.635" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">4</text>
+<text matrix="1 0 0 1 0.700256 69.8463" transformations="translations" pos="296.419 724.197" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">5</text>
+<text matrix="1 0 0 1 0 80" transformations="translations" pos="375.332 689.453" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
+<path matrix="1 0 0 1 0 80" stroke="lightgray">
+280 660 m
+300 710 l
+370 690 l
+280 660 l
+</path>
+<path matrix="1 0 0 1 0 80" stroke="lightgray">
+320 580 m
+370 690 l
+370 580 l
+320 580 l
+</path>
+<path matrix="1 0 0 1 0 80" stroke="lightgray">
+280 660 m
+370 690 l
+320 580 l
+280 660 l
+</path>
+<path matrix="1 0 0 1 0 80" stroke="darkcyan">
+77.2727 0 0 77.2727 243.636 591.818 e
+</path>
+<use matrix="1 0 0 1 0 80" name="mark/fdisk(sfx)" pos="243.636 591.818" size="normal" stroke="darkcyan" fill="white"/>
+<path matrix="1 0 0 1 0 80" stroke="darkcyan">
+243.428 591.569 m
+186.061 643.28 l
+</path>
+<text matrix="1 0 0 1 0 80" transformations="translations" pos="212.724 627.389" stroke="darkcyan" type="label" width="18.785" height="4.294" depth="1.49" valign="baseline">$\alpha_{420}$</text>
+<use matrix="1 0 0 1 -264 -162" name="mark/fdisk(sfx)" pos="280 660" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -263.3 -172.154" name="mark/fdisk(sfx)" pos="300 720" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -264 -162" name="mark/fdisk(sfx)" pos="370 690" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -264 -162" name="mark/fdisk(sfx)" pos="290 530" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -264 -162" name="mark/fdisk(sfx)" pos="350 520" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -264 -162" name="mark/fdisk(sfx)" pos="320 580" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -264 -162" name="mark/fdisk(sfx)" pos="370 580" size="normal" stroke="black" fill="white"/>
+<path matrix="1 0 0 1 -264 -162" stroke="lightgray">
+320 580 m
+350 520 l
+290 530 l
+320 580 l
+320 580 l
+</path>
+<path matrix="1 0 0 1 -264 -162" stroke="lightgray">
+320 580 m
+280 660 l
+290 530 l
+320 580 l
+320 580 l
+</path>
+<path matrix="1 0 0 1 -264 -162" stroke="lightgray">
+320 580 m
+370 580 l
+350 520 l
+320 580 l
+</path>
+<text matrix="0.582962 0 0 1 -211.265 -209.555" transformations="translations" pos="380 530" stroke="darkcyan" type="label" width="231.798" height="8.965" depth="2.99" valign="baseline" size="large">[2,0] is Gabriel $\rightarrow$ $\alpha_{20}$ is not$\\$
+modified (NaN)
+</text>
+<text matrix="1 0 0 1 -266.155 -165.501" transformations="translations" pos="282.952 524.893" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">0</text>
+<text matrix="1 0 0 1 -264 -162" transformations="translations" pos="352.708 510.349" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">1</text>
+<text matrix="1 0 0 1 -264 -162" transformations="translations" pos="310.693 578.759" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">2</text>
+<text matrix="1 0 0 1 -264 -162" transformations="translations" pos="375.332 578.49" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">3</text>
+<text matrix="1 0 0 1 -264 -172" transformations="translations" pos="272.179 660.635" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">4</text>
+<text matrix="1 0 0 1 -263.3 -172.154" transformations="translations" pos="296.419 724.197" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">5</text>
+<text matrix="1 0 0 1 -264 -162" transformations="translations" pos="375.332 689.453" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
+<path matrix="1 0 0 1 -264 -162" stroke="lightgray">
+280 660 m
+300 710 l
+370 690 l
+280 660 l
+</path>
+<path matrix="1 0 0 1 -264 -162" stroke="lightgray">
+320 580 m
+370 690 l
+370 580 l
+320 580 l
+</path>
+<path matrix="1 0 0 1 -264 -162" stroke="lightgray">
+280 660 m
+370 690 l
+320 580 l
+280 660 l
+</path>
+<text matrix="1 0 0 1 -166.834 -240.52" transformations="translations" pos="212.724 627.389" stroke="darkcyan" type="label" width="14.814" height="4.294" depth="1.49" valign="baseline">$\alpha_{20}$</text>
+<path matrix="1 0 0 1 -264 -162" stroke="darkcyan" pen="heavier">
+290 530 m
+320 580 l
+</path>
+<path matrix="1 0 0 1 -264 -162" stroke="darkcyan">
+29.1548 0 0 29.1548 305 555 e
+</path>
+<use matrix="1 0 0 1 -264 -162" name="mark/fdisk(sfx)" pos="305 555" size="normal" stroke="darkcyan" fill="white"/>
+<path matrix="1 0 0 1 -264 -162" stroke="darkcyan">
+304.883 555.015 m
+334.509 555.015 l
+</path>
+<use matrix="1 0 0 1 -38 -164" name="mark/fdisk(sfx)" pos="280 660" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -37.2997 -174.154" name="mark/fdisk(sfx)" pos="300 720" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -38 -164" name="mark/fdisk(sfx)" pos="370 690" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -38 -164" name="mark/fdisk(sfx)" pos="290 530" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -38 -164" name="mark/fdisk(sfx)" pos="350 520" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -38 -164" name="mark/fdisk(sfx)" pos="320 580" size="normal" stroke="darkred" fill="white"/>
+<use matrix="1 0 0 1 -38 -164" name="mark/fdisk(sfx)" pos="370 580" size="normal" stroke="black" fill="white"/>
+<path matrix="1 0 0 1 -37.2997 -163.65" stroke="lightgray">
+320 580 m
+350 520 l
+290 530 l
+320 580 l
+320 580 l
+</path>
+<path matrix="1 0 0 1 -38 -164" stroke="lightgray">
+320 580 m
+280 660 l
+290 530 l
+320 580 l
+320 580 l
+</path>
+<path matrix="1 0 0 1 -38 -164" stroke="lightgray">
+320 580 m
+370 580 l
+350 520 l
+320 580 l
+</path>
+<text matrix="1 0 0 1 -199.21 -189.117" transformations="translations" pos="380 530" stroke="darkred" type="label" width="168.308" height="8.965" depth="2.99" valign="baseline" size="large">[0,4] is not Gabriel $\rightarrow$ $\alpha_{40} = \alpha_{420}$</text>
+<text matrix="1 0 0 1 -40.1546 -167.501" transformations="translations" pos="282.952 524.893" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">0</text>
+<text matrix="1 0 0 1 -38 -164" transformations="translations" pos="352.708 510.349" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">1</text>
+<text matrix="1 0 0 1 -38 -164" transformations="translations" pos="310.693 578.759" stroke="darkred" type="label" width="4.981" height="6.42" depth="0" valign="baseline">2</text>
+<text matrix="1 0 0 1 -38 -164" transformations="translations" pos="375.332 578.49" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">3</text>
+<text matrix="1 0 0 1 -38 -164" transformations="translations" pos="272.179 660.635" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">4</text>
+<text matrix="1 0 0 1 -37.2997 -174.154" transformations="translations" pos="296.419 724.197" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">5</text>
+<text matrix="1 0 0 1 -38 -164" transformations="translations" pos="375.332 689.453" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
+<path matrix="1 0 0 1 -38 -164" stroke="lightgray">
+280 660 m
+300 710 l
+370 690 l
+280 660 l
+</path>
+<path matrix="1 0 0 1 -38 -164" stroke="lightgray">
+320 580 m
+370 690 l
+370 580 l
+320 580 l
+</path>
+<path matrix="1 0 0 1 -38 -164" stroke="lightgray">
+280 660 m
+370 690 l
+320 580 l
+280 660 l
+</path>
+<text matrix="1 0 0 1 52.4654 -193.97" transformations="translations" pos="212.724 627.389" stroke="darkcyan" type="label" width="14.814" height="4.294" depth="1.49" valign="baseline">$\alpha_{40}$</text>
+<path matrix="1 0 0 1 -38 -164" stroke="darkcyan" pen="heavier">
+290 530 m
+280 660 l
+</path>
+<path matrix="1 0 0 1 -38 -164" stroke="darkred" pen="heavier">
+65.192 0 0 65.192 285 595 e
+</path>
+<use matrix="1 0 0 1 126 -162" name="mark/fdisk(sfx)" pos="280 660" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 126.7 -172.154" name="mark/fdisk(sfx)" pos="300 720" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 126 -162" name="mark/fdisk(sfx)" pos="370 690" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 126 -162" name="mark/fdisk(sfx)" pos="290 530" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 126 -162" name="mark/fdisk(sfx)" pos="350 520" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 126 -162" name="mark/fdisk(sfx)" pos="320 580" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 126 -162" name="mark/fdisk(sfx)" pos="370 580" size="normal" stroke="black" fill="white"/>
+<path matrix="1 0 0 1 126 -162" stroke="lightgray">
+320 580 m
+350 520 l
+290 530 l
+320 580 l
+320 580 l
+</path>
+<path matrix="1 0 0 1 126 -162" stroke="lightgray">
+320 580 m
+280 660 l
+290 530 l
+320 580 l
+320 580 l
+</path>
+<path matrix="1 0 0 1 126 -162" stroke="lightgray">
+320 580 m
+370 580 l
+350 520 l
+320 580 l
+</path>
+<text matrix="1 0 0 1 123.845 -165.501" transformations="translations" pos="282.952 524.893" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">0</text>
+<text matrix="1 0 0 1 126 -162" transformations="translations" pos="352.708 510.349" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">1</text>
+<text matrix="1 0 0 1 126 -162" transformations="translations" pos="310.693 578.759" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">2</text>
+<text matrix="1 0 0 1 126 -162" transformations="translations" pos="375.332 578.49" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">3</text>
+<text matrix="1 0 0 1 126.7 -172.154" transformations="translations" pos="296.419 724.197" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">5</text>
+<text matrix="1 0 0 1 126 -162" transformations="translations" pos="375.332 689.453" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
+<path matrix="1 0 0 1 126 -162" stroke="lightgray">
+280 660 m
+300 710 l
+370 690 l
+280 660 l
+</path>
+<path matrix="1 0 0 1 126 -162" stroke="lightgray">
+320 580 m
+370 690 l
+370 580 l
+320 580 l
+</path>
+<path matrix="1 0 0 1 126 -162" stroke="lightgray">
+280 660 m
+370 690 l
+320 580 l
+280 660 l
+</path>
+<text matrix="1 0 0 1 225.859 -165.729" transformations="translations" pos="212.724 627.389" stroke="darkcyan" type="label" width="14.814" height="4.294" depth="1.49" valign="baseline">$\alpha_{42}$</text>
+<text matrix="1 0 0 1 122 -164" transformations="translations" pos="272.179 660.635" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">4</text>
+<path stroke="darkcyan" pen="heavier">
+406.093 497.775 m
+446.094 418.092 l
+</path>
+<path stroke="darkcyan">
+44.5799 0 0 44.5799 425.934 457.774 e
+</path>
+<use name="mark/fdisk(sfx)" pos="425.934 457.774" size="normal" stroke="darkcyan" fill="white"/>
+<path stroke="darkcyan">
+425.854 457.774 m
+470.795 457.774 l
+</path>
+<text matrix="1 0 0 1 -48.9756 -209.799" transformations="translations" pos="380 530" stroke="darkcyan" type="label" width="231.798" height="8.965" depth="2.99" valign="baseline" size="large">[2,4] is Gabriel $\rightarrow$ $\alpha_{42}$ is not modified (NaN)
+</text>
+<path stroke="darkblue" arrow="normal/normal">
+205.028 596.091 m
+110.946 544.02 l
+</path>
+<path stroke="darkblue" arrow="normal/normal">
+280.768 588.99 m
+280.768 547.57 l
+</path>
+<path stroke="darkblue" arrow="normal/normal">
+341.123 594.316 m
+413.904 554.079 l
+</path>
+<text matrix="1 0 0 1 39.645 -2.36686" transformations="translations" pos="199.703 569.464" stroke="darkblue" type="label" width="93.206" height="7.473" depth="2.49" valign="baseline">For all faces of [4,2,0]</text>
+<text matrix="1 0 0 1 -93.391 2.68003" transformations="translations" pos="104.437 300.174" stroke="black" type="label" width="208.621" height="6.926" depth="1.93" valign="baseline">N.B. : is Gabriel on a single point has no sense.</text>
+<text matrix="1 0 0 1 -36.9231 10" transformations="translations" pos="48 784" stroke="black" type="label" width="118.324" height="7.473" depth="2.49" valign="baseline">Dimension =2 - $\sigma$ = [4,2,0]</text>
+<use name="mark/fdisk(sfx)" pos="247 431" size="normal" stroke="darkcyan" fill="white"/>
+<path stroke="darkcyan">
+247.333 430.892 m
+311.764 430.892 l
+</path>
+</page>
+</ipe>
diff --git a/src/Alpha_complex/doc/alpha_complex_doc_135.png b/src/Alpha_complex/doc/alpha_complex_doc_135.png
new file mode 100644
index 00000000..5dce5edd
--- /dev/null
+++ b/src/Alpha_complex/doc/alpha_complex_doc_135.png
Binary files differ
diff --git a/src/Alpha_complex/doc/alpha_complex_doc_alpha_shape.ipe b/src/Alpha_complex/doc/alpha_complex_doc_alpha_shape.ipe
new file mode 100644
index 00000000..7563cf3d
--- /dev/null
+++ b/src/Alpha_complex/doc/alpha_complex_doc_alpha_shape.ipe
@@ -0,0 +1,482 @@
+<?xml version="1.0"?>
+<!DOCTYPE ipe SYSTEM "ipe.dtd">
+<ipe version="70005" creator="Ipe 7.1.4">
+<info created="D:20150603143945" modified="D:20150831165646"/>
+<ipestyle name="basic">
+<symbol name="arrow/arc(spx)">
+<path stroke="sym-stroke" fill="sym-stroke" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/farc(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="mark/circle(sx)" transformations="translations">
+<path fill="sym-stroke">
+0.6 0 0 0.6 0 0 e
+0.4 0 0 0.4 0 0 e
+</path>
+</symbol>
+<symbol name="mark/disk(sx)" transformations="translations">
+<path fill="sym-stroke">
+0.6 0 0 0.6 0 0 e
+</path>
+</symbol>
+<symbol name="mark/fdisk(sfx)" transformations="translations">
+<group>
+<path fill="sym-fill">
+0.5 0 0 0.5 0 0 e
+</path>
+<path fill="sym-stroke" fillrule="eofill">
+0.6 0 0 0.6 0 0 e
+0.4 0 0 0.4 0 0 e
+</path>
+</group>
+</symbol>
+<symbol name="mark/box(sx)" transformations="translations">
+<path fill="sym-stroke" fillrule="eofill">
+-0.6 -0.6 m
+0.6 -0.6 l
+0.6 0.6 l
+-0.6 0.6 l
+h
+-0.4 -0.4 m
+0.4 -0.4 l
+0.4 0.4 l
+-0.4 0.4 l
+h
+</path>
+</symbol>
+<symbol name="mark/square(sx)" transformations="translations">
+<path fill="sym-stroke">
+-0.6 -0.6 m
+0.6 -0.6 l
+0.6 0.6 l
+-0.6 0.6 l
+h
+</path>
+</symbol>
+<symbol name="mark/fsquare(sfx)" transformations="translations">
+<group>
+<path fill="sym-fill">
+-0.5 -0.5 m
+0.5 -0.5 l
+0.5 0.5 l
+-0.5 0.5 l
+h
+</path>
+<path fill="sym-stroke" fillrule="eofill">
+-0.6 -0.6 m
+0.6 -0.6 l
+0.6 0.6 l
+-0.6 0.6 l
+h
+-0.4 -0.4 m
+0.4 -0.4 l
+0.4 0.4 l
+-0.4 0.4 l
+h
+</path>
+</group>
+</symbol>
+<symbol name="mark/cross(sx)" transformations="translations">
+<group>
+<path fill="sym-stroke">
+-0.43 -0.57 m
+0.57 0.43 l
+0.43 0.57 l
+-0.57 -0.43 l
+h
+</path>
+<path fill="sym-stroke">
+-0.43 0.57 m
+0.57 -0.43 l
+0.43 -0.57 l
+-0.57 0.43 l
+h
+</path>
+</group>
+</symbol>
+<symbol name="arrow/fnormal(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/pointed(spx)">
+<path stroke="sym-stroke" fill="sym-stroke" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-0.8 0 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/fpointed(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-0.8 0 l
+-1 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/linear(spx)">
+<path stroke="sym-stroke" pen="sym-pen">
+-1 0.333 m
+0 0 l
+-1 -0.333 l
+</path>
+</symbol>
+<symbol name="arrow/fdouble(spx)">
+<path stroke="sym-stroke" fill="white" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+-1 0 m
+-2 0.333 l
+-2 -0.333 l
+h
+</path>
+</symbol>
+<symbol name="arrow/double(spx)">
+<path stroke="sym-stroke" fill="sym-stroke" pen="sym-pen">
+0 0 m
+-1 0.333 l
+-1 -0.333 l
+h
+-1 0 m
+-2 0.333 l
+-2 -0.333 l
+h
+</path>
+</symbol>
+<pen name="heavier" value="0.8"/>
+<pen name="fat" value="1.2"/>
+<pen name="ultrafat" value="2"/>
+<symbolsize name="large" value="5"/>
+<symbolsize name="small" value="2"/>
+<symbolsize name="tiny" value="1.1"/>
+<arrowsize name="large" value="10"/>
+<arrowsize name="small" value="5"/>
+<arrowsize name="tiny" value="3"/>
+<color name="red" value="1 0 0"/>
+<color name="green" value="0 1 0"/>
+<color name="blue" value="0 0 1"/>
+<color name="yellow" value="1 1 0"/>
+<color name="orange" value="1 0.647 0"/>
+<color name="gold" value="1 0.843 0"/>
+<color name="purple" value="0.627 0.125 0.941"/>
+<color name="gray" value="0.745"/>
+<color name="brown" value="0.647 0.165 0.165"/>
+<color name="navy" value="0 0 0.502"/>
+<color name="pink" value="1 0.753 0.796"/>
+<color name="seagreen" value="0.18 0.545 0.341"/>
+<color name="turquoise" value="0.251 0.878 0.816"/>
+<color name="violet" value="0.933 0.51 0.933"/>
+<color name="darkblue" value="0 0 0.545"/>
+<color name="darkcyan" value="0 0.545 0.545"/>
+<color name="darkgray" value="0.663"/>
+<color name="darkgreen" value="0 0.392 0"/>
+<color name="darkmagenta" value="0.545 0 0.545"/>
+<color name="darkorange" value="1 0.549 0"/>
+<color name="darkred" value="0.545 0 0"/>
+<color name="lightblue" value="0.678 0.847 0.902"/>
+<color name="lightcyan" value="0.878 1 1"/>
+<color name="lightgray" value="0.827"/>
+<color name="lightgreen" value="0.565 0.933 0.565"/>
+<color name="lightyellow" value="1 1 0.878"/>
+<dashstyle name="dashed" value="[4] 0"/>
+<dashstyle name="dotted" value="[1 3] 0"/>
+<dashstyle name="dash dotted" value="[4 2 1 2] 0"/>
+<dashstyle name="dash dot dotted" value="[4 2 1 2 1 2] 0"/>
+<textsize name="large" value="\large"/>
+<textsize name="Large" value="\Large"/>
+<textsize name="LARGE" value="\LARGE"/>
+<textsize name="huge" value="\huge"/>
+<textsize name="Huge" value="\Huge"/>
+<textsize name="small" value="\small"/>
+<textsize name="footnote" value="\footnotesize"/>
+<textsize name="tiny" value="\tiny"/>
+<textstyle name="center" begin="\begin{center}" end="\end{center}"/>
+<textstyle name="itemize" begin="\begin{itemize}" end="\end{itemize}"/>
+<textstyle name="item" begin="\begin{itemize}\item{}" end="\end{itemize}"/>
+<gridsize name="4 pts" value="4"/>
+<gridsize name="8 pts (~3 mm)" value="8"/>
+<gridsize name="16 pts (~6 mm)" value="16"/>
+<gridsize name="32 pts (~12 mm)" value="32"/>
+<gridsize name="10 pts (~3.5 mm)" value="10"/>
+<gridsize name="20 pts (~7 mm)" value="20"/>
+<gridsize name="14 pts (~5 mm)" value="14"/>
+<gridsize name="28 pts (~10 mm)" value="28"/>
+<gridsize name="56 pts (~20 mm)" value="56"/>
+<anglesize name="90 deg" value="90"/>
+<anglesize name="60 deg" value="60"/>
+<anglesize name="45 deg" value="45"/>
+<anglesize name="30 deg" value="30"/>
+<anglesize name="22.5 deg" value="22.5"/>
+<tiling name="falling" angle="-60" step="4" width="1"/>
+<tiling name="rising" angle="30" step="4" width="1"/>
+</ipestyle>
+<page>
+<layer name="alpha"/>
+<view layers="alpha" active="alpha"/>
+<use layer="alpha" matrix="1 0 0 1 -170.178 30.1775" name="mark/fdisk(sfx)" pos="280 660" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -169.478 20.0238" name="mark/fdisk(sfx)" pos="300 720" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -170.178 30.1775" name="mark/fdisk(sfx)" pos="370 690" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -170.178 30.1775" name="mark/fdisk(sfx)" pos="290 530" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -170.178 30.1775" name="mark/fdisk(sfx)" pos="350 520" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -170.178 30.1775" name="mark/fdisk(sfx)" pos="320 580" size="normal" stroke="black" fill="white"/>
+<use matrix="1 0 0 1 -170.178 30.1775" name="mark/fdisk(sfx)" pos="370 580" size="normal" stroke="black" fill="white"/>
+<text matrix="1 0 0 1 -190.178 30.1775" transformations="translations" pos="380 530" stroke="darkred" type="label" width="63.793" height="8.307" depth="2.32" valign="baseline" size="large">Alpha shape</text>
+<text matrix="1 0 0 1 -172.333 26.6762" transformations="translations" pos="282.952 524.893" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">0</text>
+<text matrix="1 0 0 1 -170.178 30.1775" transformations="translations" pos="352.708 510.349" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">1</text>
+<text matrix="1 0 0 1 -170.178 30.1775" transformations="translations" pos="310.693 578.759" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">2</text>
+<text matrix="1 0 0 1 -170.178 30.1775" transformations="translations" pos="375.332 578.49" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">3</text>
+<text matrix="1 0 0 1 -170.178 30.1775" transformations="translations" pos="272.179 660.635" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">4</text>
+<text matrix="1 0 0 1 -169.478 20.0238" transformations="translations" pos="296.419 724.197" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">5</text>
+<text matrix="1 0 0 1 -170.178 30.1775" transformations="translations" pos="375.332 689.453" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
+<text matrix="1 0 0 1 29.8225 -9.82249" transformations="translations" pos="360 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">0</text>
+<text matrix="1 0 0 1 19.8225 -9.82249" transformations="translations" pos="360 660" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">1</text>
+<text matrix="1 0 0 1 19.8225 -9.82249" transformations="translations" pos="360 640" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">2</text>
+<text matrix="1 0 0 1 19.8225 -9.82249" transformations="translations" pos="370 660" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">2</text>
+<text matrix="1 0 0 1 39.8225 -9.82249" transformations="translations" pos="380 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">1</text>
+<text matrix="1 0 0 1 39.8225 -9.82249" transformations="translations" pos="380 660" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">2</text>
+<text matrix="1 0 0 1 59.8225 -9.82249" transformations="translations" pos="400 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">2</text>
+<text matrix="1 0 0 1 29.8225 -9.82249" transformations="translations" pos="390 640" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">3</text>
+<text matrix="1 0 0 1 29.8225 -9.82249" transformations="translations" pos="400 660" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">3</text>
+<text matrix="1 0 0 1 39.8225 -9.82249" transformations="translations" pos="410 660" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">3</text>
+<text matrix="1 0 0 1 59.8225 -9.82249" transformations="translations" pos="430 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">3</text>
+<text matrix="1 0 0 1 19.8225 -9.82249" transformations="translations" pos="370 640" stroke="darkgray" type="label" width="4.981" height="6.42" depth="0" valign="baseline">4</text>
+<text matrix="1 0 0 1 19.8225 -9.82249" transformations="translations" pos="380 660" stroke="darkgray" type="label" width="4.981" height="6.42" depth="0" valign="baseline">4</text>
+<text matrix="1 0 0 1 29.8225 -9.82249" transformations="translations" pos="430 660" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">4</text>
+<text matrix="1 0 0 1 49.8225 -9.82249" transformations="translations" pos="460 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">4</text>
+<text matrix="1 0 0 1 19.8225 -9.82249" transformations="translations" pos="430 640" stroke="darkgray" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
+<text matrix="1 0 0 1 19.8225 -9.82249" transformations="translations" pos="450 660" stroke="darkgray" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
+<text matrix="1 0 0 1 29.8225 -9.82249" transformations="translations" pos="460 660" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
+<text matrix="1 0 0 1 39.8225 -9.82249" transformations="translations" pos="520 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
+<text matrix="1 0 0 1 159.822 -9.82249" transformations="translations" pos="300 640" stroke="darkgray" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
+<text matrix="1 0 0 1 169.822 -9.82249" transformations="translations" pos="350 660" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
+<text matrix="1 0 0 1 159.822 -9.82249" transformations="translations" pos="350 660" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">5</text>
+<text matrix="1 0 0 1 159.822 -9.82249" transformations="translations" pos="350 640" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
+<text matrix="1 0 0 1 159.822 -9.82249" transformations="translations" pos="380 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">5</text>
+<path matrix="1 0 0 1 159.822 -9.82249" stroke="black">
+4 0 0 4 320 704 e
+</path>
+<path matrix="1 0 0 1 159.822 -9.82249" stroke="black">
+322.919 706.788 m
+317.189 701.058 l
+317.189 701.203 l
+</path>
+<path matrix="1 0 0 1 159.822 -9.82249" stroke="black">
+317.551 706.934 m
+322.629 701.058 l
+</path>
+<path matrix="1 0 0 1 159.822 -9.82249" stroke="darkgray" arrow="normal/tiny">
+230 680 m
+240 670 l
+</path>
+<path matrix="1 0 0 1 189.822 -9.82249" stroke="black" arrow="normal/tiny">
+230 680 m
+240 670 l
+</path>
+<path matrix="1 0 0 1 229.822 -9.82249" stroke="darkgray" arrow="normal/tiny">
+230 680 m
+240 670 l
+</path>
+<path matrix="1 0 0 1 279.822 -9.82249" stroke="black" arrow="normal/tiny">
+230 680 m
+240 670 l
+</path>
+<path matrix="1 0 0 1 159.822 -9.82249" stroke="black" arrow="normal/tiny">
+230 680 m
+220 670 l
+</path>
+<path matrix="1 0 0 1 159.822 -9.82249" stroke="black" arrow="normal/tiny">
+230 680 m
+230 670 l
+</path>
+<path matrix="1 0 0 1 159.822 -9.82249" stroke="black" arrow="normal/tiny">
+220 660 m
+220 650 l
+</path>
+<path matrix="1 0 0 1 159.822 -9.82249" stroke="darkgray" arrow="normal/tiny">
+230 660 m
+230 650 l
+</path>
+<path matrix="1 0 0 1 159.822 -9.82249" stroke="black" arrow="normal/tiny">
+260 680 m
+260 670 l
+</path>
+<path matrix="1 0 0 1 159.822 -9.82249" stroke="black" arrow="normal/tiny">
+260 660 m
+260 650 l
+</path>
+<path matrix="1 0 0 1 159.822 -9.82249" stroke="black" arrow="normal/tiny">
+300 680 m
+300 670 l
+</path>
+<path matrix="1 0 0 1 159.822 -9.82249" stroke="black" arrow="normal/tiny">
+300 680 m
+290 670 l
+</path>
+<path matrix="1 0 0 1 159.822 -9.82249" stroke="darkgray" arrow="normal/tiny">
+290 660 m
+290 650 l
+</path>
+<path matrix="1 0 0 1 159.822 -9.82249" stroke="darkgray" arrow="normal/tiny">
+300 660 m
+300 650 l
+</path>
+<path matrix="1 0 0 1 159.822 -9.82249" stroke="black" arrow="normal/tiny">
+330 680 m
+330 670 l
+</path>
+<path matrix="1 0 0 1 159.822 -9.82249" stroke="black" arrow="normal/tiny">
+350 680 m
+350 670 l
+</path>
+<path matrix="1 0 0 1 159.822 -9.82249" stroke="black" arrow="normal/tiny">
+350 660 m
+350 650 l
+</path>
+<path matrix="1 0 0 1 159.822 -9.82249" stroke="black" arrow="normal/tiny">
+320 700 m
+240 690 l
+</path>
+<path matrix="1 0 0 1 159.822 -9.82249" stroke="black" arrow="normal/tiny">
+320 700 m
+270 690 l
+</path>
+<path matrix="1 0 0 1 159.822 -9.82249" stroke="black" arrow="normal/tiny">
+320 700 m
+310 690 l
+</path>
+<path matrix="1 0 0 1 159.822 -9.82249" stroke="black" arrow="normal/tiny">
+320 700 m
+330 690 l
+</path>
+<path matrix="1 0 0 1 159.822 -9.82249" stroke="black" arrow="normal/tiny">
+320 700 m
+350 690 l
+</path>
+<path matrix="1 0 0 1 159.822 -9.82249" stroke="black" arrow="normal/tiny">
+320 700 m
+380 690 l
+</path>
+<path matrix="1 0 0 1 159.822 -9.82249" stroke="black" arrow="normal/tiny">
+320 700 m
+400 690 l
+</path>
+<text matrix="1 0 0 1 181.065 -17.7515" transformations="translations" pos="180 620" stroke="black" type="label" width="108.04" height="6.926" depth="1.93" valign="baseline">Alpha complex structure</text>
+<path matrix="1 0 0 1 0.147054 -27.9437" stroke="darkgray">
+58.1341 0 0 58.1341 218.925 692.601 e
+</path>
+<path matrix="1 0 0 1 17.3033 -128.43" stroke="darkgray">
+58.1341 0 0 58.1341 218.925 692.601 e
+</path>
+<path matrix="1 0 0 1 -40.8179 81.2962" stroke="darkgray">
+58.1341 0 0 58.1341 218.925 692.601 e
+</path>
+<path matrix="1 0 0 1 -77.2312 -186.201" stroke="darkgray">
+58.1341 0 0 58.1341 218.925 692.601 e
+</path>
+<path matrix="1 0 0 1 -126.949 -81.8634" stroke="darkgray">
+58.1341 0 0 58.1341 218.925 692.601 e
+</path>
+<path matrix="1 0 0 1 -122.048 -59.1051" stroke="darkgray">
+58.1341 0 0 58.1341 218.925 692.601 e
+</path>
+<path matrix="1 0 0 1 -44.3191 -30.7447" stroke="darkgray">
+58.1341 0 0 58.1341 218.925 692.601 e
+</path>
+<path matrix="1 0 0 1 -146.206 42.0819" stroke="darkgray">
+58.1341 0 0 58.1341 218.925 692.601 e
+</path>
+<path matrix="1 0 0 1 -53.4225 -19.5406" stroke="darkgray">
+58.1341 0 0 58.1341 218.925 692.601 e
+</path>
+<path matrix="1 0 0 1 69.8225 30.1775" stroke="darkred">
+60 710 m
+40 660 l
+</path>
+<path matrix="1 0 0 1 69.8225 30.1775" stroke="darkred">
+40 660 m
+130 690 l
+</path>
+<path matrix="1 0 0 1 69.8225 30.1775" stroke="darkred">
+130 690 m
+60 710 l
+</path>
+<path matrix="1 0 0 1 69.8225 30.1775" stroke="darkred">
+40 660 m
+80 580 l
+</path>
+<path matrix="1 0 0 1 69.8225 30.1775" stroke="darkred">
+80 580 m
+130 580 l
+130 580 l
+</path>
+<path matrix="1 0 0 1 69.8225 30.1775" stroke="darkred">
+130 580 m
+110 520 l
+</path>
+<path matrix="1 0 0 1 69.8225 30.1775" stroke="darkred">
+110 520 m
+50 530 l
+50 530 l
+50 530 l
+</path>
+<path matrix="1 0 0 1 69.8225 30.1775" stroke="darkred">
+50 530 m
+80 580 l
+</path>
+<path matrix="1 0 0 1 69.8225 30.1775" stroke="darkred">
+130 580 m
+130 690 l
+</path>
+<use matrix="1 0 0 1 69.8225 30.1775" name="mark/fdisk(sfx)" pos="108.285 743.72" size="normal" stroke="darkgray" fill="white"/>
+<path matrix="1 0 0 1 69.8225 30.1775" stroke="darkgray">
+108.275 743.531 m
+166.45 743.531 l
+</path>
+<text matrix="1 0 0 1 69.8225 30.1775" transformations="translations" pos="127.397 746.763" stroke="darkgray" type="label" width="6.41" height="4.289" depth="0" valign="baseline">$\alpha$</text>
+<text matrix="1 0 0 1 180.473 -27.8112" transformations="translations" pos="180 620" stroke="darkgray" type="label" width="132.41" height="6.926" depth="1.93" valign="baseline">filtration value $&gt; \alpha$ are greyed</text>
+<path stroke="black">
+280 660 m
+300 680 l
+</path>
+<path stroke="black">
+280 660 m
+300 640 l
+</path>
+<path stroke="black">
+370 660 m
+350 680 l
+</path>
+<path stroke="black">
+370 660 m
+350 640 l
+</path>
+<path stroke="black">
+290 670 m
+360 670 l
+</path>
+<path stroke="black">
+290 650 m
+360 650 l
+</path>
+<text matrix="1 0 0 1 2.36686 -1.77515" transformations="translations" pos="300 660" stroke="black" type="label" width="44.002" height="6.926" depth="1.93" valign="baseline">equivalent</text>
+</page>
+</ipe>
diff --git a/src/Alpha_complex/doc/alpha_complex_doc_alpha_shape.png b/src/Alpha_complex/doc/alpha_complex_doc_alpha_shape.png
new file mode 100644
index 00000000..2d5f59a3
--- /dev/null
+++ b/src/Alpha_complex/doc/alpha_complex_doc_alpha_shape.png
Binary files differ
diff --git a/src/Alpha_complex/example/Alpha_complex_from_off.cpp b/src/Alpha_complex/example/Alpha_complex_from_off.cpp
new file mode 100644
index 00000000..e140fe3d
--- /dev/null
+++ b/src/Alpha_complex/example/Alpha_complex_from_off.cpp
@@ -0,0 +1,48 @@
+#include <iostream>
+#include <string>
+
+// to construct a Delaunay_triangulation from an OFF file
+#include <gudhi/Delaunay_triangulation_off_io.h>
+#include <gudhi/Alpha_complex.h>
+
+void usage(char * const progName) {
+ std::cerr << "Usage: " << progName << " filename.off alpha_square_max_value" << std::endl;
+ std::cerr << " i.e.: " << progName << " ../../data/points/alphacomplexdoc.off 60.0" << std::endl;
+ exit(-1); // ----- >>
+}
+
+int main(int argc, char **argv) {
+ if (argc != 3) {
+ std::cerr << "Error: Number of arguments (" << argc << ") is not correct" << std::endl;
+ usage(argv[0]);
+ }
+
+ std::string off_file_name(argv[1]);
+ double alpha_square_max_value = atof(argv[2]);
+
+ // ----------------------------------------------------------------------------
+ // Init of an alpha complex from an OFF file
+ // ----------------------------------------------------------------------------
+ typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Kernel;
+ Gudhi::alphacomplex::Alpha_complex<Kernel> alpha_complex_from_file(off_file_name, alpha_square_max_value);
+
+ // ----------------------------------------------------------------------------
+ // Display information about the alpha complex
+ // ----------------------------------------------------------------------------
+ std::cout << "Alpha complex is of dimension " << alpha_complex_from_file.dimension() <<
+ " - " << alpha_complex_from_file.num_simplices() << " simplices - " <<
+ alpha_complex_from_file.num_vertices() << " vertices." << std::endl;
+
+ std::cout << "Iterator on alpha complex simplices in the filtration order, with [filtration value]:" << std::endl;
+ for (auto f_simplex : alpha_complex_from_file.filtration_simplex_range()) {
+ if (alpha_complex_from_file.filtration(f_simplex) <= alpha_complex_from_file.filtration()) {
+ std::cout << " ( ";
+ for (auto vertex : alpha_complex_from_file.simplex_vertex_range(f_simplex)) {
+ std::cout << vertex << " ";
+ }
+ std::cout << ") -> " << "[" << alpha_complex_from_file.filtration(f_simplex) << "] ";
+ std::cout << std::endl;
+ }
+ }
+ return 0;
+}
diff --git a/src/Alpha_complex/example/Alpha_complex_from_points.cpp b/src/Alpha_complex/example/Alpha_complex_from_points.cpp
new file mode 100644
index 00000000..e2610dbd
--- /dev/null
+++ b/src/Alpha_complex/example/Alpha_complex_from_points.cpp
@@ -0,0 +1,55 @@
+#include <stdlib.h>
+#include <CGAL/Delaunay_triangulation.h>
+#include <CGAL/Epick_d.h>
+
+#include <iostream>
+#include <string>
+#include <vector>
+
+// to construct a Delaunay_triangulation from an OFF file
+#include "gudhi/Delaunay_triangulation_off_io.h"
+#include "gudhi/Alpha_complex.h"
+
+typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Kernel;
+typedef Kernel::Point_d Point;
+typedef std::vector<Point> Vector_of_points;
+
+int main(int argc, char **argv) {
+ // ----------------------------------------------------------------------------
+ // Init of a list of points
+ // ----------------------------------------------------------------------------
+ Vector_of_points points;
+
+ std::vector<double> coords = { 0.0, 0.0, 0.0, 1.0 };
+ points.push_back(Point(coords.begin(), coords.end()));
+ coords = { 0.0, 0.0, 1.0, 0.0 };
+ points.push_back(Point(coords.begin(), coords.end()));
+ coords = { 0.0, 1.0, 0.0, 0.0 };
+ points.push_back(Point(coords.begin(), coords.end()));
+ coords = { 1.0, 0.0, 0.0, 0.0 };
+ points.push_back(Point(coords.begin(), coords.end()));
+
+ // ----------------------------------------------------------------------------
+ // Init of an alpha complex from the list of points
+ // ----------------------------------------------------------------------------
+ double max_alpha_square_value = 1e10;
+ Gudhi::alphacomplex::Alpha_complex<Kernel> alpha_complex_from_points(3, points, max_alpha_square_value);
+
+ // ----------------------------------------------------------------------------
+ // Display information about the alpha complex
+ // ----------------------------------------------------------------------------
+ std::cout << "Alpha complex is of dimension " << alpha_complex_from_points.dimension() <<
+ " - " << alpha_complex_from_points.num_simplices() << " simplices - " <<
+ alpha_complex_from_points.num_vertices() << " vertices." << std::endl;
+
+ std::cout << "Iterator on alpha complex simplices in the filtration order, with [filtration value]:" << std::endl;
+ for (auto f_simplex : alpha_complex_from_points.filtration_simplex_range()) {
+ std::cout << " ( ";
+ for (auto vertex : alpha_complex_from_points.simplex_vertex_range(f_simplex)) {
+ std::cout << vertex << " ";
+ }
+ std::cout << ") -> " << "[" << alpha_complex_from_points.filtration(f_simplex) << "] ";
+ std::cout << std::endl;
+ }
+ return 0;
+}
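
Since the four points above are affinely independent, the Delaunay triangulation consists of a single tetrahedron, so the resulting complex contains every non-empty subset of the points: 2^4 - 1 = 15 simplices. A minimal sketch of that count (the value 4 simply mirrors the number of points used in this example):

#include <iostream>

int main() {
  // Number of non-empty subsets of n affinely independent points.
  int n = 4;
  std::cout << (1 << n) - 1 << std::endl;  // prints 15
  return 0;
}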
diff --git a/src/Alpha_complex/example/CMakeLists.txt b/src/Alpha_complex/example/CMakeLists.txt
new file mode 100644
index 00000000..10b87f04
--- /dev/null
+++ b/src/Alpha_complex/example/CMakeLists.txt
@@ -0,0 +1,46 @@
+cmake_minimum_required(VERSION 2.6)
+project(GUDHIAlphaShapesExample)
+
+# need CGAL 4.7
+# cmake -DCGAL_DIR=~/workspace/CGAL-4.7-Ic-41 ../../..
+if(CGAL_FOUND)
+ if (NOT CGAL_VERSION VERSION_LESS 4.7.0)
+ message(STATUS "CGAL version: ${CGAL_VERSION}.")
+
+ include( ${CGAL_USE_FILE} )
+ # When include(${CGAL_USE_FILE}) is called, CXX_FLAGS are overwritten.
+ # cf. http://doc.cgal.org/latest/Manual/installation.html#title40
+ # A workaround is to add "-std=c++11" again.
+ # A fix would be to use https://cmake.org/cmake/help/v3.1/prop_gbl/CMAKE_CXX_KNOWN_FEATURES.html
+ # or even better https://cmake.org/cmake/help/v3.1/variable/CMAKE_CXX_STANDARD.html
+ # but that requires CMake 3.1 or later.
+ if(NOT MSVC)
+ include(CheckCXXCompilerFlag)
+ CHECK_CXX_COMPILER_FLAG(-std=c++11 COMPILER_SUPPORTS_CXX11)
+ if(COMPILER_SUPPORTS_CXX11)
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11")
+ endif()
+ endif()
+ # - End of workaround
+
+ find_package(Eigen3 3.1.0)
+ if (EIGEN3_FOUND)
+ message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.")
+ include( ${EIGEN3_USE_FILE} )
+ if (CMAKE_BUILD_TYPE MATCHES Debug)
+ # For programs to be more verbose
+ add_definitions(-DDEBUG_TRACES)
+ endif()
+
+ add_executable ( alphaoffreader Alpha_complex_from_off.cpp )
+ target_link_libraries(alphaoffreader ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY})
+
+ add_executable ( alphapoints Alpha_complex_from_points.cpp )
+ target_link_libraries(alphapoints ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY})
+ else()
+ message(WARNING "Eigen3 not found. Version 3.1.0 is required for Alpha shapes feature.")
+ endif()
+ else()
+ message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile Alpha shapes feature. Version 4.7.0 is required.")
+ endif ()
+endif()
diff --git a/src/Alpha_complex/example/alphaoffreader_for_doc.txt b/src/Alpha_complex/example/alphaoffreader_for_doc.txt
new file mode 100644
index 00000000..71f29a00
--- /dev/null
+++ b/src/Alpha_complex/example/alphaoffreader_for_doc.txt
@@ -0,0 +1,27 @@
+Alpha complex is of dimension 2 - 25 simplices - 7 vertices.
+Iterator on alpha complex simplices in the filtration order, with [filtration value]:
+ ( 0 ) -> [0]
+ ( 1 ) -> [0]
+ ( 2 ) -> [0]
+ ( 3 ) -> [0]
+ ( 4 ) -> [0]
+ ( 5 ) -> [0]
+ ( 6 ) -> [0]
+ ( 3 2 ) -> [6.25]
+ ( 5 4 ) -> [7.25]
+ ( 2 0 ) -> [8.5]
+ ( 1 0 ) -> [9.25]
+ ( 3 1 ) -> [10]
+ ( 2 1 ) -> [11.25]
+ ( 3 2 1 ) -> [12.5]
+ ( 2 1 0 ) -> [12.9959]
+ ( 6 5 ) -> [13.25]
+ ( 4 2 ) -> [20]
+ ( 6 4 ) -> [22.7367]
+ ( 6 5 4 ) -> [22.7367]
+ ( 6 3 ) -> [30.25]
+ ( 6 2 ) -> [36.5]
+ ( 6 3 2 ) -> [36.5]
+ ( 6 4 2 ) -> [37.2449]
+ ( 4 0 ) -> [59.7107]
+ ( 4 2 0 ) -> [59.7107]
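
For a Gabriel edge {p, q}, the filtration value listed above is the squared radius of the smallest ball enclosing the edge, i.e. (|pq| / 2)^2; an edge value of 6.25 therefore corresponds to two points at distance 5. A minimal sketch of that computation, using hypothetical coordinates rather than the actual contents of alphacomplexdoc.off:

#include <cmath>
#include <iostream>

int main() {
  // Hypothetical 2D points p and q at distance 5 (3-4-5 triangle).
  double px = 0.0, py = 0.0;
  double qx = 3.0, qy = 4.0;
  // The smallest ball enclosing {p, q} has radius |pq| / 2.
  double half_dist = std::sqrt((qx - px) * (qx - px) + (qy - py) * (qy - py)) / 2.0;
  std::cout << half_dist * half_dist << std::endl;  // prints 6.25
  return 0;
}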
diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex.h b/src/Alpha_complex/include/gudhi/Alpha_complex.h
new file mode 100644
index 00000000..6b47ace7
--- /dev/null
+++ b/src/Alpha_complex/include/gudhi/Alpha_complex.h
@@ -0,0 +1,364 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2015 INRIA Saclay (France)
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef ALPHA_COMPLEX_H_
+#define ALPHA_COMPLEX_H_
+
+// to construct a simplex_tree from Delaunay_triangulation
+#include <gudhi/graph_simplicial_complex.h>
+#include <gudhi/Simplex_tree.h>
+
+#include <stdlib.h>
+#include <math.h> // isnan, fmin
+
+#include <CGAL/Delaunay_triangulation.h>
+#include <CGAL/Epick_d.h>
+#include <CGAL/algorithm.h>
+#include <CGAL/assertions.h>
+#include <CGAL/enum.h>
+
+#include <iostream>
+#include <vector>
+#include <string>
+#include <limits> // NaN
+#include <map>
+#include <utility> // std::pair
+
+namespace Gudhi {
+
+namespace alphacomplex {
+
+/**
+ * \brief Alpha complex data structure.
+ *
+ * \details
+ * The data structure can be constructed from a CGAL Delaunay triangulation (for more information on CGAL Delaunay
+ * triangulations, please refer to the corresponding chapter at http://doc.cgal.org/latest/Triangulation/) or from
+ * an OFF file (cf. Delaunay_triangulation_off_reader).
+ *
+ * Please refer to \ref alpha_complex for examples.
+ *
+ */
+template<class Kernel>
+class Alpha_complex : public Simplex_tree<> {
+ private:
+ // From Simplex_tree
+ // Type required to insert into a simplex_tree (with or without subfaces).
+ typedef std::vector<Vertex_handle> Vector_vertex;
+
+ // Simplex_result is the type returned from simplex_tree insert function.
+ typedef typename std::pair<Simplex_handle, bool> Simplex_result;
+
+ // Delaunay_triangulation type required to create an alpha-complex.
+ typedef typename CGAL::Delaunay_triangulation<Kernel> Delaunay_triangulation;
+
+ typedef typename Kernel::Compute_squared_radius_d Squared_Radius;
+ typedef typename Kernel::Side_of_bounded_sphere_d Is_Gabriel;
+
+ typedef typename Kernel::Point_d Point_d;
+
+ // Type required to compute squared radius, or side of bounded sphere on a vector of points.
+ typedef typename std::vector<Point_d> Vector_of_CGAL_points;
+
+ // Vertex_iterator type from CGAL.
+ typedef typename Delaunay_triangulation::Vertex_iterator CGAL_vertex_iterator;
+
+ // size_type type from CGAL.
+ typedef typename Delaunay_triangulation::size_type size_type;
+
+ // Double map type to switch from CGAL vertex iterator to simplex tree vertex handle and vice versa.
+ typedef typename std::map< CGAL_vertex_iterator, Vertex_handle > Map_vertex_iterator_to_handle;
+ typedef typename std::map< Vertex_handle, CGAL_vertex_iterator > Map_vertex_handle_to_iterator;
+
+ private:
+ /** \brief Map to switch from CGAL vertex iterator to simplex tree vertex handle.*/
+ Map_vertex_iterator_to_handle vertex_iterator_to_handle_;
+ /** \brief Map to switch from simplex tree vertex handle to CGAL vertex iterator.*/
+ Map_vertex_handle_to_iterator vertex_handle_to_iterator_;
+ /** \brief Pointer on the CGAL Delaunay triangulation.*/
+ Delaunay_triangulation* triangulation_;
+ /** \brief Kernel for triangulation_ functions access.*/
+ Kernel kernel_;
+
+ public:
+ /** \brief Alpha_complex constructor from an OFF file name.
+ * Uses the Delaunay_triangulation_off_reader to construct the Delaunay triangulation required to initialize
+ * the Alpha_complex.
+ *
+ * @param[in] off_file_name OFF file [path and] name.
+ */
+ Alpha_complex(const std::string& off_file_name,
+ Filtration_value max_alpha_square = std::numeric_limits<Filtration_value>::infinity())
+ : triangulation_(nullptr) {
+ Gudhi::Delaunay_triangulation_off_reader<Delaunay_triangulation> off_reader(off_file_name);
+ if (!off_reader.is_valid()) {
+ std::cerr << "Alpha_complex - Unable to read file " << off_file_name << std::endl;
+ exit(-1); // ----- >>
+ }
+ triangulation_ = off_reader.get_complex();
+ set_filtration(max_alpha_square);
+ init();
+ }
+
+ /** \brief Alpha_complex constructor from a Delaunay triangulation.
+ *
+ * @param[in] triangulation_ptr Pointer on a Delaunay triangulation.
+ */
+ Alpha_complex(Delaunay_triangulation* triangulation_ptr,
+ Filtration_value max_alpha_square = std::numeric_limits<Filtration_value>::infinity())
+ : triangulation_(triangulation_ptr) {
+ set_filtration(max_alpha_square);
+ init();
+ }
+
+ /** \brief Alpha_complex constructor from a list of points.
+ * Builds a Delaunay triangulation from the given range of points, which is then used to initialize
+ * the Alpha_complex.
+ *
+ * @param[in] dimension Dimension of points to be inserted.
+ * @param[in] points Range of points to triangulate. Points must be in Kernel::Point_d
+ *
+ * The type InputPointRange must be a range for which std::begin and
+ * std::end return input iterators on a Kernel::Point_d.
+ */
+ template<typename InputPointRange >
+ Alpha_complex(int dimension, const InputPointRange& points,
+ Filtration_value max_alpha_square = std::numeric_limits<Filtration_value>::infinity())
+ : triangulation_(nullptr) {
+ triangulation_ = new Delaunay_triangulation(dimension);
+ auto first = std::begin(points);
+ auto last = std::end(points);
+
+ size_type inserted = triangulation_->insert(first, last);
+ if (inserted != (last - first)) {
+ std::cerr << "Alpha_complex - insertion failed " << inserted << " != " << (last - first) << std::endl;
+ exit(-1); // ----- >>
+ }
+ set_filtration(max_alpha_square);
+ init();
+ }
+
+ /** \brief Alpha_complex destructor.
+ *
+ * @warning Deletes the Delaunay triangulation.
+ */
+ ~Alpha_complex() {
+ delete triangulation_;
+ }
+
+ /** \brief get_point returns the point corresponding to the vertex given as parameter.
+ *
+ * @param[in] vertex Vertex handle of the point to retrieve.
+ * @return The point found.
+ * @warning Exception std::out_of_range is thrown in case vertex is not in the map vertex_handle_to_iterator_.
+ */
+ Point_d get_point(Vertex_handle vertex) const {
+ auto found_it = vertex_handle_to_iterator_.find(vertex);
+ if (found_it != vertex_handle_to_iterator_.end()) {
+ return found_it->second->point();
+ } else {
+ throw std::out_of_range("Vertex out of vector range");
+ }
+ }
+
+ private:
+ /** \brief Initialize the Alpha_complex from the Delaunay triangulation.
+ *
+ * @warning The Delaunay triangulation must already be constructed, contain at least one vertex, and have a maximal
+ * dimension greater than 0.
+ *
+ * Initialization can only be performed once.
+ */
+ void init() {
+ if (triangulation_ == nullptr) {
+ std::cerr << "Alpha_complex init - Cannot init from a NULL triangulation" << std::endl;
+ return; // ----- >>
+ }
+ if (triangulation_->number_of_vertices() < 1) {
+ std::cerr << "Alpha_complex init - Cannot init from a triangulation without vertices" << std::endl;
+ return; // ----- >>
+ }
+ if (triangulation_->maximal_dimension() < 1) {
+ std::cerr << "Alpha_complex init - Cannot init from a zero-dimension triangulation" << std::endl;
+ return; // ----- >>
+ }
+ if (num_vertices() > 0) {
+ std::cerr << "Alpha_complex init - Cannot init twice" << std::endl;
+ return; // ----- >>
+ }
+
+ set_dimension(triangulation_->maximal_dimension());
+
+ // --------------------------------------------------------------------------------------------
+ // double map to retrieve simplex tree vertex handles from CGAL vertex iterator and vice versa
+ // Start to insert at handle = 0 - default integer value
+ Vertex_handle vertex_handle = Vertex_handle();
+ // Loop on triangulation vertices list
+ for (CGAL_vertex_iterator vit = triangulation_->vertices_begin(); vit != triangulation_->vertices_end(); ++vit) {
+ if (!triangulation_->is_infinite(*vit)) {
+#ifdef DEBUG_TRACES
+ std::cout << "Vertex insertion - " << vertex_handle << " -> " << vit->point() << std::endl;
+#endif // DEBUG_TRACES
+ vertex_iterator_to_handle_.emplace(vit, vertex_handle);
+ vertex_handle_to_iterator_.emplace(vertex_handle, vit);
+ vertex_handle++;
+ }
+ }
+ // --------------------------------------------------------------------------------------------
+
+ // --------------------------------------------------------------------------------------------
+ // Simplex_tree construction from loop on triangulation finite full cells list
+ for (auto cit = triangulation_->finite_full_cells_begin(); cit != triangulation_->finite_full_cells_end(); ++cit) {
+ Vector_vertex vertexVector;
+#ifdef DEBUG_TRACES
+ std::cout << "Simplex_tree insertion ";
+#endif // DEBUG_TRACES
+ for (auto vit = cit->vertices_begin(); vit != cit->vertices_end(); ++vit) {
+ if (*vit != nullptr) {
+#ifdef DEBUG_TRACES
+ std::cout << " " << vertex_iterator_to_handle_[*vit];
+#endif // DEBUG_TRACES
+ // Vector of vertex construction for simplex_tree structure
+ vertexVector.push_back(vertex_iterator_to_handle_[*vit]);
+ }
+ }
+#ifdef DEBUG_TRACES
+ std::cout << std::endl;
+#endif // DEBUG_TRACES
+ // Insert each simplex and its subfaces in the simplex tree - filtration is NaN
+ Simplex_result insert_result = insert_simplex_and_subfaces(vertexVector,
+ std::numeric_limits<double>::quiet_NaN());
+ }
+ // --------------------------------------------------------------------------------------------
+
+ // --------------------------------------------------------------------------------------------
+ // ### For i : d -> 0
+ for (int decr_dim = dimension(); decr_dim >= 0; decr_dim--) {
+ // ### Foreach Sigma of dim i
+ for (auto f_simplex : skeleton_simplex_range(decr_dim)) {
+ int f_simplex_dim = dimension(f_simplex);
+ if (decr_dim == f_simplex_dim) {
+ Vector_of_CGAL_points pointVector;
+#ifdef DEBUG_TRACES
+ std::cout << "Sigma of dim " << decr_dim << " is";
+#endif // DEBUG_TRACES
+ for (auto vertex : simplex_vertex_range(f_simplex)) {
+ pointVector.push_back(get_point(vertex));
+#ifdef DEBUG_TRACES
+ std::cout << " " << vertex;
+#endif // DEBUG_TRACES
+ }
+#ifdef DEBUG_TRACES
+ std::cout << std::endl;
+#endif // DEBUG_TRACES
+ // ### If filt(Sigma) is NaN : filt(Sigma) = alpha(Sigma)
+ if (isnan(filtration(f_simplex))) {
+ Filtration_value alpha_complex_filtration = 0.0;
+ // No need to compute squared_radius on a single point - alpha is 0.0
+ if (f_simplex_dim > 0) {
+ // squared_radius function initialization
+ Squared_Radius squared_radius = kernel_.compute_squared_radius_d_object();
+
+ alpha_complex_filtration = squared_radius(pointVector.begin(), pointVector.end());
+ }
+ assign_filtration(f_simplex, alpha_complex_filtration);
+#ifdef DEBUG_TRACES
+ std::cout << "filt(Sigma) is NaN : filt(Sigma) =" << filtration(f_simplex) << std::endl;
+#endif // DEBUG_TRACES
+ }
+ propagate_alpha_filtration(f_simplex, decr_dim);
+ }
+ }
+ }
+ // --------------------------------------------------------------------------------------------
+ }
+
+ template<typename Simplex_handle>
+ void propagate_alpha_filtration(Simplex_handle f_simplex, int decr_dim) {
+ // ### Foreach Tau face of Sigma
+ for (auto f_boundary : boundary_simplex_range(f_simplex)) {
+#ifdef DEBUG_TRACES
+ std::cout << " | --------------------------------------------------\n";
+ std::cout << " | Tau ";
+ for (auto vertex : simplex_vertex_range(f_boundary)) {
+ std::cout << vertex << " ";
+ }
+ std::cout << "is a face of Sigma\n";
+ std::cout << " | isnan(filtration(Tau)=" << isnan(filtration(f_boundary)) << std::endl;
+#endif // DEBUG_TRACES
+ // ### If filt(Tau) is not NaN
+ if (!isnan(filtration(f_boundary))) {
+ // ### filt(Tau) = fmin(filt(Tau), filt(Sigma))
+ Filtration_value alpha_complex_filtration = fmin(filtration(f_boundary), filtration(f_simplex));
+ assign_filtration(f_boundary, alpha_complex_filtration);
+#ifdef DEBUG_TRACES
+ std::cout << " | filt(Tau) = fmin(filt(Tau), filt(Sigma)) = " << filtration(f_boundary) << std::endl;
+#endif // DEBUG_TRACES
+ // ### Else
+ } else {
+ // No need to compute is_gabriel when Sigma is an edge, as its faces Tau are single vertices
+ // i.e. : Sigma = (3,1) => Tau = 1
+ if (decr_dim > 1) {
+ // insert the Tau points in a vector for is_gabriel function
+ Vector_of_CGAL_points pointVector;
+ Vertex_handle vertexForGabriel = Vertex_handle();
+ for (auto vertex : simplex_vertex_range(f_boundary)) {
+ pointVector.push_back(get_point(vertex));
+ }
+ // Retrieve the Sigma point that is not part of Tau - parameter for is_gabriel function
+ Point_d point_for_gabriel;
+ for (auto vertex : simplex_vertex_range(f_simplex)) {
+ point_for_gabriel = get_point(vertex);
+ if (std::find(pointVector.begin(), pointVector.end(), point_for_gabriel) == pointVector.end()) {
+ // vertex is not found in Tau
+ vertexForGabriel = vertex;
+ // No need to continue loop
+ break;
+ }
+ }
+ // is_gabriel function initialization
+ Is_Gabriel is_gabriel = kernel_.side_of_bounded_sphere_d_object();
+ bool is_gab = is_gabriel(pointVector.begin(), pointVector.end(), point_for_gabriel)
+ != CGAL::ON_BOUNDED_SIDE;
+#ifdef DEBUG_TRACES
+ std::cout << " | Tau is_gabriel(Sigma)=" << is_gab << " - vertexForGabriel=" << vertexForGabriel << std::endl;
+#endif // DEBUG_TRACES
+ // ### If Tau is not Gabriel of Sigma
+ if (false == is_gab) {
+ // ### filt(Tau) = filt(Sigma)
+ Filtration_value alpha_complex_filtration = filtration(f_simplex);
+ assign_filtration(f_boundary, alpha_complex_filtration);
+#ifdef DEBUG_TRACES
+ std::cout << " | filt(Tau) = filt(Sigma) = " << filtration(f_boundary) << std::endl;
+#endif // DEBUG_TRACES
+ }
+ }
+ }
+ }
+ }
+};
+
+} // namespace alphacomplex
+
+} // namespace Gudhi
+
+#endif // ALPHA_COMPLEX_H_
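
A minimal usage sketch of the interface declared above, combining the point-range constructor and get_point; the coordinates are illustrative only and the squared alpha threshold of 1.0 is an arbitrary choice:

// to construct a Delaunay_triangulation from an OFF file
#include <gudhi/Delaunay_triangulation_off_io.h>
#include <gudhi/Alpha_complex.h>

#include <CGAL/Epick_d.h>

#include <iostream>
#include <vector>

typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Kernel;
typedef Kernel::Point_d Point;

int main() {
  // Three illustrative points forming a right triangle in the plane.
  std::vector<Point> points;
  std::vector<double> coords = { 0.0, 0.0 };
  points.push_back(Point(coords.begin(), coords.end()));
  coords = { 1.0, 0.0 };
  points.push_back(Point(coords.begin(), coords.end()));
  coords = { 0.0, 1.0 };
  points.push_back(Point(coords.begin(), coords.end()));

  // Ambient dimension is 2, maximal squared alpha value is 1.0.
  Gudhi::alphacomplex::Alpha_complex<Kernel> alpha(2, points, 1.0);

  std::cout << alpha.num_vertices() << " vertices - " << alpha.num_simplices() << " simplices - "
      << "point 0 is " << alpha.get_point(0) << std::endl;
  return 0;
}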
diff --git a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp
new file mode 100644
index 00000000..3aa835ec
--- /dev/null
+++ b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp
@@ -0,0 +1,213 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2015 INRIA Saclay (France)
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#define BOOST_TEST_DYN_LINK
+#define BOOST_TEST_MODULE "alpha_complex"
+#include <boost/test/unit_test.hpp>
+
+#include <CGAL/Delaunay_triangulation.h>
+#include <CGAL/Epick_d.h>
+
+#include <cmath> // float comparison
+#include <limits>
+#include <string>
+#include <vector>
+
+// to construct a Delaunay_triangulation from an OFF file
+#include <gudhi/Delaunay_triangulation_off_io.h>
+#include <gudhi/Alpha_complex.h>
+
+// Use dynamic_dimension_tag for the user to be able to set dimension
+typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Kernel_d;
+// The triangulation uses the default instantiation of the TriangulationDataStructure template parameter
+
+BOOST_AUTO_TEST_CASE(ALPHA_DOC_OFF_file) {
+ // ----------------------------------------------------------------------------
+ //
+ // Init of an alpha-complex from an OFF file
+ //
+ // ----------------------------------------------------------------------------
+ std::string off_file_name("alphacomplexdoc.off");
+ double max_alpha_square_value = 60.0;
+ std::cout << "========== OFF FILE NAME = " << off_file_name << " - alpha²=" <<
+ max_alpha_square_value << "==========" << std::endl;
+
+ Gudhi::alphacomplex::Alpha_complex<Kernel_d> alpha_complex_from_file(off_file_name, max_alpha_square_value);
+
+ const int DIMENSION = 2;
+ std::cout << "alpha_complex_from_file.dimension()=" << alpha_complex_from_file.dimension() << std::endl;
+ BOOST_CHECK(alpha_complex_from_file.dimension() == DIMENSION);
+
+ const int NUMBER_OF_VERTICES = 7;
+ std::cout << "alpha_complex_from_file.num_vertices()=" << alpha_complex_from_file.num_vertices() << std::endl;
+ BOOST_CHECK(alpha_complex_from_file.num_vertices() == NUMBER_OF_VERTICES);
+
+ const int NUMBER_OF_SIMPLICES = 25;
+ std::cout << "alpha_complex_from_file.num_simplices()=" << alpha_complex_from_file.num_simplices() << std::endl;
+ BOOST_CHECK(alpha_complex_from_file.num_simplices() == NUMBER_OF_SIMPLICES);
+
+}
+
+BOOST_AUTO_TEST_CASE(ALPHA_DOC_OFF_file_filtered) {
+ // ----------------------------------------------------------------------------
+ //
+ // Init of an alpha-complex from an OFF file
+ //
+ // ----------------------------------------------------------------------------
+ std::string off_file_name("alphacomplexdoc.off");
+ double max_alpha_square_value = 59.0;
+ std::cout << "========== OFF FILE NAME = " << off_file_name << " - alpha²=" <<
+ max_alpha_square_value << "==========" << std::endl;
+
+ Gudhi::alphacomplex::Alpha_complex<Kernel_d> alpha_complex_from_file(off_file_name, max_alpha_square_value);
+
+ const int DIMENSION = 2;
+ std::cout << "alpha_complex_from_file.dimension()=" << alpha_complex_from_file.dimension() << std::endl;
+ BOOST_CHECK(alpha_complex_from_file.dimension() == DIMENSION);
+
+ const int NUMBER_OF_VERTICES = 7;
+ std::cout << "alpha_complex_from_file.num_vertices()=" << alpha_complex_from_file.num_vertices() << std::endl;
+ BOOST_CHECK(alpha_complex_from_file.num_vertices() == NUMBER_OF_VERTICES);
+
+ const int NUMBER_OF_SIMPLICES = 25;
+ std::cout << "alpha_complex_from_file.num_simplices()=" << alpha_complex_from_file.num_simplices() << std::endl;
+ BOOST_CHECK(alpha_complex_from_file.num_simplices() == NUMBER_OF_SIMPLICES);
+
+ int num_filtered_simplices = 0;
+ for (auto f_simplex : alpha_complex_from_file.filtration_simplex_range()) {
+ if (alpha_complex_from_file.filtration(f_simplex) <= alpha_complex_from_file.filtration()) {
+ num_filtered_simplices++;
+ }
+ }
+ const int NUMBER_OF_FILTERED_SIMPLICES = 23;
+ std::cout << "num_filtered_simplices=" << num_filtered_simplices << std::endl;
+ BOOST_CHECK(num_filtered_simplices == NUMBER_OF_FILTERED_SIMPLICES);
+
+}
+
+bool are_almost_the_same(float a, float b) {
+ return std::fabs(a - b) < std::numeric_limits<float>::epsilon();
+}
+
+// Use a static dimension tag (Dimension_tag<4>) since the test points below live in R^4
+typedef CGAL::Epick_d< CGAL::Dimension_tag<4> > Kernel_s;
+typedef Kernel_s::Point_d Point;
+typedef std::vector<Point> Vector_of_points;
+
+
+bool is_point_in_list(Vector_of_points points_list, Point point) {
+ for (auto& point_in_list : points_list) {
+ if (point_in_list == point) {
+ return true; // point found
+ }
+ }
+ return false; // point not found
+}
+
+BOOST_AUTO_TEST_CASE(Alpha_complex_from_points) {
+ // ----------------------------------------------------------------------------
+ // Init of a list of points
+ // ----------------------------------------------------------------------------
+ Vector_of_points points;
+ std::vector<double> coords = { 0.0, 0.0, 0.0, 1.0 };
+ points.push_back(Point(coords.begin(), coords.end()));
+ coords = { 0.0, 0.0, 1.0, 0.0 };
+ points.push_back(Point(coords.begin(), coords.end()));
+ coords = { 0.0, 1.0, 0.0, 0.0 };
+ points.push_back(Point(coords.begin(), coords.end()));
+ coords = { 1.0, 0.0, 0.0, 0.0 };
+ points.push_back(Point(coords.begin(), coords.end()));
+
+ // ----------------------------------------------------------------------------
+ // Init of an alpha complex from the list of points
+ // ----------------------------------------------------------------------------
+ double max_alpha_square_value = 1e10;
+ Gudhi::alphacomplex::Alpha_complex<Kernel_s> alpha_complex_from_points(3, points, max_alpha_square_value);
+
+ std::cout << "========== Alpha_complex_from_points ==========" << std::endl;
+
+ // Another way to check num_simplices
+ std::cout << "Iterator on alpha complex simplices in the filtration order, with [filtration value]:" << std::endl;
+ int num_simplices = 0;
+ for (auto f_simplex : alpha_complex_from_points.filtration_simplex_range()) {
+ num_simplices++;
+ std::cout << " ( ";
+ for (auto vertex : alpha_complex_from_points.simplex_vertex_range(f_simplex)) {
+ std::cout << vertex << " ";
+ }
+ std::cout << ") -> " << "[" << alpha_complex_from_points.filtration(f_simplex) << "] ";
+ std::cout << std::endl;
+ }
+ BOOST_CHECK(num_simplices == 15);
+ std::cout << "alpha_complex_from_points.num_simplices()=" << alpha_complex_from_points.num_simplices() << std::endl;
+ BOOST_CHECK(alpha_complex_from_points.num_simplices() == 15);
+
+ std::cout << "alpha_complex_from_points.dimension()=" << alpha_complex_from_points.dimension() << std::endl;
+ BOOST_CHECK(alpha_complex_from_points.dimension() == 4);
+ std::cout << "alpha_complex_from_points.num_vertices()=" << alpha_complex_from_points.num_vertices() << std::endl;
+ BOOST_CHECK(alpha_complex_from_points.num_vertices() == 4);
+
+ for (auto f_simplex : alpha_complex_from_points.filtration_simplex_range()) {
+ switch (alpha_complex_from_points.dimension(f_simplex)) {
+ case 0:
+ BOOST_CHECK(are_almost_the_same(alpha_complex_from_points.filtration(f_simplex), 0.0));
+ break;
+ case 1:
+ BOOST_CHECK(are_almost_the_same(alpha_complex_from_points.filtration(f_simplex), 1.0/2.0));
+ break;
+ case 2:
+ BOOST_CHECK(are_almost_the_same(alpha_complex_from_points.filtration(f_simplex), 2.0/3.0));
+ break;
+ case 3:
+ BOOST_CHECK(are_almost_the_same(alpha_complex_from_points.filtration(f_simplex), 3.0/4.0));
+ break;
+ default:
+ BOOST_CHECK(false); // Shall not happen
+ break;
+ }
+ }
+
+ Point p0 = alpha_complex_from_points.get_point(0);
+ std::cout << "alpha_complex_from_points.get_point(0)=" << p0 << std::endl;
+ BOOST_CHECK(4 == p0.dimension());
+ BOOST_CHECK(is_point_in_list(points, p0));
+
+ Point p1 = alpha_complex_from_points.get_point(1);
+ std::cout << "alpha_complex_from_points.get_point(1)=" << p1 << std::endl;
+ BOOST_CHECK(4 == p1.dimension());
+ BOOST_CHECK(is_point_in_list(points, p1));
+
+ Point p2 = alpha_complex_from_points.get_point(2);
+ std::cout << "alpha_complex_from_points.get_point(2)=" << p2 << std::endl;
+ BOOST_CHECK(4 == p2.dimension());
+ BOOST_CHECK(is_point_in_list(points, p2));
+
+ Point p3 = alpha_complex_from_points.get_point(3);
+ std::cout << "alpha_complex_from_points.get_point(3)=" << p3 << std::endl;
+ BOOST_CHECK(4 == p3.dimension());
+ BOOST_CHECK(is_point_in_list(points, p3));
+
+ // Test to the limit
+ BOOST_CHECK_THROW (alpha_complex_from_points.get_point(4), std::out_of_range);
+ BOOST_CHECK_THROW (alpha_complex_from_points.get_point(-1), std::out_of_range);
+ BOOST_CHECK_THROW (alpha_complex_from_points.get_point(1234), std::out_of_range);
+}
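
The expected values in the switch above come from the fact that the k+1 standard basis vectors of R^4 span a regular k-simplex of edge length sqrt(2), whose squared circumradius is k / (k + 1). A short sketch of that sequence:

#include <iostream>

int main() {
  // Squared circumradius of a regular k-simplex with edge length sqrt(2).
  for (int k = 0; k <= 3; ++k)
    std::cout << "dimension " << k << " -> " << static_cast<double>(k) / (k + 1) << std::endl;
  // prints 0, 0.5, 0.666667, 0.75 - the filtration values checked above
  return 0;
}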
diff --git a/src/Alpha_complex/test/CMakeLists.txt b/src/Alpha_complex/test/CMakeLists.txt
new file mode 100644
index 00000000..847581aa
--- /dev/null
+++ b/src/Alpha_complex/test/CMakeLists.txt
@@ -0,0 +1,39 @@
+cmake_minimum_required(VERSION 2.6)
+project(GUDHIAlphaComplexTest)
+
+# need CGAL 4.7
+# cmake -DCGAL_DIR=~/workspace/CGAL-4.7-Ic-41 ../../..
+if(CGAL_FOUND)
+ if (NOT CGAL_VERSION VERSION_LESS 4.7.0)
+ message(STATUS "CGAL version: ${CGAL_VERSION}.")
+
+ include( ${CGAL_USE_FILE} )
+
+ find_package(Eigen3 3.1.0)
+ if (EIGEN3_FOUND)
+ message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.")
+ include( ${EIGEN3_USE_FILE} )
+ include_directories (BEFORE "../../include")
+ if (CMAKE_BUILD_TYPE MATCHES Debug)
+ # For programs to be more verbose
+ add_definitions(-DDEBUG_TRACES)
+ endif()
+
+ add_executable ( AlphaComplexUT Alpha_complex_unit_test.cpp )
+ target_link_libraries(AlphaComplexUT ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
+
+ # Do not forget to copy test files in current binary dir
+ file(COPY "${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+
+ add_test(AlphaComplexUT ${CMAKE_CURRENT_BINARY_DIR}/AlphaComplexUT
+ # XML format for Jenkins xUnit plugin
+ --log_format=XML --log_sink=${CMAKE_SOURCE_DIR}/AlphaComplexUT.xml --log_level=test_suite --report_level=no)
+
+ else()
+ message(WARNING "Eigen3 not found. Version 3.1.0 is required for Alpha complex feature.")
+ endif()
+ else()
+ message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile Alpha complex feature. Version 4.7.0 is required.")
+ endif ()
+endif()
+
diff --git a/src/Alpha_complex/test/README b/src/Alpha_complex/test/README
new file mode 100644
index 00000000..45b87d91
--- /dev/null
+++ b/src/Alpha_complex/test/README
@@ -0,0 +1,12 @@
+To compile:
+***********
+
+cmake .
+make
+
+To launch with details:
+***********************
+
+./AlphaComplexUT --report_level=detailed --log_level=all
+
+ ==> echo $? returns 0 in case of success (non-zero otherwise)
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index 68eca65e..cd7f4991 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -44,10 +44,12 @@ else()
# Gudhi compilation part
include_directories(include)
+ add_subdirectory(example/common)
add_subdirectory(example/Simplex_tree)
add_subdirectory(example/Persistent_cohomology)
add_subdirectory(example/Skeleton_blocker)
add_subdirectory(example/Contraction)
+ add_subdirectory(example/Alpha_complex)
# data points generator
add_subdirectory(data/points/generator)
diff --git a/src/Doxyfile b/src/Doxyfile
index faa0d3fe..81f55e29 100644
--- a/src/Doxyfile
+++ b/src/Doxyfile
@@ -813,6 +813,8 @@ EXCLUDE_SYMBOLS =
# command).
EXAMPLE_PATH = biblio/
+ example/common/ \
+ example/Alpha_complex/
# If the value of the EXAMPLE_PATH tag contains directories, you can use the
# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and
@@ -833,6 +835,7 @@ EXAMPLE_RECURSIVE = NO
# \image command).
IMAGE_PATH = doc/Skeleton_blocker/ \
+ doc/Alpha_complex/ \
doc/common/ \
doc/Contraction/
@@ -1596,7 +1599,7 @@ PAPER_TYPE = a4
# If left blank no extra packages will be included.
# This tag requires that the tag GENERATE_LATEX is set to YES.
-EXTRA_PACKAGES = amsfonts amsmath amssymb
+EXTRA_PACKAGES = amsfonts amsmath amssymb algorithm algpseudocode
# The LATEX_HEADER tag can be used to specify a personal LaTeX header for the
# generated LaTeX document. The header should contain everything until the first
diff --git a/src/GudhUI/CMakeLists.txt b/src/GudhUI/CMakeLists.txt
index 71f4fd1a..5c2afdd0 100644
--- a/src/GudhUI/CMakeLists.txt
+++ b/src/GudhUI/CMakeLists.txt
@@ -78,6 +78,46 @@ if ( CGAL_FOUND AND QT4_FOUND AND OPENGL_FOUND AND QGLVIEWER_FOUND )
target_link_libraries( GudhUI ${QT_LIBRARIES} ${QGLVIEWER_LIBRARIES} )
target_link_libraries( GudhUI ${OPENGL_gl_LIBRARY} ${OPENGL_glu_LIBRARY} )
+###############################################################################
+ if (NOT CGAL_VERSION VERSION_LESS 4.7.0)
+ message(STATUS "CGAL version: ${CGAL_VERSION}.")
+
+ include( ${CGAL_USE_FILE} )
+ # When include(${CGAL_USE_FILE}) is called, CXX_FLAGS are overwritten.
+ # cf. http://doc.cgal.org/latest/Manual/installation.html#title40
+ # A workaround is to add "-std=c++11" again.
+ # A fix would be to use https://cmake.org/cmake/help/v3.1/prop_gbl/CMAKE_CXX_KNOWN_FEATURES.html
+ # or even better https://cmake.org/cmake/help/v3.1/variable/CMAKE_CXX_STANDARD.html
+ # but that requires CMake 3.1 or later.
+ if(NOT MSVC)
+ include(CheckCXXCompilerFlag)
+ CHECK_CXX_COMPILER_FLAG(-std=c++11 COMPILER_SUPPORTS_CXX11)
+ if(COMPILER_SUPPORTS_CXX11)
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11")
+ endif()
+ endif()
+ # - End of workaround
+
+ find_package(Eigen3 3.1.0)
+ if (EIGEN3_FOUND)
+ message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.")
+ include( ${EIGEN3_USE_FILE} )
+ if (CMAKE_BUILD_TYPE MATCHES Debug)
+ # For programs to be more verbose
+ add_definitions(-DDEBUG_TRACES)
+ endif()
+
+ add_executable (acp alpha_complex_persistence.cpp)
+ target_link_libraries(acp ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY} ${QT_LIBRARIES})
+
+ else()
+ message(WARNING "Eigen3 not found. Version 3.1.0 is required for Alpha shapes feature.")
+ endif()
+ else()
+ message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile Alpha shapes feature. Version 4.7.0 is required.")
+ endif ()
+###############################################################################
+
else()
message(STATUS "NOTICE: GudhUI requires CGAL, the QGLViewer, OpenGL and Qt4, and will not be compiled.")
endif()
diff --git a/src/GudhUI/alpha_complex_persistence.cpp b/src/GudhUI/alpha_complex_persistence.cpp
new file mode 100644
index 00000000..4f85459a
--- /dev/null
+++ b/src/GudhUI/alpha_complex_persistence.cpp
@@ -0,0 +1,78 @@
+#include <iostream>
+#include <string>
+
+
+#include <QtGui/QApplication>
+
+// to construct a Delaunay_triangulation from an OFF file
+#include <gudhi/Delaunay_triangulation_off_io.h>
+#include <gudhi/Alpha_complex.h>
+#include <gudhi/Persistent_cohomology.h>
+
+#include "utils/Bar_code_persistence.h"
+
+void usage(char * const progName) {
+ std::cerr << "Usage: " << progName << " filename.off " << // alpha_square_max_value[double] " <<
+ "coeff_field_characteristic[integer > 0] min_persistence[double >= -1.0]" << std::endl;
+ std::cerr << " i.e.: " << progName << " ../../data/points/alphacomplexdoc.off 60.0 2 0.02" << std::endl;
+ exit(-1); // ----- >>
+}
+
+int main(int argc, char **argv) {
+ if (argc != 4) {
+ std::cerr << "Error: Number of arguments (" << argc << ") is not correct" << std::endl;
+ usage(argv[0]);
+ }
+
+ QApplication qtapp(argc, argv);
+
+ std::string off_file_name(argv[1]);
+ // double alpha_square_max_value = atof(argv[2]);
+ double alpha_square_max_value = 1e20;
+ int coeff_field_characteristic = atoi(argv[2]); // argv[3]
+ double min_persistence = atof(argv[3]); // argv[4]
+
+ // ----------------------------------------------------------------------------
+ // Init of an alpha complex from an OFF file
+ // ----------------------------------------------------------------------------
+ typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Kernel;
+ Gudhi::alphacomplex::Alpha_complex<Kernel> alpha_complex_from_file(off_file_name, alpha_square_max_value);
+
+ // ----------------------------------------------------------------------------
+ // Display information about the alpha complex
+ // ----------------------------------------------------------------------------
+ std::cout << "Alpha complex is of dimension " << alpha_complex_from_file.dimension() <<
+ " - " << alpha_complex_from_file.num_simplices() << " simplices - " <<
+ alpha_complex_from_file.num_vertices() << " vertices." << std::endl;
+
+ // Sort the simplices in the order of the filtration
+ alpha_complex_from_file.initialize_filtration();
+
+ std::cout << "Simplex_tree dim: " << alpha_complex_from_file.dimension() << std::endl;
+ // Compute the persistence diagram of the complex
+ Gudhi::persistent_cohomology::Persistent_cohomology< Gudhi::alphacomplex::Alpha_complex<Kernel>,
+ Gudhi::persistent_cohomology::Field_Zp > pcoh(alpha_complex_from_file);
+
+ std::cout << "coeff_field_characteristic " << coeff_field_characteristic <<
+ " - min_persistence " << min_persistence << std::endl;
+
+ // initializes the coefficient field for homology
+ pcoh.init_coefficients(coeff_field_characteristic);
+
+ pcoh.compute_persistent_cohomology(min_persistence);
+
+ pcoh.output_diagram();
+
+ std::vector<std::pair<double, double>> persistence_vector;
+ pcoh.get_persistence(persistence_vector);
+
+ Bar_code_persistence bc_persistence;
+
+ for (auto persistence : persistence_vector) {
+ bc_persistence.insert(persistence.first, persistence.second);
+ }
+
+ bc_persistence.show();
+
+ return qtapp.exec();
+}
diff --git a/src/GudhUI/model/Model.h b/src/GudhUI/model/Model.h
index d78cbad9..4f8d48ee 100644
--- a/src/GudhUI/model/Model.h
+++ b/src/GudhUI/model/Model.h
@@ -71,7 +71,7 @@ class CGAL_geometric_flag_complex_wrapper {
void maximal_face(std::vector<int> vertices) {
if (!load_only_points_) {
- std::cout << "size:" << vertices.size() << std::endl;
+ //std::cout << "size:" << vertices.size() << std::endl;
for (int i = 0; i < vertices.size(); ++i)
for (int j = i + 1; j < vertices.size(); ++j)
complex_.add_edge(Vertex_handle(vertices[i]), Vertex_handle(vertices[j]));
diff --git a/src/GudhUI/utils/Bar_code_persistence.h b/src/GudhUI/utils/Bar_code_persistence.h
new file mode 100644
index 00000000..a1a46ea8
--- /dev/null
+++ b/src/GudhUI/utils/Bar_code_persistence.h
@@ -0,0 +1,84 @@
+#include <math.h> // isfinite
+
+#include <QtGui/QApplication>
+
+#include <QGraphicsView>
+#include <QGraphicsScene>
+#include <QPointF>
+#include <QVector>
+#include <QGraphicsTextItem>
+
+#include <iostream>
+#include <vector>
+#include <limits> // NaN, infinity
+#include <utility> // for pair
+
+class Bar_code_persistence {
+ private:
+ typedef std::vector<std::pair<double, double>> Persistence;
+ Persistence persistence_vector;
+ double min_birth;
+ double max_death;
+
+ public:
+
+ Bar_code_persistence()
+ : min_birth(std::numeric_limits<double>::quiet_NaN()),
+ max_death(std::numeric_limits<double>::quiet_NaN()) { }
+
+ void insert(double birth, double death) {
+ persistence_vector.push_back(std::make_pair(birth, death));
+ if (std::isfinite(birth)) {
+ if ((birth < min_birth) || (std::isnan(min_birth)))
+ min_birth = birth;
+ if ((birth > max_death) || (std::isnan(max_death)))
+ max_death = birth;
+ }
+ if (std::isfinite(death))
+ if ((death > max_death) || (std::isnan(max_death)))
+ max_death = death;
+ }
+
+ void show() {
+ // Create a view, put a scene in it
+ QGraphicsView * view = new QGraphicsView();
+ QGraphicsScene * scene = new QGraphicsScene();
+ view->setScene(scene);
+ double ratio = 600.0 / (max_death - min_birth);
+ //std::cout << "min_birth=" << min_birth << " - max_death=" << max_death << " - ratio=" << ratio << std::endl;
+
+ double height = 0.0, birth = 0.0, death = 0.0;
+ int pers_num = 1;
+ for (auto& persistence : persistence_vector) {
+ height = 5.0 * pers_num;
+ //std::cout << "[" << pers_num << "] birth=" << persistence.first << " - death=" << persistence.second << std::endl;
+ if (std::isfinite(persistence.first))
+ birth = ((persistence.first - min_birth) * ratio) + 50.0;
+ else
+ birth = 0.0;
+
+ if (std::isfinite(persistence.second))
+ death = ((persistence.second - min_birth) * ratio) + 50.0;
+ else
+ death = 700.0;
+
+ scene->addLine(birth, height, death, height, QPen(Qt::blue, 2));
+ pers_num++;
+ }
+ height += 10.0;
+ // scale line
+ scene->addLine(0, height, 700.0, height, QPen(Qt::black, 1));
+ int modulo = 0;
+ for (double scale = 50.0; scale < 700.0; scale += 50.0) {
+ modulo++;
+ // scale small dash
+ scene->addLine(scale, height - 3.0, scale, height + 3.0, QPen(Qt::black, 1));
+ // scale text
+ QString scale_value = QString::number(((scale - 50.0) / ratio) + min_birth);
+ QGraphicsTextItem* dimText = scene->addText(scale_value, QFont("Helvetica", 8));
+ dimText->setPos(scale - (3.0 * scale_value.size()), height + 9.0 * (modulo % 2));
+ }
+ // Show the view
+ view->show();
+ }
+};
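
The show() method above maps the finite range [min_birth, max_death] onto a 600-pixel band starting at x = 50, using ratio = 600 / (max_death - min_birth). A small sketch of that mapping with hypothetical values:

#include <iostream>

int main() {
  // Hypothetical persistence range.
  double min_birth = 0.0, max_death = 2.0;
  double ratio = 600.0 / (max_death - min_birth);  // 300 pixels per unit
  double birth = 0.5;
  std::cout << ((birth - min_birth) * ratio) + 50.0 << std::endl;  // prints 200
  return 0;
}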
diff --git a/src/GudhUI/view/FirstCoordProjector.h b/src/GudhUI/view/FirstCoordProjector.h
index 529d2d42..3ceda3f5 100644
--- a/src/GudhUI/view/FirstCoordProjector.h
+++ b/src/GudhUI/view/FirstCoordProjector.h
@@ -32,8 +32,11 @@ class FirstCoordProjector3D : public Projector3D {
typedef Projector3D::Point_3 Point_3;
Point_3 operator()(const Point& p) const {
- assert(p.dimension() >= 3);
+ if (p.dimension() >= 3)
return Point_3(p.x(), p.y(), p.z());
+ else if (p.dimension() >= 2)
+ return Point_3(p.x(), p.y(), 0.0);
+ else return Point_3(0.0, 0.0, 0.0); // fallback so every path returns a value
}
};
diff --git a/src/Persistent_cohomology/example/CMakeLists.txt b/src/Persistent_cohomology/example/CMakeLists.txt
index 50d10025..8caf7d8b 100644
--- a/src/Persistent_cohomology/example/CMakeLists.txt
+++ b/src/Persistent_cohomology/example/CMakeLists.txt
@@ -43,6 +43,50 @@ if(GMPXX_FOUND AND GMP_FOUND)
target_link_libraries(alpha_shapes_persistence ${Boost_SYSTEM_LIBRARY} ${GMPXX_LIBRARIES} ${GMP_LIBRARIES} ${CGAL_LIBRARY})
add_test(alpha_shapes_persistence_2_0_5 ${CMAKE_CURRENT_BINARY_DIR}/alpha_shapes_persistence ${CMAKE_SOURCE_DIR}/data/points/bunny_5000 2 0.5)
#add_test(alpha_shapes_persistence_3_3_100 ${CMAKE_CURRENT_BINARY_DIR}/alpha_shapes_persistence ${CMAKE_SOURCE_DIR}/data/points/bunny_5000.st -p 3 -m 100)
+
+
+
+
+ if (NOT CGAL_VERSION VERSION_LESS 4.7.0)
+ message(STATUS "CGAL version: ${CGAL_VERSION}.")
+
+ include( ${CGAL_USE_FILE} )
+ # When include(${CGAL_USE_FILE}) is called, CXX_FLAGS are overwritten.
+ # cf. http://doc.cgal.org/latest/Manual/installation.html#title40
+ # A workaround is to add "-std=c++11" again.
+ # A fix would be to use https://cmake.org/cmake/help/v3.1/prop_gbl/CMAKE_CXX_KNOWN_FEATURES.html
+ # or even better https://cmake.org/cmake/help/v3.1/variable/CMAKE_CXX_STANDARD.html
+ # but that requires CMake 3.1 or later.
+ if(NOT MSVC)
+ include(CheckCXXCompilerFlag)
+ CHECK_CXX_COMPILER_FLAG(-std=c++11 COMPILER_SUPPORTS_CXX11)
+ if(COMPILER_SUPPORTS_CXX11)
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11")
+ endif()
+ endif()
+ # - End of workaround
+
+ find_package(Eigen3 3.1.0)
+ if (EIGEN3_FOUND)
+ message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.")
+ include( ${EIGEN3_USE_FILE} )
+ if (CMAKE_BUILD_TYPE MATCHES Debug)
+ # For programs to be more verbose
+ add_definitions(-DDEBUG_TRACES)
+ endif()
+
+ add_executable (alphacomplexpersistence alpha_complex_persistence.cpp)
+ target_link_libraries(alphacomplexpersistence ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY})
+
+ else()
+ message(WARNING "Eigen3 not found. Version 3.1.0 is required for Alpha shapes feature.")
+ endif()
+ else()
+ message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile Alpha shapes feature. Version 4.7.0 is required.")
+ endif ()
+
endif()
endif()
diff --git a/src/Persistent_cohomology/example/alpha_complex_persistence.cpp b/src/Persistent_cohomology/example/alpha_complex_persistence.cpp
new file mode 100644
index 00000000..fbadf673
--- /dev/null
+++ b/src/Persistent_cohomology/example/alpha_complex_persistence.cpp
@@ -0,0 +1,55 @@
+#include <iostream>
+#include <string>
+
+// to construct a Delaunay_triangulation from a OFF file
+#include <gudhi/Delaunay_triangulation_off_io.h>
+#include <gudhi/Alpha_complex.h>
+#include <gudhi/Persistent_cohomology.h>
+
+void usage(char * const progName) {
+ std::cerr << "Usage: " << progName << " filename.off alpha_square_max_value[double] " <<
+ "coeff_field_characteristic[integer > 0] min_persistence[double >= -1.0]" << std::endl;
+ std::cerr << " i.e.: " << progName << " ../../data/points/alphacomplexdoc.off 60.0 2 0.02" << std::endl;
+ exit(-1); // ----- >>
+}
+
+int main(int argc, char **argv) {
+ if (argc != 5) {
+ std::cerr << "Error: Number of arguments (" << argc << ") is not correct" << std::endl;
+ usage(argv[0]);
+ }
+
+ std::string off_file_name(argv[1]);
+ double alpha_square_max_value = atof(argv[2]);
+ int coeff_field_characteristic = atoi(argv[3]);
+ double min_persistence = atof(argv[4]);
+
+ // ----------------------------------------------------------------------------
+ // Init of an alpha complex from an OFF file
+ // ----------------------------------------------------------------------------
+ typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Kernel;
+ Gudhi::alphacomplex::Alpha_complex<Kernel> alpha_complex_from_file(off_file_name, alpha_square_max_value);
+
+ // ----------------------------------------------------------------------------
+ // Display information about the alpha complex
+ // ----------------------------------------------------------------------------
+ std::cout << "Alpha complex is of dimension " << alpha_complex_from_file.dimension() <<
+ " - " << alpha_complex_from_file.num_simplices() << " simplices - " <<
+ alpha_complex_from_file.num_vertices() << " vertices." << std::endl;
+
+ // Sort the simplices in the order of the filtration
+ alpha_complex_from_file.initialize_filtration();
+
+ std::cout << "Simplex_tree dim: " << alpha_complex_from_file.dimension() << std::endl;
+ // Compute the persistence diagram of the complex
+ Gudhi::persistent_cohomology::Persistent_cohomology< Gudhi::alphacomplex::Alpha_complex<Kernel>,
+ Gudhi::persistent_cohomology::Field_Zp > pcoh(alpha_complex_from_file);
+ // initializes the coefficient field for homology
+ pcoh.init_coefficients(coeff_field_characteristic);
+
+ pcoh.compute_persistent_cohomology(min_persistence);
+
+ pcoh.output_diagram();
+
+ return 0;
+}
diff --git a/src/Persistent_cohomology/example/alpha_shapes_persistence.cpp b/src/Persistent_cohomology/example/alpha_shapes_persistence.cpp
index 6d5eebcf..235ea141 100644
--- a/src/Persistent_cohomology/example/alpha_shapes_persistence.cpp
+++ b/src/Persistent_cohomology/example/alpha_shapes_persistence.cpp
@@ -124,6 +124,12 @@ void usage(char * const progName) {
}
int main(int argc, char * const argv[]) {
+ // program args management
+ if (argc != 4) {
+ std::cerr << "Error: Number of arguments (" << argc << ") is not correct\n";
+ usage(argv[0]);
+ }
+
int coeff_field_characteristic = 0;
int returnedScanValue = sscanf(argv[2], "%d", &coeff_field_characteristic);
if ((returnedScanValue == EOF) || (coeff_field_characteristic <= 0)) {
@@ -138,12 +144,6 @@ int main(int argc, char * const argv[]) {
usage(argv[0]);
}
- // program args management
- if (argc != 4) {
- std::cerr << "Error: Number of arguments (" << argc << ") is not correct\n";
- usage(argv[0]);
- }
-
// Read points from file
std::string filegraph = argv[1];
std::list<Point_3> lp;
@@ -239,7 +239,7 @@ int main(int argc, char * const argv[]) {
}
}
// Construction of the simplex_tree
- Filtration_value filtr = std::sqrt(*the_alpha_value_iterator);
+ Filtration_value filtr = /*std::sqrt*/(*the_alpha_value_iterator);
#ifdef DEBUG_TRACES
std::cout << "filtration = " << filtr << std::endl;
#endif // DEBUG_TRACES
diff --git a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h
index d096792f..f6773bac 100644
--- a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h
+++ b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h
@@ -713,6 +713,14 @@ class Persistent_cohomology {
}
}
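+  /** \brief Fills a vector with the persistence intervals, as (birth, death) pairs of filtration
+   * values, sorted with cmp_intervals_by_length as in write_output_diagram().
+   *
+   * A minimal usage sketch (illustrative; assumes compute_persistent_cohomology() was already called
+   * on a Persistent_cohomology instance named pcoh, as in the examples):
+   * \code
+   * std::vector<std::pair<double, double>> persistence;
+   * pcoh.get_persistence(persistence);
+   * for (auto interval : persistence)
+   *   std::cout << interval.first << " " << interval.second << std::endl;
+   * \endcode
+   *
+   * @param[out] persistence vector filled with the (birth filtration, death filtration) pairs.
+   */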
+ void get_persistence(std::vector<std::pair<double, double>>& persistence) {
+ cmp_intervals_by_length cmp(cpx_);
+ std::sort(std::begin(persistent_pairs_), std::end(persistent_pairs_), cmp);
+ for (auto pair : persistent_pairs_) {
+ persistence.push_back(std::make_pair(cpx_->filtration(get<0>(pair)), cpx_->filtration(get<1>(pair))));
+ }
+ }
+
void write_output_diagram(std::string diagram_name) {
std::ofstream diagram_out(diagram_name.c_str());
cmp_intervals_by_length cmp(cpx_);
diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree.h b/src/Simplex_tree/include/gudhi/Simplex_tree.h
index 4c6a95e8..35d839e2 100644
--- a/src/Simplex_tree/include/gudhi/Simplex_tree.h
+++ b/src/Simplex_tree/include/gudhi/Simplex_tree.h
@@ -41,7 +41,6 @@
#include <functional> // for greater<>
namespace Gudhi {
-
/** \defgroup simplex_tree Filtered Complexes
*
* A simplicial complex \f$\mathbf{K}\f$
@@ -449,6 +448,15 @@ class Simplex_tree {
}
}
+ /** \brief Sets the filtration value of a simplex.
+ *
+   * No action if called on the null_simplex. */
+ void assign_filtration(Simplex_handle sh, Filtration_value fv) {
+ if (sh != null_simplex()) {
+ sh->second.assign_filtration(fv);
+ }
+ }
+
/** \brief Returns an upper bound of the filtration values of the simplices. */
Filtration_value filtration() const {
return threshold_;
@@ -730,7 +738,6 @@ class Simplex_tree {
sh->second.assign_key(key);
}
- public:
/** Returns the two Simplex_handle corresponding to the endpoints of
* and edge. sh must point to a 1-dimensional simplex. This is an
* optimized version of the boundary computation. */
@@ -1091,7 +1098,7 @@ class Simplex_tree {
os << filtration(sh) << " \n";
}
}
-
+
private:
Vertex_handle null_vertex_;
/** \brief Upper bound on the filtration values of the simplices.*/
@@ -1106,7 +1113,6 @@ class Simplex_tree {
};
// Print a Simplex_tree in os.
-
template<typename...T>
std::ostream& operator<<(std::ostream & os, Simplex_tree<T...> & st) {
for (auto sh : st.filtration_simplex_range()) {
diff --git a/src/Simplex_tree/test/simplex_tree_unit_test.cpp b/src/Simplex_tree/test/simplex_tree_unit_test.cpp
index fff00d77..00cf69bc 100644
--- a/src/Simplex_tree/test/simplex_tree_unit_test.cpp
+++ b/src/Simplex_tree/test/simplex_tree_unit_test.cpp
@@ -492,19 +492,6 @@ BOOST_AUTO_TEST_CASE(NSimplexAndSubfaces_tree_insertion) {
/* A facet [3,4,5] */
/* A cell [0,1,6,7] */
- typeSimplex simplexPair1 = std::make_pair(SimplexVector1, DEFAULT_FILTRATION_VALUE);
- typeSimplex simplexPair2 = std::make_pair(SimplexVector2, DEFAULT_FILTRATION_VALUE);
- typeSimplex simplexPair3 = std::make_pair(SimplexVector3, DEFAULT_FILTRATION_VALUE);
- typeSimplex simplexPair4 = std::make_pair(SimplexVector4, DEFAULT_FILTRATION_VALUE);
- typeSimplex simplexPair5 = std::make_pair(SimplexVector5, DEFAULT_FILTRATION_VALUE);
- typeSimplex simplexPair6 = std::make_pair(SimplexVector6, DEFAULT_FILTRATION_VALUE);
- test_simplex_tree_contains(st, simplexPair1, 6); // (2,1,0) is in position 6
- test_simplex_tree_contains(st, simplexPair2, 7); // (3) is in position 7
- test_simplex_tree_contains(st, simplexPair3, 8); // (3,0) is in position 8
- test_simplex_tree_contains(st, simplexPair4, 2); // (1,0) is in position 2
- test_simplex_tree_contains(st, simplexPair5, 14); // (3,4,5) is in position 14
- test_simplex_tree_contains(st, simplexPair6, 26); // (7,6,1,0) is in position 26
-
// ------------------------------------------------------------------------------------------------------------------
// Find in the simplex_tree
// ------------------------------------------------------------------------------------------------------------------
diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h
index 689e7a4d..0fd9a93e 100644
--- a/src/common/doc/main_page.h
+++ b/src/common/doc/main_page.h
@@ -66,6 +66,8 @@
* \li GudhUI
* \li Persistent_cohomology/alpha_shapes_persistence
* \li Simplex_tree/simplex_tree_from_alpha_shapes_3
+ * \li Alpha_complex/Alpha_complex_from_off
+ * \li Alpha_complex/Alpha_complex_from_points
*
* Having CGAL version 4.4 or higher installed is recommended. The procedure to install this library according to
* your operating system is detailed here http://doc.cgal.org/latest/Manual/installation.html
diff --git a/src/common/example/CMakeLists.txt b/src/common/example/CMakeLists.txt
new file mode 100644
index 00000000..d29e31e7
--- /dev/null
+++ b/src/common/example/CMakeLists.txt
@@ -0,0 +1,40 @@
+cmake_minimum_required(VERSION 2.6)
+project(GUDHIDelaunayTriangulationOffFileReadWrite)
+
+# need CGAL 4.6
+if(CGAL_FOUND)
+ if (NOT CGAL_VERSION VERSION_LESS 4.6.0)
+ message(STATUS "CGAL version: ${CGAL_VERSION}.")
+
+ include( ${CGAL_USE_FILE} )
+    # When include(${CGAL_USE_FILE}) is called, CXX_FLAGS are overwritten.
+ # cf. http://doc.cgal.org/latest/Manual/installation.html#title40
+ # A workaround is to add "-std=c++11" again.
+ # A fix would be to use https://cmake.org/cmake/help/v3.1/prop_gbl/CMAKE_CXX_KNOWN_FEATURES.html
+ # or even better https://cmake.org/cmake/help/v3.1/variable/CMAKE_CXX_STANDARD.html
+    # but it requires cmake version 3.1 at least.
+ if(NOT MSVC)
+ include(CheckCXXCompilerFlag)
+ CHECK_CXX_COMPILER_FLAG(-std=c++11 COMPILER_SUPPORTS_CXX11)
+ if(COMPILER_SUPPORTS_CXX11)
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11")
+ endif()
+ endif()
+ # - End of workaround
+
+ find_package(Eigen3 3.1.0)
+ if (EIGEN3_FOUND)
+ message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.")
+ include( ${EIGEN3_USE_FILE} )
+
+ add_executable ( dtoffrw Delaunay_triangulation_off_rw.cpp )
+ target_link_libraries(dtoffrw ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY})
+ add_test(dtoffrw ${CMAKE_CURRENT_BINARY_DIR}/dtoffrw ${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off ${CMAKE_CURRENT_BINARY_DIR}/result.off)
+
+ else()
+ message(WARNING "Eigen3 not found. Version 3.1.0 is required for Alpha shapes feature.")
+ endif()
+ else()
+ message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile Alpha shapes feature. Version 4.6.0 is required.")
+ endif ()
+endif()
diff --git a/src/common/example/Delaunay_triangulation_off_rw.cpp b/src/common/example/Delaunay_triangulation_off_rw.cpp
new file mode 100644
index 00000000..75e4fafb
--- /dev/null
+++ b/src/common/example/Delaunay_triangulation_off_rw.cpp
@@ -0,0 +1,54 @@
+// to construct a Delaunay_triangulation from a OFF file
+#include <gudhi/Delaunay_triangulation_off_io.h>
+
+#include <CGAL/Delaunay_triangulation.h>
+#include <CGAL/Epick_d.h>
+
+#include <iostream>
+#include <string>
+
+// Use dynamic_dimension_tag for the user to be able to set dimension
+typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > K;
+typedef CGAL::Delaunay_triangulation<K> T;
+// The triangulation uses the default instantiation of the
+// TriangulationDataStructure template parameter
+
+void usage(char * const progName) {
+ std::cerr << "Usage: " << progName << " inputFile.off outputFile.off" << std::endl;
+ exit(-1); // ----- >>
+}
+
+int main(int argc, char **argv) {
+ if (argc != 3) {
+ std::cerr << "Error: Number of arguments (" << argc << ") is not correct" << std::endl;
+ usage(argv[0]);
+ }
+
+ std::string offInputFile(argv[1]);
+ // Read the OFF file (input file name given as parameter) and triangulates points
+ Gudhi::Delaunay_triangulation_off_reader<T> off_reader(offInputFile);
+ // Check the read operation was correct
+ if (!off_reader.is_valid()) {
+ std::cerr << "Unable to read file " << offInputFile << std::endl;
+ exit(-1); // ----- >>
+ }
+
+ // Retrieve the triangulation
+ T* triangulation = off_reader.get_complex();
+ // Operations on triangulation
+ std::cout << "Number of vertices= " << triangulation->number_of_vertices() << std::endl;
+ std::cout << "Number of finite full cells= " << triangulation->number_of_finite_full_cells() << std::endl;
+
+ std::string outFileName(argv[2]);
+ std::string offOutputFile(outFileName);
+ // Write the OFF file (output file name given as parameter) with the points and triangulated cells as faces
+ Gudhi::Delaunay_triangulation_off_writer<T> off_writer(offOutputFile, triangulation);
+
+ // Check the write operation was correct
+ if (!off_writer.is_valid()) {
+ std::cerr << "Unable to write file " << offOutputFile << std::endl;
+ exit(-1); // ----- >>
+ }
+
+ return 0;
+} \ No newline at end of file
diff --git a/src/common/example/dtoffrw_alphashapedoc_result.txt b/src/common/example/dtoffrw_alphashapedoc_result.txt
new file mode 100644
index 00000000..57761d14
--- /dev/null
+++ b/src/common/example/dtoffrw_alphashapedoc_result.txt
@@ -0,0 +1,3 @@
+Number of vertices= 7
+Number of finite full cells= 6
+
diff --git a/src/common/include/gudhi/Delaunay_triangulation_off_io.h b/src/common/include/gudhi/Delaunay_triangulation_off_io.h
new file mode 100644
index 00000000..47066a94
--- /dev/null
+++ b/src/common/include/gudhi/Delaunay_triangulation_off_io.h
@@ -0,0 +1,308 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2015 INRIA Saclay (France)
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+#ifndef SRC_ALPHA_SHAPES_INCLUDE_GUDHI_ALPHA_SHAPES_DELAUNAY_TRIANGULATION_OFF_IO_H_
+#define SRC_ALPHA_SHAPES_INCLUDE_GUDHI_ALPHA_SHAPES_DELAUNAY_TRIANGULATION_OFF_IO_H_
+
+#include <string>
+#include <vector>
+#include <fstream>
+#include <map>
+
+#include "gudhi/Off_reader.h"
+
+namespace Gudhi {
+
+/** \brief OFF file visitor implementation (following the Off_reader visitor interface) used to construct a CGAL Delaunay triangulation.
+ *
+ * For more information on CGAL Delaunay triangulations, please refer to the corresponding chapter at
+ * http://doc.cgal.org/latest/Triangulation/
+ */
+template<typename Complex>
+class Delaunay_triangulation_off_visitor_reader {
+ private:
+ Complex* complex_;
+ typedef typename Complex::Point Point;
+
+ public:
+
+  // TODO(VR) : Passing a Complex as a parameter is required, even if it is not used. Otherwise, compilation fails.
+
+ /** \brief Delaunay_triangulation_off_visitor_reader constructor
+ *
+   * @param[in] complex_ptr_ pointer on a Delaunay triangulation (not used - the triangulation is created in init() from the dimension read in the OFF file header).
+ */
+ Delaunay_triangulation_off_visitor_reader(Complex* complex_ptr_)
+ : complex_(nullptr) { }
+
+ /** \brief Off_reader visitor init implementation.
+ *
+   * The init parameters are set from the OFF file header.
+   * The dimension value is required in order to construct the Delaunay triangulation.
+ *
+ * @param[in] dim space dimension of vertices.
+ * @param[in] num_vertices number of vertices in the OFF file (not used).
+ * @param[in] num_faces number of faces in the OFF file (not used).
+ * @param[in] num_edges number of edges in the OFF file (not used).
+ */
+ void init(int dim, int num_vertices, int num_faces, int num_edges) {
+#ifdef DEBUG_TRACES
+ std::cout << "Delaunay_triangulation_off_visitor_reader::init - dim=" << dim << " - num_vertices=" <<
+ num_vertices << " - num_faces=" << num_faces << " - num_edges=" << num_edges << std::endl;
+#endif // DEBUG_TRACES
+ if (num_faces > 0) {
+ std::cerr << "Delaunay_triangulation_off_visitor_reader::init faces are not taken into account from OFF " <<
+ "file for Delaunay triangulation - faces are computed." << std::endl;
+ }
+ if (num_edges > 0) {
+ std::cerr << "Delaunay_triangulation_off_visitor_reader::init edges are not taken into account from OFF " <<
+ "file for Delaunay triangulation - edges are computed." << std::endl;
+ }
+ // Complex construction with dimension from file
+ complex_ = new Complex(dim);
+ }
+
+ /** \brief Off_reader visitor point implementation.
+ *
+ * The point function is called on each vertex line from OFF file.
+ * This function inserts the vertex in the Delaunay triangulation.
+ *
+ * @param[in] point vector of vertex coordinates.
+ */
+ void point(const std::vector<double>& point) {
+#ifdef DEBUG_TRACES
+ std::cout << "Delaunay_triangulation_off_visitor_reader::point ";
+ for (auto coordinate : point) {
+ std::cout << coordinate << " | ";
+ }
+ std::cout << std::endl;
+#endif // DEBUG_TRACES
+ complex_->insert(Point(point.size(), point.begin(), point.end()));
+ }
+
+ // Off_reader visitor maximal_face implementation - not used
+ void maximal_face(const std::vector<int>& face) {
+ // For Delaunay Triangulation, only points are read
+ }
+
+ // Off_reader visitor done implementation - not used
+ void done() {
+ // Nothing to be done on end of OFF file read
+ }
+
+ /** \brief Returns the constructed Delaunay triangulation.
+ *
+ * @return A pointer on the Delaunay triangulation.
+ *
+ * @warning The returned pointer can be nullptr.
+ */
+ Complex* get_complex() const {
+ return complex_;
+ }
+};
+
+/** \brief OFF file reader used to construct a Delaunay triangulation.
+ *
+ * This class uses the Delaunay_triangulation_off_visitor_reader to visit the OFF file through Off_reader.
+ *
+ * For more information on CGAL Delaunay triangulations, please refer to the corresponding chapter at
+ * http://doc.cgal.org/latest/Triangulation/
+ *
+ * \section Example
+ *
+ * This example loads points from an OFF file and builds the Delaunay triangulation.
+ * Then, it displays the number of vertices and finite full cells of the Delaunay triangulation.
+ *
+ * \include Delaunay_triangulation_off_rw.cpp
+ *
+ * When launching:
+ *
+ * \code $> ./dtoffrw ../../data/points/alphashapedoc.off triangulated.off
+ * \endcode
+ *
+ * the program output is:
+ *
+ * \include dtoffrw_alphashapedoc_result.txt
+ */
+template<typename Complex>
+class Delaunay_triangulation_off_reader {
+ public:
+
+ /** \brief Reads the OFF file and constructs the Delaunay triangulation from the points
+ * that are in the OFF file.
+ *
+ * @param[in] name_file OFF file to read.
+ *
+ * @warning Check with is_valid() function to see if read operation was successful.
+ */
+ Delaunay_triangulation_off_reader(const std::string & name_file)
+ : valid_(false) {
+ std::ifstream stream(name_file);
+ if (stream.is_open()) {
+ Delaunay_triangulation_off_visitor_reader<Complex> off_visitor(complex_);
+ Off_reader off_reader(stream);
+ valid_ = off_reader.read(off_visitor);
+ if (valid_) {
+ complex_ = off_visitor.get_complex();
+ if (complex_ == nullptr) {
+ std::cerr << "Delaunay_triangulation_off_reader::Delaunay_triangulation_off_reader off_visitor returns an empty pointer" << std::endl;
+ valid_ = false;
+ }
+ }
+ } else {
+ std::cerr << "Delaunay_triangulation_off_reader::Delaunay_triangulation_off_reader could not open file " <<
+ name_file << std::endl;
+ }
+
+ }
+
+ /** \brief Returns if the OFF file read operation was successful or not.
+ *
+ * @return OFF file read status.
+ */
+ bool is_valid() const {
+ return valid_;
+ }
+
+ /** \brief Returns the constructed Delaunay triangulation.
+ *
+ * @return A pointer on the Delaunay triangulation.
+ *
+ * @warning The returned pointer can be nullptr.
+ */
+ Complex* get_complex() const {
+ if (valid_)
+ return complex_;
+ return nullptr;
+
+ }
+
+ private:
+ /** \brief OFF file read status.*/
+ bool valid_;
+ /** \brief A pointer on the Delaunay triangulation.*/
+ Complex* complex_;
+};
+
+/** \brief OFF file writer from a Delaunay triangulation.
+ *
+ * This class constructs the OFF file header according to http://www.geomview.org/docs/html/OFF.html
+ *
+ * The header is followed by the list of point coordinates (the Delaunay triangulation vertices),
+ *
+ * and finally by the list of faces (the Delaunay triangulation finite full cells).
+ *
+ * For more information on CGAL Delaunay triangulations, please refer to the corresponding chapter at
+ * http://doc.cgal.org/latest/Triangulation/
+ *
+ * \section Example
+ *
+ * This example loads points from an OFF file and builds the Delaunay triangulation.
+ * Then, the Delaunay triangulation is saved in a new file including the triangulation as a list of faces.
+ *
+ * \include Delaunay_triangulation_off_rw.cpp
+ *
+ * When launching:
+ *
+ * \code $> ./dtoffrw ../../data/points/alphashapedoc.off triangulated.off
+ * \endcode
+ *
+ * The result will be an OFF file of dimension 2 with the 7 points from alphashapedoc.off, followed by the 6
+ * triangulated faces of size 3 (the size is the first value on each face line):
+ * \include dtoffrw_alphashapedoc_result.off
+ */
+template<typename Complex>
+class Delaunay_triangulation_off_writer {
+ public:
+ typedef typename Complex::Point Point;
+
+ /** \brief Writes the OFF file from the Delaunay triangulation
+ *
+ * @param[in] name_file OFF file to write.
+ * @param[in] complex_ptr pointer on a Delaunay triangulation.
+ *
+ * @warning Check with is_valid() function to see if write operation was successful.
+ */
+ Delaunay_triangulation_off_writer(const std::string & name_file, Complex* complex_ptr)
+ : valid_(false) {
+ std::ofstream stream(name_file);
+ if (stream.is_open()) {
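+      // Header layout (cf. http://www.geomview.org/docs/html/OFF.html):
+      //   OFF  header: "OFF"  then "num_vertices num_faces num_edges"      (e.g. "7 6 0")
+      //   nOFF header: "nOFF" then "dim num_vertices num_faces num_edges"  (e.g. "2 7 6 0")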
+ if (complex_ptr->current_dimension() == 3) {
+ // OFF header
+ stream << "OFF" << std::endl;
+ // no endl on next line - don't know why...
+ stream << complex_ptr->number_of_vertices() << " " << complex_ptr->number_of_finite_full_cells() << " 0";
+ } else {
+ // nOFF header
+ stream << "nOFF" << std::endl;
+ // no endl on next line - don't know why...
+ stream << complex_ptr->current_dimension() << " " << complex_ptr->number_of_vertices() << " " <<
+ complex_ptr->number_of_finite_full_cells() << " 0";
+
+ }
+
+ // bimap to retrieve vertex handles from points and vice versa
+      // map to retrieve the vertex handle associated with each point
+ // Start to insert at default handle value
+ int vertex_handle = int();
+
+ // Points list
+ for (auto vit = complex_ptr->vertices_begin(); vit != complex_ptr->vertices_end(); ++vit) {
+ for (auto Coord = vit->point().cartesian_begin(); Coord != vit->point().cartesian_end(); ++Coord) {
+ stream << *Coord << " ";
+ }
+ stream << std::endl;
+ points_to_vh[vit->point()] = vertex_handle;
+ vertex_handle++;
+ }
+
+ for (auto cit = complex_ptr->finite_full_cells_begin(); cit != complex_ptr->finite_full_cells_end(); ++cit) {
+ std::vector<int> vertexVector;
+ stream << std::distance(cit->vertices_begin(), cit->vertices_end()) << " ";
+ for (auto vit = cit->vertices_begin(); vit != cit->vertices_end(); ++vit) {
+ stream << points_to_vh[(*vit)->point()] - 1 << " ";
+ }
+ stream << std::endl;
+ }
+ stream.close();
+ valid_ = true;
+ } else {
+ std::cerr << "Delaunay_triangulation_off_writer::Delaunay_triangulation_off_writer could not open file " <<
+ name_file << std::endl;
+ }
+ }
+
+ /** \brief Returns if the OFF write operation was successful or not.
+ *
+ * @return OFF file write status.
+ */
+ bool is_valid() const {
+ return valid_;
+ }
+
+ private:
+ /* \brief OFF file write status. */
+ bool valid_;
+};
+
+} // namespace Gudhi
+
+#endif // SRC_ALPHA_SHAPES_INCLUDE_GUDHI_ALPHA_SHAPES_DELAUNAY_TRIANGULATION_OFF_IO_H_
diff --git a/src/common/include/gudhi/Off_reader.h b/src/common/include/gudhi/Off_reader.h
index 81b9e634..e45a7600 100644
--- a/src/common/include/gudhi/Off_reader.h
+++ b/src/common/include/gudhi/Off_reader.h
@@ -34,37 +34,31 @@
namespace Gudhi {
-/**
- * Read an off file and calls a visitor methods while reading it.
- * An off file must have its first/snd line in this format :
- * OFF
- * num_vert num_faces num_edges
- *
- * A noff file must have its first/snd line in this format :
- * nOFF
- * dim num_vert num_faces num_edges
- *
- * The number of edges num_edges is optional and can be left to zero.
+/** \brief OFF file reader that forwards the file content to a visitor.
+ *
+ * The OFF file must conform to the format described here:
+ * http://www.geomview.org/docs/html/OFF.html
*/
class Off_reader {
public:
Off_reader(std::ifstream& stream) : stream_(stream) { }
- // Off_reader(const std::string& name):stream_(name){
- // if(!stream_.is_open())
- // std::cerr <<"could not open file \n";
- // }
~Off_reader() {
stream_.close();
}
- /**
- * read an off file and calls the following methods :
- * void init(int dim,int num_vertices,int num_faces,int num_edges); //num_edges may not be set
- * void point(const std::vector<double>& point);
- * void maximal_face(const std::list<int>& face);
- * void done();
- * of the visitor when reading a point or a maximal face.
+  /** \brief
+   * Reads an OFF file and calls the following methods of the visitor:
+   *
+   * <CODE>void init(int dim,int num_vertices,int num_faces,int num_edges); // from the file header - num_edges may not be set
+   *
+   * void point(const std::vector<double>& point); // for each point read
+   *
+   * void maximal_face(const std::vector<int>& face); // for each face read
+   *
+   * void done(); // when the file has been fully read</CODE>
+   *
+   * Edges are not taken into account.
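+   *
+   * A minimal visitor sketch (the struct name and bodies below are illustrative, not part of the library):
+   * \code
+   * struct Off_point_counter {
+   *   int points = 0;
+   *   void init(int dim, int num_vertices, int num_faces, int num_edges) { }
+   *   void point(const std::vector<double>& point) { points++; }
+   *   void maximal_face(const std::vector<int>& face) { }
+   *   void done() { }
+   * };
+   *
+   * std::ifstream stream("points.off");
+   * Gudhi::Off_reader off_reader(stream);
+   * Off_point_counter visitor;
+   * bool read_ok = off_reader.read(visitor);
+   * \endcode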
*/
template<typename OffVisitor>
bool read(OffVisitor& off_visitor) {
@@ -118,7 +112,7 @@ class Off_reader {
if (!goto_next_uncomment_line(line)) return false;
std::istringstream iss(line);
- if (is_off_file) {
+ if ((is_off_file) && (!is_noff_file)) {
off_info_.dim = 3;
if (!(iss >> off_info_.num_vertices >> off_info_.num_faces >> off_info_.num_edges)) {
std::cerr << "incorrect number of vertices/faces/edges\n";
@@ -126,8 +120,8 @@ class Off_reader {
}
} else {
if (!(iss >> off_info_.dim >> off_info_.num_vertices >> off_info_.num_faces >> off_info_.num_edges)) {
- std::cerr << "incorrect number of vertices/faces/edges\n";
- return false;
+ std::cerr << "incorrect number of vertices/faces/edges\n";
+ return false;
}
}
off_visitor.init(off_info_.dim, off_info_.num_vertices, off_info_.num_faces, off_info_.num_edges);
@@ -138,7 +132,7 @@ class Off_reader {
bool goto_next_uncomment_line(std::string& uncomment_line) {
uncomment_line.clear();
do
- std::getline(stream_, uncomment_line); while (uncomment_line[0] == '%'); // || uncomment_line.empty());
+ std::getline(stream_, uncomment_line); while (uncomment_line[0] == '%');
return (uncomment_line.size() > 0 && uncomment_line[0] != '%');
}
@@ -166,7 +160,7 @@ class Off_reader {
iss >> num_face_vertices;
std::vector<int> face;
face.assign(std::istream_iterator<int>(iss), std::istream_iterator<int>());
- if (face.size() != off_info_.dim) return false;
+ //if (face.size() != (off_info_.dim + 1)) return false;
visitor.maximal_face(face);
}
return true;
diff --git a/src/common/test/CMakeLists.txt b/src/common/test/CMakeLists.txt
new file mode 100644
index 00000000..a395338e
--- /dev/null
+++ b/src/common/test/CMakeLists.txt
@@ -0,0 +1,46 @@
+cmake_minimum_required(VERSION 2.6)
+project(GUDHIDelaunayTriangulationOffFileReadWriteUT)
+
+if(NOT MSVC)
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} --coverage")
+ set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} --coverage")
+ set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} --coverage")
+endif()
+
+# need CGAL 4.6
+if(CGAL_FOUND)
+ if (NOT CGAL_VERSION VERSION_LESS 4.6.0)
+ message(STATUS "CGAL version: ${CGAL_VERSION}.")
+
+ include( ${CGAL_USE_FILE} )
+
+ find_package(Eigen3 3.1.0)
+ if (EIGEN3_FOUND)
+ message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.")
+ include( ${EIGEN3_USE_FILE} )
+
+ add_executable ( dtoffrw_UT dtoffrw_unit_test.cpp )
+ target_link_libraries(dtoffrw_UT ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
+
+ # Do not forget to copy test files in current binary dir
+ file(COPY "dtoffrw_alphashapedoc_result.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+ # Do not forget to copy test files in current binary dir
+ file(COPY "${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)
+
+ # Unitary tests
+ add_test(dtoffrw_UT ${CMAKE_CURRENT_BINARY_DIR}/dtoffrw_UT
+ # XML format for Jenkins xUnit plugin
+ --log_format=XML --log_sink=${CMAKE_SOURCE_DIR}/dtoffrw_UT.xml --log_level=test_suite --report_level=no)
+
+ if (DIFF_PATH)
+ add_test(dtoffrw_diff_files_UT ${DIFF_PATH} ${CMAKE_CURRENT_BINARY_DIR}/UT.off ${CMAKE_CURRENT_BINARY_DIR}/dtoffrw_alphashapedoc_result.off)
+ endif()
+
+ else()
+ message(WARNING "Eigen3 not found. Version 3.1.0 is required for Alpha shapes feature.")
+ endif()
+ else()
+ message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile Alpha shapes feature. Version 4.6.0 is required.")
+ endif ()
+endif()
+
diff --git a/src/common/test/README b/src/common/test/README
new file mode 100644
index 00000000..f2a7eb5a
--- /dev/null
+++ b/src/common/test/README
@@ -0,0 +1,14 @@
+To compile:
+***********
+
+cmake .
+make
+
+To launch with details:
+***********************
+
+./dtoffrw_UT --report_level=detailed --log_level=all
+
+ ==> echo $? returns 0 in case of success (non-zero otherwise)
+
+
diff --git a/src/common/test/dtoffrw_alphashapedoc_result.off b/src/common/test/dtoffrw_alphashapedoc_result.off
new file mode 100644
index 00000000..13c255c6
--- /dev/null
+++ b/src/common/test/dtoffrw_alphashapedoc_result.off
@@ -0,0 +1,15 @@
+nOFF
+2 7 6 0
+1 1
+7 0
+4 6
+9 6
+0 14
+2 19
+9 17
+3 1 2 3
+3 4 3 2
+3 5 1 3
+3 5 3 7
+3 7 3 4
+3 6 5 7
diff --git a/src/common/test/dtoffrw_unit_test.cpp b/src/common/test/dtoffrw_unit_test.cpp
new file mode 100644
index 00000000..20094229
--- /dev/null
+++ b/src/common/test/dtoffrw_unit_test.cpp
@@ -0,0 +1,90 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2015 INRIA Saclay (France)
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+// to construct a Delaunay_triangulation from a OFF file
+#include "gudhi/Delaunay_triangulation_off_io.h"
+
+#include <CGAL/Delaunay_triangulation.h>
+#include <CGAL/Epick_d.h>
+
+#include <stdlib.h>
+
+#include <iostream>
+#include <string>
+
+#define BOOST_TEST_DYN_LINK
+#define BOOST_TEST_MODULE "delaunay_triangulation_off_read_write"
+#include <boost/test/unit_test.hpp>
+
+// Use dynamic_dimension_tag for the user to be able to set dimension
+typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > K;
+typedef CGAL::Delaunay_triangulation<K> T;
+
+BOOST_AUTO_TEST_CASE( Delaunay_triangulation_doc_test )
+{
+ // Read the OFF file (input file name given as parameter) and triangulates points
+ Gudhi::Delaunay_triangulation_off_reader<T> off_reader("alphacomplexdoc.off");
+ // Check the read operation was correct
+ BOOST_CHECK(off_reader.is_valid());
+
+ // Retrieve the triangulation
+ T* triangulation = off_reader.get_complex();
+ BOOST_CHECK(triangulation != nullptr);
+ // Operations on triangulation
+ BOOST_CHECK(triangulation->number_of_vertices() == 7);
+ BOOST_CHECK(triangulation->number_of_finite_full_cells() == 6);
+
+ // Write the OFF file (output file name given as parameter) with the points and triangulated cells as faces
+ Gudhi::Delaunay_triangulation_off_writer<T> off_writer("UT.off", triangulation);
+
+ // Check the write operation was correct
+ BOOST_CHECK(off_writer.is_valid());
+
+ delete triangulation;
+}
+
+BOOST_AUTO_TEST_CASE( Delaunay_triangulation_unexisting_file_read_test )
+{
+ Gudhi::Delaunay_triangulation_off_reader<T> off_reader("pouetpouet_tralala.off");
+ // Check the read operation was correct
+ BOOST_CHECK(!off_reader.is_valid());
+ T* triangulation = off_reader.get_complex();
+ BOOST_CHECK(triangulation == nullptr);
+}
+
+BOOST_AUTO_TEST_CASE( Delaunay_triangulation_unexisting_file_write_test )
+{
+ // Read the OFF file (input file name given as parameter) and triangulates points
+ Gudhi::Delaunay_triangulation_off_reader<T> off_reader("alphacomplexdoc.off");
+
+ // Retrieve the triangulation
+ T* triangulation = off_reader.get_complex();
+
+ // Write the OFF file (output file name given as parameter) with the points and triangulated cells as faces
+ Gudhi::Delaunay_triangulation_off_writer<T> off_writer("/pouetpouet_tralala/pouetpouet_tralala/pouetpouet_tralala.off", triangulation);
+
+ // Check the write operation was correct
+ BOOST_CHECK(!off_writer.is_valid());
+
+ delete triangulation;
+}
+