author     vrouvrea <vrouvrea@636b058d-ea47-450e-bf9e-a15bfbe3eedb>   2016-10-12 20:56:28 +0000
committer  vrouvrea <vrouvrea@636b058d-ea47-450e-bf9e-a15bfbe3eedb>   2016-10-12 20:56:28 +0000
commit     cbe3d808ad2b15d730cc2f307857d07c666a55e2 (patch)
tree       ae565e75c6f82e6196805c6210660cbd62407401 /src/Alpha_complex
parent     fc770aacbde64904795b9d6afdc503beb989f6ee (diff)
parent     ff39c01329748691e94a776ed10a56692e05e15e (diff)
Merge alpha_complex_create_complex branch
git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/trunk@1716 636b058d-ea47-450e-bf9e-a15bfbe3eedb
Former-commit-id: 7ad47d844b62b64ef59a493676c1aa494f2c574c
Diffstat (limited to 'src/Alpha_complex')
-rw-r--r--  src/Alpha_complex/concept/Simplicial_complex_for_alpha.h  |  89
-rw-r--r--  src/Alpha_complex/doc/Intro_alpha_complex.h                |  36
-rw-r--r--  src/Alpha_complex/doc/alpha_complex_doc.ipe                | 315
-rw-r--r--  src/Alpha_complex/doc/alpha_complex_doc.png                | bin 25554 -> 18720 bytes
-rw-r--r--  src/Alpha_complex/example/Alpha_complex_from_off.cpp       |  41
-rw-r--r--  src/Alpha_complex/example/Alpha_complex_from_points.cpp    |  38
-rw-r--r--  src/Alpha_complex/include/gudhi/Alpha_complex.h            | 232
-rw-r--r--  src/Alpha_complex/test/Alpha_complex_unit_test.cpp         | 147
8 files changed, 367 insertions, 531 deletions
diff --git a/src/Alpha_complex/concept/Simplicial_complex_for_alpha.h b/src/Alpha_complex/concept/Simplicial_complex_for_alpha.h
new file mode 100644
index 00000000..2b8bff94
--- /dev/null
+++ b/src/Alpha_complex/concept/Simplicial_complex_for_alpha.h
@@ -0,0 +1,89 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Vincent Rouvreau
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#ifndef CONCEPT_ALPHA_COMPLEX_SIMPLICIAL_COMPLEX_FOR_ALPHA_H_
+#define CONCEPT_ALPHA_COMPLEX_SIMPLICIAL_COMPLEX_FOR_ALPHA_H_
+
+namespace Gudhi {
+
+namespace alpha_complex {
+
+/** \brief The concept SimplicialComplexForAlpha describes the requirements for a type to implement a simplicial
+ * complex that can be created from an `Alpha_complex`.
+ */
+struct SimplicialComplexForAlpha {
+ /** Handle to specify a simplex. */
+ typedef unspecified Simplex_handle;
+ /** Handle to specify a vertex. Must be a non-negative integer. */
+ typedef unspecified Vertex_handle;
+ /** Handle to specify the simplex filtration value. */
+ typedef unspecified Filtration_value;
+
+ /** Returns the number of vertices in the simplicial complex. */
+ std::size_t num_vertices();
+
+ /** Sets the simplicial complex dimension. */
+ void set_dimension(int dimension);
+
+ /** Gets the 'simplex' dimension. */
+ int dimension(Simplex_handle simplex);
+
+ /** Assigns the given 'filtration' value to the 'simplex'. */
+ int assign_filtration(Simplex_handle simplex, Filtration_value filtration);
+
+ /** \brief Inserts a simplex, given by the vector of its vertices (Vertex_handle), and all its subfaces in the
+ * simplicial complex with the given 'filtration' value. */
+ void insert_simplex_and_subfaces(std::vector<Vertex_handle> const & vertex_range, Filtration_value filtration);
+
+ /** Browses the simplicial complex to make the filtration non-decreasing. */
+ void make_filtration_non_decreasing();
+
+ /** Prunes the simplicial complex above the given 'filtration' value. */
+ void prune_above_filtration(Filtration_value filtration);
+
+ /** \brief Iterator over vertices of a simplex.
+ *
+ * 'value_type' must be 'Vertex_handle'. */
+ typedef unspecified Simplex_vertex_range;
+
+ /** \brief Returns a range over vertices of a given
+ * simplex. */
+ Simplex_vertex_range simplex_vertex_range(Simplex_handle const & simplex);
+
+ /** \brief Iterator over the boundaries of the complex, in an arbitrary order.
+ *
+ * 'value_type' must be 'Simplex_handle'.*/
+ typedef unspecified Boundary_simplex_range;
+
+ /** \brief Returns a range over boundaries of a given simplex. */
+ Boundary_simplex_range boundary_simplex_range(Simplex_handle const & simplex);
+
+ /** \brief Return type of an insertion of a simplex
+ */
+ typedef unspecified Insertion_result_type;
+};
+
+} // namespace alpha_complex
+
+} // namespace Gudhi
+
+#endif // CONCEPT_ALPHA_COMPLEX_SIMPLICIAL_COMPLEX_FOR_ALPHA_H_
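`Gudhi::Simplex_tree<>` is the model of this concept used by the examples and tests below. As a minimal sketch of what the concept provides (the helper name and the inserted triangle are illustrative only, not part of the library), a generic function can operate on any model using nothing but the members listed above:

#include <gudhi/Simplex_tree.h>
#include <limits>
#include <vector>

// Hypothetical generic helper: it relies only on members required by
// SimplicialComplexForAlpha, so any model of the concept can be passed in.
template <typename SimplicialComplexForAlpha>
void insert_nan_triangle(SimplicialComplexForAlpha& complex) {
  typedef typename SimplicialComplexForAlpha::Vertex_handle Vertex_handle;
  typedef typename SimplicialComplexForAlpha::Filtration_value Filtration_value;
  complex.set_dimension(2);
  std::vector<Vertex_handle> triangle = {0, 1, 2};
  // Filtration is NaN, i.e. "unknown", as in the create complex algorithm.
  complex.insert_simplex_and_subfaces(triangle, std::numeric_limits<Filtration_value>::quiet_NaN());
}

int main() {
  Gudhi::Simplex_tree<> simplex;  // Gudhi::Simplex_tree<> models SimplicialComplexForAlpha
  insert_nan_triangle(simplex);
  return 0;
}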
diff --git a/src/Alpha_complex/doc/Intro_alpha_complex.h b/src/Alpha_complex/doc/Intro_alpha_complex.h
index f3126169..3ffdae7f 100644
--- a/src/Alpha_complex/doc/Intro_alpha_complex.h
+++ b/src/Alpha_complex/doc/Intro_alpha_complex.h
@@ -4,7 +4,7 @@
*
* Author(s): Vincent Rouvreau
*
- * Copyright (C) 2015 INRIA Saclay (France)
+ * Copyright (C) 2015 INRIA
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -39,7 +39,8 @@ namespace alpha_complex {
* Alpha_complex is a <a target="_blank" href="https://en.wikipedia.org/wiki/Simplicial_complex">simplicial complex</a>
* constructed from the finite cells of a Delaunay Triangulation.
*
- * The filtration value of each simplex is computed as the square of the circumradius of the simplex if the circumsphere is empty (the simplex is then said to be Gabriel), and as the minimum of the filtration
+ * The filtration value of each simplex is computed as the square of the circumradius of the simplex if the
+ * circumsphere is empty (the simplex is then said to be Gabriel), and as the minimum of the filtration
* values of the codimension 1 cofaces that make it not Gabriel otherwise.
*
* All simplices that have a filtration value strictly greater than a given alpha squared value are not inserted into
@@ -47,23 +48,24 @@ namespace alpha_complex {
*
* \image html "alpha_complex_representation.png" "Alpha-complex representation"
*
- * Alpha_complex is constructing a `Simplex_tree` using <a target="_blank"
+ * Alpha_complex constructs a <a target="_blank"
* href="http://doc.cgal.org/latest/Triangulation/index.html#Chapter_Triangulations">Delaunay Triangulation</a>
* \cite cgal:hdj-t-15b from <a target="_blank" href="http://www.cgal.org/">CGAL</a> (the Computational Geometry
- * Algorithms Library \cite cgal:eb-15b).
+ * Algorithms Library \cite cgal:eb-15b) and is able to create a `SimplicialComplexForAlpha`.
*
* The complex is a template class requiring an Epick_d <a target="_blank"
* href="http://doc.cgal.org/latest/Kernel_d/index.html#Chapter_dD_Geometry_Kernel">dD Geometry Kernel</a>
* \cite cgal:s-gkd-15b from CGAL as template parameter.
*
- * \remark When Alpha_complex is constructed with an infinite value of alpha, the complex is a Delaunay complex.
+ * \remark When the simplicial complex is constructed with an infinite value of alpha, the complex is a Delaunay
+ * complex.
*
* \section pointsexample Example from points
*
- * This example builds the Delaunay triangulation from the given points in a 2D static kernel, and initializes the
- * alpha complex with it.
+ * This example builds the Delaunay triangulation from the given points in a 2D static kernel, and creates a
+ * `Simplex_tree` with it.
*
- * Then, it is asked to display information about the alpha complex.
+ * Then, the program displays information about the simplicial complex.
*
* \include Alpha_complex/Alpha_complex_from_points.cpp
*
@@ -76,13 +78,15 @@ namespace alpha_complex {
*
* \include Alpha_complex/alphaoffreader_for_doc_60.txt
*
- * \section algorithm Algorithm
+ * \section createcomplexalgorithm Create complex algorithm
*
* \subsection datastructure Data structure
*
- * In order to build the alpha complex, first, a Simplex tree is built from the cells of a Delaunay Triangulation.
- * (The filtration value is set to NaN, which stands for unknown value):
- * \image html "alpha_complex_doc.png" "Simplex tree structure construction example"
+ * In order to create the simplicial complex, it is first built from the cells of the Delaunay Triangulation, with
+ * all filtration values set to NaN, which stands for unknown value.
+ *
+ * For example:
+ * \image html "alpha_complex_doc.png" "Simplicial complex structure construction example"
*
* \subsection filtrationcomputation Filtration value computation algorithm
*
@@ -129,12 +133,14 @@ namespace alpha_complex {
*
* \subsubsection nondecreasing Non decreasing filtration values
*
- * As the squared radii computed by CGAL are an approximation, it might happen that these alpha squared values do not quite define a proper filtration (i.e. non-decreasing with respect to inclusion).
- * We fix that up by calling `Simplex_tree::make_filtration_non_decreasing()`.
+ * As the squared radii computed by CGAL are an approximation, it might happen that these alpha squared values do not
+ * quite define a proper filtration (i.e. non-decreasing with respect to inclusion).
+ * We fix that up by calling `SimplicialComplexForAlpha::make_filtration_non_decreasing()`.
*
* \subsubsection pruneabove Prune above given filtration value
*
- * The simplex tree is pruned from the given maximum alpha squared value (cf. `Simplex_tree::prune_above_filtration()`).
+ * The simplex tree is pruned from the given maximum alpha squared value (cf.
+ * `SimplicialComplexForAlpha::prune_above_filtration()`).
* In the following example, the value is given by the user as argument of the program.
*
*
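For a Gabriel simplex, the filtration value described in this documentation is simply the squared radius of its circumsphere, which `Alpha_complex` obtains through the CGAL kernel functor `Compute_squared_radius_d`. A minimal sketch of that computation on its own (kernel and triangle coordinates chosen for illustration; the squared circumradius of this triangle is 2):

#include <CGAL/Epick_d.h>
#include <iostream>
#include <vector>

int main() {
  typedef CGAL::Epick_d< CGAL::Dimension_tag<2> > Kernel;
  typedef Kernel::Point_d Point;
  Kernel kernel;
  // Right triangle: the circumcenter is the midpoint of the hypotenuse,
  // so the circumradius is sqrt(2) and the filtration value would be 2.
  std::vector<Point> sigma;
  sigma.push_back(Point(0.0, 0.0));
  sigma.push_back(Point(2.0, 0.0));
  sigma.push_back(Point(0.0, 2.0));
  double alpha_value = kernel.compute_squared_radius_d_object()(sigma.begin(), sigma.end());
  std::cout << "filt(Sigma) = " << alpha_value << std::endl;  // expected: 2
  return 0;
}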
diff --git a/src/Alpha_complex/doc/alpha_complex_doc.ipe b/src/Alpha_complex/doc/alpha_complex_doc.ipe
index baf0d26a..71e5ce6c 100644
--- a/src/Alpha_complex/doc/alpha_complex_doc.ipe
+++ b/src/Alpha_complex/doc/alpha_complex_doc.ipe
@@ -1,7 +1,7 @@
<?xml version="1.0"?>
<!DOCTYPE ipe SYSTEM "ipe.dtd">
<ipe version="70107" creator="Ipe 7.1.10">
-<info created="D:20150603143945" modified="D:20160406112209"/>
+<info created="D:20150603143945" modified="D:20160921180211"/>
<ipestyle name="basic">
<symbol name="arrow/arc(spx)">
<path stroke="sym-stroke" fill="sym-stroke" pen="sym-pen">
@@ -278,35 +278,7 @@ h
320 580 l
280 660 l
</path>
-<path matrix="1 0 0 1 104.05 -60.1773" stroke="black">
-4 0 0 4 320 704 e
-</path>
-<path matrix="1 0 0 1 104.05 -60.1773" stroke="black">
-322.919 706.788 m
-317.189 701.058 l
-317.189 701.203 l
-</path>
-<path matrix="1 0 0 1 104.05 -60.1773" stroke="black">
-317.551 706.934 m
-322.629 701.058 l
-</path>
-<path matrix="1 0 0 1 50 0" stroke="black">
-240 620 m
-220 600 l
-</path>
-<path matrix="1 0 0 1 50 0" stroke="black">
-240 620 m
-220 640 l
-</path>
-<text transformations="translations" pos="180 620" stroke="black" type="label" width="97.274" height="6.926" depth="1.93" valign="baseline">Simplex tree structure</text>
-<path stroke="black">
-280 630 m
-170 630 l
-</path>
-<path stroke="black">
-280 610 m
-170 610 l
-</path>
+<text matrix="1 0 0 1 76 36" transformations="translations" pos="180 620" stroke="black" type="label" width="153.148" height="6.926" depth="1.93" valign="baseline">Simplicial complex data structure :</text>
<use matrix="1 0 0 1 -239.3 -10.1537" name="mark/fdisk(sfx)" pos="300 720" size="normal" stroke="black" fill="white"/>
<use matrix="1 0 0 1 -240 0" name="mark/fdisk(sfx)" pos="370 690" size="normal" stroke="black" fill="white"/>
<use matrix="1 0 0 1 -240 0" name="mark/fdisk(sfx)" pos="280 660" size="normal" stroke="black" fill="white"/>
@@ -314,282 +286,11 @@ h
<use matrix="1 0 0 1 -240 0" name="mark/fdisk(sfx)" pos="370 580" size="normal" stroke="black" fill="white"/>
<use matrix="1 0 0 1 -240 0" name="mark/fdisk(sfx)" pos="350 520" size="normal" stroke="black" fill="white"/>
<use matrix="1 0 0 1 -240 0" name="mark/fdisk(sfx)" pos="290 530" size="normal" stroke="black" fill="white"/>
-<text matrix="1 0 0 1 4 -96" transformations="translations" pos="304 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">2</text>
-<path matrix="1 0 0 1 4 -96" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<text matrix="1 0 0 1 24 -76" transformations="translations" pos="304 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">2</text>
-<path matrix="1 0 0 1 36 -76" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<path matrix="1 0 0 1 24 -76" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<text matrix="1 0 0 1 24 -76" transformations="translations" pos="316 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">4</text>
-<text matrix="1 0 0 1 12 -76" transformations="translations" pos="304 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">1</text>
-<path matrix="1 0 0 1 12 -76" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<path matrix="1 0 0 1 24 -96" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<text matrix="1 0 0 1 12 -96" transformations="translations" pos="316 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">4</text>
-<text matrix="1 0 0 1 64 -76" transformations="translations" pos="304 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">3</text>
-<path matrix="1 0 0 1 64 -76" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<text matrix="1 0 0 1 52 -76" transformations="translations" pos="304 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">2</text>
-<path matrix="1 0 0 1 52 -76" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<path matrix="1 0 0 1 48 -96" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<text matrix="1 0 0 1 36 -96" transformations="translations" pos="316 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">3</text>
-<text matrix="1 0 0 1 104 -76" transformations="translations" pos="304 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
-<path matrix="1 0 0 1 104 -76" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<text matrix="1 0 0 1 92 -76" transformations="translations" pos="304 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">4</text>
-<path matrix="1 0 0 1 92 -76" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<path matrix="1 0 0 1 96 -96" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<text matrix="1 0 0 1 84 -96" transformations="translations" pos="316 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
-<text matrix="1 0 0 1 148 -76" transformations="translations" pos="304 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
-<path matrix="1 0 0 1 148 -76" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<text matrix="1 0 0 1 136 -76" transformations="translations" pos="304 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">5</text>
-<path matrix="1 0 0 1 136 -76" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<path matrix="1 0 0 1 120 -76" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<text matrix="1 0 0 1 108 -76" transformations="translations" pos="316 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
-<path matrix="1 0 0 1 120 -76" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<text matrix="1 0 0 1 108 -76" transformations="translations" pos="316 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
-<path matrix="1 0 0 1 48 -96" stroke="black">
-292 716 m
-292 728 l
-316 728 l
-316 716 l
-h
-</path>
-<path matrix="1 0 0 1 48 -96" stroke="black">
-316 716 m
-316 728 l
-340 728 l
-340 716 l
-h
-</path>
-<path matrix="1 0 0 1 48 -96" stroke="black">
-340 716 m
-340 728 l
-364 728 l
-364 716 l
-h
-</path>
-<path matrix="1 0 0 1 48 -96" stroke="black">
-364 716 m
-364 728 l
-388 728 l
-388 716 l
-h
-</path>
-<path matrix="1 0 0 1 48 -96" stroke="black">
-388 716 m
-388 728 l
-412 728 l
-412 716 l
-h
-</path>
-<path matrix="1 0 0 1 48 -96" stroke="black">
-412 716 m
-412 728 l
-436 728 l
-436 716 l
-h
-</path>
-<path matrix="1 0 0 1 48 -96" stroke="black">
-436 716 m
-436 728 l
-460 728 l
-460 716 l
-h
-</path>
-<text matrix="1 0 0 1 48 -96" transformations="translations" pos="304 720" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">0</text>
-<text matrix="1 0 0 1 48 -96" transformations="translations" pos="328 720" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">1</text>
-<text matrix="1 0 0 1 48 -96" transformations="translations" pos="352 720" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">2</text>
-<text matrix="1 0 0 1 48 -96" transformations="translations" pos="376 720" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">3</text>
-<text matrix="1 0 0 1 48 -96" transformations="translations" pos="400 720" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">4</text>
-<text matrix="1 0 0 1 48 -96" transformations="translations" pos="424 720" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">5</text>
-<text matrix="1 0 0 1 48 -96" transformations="translations" pos="448 720" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
-<path matrix="1 0 0 1 -12 -96" stroke="black">
-436 708 m
-436 716 l
-</path>
-<path matrix="1 0 0 1 28 -96" stroke="black">
-364 708 m
-364 716 l
-</path>
-<path matrix="1 0 0 1 36 -96" stroke="black">
-364 688 m
-364 696 l
-</path>
-<path matrix="1 0 0 1 36 -96" stroke="black">
-320 688 m
-320 696 l
-</path>
-<path matrix="1 0 0 1 36 -96" stroke="black">
-296 708 m
-308 716 l
-308 716 l
-</path>
-<path matrix="1 0 0 1 48 -96" stroke="black">
-264 688 m
-268 696 l
-</path>
-<path matrix="1 0 0 1 40 -96" stroke="black">
-292 688 m
-292 696 l
-</path>
-<path matrix="1 0 0 1 36 -96" stroke="black">
-388 736 m
-388 728 l
-</path>
-<path stroke="black">
-372 612 m
-376 620 l
-</path>
-<path stroke="black">
-448 612 m
-448 620 l
-</path>
-<text matrix="1 0 0 1 80 -76" transformations="translations" pos="304 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">3</text>
-<path matrix="1 0 0 1 80 -76" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<path matrix="1 0 0 1 80 -96" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<text matrix="1 0 0 1 68 -96" transformations="translations" pos="316 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
-<path matrix="1 0 0 1 24 -96" stroke="black">
-364 688 m
-364 696 l
-</path>
-<path matrix="1 0 0 1 136 -96" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<text matrix="1 0 0 1 124 -96" transformations="translations" pos="316 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
-<path matrix="1 0 0 1 136 -96" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<text matrix="1 0 0 1 124 -96" transformations="translations" pos="316 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
-<path matrix="1 0 0 1 4 -116" stroke="black">
-436 708 m
-436 716 l
-</path>
-<path matrix="1 0 0 1 168 -76" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<text matrix="1 0 0 1 156 -76" transformations="translations" pos="316 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
-<path matrix="1 0 0 1 168 -76" stroke="black">
-300 688 m
-300 676 l
-312 676 l
-312 688 l
-h
-</path>
-<text matrix="1 0 0 1 156 -76" transformations="translations" pos="316 680" stroke="black" type="label" width="4.981" height="6.42" depth="0" valign="baseline">6</text>
-<path matrix="1 0 0 1 36 -96" stroke="black">
-436 708 m
-436 716 l
-</path>
+<text matrix="1 0 0 1 -20 -32" transformations="translations" pos="288 672" stroke="black" type="label" width="148.582" height="7.473" depth="2.49" valign="baseline">insert simplex and subfaces [0,1,2]</text>
+<text matrix="1 0 0 1 -20 -56" transformations="translations" pos="288 672" stroke="black" type="label" width="148.582" height="7.473" depth="2.49" valign="baseline">insert simplex and subfaces [1,2,3]</text>
+<text matrix="1 0 0 1 -20 -44" transformations="translations" pos="288 672" stroke="black" type="label" width="148.582" height="7.473" depth="2.49" valign="baseline">insert simplex and subfaces [0,2,4]</text>
+<text matrix="1 0 0 1 -20 -68" transformations="translations" pos="288 672" stroke="black" type="label" width="148.582" height="7.473" depth="2.49" valign="baseline">insert simplex and subfaces [2,3,6]</text>
+<text matrix="1 0 0 1 -20 -80" transformations="translations" pos="288 672" stroke="black" type="label" width="148.582" height="7.473" depth="2.49" valign="baseline">insert simplex and subfaces [2,4,6]</text>
+<text matrix="1 0 0 1 -20 -92" transformations="translations" pos="288 672" stroke="black" type="label" width="148.582" height="7.473" depth="2.49" valign="baseline">insert simplex and subfaces [4,5,6]</text>
</page>
</ipe>
diff --git a/src/Alpha_complex/doc/alpha_complex_doc.png b/src/Alpha_complex/doc/alpha_complex_doc.png
index 0b6201da..170bae80 100644
--- a/src/Alpha_complex/doc/alpha_complex_doc.png
+++ b/src/Alpha_complex/doc/alpha_complex_doc.png
Binary files differ
diff --git a/src/Alpha_complex/example/Alpha_complex_from_off.cpp b/src/Alpha_complex/example/Alpha_complex_from_off.cpp
index 7836d59a..32bef1cd 100644
--- a/src/Alpha_complex/example/Alpha_complex_from_off.cpp
+++ b/src/Alpha_complex/example/Alpha_complex_from_off.cpp
@@ -1,4 +1,7 @@
#include <gudhi/Alpha_complex.h>
+// to construct a simplex_tree from alpha complex
+#include <gudhi/Simplex_tree.h>
+
#include <CGAL/Epick_d.h>
#include <iostream>
@@ -21,7 +24,7 @@ int main(int argc, char **argv) {
// Init of an alpha complex from an OFF file
// ----------------------------------------------------------------------------
typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Kernel;
- Gudhi::alpha_complex::Alpha_complex<Kernel> alpha_complex_from_file(off_file_name, alpha_square_max_value);
+ Gudhi::alpha_complex::Alpha_complex<Kernel> alpha_complex_from_file(off_file_name);
std::streambuf* streambufffer;
std::ofstream ouput_file_stream;
@@ -33,23 +36,27 @@ int main(int argc, char **argv) {
streambufffer = std::cout.rdbuf();
}
- std::ostream output_stream(streambufffer);
-
- // ----------------------------------------------------------------------------
- // Display information about the alpha complex
- // ----------------------------------------------------------------------------
- output_stream << "Alpha complex is of dimension " << alpha_complex_from_file.dimension() <<
- " - " << alpha_complex_from_file.num_simplices() << " simplices - " <<
- alpha_complex_from_file.num_vertices() << " vertices." << std::endl;
-
- output_stream << "Iterator on alpha complex simplices in the filtration order, with [filtration value]:" << std::endl;
- for (auto f_simplex : alpha_complex_from_file.filtration_simplex_range()) {
- output_stream << " ( ";
- for (auto vertex : alpha_complex_from_file.simplex_vertex_range(f_simplex)) {
- output_stream << vertex << " ";
+ Gudhi::Simplex_tree<> simplex;
+ if (alpha_complex_from_file.create_complex(simplex, alpha_square_max_value)) {
+ std::ostream output_stream(streambufffer);
+
+ // ----------------------------------------------------------------------------
+ // Display information about the alpha complex
+ // ----------------------------------------------------------------------------
+ output_stream << "Alpha complex is of dimension " << simplex.dimension() <<
+ " - " << simplex.num_simplices() << " simplices - " <<
+ simplex.num_vertices() << " vertices." << std::endl;
+
+ output_stream << "Iterator on alpha complex simplices in the filtration order, with [filtration value]:" <<
+ std::endl;
+ for (auto f_simplex : simplex.filtration_simplex_range()) {
+ output_stream << " ( ";
+ for (auto vertex : simplex.simplex_vertex_range(f_simplex)) {
+ output_stream << vertex << " ";
+ }
+ output_stream << ") -> " << "[" << simplex.filtration(f_simplex) << "] ";
+ output_stream << std::endl;
}
- output_stream << ") -> " << "[" << alpha_complex_from_file.filtration(f_simplex) << "] ";
- output_stream << std::endl;
}
ouput_file_stream.close();
return 0;
diff --git a/src/Alpha_complex/example/Alpha_complex_from_points.cpp b/src/Alpha_complex/example/Alpha_complex_from_points.cpp
index 49f77276..491b7e6d 100644
--- a/src/Alpha_complex/example/Alpha_complex_from_points.cpp
+++ b/src/Alpha_complex/example/Alpha_complex_from_points.cpp
@@ -1,5 +1,8 @@
-#include <CGAL/Epick_d.h>
#include <gudhi/Alpha_complex.h>
+// to construct a simplex_tree from alpha complex
+#include <gudhi/Simplex_tree.h>
+
+#include <CGAL/Epick_d.h>
#include <iostream>
#include <string>
@@ -40,23 +43,26 @@ int main(int argc, char **argv) {
// ----------------------------------------------------------------------------
// Init of an alpha complex from the list of points
// ----------------------------------------------------------------------------
- Gudhi::alpha_complex::Alpha_complex<Kernel> alpha_complex_from_points(points, alpha_square_max_value);
+ Gudhi::alpha_complex::Alpha_complex<Kernel> alpha_complex_from_points(points);
- // ----------------------------------------------------------------------------
- // Display information about the alpha complex
- // ----------------------------------------------------------------------------
- std::cout << "Alpha complex is of dimension " << alpha_complex_from_points.dimension() <<
- " - " << alpha_complex_from_points.num_simplices() << " simplices - " <<
- alpha_complex_from_points.num_vertices() << " vertices." << std::endl;
-
- std::cout << "Iterator on alpha complex simplices in the filtration order, with [filtration value]:" << std::endl;
- for (auto f_simplex : alpha_complex_from_points.filtration_simplex_range()) {
- std::cout << " ( ";
- for (auto vertex : alpha_complex_from_points.simplex_vertex_range(f_simplex)) {
- std::cout << vertex << " ";
+ Gudhi::Simplex_tree<> simplex;
+ if (alpha_complex_from_points.create_complex(simplex, alpha_square_max_value)) {
+ // ----------------------------------------------------------------------------
+ // Display information about the alpha complex
+ // ----------------------------------------------------------------------------
+ std::cout << "Alpha complex is of dimension " << simplex.dimension() <<
+ " - " << simplex.num_simplices() << " simplices - " <<
+ simplex.num_vertices() << " vertices." << std::endl;
+
+ std::cout << "Iterator on alpha complex simplices in the filtration order, with [filtration value]:" << std::endl;
+ for (auto f_simplex : simplex.filtration_simplex_range()) {
+ std::cout << " ( ";
+ for (auto vertex : simplex.simplex_vertex_range(f_simplex)) {
+ std::cout << vertex << " ";
+ }
+ std::cout << ") -> " << "[" << simplex.filtration(f_simplex) << "] ";
+ std::cout << std::endl;
}
- std::cout << ") -> " << "[" << alpha_complex_from_points.filtration(f_simplex) << "] ";
- std::cout << std::endl;
}
return 0;
}
diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex.h b/src/Alpha_complex/include/gudhi/Alpha_complex.h
index 2c95ceb4..0c0dfc59 100644
--- a/src/Alpha_complex/include/gudhi/Alpha_complex.h
+++ b/src/Alpha_complex/include/gudhi/Alpha_complex.h
@@ -4,7 +4,7 @@
*
* Author(s): Vincent Rouvreau
*
- * Copyright (C) 2015 INRIA Saclay (France)
+ * Copyright (C) 2015 INRIA
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -23,9 +23,6 @@
#ifndef ALPHA_COMPLEX_H_
#define ALPHA_COMPLEX_H_
-// to construct a simplex_tree from Delaunay_triangulation
-#include <gudhi/graph_simplicial_complex.h>
-#include <gudhi/Simplex_tree.h>
#include <gudhi/Debug_utils.h>
// to construct Alpha_complex from a OFF file of points
#include <gudhi/Points_off_io.h>
@@ -57,9 +54,9 @@ namespace alpha_complex {
* \ingroup alpha_complex
*
* \details
- * The data structure can be constructed from a CGAL Delaunay triangulation (for more informations on CGAL Delaunay
- * triangulation, please refer to the corresponding chapter in page http://doc.cgal.org/latest/Triangulation/) or from
- * an OFF file (cf. Points_off_reader).
+ * The data structure constructs a CGAL Delaunay triangulation (for more information on CGAL Delaunay
+ * triangulation, please refer to the corresponding chapter at http://doc.cgal.org/latest/Triangulation/) from a
+ * range of points or from an OFF file (cf. Points_off_reader).
*
* Please refer to \ref alpha_complex for examples.
*
@@ -74,7 +71,7 @@ namespace alpha_complex {
*
*/
template<class Kernel = CGAL::Epick_d<CGAL::Dynamic_dimension_tag>>
-class Alpha_complex : public Simplex_tree<> {
+class Alpha_complex {
public:
// Add an int in TDS to save point index in the structure
typedef CGAL::Triangulation_data_structure<typename Kernel::Dimension,
@@ -90,13 +87,6 @@ class Alpha_complex : public Simplex_tree<> {
typedef Kernel Geom_traits;
private:
- // From Simplex_tree
- // Type required to insert into a simplex_tree (with or without subfaces).
- typedef std::vector<Vertex_handle> Vector_vertex;
-
- // Simplex_result is the type returned from simplex_tree insert function.
- typedef typename std::pair<Simplex_handle, bool> Simplex_result;
-
typedef typename Kernel::Compute_squared_radius_d Squared_Radius;
typedef typename Kernel::Side_of_bounded_sphere_d Is_Gabriel;
typedef typename Kernel::Point_dimension_d Point_Dimension;
@@ -111,7 +101,7 @@ class Alpha_complex : public Simplex_tree<> {
typedef typename Delaunay_triangulation::size_type size_type;
// Map type to switch from simplex tree vertex handle to CGAL vertex iterator.
- typedef typename std::map< Vertex_handle, CGAL_vertex_iterator > Vector_vertex_iterator;
+ typedef typename std::map< std::size_t, CGAL_vertex_iterator > Vector_vertex_iterator;
private:
/** \brief Vertex iterator vector to switch from simplex tree vertex handle to CGAL vertex iterator.
@@ -124,16 +114,15 @@ class Alpha_complex : public Simplex_tree<> {
public:
/** \brief Alpha_complex constructor from an OFF file name.
- * Uses the Delaunay_triangulation_off_reader to construct the Delaunay triangulation required to initialize
+ *
+ * Uses the Points_off_reader to construct the Delaunay triangulation required to initialize
* the Alpha_complex.
*
* Duplicate points are inserted once in the Alpha_complex. This is the reason why the vertices may be not contiguous.
*
* @param[in] off_file_name OFF file [path and] name.
- * @param[in] max_alpha_square maximum for alpha square value. Default value is +\f$\infty\f$.
*/
- Alpha_complex(const std::string& off_file_name,
- Filtration_value max_alpha_square = std::numeric_limits<Filtration_value>::infinity())
+ Alpha_complex(const std::string& off_file_name)
: triangulation_(nullptr) {
Gudhi::Points_off_reader<Point_d> off_reader(off_file_name);
if (!off_reader.is_valid()) {
@@ -141,7 +130,7 @@ class Alpha_complex : public Simplex_tree<> {
exit(-1); // ----- >>
}
- init_from_range(off_reader.get_point_cloud(), max_alpha_square);
+ init_from_range(off_reader.get_point_cloud());
}
/** \brief Alpha_complex constructor from a list of points.
@@ -149,23 +138,17 @@ class Alpha_complex : public Simplex_tree<> {
* Duplicate points are inserted once in the Alpha_complex. This is the reason why the vertices may be not contiguous.
*
* @param[in] points Range of points to triangulate. Points must be in Kernel::Point_d
- * @param[in] max_alpha_square maximum for alpha square value. Default value is +\f$\infty\f$.
*
* The type InputPointRange must be a range for which std::begin and
* std::end return input iterators on a Kernel::Point_d.
- *
- * @post Compare num_simplices with InputPointRange points number (not the same in case of duplicate points).
*/
template<typename InputPointRange >
- Alpha_complex(const InputPointRange& points,
- Filtration_value max_alpha_square = std::numeric_limits<Filtration_value>::infinity())
+ Alpha_complex(const InputPointRange& points)
: triangulation_(nullptr) {
- init_from_range(points, max_alpha_square);
+ init_from_range(points);
}
- /** \brief Alpha_complex destructor.
- *
- * @warning Deletes the Delaunay triangulation.
+ /** \brief Alpha_complex destructor deletes the Delaunay triangulation.
*/
~Alpha_complex() {
delete triangulation_;
@@ -183,71 +166,85 @@ class Alpha_complex : public Simplex_tree<> {
* @return The point found.
* @exception std::out_of_range In case vertex is not found (cf. std::vector::at).
*/
- Point_d get_point(Vertex_handle vertex) const {
+ const Point_d& get_point(std::size_t vertex) const {
return vertex_handle_to_iterator_.at(vertex)->point();
}
private:
template<typename InputPointRange >
- void init_from_range(const InputPointRange& points, Filtration_value max_alpha_square) {
+ void init_from_range(const InputPointRange& points) {
auto first = std::begin(points);
auto last = std::end(points);
- if (first != last) {
- // point_dimension function initialization
- Point_Dimension point_dimension = kernel_.point_dimension_d_object();
-
- // Delaunay triangulation is point dimension.
- triangulation_ = new Delaunay_triangulation(point_dimension(*first));
-
- std::vector<Point_d> points(first, last);
-
- // Creates a vector {0, 1, ..., N-1}
- std::vector<std::ptrdiff_t> indices(boost::counting_iterator<std::ptrdiff_t>(0),
- boost::counting_iterator<std::ptrdiff_t>(points.size()));
-
- // Sort indices considering CGAL spatial sort
- typedef CGAL::Spatial_sort_traits_adapter_d<Kernel, Point_d*> Search_traits_d;
- spatial_sort(indices.begin(), indices.end(), Search_traits_d(&(points[0])));
-
- typename Delaunay_triangulation::Full_cell_handle hint;
- for (auto index : indices) {
- typename Delaunay_triangulation::Vertex_handle pos = triangulation_->insert(points[index], hint);
- // Save index value as data to retrieve it after insertion
- pos->data() = index;
- hint = pos->full_cell();
- }
- init(max_alpha_square);
+ // point_dimension function initialization
+ Point_Dimension point_dimension = kernel_.point_dimension_d_object();
+
+ // Delaunay triangulation is point dimension.
+ triangulation_ = new Delaunay_triangulation(point_dimension(*first));
+
+ std::vector<Point_d> point_cloud(first, last);
+
+ // Creates a vector {0, 1, ..., N-1}
+ std::vector<std::ptrdiff_t> indices(boost::counting_iterator<std::ptrdiff_t>(0),
+ boost::counting_iterator<std::ptrdiff_t>(point_cloud.size()));
+
+ typedef boost::iterator_property_map<typename std::vector<Point_d>::iterator,
+ CGAL::Identity_property_map<std::ptrdiff_t>> Point_property_map;
+ typedef CGAL::Spatial_sort_traits_adapter_d<Kernel, Point_property_map> Search_traits_d;
+
+ CGAL::spatial_sort(indices.begin(), indices.end(), Search_traits_d(std::begin(point_cloud)));
+
+ typename Delaunay_triangulation::Full_cell_handle hint;
+ for (auto index : indices) {
+ typename Delaunay_triangulation::Vertex_handle pos = triangulation_->insert(point_cloud[index], hint);
+ // Save index value as data to retrieve it after insertion
+ pos->data() = index;
+ hint = pos->full_cell();
}
}
- /** \brief Initialize the Alpha_complex from the Delaunay triangulation.
+ public:
+ template <typename SimplicialComplexForAlpha>
+ bool create_complex(SimplicialComplexForAlpha& complex) {
+ typedef typename SimplicialComplexForAlpha::Filtration_value Filtration_value;
+ return create_complex(complex, std::numeric_limits<Filtration_value>::infinity());
+ }
+
+ /** \brief Inserts the Delaunay triangulation into the simplicial complex.
+ * It also computes the filtration values according to the \ref createcomplexalgorithm.
*
- * @param[in] max_alpha_square maximum for alpha square value.
+ * \tparam SimplicialComplexForAlpha must meet `SimplicialComplexForAlpha` concept.
+ *
+ * @param[in] complex SimplicialComplexForAlpha to be created.
+ * @param[in] max_alpha_square maximum for alpha square value. Default value is +\f$\infty\f$.
+ *
+ * @return true if creation succeeds, false otherwise.
*
- * @warning Delaunay triangulation must be already constructed with at least one vertex and dimension must be more
- * than 0.
+ * @pre Delaunay triangulation must already be constructed, with dimension strictly greater than 0.
+ * @pre The simplicial complex must be empty (no vertices).
*
* Initialization can be launched once.
*/
- void init(Filtration_value max_alpha_square) {
+ template <typename SimplicialComplexForAlpha, typename Filtration_value>
+ bool create_complex(SimplicialComplexForAlpha& complex, Filtration_value max_alpha_square) {
+ // From SimplicialComplexForAlpha type required to insert into a simplicial complex (with or without subfaces).
+ typedef typename SimplicialComplexForAlpha::Vertex_handle Vertex_handle;
+ typedef typename SimplicialComplexForAlpha::Simplex_handle Simplex_handle;
+ typedef std::vector<Vertex_handle> Vector_vertex;
+
if (triangulation_ == nullptr) {
- std::cerr << "Alpha_complex init - Cannot init from a NULL triangulation\n";
- return; // ----- >>
- }
- if (triangulation_->number_of_vertices() < 1) {
- std::cerr << "Alpha_complex init - Cannot init from a triangulation without vertices\n";
- return; // ----- >>
+ std::cerr << "Alpha_complex cannot create_complex from a NULL triangulation\n";
+ return false; // ----- >>
}
if (triangulation_->maximal_dimension() < 1) {
- std::cerr << "Alpha_complex init - Cannot init from a zero-dimension triangulation\n";
- return; // ----- >>
+ std::cerr << "Alpha_complex cannot create_complex from a zero-dimension triangulation\n";
+ return false; // ----- >>
}
- if (num_vertices() > 0) {
- std::cerr << "Alpha_complex init - Cannot init twice\n";
- return; // ----- >>
+ if (complex.num_vertices() > 0) {
+ std::cerr << "Alpha_complex create_complex - complex is not empty\n";
+ return false; // ----- >>
}
- set_dimension(triangulation_->maximal_dimension());
+ complex.set_dimension(triangulation_->maximal_dimension());
// --------------------------------------------------------------------------------------------
// double map to retrieve simplex tree vertex handles from CGAL vertex iterator and vice versa
@@ -264,25 +261,27 @@ class Alpha_complex : public Simplex_tree<> {
// --------------------------------------------------------------------------------------------
// Simplex_tree construction from loop on triangulation finite full cells list
- for (auto cit = triangulation_->finite_full_cells_begin(); cit != triangulation_->finite_full_cells_end(); ++cit) {
- Vector_vertex vertexVector;
+ if (triangulation_->number_of_vertices() > 0) {
+ for (auto cit = triangulation_->finite_full_cells_begin(); cit != triangulation_->finite_full_cells_end(); ++cit) {
+ Vector_vertex vertexVector;
#ifdef DEBUG_TRACES
- std::cout << "Simplex_tree insertion ";
+ std::cout << "Simplex_tree insertion ";
#endif // DEBUG_TRACES
- for (auto vit = cit->vertices_begin(); vit != cit->vertices_end(); ++vit) {
- if (*vit != nullptr) {
+ for (auto vit = cit->vertices_begin(); vit != cit->vertices_end(); ++vit) {
+ if (*vit != nullptr) {
#ifdef DEBUG_TRACES
- std::cout << " " << (*vit)->data();
+ std::cout << " " << (*vit)->data();
#endif // DEBUG_TRACES
- // Vector of vertex construction for simplex_tree structure
- vertexVector.push_back((*vit)->data());
+ // Vector of vertex construction for simplex_tree structure
+ vertexVector.push_back((*vit)->data());
+ }
}
- }
#ifdef DEBUG_TRACES
- std::cout << std::endl;
+ std::cout << std::endl;
#endif // DEBUG_TRACES
- // Insert each simplex and its subfaces in the simplex tree - filtration is NaN
- insert_simplex_and_subfaces(vertexVector, std::numeric_limits<double>::quiet_NaN());
+ // Insert each simplex and its subfaces in the simplex tree - filtration is NaN
+ complex.insert_simplex_and_subfaces(vertexVector, std::numeric_limits<double>::quiet_NaN());
+ }
}
// --------------------------------------------------------------------------------------------
@@ -290,16 +289,16 @@ class Alpha_complex : public Simplex_tree<> {
// Will be re-used many times
Vector_of_CGAL_points pointVector;
// ### For i : d -> 0
- for (int decr_dim = dimension(); decr_dim >= 0; decr_dim--) {
+ for (int decr_dim = triangulation_->maximal_dimension(); decr_dim >= 0; decr_dim--) {
// ### Foreach Sigma of dim i
- for (auto f_simplex : skeleton_simplex_range(decr_dim)) {
- int f_simplex_dim = dimension(f_simplex);
+ for (Simplex_handle f_simplex : complex.skeleton_simplex_range(decr_dim)) {
+ int f_simplex_dim = complex.dimension(f_simplex);
if (decr_dim == f_simplex_dim) {
pointVector.clear();
#ifdef DEBUG_TRACES
std::cout << "Sigma of dim " << decr_dim << " is";
#endif // DEBUG_TRACES
- for (auto vertex : simplex_vertex_range(f_simplex)) {
+ for (auto vertex : complex.simplex_vertex_range(f_simplex)) {
pointVector.push_back(get_point(vertex));
#ifdef DEBUG_TRACES
std::cout << " " << vertex;
@@ -309,7 +308,7 @@ class Alpha_complex : public Simplex_tree<> {
std::cout << std::endl;
#endif // DEBUG_TRACES
// ### If filt(Sigma) is NaN : filt(Sigma) = alpha(Sigma)
- if (std::isnan(filtration(f_simplex))) {
+ if (std::isnan(complex.filtration(f_simplex))) {
Filtration_value alpha_complex_filtration = 0.0;
// No need to compute squared_radius on a single point - alpha is 0.0
if (f_simplex_dim > 0) {
@@ -318,12 +317,12 @@ class Alpha_complex : public Simplex_tree<> {
alpha_complex_filtration = squared_radius(pointVector.begin(), pointVector.end());
}
- assign_filtration(f_simplex, alpha_complex_filtration);
+ complex.assign_filtration(f_simplex, alpha_complex_filtration);
#ifdef DEBUG_TRACES
- std::cout << "filt(Sigma) is NaN : filt(Sigma) =" << filtration(f_simplex) << std::endl;
+ std::cout << "filt(Sigma) is NaN : filt(Sigma) =" << complex.filtration(f_simplex) << std::endl;
#endif // DEBUG_TRACES
}
- propagate_alpha_filtration(f_simplex, decr_dim);
+ propagate_alpha_filtration(complex, f_simplex, decr_dim);
}
}
}
@@ -331,36 +330,41 @@ class Alpha_complex : public Simplex_tree<> {
// --------------------------------------------------------------------------------------------
// As Alpha value is an approximation, we have to make filtration non decreasing while increasing the dimension
- bool modified_filt = make_filtration_non_decreasing();
+ complex.make_filtration_non_decreasing();
// Remove all simplices that have a filtration value greater than max_alpha_square
- // Remark: prune_above_filtration does not require initialize_filtration to be done before.
- modified_filt |= prune_above_filtration(max_alpha_square);
- if (modified_filt) {
- initialize_filtration();
- }
+ complex.prune_above_filtration(max_alpha_square);
// --------------------------------------------------------------------------------------------
+ return true;
}
- template<typename Simplex_handle>
- void propagate_alpha_filtration(Simplex_handle f_simplex, int decr_dim) {
+ private:
+ template <typename SimplicialComplexForAlpha, typename Simplex_handle>
+ void propagate_alpha_filtration(SimplicialComplexForAlpha& complex, Simplex_handle f_simplex, int decr_dim) {
+ // From SimplicialComplexForAlpha type required to assign filtration values.
+ typedef typename SimplicialComplexForAlpha::Filtration_value Filtration_value;
+#ifdef DEBUG_TRACES
+ typedef typename SimplicialComplexForAlpha::Vertex_handle Vertex_handle;
+#endif // DEBUG_TRACES
+
// ### Foreach Tau face of Sigma
- for (auto f_boundary : boundary_simplex_range(f_simplex)) {
+ for (auto f_boundary : complex.boundary_simplex_range(f_simplex)) {
#ifdef DEBUG_TRACES
std::cout << " | --------------------------------------------------\n";
std::cout << " | Tau ";
- for (auto vertex : simplex_vertex_range(f_boundary)) {
+ for (auto vertex : complex.simplex_vertex_range(f_boundary)) {
std::cout << vertex << " ";
}
std::cout << "is a face of Sigma\n";
- std::cout << " | isnan(filtration(Tau)=" << std::isnan(filtration(f_boundary)) << std::endl;
+ std::cout << " | isnan(complex.filtration(Tau)=" << std::isnan(complex.filtration(f_boundary)) << std::endl;
#endif // DEBUG_TRACES
// ### If filt(Tau) is not NaN
- if (!std::isnan(filtration(f_boundary))) {
+ if (!std::isnan(complex.filtration(f_boundary))) {
// ### filt(Tau) = fmin(filt(Tau), filt(Sigma))
- Filtration_value alpha_complex_filtration = fmin(filtration(f_boundary), filtration(f_simplex));
- assign_filtration(f_boundary, alpha_complex_filtration);
+ Filtration_value alpha_complex_filtration = fmin(complex.filtration(f_boundary),
+ complex.filtration(f_simplex));
+ complex.assign_filtration(f_boundary, alpha_complex_filtration);
#ifdef DEBUG_TRACES
- std::cout << " | filt(Tau) = fmin(filt(Tau), filt(Sigma)) = " << filtration(f_boundary) << std::endl;
+ std::cout << " | filt(Tau) = fmin(filt(Tau), filt(Sigma)) = " << complex.filtration(f_boundary) << std::endl;
#endif // DEBUG_TRACES
// ### Else
} else {
@@ -372,12 +376,12 @@ class Alpha_complex : public Simplex_tree<> {
#ifdef DEBUG_TRACES
Vertex_handle vertexForGabriel = Vertex_handle();
#endif // DEBUG_TRACES
- for (auto vertex : simplex_vertex_range(f_boundary)) {
+ for (auto vertex : complex.simplex_vertex_range(f_boundary)) {
pointVector.push_back(get_point(vertex));
}
// Retrieve the Sigma point that is not part of Tau - parameter for is_gabriel function
Point_d point_for_gabriel;
- for (auto vertex : simplex_vertex_range(f_simplex)) {
+ for (auto vertex : complex.simplex_vertex_range(f_simplex)) {
point_for_gabriel = get_point(vertex);
if (std::find(pointVector.begin(), pointVector.end(), point_for_gabriel) == pointVector.end()) {
#ifdef DEBUG_TRACES
@@ -398,10 +402,10 @@ class Alpha_complex : public Simplex_tree<> {
// ### If Tau is not Gabriel of Sigma
if (false == is_gab) {
// ### filt(Tau) = filt(Sigma)
- Filtration_value alpha_complex_filtration = filtration(f_simplex);
- assign_filtration(f_boundary, alpha_complex_filtration);
+ Filtration_value alpha_complex_filtration = complex.filtration(f_simplex);
+ complex.assign_filtration(f_boundary, alpha_complex_filtration);
#ifdef DEBUG_TRACES
- std::cout << " | filt(Tau) = filt(Sigma) = " << filtration(f_boundary) << std::endl;
+ std::cout << " | filt(Tau) = filt(Sigma) = " << complex.filtration(f_boundary) << std::endl;
#endif // DEBUG_TRACES
}
}
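With this change the same `Alpha_complex` can be asked for several simplicial complexes, as the unit test below does. A minimal usage sketch of the two `create_complex` overloads (point coordinates and the 32.0 threshold are illustrative only):

#include <gudhi/Alpha_complex.h>
#include <gudhi/Simplex_tree.h>
#include <CGAL/Epick_d.h>
#include <iostream>
#include <vector>

int main() {
  typedef CGAL::Epick_d< CGAL::Dimension_tag<2> > Kernel;
  typedef Kernel::Point_d Point;
  std::vector<Point> points;
  points.push_back(Point(1.0, 1.0));
  points.push_back(Point(7.0, 0.0));
  points.push_back(Point(4.0, 6.0));
  points.push_back(Point(9.0, 6.0));

  Gudhi::alpha_complex::Alpha_complex<Kernel> alpha_complex_from_points(points);

  // Overload without a threshold: max_alpha_square defaults to +infinity,
  // so the resulting complex is the full Delaunay complex.
  Gudhi::Simplex_tree<> delaunay_complex;
  if (!alpha_complex_from_points.create_complex(delaunay_complex)) return 1;

  // Overload with a threshold: simplices with filtration value > 32.0 are pruned.
  Gudhi::Simplex_tree<> alpha_complex;
  if (!alpha_complex_from_points.create_complex(alpha_complex, 32.0)) return 1;

  std::cout << delaunay_complex.num_simplices() << " simplices without threshold, "
            << alpha_complex.num_simplices() << " with alpha²=32" << std::endl;
  return 0;
}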
diff --git a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp
index 4d7bf622..fc53eeeb 100644
--- a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp
+++ b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp
@@ -4,7 +4,7 @@
*
* Author(s): Vincent Rouvreau
*
- * Copyright (C) 2015 INRIA Saclay (France)
+ * Copyright (C) 2015 INRIA
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -33,6 +33,9 @@
#include <vector>
#include <gudhi/Alpha_complex.h>
+// to construct a simplex_tree from Delaunay_triangulation
+#include <gudhi/graph_simplicial_complex.h>
+#include <gudhi/Simplex_tree.h>
// Use dynamic_dimension_tag for the user to be able to set dimension
typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Kernel_d;
@@ -49,47 +52,35 @@ BOOST_AUTO_TEST_CASE(ALPHA_DOC_OFF_file) {
std::cout << "========== OFF FILE NAME = " << off_file_name << " - alpha²=" <<
max_alpha_square_value << "==========" << std::endl;
- Gudhi::alpha_complex::Alpha_complex<Kernel_d> alpha_complex_from_file(off_file_name, max_alpha_square_value);
+ Gudhi::alpha_complex::Alpha_complex<Kernel_d> alpha_complex_from_file(off_file_name);
- const int DIMENSION = 2;
- std::cout << "alpha_complex_from_file.dimension()=" << alpha_complex_from_file.dimension() << std::endl;
- BOOST_CHECK(alpha_complex_from_file.dimension() == DIMENSION);
+ Gudhi::Simplex_tree<> simplex_tree_60;
+ BOOST_CHECK(alpha_complex_from_file.create_complex(simplex_tree_60, max_alpha_square_value));
- const int NUMBER_OF_VERTICES = 7;
- std::cout << "alpha_complex_from_file.num_vertices()=" << alpha_complex_from_file.num_vertices() << std::endl;
- BOOST_CHECK(alpha_complex_from_file.num_vertices() == NUMBER_OF_VERTICES);
+ std::cout << "simplex_tree_60.dimension()=" << simplex_tree_60.dimension() << std::endl;
+ BOOST_CHECK(simplex_tree_60.dimension() == 2);
- const int NUMBER_OF_SIMPLICES = 25;
- std::cout << "alpha_complex_from_file.num_simplices()=" << alpha_complex_from_file.num_simplices() << std::endl;
- BOOST_CHECK(alpha_complex_from_file.num_simplices() == NUMBER_OF_SIMPLICES);
+ std::cout << "simplex_tree_60.num_vertices()=" << simplex_tree_60.num_vertices() << std::endl;
+ BOOST_CHECK(simplex_tree_60.num_vertices() == 7);
-}
+ std::cout << "simplex_tree_60.num_simplices()=" << simplex_tree_60.num_simplices() << std::endl;
+ BOOST_CHECK(simplex_tree_60.num_simplices() == 25);
-BOOST_AUTO_TEST_CASE(ALPHA_DOC_OFF_file_filtered) {
- // ----------------------------------------------------------------------------
- //
- // Init of an alpha-complex from a OFF file
- //
- // ----------------------------------------------------------------------------
- std::string off_file_name("alphacomplexdoc.off");
- double max_alpha_square_value = 59.0;
+ max_alpha_square_value = 59.0;
std::cout << "========== OFF FILE NAME = " << off_file_name << " - alpha²=" <<
max_alpha_square_value << "==========" << std::endl;
- // Use of the default dynamic kernel
- Gudhi::alpha_complex::Alpha_complex<> alpha_complex_from_file(off_file_name, max_alpha_square_value);
-
- const int DIMENSION = 2;
- std::cout << "alpha_complex_from_file.dimension()=" << alpha_complex_from_file.dimension() << std::endl;
- BOOST_CHECK(alpha_complex_from_file.dimension() == DIMENSION);
+ Gudhi::Simplex_tree<> simplex_tree_59;
+ BOOST_CHECK(alpha_complex_from_file.create_complex(simplex_tree_59, max_alpha_square_value));
+
+ std::cout << "simplex_tree_59.dimension()=" << simplex_tree_59.dimension() << std::endl;
+ BOOST_CHECK(simplex_tree_59.dimension() == 2);
- const int NUMBER_OF_VERTICES = 7;
- std::cout << "alpha_complex_from_file.num_vertices()=" << alpha_complex_from_file.num_vertices() << std::endl;
- BOOST_CHECK(alpha_complex_from_file.num_vertices() == NUMBER_OF_VERTICES);
+ std::cout << "simplex_tree_59.num_vertices()=" << simplex_tree_59.num_vertices() << std::endl;
+ BOOST_CHECK(simplex_tree_59.num_vertices() == 7);
- const int NUMBER_OF_SIMPLICES = 23;
- std::cout << "alpha_complex_from_file.num_simplices()=" << alpha_complex_from_file.num_simplices() << std::endl;
- BOOST_CHECK(alpha_complex_from_file.num_simplices() == NUMBER_OF_SIMPLICES);
+ std::cout << "simplex_tree_59.num_simplices()=" << simplex_tree_59.num_simplices() << std::endl;
+ BOOST_CHECK(simplex_tree_59.num_simplices() == 23);
}
bool are_almost_the_same(float a, float b) {
@@ -132,40 +123,43 @@ BOOST_AUTO_TEST_CASE(Alpha_complex_from_points) {
std::cout << "========== Alpha_complex_from_points ==========" << std::endl;
+ Gudhi::Simplex_tree<> simplex_tree;
+ BOOST_CHECK(alpha_complex_from_points.create_complex(simplex_tree));
+
// Another way to check num_simplices
std::cout << "Iterator on alpha complex simplices in the filtration order, with [filtration value]:" << std::endl;
int num_simplices = 0;
- for (auto f_simplex : alpha_complex_from_points.filtration_simplex_range()) {
+ for (auto f_simplex : simplex_tree.filtration_simplex_range()) {
num_simplices++;
std::cout << " ( ";
- for (auto vertex : alpha_complex_from_points.simplex_vertex_range(f_simplex)) {
+ for (auto vertex : simplex_tree.simplex_vertex_range(f_simplex)) {
std::cout << vertex << " ";
}
- std::cout << ") -> " << "[" << alpha_complex_from_points.filtration(f_simplex) << "] ";
+ std::cout << ") -> " << "[" << simplex_tree.filtration(f_simplex) << "] ";
std::cout << std::endl;
}
BOOST_CHECK(num_simplices == 15);
- std::cout << "alpha_complex_from_points.num_simplices()=" << alpha_complex_from_points.num_simplices() << std::endl;
- BOOST_CHECK(alpha_complex_from_points.num_simplices() == 15);
+ std::cout << "simplex_tree.num_simplices()=" << simplex_tree.num_simplices() << std::endl;
+ BOOST_CHECK(simplex_tree.num_simplices() == 15);
- std::cout << "alpha_complex_from_points.dimension()=" << alpha_complex_from_points.dimension() << std::endl;
- BOOST_CHECK(alpha_complex_from_points.dimension() == 4);
- std::cout << "alpha_complex_from_points.num_vertices()=" << alpha_complex_from_points.num_vertices() << std::endl;
- BOOST_CHECK(alpha_complex_from_points.num_vertices() == 4);
+ std::cout << "simplex_tree.dimension()=" << simplex_tree.dimension() << std::endl;
+ BOOST_CHECK(simplex_tree.dimension() == 4);
+ std::cout << "simplex_tree.num_vertices()=" << simplex_tree.num_vertices() << std::endl;
+ BOOST_CHECK(simplex_tree.num_vertices() == 4);
- for (auto f_simplex : alpha_complex_from_points.filtration_simplex_range()) {
- switch (alpha_complex_from_points.dimension(f_simplex)) {
+ for (auto f_simplex : simplex_tree.filtration_simplex_range()) {
+ switch (simplex_tree.dimension(f_simplex)) {
case 0:
- BOOST_CHECK(are_almost_the_same(alpha_complex_from_points.filtration(f_simplex), 0.0));
+ BOOST_CHECK(are_almost_the_same(simplex_tree.filtration(f_simplex), 0.0));
break;
case 1:
- BOOST_CHECK(are_almost_the_same(alpha_complex_from_points.filtration(f_simplex), 1.0/2.0));
+ BOOST_CHECK(are_almost_the_same(simplex_tree.filtration(f_simplex), 1.0/2.0));
break;
case 2:
- BOOST_CHECK(are_almost_the_same(alpha_complex_from_points.filtration(f_simplex), 2.0/3.0));
+ BOOST_CHECK(are_almost_the_same(simplex_tree.filtration(f_simplex), 2.0/3.0));
break;
case 3:
- BOOST_CHECK(are_almost_the_same(alpha_complex_from_points.filtration(f_simplex), 3.0/4.0));
+ BOOST_CHECK(are_almost_the_same(simplex_tree.filtration(f_simplex), 3.0/4.0));
break;
default:
BOOST_CHECK(false); // Shall not happen
@@ -199,40 +193,40 @@ BOOST_AUTO_TEST_CASE(Alpha_complex_from_points) {
BOOST_CHECK_THROW (alpha_complex_from_points.get_point(1234), std::out_of_range);
// Test after prune_above_filtration
- bool modified = alpha_complex_from_points.prune_above_filtration(0.6);
+ bool modified = simplex_tree.prune_above_filtration(0.6);
if (modified) {
- alpha_complex_from_points.initialize_filtration();
+ simplex_tree.initialize_filtration();
}
BOOST_CHECK(modified);
// Another way to check num_simplices
std::cout << "Iterator on alpha complex simplices in the filtration order, with [filtration value]:" << std::endl;
num_simplices = 0;
- for (auto f_simplex : alpha_complex_from_points.filtration_simplex_range()) {
+ for (auto f_simplex : simplex_tree.filtration_simplex_range()) {
num_simplices++;
std::cout << " ( ";
- for (auto vertex : alpha_complex_from_points.simplex_vertex_range(f_simplex)) {
+ for (auto vertex : simplex_tree.simplex_vertex_range(f_simplex)) {
std::cout << vertex << " ";
}
- std::cout << ") -> " << "[" << alpha_complex_from_points.filtration(f_simplex) << "] ";
+ std::cout << ") -> " << "[" << simplex_tree.filtration(f_simplex) << "] ";
std::cout << std::endl;
}
BOOST_CHECK(num_simplices == 10);
- std::cout << "alpha_complex_from_points.num_simplices()=" << alpha_complex_from_points.num_simplices() << std::endl;
- BOOST_CHECK(alpha_complex_from_points.num_simplices() == 10);
+ std::cout << "simplex_tree.num_simplices()=" << simplex_tree.num_simplices() << std::endl;
+ BOOST_CHECK(simplex_tree.num_simplices() == 10);
- std::cout << "alpha_complex_from_points.dimension()=" << alpha_complex_from_points.dimension() << std::endl;
- BOOST_CHECK(alpha_complex_from_points.dimension() == 4);
- std::cout << "alpha_complex_from_points.num_vertices()=" << alpha_complex_from_points.num_vertices() << std::endl;
- BOOST_CHECK(alpha_complex_from_points.num_vertices() == 4);
+ std::cout << "simplex_tree.dimension()=" << simplex_tree.dimension() << std::endl;
+ BOOST_CHECK(simplex_tree.dimension() == 4);
+ std::cout << "simplex_tree.num_vertices()=" << simplex_tree.num_vertices() << std::endl;
+ BOOST_CHECK(simplex_tree.num_vertices() == 4);
- for (auto f_simplex : alpha_complex_from_points.filtration_simplex_range()) {
- switch (alpha_complex_from_points.dimension(f_simplex)) {
+ for (auto f_simplex : simplex_tree.filtration_simplex_range()) {
+ switch (simplex_tree.dimension(f_simplex)) {
case 0:
- BOOST_CHECK(are_almost_the_same(alpha_complex_from_points.filtration(f_simplex), 0.0));
+ BOOST_CHECK(are_almost_the_same(simplex_tree.filtration(f_simplex), 0.0));
break;
case 1:
- BOOST_CHECK(are_almost_the_same(alpha_complex_from_points.filtration(f_simplex), 1.0/2.0));
+ BOOST_CHECK(are_almost_the_same(simplex_tree.filtration(f_simplex), 1.0/2.0));
break;
default:
BOOST_CHECK(false); // Shall not happen
@@ -241,3 +235,32 @@ BOOST_AUTO_TEST_CASE(Alpha_complex_from_points) {
}
}
+
+BOOST_AUTO_TEST_CASE(Alpha_complex_from_empty_points) {
+ // ----------------------------------------------------------------------------
+ // Init of a list of points
+ // ----------------------------------------------------------------------------
+ Vector_of_points points;
+
+ // ----------------------------------------------------------------------------
+ // Init of an alpha complex from the list of points
+ // ----------------------------------------------------------------------------
+ Gudhi::alpha_complex::Alpha_complex<Kernel_s> alpha_complex_from_points(points);
+
+ std::cout << "========== Alpha_complex_from_empty_points ==========" << std::endl;
+
+ Gudhi::Simplex_tree<> simplex_tree;
+ BOOST_CHECK(alpha_complex_from_points.create_complex(simplex_tree));
+
+ std::cout << "simplex_tree.num_simplices()=" << simplex_tree.num_simplices() << std::endl;
+ BOOST_CHECK(simplex_tree.num_simplices() == 0);
+
+ std::cout << "simplex_tree.dimension()=" << simplex_tree.dimension() << std::endl;
+ BOOST_CHECK(simplex_tree.dimension() == 4);
+
+ std::cout << "simplex_tree.num_vertices()=" << simplex_tree.num_vertices() << std::endl;
+ BOOST_CHECK(simplex_tree.num_vertices() == 0);
+
+ // Test to the limit
+ BOOST_CHECK_THROW (alpha_complex_from_points.get_point(0), std::out_of_range);
+}