author     vrouvrea <vrouvrea@636b058d-ea47-450e-bf9e-a15bfbe3eedb>   2016-12-01 23:11:43 +0000
committer  vrouvrea <vrouvrea@636b058d-ea47-450e-bf9e-a15bfbe3eedb>   2016-12-01 23:11:43 +0000
commit     658e2ad845801f3b2a7a349e499763d7f28a8bc9 (patch)
tree       a9434fb7cb1b0799a2f23e12d5d3d1a53229fad7
parent     fcfc9de5eb7e309c0ac309f57e26672c31bbc836 (diff)
parent     58c77ecd1c09c1cf116be44c1a7e9d18cd99970b (diff)
Merge last trunk modifications
Add Python test for subsampling
Add choose_n_farthest from OFF file

git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/ST_cythonize@1810 636b058d-ea47-450e-bf9e-a15bfbe3eedb
Former-commit-id: a23746a3dd427edc958b375f9f0ac4b4d2901fcb
-rw-r--r--  biblio/how_to_cite_gudhi.bib                              | 16
-rw-r--r--  src/Alpha_complex/include/gudhi/Alpha_complex.h           | 84
-rw-r--r--  src/Alpha_complex/test/Alpha_complex_unit_test.cpp        | 76
-rw-r--r--  src/Subsampling/include/gudhi/choose_n_farthest_points.h  | 20
-rw-r--r--  src/Subsampling/test/test_choose_n_farthest_points.cpp    | 55
-rw-r--r--  src/Tangential_complex/benchmark/CMakeLists.txt           |  2
-rw-r--r--  src/cython/CMakeLists.txt                                 |  2
-rw-r--r--  src/cython/cython/alpha_complex.pyx                       |  2
-rw-r--r--  src/cython/cython/subsampling.pyx                         | 40
-rw-r--r--  src/cython/include/Subsampling_interface.h                | 22
-rwxr-xr-x  src/cython/test/test_subsampling.py                       | 94
11 files changed, 325 insertions, 88 deletions
diff --git a/biblio/how_to_cite_gudhi.bib b/biblio/how_to_cite_gudhi.bib
index 7e1eac4f..03c05728 100644
--- a/biblio/how_to_cite_gudhi.bib
+++ b/biblio/how_to_cite_gudhi.bib
@@ -33,7 +33,7 @@
, year = 2015
}
-@incollection{gudhi:Skeleton-Blocker
+@incollection{gudhi:SkeletonBlocker
, author = "David Salinas"
, title = "Skeleton-Blocker"
, publisher = "{GUDHI Editorial Board}"
@@ -42,7 +42,7 @@
, year = 2015
}
-@incollection{gudhi:Alpha complex
+@incollection{gudhi:AlphaComplex
, author = "Vincent Rouvreau"
, title = "Alpha complex"
, publisher = "{GUDHI Editorial Board}"
@@ -51,7 +51,7 @@
, year = 2015
}
-@incollection{gudhi:Cubical complex
+@incollection{gudhi:CubicalComplex
, author = "Pawel Dlotko"
, title = "Cubical complex"
, publisher = "{GUDHI Editorial Board}"
@@ -60,7 +60,7 @@
, year = 2015
}
-@incollection{gudhi:Witness complex
+@incollection{gudhi:WitnessComplex
, author = "Siargey Kachanovich"
, title = "Witness complex"
, publisher = "{GUDHI Editorial Board}"
@@ -69,8 +69,8 @@
, year = 2015
}
-@incollection{gudhi:Subsampling
-, author = "Cl\'ement Jamin"
+@incollection{gudhi:SubSampling
+, author = "Cl\'ement Jamin, Siargey Kachanovich"
, title = "Subsampling"
, publisher = "{GUDHI Editorial Board}"
, booktitle = "{GUDHI} User and Reference Manual"
@@ -78,7 +78,7 @@
, year = 2016
}
-@incollection{gudhi:Spatial searching
+@incollection{gudhi:SpatialSearching
, author = "Cl\'ement Jamin"
, title = "Spatial searching"
, publisher = "{GUDHI Editorial Board}"
@@ -87,7 +87,7 @@
, year = 2016
}
-@incollection{gudhi:Tangential complex
+@incollection{gudhi:TangentialComplex
, author = "Cl\'ement Jamin"
, title = "Tangential complex"
, publisher = "{GUDHI Editorial Board}"
diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex.h b/src/Alpha_complex/include/gudhi/Alpha_complex.h
index f2a222c1..9d5a9bad 100644
--- a/src/Alpha_complex/include/gudhi/Alpha_complex.h
+++ b/src/Alpha_complex/include/gudhi/Alpha_complex.h
@@ -171,35 +171,58 @@ class Alpha_complex {
return vertex_handle_to_iterator_.at(vertex)->point();
}
+ /** \brief number_of_vertices returns the number of vertices (same as the number of points).
+ *
+ * @return The number of vertices.
+ */
+ const std::size_t number_of_vertices() const {
+ return vertex_handle_to_iterator_.size();
+ }
+
private:
template<typename InputPointRange >
void init_from_range(const InputPointRange& points) {
auto first = std::begin(points);
auto last = std::end(points);
- // point_dimension function initialization
- Point_Dimension point_dimension = kernel_.point_dimension_d_object();
-
- // Delaunay triangulation is point dimension.
- triangulation_ = new Delaunay_triangulation(point_dimension(*first));
-
- std::vector<Point_d> point_cloud(first, last);
-
- // Creates a vector {0, 1, ..., N-1}
- std::vector<std::ptrdiff_t> indices(boost::counting_iterator<std::ptrdiff_t>(0),
- boost::counting_iterator<std::ptrdiff_t>(point_cloud.size()));
-
- typedef boost::iterator_property_map<typename std::vector<Point_d>::iterator,
- CGAL::Identity_property_map<std::ptrdiff_t>> Point_property_map;
- typedef CGAL::Spatial_sort_traits_adapter_d<Kernel, Point_property_map> Search_traits_d;
-
- CGAL::spatial_sort(indices.begin(), indices.end(), Search_traits_d(std::begin(point_cloud)));
-
- typename Delaunay_triangulation::Full_cell_handle hint;
- for (auto index : indices) {
- typename Delaunay_triangulation::Vertex_handle pos = triangulation_->insert(point_cloud[index], hint);
- // Save index value as data to retrieve it after insertion
- pos->data() = index;
- hint = pos->full_cell();
+
+ if (first != last) {
+ // point_dimension function initialization
+ Point_Dimension point_dimension = kernel_.point_dimension_d_object();
+
+ // Delaunay triangulation is point dimension.
+ triangulation_ = new Delaunay_triangulation(point_dimension(*first));
+
+ std::vector<Point_d> point_cloud(first, last);
+
+ // Creates a vector {0, 1, ..., N-1}
+ std::vector<std::ptrdiff_t> indices(boost::counting_iterator<std::ptrdiff_t>(0),
+ boost::counting_iterator<std::ptrdiff_t>(point_cloud.size()));
+
+ typedef boost::iterator_property_map<typename std::vector<Point_d>::iterator,
+ CGAL::Identity_property_map<std::ptrdiff_t>> Point_property_map;
+ typedef CGAL::Spatial_sort_traits_adapter_d<Kernel, Point_property_map> Search_traits_d;
+
+ CGAL::spatial_sort(indices.begin(), indices.end(), Search_traits_d(std::begin(point_cloud)));
+
+ typename Delaunay_triangulation::Full_cell_handle hint;
+ for (auto index : indices) {
+ typename Delaunay_triangulation::Vertex_handle pos = triangulation_->insert(point_cloud[index], hint);
+ // Save index value as data to retrieve it after insertion
+ pos->data() = index;
+ hint = pos->full_cell();
+ }
+ // --------------------------------------------------------------------------------------------
+ // double map to retrieve simplex tree vertex handles from CGAL vertex iterator and vice versa
+ // Loop on triangulation vertices list
+ for (CGAL_vertex_iterator vit = triangulation_->vertices_begin(); vit != triangulation_->vertices_end(); ++vit) {
+ if (!triangulation_->is_infinite(*vit)) {
+#ifdef DEBUG_TRACES
+ std::cout << "Vertex insertion - " << vit->data() << " -> " << vit->point() << std::endl;
+#endif // DEBUG_TRACES
+ vertex_handle_to_iterator_.emplace(vit->data(), vit);
+ }
+ }
+ // --------------------------------------------------------------------------------------------
}
}
@@ -248,19 +271,6 @@ class Alpha_complex {
complex.set_dimension(triangulation_->maximal_dimension());
// --------------------------------------------------------------------------------------------
- // double map to retrieve simplex tree vertex handles from CGAL vertex iterator and vice versa
- // Loop on triangulation vertices list
- for (CGAL_vertex_iterator vit = triangulation_->vertices_begin(); vit != triangulation_->vertices_end(); ++vit) {
- if (!triangulation_->is_infinite(*vit)) {
-#ifdef DEBUG_TRACES
- std::cout << "Vertex insertion - " << vit->data() << " -> " << vit->point() << std::endl;
-#endif // DEBUG_TRACES
- vertex_handle_to_iterator_.emplace(vit->data(), vit);
- }
- }
- // --------------------------------------------------------------------------------------------
-
- // --------------------------------------------------------------------------------------------
// Simplex_tree construction from loop on triangulation finite full cells list
if (triangulation_->number_of_vertices() > 0) {
for (auto cit = triangulation_->finite_full_cells_begin(); cit != triangulation_->finite_full_cells_end(); ++cit) {
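To illustrate the new number_of_vertices() accessor and the empty-range guard added above, here is a minimal C++ sketch (not part of the patch; the kernel choice and point values are illustrative, mirroring the unit test in the next file):

#include <gudhi/Alpha_complex.h>
#include <gudhi/Simplex_tree.h>
#include <CGAL/Epick_d.h>
#include <iostream>
#include <vector>

typedef CGAL::Epick_d< CGAL::Dimension_tag<2> > Kernel;
typedef Kernel::Point_d Point;

int main() {
  // Small 2-dimensional point cloud
  std::vector<Point> points;
  std::vector<double> coords = { 1.0, 1.0 };
  points.push_back(Point(coords.begin(), coords.end()));
  coords = { 7.0, 0.0 };
  points.push_back(Point(coords.begin(), coords.end()));
  coords = { 4.0, 6.0 };
  points.push_back(Point(coords.begin(), coords.end()));

  Gudhi::alpha_complex::Alpha_complex<Kernel> alpha(points);
  // New accessor: same value as points.size()
  std::cout << alpha.number_of_vertices() << std::endl;

  Gudhi::Simplex_tree<> stree;
  // create_complex() now reports failure on an empty triangulation instead of asserting
  if (alpha.create_complex(stree))
    std::cout << stree.num_simplices() << std::endl;
  return 0;
}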
diff --git a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp
index fc53eeeb..7380547f 100644
--- a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp
+++ b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp
@@ -36,12 +36,17 @@
// to construct a simplex_tree from Delaunay_triangulation
#include <gudhi/graph_simplicial_complex.h>
#include <gudhi/Simplex_tree.h>
+#include <boost/mpl/list.hpp>
// Use dynamic_dimension_tag for the user to be able to set dimension
typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Kernel_d;
+// Use static dimension_tag for the user not to be able to set dimension
+typedef CGAL::Epick_d< CGAL::Dimension_tag<2> > Kernel_s;
// The triangulation uses the default instantiation of the TriangulationDataStructure template parameter
-BOOST_AUTO_TEST_CASE(ALPHA_DOC_OFF_file) {
+typedef boost::mpl::list<Kernel_d, Kernel_s> list_of_kernel_variants;
+
+BOOST_AUTO_TEST_CASE_TEMPLATE(Alpha_complex_from_OFF_file, TestedKernel, list_of_kernel_variants) {
// ----------------------------------------------------------------------------
//
// Init of an alpha-complex from a OFF file
@@ -52,7 +57,11 @@ BOOST_AUTO_TEST_CASE(ALPHA_DOC_OFF_file) {
std::cout << "========== OFF FILE NAME = " << off_file_name << " - alpha²=" <<
max_alpha_square_value << "==========" << std::endl;
- Gudhi::alpha_complex::Alpha_complex<Kernel_d> alpha_complex_from_file(off_file_name);
+ Gudhi::alpha_complex::Alpha_complex<TestedKernel> alpha_complex_from_file(off_file_name);
+
+ std::cout << "alpha_complex_from_points.number_of_vertices()=" << alpha_complex_from_file.number_of_vertices()
+ << std::endl;
+ BOOST_CHECK(alpha_complex_from_file.number_of_vertices() == 7);
Gudhi::Simplex_tree<> simplex_tree_60;
BOOST_CHECK(alpha_complex_from_file.create_complex(simplex_tree_60, max_alpha_square_value));
@@ -60,6 +69,10 @@ BOOST_AUTO_TEST_CASE(ALPHA_DOC_OFF_file) {
std::cout << "simplex_tree_60.dimension()=" << simplex_tree_60.dimension() << std::endl;
BOOST_CHECK(simplex_tree_60.dimension() == 2);
+ std::cout << "alpha_complex_from_points.number_of_vertices()=" << alpha_complex_from_file.number_of_vertices()
+ << std::endl;
+ BOOST_CHECK(alpha_complex_from_file.number_of_vertices() == 7);
+
std::cout << "simplex_tree_60.num_vertices()=" << simplex_tree_60.num_vertices() << std::endl;
BOOST_CHECK(simplex_tree_60.num_vertices() == 7);
@@ -87,13 +100,12 @@ bool are_almost_the_same(float a, float b) {
return std::fabs(a - b) < std::numeric_limits<float>::epsilon();
}
-// Use dynamic_dimension_tag for the user to be able to set dimension
-typedef CGAL::Epick_d< CGAL::Dimension_tag<4> > Kernel_s;
-typedef Kernel_s::Point_d Point;
-typedef std::vector<Point> Vector_of_points;
-
+// Use static dimension_tag for the user not to be able to set dimension
+typedef CGAL::Epick_d< CGAL::Dimension_tag<4> > Kernel_4;
+typedef Kernel_4::Point_d Point_4;
+typedef std::vector<Point_4> Vector_4_Points;
-bool is_point_in_list(Vector_of_points points_list, Point point) {
+bool is_point_in_list(Vector_4_Points points_list, Point_4 point) {
for (auto& point_in_list : points_list) {
if (point_in_list == point) {
return true; // point found
@@ -106,26 +118,30 @@ BOOST_AUTO_TEST_CASE(Alpha_complex_from_points) {
// ----------------------------------------------------------------------------
// Init of a list of points
// ----------------------------------------------------------------------------
- Vector_of_points points;
+ Vector_4_Points points;
std::vector<double> coords = { 0.0, 0.0, 0.0, 1.0 };
- points.push_back(Point(coords.begin(), coords.end()));
+ points.push_back(Point_4(coords.begin(), coords.end()));
coords = { 0.0, 0.0, 1.0, 0.0 };
- points.push_back(Point(coords.begin(), coords.end()));
+ points.push_back(Point_4(coords.begin(), coords.end()));
coords = { 0.0, 1.0, 0.0, 0.0 };
- points.push_back(Point(coords.begin(), coords.end()));
+ points.push_back(Point_4(coords.begin(), coords.end()));
coords = { 1.0, 0.0, 0.0, 0.0 };
- points.push_back(Point(coords.begin(), coords.end()));
+ points.push_back(Point_4(coords.begin(), coords.end()));
// ----------------------------------------------------------------------------
// Init of an alpha complex from the list of points
// ----------------------------------------------------------------------------
- Gudhi::alpha_complex::Alpha_complex<Kernel_s> alpha_complex_from_points(points);
+ Gudhi::alpha_complex::Alpha_complex<Kernel_4> alpha_complex_from_points(points);
std::cout << "========== Alpha_complex_from_points ==========" << std::endl;
Gudhi::Simplex_tree<> simplex_tree;
BOOST_CHECK(alpha_complex_from_points.create_complex(simplex_tree));
+ std::cout << "alpha_complex_from_points.number_of_vertices()=" << alpha_complex_from_points.number_of_vertices()
+ << std::endl;
+ BOOST_CHECK(alpha_complex_from_points.number_of_vertices() == points.size());
+
// Another way to check num_simplices
std::cout << "Iterator on alpha complex simplices in the filtration order, with [filtration value]:" << std::endl;
int num_simplices = 0;
@@ -167,22 +183,22 @@ BOOST_AUTO_TEST_CASE(Alpha_complex_from_points) {
}
}
- Point p0 = alpha_complex_from_points.get_point(0);
+ Point_4 p0 = alpha_complex_from_points.get_point(0);
std::cout << "alpha_complex_from_points.get_point(0)=" << p0 << std::endl;
BOOST_CHECK(4 == p0.dimension());
BOOST_CHECK(is_point_in_list(points, p0));
- Point p1 = alpha_complex_from_points.get_point(1);
+ Point_4 p1 = alpha_complex_from_points.get_point(1);
std::cout << "alpha_complex_from_points.get_point(1)=" << p1 << std::endl;
BOOST_CHECK(4 == p1.dimension());
BOOST_CHECK(is_point_in_list(points, p1));
- Point p2 = alpha_complex_from_points.get_point(2);
+ Point_4 p2 = alpha_complex_from_points.get_point(2);
std::cout << "alpha_complex_from_points.get_point(2)=" << p2 << std::endl;
BOOST_CHECK(4 == p2.dimension());
BOOST_CHECK(is_point_in_list(points, p2));
- Point p3 = alpha_complex_from_points.get_point(3);
+ Point_4 p3 = alpha_complex_from_points.get_point(3);
std::cout << "alpha_complex_from_points.get_point(3)=" << p3 << std::endl;
BOOST_CHECK(4 == p3.dimension());
BOOST_CHECK(is_point_in_list(points, p3));
@@ -236,31 +252,35 @@ BOOST_AUTO_TEST_CASE(Alpha_complex_from_points) {
}
-BOOST_AUTO_TEST_CASE(Alpha_complex_from_empty_points) {
+BOOST_AUTO_TEST_CASE_TEMPLATE(Alpha_complex_from_empty_points, TestedKernel, list_of_kernel_variants) {
+ std::cout << "========== Alpha_complex_from_empty_points ==========" << std::endl;
+
// ----------------------------------------------------------------------------
- // Init of a list of points
+ // Init of an empty list of points
// ----------------------------------------------------------------------------
- Vector_of_points points;
+ std::vector<typename TestedKernel::Point_d> points;
// ----------------------------------------------------------------------------
// Init of an alpha complex from the list of points
// ----------------------------------------------------------------------------
- Gudhi::alpha_complex::Alpha_complex<Kernel_s> alpha_complex_from_points(points);
+ Gudhi::alpha_complex::Alpha_complex<TestedKernel> alpha_complex_from_points(points);
- std::cout << "========== Alpha_complex_from_empty_points ==========" << std::endl;
+ // Test to the limit
+ BOOST_CHECK_THROW (alpha_complex_from_points.get_point(0), std::out_of_range);
Gudhi::Simplex_tree<> simplex_tree;
- BOOST_CHECK(alpha_complex_from_points.create_complex(simplex_tree));
+ BOOST_CHECK(!alpha_complex_from_points.create_complex(simplex_tree));
+ std::cout << "alpha_complex_from_points.number_of_vertices()=" << alpha_complex_from_points.number_of_vertices()
+ << std::endl;
+ BOOST_CHECK(alpha_complex_from_points.number_of_vertices() == points.size());
+
std::cout << "simplex_tree.num_simplices()=" << simplex_tree.num_simplices() << std::endl;
BOOST_CHECK(simplex_tree.num_simplices() == 0);
std::cout << "simplex_tree.dimension()=" << simplex_tree.dimension() << std::endl;
- BOOST_CHECK(simplex_tree.dimension() == 4);
+ BOOST_CHECK(simplex_tree.dimension() == -1);
std::cout << "simplex_tree.num_vertices()=" << simplex_tree.num_vertices() << std::endl;
BOOST_CHECK(simplex_tree.num_vertices() == 0);
-
- // Test to the limit
- BOOST_CHECK_THROW (alpha_complex_from_points.get_point(0), std::out_of_range);
}
diff --git a/src/Subsampling/include/gudhi/choose_n_farthest_points.h b/src/Subsampling/include/gudhi/choose_n_farthest_points.h
index 40c7808d..9b45c640 100644
--- a/src/Subsampling/include/gudhi/choose_n_farthest_points.h
+++ b/src/Subsampling/include/gudhi/choose_n_farthest_points.h
@@ -60,10 +60,15 @@ void choose_n_farthest_points(Kernel const &k,
std::size_t final_size,
std::size_t starting_point,
OutputIterator output_it) {
- typename Kernel::Squared_distance_d sqdist = k.squared_distance_d_object();
-
std::size_t nb_points = boost::size(input_pts);
- assert(nb_points >= final_size);
+ if (final_size > nb_points)
+ final_size = nb_points;
+
+ // Tests to the limit
+ if (final_size < 1)
+ return;
+
+ typename Kernel::Squared_distance_d sqdist = k.squared_distance_d_object();
std::size_t current_number_of_landmarks = 0; // counter for landmarks
const double infty = std::numeric_limits<double>::infinity(); // infinity (see next entry)
@@ -107,11 +112,16 @@ void choose_n_farthest_points(Kernel const& k,
Point_container const &input_pts,
unsigned final_size,
OutputIterator output_it) {
+ // Tests to the limit
+ if ((final_size < 1) || (input_pts.size() == 0))
+ return;
+
// Choose randomly the first landmark
std::random_device rd;
std::mt19937 gen(rd());
- std::uniform_int_distribution<> dis(1, 6);
- int starting_point = dis(gen);
+ std::uniform_int_distribution<> dis(0, (input_pts.size() - 1));
+ std::size_t starting_point = dis(gen);
+
choose_n_farthest_points(k, input_pts, final_size, starting_point, output_it);
}
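A minimal sketch of the relaxed preconditions above (not part of the patch; the kernel and point values are illustrative): requesting more landmarks than there are input points is now clamped to the input size instead of triggering the old assert, and a zero-size request or an empty input yields no output.

#include <gudhi/choose_n_farthest_points.h>
#include <CGAL/Epick_d.h>
#include <iterator>
#include <vector>

typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Kernel;
typedef Kernel::Point_d Point_d;

int main() {
  Kernel k;
  std::vector<Point_d> points, landmarks;
  std::vector<double> coords = { 0.0, 0.0, 0.0, 0.0 };
  points.push_back(Point_d(coords.begin(), coords.end()));

  // final_size (10) exceeds points.size() (1): clamped, a single landmark is emitted
  Gudhi::subsampling::choose_n_farthest_points(k, points, 10, std::back_inserter(landmarks));
  // landmarks.size() == 1

  // final_size == 0: returns immediately, nothing is emitted
  landmarks.clear();
  Gudhi::subsampling::choose_n_farthest_points(k, points, 0, std::back_inserter(landmarks));
  // landmarks.empty()

  // The starting landmark can still be forced with the 5-argument overload (index 0 here)
  landmarks.clear();
  Gudhi::subsampling::choose_n_farthest_points(k, points, 1, 0, std::back_inserter(landmarks));
  return 0;
}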
diff --git a/src/Subsampling/test/test_choose_n_farthest_points.cpp b/src/Subsampling/test/test_choose_n_farthest_points.cpp
index d064899a..0bc0dff4 100644
--- a/src/Subsampling/test/test_choose_n_farthest_points.cpp
+++ b/src/Subsampling/test/test_choose_n_farthest_points.cpp
@@ -39,18 +39,65 @@ typedef CGAL::Epick_d<CGAL::Dynamic_dimension_tag> K;
typedef typename K::FT FT;
typedef typename K::Point_d Point_d;
-BOOST_AUTO_TEST_CASE(test_choose_farthest_point) {
+typedef boost::mpl::list<CGAL::Epick_d<CGAL::Dynamic_dimension_tag>, CGAL::Epick_d<CGAL::Dimension_tag<4>>> list_of_tested_kernels;
+
+BOOST_AUTO_TEST_CASE_TEMPLATE(test_choose_farthest_point, Kernel, list_of_tested_kernels) {
+ typedef typename Kernel::FT FT;
+ typedef typename Kernel::Point_d Point_d;
std::vector< Point_d > points, landmarks;
// Add grid points (625 points)
for (FT i = 0; i < 5; i += 1.0)
for (FT j = 0; j < 5; j += 1.0)
for (FT k = 0; k < 5; k += 1.0)
- for (FT l = 0; l < 5; l += 1.0)
- points.push_back(Point_d(std::vector<FT>({i, j, k, l})));
+ for (FT l = 0; l < 5; l += 1.0) {
+ std::vector<FT> point({i, j, k, l});
+ points.push_back(Point_d(point.begin(), point.end()));
+ }
landmarks.clear();
- K k;
+ Kernel k;
Gudhi::subsampling::choose_n_farthest_points(k, points, 100, std::back_inserter(landmarks));
BOOST_CHECK(landmarks.size() == 100);
+ for (auto landmark : landmarks)
+ {
+ // Check all landmarks are in points
+ BOOST_CHECK(std::find (points.begin(), points.end(), landmark) != points.end());
+ }
+}
+
+BOOST_AUTO_TEST_CASE_TEMPLATE(test_choose_farthest_point_limits, Kernel, list_of_tested_kernels) {
+ typedef typename Kernel::FT FT;
+ typedef typename Kernel::Point_d Point_d;
+ std::vector< Point_d > points, landmarks;
+ landmarks.clear();
+ Kernel k;
+ // Choose -1 farthest points in an empty point cloud
+ Gudhi::subsampling::choose_n_farthest_points(k, points, -1, std::back_inserter(landmarks));
+ BOOST_CHECK(landmarks.size() == 0);
+ landmarks.clear();
+ // Choose 0 farthest points in an empty point cloud
+ Gudhi::subsampling::choose_n_farthest_points(k, points, 0, std::back_inserter(landmarks));
+ BOOST_CHECK(landmarks.size() == 0);
+ landmarks.clear();
+ // Choose 1 farthest points in an empty point cloud
+ Gudhi::subsampling::choose_n_farthest_points(k, points, 1, std::back_inserter(landmarks));
+ BOOST_CHECK(landmarks.size() == 0);
+ landmarks.clear();
+
+ std::vector<FT> point({0.0, 0.0, 0.0, 0.0});
+ points.push_back(Point_d(point.begin(), point.end()));
+ // Choose -1 farthest points in a one point cloud
+ Gudhi::subsampling::choose_n_farthest_points(k, points, -1, std::back_inserter(landmarks));
+ BOOST_CHECK(landmarks.size() == 1);
+ landmarks.clear();
+ // Choose 0 farthest points in a one point cloud
+ Gudhi::subsampling::choose_n_farthest_points(k, points, 0, std::back_inserter(landmarks));
+ BOOST_CHECK(landmarks.size() == 0);
+ landmarks.clear();
+ // Choose 1 farthest points in a one point cloud
+ Gudhi::subsampling::choose_n_farthest_points(k, points, 1, std::back_inserter(landmarks));
+ BOOST_CHECK(landmarks.size() == 1);
+ landmarks.clear();
+
}
diff --git a/src/Tangential_complex/benchmark/CMakeLists.txt b/src/Tangential_complex/benchmark/CMakeLists.txt
index a217d6e6..56dd8128 100644
--- a/src/Tangential_complex/benchmark/CMakeLists.txt
+++ b/src/Tangential_complex/benchmark/CMakeLists.txt
@@ -16,7 +16,7 @@ if(CGAL_FOUND)
if (EIGEN3_FOUND)
add_executable(Tangential_complex_benchmark benchmark_tc.cpp)
target_link_libraries(Tangential_complex_benchmark
- ${Boost_DATE_TIME_LIBRARY} ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY})
+ ${Boost_DATE_TIME_LIBRARY} ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY})
if (TBB_FOUND)
target_link_libraries(Tangential_complex_benchmark ${TBB_LIBRARIES})
endif(TBB_FOUND)
diff --git a/src/cython/CMakeLists.txt b/src/cython/CMakeLists.txt
index c2026682..998908e7 100644
--- a/src/cython/CMakeLists.txt
+++ b/src/cython/CMakeLists.txt
@@ -47,7 +47,7 @@ if(PYTHON_PATH AND CYTHON_PATH)
file(COPY include DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
file(COPY cython DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
file(COPY test DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
-
+
if (CGAL_FOUND)
if (NOT CGAL_VERSION VERSION_LESS 4.8.1)
# If CGAL_VERSION >= 4.8.1, include subsampling
diff --git a/src/cython/cython/alpha_complex.pyx b/src/cython/cython/alpha_complex.pyx
index 56cf925c..6b27594a 100644
--- a/src/cython/cython/alpha_complex.pyx
+++ b/src/cython/cython/alpha_complex.pyx
@@ -62,7 +62,7 @@ cdef class AlphaComplex:
cdef Alpha_complex_interface * thisptr
# Fake constructor that does nothing but documenting the constructor
- def __init__(self, points=None, off_file=''):
+ def __init__(self, points=[], off_file=''):
"""AlphaComplex constructor.
:param points: A list of points in d-Dimension.
diff --git a/src/cython/cython/subsampling.pyx b/src/cython/cython/subsampling.pyx
index e59e0c6a..5ca38099 100644
--- a/src/cython/cython/subsampling.pyx
+++ b/src/cython/cython/subsampling.pyx
@@ -32,6 +32,42 @@ __license__ = "GPL v3"
cdef extern from "Subsampling_interface.h" namespace "Gudhi::subsampling":
vector[vector[double]] subsampling_n_farthest_points(vector[vector[double]] points, unsigned nb_points)
+ vector[vector[double]] subsampling_n_farthest_points(vector[vector[double]] points, unsigned nb_points, unsigned starting_point)
+ vector[vector[double]] subsampling_n_farthest_points_from_file(string off_file, unsigned nb_points)
+ vector[vector[double]] subsampling_n_farthest_points_from_file(string off_file, unsigned nb_points, unsigned starting_point)
-def choose_n_farthest_points(points, nb_points):
- subsampling_n_farthest_points(points, nb_points)
+def choose_n_farthest_points(points=[], off_file='', nb_points=0, starting_point = ''):
+ """Subsample by a greedy strategy of iteratively adding the farthest point
+ from the current chosen point set to the subsampling.
+ The iteration starts with the landmark `starting point`.
+
+ :param points: The input point set.
+ :type points: vector[vector[double]].
+
+ Or
+
+ :param off_file: An OFF file style name.
+ :type off_file: string
+
+ :param nb_points: Number of points of the subsample.
+ :type nb_points: unsigned.
+ :param starting_point: The iteration starts with the landmark `starting \
+ point`, which is the index of the point to start with. If not set, this \
+ index is chosen randomly.
+ :type starting_point: unsigned.
+ :returns: The subsampled point set.
+ :rtype: vector[vector[double]]
+ """
+ if off_file is not '':
+ if os.path.isfile(off_file):
+ if starting_point is '':
+ return subsampling_n_farthest_points_from_file(off_file, nb_points)
+ else:
+ return subsampling_n_farthest_points_from_file(off_file, nb_points, starting_point)
+ else:
+ print("file " + off_file + " not found.")
+ else:
+ if starting_point is '':
+ return subsampling_n_farthest_points(points, nb_points)
+ else:
+ return subsampling_n_farthest_points(points, nb_points, starting_point)
diff --git a/src/cython/include/Subsampling_interface.h b/src/cython/include/Subsampling_interface.h
index bd37a015..12c48012 100644
--- a/src/cython/include/Subsampling_interface.h
+++ b/src/cython/include/Subsampling_interface.h
@@ -45,12 +45,32 @@ std::vector<std::vector<double>> subsampling_n_farthest_points(std::vector<std::
std::vector<std::vector<double>> landmarks;
Subsampling_dynamic_kernel k;
choose_n_farthest_points(k, points, nb_points, std::back_inserter(landmarks));
- std::cout << "output " << landmarks.size() << std::endl;
+ return landmarks;
+}
+
+std::vector<std::vector<double>> subsampling_n_farthest_points(std::vector<std::vector<double>>& points, unsigned nb_points, unsigned starting_point) {
+ std::vector<Subsampling_point_d> input, output;
+ for (auto point : points)
+ input.push_back(Subsampling_point_d(point.size(), point.begin(), point.end()));
+ std::vector<std::vector<double>> landmarks;
+ Subsampling_dynamic_kernel k;
+ choose_n_farthest_points(k, points, nb_points, starting_point, std::back_inserter(landmarks));
return landmarks;
}
+std::vector<std::vector<double>> subsampling_n_farthest_points_from_file(std::string& off_file, unsigned nb_points) {
+ Gudhi::Points_off_reader<std::vector<double>> off_reader(off_file);
+ std::vector<std::vector<double>> points = off_reader.get_point_cloud();
+ return subsampling_n_farthest_points(points, nb_points);
+}
+
+std::vector<std::vector<double>> subsampling_n_farthest_points_from_file(std::string& off_file, unsigned nb_points, unsigned starting_point) {
+ Gudhi::Points_off_reader<std::vector<double>> off_reader(off_file);
+ std::vector<std::vector<double>> points = off_reader.get_point_cloud();
+ return subsampling_n_farthest_points(points, nb_points, starting_point);
+}
} // namespace subsampling
} // namespace Gudhi
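For reference, a short sketch of how the two new interface overloads are meant to be driven (hypothetical C++ caller, not part of the patch; the include path and the n_farthest.off file name, which matches the file written by the Python test below, are assumptions):

#include <Subsampling_interface.h>  // header lives in src/cython/include
#include <iostream>
#include <string>
#include <vector>

int main() {
  // In-memory variant with an explicit starting landmark (index 0)
  std::vector<std::vector<double>> points = { {0., 1.}, {0., 0.}, {1., 0.}, {1., 1.} };
  std::vector<std::vector<double>> landmarks =
      Gudhi::subsampling::subsampling_n_farthest_points(points, 2, 0);
  std::cout << landmarks.size() << std::endl;  // 2

  // OFF file variant, starting point chosen randomly
  std::string off_file("n_farthest.off");
  landmarks = Gudhi::subsampling::subsampling_n_farthest_points_from_file(off_file, 3);
  std::cout << landmarks.size() << std::endl;  // 3
  return 0;
}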
diff --git a/src/cython/test/test_subsampling.py b/src/cython/test/test_subsampling.py
new file mode 100755
index 00000000..e5f2d70a
--- /dev/null
+++ b/src/cython/test/test_subsampling.py
@@ -0,0 +1,94 @@
+import gudhi
+import os
+
+"""This file is part of the Gudhi Library. The Gudhi library
+ (Geometric Understanding in Higher Dimensions) is a generic C++
+ library for computational topology.
+
+ Author(s): Vincent Rouvreau
+
+ Copyright (C) 2016 INRIA
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+
+__author__ = "Vincent Rouvreau"
+__copyright__ = "Copyright (C) 2016 INRIA"
+__license__ = "GPL v3"
+
+
+def test_write_off_file_for_tests():
+ file = open("n_farthest.off", "w")
+ file.write("nOFF\n")
+ file.write("2 7 0 0\n")
+ file.write("1.0 1.0\n")
+ file.write("7.0 0.0\n")
+ file.write("4.0 6.0\n")
+ file.write("9.0 6.0\n")
+ file.write("0.0 14.0\n")
+ file.write("2.0 19.0\n")
+ file.write("9.0 17.0\n")
+ file.close()
+
+def test_simple_choose_n_farthest_points_with_a_starting_point():
+ point_set = [[0,1], [0,0], [1,0], [1,1]]
+ i = 0
+ for point in point_set:
+ # The iteration starts with the given starting point
+ sub_set = gudhi.choose_n_farthest_points(points = point_set, nb_points = 1, starting_point = i)
+ assert sub_set[0] == point_set[i]
+ i = i + 1
+
+ # The iteration then finds the farthest point
+ sub_set = gudhi.choose_n_farthest_points(points = point_set, nb_points = 2, starting_point = 1)
+ assert sub_set[1] == point_set[3]
+ sub_set = gudhi.choose_n_farthest_points(points = point_set, nb_points = 2, starting_point = 3)
+ assert sub_set[1] == point_set[1]
+ sub_set = gudhi.choose_n_farthest_points(points = point_set, nb_points = 2, starting_point = 0)
+ assert sub_set[1] == point_set[2]
+ sub_set = gudhi.choose_n_farthest_points(points = point_set, nb_points = 2, starting_point = 2)
+ assert sub_set[1] == point_set[0]
+
+ # Test the limits
+ assert gudhi.choose_n_farthest_points(points = [], nb_points = 0, starting_point = 0) == []
+ assert gudhi.choose_n_farthest_points(points = [], nb_points = 1, starting_point = 0) == []
+ assert gudhi.choose_n_farthest_points(points = [], nb_points = 0, starting_point = 1) == []
+ assert gudhi.choose_n_farthest_points(points = [], nb_points = 1, starting_point = 1) == []
+
+ print(os.getcwd())
+ # From off file test
+ for i in range (0, 7):
+ assert len(gudhi.choose_n_farthest_points(off_file = 'n_farthest.off', nb_points = i, starting_point = i)) == i
+
+def test_simple_choose_n_farthest_points_randomed():
+ point_set = [[0,1], [0,0], [1,0], [1,1]]
+
+ # Test the limits
+ assert gudhi.choose_n_farthest_points(points = [], nb_points = 0) == []
+ assert gudhi.choose_n_farthest_points(points = [], nb_points = 1) == []
+ assert gudhi.choose_n_farthest_points(points = point_set, nb_points = 0) == []
+ # Go further than the point set size on purpose
+ for iter in range(1,10):
+ sub_set = gudhi.choose_n_farthest_points(points = point_set, nb_points = iter)
+ for sub in sub_set:
+ found = False
+ for point in point_set:
+ if point == sub:
+ found = True
+ assert found == True
+
+ print(os.getcwd())
+ # From off file test
+ for i in range (0, 7):
+ assert len(gudhi.choose_n_farthest_points(off_file = 'n_farthest.off', nb_points = i)) == i