From 24ed9e6896b0aafcaeaf19e5d2970915282fa7b7 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Fri, 12 Jun 2015 10:47:38 +0000 Subject: delaunay off reader/writer fix. alpha complex algo seems ok. tests are Nok. git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/alphashapes@611 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 37cea3b33d783a77a23fa3a54a1bee106508d981 --- src/common/include/gudhi/Off_reader.h | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'src/common') diff --git a/src/common/include/gudhi/Off_reader.h b/src/common/include/gudhi/Off_reader.h index e29218d8..618d1b4d 100644 --- a/src/common/include/gudhi/Off_reader.h +++ b/src/common/include/gudhi/Off_reader.h @@ -7,7 +7,7 @@ * * Author(s): David Salinas * - * Copyright (C) 2014 INRIA Sophia Antipolis-Méditerranée (France) + * Copyright (C) 2014 INRIA Sophia Antipolis-Méditerranée (France) * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by @@ -121,7 +121,7 @@ private: if(!goto_next_uncomment_line(line)) return false; std::istringstream iss(line); - if(is_off_file){ + if((is_off_file) && (!is_noff_file)) { off_info_.dim = 3; if(!(iss >> off_info_.num_vertices >> off_info_.num_faces >> off_info_.num_edges)){ std::cerr << "incorrect number of vertices/faces/edges\n"; -- cgit v1.2.3 From 56e89b6b7666dec86a70f6a30f08ef8b7960eb21 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Thu, 18 Jun 2015 14:21:31 +0000 Subject: Moved alphashapedoc.off in data/points Moved Delaunay triangulation OFF files read and write in src/common Delaunay triangulation OFF files read and write documentation, examples and tests git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/alphashapes@623 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: e03902736a79436e97dbf77a88504f3faa8bd9c6 --- CMakeLists.txt | 9 +- data/points/alphashapedoc.off | 10 + src/Alpha_complex/example/CMakeLists.txt | 9 +- .../example/Delaunay_triangulation_off_rw.cpp | 79 ------ .../Simplex_tree_from_delaunay_triangulation.cpp | 27 +- src/Alpha_complex/example/alphashapedoc.off | 10 - src/Alpha_complex/include/gudhi/Alpha_complex.h | 70 +++-- .../Alpha_shapes/Delaunay_triangulation_off_io.h | 210 -------------- src/Alpha_complex/test/Alpha_complex_unit_test.cpp | 71 ++--- src/Alpha_complex/test/CMakeLists.txt | 2 +- src/Alpha_complex/test/README | 2 +- src/Alpha_complex/test/S4_100.off | 4 +- src/Alpha_complex/test/S8_10.off | 4 +- src/CMakeLists.txt | 1 + src/Doxyfile | 3 +- src/common/example/CMakeLists.txt | 26 ++ .../example/Delaunay_triangulation_off_rw.cpp | 55 ++++ .../include/gudhi/Delaunay_triangulation_off_io.h | 308 +++++++++++++++++++++ src/common/include/gudhi/Off_reader.h | 291 ++++++++++--------- src/common/test/CMakeLists.txt | 44 +++ src/common/test/README | 14 + src/common/test/dtoffrw_alphashapedoc_result.off | 15 + src/common/test/dtoffrw_alphashapedoc_result.txt | 3 + src/common/test/dtoffrw_unit_test.cpp | 91 ++++++ 24 files changed, 790 insertions(+), 568 deletions(-) create mode 100755 data/points/alphashapedoc.off delete mode 100644 src/Alpha_complex/example/Delaunay_triangulation_off_rw.cpp delete mode 100755 src/Alpha_complex/example/alphashapedoc.off delete mode 100644 src/Alpha_complex/include/gudhi/Alpha_shapes/Delaunay_triangulation_off_io.h create mode 100644 src/common/example/CMakeLists.txt create mode 100644 src/common/example/Delaunay_triangulation_off_rw.cpp create mode 100644 
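Note on the Off_reader fix in the first patch above: both header flags are computed with std::string::find, and "OFF" is a substring of "nOFF", so a plain is_off_file test would also fire on an nOFF header and misparse its leading dimension field as a vertex count. The following standalone sketch only illustrates that distinction; it is not taken from the GUDHI sources, and the variable names mirror the patch but the program itself is hypothetical.

#include <iostream>
#include <string>

int main() {
  std::string header = "nOFF";
  // "OFF" is found inside "nOFF", so both flags are true for an nOFF header.
  bool is_off_file = (header.find("OFF") != std::string::npos);    // true
  bool is_noff_file = (header.find("nOFF") != std::string::npos);  // true
  if (is_off_file && !is_noff_file) {
    // Plain OFF: dimension is implicitly 3, the counts line is "num_vertices num_faces num_edges".
    std::cout << "OFF header, dimension 3" << std::endl;
  } else if (is_noff_file) {
    // nOFF: the counts line starts with the explicit dimension.
    std::cout << "nOFF header, dimension read from the file" << std::endl;
  }
  return 0;
}
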
src/common/include/gudhi/Delaunay_triangulation_off_io.h create mode 100644 src/common/test/CMakeLists.txt create mode 100644 src/common/test/README create mode 100644 src/common/test/dtoffrw_alphashapedoc_result.off create mode 100644 src/common/test/dtoffrw_alphashapedoc_result.txt create mode 100644 src/common/test/dtoffrw_unit_test.cpp (limited to 'src/common') diff --git a/CMakeLists.txt b/CMakeLists.txt index 01108db9..86b4e2b6 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -42,6 +42,11 @@ if (PYTHON_PATH) message("python found in ${PYTHON_PATH}") endif() +FIND_PROGRAM( DIFF_PATH diff ) +if (DIFF_PATH) + message("diff found in ${DIFF_PATH}") +endif() + # Function to add_test cpplint on each header file of the Gudhi module function(cpplint_add_tests the_directory) if (PYTHON_PATH) @@ -77,9 +82,11 @@ else() include_directories(src/Simplex_tree/include/) include_directories(src/Skeleton_blocker/include/) + add_subdirectory(src/common/example) + add_subdirectory(src/common/test) add_subdirectory(src/Simplex_tree/test) add_subdirectory(src/Simplex_tree/example) - add_subdirectory(src/Persistent_cohomology/test) + #add_subdirectory(src/Persistent_cohomology/test) add_subdirectory(src/Persistent_cohomology/example) add_subdirectory(src/Skeleton_blocker/test) add_subdirectory(src/Skeleton_blocker/example) diff --git a/data/points/alphashapedoc.off b/data/points/alphashapedoc.off new file mode 100755 index 00000000..bb790193 --- /dev/null +++ b/data/points/alphashapedoc.off @@ -0,0 +1,10 @@ +nOFF +2 7 0 0 +1.0 1.0 +7.0 0.0 +4.0 6.0 +9.0 6.0 +0.0 14.0 +2.0 19.0 +9.0 17.0 + diff --git a/src/Alpha_complex/example/CMakeLists.txt b/src/Alpha_complex/example/CMakeLists.txt index 258def49..9129fdcf 100644 --- a/src/Alpha_complex/example/CMakeLists.txt +++ b/src/Alpha_complex/example/CMakeLists.txt @@ -14,17 +14,10 @@ if(CGAL_FOUND) message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") include( ${EIGEN3_USE_FILE} ) - add_definitions(-DDEBUG_TRACES) - - add_executable ( dtoffrw Delaunay_triangulation_off_rw.cpp ) - target_link_libraries(dtoffrw ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) - add_test(dtoffrw_tore3D ${CMAKE_CURRENT_BINARY_DIR}/dtoffrw ${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.off 3) - + #add_definitions(-DDEBUG_TRACES) add_executable ( stfromdt Simplex_tree_from_delaunay_triangulation.cpp ) target_link_libraries(stfromdt ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) - add_executable ( template_off template_off.cpp ) - target_link_libraries(template_off ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) else() message(WARNING "Eigen3 not found. Version 3.1.0 is required for Alpha shapes feature.") endif() diff --git a/src/Alpha_complex/example/Delaunay_triangulation_off_rw.cpp b/src/Alpha_complex/example/Delaunay_triangulation_off_rw.cpp deleted file mode 100644 index 405b3cb9..00000000 --- a/src/Alpha_complex/example/Delaunay_triangulation_off_rw.cpp +++ /dev/null @@ -1,79 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Vincent Rouvreau - * - * Copyright (C) 2014 INRIA Saclay (France) - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - -// to construct a Delaunay_triangulation from a OFF file -#include "gudhi/Alpha_shapes/Delaunay_triangulation_off_io.h" - -#include -#include -#include -#include -#include - -#include -#include -#include - -#include -#include -#include - -// Use dynamic_dimension_tag for the user to be able to set dimension -typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > K; -typedef CGAL::Delaunay_triangulation T; -// The triangulation uses the default instanciation of the -// TriangulationDataStructure template parameter - -void usage(char * const progName) { - std::cerr << "Usage: " << progName << " inputFile.off dimension outputFile.off" << std::endl; - exit(-1); // ----- >> -} - -int main(int argc, char **argv) { - if (argc != 4) { - std::cerr << "Error: Number of arguments (" << argc << ") is not correct" << std::endl; - usage(argv[0]); - } - - int dimension = 0; - int returnedScanValue = sscanf(argv[2], "%d", &dimension); - if ((returnedScanValue == EOF) || (dimension <= 0)) { - std::cerr << "Error: " << argv[2] << " is not correct" << std::endl; - usage(argv[0]); - } - - T dt(dimension); - std::string offFileName(argv[1]); - Gudhi::alphacomplex::Delaunay_triangulation_off_reader off_reader(offFileName, dt); - if (!off_reader.is_valid()) { - std::cerr << "Unable to read file " << offFileName << std::endl; - exit(-1); // ----- >> - } - - std::cout << "number_of_finite_full_cells= " << dt.number_of_finite_full_cells() << std::endl; - - std::string outFileName(argv[3]); - std::string offOutputFile(outFileName); - Gudhi::alphacomplex::Delaunay_triangulation_off_writer off_writer(offOutputFile, dt); - - return 0; -} \ No newline at end of file diff --git a/src/Alpha_complex/example/Simplex_tree_from_delaunay_triangulation.cpp b/src/Alpha_complex/example/Simplex_tree_from_delaunay_triangulation.cpp index f09e6121..1523372a 100644 --- a/src/Alpha_complex/example/Simplex_tree_from_delaunay_triangulation.cpp +++ b/src/Alpha_complex/example/Simplex_tree_from_delaunay_triangulation.cpp @@ -21,19 +21,13 @@ */ // to construct a Delaunay_triangulation from a OFF file -#include "gudhi/Alpha_shapes/Delaunay_triangulation_off_io.h" +#include "gudhi/Delaunay_triangulation_off_io.h" #include "gudhi/Alpha_complex.h" // to construct a simplex_tree from Delaunay_triangulation #include "gudhi/graph_simplicial_complex.h" #include "gudhi/Simplex_tree.h" -#include -#include -#include -#include -#include - #include #include @@ -41,12 +35,6 @@ #include #include -// Use dynamic_dimension_tag for the user to be able to set dimension -typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > K; -typedef CGAL::Delaunay_triangulation T; -// The triangulation uses the default instanciation of the -// TriangulationDataStructure template parameter - void usage(char * const progName) { std::cerr << "Usage: " << progName << " filename.off" << std::endl; exit(-1); // ----- >> @@ -62,7 +50,7 @@ int main(int argc, char **argv) { // ---------------------------------------------------------------------------- // - // Init of an alpha-complex from a OFF file + // Init of an alpha-complex from an OFF file // // 
---------------------------------------------------------------------------- Gudhi::alphacomplex::Alpha_complex alpha_complex_from_file(off_file_name); @@ -71,7 +59,14 @@ int main(int argc, char **argv) { std::cout << "alpha_complex_from_file.filtration()=" << alpha_complex_from_file.filtration() << std::endl; std::cout << "alpha_complex_from_file.num_simplices()=" << alpha_complex_from_file.num_simplices() << std::endl; std::cout << "alpha_complex_from_file.num_vertices()=" << alpha_complex_from_file.num_vertices() << std::endl; - std::cout << alpha_complex_from_file << std::endl; - + + std::cout << "Iterator on Simplices in the filtration order, with [filtration value]:" << std::endl; + for (auto f_simplex : alpha_complex_from_file.filtration_simplex_range()) { + std::cout << " " << "[" << alpha_complex_from_file.filtration(f_simplex) << "] "; + for (auto vertex : alpha_complex_from_file.simplex_vertex_range(f_simplex)) { + std::cout << vertex << " "; + } + std::cout << std::endl; + } return 0; } \ No newline at end of file diff --git a/src/Alpha_complex/example/alphashapedoc.off b/src/Alpha_complex/example/alphashapedoc.off deleted file mode 100755 index bb790193..00000000 --- a/src/Alpha_complex/example/alphashapedoc.off +++ /dev/null @@ -1,10 +0,0 @@ -nOFF -2 7 0 0 -1.0 1.0 -7.0 0.0 -4.0 6.0 -9.0 6.0 -0.0 14.0 -2.0 19.0 -9.0 17.0 - diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex.h b/src/Alpha_complex/include/gudhi/Alpha_complex.h index ca84d6d9..d25c05cb 100644 --- a/src/Alpha_complex/include/gudhi/Alpha_complex.h +++ b/src/Alpha_complex/include/gudhi/Alpha_complex.h @@ -23,9 +23,6 @@ #ifndef SRC_ALPHA_SHAPES_INCLUDE_GUDHI_ALPHA_SHAPES_H_ #define SRC_ALPHA_SHAPES_INCLUDE_GUDHI_ALPHA_SHAPES_H_ -// to construct a Delaunay_triangulation from a OFF file -#include - // to construct a simplex_tree from Delaunay_triangulation #include #include @@ -46,8 +43,7 @@ #include #include #include -#include -#include +#include // NaN namespace Gudhi { @@ -82,59 +78,59 @@ class Alpha_complex { private: // From Simplex_tree /** \brief Type required to insert into a simplex_tree (with or without subfaces).*/ - typedef std::vector typeVectorVertex; + typedef std::vector Vector_vertex; + /** \brief Simplex_handle type from simplex_tree.*/ typedef typename Gudhi::Simplex_tree<>::Simplex_handle Simplex_handle; + /** \brief Simplex_result is the type returned from simplex_tree insert function.*/ typedef typename std::pair Simplex_result; + /** \brief Filtration_simplex_range type from simplex_tree.*/ + typedef typename Gudhi::Simplex_tree<>::Filtration_simplex_range Filtration_simplex_range; + + /** \brief Simplex_vertex_range type from simplex_tree.*/ + typedef typename Gudhi::Simplex_tree<>::Simplex_vertex_range Simplex_vertex_range; + // From CGAL - /** \brief Kernel for the Delaunay_triangulation-> - * Dimension can be set dynamically. - */ + /** \brief Kernel for the Delaunay_triangulation. Dimension can be set dynamically.*/ typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Kernel; - /** \brief Delaunay_triangulation type required to create an alpha-complex. 
- */ + /** \brief Delaunay_triangulation type required to create an alpha-complex.*/ typedef CGAL::Delaunay_triangulation Delaunay_triangulation; typedef typename Kernel::Compute_squared_radius_d Squared_Radius; typedef typename Kernel::Side_of_bounded_sphere_d Is_Gabriel; - /** \brief Type required to insert into a simplex_tree (with or without subfaces).*/ + /** \brief Type required to compute squared radius, or side of bounded sphere on a vector of points.*/ typedef std::vector Vector_of_CGAL_points; + /** \brief Vertex_iterator type from CGAL.*/ typedef Delaunay_triangulation::Vertex_iterator CGAL_vertex_iterator; + /** \brief Boost bimap type to switch from CGAL vertex iterator to simplex tree vertex handle and vice versa.*/ typedef boost::bimap< CGAL_vertex_iterator, Vertex_handle > Bimap_vertex; private: - /** \brief Upper bound on the simplex tree of the simplicial complex.*/ + /** \brief Alpha complex is represented internally by a simplex tree.*/ Gudhi::Simplex_tree<> st_; + /** \brief Boost bimap to switch from CGAL vertex iterator to simplex tree vertex handle and vice versa.*/ Bimap_vertex cgal_simplextree; + /** \brief Pointer on the CGAL Delaunay triangulation.*/ Delaunay_triangulation* triangulation; public: - Alpha_complex(std::string& off_file_name) : triangulation(nullptr) { -#ifdef DEBUG_TRACES - char buffer[256]={0}; - sprintf(buffer,"%p", triangulation); - std::cout << "pointer=" << buffer << std::endl; -#endif // DEBUG_TRACES - Gudhi::alphacomplex::Delaunay_triangulation_off_reader off_reader(off_file_name); + Gudhi::Delaunay_triangulation_off_reader off_reader(off_file_name); if (!off_reader.is_valid()) { std::cerr << "Unable to read file " << off_file_name << std::endl; exit(-1); // ----- >> } triangulation = off_reader.get_complex(); -#ifdef DEBUG_TRACES - //char buffer[256]={0}; - sprintf(buffer,"%p", triangulation); - std::cout << "pointer=" << buffer << std::endl; - std::cout << "number of vertices=" << triangulation->number_of_vertices() << std::endl; - std::cout << "number of full cells=" << triangulation->number_of_full_cells() << std::endl; - std::cout << "number of finite full cells=" << triangulation->number_of_finite_full_cells() << std::endl; -#endif // DEBUG_TRACES + init(); + } + + Alpha_complex(Delaunay_triangulation* triangulation_ptr) + : triangulation(triangulation_ptr) { init(); } @@ -142,6 +138,21 @@ class Alpha_complex { delete triangulation; } + Filtration_simplex_range filtration_simplex_range() { + return st_.filtration_simplex_range(); + } + + Simplex_vertex_range simplex_vertex_range(Simplex_handle sh) { + return st_.simplex_vertex_range(sh); + } + + /** \brief Returns the filtration value of a simplex. + * + * Called on the null_simplex, returns INFINITY. 
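The Bimap_vertex typedef above acts as a two-way dictionary between CGAL vertex iterators and simplex tree vertex handles. A minimal illustrative sketch of that kind of two-way lookup with boost::bimap is given below; it is not taken from the GUDHI sources, and it uses plain std::string / int sides in place of the CGAL and simplex tree types.

#include <boost/bimap.hpp>
#include <iostream>
#include <string>

int main() {
  typedef boost::bimap<std::string, int> Bimap_vertex;  // stand-in for <CGAL_vertex_iterator, Vertex_handle>
  Bimap_vertex vertex_map;
  vertex_map.insert(Bimap_vertex::value_type("cgal_vertex_0", 0));
  vertex_map.insert(Bimap_vertex::value_type("cgal_vertex_1", 1));
  // Left view: CGAL-side key -> simplex tree vertex handle.
  std::cout << vertex_map.left.find("cgal_vertex_1")->second << std::endl;  // prints 1
  // Right view: simplex tree vertex handle -> CGAL-side key.
  std::cout << vertex_map.right.find(0)->second << std::endl;               // prints cgal_vertex_0
  return 0;
}
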
*/ + Gudhi::Simplex_tree<>::Filtration_value filtration(Simplex_handle sh) { + return st_.filtration(sh); + } + private: void init() { @@ -161,7 +172,7 @@ class Alpha_complex { // -------------------------------------------------------------------------------------------- // Simplex_tree construction from loop on triangulation finite full cells list for (auto cit = triangulation->finite_full_cells_begin(); cit != triangulation->finite_full_cells_end(); ++cit) { - typeVectorVertex vertexVector; + Vector_vertex vertexVector; #ifdef DEBUG_TRACES std::cout << "Simplex_tree insertion "; #endif // DEBUG_TRACES @@ -325,7 +336,6 @@ class Alpha_complex { } friend std::ostream& operator<<(std::ostream& os, const Alpha_complex & alpha_complex) { - // TODO: Program terminated with signal SIGABRT, Aborted - Maybe because of copy constructor Gudhi::Simplex_tree<> st = alpha_complex.st_; os << st << std::endl; return os; diff --git a/src/Alpha_complex/include/gudhi/Alpha_shapes/Delaunay_triangulation_off_io.h b/src/Alpha_complex/include/gudhi/Alpha_shapes/Delaunay_triangulation_off_io.h deleted file mode 100644 index 8bda23b7..00000000 --- a/src/Alpha_complex/include/gudhi/Alpha_shapes/Delaunay_triangulation_off_io.h +++ /dev/null @@ -1,210 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Vincent Rouvreau - * - * Copyright (C) 2015 INRIA Saclay (France) - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ -#ifndef SRC_ALPHA_SHAPES_INCLUDE_GUDHI_ALPHA_SHAPES_DELAUNAY_TRIANGULATION_OFF_IO_H_ -#define SRC_ALPHA_SHAPES_INCLUDE_GUDHI_ALPHA_SHAPES_DELAUNAY_TRIANGULATION_OFF_IO_H_ - -#include -#include -#include -#include - -#include "gudhi/Off_reader.h" - -namespace Gudhi { - -namespace alphacomplex { - -/** - *@brief Off reader visitor with flag that can be passed to Off_reader to read a Delaunay_triangulation_complex. - */ -template -class Delaunay_triangulation_off_visitor_reader { - private: - Complex* _complex; - typedef typename Complex::Point Point; - - public: - - // Pass a Complex as a parameter is required, even if not used. Otherwise, compilation is KO. - Delaunay_triangulation_off_visitor_reader(Complex* _complex_ptr) - : _complex(nullptr) { } - - void init(int dim, int num_vertices, int num_faces, int num_edges) { -#ifdef DEBUG_TRACES - std::cout << "Delaunay_triangulation_off_visitor_reader::init - dim=" << dim << " - num_vertices=" << - num_vertices << " - num_faces=" << num_faces << " - num_edges=" << num_edges << std::endl; -#endif // DEBUG_TRACES - if (num_faces > 0) { - std::cerr << "Delaunay_triangulation_off_visitor_reader::init faces are not taken into account from OFF " << - "file for Delaunay triangulation - faces are computed." 
<< std::endl; - } - if (num_edges > 0) { - std::cerr << "Delaunay_triangulation_off_visitor_reader::init edges are not taken into account from OFF " << - "file for Delaunay triangulation - edges are computed." << std::endl; - } - // Complex construction with dimension from file - _complex = new Complex(dim); - } - - void point(const std::vector& point) { -#ifdef DEBUG_TRACES - std::cout << "Delaunay_triangulation_off_visitor_reader::point "; - for (auto coordinate : point) { - std::cout << coordinate << " | "; - } - std::cout << std::endl; -#endif // DEBUG_TRACES - _complex->insert(Point(point.size(), point.begin(), point.end())); - } - - void maximal_face(const std::vector& face) { - // For Delaunay Triangulation, only points are read -#ifdef DEBUG_TRACES - std::cout << "Delaunay_triangulation_off_visitor_reader::face "; - for (auto vertex : face) { - std::cout << vertex << " | "; - } - std::cout << std::endl; -#endif // DEBUG_TRACES - } - - void done() { -#ifdef DEBUG_TRACES - std::cout << "Delaunay_triangulation_off_visitor_reader::done" << std::endl; -#endif // DEBUG_TRACES - } - - Complex* get_complex() { - return _complex; - } -}; - -/** - *@brief Class that allows to load a Delaunay_triangulation_complex from an off file. - */ -template -class Delaunay_triangulation_off_reader { - public: - - /** - * name_file : file to read - * read_complex : complex that will receive the file content - * read_only_points : specify true if only the points must be read - */ - Delaunay_triangulation_off_reader(const std::string & name_file) : valid_(false) { - std::ifstream stream(name_file); - if (stream.is_open()) { - Delaunay_triangulation_off_visitor_reader off_visitor(_complex); - Off_reader off_reader(stream); - valid_ = off_reader.read(off_visitor); - if (valid_) { - _complex = off_visitor.get_complex(); - } - } else { - std::cerr << "Delaunay_triangulation_off_reader::Delaunay_triangulation_off_reader could not open file " << - name_file << std::endl; - } - - } - - /** - * return true if reading did not meet problems. - */ - bool is_valid() const { - return valid_; - } - - Complex* get_complex() { - if (valid_) - return _complex; - return nullptr; - - } - - private: - bool valid_; - Complex* _complex; -}; - -template -class Delaunay_triangulation_off_writer { - public: - typedef typename Complex::Point Point; - - /** - * name_file : file where the off will be written - * save_complex : complex that be outputted in the file - * for now only save triangles. - */ - Delaunay_triangulation_off_writer(const std::string & name_file, Complex* complex_ptr) { - std::ofstream stream(name_file); - if (stream.is_open()) { - if (complex_ptr->current_dimension() == 3) { - // OFF header - stream << "OFF" << std::endl; - // no endl on next line - don't know why... - stream << complex_ptr->number_of_vertices() << " " << complex_ptr->number_of_finite_full_cells() << " 0"; - } else { - // nOFF header - stream << "nOFF" << std::endl; - // no endl on next line - don't know why... 
- stream << complex_ptr->current_dimension() << " " << complex_ptr->number_of_vertices() << " " << - complex_ptr->number_of_finite_full_cells() << " 0"; - - } - - // bimap to retrieve vertex handles from points and vice versa - std::map< Point, int > points_to_vh; - // Start to insert at default handle value - int vertex_handle = int(); - - // Points list - for (auto vit = complex_ptr->vertices_begin(); vit != complex_ptr->vertices_end(); ++vit) { - for (auto Coord = vit->point().cartesian_begin(); Coord != vit->point().cartesian_end(); ++Coord) { - stream << *Coord << " "; - } - stream << std::endl; - points_to_vh[vit->point()] = vertex_handle; - vertex_handle++; - } - - for (auto cit = complex_ptr->finite_full_cells_begin(); cit != complex_ptr->finite_full_cells_end(); ++cit) { - std::vector vertexVector; - stream << std::distance(cit->vertices_begin(), cit->vertices_end()) << " "; - for (auto vit = cit->vertices_begin(); vit != cit->vertices_end(); ++vit) { - stream << points_to_vh[(*vit)->point()] << " "; - } - stream << std::endl; - } - stream.close(); - } else { - std::cerr << "Delaunay_triangulation_off_writer::Delaunay_triangulation_off_writer could not open file " << - name_file << std::endl; - } - } -}; - -} // namespace alphacomplex - -} // namespace Gudhi - -#endif // SRC_ALPHA_SHAPES_INCLUDE_GUDHI_ALPHA_SHAPES_DELAUNAY_TRIANGULATION_OFF_IO_H_ diff --git a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp index 38168f10..86d4d9c3 100644 --- a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp +++ b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp @@ -25,25 +25,10 @@ #include #include // to construct a Delaunay_triangulation from a OFF file -#include "gudhi/Alpha_shapes/Delaunay_triangulation_off_io.h" +#include "gudhi/Delaunay_triangulation_off_io.h" #include "gudhi/Alpha_complex.h" -// to construct a simplex_tree from Delaunay_triangulation -#include "gudhi/graph_simplicial_complex.h" -#include "gudhi/Simplex_tree.h" - -#include -#include -#include -#include -#include - -#include -#include - -#include -#include -#include +#include // float comparison // Use dynamic_dimension_tag for the user to be able to set dimension typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > K; @@ -51,7 +36,7 @@ typedef CGAL::Delaunay_triangulation T; // The triangulation uses the default instantiation of the // TriangulationDataStructure template parameter -BOOST_AUTO_TEST_CASE( OFF_file ) { +BOOST_AUTO_TEST_CASE( S4_100_OFF_file ) { // ---------------------------------------------------------------------------- // // Init of an alpha-complex from a OFF file @@ -66,61 +51,37 @@ BOOST_AUTO_TEST_CASE( OFF_file ) { std::cout << "alpha_complex_from_file.dimension()=" << alpha_complex_from_file.dimension() << std::endl; BOOST_CHECK(alpha_complex_from_file.dimension() == DIMENSION); - const double FILTRATION = 0.0; - std::cout << "alpha_complex_from_file.filtration()=" << alpha_complex_from_file.filtration() << std::endl; - BOOST_CHECK(alpha_complex_from_file.filtration() == FILTRATION); - const int NUMBER_OF_VERTICES = 100; std::cout << "alpha_complex_from_file.num_vertices()=" << alpha_complex_from_file.num_vertices() << std::endl; BOOST_CHECK(alpha_complex_from_file.num_vertices() == NUMBER_OF_VERTICES); - const int NUMBER_OF_SIMPLICES = 6779; + const int NUMBER_OF_SIMPLICES = 6879; std::cout << "alpha_complex_from_file.num_simplices()=" << alpha_complex_from_file.num_simplices() << std::endl; BOOST_CHECK(alpha_complex_from_file.num_simplices() 
== NUMBER_OF_SIMPLICES); } -BOOST_AUTO_TEST_CASE( Delaunay_triangulation ) { +BOOST_AUTO_TEST_CASE( S8_10_OFF_file ) { // ---------------------------------------------------------------------------- // - // Init of an alpha-complex from a Delaunay triangulation + // Init of an alpha-complex from a OFF file // // ---------------------------------------------------------------------------- - T dt; std::string off_file_name("S8_10.off"); std::cout << "========== OFF FILE NAME = " << off_file_name << " ==========" << std::endl; - Gudhi::alphacomplex::Delaunay_triangulation_off_reader off_reader(off_file_name, dt); - std::cout << "off_reader.is_valid()=" << off_reader.is_valid() << std::endl; - BOOST_CHECK(off_reader.is_valid()); - - const int NUMBER_OF_VERTICES = 10; - std::cout << "dt.number_of_vertices()=" << dt.number_of_vertices() << std::endl; - BOOST_CHECK(dt.number_of_vertices() == NUMBER_OF_VERTICES); - - const int NUMBER_OF_FULL_CELLS = 30; - std::cout << "dt.number_of_full_cells()=" << dt.number_of_full_cells() << std::endl; - BOOST_CHECK(dt.number_of_full_cells() == NUMBER_OF_FULL_CELLS); - - const int NUMBER_OF_FINITE_FULL_CELLS = 6; - std::cout << "dt.number_of_finite_full_cells()=" << dt.number_of_finite_full_cells() << std::endl; - BOOST_CHECK(dt.number_of_finite_full_cells() == NUMBER_OF_FINITE_FULL_CELLS); - - Gudhi::alphacomplex::Alpha_complex alpha_complex_from_dt(dt); + Gudhi::alphacomplex::Alpha_complex alpha_complex_from_file(off_file_name); const int DIMENSION = 8; - std::cout << "alpha_complex_from_dt.dimension()=" << alpha_complex_from_dt.dimension() << std::endl; - BOOST_CHECK(alpha_complex_from_dt.dimension() == DIMENSION); - - const double FILTRATION = 0.0; - std::cout << "alpha_complex_from_dt.filtration()=" << alpha_complex_from_dt.filtration() << std::endl; - BOOST_CHECK(alpha_complex_from_dt.filtration() == FILTRATION); + std::cout << "alpha_complex_from_file.dimension()=" << alpha_complex_from_file.dimension() << std::endl; + BOOST_CHECK(alpha_complex_from_file.dimension() == DIMENSION); - std::cout << "alpha_complex_from_dt.num_vertices()=" << alpha_complex_from_dt.num_vertices() << std::endl; - BOOST_CHECK(alpha_complex_from_dt.num_vertices() == NUMBER_OF_VERTICES); + const int NUMBER_OF_VERTICES = 10; + std::cout << "alpha_complex_from_file.num_vertices()=" << alpha_complex_from_file.num_vertices() << std::endl; + BOOST_CHECK(alpha_complex_from_file.num_vertices() == NUMBER_OF_VERTICES); - const int NUMBER_OF_SIMPLICES = 997; - std::cout << "alpha_complex_from_dt.num_simplices()=" << alpha_complex_from_dt.num_simplices() << std::endl; - BOOST_CHECK(alpha_complex_from_dt.num_simplices() == NUMBER_OF_SIMPLICES); -} + const int NUMBER_OF_SIMPLICES = 1007; + std::cout << "alpha_complex_from_file.num_simplices()=" << alpha_complex_from_file.num_simplices() << std::endl; + BOOST_CHECK(alpha_complex_from_file.num_simplices() == NUMBER_OF_SIMPLICES); +} diff --git a/src/Alpha_complex/test/CMakeLists.txt b/src/Alpha_complex/test/CMakeLists.txt index 72e8390a..4fe69ce5 100644 --- a/src/Alpha_complex/test/CMakeLists.txt +++ b/src/Alpha_complex/test/CMakeLists.txt @@ -15,7 +15,7 @@ if(CGAL_FOUND) include( ${EIGEN3_USE_FILE} ) include_directories (BEFORE "../../include") - add_definitions(-DDEBUG_TRACES) + #add_definitions(-DDEBUG_TRACES) add_executable ( AlphaComplexUnitTest Alpha_complex_unit_test.cpp ) target_link_libraries(AlphaComplexUnitTest ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) add_test(AlphaComplexUnitTest 
${CMAKE_CURRENT_BINARY_DIR}/AlphaComplexUnitTest) diff --git a/src/Alpha_complex/test/README b/src/Alpha_complex/test/README index 244a2b84..45b87d91 100644 --- a/src/Alpha_complex/test/README +++ b/src/Alpha_complex/test/README @@ -7,6 +7,6 @@ make To launch with details: *********************** -./AlphaShapesUnitTest --report_level=detailed --log_level=all +./AlphaComplexUnitTest --report_level=detailed --log_level=all ==> echo $? returns 0 in case of success (non-zero otherwise) diff --git a/src/Alpha_complex/test/S4_100.off b/src/Alpha_complex/test/S4_100.off index 0a5dc58c..cd017e12 100644 --- a/src/Alpha_complex/test/S4_100.off +++ b/src/Alpha_complex/test/S4_100.off @@ -1,5 +1,5 @@ -OFF -100 0 0 +nOFF +4 100 0 0 0.562921 -0.735261 -0.256472 0.277007 -0.803733 -0.0527915 -0.315125 0.501918 -0.24946 -0.354982 -0.410773 -0.801887 diff --git a/src/Alpha_complex/test/S8_10.off b/src/Alpha_complex/test/S8_10.off index 1d67e10f..4e147c44 100644 --- a/src/Alpha_complex/test/S8_10.off +++ b/src/Alpha_complex/test/S8_10.off @@ -1,5 +1,5 @@ -OFF -10 0 0 +nOFF +8 10 0 0 0.440036 -0.574754 -0.200485 0.216537 -0.501251 -0.0329236 -0.196529 0.313023 -0.129367 -0.184089 -0.213021 -0.415848 0.783529 -0.0438025 0.317256 0.120749 0.132429 0.683748 -0.124536 -0.166133 -0.540695 -0.0887576 0.390234 -0.139031 diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index e0d5ff28..e2271efd 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -32,6 +32,7 @@ else() LINK_DIRECTORIES(${Boost_LIBRARY_DIRS}) include_directories(include/) + add_subdirectory(example/common) add_subdirectory(example/Simplex_tree) add_subdirectory(example/Persistent_cohomology) add_subdirectory(example/Skeleton_blocker) diff --git a/src/Doxyfile b/src/Doxyfile index 62412627..9d4bc9c8 100644 --- a/src/Doxyfile +++ b/src/Doxyfile @@ -811,7 +811,8 @@ EXCLUDE_SYMBOLS = # that contain example code fragments that are included (see the \include # command). -EXAMPLE_PATH = +EXAMPLE_PATH = common/example \ + common/test # If the value of the EXAMPLE_PATH tag contains directories, you can use the # EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and diff --git a/src/common/example/CMakeLists.txt b/src/common/example/CMakeLists.txt new file mode 100644 index 00000000..ae30da54 --- /dev/null +++ b/src/common/example/CMakeLists.txt @@ -0,0 +1,26 @@ +cmake_minimum_required(VERSION 2.6) +project(GUDHIDelaunayTriangulationOffFileReadWrite) + +# need CGAL 4.6 +if(CGAL_FOUND) + if (NOT CGAL_VERSION VERSION_LESS 4.6.0) + message(STATUS "CGAL version: ${CGAL_VERSION}.") + + include( ${CGAL_USE_FILE} ) + + find_package(Eigen3 3.1.0) + if (EIGEN3_FOUND) + message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") + include( ${EIGEN3_USE_FILE} ) + + add_executable ( dtoffrw Delaunay_triangulation_off_rw.cpp ) + target_link_libraries(dtoffrw ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) + add_test(dtoffrw ${CMAKE_CURRENT_BINARY_DIR}/dtoffrw ${CMAKE_SOURCE_DIR}/data/points/alphashapedoc.off ${CMAKE_CURRENT_BINARY_DIR}/result.off) + + else() + message(WARNING "Eigen3 not found. Version 3.1.0 is required for Alpha shapes feature.") + endif() + else() + message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile Alpha shapes feature. 
Version 4.6.0 is required.") + endif () +endif() diff --git a/src/common/example/Delaunay_triangulation_off_rw.cpp b/src/common/example/Delaunay_triangulation_off_rw.cpp new file mode 100644 index 00000000..d1aa7988 --- /dev/null +++ b/src/common/example/Delaunay_triangulation_off_rw.cpp @@ -0,0 +1,55 @@ +// to construct a Delaunay_triangulation from a OFF file +#include "gudhi/Delaunay_triangulation_off_io.h" + +#include +#include + +#include +#include +#include + +// Use dynamic_dimension_tag for the user to be able to set dimension +typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > K; +typedef CGAL::Delaunay_triangulation T; +// The triangulation uses the default instantiation of the +// TriangulationDataStructure template parameter + +void usage(char * const progName) { + std::cerr << "Usage: " << progName << " inputFile.off outputFile.off" << std::endl; + exit(-1); // ----- >> +} + +int main(int argc, char **argv) { + if (argc != 3) { + std::cerr << "Error: Number of arguments (" << argc << ") is not correct" << std::endl; + usage(argv[0]); + } + + std::string offInputFile(argv[1]); + // Read the OFF file (input file name given as parameter) and triangulates points + Gudhi::Delaunay_triangulation_off_reader off_reader(offInputFile); + // Check the read operation was correct + if (!off_reader.is_valid()) { + std::cerr << "Unable to read file " << offInputFile << std::endl; + exit(-1); // ----- >> + } + + // Retrieve the triangulation + T* triangulation = off_reader.get_complex(); + // Operations on triangulation + std::cout << "Number of vertices= " << triangulation->number_of_vertices() << std::endl; + std::cout << "Number of finite full cells= " << triangulation->number_of_finite_full_cells() << std::endl; + + std::string outFileName(argv[2]); + std::string offOutputFile(outFileName); + // Write the OFF file (output file name given as parameter) with the points and triangulated cells as faces + Gudhi::Delaunay_triangulation_off_writer off_writer(offOutputFile, triangulation); + + // Check the write operation was correct + if (!off_writer.is_valid()) { + std::cerr << "Unable to write file " << offOutputFile << std::endl; + exit(-1); // ----- >> + } + + return 0; +} \ No newline at end of file diff --git a/src/common/include/gudhi/Delaunay_triangulation_off_io.h b/src/common/include/gudhi/Delaunay_triangulation_off_io.h new file mode 100644 index 00000000..de5fa2af --- /dev/null +++ b/src/common/include/gudhi/Delaunay_triangulation_off_io.h @@ -0,0 +1,308 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Vincent Rouvreau + * + * Copyright (C) 2015 INRIA Saclay (France) + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ +#ifndef SRC_ALPHA_SHAPES_INCLUDE_GUDHI_ALPHA_SHAPES_DELAUNAY_TRIANGULATION_OFF_IO_H_ +#define SRC_ALPHA_SHAPES_INCLUDE_GUDHI_ALPHA_SHAPES_DELAUNAY_TRIANGULATION_OFF_IO_H_ + +#include +#include +#include +#include + +#include "gudhi/Off_reader.h" + +namespace Gudhi { + +/** \brief OFF file visitor implementation according to Off_reader in order to construct a CGAL Delaunay triangulation. + * + * For more informations on CGAL Delaunay triangulation, please refer to the corresponding chapter in page + * http://doc.cgal.org/latest/Triangulation/ + */ +template +class Delaunay_triangulation_off_visitor_reader { + private: + Complex* _complex; + typedef typename Complex::Point Point; + + public: + + // TODO(VR) : Pass a Complex as a parameter is required, even if not used. Otherwise, compilation is KO. + + /** \brief Delaunay_triangulation_off_visitor_reader constructor + * + * @param[in] _complex_ptr pointer on a Delaunay triangulation. + */ + Delaunay_triangulation_off_visitor_reader(Complex* _complex_ptr) + : _complex(nullptr) { } + + /** \brief Off_reader visitor init implementation. + * + * The init parameters are set from OFF file header. + * Dimension value is required in order to construct Delaunay triangulation. + * + * @param[in] dim space dimension of vertices. + * @param[in] num_vertices number of vertices in the OFF file (not used). + * @param[in] num_faces number of faces in the OFF file (not used). + * @param[in] num_edges number of edges in the OFF file (not used). + */ + void init(int dim, int num_vertices, int num_faces, int num_edges) { +#ifdef DEBUG_TRACES + std::cout << "Delaunay_triangulation_off_visitor_reader::init - dim=" << dim << " - num_vertices=" << + num_vertices << " - num_faces=" << num_faces << " - num_edges=" << num_edges << std::endl; +#endif // DEBUG_TRACES + if (num_faces > 0) { + std::cerr << "Delaunay_triangulation_off_visitor_reader::init faces are not taken into account from OFF " << + "file for Delaunay triangulation - faces are computed." << std::endl; + } + if (num_edges > 0) { + std::cerr << "Delaunay_triangulation_off_visitor_reader::init edges are not taken into account from OFF " << + "file for Delaunay triangulation - edges are computed." << std::endl; + } + // Complex construction with dimension from file + _complex = new Complex(dim); + } + + /** \brief Off_reader visitor point implementation. + * + * The point function is called on each vertex line from OFF file. + * This function inserts the vertex in the Delaunay triangulation. + * + * @param[in] point vector of vertex coordinates. + */ + void point(const std::vector& point) { +#ifdef DEBUG_TRACES + std::cout << "Delaunay_triangulation_off_visitor_reader::point "; + for (auto coordinate : point) { + std::cout << coordinate << " | "; + } + std::cout << std::endl; +#endif // DEBUG_TRACES + _complex->insert(Point(point.size(), point.begin(), point.end())); + } + + // Off_reader visitor maximal_face implementation - not used + void maximal_face(const std::vector& face) { + // For Delaunay Triangulation, only points are read + } + + // Off_reader visitor done implementation - not used + void done() { + // Nothing to be done on end of OFF file read + } + + /** \brief Returns the constructed Delaunay triangulation. + * + * @return A pointer on the Delaunay triangulation. + * + * @warning The returned pointer can be nullptr. + */ + Complex* get_complex() const { + return _complex; + } +}; + +/** \brief OFF file reader implementation in order to construct a Delaunay triangulation. 
+ * + * This class is using the Delaunay_triangulation_off_visitor_reader to visit the OFF file according to Off_reader. + * + * For more informations on CGAL Delaunay triangulation, please refer to the corresponding chapter in page + * http://doc.cgal.org/latest/Triangulation/ + * + * \section Example + * + * This example loads points from an OFF file and builds the Delaunay triangulation. + * Then, it is asked to display the number of vertices and finites full cells from the Delaunay triangulation. + * + * \include Delaunay_triangulation_off_rw.cpp + * + * When launching: + * + * \code $> ./dtoffrw ../../data/points/alphashapedoc.off triangulated.off + * \endcode + * + * the program output is: + * + * \include dtoffrw_alphashapedoc_result.txt + */ +template +class Delaunay_triangulation_off_reader { + public: + + /** \brief Reads the OFF file and constructs the Delaunay triangulation from the points + * that are in the OFF file. + * + * @param[in] name_file OFF file to read. + * + * @warning Check with is_valid() function to see if read operation was successful. + */ + Delaunay_triangulation_off_reader(const std::string & name_file) + : valid_(false) { + std::ifstream stream(name_file); + if (stream.is_open()) { + Delaunay_triangulation_off_visitor_reader off_visitor(_complex); + Off_reader off_reader(stream); + valid_ = off_reader.read(off_visitor); + if (valid_) { + _complex = off_visitor.get_complex(); + if (_complex == nullptr) { + std::cerr << "Delaunay_triangulation_off_reader::Delaunay_triangulation_off_reader off_visitor returns an empty pointer" << std::endl; + valid_ = false; + } + } + } else { + std::cerr << "Delaunay_triangulation_off_reader::Delaunay_triangulation_off_reader could not open file " << + name_file << std::endl; + } + + } + + /** \brief Returns if the OFF file read operation was successful or not. + * + * @return OFF file read status. + */ + bool is_valid() const { + return valid_; + } + + /** \brief Returns the constructed Delaunay triangulation. + * + * @return A pointer on the Delaunay triangulation. + * + * @warning The returned pointer can be nullptr. + */ + Complex* get_complex() const { + if (valid_) + return _complex; + return nullptr; + + } + + private: + /** \brief OFF file read status.*/ + bool valid_; + /** \brief A pointer on the Delaunay triangulation.*/ + Complex* _complex; +}; + +/** \brief OFF file writer from a Delaunay triangulation. + * + * This class constructs the OFF file header according to http://www.geomview.org/docs/html/OFF.html + * + * The header is followed by the list of points coordinates (Delaunay triangulation vertices) + * + * And finally is followed by the list of faces (Delaunay triangulation finite full cells) + * + * For more informations on CGAL Delaunay triangulation, please refer to the corresponding chapter in page + * http://doc.cgal.org/latest/Triangulation/ + * + * \section Example + * + * This example loads points from an OFF file and builds the Delaunay triangulation. + * Then, the Delaunay triangulation is saved in a new file including the triangulation as a list of faces. 
+ * + * \include Delaunay_triangulation_off_rw.cpp + * + * When launching: + * + * \code $> ./dtoffrw ../../data/points/alphashapedoc.off triangulated.off + * \endcode + * + * The result will be an OFF file of dimension 2 with the 7 points from alphashapedoc.off followed by the 6 + * triangulations of dimension 3 (the first value on each faces): + * \include dtoffrw_alphashapedoc_result.off + */ +template +class Delaunay_triangulation_off_writer { + public: + typedef typename Complex::Point Point; + + /** \brief Writes the OFF file from the Delaunay triangulation + * + * @param[in] name_file OFF file to write. + * @param[in] complex_ptr pointer on a Delaunay triangulation. + * + * @warning Check with is_valid() function to see if write operation was successful. + */ + Delaunay_triangulation_off_writer(const std::string & name_file, Complex* complex_ptr) + : valid_(false) { + std::ofstream stream(name_file); + if (stream.is_open()) { + if (complex_ptr->current_dimension() == 3) { + // OFF header + stream << "OFF" << std::endl; + // no endl on next line - don't know why... + stream << complex_ptr->number_of_vertices() << " " << complex_ptr->number_of_finite_full_cells() << " 0"; + } else { + // nOFF header + stream << "nOFF" << std::endl; + // no endl on next line - don't know why... + stream << complex_ptr->current_dimension() << " " << complex_ptr->number_of_vertices() << " " << + complex_ptr->number_of_finite_full_cells() << " 0"; + + } + + // bimap to retrieve vertex handles from points and vice versa + std::map< Point, int > points_to_vh; + // Start to insert at default handle value + int vertex_handle = int(); + + // Points list + for (auto vit = complex_ptr->vertices_begin(); vit != complex_ptr->vertices_end(); ++vit) { + for (auto Coord = vit->point().cartesian_begin(); Coord != vit->point().cartesian_end(); ++Coord) { + stream << *Coord << " "; + } + stream << std::endl; + points_to_vh[vit->point()] = vertex_handle; + vertex_handle++; + } + + for (auto cit = complex_ptr->finite_full_cells_begin(); cit != complex_ptr->finite_full_cells_end(); ++cit) { + std::vector vertexVector; + stream << std::distance(cit->vertices_begin(), cit->vertices_end()) << " "; + for (auto vit = cit->vertices_begin(); vit != cit->vertices_end(); ++vit) { + stream << points_to_vh[(*vit)->point()] << " "; + } + stream << std::endl; + } + stream.close(); + valid_ = true; + } else { + std::cerr << "Delaunay_triangulation_off_writer::Delaunay_triangulation_off_writer could not open file " << + name_file << std::endl; + } + } + + /** \brief Returns if the OFF write operation was successful or not. + * + * @return OFF file write status. + */ + bool is_valid() const { + return valid_; + } + + private: + /* \brief OFF file write status. */ + bool valid_; +}; + +} // namespace Gudhi + +#endif // SRC_ALPHA_SHAPES_INCLUDE_GUDHI_ALPHA_SHAPES_DELAUNAY_TRIANGULATION_OFF_IO_H_ diff --git a/src/common/include/gudhi/Off_reader.h b/src/common/include/gudhi/Off_reader.h index 618d1b4d..a8abb507 100644 --- a/src/common/include/gudhi/Off_reader.h +++ b/src/common/include/gudhi/Off_reader.h @@ -36,163 +36,150 @@ namespace Gudhi { -/** - * Read an off file and calls a visitor methods while reading it. - * An off file must have its first/snd line in this format : - * OFF - * num_vert num_faces num_edges - * - * A noff file must have its first/snd line in this format : - * nOFF - * dim num_vert num_faces num_edges - * - * The number of edges num_edges is optional and can be left to zero. 
+/** \brief OFF file reader top class visitor. + * + * OFF file must be conform to format described here : + * http://www.geomview.org/docs/html/OFF.html */ -class Off_reader{ -public: - Off_reader(std::ifstream& stream):stream_(stream){ - } -// Off_reader(const std::string& name):stream_(name){ -// if(!stream_.is_open()) -// std::cerr <<"could not open file \n"; -// } - - ~Off_reader(){ - stream_.close(); - } - - /** - * read an off file and calls the following methods : - * void init(int dim,int num_vertices,int num_faces,int num_edges); //num_edges may not be set - * void point(const std::vector& point); - * void maximal_face(const std::list& face); - * void done(); - * of the visitor when reading a point or a maximal face. - */ - template - bool read(OffVisitor& off_visitor){ - bool success_read_off_preambule = read_off_preambule(off_visitor); - if(!success_read_off_preambule) { - std::cerr <<"could not read off preambule\n"; - return false; - } - - bool success_read_off_points = read_off_points(off_visitor); - if(!success_read_off_points) { - std::cerr <<"could not read off points\n"; - return false; - } - - bool success_read_off_faces = read_off_faces(off_visitor); - if(!success_read_off_faces) { - std::cerr <<"could not read off faces\n"; - return false; - } - - off_visitor.done(); - return success_read_off_preambule && success_read_off_points && success_read_off_faces; - } - -private: - std::ifstream& stream_; - - struct Off_info{ - int dim; - int num_vertices; - int num_edges; - int num_faces; - }; - - Off_info off_info_; - - template - bool read_off_preambule(OffVisitor& off_visitor){ - std::string line; - if(!goto_next_uncomment_line(line)) return false; - - bool is_off_file = (line.find("OFF") != std::string::npos); - bool is_noff_file = (line.find("nOFF") != std::string::npos); - - if(!is_off_file && !is_noff_file) { - std::cerr << line<> off_info_.num_vertices >> off_info_.num_faces >> off_info_.num_edges)){ - std::cerr << "incorrect number of vertices/faces/edges\n"; - return false; - } - } - else - if(!(iss >> off_info_.dim >> off_info_.num_vertices >> off_info_.num_faces >> off_info_.num_edges)){ - std::cerr << "incorrect number of vertices/faces/edges\n"; - return false; - } - off_visitor.init(off_info_.dim,off_info_.num_vertices,off_info_.num_faces,off_info_.num_edges); - - return true; - } - - bool goto_next_uncomment_line(std::string& uncomment_line){ - uncomment_line.clear(); - do - std::getline(stream_, uncomment_line); - while(uncomment_line[0] == '%');// || uncomment_line.empty()); - return (uncomment_line.size()>0 && uncomment_line[0] != '%'); - } - - - template - bool read_off_points(OffVisitor& visitor){ - int num_vertices_to_read = off_info_.num_vertices; - while(num_vertices_to_read--){ - std::string line; - if(!goto_next_uncomment_line(line)) return false; - std::vector point; - std::istringstream iss(line); - point.assign(std::istream_iterator(iss),std::istream_iterator()); -// if(point.size() != off_info_.dim) return false; - visitor.point(point); - } - return true; - } - - template - bool read_off_faces(OffVisitor& visitor){ - std::string line; - while(goto_next_uncomment_line(line)){ - std::istringstream iss(line); - int num_face_vertices; - iss >> num_face_vertices; - std::vector face; - face.assign(std::istream_iterator(iss),std::istream_iterator()); - if(!face.size() == off_info_.num_vertices) return false; - visitor.maximal_face(face); - } - return true; - } +class Off_reader { + public: + + Off_reader(std::ifstream& stream) : stream_(stream) { } + + 
~Off_reader() { + stream_.close(); + } + + /** \brief + * Read an OFF file and calls the following methods : + * + * void init(int dim,int num_vertices,int num_faces,int num_edges); // from file header - num_edges may not be set + * + * void point(const std::vector& point); // for each point read + * + * void maximal_face(const std::list& face); // for each face read + * + * void done(); // upon file read is finished + * + * of the visitor when reading a point or a maximal face. Edges are not taken into account. + */ + template + bool read(OffVisitor& off_visitor) { + bool success_read_off_preambule = read_off_preambule(off_visitor); + if (!success_read_off_preambule) { + std::cerr << "could not read off preambule\n"; + return false; + } + + bool success_read_off_points = read_off_points(off_visitor); + if (!success_read_off_points) { + std::cerr << "could not read off points\n"; + return false; + } + + bool success_read_off_faces = read_off_faces(off_visitor); + if (!success_read_off_faces) { + std::cerr << "could not read off faces\n"; + return false; + } + + off_visitor.done(); + return success_read_off_preambule && success_read_off_points && success_read_off_faces; + } + + private: + std::ifstream& stream_; + + struct Off_info { + int dim; + int num_vertices; + int num_edges; + int num_faces; + }; + + Off_info off_info_; + + template + bool read_off_preambule(OffVisitor& off_visitor) { + std::string line; + if (!goto_next_uncomment_line(line)) return false; + + bool is_off_file = (line.find("OFF") != std::string::npos); + bool is_noff_file = (line.find("nOFF") != std::string::npos); + + if (!is_off_file && !is_noff_file) { + std::cerr << line << std::endl; + std::cerr << "missing off header\n"; + return false; + } + + if (!goto_next_uncomment_line(line)) return false; + std::istringstream iss(line); + if ((is_off_file) && (!is_noff_file)) { + off_info_.dim = 3; + if (!(iss >> off_info_.num_vertices >> off_info_.num_faces >> off_info_.num_edges)) { + std::cerr << "incorrect number of vertices/faces/edges\n"; + return false; + } + } else + if (!(iss >> off_info_.dim >> off_info_.num_vertices >> off_info_.num_faces >> off_info_.num_edges)) { + std::cerr << "incorrect number of vertices/faces/edges\n"; + return false; + } + off_visitor.init(off_info_.dim, off_info_.num_vertices, off_info_.num_faces, off_info_.num_edges); + + return true; + } + + bool goto_next_uncomment_line(std::string& uncomment_line) { + uncomment_line.clear(); + do + std::getline(stream_, uncomment_line); while (uncomment_line[0] == '%'); + return (uncomment_line.size() > 0 && uncomment_line[0] != '%'); + } + + template + bool read_off_points(OffVisitor& visitor) { + int num_vertices_to_read = off_info_.num_vertices; + while (num_vertices_to_read--) { + std::string line; + if (!goto_next_uncomment_line(line)) return false; + std::vector point; + std::istringstream iss(line); + point.assign(std::istream_iterator(iss), std::istream_iterator()); + // if(point.size() != off_info_.dim) return false; + visitor.point(point); + } + return true; + } + + template + bool read_off_faces(OffVisitor& visitor) { + std::string line; + while (goto_next_uncomment_line(line)) { + std::istringstream iss(line); + int num_face_vertices; + iss >> num_face_vertices; + std::vector face; + face.assign(std::istream_iterator(iss), std::istream_iterator()); + if (!face.size() == off_info_.num_vertices) return false; + visitor.maximal_face(face); + } + return true; + } }; - template -void read_off(const std::string& name_file_off,OFFVisitor& vis){ 
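For a direct use of Off_reader without the Delaunay visitor, a small counting visitor is enough. The sketch below is not taken from the GUDHI sources: the Counting_off_visitor type and the input path are illustrative, and the callback signatures assume points are passed as std::vector<double> and faces as std::vector<int>, as done by the reader implementation above.

#include "gudhi/Off_reader.h"

#include <fstream>
#include <iostream>
#include <vector>

// Visitor that only counts what Off_reader hands to it (assumed signatures).
struct Counting_off_visitor {
  int dimension = 0;
  int nb_points = 0;
  int nb_faces = 0;
  void init(int dim, int num_vertices, int num_faces, int num_edges) {
    dimension = dim;
  }
  void point(const std::vector<double>& coordinates) { ++nb_points; }
  void maximal_face(const std::vector<int>& face) { ++nb_faces; }
  void done() {
    std::cout << "dimension=" << dimension << " points=" << nb_points
              << " faces=" << nb_faces << std::endl;
  }
};

int main() {
  std::ifstream stream("../../data/points/alphashapedoc.off");  // illustrative path
  if (!stream.is_open()) return -1;
  Gudhi::Off_reader off_reader(stream);
  Counting_off_visitor visitor;
  return off_reader.read(visitor) ? 0 : -1;
}
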
- std::ifstream stream(name_file_off); - if(!stream.is_open()) - std::cerr <<"could not open file \n"; - else{ - Off_reader off_reader(stream); - off_reader.read(vis); - } +void read_off(const std::string& name_file_off, OFFVisitor& vis) { + std::ifstream stream(name_file_off); + if (!stream.is_open()) + std::cerr << "could not open file \n"; + else { + Off_reader off_reader(stream); + off_reader.read(vis); + } } - - } // namespace Gudhi - -#endif /* GUDHI_OFF_READER_H_ */ +#endif // GUDHI_OFF_READER_H_ diff --git a/src/common/test/CMakeLists.txt b/src/common/test/CMakeLists.txt new file mode 100644 index 00000000..22783caf --- /dev/null +++ b/src/common/test/CMakeLists.txt @@ -0,0 +1,44 @@ +cmake_minimum_required(VERSION 2.6) +project(GUDHIDelaunayTriangulationOffFileReadWriteUT) + +if(NOT MSVC) + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} --coverage") + set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} --coverage") + set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} --coverage") +endif() + +# need CGAL 4.6 +if(CGAL_FOUND) + if (NOT CGAL_VERSION VERSION_LESS 4.6.0) + message(STATUS "CGAL version: ${CGAL_VERSION}.") + + include( ${CGAL_USE_FILE} ) + + find_package(Eigen3 3.1.0) + if (EIGEN3_FOUND) + message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") + include( ${EIGEN3_USE_FILE} ) + + add_executable ( dtoffrw_UT dtoffrw_unit_test.cpp ) + target_link_libraries(dtoffrw_UT ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) + + # Unitary tests + add_test(dtoffrw_UT ${CMAKE_CURRENT_BINARY_DIR}/dtoffrw_UT) + + if (DIFF_PATH) + add_test(diff_files_UT ${DIFF_PATH} ${CMAKE_CURRENT_BINARY_DIR}/UT.off ${CMAKE_CURRENT_BINARY_DIR}/dtoffrw_alphashapedoc_result.off) + endif() + + else() + message(WARNING "Eigen3 not found. Version 3.1.0 is required for Alpha shapes feature.") + endif() + else() + message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile Alpha shapes feature. Version 4.6.0 is required.") + endif () +endif() + + + + + +cpplint_add_tests("${CMAKE_SOURCE_DIR}/src/common/include/gudhi") diff --git a/src/common/test/README b/src/common/test/README new file mode 100644 index 00000000..f2a7eb5a --- /dev/null +++ b/src/common/test/README @@ -0,0 +1,14 @@ +To compile: +*********** + +cmake . +make + +To launch with details: +*********************** + +./dtoffrw_UT --report_level=detailed --log_level=all + + ==> echo $? returns 0 in case of success (non-zero otherwise) + + diff --git a/src/common/test/dtoffrw_alphashapedoc_result.off b/src/common/test/dtoffrw_alphashapedoc_result.off new file mode 100644 index 00000000..13c255c6 --- /dev/null +++ b/src/common/test/dtoffrw_alphashapedoc_result.off @@ -0,0 +1,15 @@ +nOFF +2 7 6 0 +1 1 +7 0 +4 6 +9 6 +0 14 +2 19 +9 17 +3 1 2 3 +3 4 3 2 +3 5 1 3 +3 5 3 7 +3 7 3 4 +3 6 5 7 diff --git a/src/common/test/dtoffrw_alphashapedoc_result.txt b/src/common/test/dtoffrw_alphashapedoc_result.txt new file mode 100644 index 00000000..57761d14 --- /dev/null +++ b/src/common/test/dtoffrw_alphashapedoc_result.txt @@ -0,0 +1,3 @@ +Number of vertices= 7 +Number of finite full cells= 6 + diff --git a/src/common/test/dtoffrw_unit_test.cpp b/src/common/test/dtoffrw_unit_test.cpp new file mode 100644 index 00000000..4905d845 --- /dev/null +++ b/src/common/test/dtoffrw_unit_test.cpp @@ -0,0 +1,91 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. 
+ * + * Author(s): Vincent Rouvreau + * + * Copyright (C) 2015 INRIA Saclay (France) + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +#define BOOST_TEST_MODULE DelaunayTriangulationOffFileReadWrite test + +// to construct a Delaunay_triangulation from a OFF file +#include "gudhi/Delaunay_triangulation_off_io.h" + +#include +#include + +#include +#include +#include + +#include +#include +//#include + +// Use dynamic_dimension_tag for the user to be able to set dimension +typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > K; +typedef CGAL::Delaunay_triangulation T; + +BOOST_AUTO_TEST_CASE( Delaunay_triangulation_doc_test ) +{ + // Read the OFF file (input file name given as parameter) and triangulates points + Gudhi::Delaunay_triangulation_off_reader off_reader("../../../data/points/alphashapedoc.off"); + // Check the read operation was correct + BOOST_CHECK(off_reader.is_valid()); + + // Retrieve the triangulation + T* triangulation = off_reader.get_complex(); + BOOST_CHECK(triangulation != nullptr); + // Operations on triangulation + BOOST_CHECK(triangulation->number_of_vertices() == 7); + BOOST_CHECK(triangulation->number_of_finite_full_cells() == 6); + + // Write the OFF file (output file name given as parameter) with the points and triangulated cells as faces + Gudhi::Delaunay_triangulation_off_writer off_writer("UT.off", triangulation); + + // Check the write operation was correct + BOOST_CHECK(off_writer.is_valid()); + + delete triangulation; +} + +BOOST_AUTO_TEST_CASE( Delaunay_triangulation_unexisting_file_read_test ) +{ + Gudhi::Delaunay_triangulation_off_reader off_reader("pouetpouet_tralala.off"); + // Check the read operation was correct + BOOST_CHECK(!off_reader.is_valid()); + T* triangulation = off_reader.get_complex(); + BOOST_CHECK(triangulation == nullptr); +} + +BOOST_AUTO_TEST_CASE( Delaunay_triangulation_unexisting_file_write_test ) +{ + // Read the OFF file (input file name given as parameter) and triangulates points + Gudhi::Delaunay_triangulation_off_reader off_reader("../../../data/points/alphashapedoc.off"); + + // Retrieve the triangulation + T* triangulation = off_reader.get_complex(); + + // Write the OFF file (output file name given as parameter) with the points and triangulated cells as faces + Gudhi::Delaunay_triangulation_off_writer off_writer("/pouetpouet_tralala/pouetpouet_tralala/pouetpouet_tralala.off", triangulation); + + // Check the write operation was correct + BOOST_CHECK(!off_writer.is_valid()); + + delete triangulation; +} + -- cgit v1.2.3 From 9e7a221884346501d5bcb06ad184e08f96938315 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Thu, 18 Jun 2015 14:58:02 +0000 Subject: From trunk, cpplint was removed git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/alphashapes@626 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 404ea81f038969fb4d96945cfb9856b8ee4ac12a --- src/Alpha_complex/test/CMakeLists.txt | 1 - src/common/test/CMakeLists.txt | 5 ----- 2 
files changed, 6 deletions(-) (limited to 'src/common') diff --git a/src/Alpha_complex/test/CMakeLists.txt b/src/Alpha_complex/test/CMakeLists.txt index 4fe69ce5..0bd55433 100644 --- a/src/Alpha_complex/test/CMakeLists.txt +++ b/src/Alpha_complex/test/CMakeLists.txt @@ -28,4 +28,3 @@ if(CGAL_FOUND) endif () endif() -cpplint_add_tests("${CMAKE_SOURCE_DIR}/src/Alpha_complex/include/gudhi") diff --git a/src/common/test/CMakeLists.txt b/src/common/test/CMakeLists.txt index 22783caf..e4ac6c7b 100644 --- a/src/common/test/CMakeLists.txt +++ b/src/common/test/CMakeLists.txt @@ -37,8 +37,3 @@ if(CGAL_FOUND) endif () endif() - - - - -cpplint_add_tests("${CMAKE_SOURCE_DIR}/src/common/include/gudhi") -- cgit v1.2.3 From ea986c68192c7536716d139e5a5f0a30a76f0fc1 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Fri, 19 Jun 2015 11:08:21 +0000 Subject: Alpha complex documetation - 1st part git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/alphashapes@630 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: b64e860b0fb6d60e706784605f699349ea17a869 --- biblio/bibliography.bib | 16 ++ .../example/Alpha_complex_from_off.cpp | 22 --- src/Alpha_complex/include/gudhi/Alpha_complex.h | 207 +++++++++++---------- src/Alpha_complex/test/CMakeLists.txt | 8 +- src/Alpha_complex/test/alphaoffreader_for_doc.txt | 27 +++ src/Doxyfile | 5 +- src/common/test/CMakeLists.txt | 6 +- 7 files changed, 163 insertions(+), 128 deletions(-) create mode 100644 src/Alpha_complex/test/alphaoffreader_for_doc.txt (limited to 'src/common') diff --git a/biblio/bibliography.bib b/biblio/bibliography.bib index 3fd1c10a..859696b4 100644 --- a/biblio/bibliography.bib +++ b/biblio/bibliography.bib @@ -897,6 +897,22 @@ language={English} bibsource = {DBLP, http://dblp.uni-trier.de} } +@ARTICLE{AlphaShapesDefinition, + author = {N. Akkiraju, H. Edelsbrunner, M. Facello, P. Fu, E. P. Mucke, and C. Varela}, + title = {\href{http://pub.ist.ac.at/~edels/Papers/1995-P-06-AlphaShapesSoftware.pdf}{Alpha shapes: definition and software}}, + journal = {Proc. Internat. Comput. Geom. Software Workshop 1995}, + year = {1995}, + bibsource = {http://pub.ist.ac.at} +} + +@ARTICLE{AlphaShapesIntroduction, + author = {Kaspar Fischer}, + title = {\href{http://www.cs.uu.nl/docs/vakken/ddm/texts/Delaunay/alphashapes.pdf}{Introduction to Alpha Shapes}}, + journal = {Unknown}, + year = {Unknown}, + bibsource = {http://www.cs.uu.nl} +} + misc{buddha_stanford_scan, author = "", title = "The Stanford 3D Scanning Repository", diff --git a/src/Alpha_complex/example/Alpha_complex_from_off.cpp b/src/Alpha_complex/example/Alpha_complex_from_off.cpp index d129ebf7..0d7af117 100644 --- a/src/Alpha_complex/example/Alpha_complex_from_off.cpp +++ b/src/Alpha_complex/example/Alpha_complex_from_off.cpp @@ -1,25 +1,3 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Vincent Rouvreau - * - * Copyright (C) 2014 INRIA Saclay (France) - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - // to construct a Delaunay_triangulation from a OFF file #include "gudhi/Delaunay_triangulation_off_io.h" #include "gudhi/Alpha_complex.h" diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex.h b/src/Alpha_complex/include/gudhi/Alpha_complex.h index d25c05cb..44741e3b 100644 --- a/src/Alpha_complex/include/gudhi/Alpha_complex.h +++ b/src/Alpha_complex/include/gudhi/Alpha_complex.h @@ -52,17 +52,37 @@ namespace alphacomplex { #define Kinit(f) =k.f() /** \defgroup alpha_complex Alpha complex in dimension N - * -
Implementations:
- Alpha complex in dimension N are a subset of Delaunay Triangulation in dimension N. - - - * \author Vincent Rouvreau - * \version 1.0 - * \date 2015 - * \copyright GNU General Public License v3. * @{ + * \author Vincent Rouvreau + * + * \section Definition + * + * Alpha_complex is a Simplex_tree constructed from each finite cell of a Delaunay Triangulation in dimension N. + * + * The filtration value of each simplex is computed from the alpha value of the simplex if it is Gabriel or + * from the alpha value of the simplex coface that makes the simplex not Gabriel. + * + * Please refer to \cite AlphaShapesDefinition for the alpha complex definition or to + * \cite AlphaShapesIntroduction for alpha complex concept vulgarization. + * + * \section Example + * + * This example loads points from an OFF file, builds the Delaunay triangulation, and finally initialize the + * alpha complex with it. + * Then, it is asked to display information about the alpha complex. + * + * \include Alpha_complex_from_off.cpp + * + * When launching: + * + * \code $> ./alphaoffreader ../../data/points/alphashapedoc.off + * \endcode + * + * the program output is: + * + * \include alphaoffreader_for_doc.txt */ +/** @} */ // end defgroup alpha_complex /** * \brief Alpha complex data structure. @@ -74,89 +94,107 @@ namespace alphacomplex { * * */ -class Alpha_complex { +template +class Alpha_complex : public Simplex_tree<> { private: // From Simplex_tree - /** \brief Type required to insert into a simplex_tree (with or without subfaces).*/ + // Type required to insert into a simplex_tree (with or without subfaces). typedef std::vector Vector_vertex; - /** \brief Simplex_handle type from simplex_tree.*/ - typedef typename Gudhi::Simplex_tree<>::Simplex_handle Simplex_handle; - /** \brief Simplex_result is the type returned from simplex_tree insert function.*/ + // Simplex_result is the type returned from simplex_tree insert function. typedef typename std::pair Simplex_result; - /** \brief Filtration_simplex_range type from simplex_tree.*/ - typedef typename Gudhi::Simplex_tree<>::Filtration_simplex_range Filtration_simplex_range; - - /** \brief Simplex_vertex_range type from simplex_tree.*/ - typedef typename Gudhi::Simplex_tree<>::Simplex_vertex_range Simplex_vertex_range; - // From CGAL - /** \brief Kernel for the Delaunay_triangulation. Dimension can be set dynamically.*/ + // Kernel for the Delaunay_triangulation. Dimension can be set dynamically. typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Kernel; - /** \brief Delaunay_triangulation type required to create an alpha-complex.*/ + + // Delaunay_triangulation type required to create an alpha-complex. typedef CGAL::Delaunay_triangulation Delaunay_triangulation; typedef typename Kernel::Compute_squared_radius_d Squared_Radius; typedef typename Kernel::Side_of_bounded_sphere_d Is_Gabriel; - /** \brief Type required to compute squared radius, or side of bounded sphere on a vector of points.*/ + // Type required to compute squared radius, or side of bounded sphere on a vector of points. typedef std::vector Vector_of_CGAL_points; - /** \brief Vertex_iterator type from CGAL.*/ + // Vertex_iterator type from CGAL. typedef Delaunay_triangulation::Vertex_iterator CGAL_vertex_iterator; - /** \brief Boost bimap type to switch from CGAL vertex iterator to simplex tree vertex handle and vice versa.*/ + // Boost bimap type to switch from CGAL vertex iterator to simplex tree vertex handle and vice versa. 
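The bimap declared just below this comment is what lets init() translate in both directions: the left view maps a CGAL vertex iterator to a Simplex_tree vertex handle, the right view maps a vertex handle back to the CGAL vertex and hence its point. A tiny standalone sketch of that access pattern, with strings and ints standing in for the real handle types (names here are made up for illustration):

#include <boost/bimap.hpp>

#include <iostream>
#include <string>

int main() {
  typedef boost::bimap<std::string, int> Bimap;  // stand-in for Bimap_vertex
  Bimap vertex_map;
  vertex_map.insert(Bimap::value_type("cgal_vertex_0", 0));
  vertex_map.insert(Bimap::value_type("cgal_vertex_1", 1));
  // left view: CGAL handle -> simplex tree vertex handle (used when inserting cells)
  std::cout << vertex_map.left.at("cgal_vertex_1") << std::endl;  // prints 1
  // right view: vertex handle -> CGAL handle (used to fetch the point back)
  std::cout << vertex_map.right.at(0) << std::endl;               // prints cgal_vertex_0
  return 0;
}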
typedef boost::bimap< CGAL_vertex_iterator, Vertex_handle > Bimap_vertex; - + private: - /** \brief Alpha complex is represented internally by a simplex tree.*/ - Gudhi::Simplex_tree<> st_; /** \brief Boost bimap to switch from CGAL vertex iterator to simplex tree vertex handle and vice versa.*/ Bimap_vertex cgal_simplextree; /** \brief Pointer on the CGAL Delaunay triangulation.*/ Delaunay_triangulation* triangulation; public: + /** \brief Alpha_complex constructor from an OFF file name. + * Uses the Delaunay_triangulation_off_reader to construct the Delaunay triangulation required to initialize + * the Alpha_complex. + * + * @param[in] off_file_name OFF file [path and] name. + */ Alpha_complex(std::string& off_file_name) - : triangulation(nullptr) { + : triangulation(nullptr) { Gudhi::Delaunay_triangulation_off_reader off_reader(off_file_name); if (!off_reader.is_valid()) { - std::cerr << "Unable to read file " << off_file_name << std::endl; + std::cerr << "Alpha_complex - Unable to read file " << off_file_name << std::endl; exit(-1); // ----- >> } triangulation = off_reader.get_complex(); init(); } + /** \brief Alpha_complex constructor from a Delaunay triangulation. + * + * @param[in] triangulation_ptr Pointer on a Delaunay triangulation. + */ Alpha_complex(Delaunay_triangulation* triangulation_ptr) - : triangulation(triangulation_ptr) { + : triangulation(triangulation_ptr) { init(); } + /** \brief Alpha_complex destructor from a Delaunay triangulation. + * + * @warning Deletes the Delaunay triangulation. + */ ~Alpha_complex() { delete triangulation; } - Filtration_simplex_range filtration_simplex_range() { - return st_.filtration_simplex_range(); - } - - Simplex_vertex_range simplex_vertex_range(Simplex_handle sh) { - return st_.simplex_vertex_range(sh); - } - - /** \brief Returns the filtration value of a simplex. - * - * Called on the null_simplex, returns INFINITY. */ - Gudhi::Simplex_tree<>::Filtration_value filtration(Simplex_handle sh) { - return st_.filtration(sh); - } - private: - + /** \brief Initialize the Alpha_complex from the Delaunay triangulation. + * + * @warning Delaunay triangulation must be already constructed with at least one vertex and dimension must be more + * than 0. + * + * Initialization can be launched once. 
+ */ void init() { - st_.set_dimension(triangulation->maximal_dimension()); + if (triangulation == nullptr) { + std::cerr << "Alpha_complex init - Cannot init from a NULL triangulation" << std::endl; + return; // ----- >> + } + if (triangulation->number_of_vertices() < 1) { + std::cerr << "Alpha_complex init - Cannot init from a triangulation without vertices" << std::endl; + return; // ----- >> + } + if (triangulation->maximal_dimension() < 1) { + std::cerr << "Alpha_complex init - Cannot init from a zero-dimension triangulation" << std::endl; + return; // ----- >> + } + if (num_vertices() > 0) { + std::cerr << "Alpha_complex init - Cannot init twice" << std::endl; + return; // ----- >> + } + + set_dimension(triangulation->maximal_dimension()); // -------------------------------------------------------------------------------------------- // bimap to retrieve simplex tree vertex handles from CGAL vertex iterator and vice versa @@ -187,7 +225,7 @@ class Alpha_complex { std::cout << std::endl; #endif // DEBUG_TRACES // Insert each simplex and its subfaces in the simplex tree - filtration is NaN - Simplex_result insert_result = st_.insert_simplex_and_subfaces(vertexVector, + Simplex_result insert_result = insert_simplex_and_subfaces(vertexVector, std::numeric_limits::quiet_NaN()); if (!insert_result.second) { std::cerr << "Alpha_complex::init insert_simplex_and_subfaces failed" << std::endl; @@ -198,16 +236,16 @@ class Alpha_complex { Filtration_value filtration_max = 0.0; // -------------------------------------------------------------------------------------------- // ### For i : d -> 0 - for (int decr_dim = st_.dimension(); decr_dim >= 0; decr_dim--) { + for (int decr_dim = dimension(); decr_dim >= 0; decr_dim--) { // ### Foreach Sigma of dim i - for (auto f_simplex : st_.skeleton_simplex_range(decr_dim)) { - int f_simplex_dim = st_.dimension(f_simplex); + for (auto f_simplex : skeleton_simplex_range(decr_dim)) { + int f_simplex_dim = dimension(f_simplex); if (decr_dim == f_simplex_dim) { Vector_of_CGAL_points pointVector; #ifdef DEBUG_TRACES std::cout << "Sigma of dim " << decr_dim << " is"; #endif // DEBUG_TRACES - for (auto vertex : st_.simplex_vertex_range(f_simplex)) { + for (auto vertex : simplex_vertex_range(f_simplex)) { pointVector.push_back((cgal_simplextree.right.at(vertex))->point()); #ifdef DEBUG_TRACES std::cout << " " << vertex; @@ -217,20 +255,20 @@ class Alpha_complex { std::cout << std::endl; #endif // DEBUG_TRACES // ### If filt(Sigma) is NaN : filt(Sigma) = alpha(Sigma) - if (isnan(st_.filtration(f_simplex))) { + if (isnan(filtration(f_simplex))) { Filtration_value alpha_complex_filtration = 0.0; // No need to compute squared_radius on a single point - alpha is 0.0 if (f_simplex_dim > 0) { // squared_radius function initialization Kernel k; Squared_Radius squared_radius Kinit(compute_squared_radius_d_object); - + alpha_complex_filtration = squared_radius(pointVector.begin(), pointVector.end()); } - st_.assign_filtration(f_simplex, alpha_complex_filtration); + assign_filtration(f_simplex, alpha_complex_filtration); filtration_max = fmax(filtration_max, alpha_complex_filtration); #ifdef DEBUG_TRACES - std::cout << "filt(Sigma) is NaN : filt(Sigma) =" << st_.filtration(f_simplex) << std::endl; + std::cout << "filt(Sigma) is NaN : filt(Sigma) =" << filtration(f_simplex) << std::endl; #endif // DEBUG_TRACES } propagate_alpha_filtration(f_simplex, decr_dim); @@ -242,30 +280,30 @@ class Alpha_complex { #ifdef DEBUG_TRACES std::cout << "filtration_max=" << filtration_max << 
std::endl; #endif // DEBUG_TRACES - st_.set_filtration(filtration_max); + set_filtration(filtration_max); } template void propagate_alpha_filtration(Simplex_handle f_simplex, int decr_dim) { // ### Foreach Tau face of Sigma - for (auto f_boundary : st_.boundary_simplex_range(f_simplex)) { + for (auto f_boundary : boundary_simplex_range(f_simplex)) { #ifdef DEBUG_TRACES std::cout << " | --------------------------------------------------" << std::endl; std::cout << " | Tau "; - for (auto vertex : st_.simplex_vertex_range(f_boundary)) { + for (auto vertex : simplex_vertex_range(f_boundary)) { std::cout << vertex << " "; } std::cout << "is a face of Sigma" << std::endl; - std::cout << " | isnan(filtration(Tau)=" << isnan(st_.filtration(f_boundary)) << std::endl; + std::cout << " | isnan(filtration(Tau)=" << isnan(filtration(f_boundary)) << std::endl; #endif // DEBUG_TRACES // ### If filt(Tau) is not NaN - if (!isnan(st_.filtration(f_boundary))) { + if (!isnan(filtration(f_boundary))) { // ### filt(Tau) = fmin(filt(Tau), filt(Sigma)) - Filtration_value alpha_complex_filtration = fmin(st_.filtration(f_boundary), st_.filtration(f_simplex)); - st_.assign_filtration(f_boundary, alpha_complex_filtration); + Filtration_value alpha_complex_filtration = fmin(filtration(f_boundary), filtration(f_simplex)); + assign_filtration(f_boundary, alpha_complex_filtration); // No need to check for filtration_max, alpha_complex_filtration is a min of an existing filtration value #ifdef DEBUG_TRACES - std::cout << " | filt(Tau) = fmin(filt(Tau), filt(Sigma)) = " << st_.filtration(f_boundary) << std::endl; + std::cout << " | filt(Tau) = fmin(filt(Tau), filt(Sigma)) = " << filtration(f_boundary) << std::endl; #endif // DEBUG_TRACES // ### Else } else { @@ -275,11 +313,11 @@ class Alpha_complex { // insert the Tau points in a vector for is_gabriel function Vector_of_CGAL_points pointVector; Vertex_handle vertexForGabriel = Vertex_handle(); - for (auto vertex : st_.simplex_vertex_range(f_boundary)) { + for (auto vertex : simplex_vertex_range(f_boundary)) { pointVector.push_back((cgal_simplextree.right.at(vertex))->point()); } // Retrieve the Sigma point that is not part of Tau - parameter for is_gabriel function - for (auto vertex : st_.simplex_vertex_range(f_simplex)) { + for (auto vertex : simplex_vertex_range(f_simplex)) { if (std::find(pointVector.begin(), pointVector.end(), (cgal_simplextree.right.at(vertex))->point()) == pointVector.end()) { // vertex is not found in Tau @@ -300,46 +338,17 @@ class Alpha_complex { if ((is_gabriel(pointVector.begin(), pointVector.end(), (cgal_simplextree.right.at(vertexForGabriel))->point()) == CGAL::ON_BOUNDED_SIDE)) { // ### filt(Tau) = filt(Sigma) - Filtration_value alpha_complex_filtration = st_.filtration(f_simplex); - st_.assign_filtration(f_boundary, alpha_complex_filtration); + Filtration_value alpha_complex_filtration = filtration(f_simplex); + assign_filtration(f_boundary, alpha_complex_filtration); // No need to check for filtration_max, alpha_complex_filtration is an existing filtration value #ifdef DEBUG_TRACES - std::cout << " | filt(Tau) = filt(Sigma) = " << st_.filtration(f_boundary) << std::endl; + std::cout << " | filt(Tau) = filt(Sigma) = " << filtration(f_boundary) << std::endl; #endif // DEBUG_TRACES } } } } } - public: - - /** \brief Returns the number of vertices in the complex. */ - size_t num_vertices() { - return st_.num_vertices(); - } - - /** \brief Returns the number of simplices in the complex. - * - * Does not count the empty simplex. 
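The two kernel functors driving the filtration assignment above can be exercised on their own. Below is a sketch on a made-up planar edge and an extra vertex, none of it taken from the patch: compute_squared_radius_d gives alpha squared of a simplex, and Side_of_bounded_sphere_d is the Gabriel test that decides whether a face keeps its own alpha value or inherits the coface's. The functor getter side_of_bounded_sphere_d_object follows the usual CGAL kernel-object naming and is assumed here; compute_squared_radius_d_object is the one used in the patch.

#include <CGAL/Epick_d.h>

#include <iostream>
#include <vector>

typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Kernel;
typedef Kernel::Point_d Point;

int main() {
  Kernel k;
  std::vector<double> a = {0.0, 0.0}, b = {4.0, 0.0}, c = {0.0, 3.0};
  std::vector<Point> tau = {Point(a.begin(), a.end()), Point(b.begin(), b.end())};
  Point extra(c.begin(), c.end());

  // alpha^2 of the edge [a, b]: squared radius of its smallest circumscribing sphere.
  auto squared_radius = k.compute_squared_radius_d_object();
  std::cout << "alpha^2(tau) = " << squared_radius(tau.begin(), tau.end()) << std::endl;  // 4

  // Gabriel test: tau is Gabriel if the extra vertex lies outside that sphere.
  auto is_gabriel = k.side_of_bounded_sphere_d_object();
  bool tau_is_gabriel = (is_gabriel(tau.begin(), tau.end(), extra) != CGAL::ON_BOUNDED_SIDE);
  std::cout << "tau is Gabriel: " << std::boolalpha << tau_is_gabriel << std::endl;  // true
  return 0;
}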
*/ - const unsigned int& num_simplices() const { - return st_.num_simplices(); - } - - /** \brief Returns an upper bound on the dimension of the simplicial complex. */ - int dimension() { - return st_.dimension(); - } - - /** \brief Returns an upper bound of the filtration values of the simplices. */ - Filtration_value filtration() { - return st_.filtration(); - } - - friend std::ostream& operator<<(std::ostream& os, const Alpha_complex & alpha_complex) { - Gudhi::Simplex_tree<> st = alpha_complex.st_; - os << st << std::endl; - return os; - } }; } // namespace alphacomplex diff --git a/src/Alpha_complex/test/CMakeLists.txt b/src/Alpha_complex/test/CMakeLists.txt index 0bd55433..79300790 100644 --- a/src/Alpha_complex/test/CMakeLists.txt +++ b/src/Alpha_complex/test/CMakeLists.txt @@ -16,9 +16,11 @@ if(CGAL_FOUND) include_directories (BEFORE "../../include") #add_definitions(-DDEBUG_TRACES) - add_executable ( AlphaComplexUnitTest Alpha_complex_unit_test.cpp ) - target_link_libraries(AlphaComplexUnitTest ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) - add_test(AlphaComplexUnitTest ${CMAKE_CURRENT_BINARY_DIR}/AlphaComplexUnitTest) + add_executable ( AlphaComplexUT Alpha_complex_unit_test.cpp ) + target_link_libraries(AlphaComplexUT ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) + add_test(AlphaComplexUT ${CMAKE_CURRENT_BINARY_DIR}/AlphaComplexUT + # XML format for Jenkins xUnit plugin + --log_format=XML --log_sink=${CMAKE_SOURCE_DIR}/AlphaComplexUT.xml --log_level=test_suite --report_level=no) else() message(WARNING "Eigen3 not found. Version 3.1.0 is required for Alpha complex feature.") diff --git a/src/Alpha_complex/test/alphaoffreader_for_doc.txt b/src/Alpha_complex/test/alphaoffreader_for_doc.txt new file mode 100644 index 00000000..1153f097 --- /dev/null +++ b/src/Alpha_complex/test/alphaoffreader_for_doc.txt @@ -0,0 +1,27 @@ +Alpha complex is of dimension 2 - 25 simplices - 7 vertices. +Iterator on alpha complex simplices in the filtration order, with [filtration value]: + ( 1 ) -> [0] + ( 2 ) -> [0] + ( 3 ) -> [0] + ( 4 ) -> [0] + ( 5 ) -> [0] + ( 6 ) -> [0] + ( 7 ) -> [0] + ( 4 3 ) -> [6.25] + ( 6 5 ) -> [7.25] + ( 3 1 ) -> [8.5] + ( 2 1 ) -> [9.25] + ( 4 2 ) -> [10] + ( 3 2 ) -> [11.25] + ( 4 3 2 ) -> [12.5] + ( 3 2 1 ) -> [12.9959] + ( 7 6 ) -> [13.25] + ( 5 3 ) -> [20] + ( 7 5 ) -> [22.7367] + ( 7 6 5 ) -> [22.7367] + ( 7 4 ) -> [30.25] + ( 7 3 ) -> [36.5] + ( 7 4 3 ) -> [36.5] + ( 7 5 3 ) -> [37.2449] + ( 5 1 ) -> [59.7107] + ( 5 3 1 ) -> [59.7107] diff --git a/src/Doxyfile b/src/Doxyfile index 9d4bc9c8..49ec4768 100644 --- a/src/Doxyfile +++ b/src/Doxyfile @@ -812,8 +812,9 @@ EXCLUDE_SYMBOLS = # command). EXAMPLE_PATH = common/example \ - common/test - + common/test \ + Alpha_complex/example \ + Alpha_complex/test \ # If the value of the EXAMPLE_PATH tag contains directories, you can use the # EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and # *.h) to filter out the source-files in the directories. 
If left blank all diff --git a/src/common/test/CMakeLists.txt b/src/common/test/CMakeLists.txt index e4ac6c7b..1b4dd6e0 100644 --- a/src/common/test/CMakeLists.txt +++ b/src/common/test/CMakeLists.txt @@ -23,10 +23,12 @@ if(CGAL_FOUND) target_link_libraries(dtoffrw_UT ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) # Unitary tests - add_test(dtoffrw_UT ${CMAKE_CURRENT_BINARY_DIR}/dtoffrw_UT) + add_test(dtoffrw_UT ${CMAKE_CURRENT_BINARY_DIR}/dtoffrw_UT + # XML format for Jenkins xUnit plugin + --log_format=XML --log_sink=${CMAKE_SOURCE_DIR}/dtoffrw_UT.xml --log_level=test_suite --report_level=no) if (DIFF_PATH) - add_test(diff_files_UT ${DIFF_PATH} ${CMAKE_CURRENT_BINARY_DIR}/UT.off ${CMAKE_CURRENT_BINARY_DIR}/dtoffrw_alphashapedoc_result.off) + add_test(dtoffrw_diff_files_UT ${DIFF_PATH} ${CMAKE_CURRENT_BINARY_DIR}/UT.off ${CMAKE_CURRENT_BINARY_DIR}/dtoffrw_alphashapedoc_result.off) endif() else() -- cgit v1.2.3 From 85059e058ea651d5d9e849c8462cbe5f01e4743b Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Wed, 24 Jun 2015 15:27:19 +0000 Subject: Alpha complex construction from a list of CGAL points git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/alphashapes@641 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 521269f793f9c16c2305db8c97678bea2bf95092 --- .../example/Alpha_complex_from_off.cpp | 2 +- .../example/Alpha_complex_from_points.cpp | 71 +++++++++++++++ src/Alpha_complex/example/CMakeLists.txt | 3 + src/Alpha_complex/include/gudhi/Alpha_complex.h | 32 +++++-- src/Alpha_complex/test/Alpha_complex_unit_test.cpp | 100 ++++++++++++++++++--- src/common/doc/main_page.h | 4 +- 6 files changed, 192 insertions(+), 20 deletions(-) create mode 100644 src/Alpha_complex/example/Alpha_complex_from_points.cpp (limited to 'src/common') diff --git a/src/Alpha_complex/example/Alpha_complex_from_off.cpp b/src/Alpha_complex/example/Alpha_complex_from_off.cpp index 0d7af117..ce278419 100644 --- a/src/Alpha_complex/example/Alpha_complex_from_off.cpp +++ b/src/Alpha_complex/example/Alpha_complex_from_off.cpp @@ -22,7 +22,7 @@ int main(int argc, char **argv) { // ---------------------------------------------------------------------------- // Init of an alpha complex from an OFF file // ---------------------------------------------------------------------------- - Gudhi::alphacomplex::Alpha_complex<> alpha_complex_from_file(off_file_name); + Gudhi::alphacomplex::Alpha_complex alpha_complex_from_file(off_file_name); // ---------------------------------------------------------------------------- // Display information about the alpha complex diff --git a/src/Alpha_complex/example/Alpha_complex_from_points.cpp b/src/Alpha_complex/example/Alpha_complex_from_points.cpp new file mode 100644 index 00000000..fc0e2460 --- /dev/null +++ b/src/Alpha_complex/example/Alpha_complex_from_points.cpp @@ -0,0 +1,71 @@ +// to construct a Delaunay_triangulation from a OFF file +#include "gudhi/Delaunay_triangulation_off_io.h" +#include "gudhi/Alpha_complex.h" + +#include +#include + +#include +#include +#include + +typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Kernel; +typedef Kernel::Point_d Point; +typedef std::vector Vector_of_points; + +int main(int argc, char **argv) { + + // ---------------------------------------------------------------------------- + // Init of a list of points + // ---------------------------------------------------------------------------- + Vector_of_points points; + std::vector coords; + + coords.clear(); + coords.push_back(0.0); + coords.push_back(0.0); + 
coords.push_back(0.0); + coords.push_back(1.0); + points.push_back(Point(coords.begin(), coords.end())); + coords.clear(); + coords.push_back(0.0); + coords.push_back(0.0); + coords.push_back(1.0); + coords.push_back(0.0); + points.push_back(Point(coords.begin(), coords.end())); + coords.clear(); + coords.push_back(0.0); + coords.push_back(1.0); + coords.push_back(0.0); + coords.push_back(0.0); + points.push_back(Point(coords.begin(), coords.end())); + coords.clear(); + coords.push_back(1.0); + coords.push_back(0.0); + coords.push_back(0.0); + coords.push_back(0.0); + points.push_back(Point(coords.begin(), coords.end())); + + // ---------------------------------------------------------------------------- + // Init of an alpha complex from the list of points + // ---------------------------------------------------------------------------- + Gudhi::alphacomplex::Alpha_complex alpha_complex_from_points(3, points.size(), points.begin(), points.end()); + + // ---------------------------------------------------------------------------- + // Display information about the alpha complex + // ---------------------------------------------------------------------------- + std::cout << "Alpha complex is of dimension " << alpha_complex_from_points.dimension() << + " - " << alpha_complex_from_points.num_simplices() << " simplices - " << + alpha_complex_from_points.num_vertices() << " vertices." << std::endl; + + std::cout << "Iterator on alpha complex simplices in the filtration order, with [filtration value]:" << std::endl; + for (auto f_simplex : alpha_complex_from_points.filtration_simplex_range()) { + std::cout << " ( "; + for (auto vertex : alpha_complex_from_points.simplex_vertex_range(f_simplex)) { + std::cout << vertex << " "; + } + std::cout << ") -> " << "[" << alpha_complex_from_points.filtration(f_simplex) << "] "; + std::cout << std::endl; + } + return 0; +} \ No newline at end of file diff --git a/src/Alpha_complex/example/CMakeLists.txt b/src/Alpha_complex/example/CMakeLists.txt index 04c0ba58..2e64e4db 100644 --- a/src/Alpha_complex/example/CMakeLists.txt +++ b/src/Alpha_complex/example/CMakeLists.txt @@ -17,6 +17,9 @@ if(CGAL_FOUND) #add_definitions(-DDEBUG_TRACES) add_executable ( alphaoffreader Alpha_complex_from_off.cpp ) target_link_libraries(alphaoffreader ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) + + add_executable ( alphapoints Alpha_complex_from_points.cpp ) + target_link_libraries(alphapoints ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) else() message(WARNING "Eigen3 not found. Version 3.1.0 is required for Alpha shapes feature.") endif() diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex.h b/src/Alpha_complex/include/gudhi/Alpha_complex.h index 97c30abb..138270ff 100644 --- a/src/Alpha_complex/include/gudhi/Alpha_complex.h +++ b/src/Alpha_complex/include/gudhi/Alpha_complex.h @@ -44,6 +44,7 @@ #include #include #include // NaN +#include // std::iterator namespace Gudhi { @@ -62,11 +63,6 @@ namespace alphacomplex { * Please refer to \ref alpha_complex for examples. * */ -template class Alpha_complex : public Simplex_tree<> { private: // From Simplex_tree @@ -94,6 +90,9 @@ class Alpha_complex : public Simplex_tree<> { // Boost bimap type to switch from CGAL vertex iterator to simplex tree vertex handle and vice versa. typedef boost::bimap< CGAL_vertex_iterator, Vertex_handle > Bimap_vertex; + + // size_type type from CGAL. 
+ typedef Delaunay_triangulation::size_type size_type; private: /** \brief Boost bimap to switch from CGAL vertex iterator to simplex tree vertex handle and vice versa.*/ @@ -108,7 +107,7 @@ class Alpha_complex : public Simplex_tree<> { * * @param[in] off_file_name OFF file [path and] name. */ - Alpha_complex(std::string& off_file_name) + Alpha_complex(const std::string& off_file_name) : triangulation(nullptr) { Gudhi::Delaunay_triangulation_off_reader off_reader(off_file_name); if (!off_reader.is_valid()) { @@ -128,6 +127,27 @@ class Alpha_complex : public Simplex_tree<> { init(); } + /** \brief Alpha_complex constructor from a list of points. + * Uses the Delaunay_triangulation_off_reader to construct the Delaunay triangulation required to initialize + * the Alpha_complex. + * + * @param[in] dimension Dimension of points to be inserted. + * @param[in] size Number of points to be inserted. + * @param[in] firstPoint Iterator on the first point to be inserted. + * @param[in] last Point Iterator on the last point to be inserted. + */ + template + Alpha_complex(int dimension, size_type size, ForwardIterator firstPoint, ForwardIterator lastPoint) + : triangulation(nullptr) { + triangulation = new Delaunay_triangulation(dimension); + Delaunay_triangulation::size_type inserted = triangulation->insert(firstPoint, lastPoint); + if (inserted != size) { + std::cerr << "Alpha_complex - insertion failed " << inserted << " != " << size<< std::endl; + exit(-1); // ----- >> + } + init(); + } + /** \brief Alpha_complex destructor from a Delaunay triangulation. * * @warning Deletes the Delaunay triangulation. diff --git a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp index 86d4d9c3..9530314c 100644 --- a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp +++ b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp @@ -28,15 +28,19 @@ #include "gudhi/Delaunay_triangulation_off_io.h" #include "gudhi/Alpha_complex.h" +#include +#include + #include // float comparison +#include // Use dynamic_dimension_tag for the user to be able to set dimension -typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > K; -typedef CGAL::Delaunay_triangulation T; -// The triangulation uses the default instantiation of the -// TriangulationDataStructure template parameter +typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Kernel; +typedef Kernel::Point_d Point; +typedef std::vector Vector_of_points; +// The triangulation uses the default instantiation of the TriangulationDataStructure template parameter -BOOST_AUTO_TEST_CASE( S4_100_OFF_file ) { +BOOST_AUTO_TEST_CASE(S4_100_OFF_file) { // ---------------------------------------------------------------------------- // // Init of an alpha-complex from a OFF file @@ -44,24 +48,24 @@ BOOST_AUTO_TEST_CASE( S4_100_OFF_file ) { // ---------------------------------------------------------------------------- std::string off_file_name("S4_100.off"); std::cout << "========== OFF FILE NAME = " << off_file_name << " ==========" << std::endl; - + Gudhi::alphacomplex::Alpha_complex alpha_complex_from_file(off_file_name); const int DIMENSION = 4; std::cout << "alpha_complex_from_file.dimension()=" << alpha_complex_from_file.dimension() << std::endl; BOOST_CHECK(alpha_complex_from_file.dimension() == DIMENSION); - + const int NUMBER_OF_VERTICES = 100; std::cout << "alpha_complex_from_file.num_vertices()=" << alpha_complex_from_file.num_vertices() << std::endl; BOOST_CHECK(alpha_complex_from_file.num_vertices() == NUMBER_OF_VERTICES); - + 
const int NUMBER_OF_SIMPLICES = 6879; std::cout << "alpha_complex_from_file.num_simplices()=" << alpha_complex_from_file.num_simplices() << std::endl; BOOST_CHECK(alpha_complex_from_file.num_simplices() == NUMBER_OF_SIMPLICES); } -BOOST_AUTO_TEST_CASE( S8_10_OFF_file ) { +BOOST_AUTO_TEST_CASE(S8_10_OFF_file) { // ---------------------------------------------------------------------------- // // Init of an alpha-complex from a OFF file @@ -69,19 +73,91 @@ BOOST_AUTO_TEST_CASE( S8_10_OFF_file ) { // ---------------------------------------------------------------------------- std::string off_file_name("S8_10.off"); std::cout << "========== OFF FILE NAME = " << off_file_name << " ==========" << std::endl; - + Gudhi::alphacomplex::Alpha_complex alpha_complex_from_file(off_file_name); const int DIMENSION = 8; std::cout << "alpha_complex_from_file.dimension()=" << alpha_complex_from_file.dimension() << std::endl; BOOST_CHECK(alpha_complex_from_file.dimension() == DIMENSION); - + const int NUMBER_OF_VERTICES = 10; std::cout << "alpha_complex_from_file.num_vertices()=" << alpha_complex_from_file.num_vertices() << std::endl; BOOST_CHECK(alpha_complex_from_file.num_vertices() == NUMBER_OF_VERTICES); - + const int NUMBER_OF_SIMPLICES = 1007; std::cout << "alpha_complex_from_file.num_simplices()=" << alpha_complex_from_file.num_simplices() << std::endl; BOOST_CHECK(alpha_complex_from_file.num_simplices() == NUMBER_OF_SIMPLICES); +} + +bool are_almost_the_same(float a, float b) { + return std::fabs(a - b) < std::numeric_limits::epsilon(); +} + +BOOST_AUTO_TEST_CASE(Alpha_complex_from_points) { + + // ---------------------------------------------------------------------------- + // Init of a list of points + // ---------------------------------------------------------------------------- + Vector_of_points points; + std::vector coords; + + coords.clear(); + coords.push_back(0.0); + coords.push_back(0.0); + coords.push_back(0.0); + coords.push_back(1.0); + points.push_back(Point(coords.begin(), coords.end())); + coords.clear(); + coords.push_back(0.0); + coords.push_back(0.0); + coords.push_back(1.0); + coords.push_back(0.0); + points.push_back(Point(coords.begin(), coords.end())); + coords.clear(); + coords.push_back(0.0); + coords.push_back(1.0); + coords.push_back(0.0); + coords.push_back(0.0); + points.push_back(Point(coords.begin(), coords.end())); + coords.clear(); + coords.push_back(1.0); + coords.push_back(0.0); + coords.push_back(0.0); + coords.push_back(0.0); + points.push_back(Point(coords.begin(), coords.end())); + + // ---------------------------------------------------------------------------- + // Init of an alpha complex from the list of points + // ---------------------------------------------------------------------------- + Gudhi::alphacomplex::Alpha_complex alpha_complex_from_points(3, points.size(), points.begin(), points.end()); + + std::cout << "========== Alpha_complex_from_points ==========" << std::endl; + + std::cout << "alpha_complex_from_points.dimension()=" << alpha_complex_from_points.dimension() << std::endl; + BOOST_CHECK(alpha_complex_from_points.dimension() == 3); + std::cout << "alpha_complex_from_points.num_simplices()=" << alpha_complex_from_points.num_simplices() << std::endl; + BOOST_CHECK(alpha_complex_from_points.num_simplices() == 15); + std::cout << "alpha_complex_from_points.num_vertices()=" << alpha_complex_from_points.num_vertices() << std::endl; + BOOST_CHECK(alpha_complex_from_points.num_vertices() == 4); + + for (auto f_simplex : 
alpha_complex_from_points.filtration_simplex_range()) { + switch (alpha_complex_from_points.dimension(f_simplex)) { + case 0: + BOOST_CHECK(are_almost_the_same(alpha_complex_from_points.filtration(f_simplex), 0.0)); + break; + case 1: + BOOST_CHECK(are_almost_the_same(alpha_complex_from_points.filtration(f_simplex), 1.0/2.0)); + break; + case 2: + BOOST_CHECK(are_almost_the_same(alpha_complex_from_points.filtration(f_simplex), 2.0/3.0)); + break; + case 3: + BOOST_CHECK(are_almost_the_same(alpha_complex_from_points.filtration(f_simplex), 3.0/4.0)); + break; + default: + BOOST_CHECK(false); // Shall not happen + break; + } + } } diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h index 315aa0ac..770d2216 100644 --- a/src/common/doc/main_page.h +++ b/src/common/doc/main_page.h @@ -48,8 +48,10 @@ CGAL is a C++ library which provides easy access to efficient and reliable geome The following example requires the Computational Geometry Algorithms Library (CGAL) and will not be built if CGAL is not installed: - Simplex_tree/simplex_tree_from_alpha_shapes_3 + - Alpha_complex/Alpha_complex_from_off + - Alpha_complex/Alpha_complex_from_points -Having CGAL version 4.4 or higher installed is recommended. The procedure to install this library according to +Having CGAL version 4.7 or higher installed is recommended. The procedure to install this library according to your operating system is detailed here http://doc.cgal.org/latest/Manual/installation.html \subsection demos Demos and examples -- cgit v1.2.3 From fc092e56fd3148c40b054d9cd05b05ed3c171295 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Thu, 25 Jun 2015 07:39:26 +0000 Subject: Modification for doxygen to be generated from user version (to fix bad path in doc) git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/alphashapes@642 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 68362c2ab9d83a8daa31bcb9d7799216c8ccf94c --- scripts/generate_version.sh | 20 ++++++++-------- .../example/alphaoffreader_for_doc.txt | 27 ++++++++++++++++++++++ src/Alpha_complex/test/alphaoffreader_for_doc.txt | 27 ---------------------- src/Doxyfile | 17 +++++++------- .../example/dtoffrw_alphashapedoc_result.txt | 3 +++ src/common/test/dtoffrw_alphashapedoc_result.txt | 3 --- 6 files changed, 48 insertions(+), 49 deletions(-) create mode 100644 src/Alpha_complex/example/alphaoffreader_for_doc.txt delete mode 100644 src/Alpha_complex/test/alphaoffreader_for_doc.txt create mode 100644 src/common/example/dtoffrw_alphashapedoc_result.txt delete mode 100644 src/common/test/dtoffrw_alphashapedoc_result.txt (limited to 'src/common') diff --git a/scripts/generate_version.sh b/scripts/generate_version.sh index 4d28cf53..e7575282 100755 --- a/scripts/generate_version.sh +++ b/scripts/generate_version.sh @@ -2,6 +2,8 @@ #usage bash generate_version.sh : dont generate if svn st non empty #usage bash generate_version.sh -f : generate even if svn is empty #usage bash generate_version.sh -f DIR : generate even if svn is empty and save library in dir +# +# 23/06/2015 - Remove source, add biblio, and doc # VERSION CHECK ROOT_DIR=.. 
VERSION_FILE="$ROOT_DIR/Version.txt" @@ -48,11 +50,13 @@ cp $ROOT_DIR/COPYING $VERSION_DIR cp -R $ROOT_DIR/data $VERSION_DIR cp $ROOT_DIR/src/CMakeLists.txt $VERSION_DIR cp $ROOT_DIR/src/Doxyfile $VERSION_DIR +cp -R $ROOT_DIR/biblio $VERSION_DIR # PACKAGE LEVEL COPY PACKAGE_INC_DIR="/include" -PACKAGE_SRC_DIR="/source" +#PACKAGE_SRC_DIR="/source" PACKAGE_EX_DIR="/example" +PACKAGE_DOC_DIR="/doc" for package in `ls $ROOT_DIR/src/` do echo $package @@ -77,20 +81,16 @@ do fi cp -R $ROOT_DIR/src/$package$PACKAGE_INC_DIR/* $VERSION_DIR$PACKAGE_INC_DIR/ fi - if [ -d "$ROOT_DIR/src/$package$PACKAGE_SRC_DIR" ] - then - if [ ! -d "$VERSION_DIR$PACKAGE_SRC_DIR" ] - then - # MUST CREATE DIRECTORY ON FIRST LOOP - mkdir $VERSION_DIR$PACKAGE_INC_DIR - fi - cp -R $ROOT_DIR/src/$package$PACKAGE_SRC_DIR/* $VERSION_DIR$PACKAGE_SRC_DIR/ - fi if [ -d "$ROOT_DIR/src/$package$PACKAGE_EX_DIR" ] then mkdir -p $VERSION_DIR$PACKAGE_EX_DIR/$package cp -R $ROOT_DIR/src/$package$PACKAGE_EX_DIR/* $VERSION_DIR$PACKAGE_EX_DIR/$package fi + if [ -d "$ROOT_DIR/src/$package$PACKAGE_DOC_DIR" ] + then + mkdir -p $VERSION_DIR$PACKAGE_DOC_DIR/$package + cp -R $ROOT_DIR/src/$package$PACKAGE_DOC_DIR/* $VERSION_DIR$PACKAGE_DOC_DIR/$package + fi fi fi done diff --git a/src/Alpha_complex/example/alphaoffreader_for_doc.txt b/src/Alpha_complex/example/alphaoffreader_for_doc.txt new file mode 100644 index 00000000..1153f097 --- /dev/null +++ b/src/Alpha_complex/example/alphaoffreader_for_doc.txt @@ -0,0 +1,27 @@ +Alpha complex is of dimension 2 - 25 simplices - 7 vertices. +Iterator on alpha complex simplices in the filtration order, with [filtration value]: + ( 1 ) -> [0] + ( 2 ) -> [0] + ( 3 ) -> [0] + ( 4 ) -> [0] + ( 5 ) -> [0] + ( 6 ) -> [0] + ( 7 ) -> [0] + ( 4 3 ) -> [6.25] + ( 6 5 ) -> [7.25] + ( 3 1 ) -> [8.5] + ( 2 1 ) -> [9.25] + ( 4 2 ) -> [10] + ( 3 2 ) -> [11.25] + ( 4 3 2 ) -> [12.5] + ( 3 2 1 ) -> [12.9959] + ( 7 6 ) -> [13.25] + ( 5 3 ) -> [20] + ( 7 5 ) -> [22.7367] + ( 7 6 5 ) -> [22.7367] + ( 7 4 ) -> [30.25] + ( 7 3 ) -> [36.5] + ( 7 4 3 ) -> [36.5] + ( 7 5 3 ) -> [37.2449] + ( 5 1 ) -> [59.7107] + ( 5 3 1 ) -> [59.7107] diff --git a/src/Alpha_complex/test/alphaoffreader_for_doc.txt b/src/Alpha_complex/test/alphaoffreader_for_doc.txt deleted file mode 100644 index 1153f097..00000000 --- a/src/Alpha_complex/test/alphaoffreader_for_doc.txt +++ /dev/null @@ -1,27 +0,0 @@ -Alpha complex is of dimension 2 - 25 simplices - 7 vertices. -Iterator on alpha complex simplices in the filtration order, with [filtration value]: - ( 1 ) -> [0] - ( 2 ) -> [0] - ( 3 ) -> [0] - ( 4 ) -> [0] - ( 5 ) -> [0] - ( 6 ) -> [0] - ( 7 ) -> [0] - ( 4 3 ) -> [6.25] - ( 6 5 ) -> [7.25] - ( 3 1 ) -> [8.5] - ( 2 1 ) -> [9.25] - ( 4 2 ) -> [10] - ( 3 2 ) -> [11.25] - ( 4 3 2 ) -> [12.5] - ( 3 2 1 ) -> [12.9959] - ( 7 6 ) -> [13.25] - ( 5 3 ) -> [20] - ( 7 5 ) -> [22.7367] - ( 7 6 5 ) -> [22.7367] - ( 7 4 ) -> [30.25] - ( 7 3 ) -> [36.5] - ( 7 4 3 ) -> [36.5] - ( 7 5 3 ) -> [37.2449] - ( 5 1 ) -> [59.7107] - ( 5 3 1 ) -> [59.7107] diff --git a/src/Doxyfile b/src/Doxyfile index a84c4897..0724de1c 100644 --- a/src/Doxyfile +++ b/src/Doxyfile @@ -672,7 +672,7 @@ LAYOUT_FILE = # search path. Do not use file names with spaces, bibtex cannot handle them. See # also \cite for info how to create references. 
-CITE_BIB_FILES = ../biblio/bibliography.bib +CITE_BIB_FILES = biblio/bibliography.bib #--------------------------------------------------------------------------- # Configuration options related to warning and progress messages @@ -811,10 +811,9 @@ EXCLUDE_SYMBOLS = # that contain example code fragments that are included (see the \include # command). -EXAMPLE_PATH = common/example \ - common/test \ - Alpha_complex/example \ - Alpha_complex/test \ +EXAMPLE_PATH = example/common/ \ + example/Alpha_complex/ + # If the value of the EXAMPLE_PATH tag contains directories, you can use the # EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and # *.h) to filter out the source-files in the directories. If left blank all @@ -833,10 +832,10 @@ EXAMPLE_RECURSIVE = NO # that contain images that are to be included in the documentation (see the # \image command). -IMAGE_PATH = Skeleton_blocker/doc/ \ - Alpha_complex/doc/ \ - common/doc/ \ - Contraction/doc/ +IMAGE_PATH = doc/Skeleton_blocker/ \ + doc/Alpha_complex/ \ + doc/common/ \ + doc/Contraction/ # The INPUT_FILTER tag can be used to specify a program that doxygen should diff --git a/src/common/example/dtoffrw_alphashapedoc_result.txt b/src/common/example/dtoffrw_alphashapedoc_result.txt new file mode 100644 index 00000000..57761d14 --- /dev/null +++ b/src/common/example/dtoffrw_alphashapedoc_result.txt @@ -0,0 +1,3 @@ +Number of vertices= 7 +Number of finite full cells= 6 + diff --git a/src/common/test/dtoffrw_alphashapedoc_result.txt b/src/common/test/dtoffrw_alphashapedoc_result.txt deleted file mode 100644 index 57761d14..00000000 --- a/src/common/test/dtoffrw_alphashapedoc_result.txt +++ /dev/null @@ -1,3 +0,0 @@ -Number of vertices= 7 -Number of finite full cells= 6 - -- cgit v1.2.3 From e1b8ee37a896ddb8d733ecd8752eb70307521f7a Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Thu, 25 Jun 2015 08:25:54 +0000 Subject: File was not renamed here git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/alphashapes@643 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: ea5a10408d8f44eb508ae68cb3ed0699dc4eba93 --- src/common/example/CMakeLists.txt | 2 +- src/common/test/dtoffrw_unit_test.cpp | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) (limited to 'src/common') diff --git a/src/common/example/CMakeLists.txt b/src/common/example/CMakeLists.txt index ae30da54..4cb03dbe 100644 --- a/src/common/example/CMakeLists.txt +++ b/src/common/example/CMakeLists.txt @@ -15,7 +15,7 @@ if(CGAL_FOUND) add_executable ( dtoffrw Delaunay_triangulation_off_rw.cpp ) target_link_libraries(dtoffrw ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) - add_test(dtoffrw ${CMAKE_CURRENT_BINARY_DIR}/dtoffrw ${CMAKE_SOURCE_DIR}/data/points/alphashapedoc.off ${CMAKE_CURRENT_BINARY_DIR}/result.off) + add_test(dtoffrw ${CMAKE_CURRENT_BINARY_DIR}/dtoffrw ${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off ${CMAKE_CURRENT_BINARY_DIR}/result.off) else() message(WARNING "Eigen3 not found. 
Version 3.1.0 is required for Alpha shapes feature.") diff --git a/src/common/test/dtoffrw_unit_test.cpp b/src/common/test/dtoffrw_unit_test.cpp index 4905d845..d2705955 100644 --- a/src/common/test/dtoffrw_unit_test.cpp +++ b/src/common/test/dtoffrw_unit_test.cpp @@ -43,7 +43,7 @@ typedef CGAL::Delaunay_triangulation T; BOOST_AUTO_TEST_CASE( Delaunay_triangulation_doc_test ) { // Read the OFF file (input file name given as parameter) and triangulates points - Gudhi::Delaunay_triangulation_off_reader off_reader("../../../data/points/alphashapedoc.off"); + Gudhi::Delaunay_triangulation_off_reader off_reader("../../../data/points/alphacomplexdoc.off"); // Check the read operation was correct BOOST_CHECK(off_reader.is_valid()); @@ -75,7 +75,7 @@ BOOST_AUTO_TEST_CASE( Delaunay_triangulation_unexisting_file_read_test ) BOOST_AUTO_TEST_CASE( Delaunay_triangulation_unexisting_file_write_test ) { // Read the OFF file (input file name given as parameter) and triangulates points - Gudhi::Delaunay_triangulation_off_reader off_reader("../../../data/points/alphashapedoc.off"); + Gudhi::Delaunay_triangulation_off_reader off_reader("../../../data/points/alphacomplexdoc.off"); // Retrieve the triangulation T* triangulation = off_reader.get_complex(); -- cgit v1.2.3 From 89d8caff43f3c38ee3ce3fd96000eaa549ba0481 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Tue, 1 Sep 2015 15:04:46 +0000 Subject: UT fix to compile and run under osX git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/alphashapes@768 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: a8351f5bd12a2d5e4869a61c298ddf76ad04f91d --- src/Alpha_complex/test/Alpha_complex_unit_test.cpp | 8 ++++---- src/common/test/dtoffrw_unit_test.cpp | 8 +++----- 2 files changed, 7 insertions(+), 9 deletions(-) (limited to 'src/common') diff --git a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp index b2597eff..7a0800e4 100644 --- a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp +++ b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp @@ -20,10 +20,10 @@ * along with this program. If not, see . */ -#define BOOST_TEST_MODULE alpha_complex -#include -#include -#include +#define BOOST_TEST_DYN_LINK +#define BOOST_TEST_MODULE "alpha_complex" +#include + #include #include diff --git a/src/common/test/dtoffrw_unit_test.cpp b/src/common/test/dtoffrw_unit_test.cpp index d2705955..ada218ac 100644 --- a/src/common/test/dtoffrw_unit_test.cpp +++ b/src/common/test/dtoffrw_unit_test.cpp @@ -20,8 +20,6 @@ * along with this program. If not, see . 
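The osX fix switches the unit tests to the dynamically linked, single-header variant of Boost.Test, as in the Alpha_complex test above; the same change follows for the Delaunay OFF read/write test. Reduced to a minimal skeleton, with a placeholder module name and test body, the pattern is:

#define BOOST_TEST_DYN_LINK
#define BOOST_TEST_MODULE "some_module_name"
#include <boost/test/unit_test.hpp>

// Must be linked against the shared Boost unit test framework library,
// as done with ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY} in the CMakeLists files.
BOOST_AUTO_TEST_CASE(minimal_check) {
  BOOST_CHECK(1 + 1 == 2);
}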
*/ -#define BOOST_TEST_MODULE DelaunayTriangulationOffFileReadWrite test - // to construct a Delaunay_triangulation from a OFF file #include "gudhi/Delaunay_triangulation_off_io.h" @@ -32,9 +30,9 @@ #include #include -#include -#include -//#include +#define BOOST_TEST_DYN_LINK +#define BOOST_TEST_MODULE "delaunay_triangulation_off_read_write" +#include // Use dynamic_dimension_tag for the user to be able to set dimension typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > K; -- cgit v1.2.3 From ba47def14a25fb1299ef0980366c2c5479fb1ccc Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Fri, 2 Oct 2015 09:34:09 +0000 Subject: Review fix git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/alphashapes@816 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: d112bf6b1b07a75947392576baa53321326e65c4 --- src/Alpha_complex/doc/Intro_alpha_complex.h | 8 ++++---- .../example/Alpha_complex_from_off.cpp | 8 +++----- src/Alpha_complex/test/Alpha_complex_unit_test.cpp | 4 ++-- .../example/Delaunay_triangulation_off_rw.cpp | 5 ++--- .../include/gudhi/Delaunay_triangulation_off_io.h | 24 +++++++++++----------- 5 files changed, 23 insertions(+), 26 deletions(-) (limited to 'src/common') diff --git a/src/Alpha_complex/doc/Intro_alpha_complex.h b/src/Alpha_complex/doc/Intro_alpha_complex.h index d266219b..2cb37578 100644 --- a/src/Alpha_complex/doc/Intro_alpha_complex.h +++ b/src/Alpha_complex/doc/Intro_alpha_complex.h @@ -93,8 +93,8 @@ namespace alphacomplex { * \end{algorithmic} * \f} * - * From the example above, it means the algorithm will look into each triangulation ([1,2,3], [2,3,4], [1,3,5], ...), - * will compute the filtration value of the triangulation, and then will propagate the filtration value as described + * From the example above, it means the algorithm will look into each triangle ([1,2,3], [2,3,4], [1,3,5], ...), + * will compute the filtration value of the triangle, and then will propagate the filtration value as described * here : * \image html "alpha_complex_doc_135.png" "Filtration value propagation example" * Then, the algorithm will look into each edge ([1,2], [2,3], [1,3], ...), @@ -105,8 +105,8 @@ namespace alphacomplex { * * \section alpha-shape Alpha shape * - * In the example above, the alpha shape of \f$\alpha^2_{74} < \alpha^2 < \alpha^2_{73}\f$ is the alpha complex where the - * \f$\alpha^2_{74} <\f$ filtration value \f$< \alpha^2_{73}\f$ as described in \cite AlphaShapesIntroduction + * In the example above, the alpha shape of \f$\alpha^2_{63} < \alpha^2 < \alpha^2_{62}\f$ is the alpha complex where the + * \f$\alpha^2_{63} <\f$ filtration value \f$< \alpha^2_{62}\f$ as described in \cite AlphaShapesIntroduction * * \image html "alpha_complex_doc_alpha_shape.png" "Alpha shape example" * \copyright GNU General Public License v3. 
diff --git a/src/Alpha_complex/example/Alpha_complex_from_off.cpp b/src/Alpha_complex/example/Alpha_complex_from_off.cpp index b698d6d7..e140fe3d 100644 --- a/src/Alpha_complex/example/Alpha_complex_from_off.cpp +++ b/src/Alpha_complex/example/Alpha_complex_from_off.cpp @@ -1,11 +1,9 @@ -#include -#include - +#include #include // to construct a Delaunay_triangulation from a OFF file -#include "gudhi/Delaunay_triangulation_off_io.h" -#include "gudhi/Alpha_complex.h" +#include +#include void usage(char * const progName) { std::cerr << "Usage: " << progName << " filename.off alpha_square_max_value" << std::endl; diff --git a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp index 7a0800e4..b630e999 100644 --- a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp +++ b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp @@ -33,8 +33,8 @@ #include // to construct a Delaunay_triangulation from a OFF file -#include "gudhi/Delaunay_triangulation_off_io.h" -#include "gudhi/Alpha_complex.h" +#include +#include // Use dynamic_dimension_tag for the user to be able to set dimension typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Kernel_d; diff --git a/src/common/example/Delaunay_triangulation_off_rw.cpp b/src/common/example/Delaunay_triangulation_off_rw.cpp index d1aa7988..75e4fafb 100644 --- a/src/common/example/Delaunay_triangulation_off_rw.cpp +++ b/src/common/example/Delaunay_triangulation_off_rw.cpp @@ -1,11 +1,10 @@ // to construct a Delaunay_triangulation from a OFF file -#include "gudhi/Delaunay_triangulation_off_io.h" +#include #include #include -#include -#include +#include #include // Use dynamic_dimension_tag for the user to be able to set dimension diff --git a/src/common/include/gudhi/Delaunay_triangulation_off_io.h b/src/common/include/gudhi/Delaunay_triangulation_off_io.h index de5fa2af..0c5474c9 100644 --- a/src/common/include/gudhi/Delaunay_triangulation_off_io.h +++ b/src/common/include/gudhi/Delaunay_triangulation_off_io.h @@ -39,7 +39,7 @@ namespace Gudhi { template class Delaunay_triangulation_off_visitor_reader { private: - Complex* _complex; + Complex* complex_; typedef typename Complex::Point Point; public: @@ -48,10 +48,10 @@ class Delaunay_triangulation_off_visitor_reader { /** \brief Delaunay_triangulation_off_visitor_reader constructor * - * @param[in] _complex_ptr pointer on a Delaunay triangulation. + * @param[in] complex_ptr_ pointer on a Delaunay triangulation. */ - Delaunay_triangulation_off_visitor_reader(Complex* _complex_ptr) - : _complex(nullptr) { } + Delaunay_triangulation_off_visitor_reader(Complex* complex_ptr_) + : complex_(nullptr) { } /** \brief Off_reader visitor init implementation. * @@ -77,7 +77,7 @@ class Delaunay_triangulation_off_visitor_reader { "file for Delaunay triangulation - edges are computed." << std::endl; } // Complex construction with dimension from file - _complex = new Complex(dim); + complex_ = new Complex(dim); } /** \brief Off_reader visitor point implementation. @@ -95,7 +95,7 @@ class Delaunay_triangulation_off_visitor_reader { } std::cout << std::endl; #endif // DEBUG_TRACES - _complex->insert(Point(point.size(), point.begin(), point.end())); + complex_->insert(Point(point.size(), point.begin(), point.end())); } // Off_reader visitor maximal_face implementation - not used @@ -115,7 +115,7 @@ class Delaunay_triangulation_off_visitor_reader { * @warning The returned pointer can be nullptr. 
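Put together, the reader and writer touched by this renaming are used as a pair: read an OFF file into a CGAL Delaunay triangulation, query it, then write the cells back out. A hedged sketch follows; the file names and the template instantiation are illustrative, while the calls mirror the unit test earlier in this series.

#include <gudhi/Delaunay_triangulation_off_io.h>

#include <CGAL/Epick_d.h>
#include <CGAL/Delaunay_triangulation.h>

#include <iostream>

typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > K;
typedef CGAL::Delaunay_triangulation<K> T;

int main() {
  // Read the OFF file and triangulate its points.
  Gudhi::Delaunay_triangulation_off_reader<T> off_reader("../../data/points/alphacomplexdoc.off");
  if (!off_reader.is_valid()) {
    std::cerr << "Unable to read the OFF file" << std::endl;
    return 1;
  }
  T* triangulation = off_reader.get_complex();  // can still be nullptr, as warned above
  if (triangulation == nullptr) return 1;
  std::cout << "Number of vertices= " << triangulation->number_of_vertices() << std::endl;
  std::cout << "Number of finite full cells= " << triangulation->number_of_finite_full_cells() << std::endl;
  // Write the points and the triangulated cells as faces of a new OFF file.
  Gudhi::Delaunay_triangulation_off_writer<T> off_writer("result.off", triangulation);
  bool ok = off_writer.is_valid();
  delete triangulation;
  return ok ? 0 : 1;
}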
*/ Complex* get_complex() const { - return _complex; + return complex_; } }; @@ -157,12 +157,12 @@ class Delaunay_triangulation_off_reader { : valid_(false) { std::ifstream stream(name_file); if (stream.is_open()) { - Delaunay_triangulation_off_visitor_reader off_visitor(_complex); + Delaunay_triangulation_off_visitor_reader off_visitor(complex_); Off_reader off_reader(stream); valid_ = off_reader.read(off_visitor); if (valid_) { - _complex = off_visitor.get_complex(); - if (_complex == nullptr) { + complex_ = off_visitor.get_complex(); + if (complex_ == nullptr) { std::cerr << "Delaunay_triangulation_off_reader::Delaunay_triangulation_off_reader off_visitor returns an empty pointer" << std::endl; valid_ = false; } @@ -190,7 +190,7 @@ class Delaunay_triangulation_off_reader { */ Complex* get_complex() const { if (valid_) - return _complex; + return complex_; return nullptr; } @@ -199,7 +199,7 @@ class Delaunay_triangulation_off_reader { /** \brief OFF file read status.*/ bool valid_; /** \brief A pointer on the Delaunay triangulation.*/ - Complex* _complex; + Complex* complex_; }; /** \brief OFF file writer from a Delaunay triangulation. -- cgit v1.2.3 From c14909eae41883308428095758360de3a7202a0d Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Fri, 2 Oct 2015 13:13:38 +0000 Subject: Backmerge of trunk git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/alphashapes@820 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 812e0b84d187dd5e30a6a18c12612ebc9bf9206a --- src/GudhUI/gui/MainWindow.h | 2 + src/GudhUI/gui/Menu_edge_contraction.h | 2 + src/GudhUI/view/Viewer.h | 3 + src/GudhUI/view/Viewer_instructor.h | 2 + .../include/gudhi/Persistent_cohomology.h | 28 +- .../Persistent_cohomology_column.h | 14 +- src/Simplex_tree/concept/IndexingTag.h | 2 +- src/Simplex_tree/concept/SimplexKey.h | 4 +- src/Simplex_tree/concept/SimplexTreeOptions.h | 41 +++ src/Simplex_tree/concept/VertexHandle.h | 3 +- src/Simplex_tree/example/CMakeLists.txt | 9 +- src/Simplex_tree/example/mini_simplex_tree.cpp | 68 ++++ src/Simplex_tree/include/gudhi/Simplex_tree.h | 354 ++++++++++++++------- .../Simplex_tree_node_explicit_storage.h | 43 +-- .../gudhi/Simplex_tree/Simplex_tree_siblings.h | 6 +- src/Simplex_tree/test/simplex_tree_unit_test.cpp | 276 +++++++++------- src/common/include/gudhi/Simple_object_pool.h | 64 ++++ 17 files changed, 629 insertions(+), 292 deletions(-) create mode 100644 src/Simplex_tree/concept/SimplexTreeOptions.h create mode 100644 src/Simplex_tree/example/mini_simplex_tree.cpp create mode 100644 src/common/include/gudhi/Simple_object_pool.h (limited to 'src/common') diff --git a/src/GudhUI/gui/MainWindow.h b/src/GudhUI/gui/MainWindow.h index 587f1c6f..e46b72a3 100644 --- a/src/GudhUI/gui/MainWindow.h +++ b/src/GudhUI/gui/MainWindow.h @@ -1,6 +1,8 @@ #ifndef MAIN_WINDOW_H #define MAIN_WINDOW_H +// Workaround for moc-qt4 not parsing boost headers +#include #include #include "ui_main_window.h" diff --git a/src/GudhUI/gui/Menu_edge_contraction.h b/src/GudhUI/gui/Menu_edge_contraction.h index e497a90f..81d37bd8 100644 --- a/src/GudhUI/gui/Menu_edge_contraction.h +++ b/src/GudhUI/gui/Menu_edge_contraction.h @@ -8,6 +8,8 @@ #ifndef MENU_EDGE_CONTRACTION_H_ #define MENU_EDGE_CONTRACTION_H_ +// Workaround for moc-qt4 not parsing boost headers +#include #include "gui/MainWindow.h" #include "gui/ui_MenuEdgeContraction.h" diff --git a/src/GudhUI/view/Viewer.h b/src/GudhUI/view/Viewer.h index 5639aa56..00f9b245 100644 --- a/src/GudhUI/view/Viewer.h +++ b/src/GudhUI/view/Viewer.h @@ -1,6 
+1,9 @@ #ifndef VIEWER_H #define VIEWER_H +// Workaround for moc-qt4 not parsing boost headers +#include + #include #include "View_parameter.h" #include "model/Complex_typedefs.h" diff --git a/src/GudhUI/view/Viewer_instructor.h b/src/GudhUI/view/Viewer_instructor.h index 9a2a236b..62b3755e 100644 --- a/src/GudhUI/view/Viewer_instructor.h +++ b/src/GudhUI/view/Viewer_instructor.h @@ -3,6 +3,8 @@ // todo do a viewer instructor that have directely a pointer to a QGLviewer and buffer ot not triangles +// Workaround for moc-qt4 not parsing boost headers +#include #include diff --git a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h index 4c818ffa..5f399f1a 100644 --- a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h +++ b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h @@ -25,12 +25,12 @@ #include #include +#include #include #include #include #include -#include #include #include @@ -243,7 +243,7 @@ class Persistent_cohomology { column_pool_(), // memory pools for the CAM cell_pool_() { Simplex_key idx_fil = 0; - for (auto & sh : cpx_->filtration_simplex_range()) { + for (auto sh : cpx_->filtration_simplex_range()) { cpx_->assign_key(sh, idx_fil); ++idx_fil; dsets_.make_set(cpx_->key(sh)); @@ -266,13 +266,10 @@ class Persistent_cohomology { } ~Persistent_cohomology() { -// Clean the remaining columns in the matrix. - for (auto & cam_ref : cam_) { - cam_ref.col_.clear(); - } -// Clean the transversal lists + // Clean the transversal lists for (auto & transverse_ref : transverse_idx_) { - transverse_ref.second.row_->clear(); + // Destruct all the cells + transverse_ref.second.row_->clear_and_dispose([&](Cell*p){p->~Cell();}); delete transverse_ref.second.row_; } } @@ -572,9 +569,8 @@ class Persistent_cohomology { Column * curr_col = row_cell_it->self_col_; ++row_cell_it; // Disconnect the column from the rows in the CAM. - for (auto col_cell_it = curr_col->col_.begin(); - col_cell_it != curr_col->col_.end(); ++col_cell_it) { - col_cell_it->base_hook_cam_h::unlink(); + for (auto& col_cell : curr_col->col_) { + col_cell.base_hook_cam_h::unlink(); } // Remove the column from the CAM before modifying its value @@ -589,9 +585,9 @@ class Persistent_cohomology { // Find whether the column obtained is already in the CAM result_insert_cam = cam_.insert(*curr_col); if (result_insert_cam.second) { // If it was not in the CAM before: insertion has succeeded - for (auto col_cell_it = curr_col->col_.begin(); col_cell_it != curr_col->col_.end(); ++col_cell_it) { + for (auto& col_cell : curr_col->col_) { // re-establish the row links - transverse_idx_[col_cell_it->key_].row_->push_front(*col_cell_it); + transverse_idx_[col_cell.key_].row_->push_front(col_cell); } } else { // There is already an identical column in the CAM: // merge two disjoint sets. 
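The switch to clear_and_dispose() above follows the standard Boost.Intrusive idiom: intrusive containers do not own their elements, so clearing must explicitly dispose of each cell. A standalone sketch of the idiom, not part of the patch, using a plain delete as the disposer:

#include <boost/intrusive/list.hpp>
#include <iostream>

struct Cell : public boost::intrusive::list_base_hook<> {
  explicit Cell(int v) : value(v) { }
  int value;
};

int main() {
  boost::intrusive::list<Cell> row;
  row.push_back(*(new Cell(1)));
  row.push_back(*(new Cell(2)));
  // clear() would only unlink the cells and leak them; clear_and_dispose()
  // unlinks each cell and then hands it to the disposer. In Persistent_cohomology
  // the disposer runs the cell destructor or returns the cell to its memory pool.
  row.clear_and_dispose([](Cell* p) { delete p; });
  std::cout << "row is empty: " << std::boolalpha << row.empty() << std::endl;
  return 0;
}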
@@ -601,6 +597,8 @@ class Persistent_cohomology { Simplex_key key_tmp = dsets_.find_set(curr_col->class_key_); ds_repr_[key_tmp] = &(*(result_insert_cam.first)); result_insert_cam.first->class_key_ = key_tmp; + // intrusive containers don't own their elements, we have to release them manually + curr_col->col_.clear_and_dispose([&](Cell*p){cell_pool_.destroy(p);}); column_pool_.destroy(curr_col); // delete curr_col; } } @@ -766,8 +764,8 @@ class Persistent_cohomology { std::vector persistent_pairs_; length_interval interval_length_policy; - boost::object_pool column_pool_; - boost::object_pool cell_pool_; + Simple_object_pool column_pool_; + Simple_object_pool cell_pool_; }; /** @} */ // end defgroup persistent_cohomology diff --git a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Persistent_cohomology_column.h b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Persistent_cohomology_column.h index fcec819a..5ffd1776 100644 --- a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Persistent_cohomology_column.h +++ b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Persistent_cohomology_column.h @@ -93,27 +93,25 @@ class Persistent_cohomology_column : public boost::intrusive::set_base_hook< boost::intrusive::base_hook > Col_type; /** \brief Creates an empty column.*/ - explicit Persistent_cohomology_column(SimplexKey key) { - class_key_ = key; - col_ = Col_type(); - } + explicit Persistent_cohomology_column(SimplexKey key) + : col_(), + class_key_(key) {} public: /** Copy constructor.*/ Persistent_cohomology_column(Persistent_cohomology_column const &other) : col_(), class_key_(other.class_key_) { - if (!other.col_.empty()) - std::cerr << "Copying a non-empty column.\n"; + assert(other.col_.empty()); } /** \brief Returns true iff the column is null.*/ - bool is_null() { + bool is_null() const { return col_.empty(); } /** \brief Returns the key of the representative simplex of * the set of simplices having this column as annotation vector * in the compressed annotation matrix.*/ - SimplexKey class_key() { + SimplexKey class_key() const { return class_key_; } diff --git a/src/Simplex_tree/concept/IndexingTag.h b/src/Simplex_tree/concept/IndexingTag.h index d690da11..1dcdd756 100644 --- a/src/Simplex_tree/concept/IndexingTag.h +++ b/src/Simplex_tree/concept/IndexingTag.h @@ -25,6 +25,6 @@ * continuous maps to a cell complex, and compute its persistent * homology. * - * Must be linear_indexing_tag. + * Must be `Gudhi::linear_indexing_tag`. */ struct IndexingTag {}; diff --git a/src/Simplex_tree/concept/SimplexKey.h b/src/Simplex_tree/concept/SimplexKey.h index ce5b2382..7fdbdd84 100644 --- a/src/Simplex_tree/concept/SimplexKey.h +++ b/src/Simplex_tree/concept/SimplexKey.h @@ -22,7 +22,7 @@ /** \brief Key type used as simplex identifier. * - * Must be int + * Must be a signed integer type. */ struct SimplexKey {}; - \ No newline at end of file + diff --git a/src/Simplex_tree/concept/SimplexTreeOptions.h b/src/Simplex_tree/concept/SimplexTreeOptions.h new file mode 100644 index 00000000..a50a2bf1 --- /dev/null +++ b/src/Simplex_tree/concept/SimplexTreeOptions.h @@ -0,0 +1,41 @@ + /* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. 
+ * + * Author(s): Marc Glisse + * + * Copyright (C) 2015 INRIA Saclay - Ile-de-France (France) + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +/** \brief Concept of the template parameter for the class `Gudhi::Simplex_tree`. + * + * One model for this is `Gudhi::Simplex_tree_options_full_featured`. If you want to provide your own, it is recommended that you derive from it and override some parts instead of writing a class from scratch. + */ +struct SimplexTreeOptions { + /// Forced for now. + typedef IndexingTag Indexing_tag; + /// Must be a signed integer type. It admits a total order <. + typedef VertexHandle Vertex_handle; + /// Must be comparable with operator<. + typedef FiltrationValue Filtration_value; + /// Must be a signed integer type. + typedef SimplexKey Simplex_key; + /// If true, each simplex has extra storage for one `Simplex_key`. Necessary for `Persistent_cohomology`. + static constexpr bool store_key; + /// If true, each simplex has extra storage for one `Filtration_value`, and this value is propagated by operations like `Gudhi::Simplex_tree::expansion`. Without it, `Persistent_cohomology` degenerates to computing usual (non-persistent) cohomology. + static constexpr bool store_filtration; +}; + diff --git a/src/Simplex_tree/concept/VertexHandle.h b/src/Simplex_tree/concept/VertexHandle.h index 491f0f56..3efbba61 100644 --- a/src/Simplex_tree/concept/VertexHandle.h +++ b/src/Simplex_tree/concept/VertexHandle.h @@ -22,5 +22,6 @@ /** \brief Handle type for the vertices of a cell complex. * - * Must be int.*/ + * Must be a signed integer type. operator< defines a total order on it. 
+ */ struct VertexHandle {}; diff --git a/src/Simplex_tree/example/CMakeLists.txt b/src/Simplex_tree/example/CMakeLists.txt index 1a3cdfbf..2f924490 100644 --- a/src/Simplex_tree/example/CMakeLists.txt +++ b/src/Simplex_tree/example/CMakeLists.txt @@ -7,15 +7,18 @@ add_test(simplex_tree_from_file_3 ${CMAKE_CURRENT_BINARY_DIR}/simplex_tree_from_ add_executable ( simple_simplex_tree simple_simplex_tree.cpp ) add_test(simple_simplex_tree ${CMAKE_CURRENT_BINARY_DIR}/simple_simplex_tree) - + +add_executable ( mini_simplex_tree mini_simplex_tree.cpp ) +add_test(mini_simplex_tree ${CMAKE_CURRENT_BINARY_DIR}/mini_simplex_tree) + # An example with Simplex-tree using CGAL alpha_shapes_3 if(GMP_FOUND AND CGAL_FOUND) message("CGAL_lib = ${CGAL_LIBRARIES_DIR}") - message("GMP_LIBRARIES = ${GMP_LIBRARIES}") + message("GMP_LIBRARIES = ${GMP_LIBRARIES}") INCLUDE_DIRECTORIES(${GMP_INCLUDE_DIR}) INCLUDE_DIRECTORIES(${CGAL_INCLUDE_DIRS}) add_executable ( simplex_tree_from_alpha_shapes_3 simplex_tree_from_alpha_shapes_3.cpp ) target_link_libraries(simplex_tree_from_alpha_shapes_3 ${GMP_LIBRARIES} ${CGAL_LIBRARY}) add_test(simplex_tree_from_alpha_shapes_3 ${CMAKE_CURRENT_BINARY_DIR}/simplex_tree_from_alpha_shapes_3 ${CMAKE_SOURCE_DIR}/data/points/bunny_5000) -endif() +endif() diff --git a/src/Simplex_tree/example/mini_simplex_tree.cpp b/src/Simplex_tree/example/mini_simplex_tree.cpp new file mode 100644 index 00000000..08d626d3 --- /dev/null +++ b/src/Simplex_tree/example/mini_simplex_tree.cpp @@ -0,0 +1,68 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Marc Glisse + * + * Copyright (C) 2015 INRIA Saclay - Ile-de-France (France) + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +#include +#include +#include + +using namespace Gudhi; + +struct MyOptions : Simplex_tree_options_full_featured { + // Not doing persistence, so we don't need those + static const bool store_key = false; + static const bool store_filtration = false; + // I have few vertices + typedef short Vertex_handle; +}; +typedef Simplex_tree ST; + +// Dictionary should be private, but for now this is the easiest way. +static_assert(sizeof(ST::Dictionary::value_type) < sizeof(Simplex_tree<>::Dictionary::value_type), + "Not storing the filtration and key should save some space"); + +int main() { + ST st; + + /* Complex to build. 
*/ + /* 1 */ + /* o */ + /* /X\ */ + /* o---o---o */ + /* 2 0 3 */ + + auto triangle012 = {0, 1, 2}; + auto edge03 = {0, 3}; + st.insert_simplex_and_subfaces(triangle012); + st.insert_simplex_and_subfaces(edge03); + // FIXME: Remove this line + st.set_dimension(2); + + auto edge02 = {0, 2}; + ST::Simplex_handle e = st.find(edge02); + assert(st.filtration(e) == 0); // We are not using filtrations so everything has value 0 + for(ST::Simplex_handle t : st.cofaces_simplex_range(e, 1)) // Only coface is 012 + { + for(ST::Vertex_handle v : st.simplex_vertex_range(t)) // v in { 0, 1, 2 } + std::cout << v; + std::cout << '\n'; + } +} diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree.h b/src/Simplex_tree/include/gudhi/Simplex_tree.h index 279327f7..6a47083c 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree.h @@ -28,6 +28,9 @@ #include #include +#include +#include + #include #include #include @@ -72,6 +75,17 @@ namespace Gudhi { * \copyright GNU General Public License v3. * @{ */ + +/// Model of SimplexTreeOptions. +struct Simplex_tree_options_full_featured { + typedef linear_indexing_tag Indexing_tag; + typedef int Vertex_handle; + typedef double Filtration_value; + typedef int Simplex_key; + static const bool store_key = true; + static const bool store_filtration = true; +}; + /** * \brief Simplex Tree data structure for representing simplicial complexes. * @@ -83,35 +97,63 @@ namespace Gudhi { * \implements FilteredComplex * */ -template + +template class Simplex_tree { public: - typedef IndexingTag Indexing_tag; + typedef SimplexTreeOptions Options; + typedef typename Options::Indexing_tag Indexing_tag; /** \brief Type for the value of the filtration function. * * Must be comparable with <. */ - typedef FiltrationValue Filtration_value; + typedef typename Options::Filtration_value Filtration_value; /** \brief Key associated to each simplex. * * Must be a signed integer type. */ - typedef SimplexKey Simplex_key; + typedef typename Options::Simplex_key Simplex_key; /** \brief Type for the vertex handle. * * Must be a signed integer type. It admits a total order <. */ - typedef VertexHandle Vertex_handle; + typedef typename Options::Vertex_handle Vertex_handle; /* Type of node in the simplex tree. */ typedef Simplex_tree_node_explicit_storage Node; /* Type of dictionary Vertex_handle -> Node for traversing the simplex tree. */ + // Note: this wastes space when Vertex_handle is 32 bits and Node is aligned on 64 bits. It would be better to use a flat_set (with our own comparator) where we can control the layout of the struct (put Vertex_handle and Simplex_key next to each other). typedef typename boost::container::flat_map Dictionary; /* \brief Set of nodes sharing a same parent in the simplex tree. */ /* \brief Set of nodes sharing a same parent in the simplex tree. 
*/ typedef Simplex_tree_siblings Siblings; + struct Key_simplex_base_real { + Key_simplex_base_real() : key_(-1) {} + void assign_key(Simplex_key k) { key_ = k; } + Simplex_key key() const { return key_; } + private: + Simplex_key key_; + }; + struct Key_simplex_base_dummy { + Key_simplex_base_dummy() {} + void assign_key(Simplex_key) { } + Simplex_key key() const { assert(false); return -1; } + }; + typedef typename std::conditional::type Key_simplex_base; + + struct Filtration_simplex_base_real { + Filtration_simplex_base_real() : filt_(0) {} + void assign_filtration(Filtration_value f) { filt_ = f; } + Filtration_value filtration() const { return filt_; } + private: + Filtration_value filt_; + }; + struct Filtration_simplex_base_dummy { + Filtration_simplex_base_dummy() {} + void assign_filtration(Filtration_value f) { assert(f == 0); } + Filtration_value filtration() const { return 0; } + }; + typedef typename std::conditional::type Filtration_simplex_base; + public: /** \brief Handle type to a simplex contained in the simplicial complex represented * by the simplex tree. */ @@ -170,12 +212,12 @@ class Simplex_tree { /** \brief Range over the simplices of the skeleton of the simplicial complex, for a given * dimension. */ typedef boost::iterator_range Skeleton_simplex_range; + /** \brief Range over the simplices of the simplicial complex, ordered by the filtration. */ + typedef std::vector Filtration_simplex_range; /** \brief Iterator over the simplices of the simplicial complex, ordered by the filtration. * * 'value_type' is Simplex_handle. */ - typedef typename std::vector::iterator Filtration_simplex_iterator; - /** \brief Range over the simplices of the simplicial complex, ordered by the filtration. */ - typedef boost::iterator_range Filtration_simplex_range; + typedef typename Filtration_simplex_range::const_iterator Filtration_simplex_iterator; /* @} */ // end name range and iterator types /** \name Range and iterator methods @@ -226,17 +268,13 @@ class Simplex_tree { * order is used. * * The filtration must be valid. If the filtration has not been initialized yet, the - * method initializes it (i.e. order the simplices). */ - Filtration_simplex_range filtration_simplex_range(Indexing_tag) { + * method initializes it (i.e. order the simplices). If the complex has changed since the last time the filtration + * was initialized, please call `initialize_filtration()` to recompute it. */ + Filtration_simplex_range const& filtration_simplex_range(Indexing_tag=Indexing_tag()) { if (filtration_vect_.empty()) { initialize_filtration(); } - return Filtration_simplex_range(filtration_vect_.begin(), - filtration_vect_.end()); - } - - Filtration_simplex_range filtration_simplex_range() { - return filtration_simplex_range(Indexing_tag()); + return filtration_vect_; } /** \brief Returns a range over the vertices of a simplex. @@ -278,9 +316,47 @@ class Simplex_tree { Simplex_tree() : null_vertex_(-1), threshold_(0), - root_(NULL, null_vertex_), + root_(nullptr, null_vertex_), + filtration_vect_(), + dimension_(-1) { } + + /** \brief User-defined copy constructor reproduces the whole tree structure. 
*/ + Simplex_tree(const Simplex_tree& simplex_source) + : null_vertex_(simplex_source.null_vertex_), + threshold_(simplex_source.threshold_), filtration_vect_(), - dimension_(-1) { + dimension_(simplex_source.dimension_) { + auto root_source = simplex_source.root_; + auto memb_source = root_source.members(); + root_ = Siblings(nullptr, null_vertex_, memb_source); + rec_copy(&root_, &root_source); + } + + /** \brief depth first search, inserts simplices when reaching a leaf. */ + void rec_copy(Siblings *sib, Siblings *sib_source) { + for (auto sh = sib->members().begin(), sh_source = sib_source->members().begin(); + sh != sib->members().end(); ++sh, ++sh_source) { + if (has_children(sh_source)) { + Siblings * newsib = new Siblings(sib, sh_source->first); + newsib->members_.reserve(sh_source->second.children()->members().size()); + for (auto & child : sh_source->second.children()->members()) + newsib->members_.emplace_hint(newsib->members_.end(), child.first, Node(sib, child.second.filtration())); + rec_copy(newsib, sh_source->second.children()); + sh->second.assign_children(newsib); + } + } + } + + /** \brief User-defined move constructor moves the whole tree structure. */ + Simplex_tree(Simplex_tree && old) + : null_vertex_(std::move(old.null_vertex_)), + threshold_(std::move(old.threshold_)), + root_(std::move(old.root_)), + filtration_vect_(std::move(old.filtration_vect_)), + dimension_(std::move(old.dimension_)) { + old.dimension_ = -1; + old.threshold_ = 0; + old.root_ = Siblings(nullptr, null_vertex_); } /** \brief Destructor; deallocates the whole tree structure. */ @@ -303,24 +379,64 @@ class Simplex_tree { delete sib; } + public: + /** \brief Checks if two simplex trees are equal. */ + bool operator==(Simplex_tree& st2) { + if ((null_vertex_ != st2.null_vertex_) || + (threshold_ != st2.threshold_) || + (dimension_ != st2.dimension_)) + return false; + return rec_equal(&root_, &st2.root_); + } + + /** \brief Checks if two simplex trees are different. */ + bool operator!=(Simplex_tree& st2) { + return (!(*this == st2)); + } + + private: + /** rec_equal: Checks recursively whether or not two simplex trees are equal, using depth first search. */ + bool rec_equal(Siblings* s1, Siblings* s2) { + if (s1->members().size() != s2->members().size()) + return false; + for (auto sh1 = s1->members().begin(), sh2 = s2->members().begin(); + (sh1 != s1->members().end() && sh2 != s2->members().end()); ++sh1, ++sh2) { + if (sh1->first != sh2->first || sh1->second.filtration() != sh2->second.filtration()) + return false; + if (has_children(sh1) != has_children(sh2)) + return false; + // Recursivity on children only if both have children + else if (has_children(sh1)) + if (!rec_equal(sh1->second.children(), sh2->second.children())) + return false; + } + return true; + } + public: /** \brief Returns the key associated to a simplex. * - * The filtration must be initialized. */ + * The filtration must be initialized. + * \pre SimplexTreeOptions::store_key + */ static Simplex_key key(Simplex_handle sh) { return sh->second.key(); } /** \brief Returns the simplex associated to a key. * - * The filtration must be initialized. */ + * The filtration must be initialized. + * \pre SimplexTreeOptions::store_key + */ Simplex_handle simplex(Simplex_key key) const { return filtration_vect_[key]; } /** \brief Returns the filtration value of a simplex. * - * Called on the null_simplex, returns INFINITY. */ + * Called on the null_simplex, returns INFINITY. + * If SimplexTreeOptions::store_filtration is false, returns 0. 
+ */ static Filtration_value filtration(Simplex_handle sh) { if (sh != null_simplex()) { return sh->second.filtration(); @@ -348,7 +464,7 @@ class Simplex_tree { * * One can call filtration(null_simplex()). */ static Simplex_handle null_simplex() { - return Dictionary_it(NULL); + return Dictionary_it(nullptr); } /** \brief Returns a key different for all keys associated to the @@ -395,7 +511,7 @@ class Simplex_tree { int dimension(Simplex_handle sh) { Siblings * curr_sib = self_siblings(sh); int dim = 0; - while (curr_sib != NULL) { + while (curr_sib != nullptr) { ++dim; curr_sib = curr_sib->oncles(); } @@ -413,26 +529,34 @@ class Simplex_tree { return (sh->second.children()->parent() == sh->first); } - public: - /** \brief Given a range of Vertex_handles, returns the Simplex_handle + /** \brief Given a range of Vertex_handles, returns the Simplex_handle * of the simplex in the simplicial complex containing the corresponding * vertices. Return null_simplex() if the simplex is not in the complex. * - * The type RandomAccessVertexRange must be a range for which .begin() and - * .end() return random access iterators, with value_type - * Vertex_handle. + * The type InputVertexRange must be a range of Vertex_handle + * on which we can call std::begin() function */ - template - Simplex_handle find(RandomAccessVertexRange & s) { - if (s.begin() == s.end()) // Empty simplex - return null_simplex(); - - sort(s.begin(), s.end()); + template + Simplex_handle find(const InputVertexRange & s) { + auto first = std::begin(s); + auto last = std::end(s); + + if (first == last) + return null_simplex(); // ----->> + + // Copy before sorting + std::vector copy(first, last); + std::sort(std::begin(copy), std::end(copy)); + return find_simplex(copy); + } + private: + /** Find function, with a sorted range of vertices. */ + Simplex_handle find_simplex(const std::vector & simplex) { Siblings * tmp_sib = &root_; Dictionary_it tmp_dit; - Vertex_handle last = s[s.size() - 1]; - for (auto v : s) { + Vertex_handle last = simplex.back(); + for (auto v : simplex) { tmp_dit = tmp_sib->members_.find(v); if (tmp_dit == tmp_sib->members_.end()) { return null_simplex(); @@ -450,43 +574,17 @@ class Simplex_tree { Simplex_handle find_vertex(Vertex_handle v) { return root_.members_.begin() + v; } - - /** \brief Insert a simplex, represented by a range of Vertex_handles, in the simplicial complex. - * - * @param[in] simplex range of Vertex_handles, representing the vertices of the new simplex - * @param[in] filtration the filtration value assigned to the new simplex. - * The return type is a pair. If the new simplex is inserted successfully (i.e. it was not in the - * simplicial complex yet) the bool is set to true and the Simplex_handle is the handle assigned - * to the new simplex. - * If the insertion fails (the simplex is already there), the bool is set to false. If the insertion - * fails and the simplex already in the complex has a filtration value strictly bigger than 'filtration', - * we assign this simplex with the new value 'filtration', and set the Simplex_handle filed of the - * output pair to the Simplex_handle of the simplex. Otherwise, we set the Simplex_handle part to - * null_simplex. - * - * All subsimplices do not necessary need to be already in the simplex tree to proceed to an - * insertion. However, the property of being a simplicial complex will be violated. This allows - * us to insert a stream of simplices contained in a simplicial complex without considering any - * order on them. 
- * - * The filtration value - * assigned to the new simplex must preserve the monotonicity of the filtration. - * - * The type RandomAccessVertexRange must be a range for which .begin() and - * .end() return random access iterators, with 'value_type' Vertex_handle. */ - template - std::pair insert_simplex(RandomAccessVertexRange & simplex, - Filtration_value filtration) { - if (simplex.empty()) { - return std::pair(null_simplex(), true); - } - // must be sorted in increasing order - sort(simplex.begin(), simplex.end()); + //{ return root_.members_.find(v); } + private: + /** \brief Inserts a simplex represented by a vector of vertex. + \warning the vector must be sorted by increasing vertex handle order */ + std::pair insert_vertex_vector(const std::vector& simplex, + Filtration_value filtration) { Siblings * curr_sib = &root_; std::pair res_insert; - typename RandomAccessVertexRange::iterator vi; - for (vi = simplex.begin(); vi != simplex.end() - 1; ++vi) { + auto vi = simplex.begin(); + for (; vi != simplex.end() - 1; ++vi) { res_insert = curr_sib->members_.emplace(*vi, Node(curr_sib, filtration)); if (!(has_children(res_insert.first))) { res_insert.first->second.assign_children(new Siblings(curr_sib, *vi)); @@ -508,24 +606,77 @@ class Simplex_tree { return res_insert; } - /** \brief Insert a N-simplex and all his subfaces, from a N-simplex represented by a range of + public: + /** \brief Insert a simplex, represented by a range of Vertex_handles, in the simplicial complex. + * + * @param[in] simplex range of Vertex_handles, representing the vertices of the new simplex + * @param[in] filtration the filtration value assigned to the new simplex. + * The return type is a pair. If the new simplex is inserted successfully (i.e. it was not in the + * simplicial complex yet) the bool is set to true and the Simplex_handle is the handle assigned + * to the new simplex. + * If the insertion fails (the simplex is already there), the bool is set to false. If the insertion + * fails and the simplex already in the complex has a filtration value strictly bigger than 'filtration', + * we assign this simplex with the new value 'filtration', and set the Simplex_handle field of the + * output pair to the Simplex_handle of the simplex. Otherwise, we set the Simplex_handle part to + * null_simplex. + * + * All subsimplices do not necessary need to be already in the simplex tree to proceed to an + * insertion. However, the property of being a simplicial complex will be violated. This allows + * us to insert a stream of simplices contained in a simplicial complex without considering any + * order on them. + * + * The filtration value + * assigned to the new simplex must preserve the monotonicity of the filtration. + * + * The type InputVertexRange must be a range for which .begin() and + * .end() return input iterators, with 'value_type' Vertex_handle. */ + template + std::pair insert_simplex(const InputVertexRange & simplex, + Filtration_value filtration = 0) { + auto first = std::begin(simplex); + auto last = std::end(simplex); + + if (first == last) + return std::pair(null_simplex(), true); // ----->> + + // Copy before sorting + std::vector copy(first, last); + std::sort(std::begin(copy), std::end(copy)); + return insert_vertex_vector(copy, filtration); + } + + /** \brief Insert a N-simplex and all his subfaces, from a N-simplex represented by a range of * Vertex_handles, in the simplicial complex. 
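A minimal sketch of the relaxed insertion interface introduced in this hunk, not part of the patch: insert_simplex(), insert_simplex_and_subfaces() and find() now accept any input range of vertex handles and sort an internal copy, so callers may pass unsorted vectors. The filtration value used here is illustrative.

#include <gudhi/Simplex_tree.h>
#include <iostream>
#include <vector>

int main() {
  Gudhi::Simplex_tree<> st;  // default options are Simplex_tree_options_full_featured
  std::vector<int> triangle = {2, 0, 1};  // deliberately unsorted
  st.insert_simplex_and_subfaces(triangle, 0.3);
  std::vector<int> edge = {1, 0};  // also unsorted; find() sorts its own copy
  auto sh = st.find(edge);
  std::cout << "filtration of the edge {0,1} = " << st.filtration(sh) << std::endl;  // prints 0.3
  return 0;
}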
* * @param[in] Nsimplex range of Vertex_handles, representing the vertices of the new N-simplex * @param[in] filtration the filtration value assigned to the new N-simplex. + * The return type is a pair. If the new simplex is inserted successfully (i.e. it was not in the + * simplicial complex yet) the bool is set to true and the Simplex_handle is the handle assigned + * to the new simplex. + * If the insertion fails (the simplex is already there), the bool is set to false. If the insertion + * fails and the simplex already in the complex has a filtration value strictly bigger than 'filtration', + * we assign this simplex with the new value 'filtration', and set the Simplex_handle field of the + * output pair to the Simplex_handle of the simplex. Otherwise, we set the Simplex_handle part to + * null_simplex. */ - template - std::pair insert_simplex_and_subfaces(const RandomAccessVertexRange& Nsimplex, - Filtration_value filtration = 0.0) { - // Simplex copy - std::vector the_simplex(Nsimplex.begin(), Nsimplex.end()); - // must be sorted in increasing order - std::sort(the_simplex.begin(), the_simplex.end()); + template + std::pair insert_simplex_and_subfaces(const InputVertexRange& Nsimplex, + Filtration_value filtration = 0) { + auto first = std::begin(Nsimplex); + auto last = std::end(Nsimplex); + + if (first == last) + return std::pair(null_simplex(), true); // ----->> + + // Copy before sorting + std::vector copy(first, last); + std::sort(std::begin(copy), std::end(copy)); + std::vector> to_be_inserted; std::vector> to_be_propagated; - return rec_insert_simplex_and_subfaces(the_simplex, to_be_inserted, to_be_propagated, filtration); + return rec_insert_simplex_and_subfaces(copy, to_be_inserted, to_be_propagated, filtration); } - + private: std::pair rec_insert_simplex_and_subfaces(std::vector& the_simplex, std::vector>& to_be_inserted, @@ -534,15 +685,15 @@ class Simplex_tree { std::pair insert_result; if (the_simplex.size() > 1) { // Get and remove last vertex - Vertex_handle last_vertex= the_simplex.back(); + Vertex_handle last_vertex = the_simplex.back(); the_simplex.pop_back(); // Recursive call after last vertex removal insert_result = rec_insert_simplex_and_subfaces(the_simplex, to_be_inserted, to_be_propagated, filtration); - + // Concatenation of to_be_inserted and to_be_propagated to_be_inserted.insert(to_be_inserted.begin(), to_be_propagated.begin(), to_be_propagated.end()); to_be_propagated = to_be_inserted; - + // to_be_inserted treatment for (auto& simplex_tbi : to_be_inserted) { simplex_tbi.push_back(last_vertex); @@ -557,26 +708,23 @@ class Simplex_tree { // insert all to_be_inserted for (auto& simplex_tbi : to_be_inserted) { - insert_result = insert_simplex(simplex_tbi, filtration); + insert_result = insert_vertex_vector(simplex_tbi, filtration); } - - } else if (the_simplex.size() == 1) { // When reaching the end of recursivity, vector of simplices shall be empty and filled on back recursive - if ((to_be_inserted.size() != 0) || (to_be_propagated.size() != 0)){ + if ((to_be_inserted.size() != 0) || (to_be_propagated.size() != 0)) { std::cerr << "Simplex_tree::rec_insert_simplex_and_subfaces - Error vector not empty" << std::endl; exit(-1); } std::vector first_simplex(1, the_simplex.back()); // i.e. 
(0,1,2) => [to_be_inserted | to_be_propagated] = [(0) | ] to_be_inserted.push_back(first_simplex); - - insert_result = insert_simplex(first_simplex, filtration); + + insert_result = insert_vertex_vector(first_simplex, filtration); } else { std::cerr << "Simplex_tree::rec_insert_simplex_and_subfaces - Recursivity error" << std::endl; exit(-1); } - return insert_result; } @@ -603,17 +751,6 @@ class Simplex_tree { return sh->second.children(); } - // void display_simplex(Simplex_handle sh) - // { - // std::cout << " " << "[" << filtration(sh) << "] "; - // for( auto vertex : simplex_vertex_range(sh) ) - // { std::cout << vertex << " "; } - // } - - // void print(Simplex_handle sh, std::ostream& os = std::cout) - // { for(auto v : simplex_vertex_range(sh)) {os << v << " ";} - // os << std::endl; } - public: /** Returns a pointer to the root nodes of the simplex tree. */ Siblings * root() { @@ -778,8 +915,9 @@ class Simplex_tree { : st_(st) { } bool operator()(const Simplex_handle sh1, const Simplex_handle sh2) const { - if (st_->filtration(sh1) != st_->filtration(sh2)) { - return st_->filtration(sh1) < st_->filtration(sh2); + // Not using st_->filtration(sh1) because it uselessly tests for null_simplex. + if (sh1->second.filtration() != sh2->second.filtration()) { + return sh1->second.filtration() < sh2->second.filtration(); } // is sh1 a proper subface of sh2 return st_->reverse_lexicographic_order(sh1, sh2); @@ -925,7 +1063,7 @@ class Simplex_tree { while (true) { if (begin1->first == begin2->first) { Filtration_value filt = (std::max)({begin1->second.filtration(), begin2->second.filtration(), filtration_}); - intersection.emplace_back(begin1->first, Node(NULL, filt)); + intersection.emplace_back(begin1->first, Node(nullptr, filt)); if (++begin1 == end1 || ++begin2 == end2) return; // ----->> } else if (begin1->first < begin2->first) { @@ -947,7 +1085,7 @@ class Simplex_tree { * of the simplex, and fil is its filtration value. */ void print_hasse(std::ostream& os) { os << num_simplices() << " " << std::endl; - for (auto sh : filtration_simplex_range(Indexing_tag())) { + for (auto sh : filtration_simplex_range()) { os << dimension(sh) << " "; for (auto b_sh : boundary_simplex_range(sh)) { os << key(b_sh) << " "; diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_node_explicit_storage.h b/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_node_explicit_storage.h index 1f1a34cc..c49e30b9 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_node_explicit_storage.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_node_explicit_storage.h @@ -39,62 +39,31 @@ namespace Gudhi { * It stores explicitely its own filtration value and its own Simplex_key. */ template -class Simplex_tree_node_explicit_storage { - public: +struct Simplex_tree_node_explicit_storage : SimplexTree::Filtration_simplex_base, SimplexTree::Key_simplex_base { typedef typename SimplexTree::Siblings Siblings; typedef typename SimplexTree::Filtration_value Filtration_value; typedef typename SimplexTree::Simplex_key Simplex_key; - // Default constructor. 
- Simplex_tree_node_explicit_storage() - : children_(NULL), - simplex_key_(-1), - filtration_(0) { - } - - Simplex_tree_node_explicit_storage(Siblings * sib, - Filtration_value filtration) - : children_(sib), - simplex_key_(-1), - filtration_(filtration) { - } - - void assign_key(Simplex_key key) { - simplex_key_ = key; + Simplex_tree_node_explicit_storage(Siblings * sib = nullptr, + Filtration_value filtration = 0) + : children_(sib) { + this->assign_filtration(filtration); } /* - * Assign a children to the node + * Assign children to the node */ void assign_children(Siblings * children) { children_ = children; } - /* - * - */ - void assign_filtration(double filtration_value) { - filtration_ = filtration_value; - } - - Filtration_value filtration() { - return filtration_; - } /* Careful -> children_ can be NULL*/ Siblings * children() { return children_; } - Simplex_key key() { - return simplex_key_; - } - private: Siblings * children_; - - // Data attached to simplex, explicit storage - Simplex_key simplex_key_; - Filtration_value filtration_; // value in the filtration }; /* @} */ // end addtogroup simplex_tree diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_siblings.h b/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_siblings.h index de350f2d..d20a91d7 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_siblings.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_siblings.h @@ -71,15 +71,15 @@ class Simplex_tree_siblings { /* \brief Constructor with initialized set of members. * * 'members' must be sorted and unique.*/ - Simplex_tree_siblings(Simplex_tree_siblings * oncles, Vertex_handle parent, - const std::vector > & members) + template + Simplex_tree_siblings(Simplex_tree_siblings * oncles, Vertex_handle parent, const RandomAccessVertexRange & members) : oncles_(oncles), parent_(parent), members_(boost::container::ordered_unique_range, members.begin(), members.end()) { for (auto& map_el : members_) { map_el.second.assign_children(this); - } + } } /* diff --git a/src/Simplex_tree/test/simplex_tree_unit_test.cpp b/src/Simplex_tree/test/simplex_tree_unit_test.cpp index 9340aaa3..a4871cfd 100644 --- a/src/Simplex_tree/test/simplex_tree_unit_test.cpp +++ b/src/Simplex_tree/test/simplex_tree_unit_test.cpp @@ -9,8 +9,8 @@ #define BOOST_TEST_MODULE "simplex_tree" #include -#include "gudhi/graph_simplicial_complex.h" -#include "gudhi/reader_utils.h" +// ^ +// /!\ Nothing else from Simplex_tree shall be included to test includes are well defined. 
#include "gudhi/Simplex_tree.h" using namespace Gudhi; @@ -59,7 +59,6 @@ void test_iterators_on_empty_simplex_tree(typeST& tst) { BOOST_AUTO_TEST_CASE(simplex_tree_when_empty) { const Filtration_value DEFAULT_FILTRATION_VALUE = 0; - // TEST OF DEFAULT CONSTRUCTOR std::cout << "********************************************************************" << std::endl; std::cout << "TEST OF DEFAULT CONSTRUCTOR" << std::endl; typeST st; @@ -126,6 +125,7 @@ void test_simplex_tree_contains(typeST& simplexTree, typeSimplex& simplex, int p BOOST_CHECK(AreAlmostTheSame(simplexTree.filtration(*f_simplex), simplex.second)); int simplexIndex = simplex.first.size() - 1; + std::sort(simplex.first.begin(), simplex.first.end()); // if the simplex wasn't sorted, the next test could fail for (auto vertex : simplexTree.simplex_vertex_range(*f_simplex)) { std::cout << "test_simplex_tree_contains - vertex=" << vertex << "||" << simplex.first.at(simplexIndex) << std::endl; BOOST_CHECK(vertex == simplex.first.at(simplexIndex)); @@ -170,22 +170,6 @@ void set_and_test_simplex_tree_dim_fil(typeST& simplexTree, int vectorSize, cons BOOST_CHECK(simplexTree.num_simplices() == num_simp); } -void test_cofaces(typeST& st, std::vector v, int dim, std::vector res) { - typeST::Cofaces_simplex_range cofaces; - if (dim == 0) - cofaces = st.star_simplex_range(st.find(v)); - else - cofaces = st.cofaces_simplex_range(st.find(v), dim); - for (auto simplex = cofaces.begin(); simplex != cofaces.end(); ++simplex) { - typeST::Simplex_vertex_range rg = st.simplex_vertex_range(*simplex); - for (auto vertex = rg.begin(); vertex != rg.end(); ++vertex) { - std::cout << "(" << *vertex << ")"; - } - std::cout << std::endl; - BOOST_CHECK(std::find(res.begin(), res.end(), *simplex) != res.end()); - } -} - BOOST_AUTO_TEST_CASE(simplex_tree_insertion) { const Filtration_value FIRST_FILTRATION_VALUE = 0.1; const Filtration_value SECOND_FILTRATION_VALUE = 0.2; @@ -378,9 +362,8 @@ BOOST_AUTO_TEST_CASE(simplex_tree_insertion) { bool sort_in_decr_order (Vertex_handle i,Vertex_handle j) { return (i>j); } BOOST_AUTO_TEST_CASE(NSimplexAndSubfaces_tree_insertion) { - // TEST OF INSERTION WITH SUBFACES std::cout << "********************************************************************" << std::endl; - std::cout << "TEST OF INSERTION WITH SUBFACES" << std::endl; + std::cout << "TEST OF RECURSIVE INSERTION" << std::endl; typeST st; typePairSimplexBool returnValue; int position = 0; @@ -569,114 +552,179 @@ BOOST_AUTO_TEST_CASE(NSimplexAndSubfaces_tree_insertion) { } std::cout << std::endl; } +} +void test_cofaces(typeST& st, std::vector expected, int dim, std::vector res) { + typeST::Cofaces_simplex_range cofaces; + if (dim == 0) + cofaces = st.star_simplex_range(st.find(expected)); + else + cofaces = st.cofaces_simplex_range(st.find(expected), dim); + for (auto simplex = cofaces.begin(); simplex != cofaces.end(); ++simplex) { + typeST::Simplex_vertex_range rg = st.simplex_vertex_range(*simplex); + for (auto vertex = rg.begin(); vertex != rg.end(); ++vertex) { + std::cout << "(" << *vertex << ")"; + } + std::cout << std::endl; + BOOST_CHECK(std::find(res.begin(), res.end(), *simplex) != res.end()); + } +} + +BOOST_AUTO_TEST_CASE(coface_on_simplex_tree) { std::cout << "********************************************************************" << std::endl; - // TEST COFACE ALGORITHM + std::cout << "TEST COFACE ALGORITHM" << std::endl; + typeST st; + + typeVectorVertex SimplexVector{2, 1, 0}; + st.insert_simplex_and_subfaces(SimplexVector); + + SimplexVector = {3, 0}; + 
st.insert_simplex_and_subfaces(SimplexVector); + + SimplexVector = {3, 4, 5}; + st.insert_simplex_and_subfaces(SimplexVector); + + SimplexVector = {0, 1, 6, 7}; + st.insert_simplex_and_subfaces(SimplexVector); + + /* Inserted simplex: */ + /* 1 6 */ + /* o---o */ + /* /X\7/ */ + /* o---o---o---o */ + /* 2 0 3\X/4 */ + /* o */ + /* 5 */ + + // FIXME st.set_dimension(3); - std::cout << "COFACE ALGORITHM" << std::endl; - std::vector v; - std::vector simplex; + + std::vector simplex_result; std::vector result; - v.push_back(3); - std::cout << "First test : " << std::endl; - std::cout << "Star of (3):" << std::endl; - - simplex.push_back(3); - result.push_back(st.find(simplex)); - simplex.clear(); - - simplex.push_back(3); - simplex.push_back(0); - result.push_back(st.find(simplex)); - simplex.clear(); - - simplex.push_back(4); - simplex.push_back(3); - result.push_back(st.find(simplex)); - simplex.clear(); - - simplex.push_back(5); - simplex.push_back(4); - simplex.push_back(3); - result.push_back(st.find(simplex)); - simplex.clear(); - - simplex.push_back(5); - simplex.push_back(3); - result.push_back(st.find(simplex)); - simplex.clear(); - - test_cofaces(st, v, 0, result); - v.clear(); + std::cout << "First test - Star of (3):" << std::endl; + + simplex_result = {3}; + result.push_back(st.find(simplex_result)); + + simplex_result = {3, 0}; + result.push_back(st.find(simplex_result)); + + simplex_result = {4, 3}; + result.push_back(st.find(simplex_result)); + + simplex_result = {5, 4, 3}; + result.push_back(st.find(simplex_result)); + + simplex_result = {5, 3}; + result.push_back(st.find(simplex_result)); + simplex_result.clear(); + + std::vector vertex = {3}; + test_cofaces(st, vertex, 0, result); + vertex.clear(); result.clear(); - v.push_back(1); - v.push_back(7); - std::cout << "Second test : " << std::endl; - std::cout << "Star of (1,7): " << std::endl; - - simplex.push_back(7); - simplex.push_back(1); - result.push_back(st.find(simplex)); - simplex.clear(); - - simplex.push_back(7); - simplex.push_back(6); - simplex.push_back(1); - simplex.push_back(0); - result.push_back(st.find(simplex)); - simplex.clear(); - - simplex.push_back(7); - simplex.push_back(1); - simplex.push_back(0); - result.push_back(st.find(simplex)); - simplex.clear(); - - simplex.push_back(7); - simplex.push_back(6); - simplex.push_back(1); - result.push_back(st.find(simplex)); - simplex.clear(); - - test_cofaces(st, v, 0, result); + vertex.push_back(1); + vertex.push_back(7); + std::cout << "Second test - Star of (1,7): " << std::endl; + + simplex_result = {7, 1}; + result.push_back(st.find(simplex_result)); + + simplex_result = {7, 6, 1, 0}; + result.push_back(st.find(simplex_result)); + + simplex_result = {7, 1, 0}; + result.push_back(st.find(simplex_result)); + + simplex_result = {7, 6, 1}; + result.push_back(st.find(simplex_result)); + + test_cofaces(st, vertex, 0, result); result.clear(); - std::cout << "Third test : " << std::endl; - std::cout << "2-dimension Cofaces of simplex(1,7) : " << std::endl; + std::cout << "Third test - 2-dimension Cofaces of simplex(1,7) : " << std::endl; - simplex.push_back(7); - simplex.push_back(1); - simplex.push_back(0); - result.push_back(st.find(simplex)); - simplex.clear(); + simplex_result = {7, 1, 0}; + result.push_back(st.find(simplex_result)); - simplex.push_back(7); - simplex.push_back(6); - simplex.push_back(1); - result.push_back(st.find(simplex)); - simplex.clear(); + simplex_result = {7, 6, 1}; + result.push_back(st.find(simplex_result)); - test_cofaces(st, v, 1, 
result); + test_cofaces(st, vertex, 1, result); result.clear(); std::cout << "Cofaces with a codimension too high (codimension + vetices > tree.dimension) :" << std::endl; - test_cofaces(st, v, 5, result); - // std::cout << "Cofaces with an empty codimension" << std::endl; - // test_cofaces(st, v, -1, result); + test_cofaces(st, vertex, 5, result); + + //std::cout << "Cofaces with an empty codimension" << std::endl; + //test_cofaces(st, vertex, -1, result); // std::cout << "Cofaces in an empty simplex tree" << std::endl; // typeST empty_tree; - // test_cofaces(empty_tree, v, 1, result); - // std::cout << "Cofaces of an empty simplex" << std::endl; - // v.clear(); - // test_cofaces(st, v, 1, result); - - /* - // TEST Off read - std::cout << "********************************************************************" << std::endl; - typeST st2; - st2.tree_from_off("test.off"); - std::cout << st2; - */ + // test_cofaces(empty_tree, vertex, 1, result); + //std::cout << "Cofaces of an empty simplex" << std::endl; + //vertex.clear(); + // test_cofaces(st, vertex, 1, result); + +} + +BOOST_AUTO_TEST_CASE(copy_move_on_simplex_tree) { + std::cout << "********************************************************************" << std::endl; + std::cout << "TEST COPY MOVE CONSTRUCTORS" << std::endl; + typeST st; + + typeVectorVertex SimplexVector{2, 1, 0}; + st.insert_simplex_and_subfaces(SimplexVector); + SimplexVector = {3, 0}; + st.insert_simplex_and_subfaces(SimplexVector); + + SimplexVector = {3, 4, 5}; + st.insert_simplex_and_subfaces(SimplexVector); + + SimplexVector = {0, 1, 6, 7}; + st.insert_simplex_and_subfaces(SimplexVector); + + /* Inserted simplex: */ + /* 1 6 */ + /* o---o */ + /* /X\7/ */ + /* o---o---o---o */ + /* 2 0 3\X/4 */ + /* o */ + /* 5 */ + + // FIXME + st.set_dimension(3); + + std::cout << "Printing st - address = " << &st << std::endl; + + // Copy constructor + typeST st_copy = st; + std::cout << "Printing a copy of st - address = " << &st_copy << std::endl; + + // Check the data are the same + BOOST_CHECK(st == st_copy); + // Check there is a new simplex tree reference + BOOST_CHECK(&st != &st_copy); + + // Move constructor + typeST st_move = std::move(st); + std::cout << "Printing a move of st - address = " << &st_move << std::endl; + + // Check the data are the same + BOOST_CHECK(st_move == st_copy); + // Check there is a new simplex tree reference + BOOST_CHECK(&st_move != &st_copy); + BOOST_CHECK(&st_move != &st); + + typeST st_empty; + // Check st has been emptied by the move + BOOST_CHECK(st == st_empty); + BOOST_CHECK(st.filtration() == 0); + BOOST_CHECK(st.dimension() == -1); + BOOST_CHECK(st.num_simplices() == 0); + BOOST_CHECK(st.num_vertices() == (size_t)0); + + std::cout << "Printing st once again- address = " << &st << std::endl; } diff --git a/src/common/include/gudhi/Simple_object_pool.h b/src/common/include/gudhi/Simple_object_pool.h new file mode 100644 index 00000000..fffcb2ef --- /dev/null +++ b/src/common/include/gudhi/Simple_object_pool.h @@ -0,0 +1,64 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Marc Glisse + * + * Copyright (C) 2015 INRIA Saclay - Ile de France + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +#include +#include + +namespace Gudhi { + /** \private + * This is a simpler version of boost::object_pool, that requires + * that users explicitly destroy all objects. This lets the + * performance scale much better, see + * https://svn.boost.org/trac/boost/ticket/3789 . + */ +template +class Simple_object_pool : protected boost::pool +{ + protected: + typedef boost::pool Base; + typedef T* pointer; + Base& base(){return *this;} + Base const& base()const{return *this;} + public: + typedef T element_type; + typedef boost::default_user_allocator_malloc_free user_allocator; + typedef typename Base::size_type size_type; + typedef typename Base::difference_type difference_type; + template + Simple_object_pool(U&&...u) : Base(sizeof(T), std::forward(u)...) {} + template + pointer construct(U&&...u){ + void* p=base().malloc BOOST_PREVENT_MACRO_SUBSTITUTION(); + assert(p); + try { new(p) T(std::forward(u)...); } + catch(...) { + base().free BOOST_PREVENT_MACRO_SUBSTITUTION(p); + throw; + } + return static_cast(p); + } + void destroy(pointer p){ + p->~T(); + base().free BOOST_PREVENT_MACRO_SUBSTITUTION(p); + } +}; +} -- cgit v1.2.3 From 91a2adbaeec76b4ee172123a5a833065f910f5ab Mon Sep 17 00:00:00 2001 From: glisse Date: Sun, 18 Oct 2015 20:31:25 +0000 Subject: Also use TBB to build Hasse_complex in parallel. git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/tbb@870 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 6633f9b8261cfb2acfb62d1f4748df73375086bd --- src/Hasse_complex/include/gudhi/Hasse_complex.h | 30 +++++++++----- src/common/include/gudhi/allocator.h | 55 +++++++++++++++++++++++++ 2 files changed, 75 insertions(+), 10 deletions(-) create mode 100644 src/common/include/gudhi/allocator.h (limited to 'src/common') diff --git a/src/Hasse_complex/include/gudhi/Hasse_complex.h b/src/Hasse_complex/include/gudhi/Hasse_complex.h index 67079687..af9ae5e9 100644 --- a/src/Hasse_complex/include/gudhi/Hasse_complex.h +++ b/src/Hasse_complex/include/gudhi/Hasse_complex.h @@ -29,6 +29,12 @@ #include // for pair #include +#include + +#ifdef GUDHI_USE_TBB +#include +#endif + namespace Gudhi { template < class HasseCpx > @@ -97,20 +103,24 @@ class Hasse_complex { template < class Complex_ds > Hasse_complex(Complex_ds & cpx) - : complex_() + : complex_(cpx.num_simplices()) , vertices_() , threshold_(cpx.filtration()) , num_vertices_() , dim_max_(cpx.dimension()) { - complex_.reserve(cpx.num_simplices()); - int idx = 0; - for (auto cpx_sh : cpx.filtration_simplex_range()) { - complex_.push_back(Hasse_simp(cpx, cpx_sh)); - if (dimension(idx) == 0) { + int size = complex_.size(); +#ifdef GUDHI_USE_TBB + tbb::parallel_for(0,size,[&](int idx){new (&complex_[idx]) Hasse_simp(cpx, cpx.simplex(idx));}); + for (int idx=0; idx complex_; + std::vector< Hasse_simp, Gudhi::no_init_allocator > complex_; std::vector vertices_; Filtration_value threshold_; size_t num_vertices_; @@ -218,7 +228,7 @@ std::istream& operator>>(std::istream & is // read all simplices in the file as a list of vertices while (read_hasse_simplex(is, boundary, fil)) { // insert every simplex in the simplex tree - hcpx.complex_.push_back(Hasse_simplex< 
Hasse_complex >(key, fil, boundary)); + hcpx.complex_.emplace_back(key, fil, boundary); if (max_dim < hcpx.dimension(key)) { max_dim = hcpx.dimension(key); diff --git a/src/common/include/gudhi/allocator.h b/src/common/include/gudhi/allocator.h new file mode 100644 index 00000000..b825173b --- /dev/null +++ b/src/common/include/gudhi/allocator.h @@ -0,0 +1,55 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Marc Glisse + * + * Copyright (C) 2015 INRIA Saclay - Ile de France + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +#ifndef GUDHI_ALLOCATOR_H_ +#define GUDHI_ALLOCATOR_H_ + +#include +#include + +namespace Gudhi { + +/** \private + * An allocator that can be used to build an uninitialized vector. + */ +template > +struct no_init_allocator : Base { + typedef std::allocator_traits Base_traits; + template struct rebind { + typedef no_init_allocator> other; + }; + + // Inherit constructors. + using Base::Base; + + // Do nothing: that's the whole point! + template + void construct(P*)noexcept{} + + template void construct(P*p, U&&...u){ + Base_traits::construct(*(Base*)this, p, std::forward(u)...); + } +}; + +} // namespace Gudhi + +#endif // GUDHI_ALLOCATOR_H_ -- cgit v1.2.3 From 3b22ae31478387efd64ae5f185128857f17ca9ee Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Fri, 23 Oct 2015 09:40:36 +0000 Subject: generate_version excludes bottleneck for version 1.2.0 - to be removed for 1.3.0 generate_version copies concept for doxygen purpose Contact gudhi-users on skbl and contraction page Doxygen warning fixes Doxygen Software section git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/trunk@871 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: bd7e252c2d0528f4bbc40c33a9221e1d6b386510 --- scripts/generate_version.sh | 10 ++++- src/Contraction/include/gudhi/Edge_contraction.h | 2 +- src/Doxyfile | 6 +-- src/Simplex_tree/concept/SimplexTreeOptions.h | 6 +-- .../include/gudhi/Skeleton_blocker.h | 2 +- .../include/gudhi/Skeleton_blocker_complex.h | 4 +- src/common/doc/main_page.h | 51 +++++++++++++++++++++- 7 files changed, 68 insertions(+), 13 deletions(-) (limited to 'src/common') diff --git a/scripts/generate_version.sh b/scripts/generate_version.sh index 43a54c1c..323396dc 100755 --- a/scripts/generate_version.sh +++ b/scripts/generate_version.sh @@ -61,12 +61,13 @@ cp $ROOT_DIR/GUDHIVersion.cmake.in $VERSION_DIR PACKAGE_INC_DIR="/include" #PACKAGE_SRC_DIR="/source" PACKAGE_EX_DIR="/example" +PACKAGE_CONCEPT_DIR="/concept" PACKAGE_DOC_DIR="/doc" for package in `ls $ROOT_DIR/src/` do - echo $package - if [ -d "$ROOT_DIR/src/$package" ] + if [ -d "$ROOT_DIR/src/$package" ] && [ $package != "Bottleneck" ] then + echo $package if [ "$package" == "cmake" ] then # SPECIFIC FOR CMAKE MODULES @@ -91,6 +92,11 @@ do mkdir -p $VERSION_DIR$PACKAGE_EX_DIR/$package cp -R 
$ROOT_DIR/src/$package$PACKAGE_EX_DIR/* $VERSION_DIR$PACKAGE_EX_DIR/$package fi + if [ -d "$ROOT_DIR/src/$package$PACKAGE_CONCEPT_DIR" ] + then + mkdir -p $VERSION_DIR$PACKAGE_CONCEPT_DIR/$package + cp -R $ROOT_DIR/src/$package$PACKAGE_CONCEPT_DIR/* $VERSION_DIR$PACKAGE_CONCEPT_DIR/$package + fi if [ -d "$ROOT_DIR/src/$package$PACKAGE_DOC_DIR" ] then mkdir -p $VERSION_DIR$PACKAGE_DOC_DIR/$package diff --git a/src/Contraction/include/gudhi/Edge_contraction.h b/src/Contraction/include/gudhi/Edge_contraction.h index dfce8d1b..f3076057 100644 --- a/src/Contraction/include/gudhi/Edge_contraction.h +++ b/src/Contraction/include/gudhi/Edge_contraction.h @@ -226,7 +226,7 @@ Time to simplify and enumerate simplices: \copyright GNU General Public License v3. -\verbatim Contact: David Salinas, david.salinas@inria.fr \endverbatim +\verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim */ /** @} */ // end defgroup } // namespace contraction diff --git a/src/Doxyfile b/src/Doxyfile index 85c496a8..084a9abb 100644 --- a/src/Doxyfile +++ b/src/Doxyfile @@ -38,7 +38,7 @@ PROJECT_NAME = "Gudhi" # could be handy for archiving the generated documentation or if some version # control system is used. -PROJECT_NUMBER = "1.1.0" +PROJECT_NUMBER = "1.2.0" # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a @@ -1338,7 +1338,7 @@ ECLIPSE_DOC_ID = org.doxygen.Project # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. -DISABLE_INDEX = NO +DISABLE_INDEX = YES # The GENERATE_TREEVIEW tag is used to specify whether a tree-like index # structure should be generated to display hierarchical information. If the tag @@ -1355,7 +1355,7 @@ DISABLE_INDEX = NO # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. -GENERATE_TREEVIEW = NO +GENERATE_TREEVIEW = YES # The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that # doxygen will group on one line in the generated HTML documentation. diff --git a/src/Simplex_tree/concept/SimplexTreeOptions.h b/src/Simplex_tree/concept/SimplexTreeOptions.h index a50a2bf1..add3ebdd 100644 --- a/src/Simplex_tree/concept/SimplexTreeOptions.h +++ b/src/Simplex_tree/concept/SimplexTreeOptions.h @@ -34,8 +34,8 @@ struct SimplexTreeOptions { /// Must be a signed integer type. typedef SimplexKey Simplex_key; /// If true, each simplex has extra storage for one `Simplex_key`. Necessary for `Persistent_cohomology`. - static constexpr bool store_key; - /// If true, each simplex has extra storage for one `Filtration_value`, and this value is propagated by operations like `Gudhi::Simplex_tree::expansion`. Without it, `Persistent_cohomology` degenerates to computing usual (non-persistent) cohomology. - static constexpr bool store_filtration; + static const bool store_key; + /// If true, each simplex has extra storage for one `Filtration_value`, and this value is propagated by operations like `Gudhi::Simplex_tree::expansion`. Without it, `Persistent_cohomology` degenerates to computing usual (non-persistent) cohomology. 
+ static const bool store_filtration; }; diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h index 792a7994..3be480fd 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h @@ -241,7 +241,7 @@ their collaboration to write the two initial papers \copyright GNU General Public License v3. -\verbatim Contact: David Salinas, david.salinas@inria.fr \endverbatim +\verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim */ /** @} */ // end defgroup diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_complex.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_complex.h index 07f371a2..d26d12b0 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_complex.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_complex.h @@ -1018,7 +1018,7 @@ class Skeleton_blocker_complex { } //@} - /** @Simplification operations + /** @name Simplification operations */ //@{ @@ -1131,7 +1131,7 @@ class Skeleton_blocker_complex { } //@} - /** @Edge contraction operations + /** @name Edge contraction operations */ //@{ diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h index 315aa0ac..d1060740 100644 --- a/src/common/doc/main_page.h +++ b/src/common/doc/main_page.h @@ -7,7 +7,7 @@ The Gudhi library (Geometric Understanding in Higher Dimensions) is a generic C+ topological analysis of high-dimensional data whose goal is to provide robust, efficient, flexible and easy to use implementations of state-of-the-art algorithms and data structures for computational topology. -This library is part of the Gudhi project. +This library is part of the Gudhi project. The current release of the library allows to use several data-structures for simplicial complexes : simplex tree, Hasse diagram or skeleton-blocker. Several operations can then be done on top of these @@ -70,3 +70,52 @@ make \verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim */ + +/*! \page Software Software + * \tableofcontents + * \section SoftwareIntroduction Introduction + * The GUDHI open source library will provide the central data structures and algorithms that underly applications in geometry understanding in higher dimensions. It is intended to both help the development of new algorithmic solutions inside and outside the project, and to facilitate the transfer of results in applied fields. + * + * The current release of the GUDHI library includes: + * + * – Data structures to represent, construct and manipulate simplicial complexes. + * + * – Algorithms to compute persistent homology and multi-field persistent homology. + * + * – Simplification methods via implicit representations. + * + * + * The library is available here and the documentation is + * available at this webpage. + * + * \section ReleaseHistory Release history + * + * – ??-??-2015; release v.1.2.0, Skeleton-Blocker simplex insertion, GudhUI (Gudhi Qt demo), Simplex tree coface function, Clang build issue fix. + * + * – 12-18-2014; release v.1.1, Skeleton-Blocker data-structure, simplification package, additional examples for topological persistence. + * + * – 08-12-2014; release v. 1.0.2, initialize simplex keys in initialize_filtration in Simplex_tree + * + * – 07-11-2014: release v. 1.0.1, bug fix in summing columns in Persistent_cohomology + * + * – 06-23-2014: release v. 
1.0 + * + * \section Citation How to cite Gudhi + * Each Gudhi module (either data structures or algorithms) has an author section. + * + * Thank you to refer to this section, and to cite the author(s) of all the module you are using. + * + * \section Soon Coming soon + * + * – Alpha complex. + * + * – Bottleneck distance. + * + * – Zig zag persistence. + * + * – Witness complex. + * + * – Tangential complex. + * + * – Hard clustering. +*/ -- cgit v1.2.3 From 03f17b1d0bfd5473dc10b3d2d31ad9da60954d30 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Fri, 23 Oct 2015 15:53:32 +0000 Subject: Software page text modification git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/trunk@872 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 43ef4b9ee503de54decee6a25bcd88c9b79be34f --- src/CMakeLists.txt | 2 +- src/common/doc/main_page.h | 9 +++++---- 2 files changed, 6 insertions(+), 5 deletions(-) (limited to 'src/common') diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index a84090e9..68eca65e 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -75,7 +75,7 @@ else() # Install the GUDHIConfig.cmake and GUDHIConfigVersion.cmake install(FILES - "${PROJECT_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/GUDHIConfig.cmake" + "${PROJECT_BINARY_DIR}/GUDHIConfig.cmake" "${PROJECT_BINARY_DIR}/GUDHIConfigVersion.cmake" DESTINATION share/gudhi) diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h index d1060740..55cf53a6 100644 --- a/src/common/doc/main_page.h +++ b/src/common/doc/main_page.h @@ -38,7 +38,6 @@ arithmetic, operating on signed integers, rational numbers, and floating point n The following examples require the GNU Multiple Precision Arithmetic Library (GMP) and will not be built if GMP is not installed: - Persistent_cohomology/rips_multifield_persistence - - Simplex_tree/simplex_tree_from_alpha_shapes_3 Having GMP version 4.2 or higher installed is recommended. @@ -47,6 +46,8 @@ CGAL is a C++ library which provides easy access to efficient and reliable geome The following example requires the Computational Geometry Algorithms Library (CGAL) and will not be built if CGAL is not installed: + - GudhUI + - Persistent_cohomology/alpha_shapes_persistence - Simplex_tree/simplex_tree_from_alpha_shapes_3 Having CGAL version 4.4 or higher installed is recommended. The procedure to install this library according to @@ -90,7 +91,7 @@ make * * \section ReleaseHistory Release history * - * – ??-??-2015; release v.1.2.0, Skeleton-Blocker simplex insertion, GudhUI (Gudhi Qt demo), Simplex tree coface function, Clang build issue fix. + * – ??-??-2015; release v.1.2.0, GudhUI (Gudhi Qt demo), Simplex tree coface function, Clang build issue fix. * * – 12-18-2014; release v.1.1, Skeleton-Blocker data-structure, simplification package, additional examples for topological persistence. * @@ -105,7 +106,7 @@ make * * Thank you to refer to this section, and to cite the author(s) of all the module you are using. * - * \section Soon Coming soon + * \section Upcoming Upcoming * * – Alpha complex. * @@ -117,5 +118,5 @@ make * * – Tangential complex. * - * – Hard clustering. + * – Clustering. 
*/ -- cgit v1.2.3 From deeae2331c2b720a07c093ac797e825ed3d04ebe Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Sat, 24 Oct 2015 07:19:34 +0000 Subject: Add contributions section git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/trunk@873 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: d251621a0a143139b8ea787017bd195d26bd2e9f --- src/common/doc/main_page.h | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) (limited to 'src/common') diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h index 55cf53a6..1aea2716 100644 --- a/src/common/doc/main_page.h +++ b/src/common/doc/main_page.h @@ -91,7 +91,7 @@ make * * \section ReleaseHistory Release history * - * – ??-??-2015; release v.1.2.0, GudhUI (Gudhi Qt demo), Simplex tree coface function, Clang build issue fix. + * – 24-10-2015; release v.1.2.0, GudhUI (Gudhi Qt demo), Simplex tree coface function, Clang build issue fix. * * – 12-18-2014; release v.1.1, Skeleton-Blocker data-structure, simplification package, additional examples for topological persistence. * @@ -119,4 +119,16 @@ make * – Tangential complex. * * – Clustering. + * + * \section Contributions Contributions + * Gudhi is opened to external contributions. If you just want to report bugs, feel free to contact us. + * \verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim + * + * If you want to join our development team, you will have to create an accout on the + * INRIA forge and ask to join the GUDHI project. + * + * Your development will have to follow our + * submitting + * process (code, documentation, and unitary tests review) and not to break the existing + * test suite. */ -- cgit v1.2.3 From d60a8b6f8f09f377c425c02b6b9e602c188d51eb Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Sat, 24 Oct 2015 07:46:21 +0000 Subject: target="_blank" for external websites. git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/trunk@874 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 17b1803f2eaae3052d65a8d02a7bec3c9473e9cd --- src/common/doc/main_page.h | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) (limited to 'src/common') diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h index 1aea2716..ce6ef96a 100644 --- a/src/common/doc/main_page.h +++ b/src/common/doc/main_page.h @@ -27,7 +27,7 @@ Examples of Gudhi headers inclusion can be found in \ref demos. \section compiling Compiling -The library uses c++11 and requires Boost with version 1.48.0 or more recent. +The library uses c++11 and requires Boost with version 1.48.0 or more recent. It is a multi-platform library and compiles on Linux, Mac OSX and Visual Studio 2013. @@ -35,7 +35,7 @@ It is a multi-platform library and compiles on Linux, Mac OSX and Visual Studio The multi-field persistent homology algorithm requires GMP which is a free library for arbitrary-precision arithmetic, operating on signed integers, rational numbers, and floating point numbers. -The following examples require the GNU Multiple Precision Arithmetic Library (GMP) +The following examples require the GNU Multiple Precision Arithmetic Library (GMP) and will not be built if GMP is not installed: - Persistent_cohomology/rips_multifield_persistence @@ -44,7 +44,7 @@ Having GMP version 4.2 or higher installed is recommended. \subsection cgal CGAL: CGAL is a C++ library which provides easy access to efficient and reliable geometric algorithms. 
-The following example requires the Computational Geometry Algorithms Library (CGAL) +The following example requires the Computational Geometry Algorithms Library (CGAL) and will not be built if CGAL is not installed: - GudhUI - Persistent_cohomology/alpha_shapes_persistence @@ -86,8 +86,8 @@ make * – Simplification methods via implicit representations. * * - * The library is available here and the documentation is - * available at this webpage. + * The library is available here and the documentation is + * available at this webpage. * * \section ReleaseHistory Release history * @@ -125,10 +125,10 @@ make * \verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim * * If you want to join our development team, you will have to create an accout on the - * INRIA forge and ask to join the GUDHI project. + * INRIA forge and ask to join the GUDHI project. * * Your development will have to follow our - * submitting + * submitting * process (code, documentation, and unitary tests review) and not to break the existing - * test suite. + * test suite. */ -- cgit v1.2.3 From 6b9fa936e42bd0c48f9320716937294dbcd7a21b Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Tue, 3 Nov 2015 10:50:47 +0000 Subject: Fix doxygen after doc review for 1.2.0 git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/trunk@881 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: cffd514dde3fd716bac3768c4af56d3ff5777931 --- biblio/how_to_cite_gudhi.bib | 44 +++++++++++++++++ src/Doxyfile | 5 +- src/common/doc/main_page.h | 109 ++++++++++++++++++++++++++----------------- 3 files changed, 112 insertions(+), 46 deletions(-) create mode 100644 biblio/how_to_cite_gudhi.bib (limited to 'src/common') diff --git a/biblio/how_to_cite_gudhi.bib b/biblio/how_to_cite_gudhi.bib new file mode 100644 index 00000000..851dd5d9 --- /dev/null +++ b/biblio/how_to_cite_gudhi.bib @@ -0,0 +1,44 @@ +@book{gudhi:urm +, title = "{GUDHI} User and Reference Manual" +, author = "{The GUDHI Project}" +, publisher = "{GUDHI Editorial Board}" +, year = 2015 +, url = "http://gudhi.gforge.inria.fr/doc/latest/" +} + +@incollection{gudhi:FilteredComplexes +, author = "Cl\'ement Maria" +, title = "Filtered Complexes" +, publisher = "{GUDHI Editorial Board}" +, booktitle = "{GUDHI} User and Reference Manual" +, url = "http://gudhi.gforge.inria.fr/doc/latest/group__simplex__tree.html" +, year = 2015 +} + +@incollection{gudhi:PersistentCohomology +, author = "Cl\'ement Maria" +, title = "Persistent Cohomology" +, publisher = "{GUDHI Editorial Board}" +, booktitle = "{GUDHI} User and Reference Manual" +, url = "http://gudhi.gforge.inria.fr/doc/latest/group__persistent__cohomology.html" +, year = 2015 +} + +@incollection{gudhi:Contraction +, author = "David Salinas" +, title = "Contraction" +, publisher = "{GUDHI Editorial Board}" +, booktitle = "{GUDHI} User and Reference Manual" +, url = "http://gudhi.gforge.inria.fr/doc/latest/group__contr.html" +, year = 2015 +} + +@incollection{gudhi:Skeleton-Blocker +, author = "David Salinas" +, title = "Skeleton-Blocker" +, publisher = "{GUDHI Editorial Board}" +, booktitle = "{GUDHI} User and Reference Manual" +, url = "http://gudhi.gforge.inria.fr/doc/latest/group__skbl.html" +, year = 2015 +} + diff --git a/src/Doxyfile b/src/Doxyfile index 084a9abb..faa0d3fe 100644 --- a/src/Doxyfile +++ b/src/Doxyfile @@ -672,7 +672,8 @@ LAYOUT_FILE = # search path. Do not use file names with spaces, bibtex cannot handle them. See # also \cite for info how to create references. 
-CITE_BIB_FILES = biblio/bibliography.bib +CITE_BIB_FILES = biblio/bibliography.bib \ + biblio/how_to_cite_gudhi.bib #--------------------------------------------------------------------------- # Configuration options related to warning and progress messages @@ -811,7 +812,7 @@ EXCLUDE_SYMBOLS = # that contain example code fragments that are included (see the \include # command). -EXAMPLE_PATH = +EXAMPLE_PATH = biblio/ # If the value of the EXAMPLE_PATH tag contains directories, you can use the # EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h index ce6ef96a..43297b45 100644 --- a/src/common/doc/main_page.h +++ b/src/common/doc/main_page.h @@ -65,6 +65,14 @@ cmake .. make \endverbatim +\subsection testsuites Test suites + +To test your build, run the following command in a terminal: + +\verbatim +make test +\endverbatim + \details \copyright GNU General Public License v3. @@ -75,60 +83,73 @@ make /*! \page Software Software * \tableofcontents * \section SoftwareIntroduction Introduction - * The GUDHI open source library will provide the central data structures and algorithms that underly applications in geometry understanding in higher dimensions. It is intended to both help the development of new algorithmic solutions inside and outside the project, and to facilitate the transfer of results in applied fields. - * - * The current release of the GUDHI library includes: + * The GUDHI library is a C++ open source library **intended to provide** the central data structures and algorithms + * that underly applications in Geometric and Topological Data Analysis + * (TDA). The GUDHI + * library is developed as part of the GUDHI + * project supported by the European Research Council. The GUDHI library can both help the development of new + * algorithmic solutions and to facilitate the transfer of state of the art results and new applications of TDA. * - * – Data structures to represent, construct and manipulate simplicial complexes. - * - * – Algorithms to compute persistent homology and multi-field persistent homology. + * The current release of the GUDHI library includes: * - * – Simplification methods via implicit representations. + * \li Data structures to represent, construct and manipulate simplicial complexes. + * \li Algorithms to compute persistent homology and multi-field persistent homology. + * \li Simplification methods via implicit representations. * * - * The library is available here and the documentation is - * available at this webpage. + * The library is available here + * and the documentation is available at this + * webpage. * - * \section ReleaseHistory Release history - * - * – 24-10-2015; release v.1.2.0, GudhUI (Gudhi Qt demo), Simplex tree coface function, Clang build issue fix. - * - * – 12-18-2014; release v.1.1, Skeleton-Blocker data-structure, simplification package, additional examples for topological persistence. - * - * – 08-12-2014; release v. 1.0.2, initialize simplex keys in initialize_filtration in Simplex_tree - * - * – 07-11-2014: release v. 1.0.1, bug fix in summing columns in Persistent_cohomology - * - * – 06-23-2014: release v. 1.0 + * The library comes with data sets, \ref demos and \ref testsuites. * - * \section Citation How to cite Gudhi - * Each Gudhi module (either data structures or algorithms) has an author section. + * \subsection People People * - * Thank you to refer to this section, and to cite the author(s) of all the module you are using. 
+ * The development of the GUDHI library is steered by an Editorial Board, which is responsible for guiding the + * development of the library, developers, and the user community. * - * \section Upcoming Upcoming - * - * – Alpha complex. - * - * – Bottleneck distance. - * - * – Zig zag persistence. - * - * – Witness complex. - * - * – Tangential complex. - * - * – Clustering. + * The Editorial board is composed of: + * + * \li + * Jean-Daniel Boissonnat | INRIA Sophia Antipolis - Méditerranée + * \li Marc Glisse | INRIA Saclay - Ile de France + * \li Clément Jamin | INRIA Sophia Antipolis - Méditerranée + * \li Vincent Rouvreau | INRIA Saclay - Ile de France * - * \section Contributions Contributions - * Gudhi is opened to external contributions. If you just want to report bugs, feel free to contact us. + * \section Contributions Bug reports and contributions + * Please help us improving the quality of the GUDHI library. You may report bugs or suggestions to: * \verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim * - * If you want to join our development team, you will have to create an accout on the - * INRIA forge and ask to join the GUDHI project. + * Gudhi is **open** to external contributions. If you want to join our development team, please contact us. + * + * + * \section ReleaseHistory Release history + * + * \li 24-10-2015; release v.1.2.0, GudhUI (Gudhi Qt demo), Simplex tree coface function, Clang build issue fix. + * \li 18-12-2014; release v.1.1, Skeleton-Blocker data-structure, simplification package, additional examples for topological persistence. + * \li 08-12-2014; release v. 1.0.2, initialize simplex keys in initialize_filtration in Simplex_tree + * \li 07-11-2014: release v. 1.0.1, bug fix in summing columns in Persistent_cohomology + * \li 23-06-2014: release v. 1.0 + * + * \section Upcoming Upcoming + * + * The library is under active development. New packages to be released next include: + * \li Alpha complex. + * \li Bottleneck distance. + * \li Zig zag persistence. + * \li Witness complex. + * \li Tangential complex. + * \li Clustering. +*/ + +/*! \page Citation Acknowledging the GUDHI library + * We kindly ask users to cite the GUDHI library as appropriately as possible in their papers, and to mention the use + * of the GUDHI library on the web pages of their projects using GUDHI and provide us with links to these web pages. + * Feel free to contact us in case you have any question or remark on this topic. * - * Your development will have to follow our - * submitting - * process (code, documentation, and unitary tests review) and not to break the existing - * test suite. + * We provide \ref GudhiBibtex entries for the modules of the User and Reference Manual, as well as for publications + * directly related to the GUDHI library. 
+ * \section GudhiBibtex GUDHI bibtex + * \verbinclude biblio/how_to_cite_gudhi.bib */ + -- cgit v1.2.3 From 393daefc5078498b472a28ed0763c34b8351ba87 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Tue, 3 Nov 2015 15:39:42 +0000 Subject: Doxygen fix after doc review git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/trunk@882 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: e702746a6f8bdc7bb06bee2710ebe60dff504c69 --- src/Contraction/include/gudhi/Edge_contraction.h | 2 +- src/common/doc/main_page.h | 175 +++++++++-------------- 2 files changed, 71 insertions(+), 106 deletions(-) (limited to 'src/common') diff --git a/src/Contraction/include/gudhi/Edge_contraction.h b/src/Contraction/include/gudhi/Edge_contraction.h index f3076057..349bb7d8 100644 --- a/src/Contraction/include/gudhi/Edge_contraction.h +++ b/src/Contraction/include/gudhi/Edge_contraction.h @@ -37,7 +37,7 @@ namespace Gudhi { namespace contraction { -/** \defgroup contr Contraction +/** \defgroup contr Edge contraction \author David Salinas diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h index 43297b45..730b74df 100644 --- a/src/common/doc/main_page.h +++ b/src/common/doc/main_page.h @@ -1,101 +1,24 @@ -/** -\mainpage - -\image html "Gudhi_banner.jpg" "" width=20cm - -The Gudhi library (Geometric Understanding in Higher Dimensions) is a generic C++ library for -topological analysis of high-dimensional data whose goal is to provide robust, efficient, flexible and easy to use -implementations of -state-of-the-art algorithms and data structures for computational topology. -This library is part of the Gudhi project. - -The current release of the library allows to use several data-structures for simplicial complexes : -simplex tree, Hasse diagram or skeleton-blocker. Several operations can then be done on top of these -representations such as persistent homology computation or simplification. -All data-structures are generic and several of their aspects (such as stored elements, policies) -can be parameterized via template classes. -We refer to -\cite gudhilibrary_ICMS14 -for a detailed description of the design of the library. - -\section installation Gudhi installation - -As Gudhi is a header only library, there is no need to install the library. - -Examples of Gudhi headers inclusion can be found in \ref demos. - - -\section compiling Compiling - -The library uses c++11 and requires Boost with version 1.48.0 or more recent. -It is a multi-platform library and compiles on Linux, Mac OSX and Visual Studio 2013. - - -\subsection gmp GMP: -The multi-field persistent homology algorithm requires GMP which is a free library for arbitrary-precision -arithmetic, operating on signed integers, rational numbers, and floating point numbers. - -The following examples require the GNU Multiple Precision Arithmetic Library (GMP) -and will not be built if GMP is not installed: - - Persistent_cohomology/rips_multifield_persistence - -Having GMP version 4.2 or higher installed is recommended. - -\subsection cgal CGAL: -CGAL is a C++ library which provides easy access to efficient and reliable geometric algorithms. - -The following example requires the Computational Geometry Algorithms Library (CGAL) -and will not be built if CGAL is not installed: - - GudhUI - - Persistent_cohomology/alpha_shapes_persistence - - Simplex_tree/simplex_tree_from_alpha_shapes_3 - -Having CGAL version 4.4 or higher installed is recommended. 
The procedure to install this library according to -your operating system is detailed here http://doc.cgal.org/latest/Manual/installation.html - -\subsection demos Demos and examples - -To build the demos and libraries, run the following commands in a terminal: - -\verbatim -cd /path-to-gudhi/ -mkdir build -cd build/ -cmake .. -make -\endverbatim - -\subsection testsuites Test suites - -To test your build, run the following command in a terminal: - -\verbatim -make test -\endverbatim - -\details - -\copyright GNU General Public License v3. -\verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim - -*/ - -/*! \page Software Software - * \tableofcontents - * \section SoftwareIntroduction Introduction - * The GUDHI library is a C++ open source library **intended to provide** the central data structures and algorithms - * that underly applications in Geometric and Topological Data Analysis - * (TDA). The GUDHI - * library is developed as part of the GUDHI - * project supported by the European Research Council. The GUDHI library can both help the development of new - * algorithmic solutions and to facilitate the transfer of state of the art results and new applications of TDA. +/*! \mainpage + * \image html "Gudhi_banner.jpg" "" width=20cm + * + * \section Introduction Introduction + * The Gudhi library (Geometric Understanding in Higher Dimensions) is a generic open source C++ library for + * Computational Topology and Topological Data Analysis + * (TDA). + * The GUDHI library is developed as part of the + * GUDHI project supported by the European + * Research Council. The GUDHI library intends to help the development of new algorithmic solutions in TDA and their + * transfer to applications. It provides robust, efficient, flexible and easy to use implementations of + * state-of-the-art algorithms and data structures. * * The current release of the GUDHI library includes: * * \li Data structures to represent, construct and manipulate simplicial complexes. * \li Algorithms to compute persistent homology and multi-field persistent homology. - * \li Simplification methods via implicit representations. + * \li Simplication of simplicial complexes by edge contraction. * + * All data-structures are generic and several of their aspects can be parameterized via template classes. + * We refer to \cite gudhilibrary_ICMS14 for a detailed description of the design of the library. * * The library is available here * and the documentation is available at this @@ -103,9 +26,11 @@ make test * * The library comes with data sets, \ref demos and \ref testsuites. * - * \subsection People People + * Gudhi is also accessible though the + * R package TDA + * (Statistical Tools for Topological Data Analysis). * - * The development of the GUDHI library is steered by an Editorial Board, which is responsible for guiding the + * The development of the GUDHI library is steered by an Editorial Board, who is responsible for guiding the * development of the library, developers, and the user community. * * The Editorial board is composed of: @@ -116,22 +41,62 @@ make test * \li Clément Jamin | INRIA Sophia Antipolis - Méditerranée * \li Vincent Rouvreau | INRIA Saclay - Ile de France * +*/ + +/*! \page installation Gudhi installation + * As Gudhi is a header only library, there is no need to install the library. + * + * Examples of Gudhi headers inclusion can be found in \ref demos. + * + * \section compiling Compiling + * The library uses c++11 and requires Boost with version 1.48.0 or + * more recent. 
It is a multi-platform library and compiles on Linux, Mac OSX and Visual Studio 2013. + * + * \subsection gmp GMP: + * The multi-field persistent homology algorithm requires GMP which is a free library for arbitrary-precision + * arithmetic, operating on signed integers, rational numbers, and floating point numbers. + * + * The following example requires the GNU Multiple Precision Arithmetic + * Library (GMP) and will not be built if GMP is not installed: + * \li Persistent_cohomology/rips_multifield_persistence + * Having GMP version 4.2 or higher installed is recommended. + * + * \subsection cgal CGAL: + * CGAL is a C++ library which provides easy access to efficient and reliable geometric algorithms. + * + * The following examples require the Computational Geometry Algorithms + * Library (CGAL) and will not be built if CGAL is not installed: + * \li GudhUI + * \li Persistent_cohomology/alpha_shapes_persistence + * \li Simplex_tree/simplex_tree_from_alpha_shapes_3 + * Having CGAL version 4.4 or higher installed is recommended. The procedure to install this library according to + * your operating system is detailed here http://doc.cgal.org/latest/Manual/installation.html + * + * \subsection demos Demos and examples + * To build the demos and libraries, run the following commands in a terminal: + * \verbatim + * cd /path-to-gudhi/ + * mkdir build + * cd build/ + * cmake .. + * make + * \endverbatim + * + * \subsection testsuites Test suites + * To test your build, run the following command in a terminal: + * \verbatim + * make test + * \endverbatim + * * \section Contributions Bug reports and contributions * Please help us improving the quality of the GUDHI library. You may report bugs or suggestions to: * \verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim * - * Gudhi is **open** to external contributions. If you want to join our development team, please contact us. + * Gudhi is open to external contributions. If you want to join our development team, please contact us. * - * - * \section ReleaseHistory Release history - * - * \li 24-10-2015; release v.1.2.0, GudhUI (Gudhi Qt demo), Simplex tree coface function, Clang build issue fix. - * \li 18-12-2014; release v.1.1, Skeleton-Blocker data-structure, simplification package, additional examples for topological persistence. - * \li 08-12-2014; release v. 1.0.2, initialize simplex keys in initialize_filtration in Simplex_tree - * \li 07-11-2014: release v. 1.0.1, bug fix in summing columns in Persistent_cohomology - * \li 23-06-2014: release v. 1.0 - * - * \section Upcoming Upcoming +*/ + +/*! \page Upcoming Upcoming * * The library is under active development. New packages to be released next include: * \li Alpha complex. -- cgit v1.2.3 From f958607003d8c36902cc7eb856b9e75d6752077a Mon Sep 17 00:00:00 2001 From: glisse Date: Tue, 3 Nov 2015 16:03:09 +0000 Subject: Lien vers ma page web. 
git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/trunk@883 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 28883bac2ff28977b30615bedd4f7d33b41d07c3 --- src/common/doc/main_page.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'src/common') diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h index 730b74df..8b07bb18 100644 --- a/src/common/doc/main_page.h +++ b/src/common/doc/main_page.h @@ -37,7 +37,7 @@ * * \li * Jean-Daniel Boissonnat | INRIA Sophia Antipolis - Méditerranée - * \li Marc Glisse | INRIA Saclay - Ile de France + * \li Marc Glisse | INRIA Saclay - Ile de France * \li Clément Jamin | INRIA Sophia Antipolis - Méditerranée * \li Vincent Rouvreau | INRIA Saclay - Ile de France * -- cgit v1.2.3 From a8d0580ab66f309056dcfbfb33f581cd2cbaba5e Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Wed, 4 Nov 2015 06:50:52 +0000 Subject: Missing a space for not to be on the same line git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/trunk@884 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 28ce24bf496e1df4360845dffe1c8ec70c1da72a --- src/common/doc/main_page.h | 1 + 1 file changed, 1 insertion(+) (limited to 'src/common') diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h index 8b07bb18..ee8e1008 100644 --- a/src/common/doc/main_page.h +++ b/src/common/doc/main_page.h @@ -69,6 +69,7 @@ * \li GudhUI * \li Persistent_cohomology/alpha_shapes_persistence * \li Simplex_tree/simplex_tree_from_alpha_shapes_3 + * * Having CGAL version 4.4 or higher installed is recommended. The procedure to install this library according to * your operating system is detailed here http://doc.cgal.org/latest/Manual/installation.html * -- cgit v1.2.3 From 72f647ad4b802d08072925a374324e7ace4b2c4d Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Wed, 4 Nov 2015 08:21:19 +0000 Subject: doc review git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/trunk@886 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: a1729600550c0065173364d50775ce15d40dd026 --- src/common/doc/main_page.h | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) (limited to 'src/common') diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h index ee8e1008..689e7a4d 100644 --- a/src/common/doc/main_page.h +++ b/src/common/doc/main_page.h @@ -30,10 +30,7 @@ * R package TDA * (Statistical Tools for Topological Data Analysis). * - * The development of the GUDHI library is steered by an Editorial Board, who is responsible for guiding the - * development of the library, developers, and the user community. 
- * - * The Editorial board is composed of: + * The development of the GUDHI library is steered by an Editorial Board composed of: * * \li * Jean-Daniel Boissonnat | INRIA Sophia Antipolis - Méditerranée -- cgit v1.2.3 From 03b68a5206201bf5bbffb7e4f6a6718907f23b2a Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Mon, 9 Nov 2015 09:18:55 +0000 Subject: Modification for Porquerolles Workshop Bar_code_persistence and alpha_complex_persistence git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/alphashapes@893 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 63d2d21d5ff7c9a94a67eafc326bf8a561bf8166 --- src/Alpha_complex/example/CMakeLists.txt | 14 ++++ src/GudhUI/CMakeLists.txt | 40 +++++++++++ src/GudhUI/alpha_complex_persistence.cpp | 78 ++++++++++++++++++++ src/GudhUI/model/Model.h | 2 +- src/GudhUI/utils/Bar_code_persistence.h | 84 ++++++++++++++++++++++ src/GudhUI/view/FirstCoordProjector.h | 5 +- src/Persistent_cohomology/example/CMakeLists.txt | 44 ++++++++++++ .../example/alpha_complex_persistence.cpp | 55 ++++++++++++++ .../example/alpha_shapes_persistence.cpp | 14 ++-- .../include/gudhi/Persistent_cohomology.h | 8 +++ src/Simplex_tree/include/gudhi/Simplex_tree.h | 2 +- .../include/gudhi/Delaunay_triangulation_off_io.h | 2 +- src/common/include/gudhi/Off_reader.h | 2 +- 13 files changed, 338 insertions(+), 12 deletions(-) create mode 100644 src/GudhUI/alpha_complex_persistence.cpp create mode 100644 src/GudhUI/utils/Bar_code_persistence.h create mode 100644 src/Persistent_cohomology/example/alpha_complex_persistence.cpp (limited to 'src/common') diff --git a/src/Alpha_complex/example/CMakeLists.txt b/src/Alpha_complex/example/CMakeLists.txt index 04fc34af..10b87f04 100644 --- a/src/Alpha_complex/example/CMakeLists.txt +++ b/src/Alpha_complex/example/CMakeLists.txt @@ -8,6 +8,20 @@ if(CGAL_FOUND) message(STATUS "CGAL version: ${CGAL_VERSION}.") include( ${CGAL_USE_FILE} ) + # In CMakeLists.txt, when include(${CGAL_USE_FILE}), CXX_FLAGS are overwritten. + # cf. http://doc.cgal.org/latest/Manual/installation.html#title40 + # A workaround is to add "-std=c++11" again. + # A fix would be to use https://cmake.org/cmake/help/v3.1/prop_gbl/CMAKE_CXX_KNOWN_FEATURES.html + # or even better https://cmake.org/cmake/help/v3.1/variable/CMAKE_CXX_STANDARD.html + # but it implies to use cmake version 3.1 at least. + if(NOT MSVC) + include(CheckCXXCompilerFlag) + CHECK_CXX_COMPILER_FLAG(-std=c++11 COMPILER_SUPPORTS_CXX11) + if(COMPILER_SUPPORTS_CXX11) + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11") + endif() + endif() + # - End of workaround find_package(Eigen3 3.1.0) if (EIGEN3_FOUND) diff --git a/src/GudhUI/CMakeLists.txt b/src/GudhUI/CMakeLists.txt index 71f4fd1a..5c2afdd0 100644 --- a/src/GudhUI/CMakeLists.txt +++ b/src/GudhUI/CMakeLists.txt @@ -78,6 +78,46 @@ if ( CGAL_FOUND AND QT4_FOUND AND OPENGL_FOUND AND QGLVIEWER_FOUND ) target_link_libraries( GudhUI ${QT_LIBRARIES} ${QGLVIEWER_LIBRARIES} ) target_link_libraries( GudhUI ${OPENGL_gl_LIBRARY} ${OPENGL_glu_LIBRARY} ) +############################################################################### + if (NOT CGAL_VERSION VERSION_LESS 4.7.0) + message(STATUS "CGAL version: ${CGAL_VERSION}.") + + include( ${CGAL_USE_FILE} ) + # In CMakeLists.txt, when include(${CGAL_USE_FILE}), CXX_FLAGS are overwritten. + # cf. http://doc.cgal.org/latest/Manual/installation.html#title40 + # A workaround is to add "-std=c++11" again. 
+ # A fix would be to use https://cmake.org/cmake/help/v3.1/prop_gbl/CMAKE_CXX_KNOWN_FEATURES.html + # or even better https://cmake.org/cmake/help/v3.1/variable/CMAKE_CXX_STANDARD.html + # but it implies to use cmake version 3.1 at least. + if(NOT MSVC) + include(CheckCXXCompilerFlag) + CHECK_CXX_COMPILER_FLAG(-std=c++11 COMPILER_SUPPORTS_CXX11) + if(COMPILER_SUPPORTS_CXX11) + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11") + endif() + endif() + # - End of workaround + + find_package(Eigen3 3.1.0) + if (EIGEN3_FOUND) + message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") + include( ${EIGEN3_USE_FILE} ) + if (CMAKE_BUILD_TYPE MATCHES Debug) + # For programs to be more verbose + add_definitions(-DDEBUG_TRACES) + endif() + + add_executable (acp alpha_complex_persistence.cpp) + target_link_libraries(acp ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY} ${QT_LIBRARIES}) + + else() + message(WARNING "Eigen3 not found. Version 3.1.0 is required for Alpha shapes feature.") + endif() + else() + message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile Alpha shapes feature. Version 4.6.0 is required.") + endif () +############################################################################### + else() message(STATUS "NOTICE: GudhUI requires CGAL, the QGLViewer, OpenGL and Qt4, and will not be compiled.") endif() diff --git a/src/GudhUI/alpha_complex_persistence.cpp b/src/GudhUI/alpha_complex_persistence.cpp new file mode 100644 index 00000000..4f85459a --- /dev/null +++ b/src/GudhUI/alpha_complex_persistence.cpp @@ -0,0 +1,78 @@ +#include +#include + + +#include + +// to construct a Delaunay_triangulation from a OFF file +#include +#include +#include + +#include "utils/Bar_code_persistence.h" + +void usage(char * const progName) { + std::cerr << "Usage: " << progName << " filename.off " << // alpha_square_max_value[double] " << + "coeff_field_characteristic[integer > 0] min_persistence[double >= -1.0]" << std::endl; + std::cerr << " i.e.: " << progName << " ../../data/points/alphacomplexdoc.off 60.0 2 0.02" << std::endl; + exit(-1); // ----- >> +} + +int main(int argc, char **argv) { + if (argc != 4) { + std::cerr << "Error: Number of arguments (" << argc << ") is not correct" << std::endl; + usage(argv[0]); + } + + QApplication qtapp(argc, argv); + + std::string off_file_name(argv[1]); + // double alpha_square_max_value = atof(argv[2]); + double alpha_square_max_value = 1e20; + int coeff_field_characteristic = atoi(argv[2]); // argv[3] + double min_persistence = atof(argv[3]); // argv[4] + + // ---------------------------------------------------------------------------- + // Init of an alpha complex from an OFF file + // ---------------------------------------------------------------------------- + typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Kernel; + Gudhi::alphacomplex::Alpha_complex alpha_complex_from_file(off_file_name, alpha_square_max_value); + + // ---------------------------------------------------------------------------- + // Display information about the alpha complex + // ---------------------------------------------------------------------------- + std::cout << "Alpha complex is of dimension " << alpha_complex_from_file.dimension() << + " - " << alpha_complex_from_file.num_simplices() << " simplices - " << + alpha_complex_from_file.num_vertices() << " vertices." 
<< std::endl; + + // Sort the simplices in the order of the filtration + alpha_complex_from_file.initialize_filtration(); + + std::cout << "Simplex_tree dim: " << alpha_complex_from_file.dimension() << std::endl; + // Compute the persistence diagram of the complex + Gudhi::persistent_cohomology::Persistent_cohomology< Gudhi::alphacomplex::Alpha_complex, + Gudhi::persistent_cohomology::Field_Zp > pcoh(alpha_complex_from_file); + + std::cout << "coeff_field_characteristic " << coeff_field_characteristic << + " - min_persistence " << min_persistence << std::endl; + + // initializes the coefficient field for homology + pcoh.init_coefficients(coeff_field_characteristic); + + pcoh.compute_persistent_cohomology(min_persistence); + + pcoh.output_diagram(); + + std::vector> persistence_vector; + pcoh.get_persistence(persistence_vector); + + Bar_code_persistence bc_persistence; + + for (auto persistence : persistence_vector) { + bc_persistence.insert(persistence.first, persistence.second); + } + + bc_persistence.show(); + + return qtapp.exec(); +} diff --git a/src/GudhUI/model/Model.h b/src/GudhUI/model/Model.h index d78cbad9..4f8d48ee 100644 --- a/src/GudhUI/model/Model.h +++ b/src/GudhUI/model/Model.h @@ -71,7 +71,7 @@ class CGAL_geometric_flag_complex_wrapper { void maximal_face(std::vector vertices) { if (!load_only_points_) { - std::cout << "size:" << vertices.size() << std::endl; + //std::cout << "size:" << vertices.size() << std::endl; for (int i = 0; i < vertices.size(); ++i) for (int j = i + 1; j < vertices.size(); ++j) complex_.add_edge(Vertex_handle(vertices[i]), Vertex_handle(vertices[j])); diff --git a/src/GudhUI/utils/Bar_code_persistence.h b/src/GudhUI/utils/Bar_code_persistence.h new file mode 100644 index 00000000..a1a46ea8 --- /dev/null +++ b/src/GudhUI/utils/Bar_code_persistence.h @@ -0,0 +1,84 @@ +#include // isfinite + +#include + +#include +#include +#include +#include +#include + +#include +#include +#include // NaN, infinity +#include // for pair + +class Bar_code_persistence { + private: + typedef std::vector> Persistence; + Persistence persistence_vector; + double min_birth; + double max_death; + + public: + + Bar_code_persistence() + : min_birth(std::numeric_limits::quiet_NaN()), + max_death(std::numeric_limits::quiet_NaN()) { } + + void insert(double birth, double death) { + persistence_vector.push_back(std::make_pair(birth, death)); + if (std::isfinite(birth)) { + if ((birth < min_birth) || (std::isnan(min_birth))) + min_birth = birth; + if ((birth > max_death) || (std::isnan(max_death))) + max_death = birth; + } + if (std::isfinite(death)) + if ((death > max_death) || (std::isnan(max_death))) + max_death = death; + } + + void show() { + // Create a view, put a scene in it + QGraphicsView * view = new QGraphicsView(); + QGraphicsScene * scene = new QGraphicsScene(); + view->setScene(scene); + double ratio = 600.0 / (max_death - min_birth); + //std::cout << "min_birth=" << min_birth << " - max_death=" << max_death << " - ratio=" << ratio << std::endl; + + double height = 0.0, birth = 0.0, death = 0.0; + int pers_num = 1; + for (auto& persistence : persistence_vector) { + height = 5.0 * pers_num; + //std::cout << "[" << pers_num << "] birth=" << persistence.first << " - death=" << persistence.second << std::endl; + if (std::isfinite(persistence.first)) + birth = ((persistence.first - min_birth) * ratio) + 50.0; + else + birth = 0.0; + + if (std::isfinite(persistence.second)) + death = ((persistence.second - min_birth) * ratio) + 50.0; + else + death = 700.0; + + 
scene->addLine(birth, height, death, height, QPen(Qt::blue, 2)); + pers_num++; + } + height += 10.0; + // scale line + scene->addLine(0, height, 700.0, height, QPen(Qt::black, 1)); + int modulo = 0; + for (double scale = 50.0; scale < 700.0; scale += 50.0) { + modulo++; + // scale small dash + scene->addLine(scale, height - 3.0, scale, height + 3.0, QPen(Qt::black, 1)); + // scale text + QString scale_value = QString::number(((scale - 50.0) / ratio) + min_birth); + QGraphicsTextItem* dimText = scene->addText(scale_value, QFont("Helvetica", 8)); + dimText->setPos(scale - (3.0 * scale_value.size()), height + 9.0 * (modulo % 2)); + } + // Show the view + view->show(); + } +}; diff --git a/src/GudhUI/view/FirstCoordProjector.h b/src/GudhUI/view/FirstCoordProjector.h index 529d2d42..3ceda3f5 100644 --- a/src/GudhUI/view/FirstCoordProjector.h +++ b/src/GudhUI/view/FirstCoordProjector.h @@ -32,8 +32,11 @@ class FirstCoordProjector3D : public Projector3D { typedef Projector3D::Point_3 Point_3; Point_3 operator()(const Point& p) const { - assert(p.dimension() >= 3); + if (p.dimension() >= 3) return Point_3(p.x(), p.y(), p.z()); + else if (p.dimension() >= 2) + return Point_3(p.x(), p.y(), 0.0); + } }; diff --git a/src/Persistent_cohomology/example/CMakeLists.txt b/src/Persistent_cohomology/example/CMakeLists.txt index ea69352e..74c2e778 100644 --- a/src/Persistent_cohomology/example/CMakeLists.txt +++ b/src/Persistent_cohomology/example/CMakeLists.txt @@ -39,6 +39,50 @@ if(GMPXX_FOUND AND GMP_FOUND) target_link_libraries(alpha_shapes_persistence ${Boost_SYSTEM_LIBRARY} ${GMPXX_LIBRARIES} ${GMP_LIBRARIES} ${CGAL_LIBRARY}) add_test(alpha_shapes_persistence_2_0_5 ${CMAKE_CURRENT_BINARY_DIR}/alpha_shapes_persistence ${CMAKE_SOURCE_DIR}/data/points/bunny_5000 2 0.5) #add_test(alpha_shapes_persistence_3_3_100 ${CMAKE_CURRENT_BINARY_DIR}/alpha_shapes_persistence ${CMAKE_SOURCE_DIR}/data/points/bunny_5000.st -p 3 -m 100) + + + + + if (NOT CGAL_VERSION VERSION_LESS 4.7.0) + message(STATUS "CGAL version: ${CGAL_VERSION}.") + + include( ${CGAL_USE_FILE} ) + # In CMakeLists.txt, when include(${CGAL_USE_FILE}), CXX_FLAGS are overwritten. + # cf. http://doc.cgal.org/latest/Manual/installation.html#title40 + # A workaround is to add "-std=c++11" again. + # A fix would be to use https://cmake.org/cmake/help/v3.1/prop_gbl/CMAKE_CXX_KNOWN_FEATURES.html + # or even better https://cmake.org/cmake/help/v3.1/variable/CMAKE_CXX_STANDARD.html + # but it implies to use cmake version 3.1 at least. + if(NOT MSVC) + include(CheckCXXCompilerFlag) + CHECK_CXX_COMPILER_FLAG(-std=c++11 COMPILER_SUPPORTS_CXX11) + if(COMPILER_SUPPORTS_CXX11) + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11") + endif() + endif() + # - End of workaround + + find_package(Eigen3 3.1.0) + if (EIGEN3_FOUND) + message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") + include( ${EIGEN3_USE_FILE} ) + if (CMAKE_BUILD_TYPE MATCHES Debug) + # For programs to be more verbose + add_definitions(-DDEBUG_TRACES) + endif() + + add_executable (alphacomplexpersistence alpha_complex_persistence.cpp) + target_link_libraries(alphacomplexpersistence ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) + + else() + message(WARNING "Eigen3 not found. Version 3.1.0 is required for Alpha shapes feature.") + endif() + else() + message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile Alpha shapes feature. 
Version 4.6.0 is required.") + endif () + + + endif() endif() diff --git a/src/Persistent_cohomology/example/alpha_complex_persistence.cpp b/src/Persistent_cohomology/example/alpha_complex_persistence.cpp new file mode 100644 index 00000000..fbadf673 --- /dev/null +++ b/src/Persistent_cohomology/example/alpha_complex_persistence.cpp @@ -0,0 +1,55 @@ +#include +#include + +// to construct a Delaunay_triangulation from a OFF file +#include +#include +#include + +void usage(char * const progName) { + std::cerr << "Usage: " << progName << " filename.off alpha_square_max_value[double] " << + "coeff_field_characteristic[integer > 0] min_persistence[double >= -1.0]" << std::endl; + std::cerr << " i.e.: " << progName << " ../../data/points/alphacomplexdoc.off 60.0 2 0.02" << std::endl; + exit(-1); // ----- >> +} + +int main(int argc, char **argv) { + if (argc != 5) { + std::cerr << "Error: Number of arguments (" << argc << ") is not correct" << std::endl; + usage(argv[0]); + } + + std::string off_file_name(argv[1]); + double alpha_square_max_value = atof(argv[2]); + int coeff_field_characteristic = atoi(argv[3]); + double min_persistence = atof(argv[4]); + + // ---------------------------------------------------------------------------- + // Init of an alpha complex from an OFF file + // ---------------------------------------------------------------------------- + typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Kernel; + Gudhi::alphacomplex::Alpha_complex alpha_complex_from_file(off_file_name, alpha_square_max_value); + + // ---------------------------------------------------------------------------- + // Display information about the alpha complex + // ---------------------------------------------------------------------------- + std::cout << "Alpha complex is of dimension " << alpha_complex_from_file.dimension() << + " - " << alpha_complex_from_file.num_simplices() << " simplices - " << + alpha_complex_from_file.num_vertices() << " vertices." 
<< std::endl; + + // Sort the simplices in the order of the filtration + alpha_complex_from_file.initialize_filtration(); + + std::cout << "Simplex_tree dim: " << alpha_complex_from_file.dimension() << std::endl; + // Compute the persistence diagram of the complex + Gudhi::persistent_cohomology::Persistent_cohomology< Gudhi::alphacomplex::Alpha_complex, + Gudhi::persistent_cohomology::Field_Zp > pcoh(alpha_complex_from_file); + // initializes the coefficient field for homology + pcoh.init_coefficients(coeff_field_characteristic); + + pcoh.compute_persistent_cohomology(min_persistence); + + pcoh.output_diagram(); + + return 0; +} diff --git a/src/Persistent_cohomology/example/alpha_shapes_persistence.cpp b/src/Persistent_cohomology/example/alpha_shapes_persistence.cpp index 6d5eebcf..235ea141 100644 --- a/src/Persistent_cohomology/example/alpha_shapes_persistence.cpp +++ b/src/Persistent_cohomology/example/alpha_shapes_persistence.cpp @@ -124,6 +124,12 @@ void usage(char * const progName) { } int main(int argc, char * const argv[]) { + // program args management + if (argc != 4) { + std::cerr << "Error: Number of arguments (" << argc << ") is not correct\n"; + usage(argv[0]); + } + int coeff_field_characteristic = 0; int returnedScanValue = sscanf(argv[2], "%d", &coeff_field_characteristic); if ((returnedScanValue == EOF) || (coeff_field_characteristic <= 0)) { @@ -138,12 +144,6 @@ int main(int argc, char * const argv[]) { usage(argv[0]); } - // program args management - if (argc != 4) { - std::cerr << "Error: Number of arguments (" << argc << ") is not correct\n"; - usage(argv[0]); - } - // Read points from file std::string filegraph = argv[1]; std::list lp; @@ -239,7 +239,7 @@ int main(int argc, char * const argv[]) { } } // Construction of the simplex_tree - Filtration_value filtr = std::sqrt(*the_alpha_value_iterator); + Filtration_value filtr = /*std::sqrt*/(*the_alpha_value_iterator); #ifdef DEBUG_TRACES std::cout << "filtration = " << filtr << std::endl; #endif // DEBUG_TRACES diff --git a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h index d096792f..f6773bac 100644 --- a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h +++ b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h @@ -713,6 +713,14 @@ class Persistent_cohomology { } } + void get_persistence(std::vector>& persistence) { + cmp_intervals_by_length cmp(cpx_); + std::sort(std::begin(persistent_pairs_), std::end(persistent_pairs_), cmp); + for (auto pair : persistent_pairs_) { + persistence.push_back(std::make_pair(cpx_->filtration(get<0>(pair)), cpx_->filtration(get<1>(pair)))); + } + } + void write_output_diagram(std::string diagram_name) { std::ofstream diagram_out(diagram_name.c_str()); cmp_intervals_by_length cmp(cpx_); diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree.h b/src/Simplex_tree/include/gudhi/Simplex_tree.h index c7a0b5d5..1ca43ff9 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree.h @@ -1096,7 +1096,7 @@ class Simplex_tree { os << filtration(sh) << " \n"; } } - + private: Vertex_handle null_vertex_; /** \brief Upper bound on the filtration values of the simplices.*/ diff --git a/src/common/include/gudhi/Delaunay_triangulation_off_io.h b/src/common/include/gudhi/Delaunay_triangulation_off_io.h index 0c5474c9..47066a94 100644 --- a/src/common/include/gudhi/Delaunay_triangulation_off_io.h +++ 
b/src/common/include/gudhi/Delaunay_triangulation_off_io.h @@ -278,7 +278,7 @@ class Delaunay_triangulation_off_writer { std::vector vertexVector; stream << std::distance(cit->vertices_begin(), cit->vertices_end()) << " "; for (auto vit = cit->vertices_begin(); vit != cit->vertices_end(); ++vit) { - stream << points_to_vh[(*vit)->point()] << " "; + stream << points_to_vh[(*vit)->point()] - 1 << " "; } stream << std::endl; } diff --git a/src/common/include/gudhi/Off_reader.h b/src/common/include/gudhi/Off_reader.h index bba5a63a..e45a7600 100644 --- a/src/common/include/gudhi/Off_reader.h +++ b/src/common/include/gudhi/Off_reader.h @@ -160,7 +160,7 @@ class Off_reader { iss >> num_face_vertices; std::vector face; face.assign(std::istream_iterator(iss), std::istream_iterator()); - if (face.size() != off_info_.dim) return false; + //if (face.size() != (off_info_.dim + 1)) return false; visitor.maximal_face(face); } return true; -- cgit v1.2.3 From 8881190bccba9da4af0a07c701369099fd7f2277 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Thu, 12 Nov 2015 16:26:05 +0000 Subject: code review fix prune_above_filtration and remove_maximal_simplex in Simplex_tree.h make_filtration_non_decreasing and rec_make_filtration_non_decreasing in Simplex_tree.h git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/alphashapes@910 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: a20c9da65a5a3294e42ad2dd45a399d77fb5ad30 --- src/Alpha_complex/doc/Intro_alpha_complex.h | 5 ++ .../example/Alpha_complex_from_off.cpp | 6 +- src/Alpha_complex/example/CMakeLists.txt | 2 + src/Alpha_complex/include/gudhi/Alpha_complex.h | 24 ++++-- src/Alpha_complex/test/Alpha_complex_unit_test.cpp | 54 +++++++++---- src/Simplex_tree/include/gudhi/Simplex_tree.h | 91 ++++++++++++++++++++++ .../include/gudhi/Delaunay_triangulation_off_io.h | 7 +- src/common/test/dtoffrw_alphashapedoc_result.off | 12 +-- 8 files changed, 166 insertions(+), 35 deletions(-) (limited to 'src/common') diff --git a/src/Alpha_complex/doc/Intro_alpha_complex.h b/src/Alpha_complex/doc/Intro_alpha_complex.h index 2cb37578..1fb8fdee 100644 --- a/src/Alpha_complex/doc/Intro_alpha_complex.h +++ b/src/Alpha_complex/doc/Intro_alpha_complex.h @@ -20,6 +20,9 @@ * along with this program. If not, see . 
*/ +#ifndef INTRO_ALPHA_COMPLEX_H_ +#define INTRO_ALPHA_COMPLEX_H_ + // needs namespace for Doxygen to link on classes namespace Gudhi { // needs namespace for Doxygen to link on classes @@ -117,3 +120,5 @@ namespace alphacomplex { } // namespace alphacomplex } // namespace Gudhi + +#endif // INTRO_ALPHA_COMPLEX_H_ diff --git a/src/Alpha_complex/example/Alpha_complex_from_off.cpp b/src/Alpha_complex/example/Alpha_complex_from_off.cpp index e140fe3d..cd6f5a4b 100644 --- a/src/Alpha_complex/example/Alpha_complex_from_off.cpp +++ b/src/Alpha_complex/example/Alpha_complex_from_off.cpp @@ -25,7 +25,7 @@ int main(int argc, char **argv) { // ---------------------------------------------------------------------------- typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Kernel; Gudhi::alphacomplex::Alpha_complex alpha_complex_from_file(off_file_name, alpha_square_max_value); - + // ---------------------------------------------------------------------------- // Display information about the alpha complex // ---------------------------------------------------------------------------- @@ -35,14 +35,14 @@ int main(int argc, char **argv) { std::cout << "Iterator on alpha complex simplices in the filtration order, with [filtration value]:" << std::endl; for (auto f_simplex : alpha_complex_from_file.filtration_simplex_range()) { - if (alpha_complex_from_file.filtration(f_simplex) <= alpha_complex_from_file.filtration()) { + //if (alpha_complex_from_file.filtration(f_simplex) <= alpha_complex_from_file.filtration()) { std::cout << " ( "; for (auto vertex : alpha_complex_from_file.simplex_vertex_range(f_simplex)) { std::cout << vertex << " "; } std::cout << ") -> " << "[" << alpha_complex_from_file.filtration(f_simplex) << "] "; std::cout << std::endl; - } + //} } return 0; } diff --git a/src/Alpha_complex/example/CMakeLists.txt b/src/Alpha_complex/example/CMakeLists.txt index 10b87f04..24f3a9dc 100644 --- a/src/Alpha_complex/example/CMakeLists.txt +++ b/src/Alpha_complex/example/CMakeLists.txt @@ -1,6 +1,8 @@ cmake_minimum_required(VERSION 2.6) project(GUDHIAlphaShapesExample) +add_executable ( flat flat.cpp ) + # need CGAL 4.7 # cmake -DCGAL_DIR=~/workspace/CGAL-4.7-Ic-41 ../../.. if(CGAL_FOUND) diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex.h b/src/Alpha_complex/include/gudhi/Alpha_complex.h index 562b80c3..10b290b5 100644 --- a/src/Alpha_complex/include/gudhi/Alpha_complex.h +++ b/src/Alpha_complex/include/gudhi/Alpha_complex.h @@ -105,6 +105,7 @@ class Alpha_complex : public Simplex_tree<> { * the Alpha_complex. * * @param[in] off_file_name OFF file [path and] name. + * @param[in] max_alpha_square maximum for alpha square value. Default value is +\f$\infty\f$. */ Alpha_complex(const std::string& off_file_name, Filtration_value max_alpha_square = std::numeric_limits::infinity()) @@ -115,25 +116,24 @@ class Alpha_complex : public Simplex_tree<> { exit(-1); // ----- >> } triangulation_ = off_reader.get_complex(); - set_filtration(max_alpha_square); - init(); + init(max_alpha_square); } /** \brief Alpha_complex constructor from a Delaunay triangulation. * * @param[in] triangulation_ptr Pointer on a Delaunay triangulation. + * @param[in] max_alpha_square maximum for alpha square value. Default value is +\f$\infty\f$. 
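To make the constructor signatures documented above concrete, here is a minimal usage sketch in the spirit of Alpha_complex_from_off.cpp; the OFF file path and the alpha-square threshold are placeholders, and the CGAL::Epick_d kernel is the one used throughout these examples.

// Minimal sketch: build an Alpha_complex from an OFF file and print its size
// (the file path and threshold below are illustrative placeholders).
#include <gudhi/Alpha_complex.h>
#include <CGAL/Epick_d.h>
#include <iostream>
#include <string>

int main() {
  typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Kernel;
  std::string off_file_name("../../data/points/alphacomplexdoc.off");  // placeholder path
  double max_alpha_square = 60.0;                                      // placeholder threshold

  // OFF file constructor: reads the points, triangulates them and keeps
  // simplices whose alpha square value does not exceed max_alpha_square.
  Gudhi::alphacomplex::Alpha_complex<Kernel> alpha_complex_from_file(off_file_name, max_alpha_square);

  std::cout << "Alpha complex is of dimension " << alpha_complex_from_file.dimension()
            << " - " << alpha_complex_from_file.num_simplices() << " simplices - "
            << alpha_complex_from_file.num_vertices() << " vertices." << std::endl;
  return 0;
}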
*/ Alpha_complex(Delaunay_triangulation* triangulation_ptr, Filtration_value max_alpha_square = std::numeric_limits::infinity()) : triangulation_(triangulation_ptr) { - set_filtration(max_alpha_square); - init(); + init(max_alpha_square); } /** \brief Alpha_complex constructor from a list of points. * - * @param[in] dimension Dimension of points to be inserted. * @param[in] points Range of points to triangulate. Points must be in Kernel::Point_d + * @param[in] max_alpha_square maximum for alpha square value. Default value is +\f$\infty\f$. * * The type InputPointRange must be a range for which std::begin and * std::end return input iterators on a Kernel::Point_d. @@ -155,8 +155,7 @@ class Alpha_complex : public Simplex_tree<> { std::cerr << "Alpha_complex - insertion failed " << inserted << " != " << (last -first) << std::endl; exit(-1); // ----- >> } - set_filtration(max_alpha_square); - init(); + init(max_alpha_square); } /** \brief Alpha_complex destructor from a Delaunay triangulation. @@ -180,12 +179,14 @@ class Alpha_complex : public Simplex_tree<> { private: /** \brief Initialize the Alpha_complex from the Delaunay triangulation. * + * @param[in] max_alpha_square maximum for alpha square value. + * * @warning Delaunay triangulation must be already constructed with at least one vertex and dimension must be more * than 0. * * Initialization can be launched once. */ - void init() { + void init(Filtration_value max_alpha_square) { if (triangulation_ == nullptr) { std::cerr << "Alpha_complex init - Cannot init from a NULL triangulation" << std::endl; return; // ----- >> @@ -287,6 +288,13 @@ class Alpha_complex : public Simplex_tree<> { } } // -------------------------------------------------------------------------------------------- + + // -------------------------------------------------------------------------------------------- + // As Alpha value is an approximation, we have to make filtration non decreasing while increasing the dimension + make_filtration_non_decreasing(); + // Remove all simplices that have a filtration value greater than max_alpha_square + prune_above_filtration(max_alpha_square); + // -------------------------------------------------------------------------------------------- } template diff --git a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp index f64a8ea9..2912019d 100644 --- a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp +++ b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp @@ -88,20 +88,9 @@ BOOST_AUTO_TEST_CASE(ALPHA_DOC_OFF_file_filtered) { std::cout << "alpha_complex_from_file.num_vertices()=" << alpha_complex_from_file.num_vertices() << std::endl; BOOST_CHECK(alpha_complex_from_file.num_vertices() == NUMBER_OF_VERTICES); - const int NUMBER_OF_SIMPLICES = 25; + const int NUMBER_OF_SIMPLICES = 23; std::cout << "alpha_complex_from_file.num_simplices()=" << alpha_complex_from_file.num_simplices() << std::endl; BOOST_CHECK(alpha_complex_from_file.num_simplices() == NUMBER_OF_SIMPLICES); - - int num_filtered_simplices = 0; - for (auto f_simplex : alpha_complex_from_file.filtration_simplex_range()) { - if (alpha_complex_from_file.filtration(f_simplex) <= alpha_complex_from_file.filtration()) { - num_filtered_simplices++; - } - } - const int NUMBER_OF_FILTERED_SIMPLICES = 23; - std::cout << "num_filtered_simplices=" << num_filtered_simplices << std::endl; - BOOST_CHECK(num_filtered_simplices == NUMBER_OF_FILTERED_SIMPLICES); - } bool are_almost_the_same(float a, float b) { @@ -140,8 +129,7 @@ 
BOOST_AUTO_TEST_CASE(Alpha_complex_from_points) { // ---------------------------------------------------------------------------- // Init of an alpha complex from the list of points // ---------------------------------------------------------------------------- - double max_alpha_square_value = 1e10; - Gudhi::alphacomplex::Alpha_complex alpha_complex_from_points(points, max_alpha_square_value); + Gudhi::alphacomplex::Alpha_complex alpha_complex_from_points(points); std::cout << "========== Alpha_complex_from_points ==========" << std::endl; @@ -210,4 +198,42 @@ BOOST_AUTO_TEST_CASE(Alpha_complex_from_points) { BOOST_CHECK_THROW (alpha_complex_from_points.get_point(4), std::out_of_range); BOOST_CHECK_THROW (alpha_complex_from_points.get_point(-1), std::out_of_range); BOOST_CHECK_THROW (alpha_complex_from_points.get_point(1234), std::out_of_range); + + // Test after prune_above_filtration + alpha_complex_from_points.prune_above_filtration(0.6); + // Another way to check num_simplices + std::cout << "Iterator on alpha complex simplices in the filtration order, with [filtration value]:" << std::endl; + num_simplices = 0; + for (auto f_simplex : alpha_complex_from_points.filtration_simplex_range()) { + num_simplices++; + std::cout << " ( "; + for (auto vertex : alpha_complex_from_points.simplex_vertex_range(f_simplex)) { + std::cout << vertex << " "; + } + std::cout << ") -> " << "[" << alpha_complex_from_points.filtration(f_simplex) << "] "; + std::cout << std::endl; + } + BOOST_CHECK(num_simplices == 10); + std::cout << "alpha_complex_from_points.num_simplices()=" << alpha_complex_from_points.num_simplices() << std::endl; + BOOST_CHECK(alpha_complex_from_points.num_simplices() == 10); + + std::cout << "alpha_complex_from_points.dimension()=" << alpha_complex_from_points.dimension() << std::endl; + BOOST_CHECK(alpha_complex_from_points.dimension() == 4); + std::cout << "alpha_complex_from_points.num_vertices()=" << alpha_complex_from_points.num_vertices() << std::endl; + BOOST_CHECK(alpha_complex_from_points.num_vertices() == 4); + + for (auto f_simplex : alpha_complex_from_points.filtration_simplex_range()) { + switch (alpha_complex_from_points.dimension(f_simplex)) { + case 0: + BOOST_CHECK(are_almost_the_same(alpha_complex_from_points.filtration(f_simplex), 0.0)); + break; + case 1: + BOOST_CHECK(are_almost_the_same(alpha_complex_from_points.filtration(f_simplex), 1.0/2.0)); + break; + default: + BOOST_CHECK(false); // Shall not happen + break; + } + } + } diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree.h b/src/Simplex_tree/include/gudhi/Simplex_tree.h index 35d839e2..8c1beaef 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree.h @@ -39,6 +39,7 @@ #include #include #include // for greater<> +#include // for numeric_limits infinity namespace Gudhi { /** \defgroup simplex_tree Filtered Complexes @@ -1098,6 +1099,96 @@ class Simplex_tree { os << filtration(sh) << " \n"; } } + + public: + /** \brief Browse the simplex tree to ensure the filtration is not decreasing. + * @return The filtration modification information in order to trigger initialize_filtration. + * \warning initialize_filtration is launched again in case of filtration modification change. 
+ */ + bool make_filtration_non_decreasing() { + bool modified = false; + for (auto sh = root_.members().begin(); sh != root_.members().end(); ++sh) { + if (has_children(sh)) { + modified = modified || rec_make_filtration_non_decreasing(sh->second.children(), sh->second.filtration()); + } + } + if (modified) { + initialize_filtration(); + } + return modified; + } + + private: + /** \brief Recursively Browse the simplex tree to ensure the filtration is not decreasing. + * @param[in] sib Siblings to be parsed. + * @param[in] upper_filtration Upper level filtration value in the simplex tree. + * @return The filtration modification information in order to trigger initialize_filtration. + */ + bool rec_make_filtration_non_decreasing(Siblings * sib, Filtration_value upper_filtration) { + bool modified = false; + for (auto sh = sib->members().begin(); sh != sib->members().end(); ++sh) { + if (sh->second.filtration() < upper_filtration) { + // Store the filtration modification information + modified = true; + std::cout << "modified" << std::endl; + sh->second.assign_filtration(upper_filtration); + } + if (has_children(sh)) { + modified = modified || rec_make_filtration_non_decreasing(sh->second.children(), sh->second.filtration()); + } + } + // Make the modified information to be traced by upper call + return modified; + } + + public: + /** \brief Prune above filtration value given as parameter. + * @param[in] filtration Maximum threshold value. + * \warning threshold_ is set from filtration given as parameter. + * \warning The filtration must be valid. If the filtration has not been initialized yet, the method initializes it + * (i.e. order the simplices). If the complex has changed since the last time the filtration was initialized, please + * call `initialize_filtration()` to recompute it. + */ + void prune_above_filtration(Filtration_value filtration) { + threshold_ = filtration; + if (filtration != std::numeric_limits::infinity()) { + // Initialize filtration_vect_ if required + if (filtration_vect_.empty()) { + initialize_filtration(); + } + + // Loop in reverse mode until threshold is reached + auto f_simplex = filtration_vect_.rbegin(); + for (; f_simplex != filtration_vect_.rend() && ((*f_simplex)->second.filtration() > threshold_); f_simplex++) { + remove_maximal_simplex(*f_simplex); + } + // Do not forget to update filtration_vect_ - resize is enough + std::size_t new_size = filtration_vect_.size() - (f_simplex - filtration_vect_.rbegin()); + filtration_vect_.resize(new_size); + } + } + + private: + /** \brief Remove a maximal simplex. + * @param[in] sh Simplex handle on the maximal simplex to remove. + * \warning Exception std::invalid_argument is thrown in sh has children. + */ + void remove_maximal_simplex(Simplex_handle sh) { + // Guarantee the simplex is maximal + if (has_children(sh)) { + throw std::invalid_argument ("Simplex_tree::remove_maximal_simplex - argument is not a maximal simplex"); + } + // Simplex is a leaf, it means the child is the Siblings owning the leaf. 
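Taken together, the two new public functions are meant to be called back to back, as Alpha_complex::init() now does (make_filtration_non_decreasing() followed by prune_above_filtration(max_alpha_square)). Below is a minimal self-contained sketch of that usage on a bare Simplex_tree; the simplices and filtration values are made up for illustration and are not part of the patch:

    #include <gudhi/Simplex_tree.h>
    #include <iostream>
    #include <vector>

    int main() {
      Gudhi::Simplex_tree<> st;
      // Vertices at 0.0, edges at 2.0, then the triangle at 1.0: the triangle is
      // (wrongly) smaller than its edges, so the filtration is decreasing.
      st.insert_simplex_and_subfaces(std::vector<int>{0}, 0.0);
      st.insert_simplex_and_subfaces(std::vector<int>{1}, 0.0);
      st.insert_simplex_and_subfaces(std::vector<int>{2}, 0.0);
      st.insert_simplex_and_subfaces(std::vector<int>{0, 1}, 2.0);
      st.insert_simplex_and_subfaces(std::vector<int>{1, 2}, 2.0);
      st.insert_simplex_and_subfaces(std::vector<int>{0, 2}, 2.0);
      st.insert_simplex_and_subfaces(std::vector<int>{0, 1, 2}, 1.0);
      // Raises the triangle to 2.0 so that no simplex is below one of its faces;
      // at this revision the function re-runs initialize_filtration() itself when needed.
      st.make_filtration_non_decreasing();
      // Removes every simplex whose filtration value exceeds the threshold:
      // here the three vertices (value 0.0) are expected to be the only survivors.
      st.prune_above_filtration(1.5);
      for (auto sh : st.filtration_simplex_range()) {
        for (auto vertex : st.simplex_vertex_range(sh)) std::cout << vertex << " ";
        std::cout << "-> " << st.filtration(sh) << "\n";
      }
      return 0;
    }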
+ Siblings* child = sh->second.children(); + if (child->size() > 1) { + // Not alone, just remove it from members + child->members().erase(sh->first); + } else { + // Sibling is emptied : must be deleted, and its parent must point on his own Sibling + child->oncles()->members().at(child->parent()).assign_children(child->oncles()); + delete child; + } + } private: Vertex_handle null_vertex_; diff --git a/src/common/include/gudhi/Delaunay_triangulation_off_io.h b/src/common/include/gudhi/Delaunay_triangulation_off_io.h index 47066a94..4d26bb71 100644 --- a/src/common/include/gudhi/Delaunay_triangulation_off_io.h +++ b/src/common/include/gudhi/Delaunay_triangulation_off_io.h @@ -19,8 +19,8 @@ * You should have received a copy of the GNU General Public License * along with this program. If not, see . */ -#ifndef SRC_ALPHA_SHAPES_INCLUDE_GUDHI_ALPHA_SHAPES_DELAUNAY_TRIANGULATION_OFF_IO_H_ -#define SRC_ALPHA_SHAPES_INCLUDE_GUDHI_ALPHA_SHAPES_DELAUNAY_TRIANGULATION_OFF_IO_H_ +#ifndef DELAUNAY_TRIANGULATION_OFF_IO_H_ +#define DELAUNAY_TRIANGULATION_OFF_IO_H_ #include #include @@ -256,7 +256,6 @@ class Delaunay_triangulation_off_writer { // no endl on next line - don't know why... stream << complex_ptr->current_dimension() << " " << complex_ptr->number_of_vertices() << " " << complex_ptr->number_of_finite_full_cells() << " 0"; - } // bimap to retrieve vertex handles from points and vice versa @@ -305,4 +304,4 @@ class Delaunay_triangulation_off_writer { } // namespace Gudhi -#endif // SRC_ALPHA_SHAPES_INCLUDE_GUDHI_ALPHA_SHAPES_DELAUNAY_TRIANGULATION_OFF_IO_H_ +#endif // DELAUNAY_TRIANGULATION_OFF_IO_H_ diff --git a/src/common/test/dtoffrw_alphashapedoc_result.off b/src/common/test/dtoffrw_alphashapedoc_result.off index 13c255c6..03b7ca75 100644 --- a/src/common/test/dtoffrw_alphashapedoc_result.off +++ b/src/common/test/dtoffrw_alphashapedoc_result.off @@ -7,9 +7,9 @@ nOFF 0 14 2 19 9 17 -3 1 2 3 -3 4 3 2 -3 5 1 3 -3 5 3 7 -3 7 3 4 -3 6 5 7 +3 0 1 2 +3 3 2 1 +3 4 0 2 +3 4 2 6 +3 6 2 3 +3 5 4 6 -- cgit v1.2.3 From c972b77524faec5d6f297d442539f65b9351654e Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Fri, 13 Nov 2015 16:41:12 +0000 Subject: Utils.h -> Debug_utils.h More verbose in debug mode (use NDEBUG instead of DEBUG_TRACES) GUDHI_CHECK function to throw in debug or ignore in release mode git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/alphashapes@911 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 250dc0c0f5146f0b9e3fce0e9a8ca0da6af7cf98 --- CMakeLists.txt | 21 +++-- src/Alpha_complex/example/CMakeLists.txt | 4 - src/Alpha_complex/include/gudhi/Alpha_complex.h | 96 +++++++++------------- src/Alpha_complex/test/CMakeLists.txt | 4 - src/CMakeLists.txt | 13 ++- .../policies/Link_condition_valid_contraction.h | 2 +- src/Contraction/include/gudhi/Edge_contraction.h | 2 +- .../include/gudhi/Skeleton_blocker_contractor.h | 2 +- src/Simplex_tree/include/gudhi/Simplex_tree.h | 40 ++++----- .../include/gudhi/Skeleton_blocker.h | 2 +- .../Skeleton_blocker_sub_complex.h | 2 +- .../Skeleton_blockers_simplices_iterators.h | 2 +- .../include/gudhi/Skeleton_blocker_complex.h | 2 +- .../gudhi/Skeleton_blocker_geometric_complex.h | 2 +- .../include/gudhi/Skeleton_blocker_link_complex.h | 2 +- .../test/TestSkeletonBlockerComplex.cpp | 2 +- .../example/Delaunay_triangulation_off_rw.cpp | 5 ++ src/common/include/gudhi/Debug_utils.h | 53 ++++++++++++ .../include/gudhi/Delaunay_triangulation_off_io.h | 19 ++--- src/common/include/gudhi/Utils.h | 46 ----------- 20 files changed, 162 
insertions(+), 159 deletions(-) create mode 100644 src/common/include/gudhi/Debug_utils.h delete mode 100644 src/common/include/gudhi/Utils.h (limited to 'src/common') diff --git a/CMakeLists.txt b/CMakeLists.txt index 460196d7..b7fb4540 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -2,17 +2,26 @@ cmake_minimum_required(VERSION 2.6) project(GUDHIdev) include(CMakeGUDHIVersion.txt) -# Generate GUDHI official version file -configure_file(GUDHIVersion.cmake.in "${PROJECT_BINARY_DIR}/GUDHIVersion.cmake" @ONLY) -find_package(Boost REQUIRED COMPONENTS system filesystem unit_test_framework chrono timer program_options thread REQUIRED) +if (NOT CMAKE_BUILD_TYPE) + # Set default build type to Release + set(CMAKE_BUILD_TYPE "Release") +endif() + +if (CMAKE_BUILD_TYPE MATCHES Debug) + # For programs to be more verbose + add_definitions(-DNDEBUG) +endif() + +enable_testing() set(CMAKE_PREFIX_PATH "${CMAKE_SOURCE_DIR}/src/cmake/modules/") -set(CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/src/cmake/modules/") -message("CMAKE_PREFIX_PATH = ${CMAKE_PREFIX_PATH}") message("CMAKE_MODULE_PATH = ${CMAKE_MODULE_PATH}") -enable_testing() +# Generate GUDHI official version file +configure_file(GUDHIVersion.cmake.in "${PROJECT_BINARY_DIR}/GUDHIVersion.cmake" @ONLY) + +find_package(Boost REQUIRED COMPONENTS system filesystem unit_test_framework chrono timer program_options thread REQUIRED) if(MSVC) # Turn off some VC++ warnings diff --git a/src/Alpha_complex/example/CMakeLists.txt b/src/Alpha_complex/example/CMakeLists.txt index 24f3a9dc..47e42b72 100644 --- a/src/Alpha_complex/example/CMakeLists.txt +++ b/src/Alpha_complex/example/CMakeLists.txt @@ -29,10 +29,6 @@ if(CGAL_FOUND) if (EIGEN3_FOUND) message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") include( ${EIGEN3_USE_FILE} ) - if (CMAKE_BUILD_TYPE MATCHES Debug) - # For programs to be more verbose - add_definitions(-DDEBUG_TRACES) - endif() add_executable ( alphaoffreader Alpha_complex_from_off.cpp ) target_link_libraries(alphaoffreader ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex.h b/src/Alpha_complex/include/gudhi/Alpha_complex.h index 10b290b5..2cc93a0a 100644 --- a/src/Alpha_complex/include/gudhi/Alpha_complex.h +++ b/src/Alpha_complex/include/gudhi/Alpha_complex.h @@ -26,6 +26,7 @@ // to construct a simplex_tree from Delaunay_triangulation #include #include +#include #include #include // isnan, fmax @@ -39,6 +40,7 @@ #include // NaN #include #include // std::pair +#include namespace Gudhi { @@ -112,7 +114,7 @@ class Alpha_complex : public Simplex_tree<> { : triangulation_(nullptr) { Gudhi::Delaunay_triangulation_off_reader off_reader(off_file_name); if (!off_reader.is_valid()) { - std::cerr << "Alpha_complex - Unable to read file " << off_file_name << std::endl; + std::cerr << "Alpha_complex - Unable to read file " << off_file_name; exit(-1); // ----- >> } triangulation_ = off_reader.get_complex(); @@ -137,6 +139,8 @@ class Alpha_complex : public Simplex_tree<> { * * The type InputPointRange must be a range for which std::begin and * std::end return input iterators on a Kernel::Point_d. + * \warning In debug mode, the exception std::invalid_argument is thrown if an empty input point range is passed as + * argument. 
*/ template Alpha_complex(const InputPointRange& points, @@ -144,18 +148,24 @@ class Alpha_complex : public Simplex_tree<> { : triangulation_(nullptr) { auto first = std::begin(points); auto last = std::end(points); - // point_dimension function initialization - Point_Dimension point_dimension = kernel_.point_dimension_d_object(); + + GUDHI_CHECK((first == last), + std::invalid_argument ("Alpha_complex::Alpha_complex(InputPointRange) - Empty input point range")); + + if (first != last) { + // point_dimension function initialization + Point_Dimension point_dimension = kernel_.point_dimension_d_object(); - // Delaunay triangulation is point dimension minus one. - triangulation_ = new Delaunay_triangulation(point_dimension(*first) - 1); + // Delaunay triangulation is point dimension minus one. + triangulation_ = new Delaunay_triangulation(point_dimension(*first) - 1); - size_type inserted = triangulation_->insert(first, last); - if (inserted != (last -first)) { - std::cerr << "Alpha_complex - insertion failed " << inserted << " != " << (last -first) << std::endl; - exit(-1); // ----- >> + size_type inserted = triangulation_->insert(first, last); + if (inserted != (last -first)) { + std::cerr << "Alpha_complex - insertion failed " << inserted << " != " << (last -first); + exit(-1); // ----- >> + } + init(max_alpha_square); } - init(max_alpha_square); } /** \brief Alpha_complex destructor from a Delaunay triangulation. @@ -188,23 +198,25 @@ class Alpha_complex : public Simplex_tree<> { */ void init(Filtration_value max_alpha_square) { if (triangulation_ == nullptr) { - std::cerr << "Alpha_complex init - Cannot init from a NULL triangulation" << std::endl; + std::cerr << "Alpha_complex init - Cannot init from a NULL triangulation"; return; // ----- >> } if (triangulation_->number_of_vertices() < 1) { - std::cerr << "Alpha_complex init - Cannot init from a triangulation without vertices" << std::endl; + std::cerr << "Alpha_complex init - Cannot init from a triangulation without vertices"; return; // ----- >> } if (triangulation_->maximal_dimension() < 1) { - std::cerr << "Alpha_complex init - Cannot init from a zero-dimension triangulation" << std::endl; + std::cerr << "Alpha_complex init - Cannot init from a zero-dimension triangulation"; return; // ----- >> } if (num_vertices() > 0) { - std::cerr << "Alpha_complex init - Cannot init twice" << std::endl; + std::cerr << "Alpha_complex init - Cannot init twice"; return; // ----- >> } set_dimension(triangulation_->maximal_dimension()); + // set_filtration to +inf for prune_above_filtration to be done (if necessary) + set_filtration(std::numeric_limits::infinity()); // -------------------------------------------------------------------------------------------- // double map to retrieve simplex tree vertex handles from CGAL vertex iterator and vice versa @@ -213,9 +225,9 @@ class Alpha_complex : public Simplex_tree<> { // Loop on triangulation vertices list for (CGAL_vertex_iterator vit = triangulation_->vertices_begin(); vit != triangulation_->vertices_end(); ++vit) { if (!triangulation_->is_infinite(*vit)) { -#ifdef DEBUG_TRACES - std::cout << "Vertex insertion - " << vertex_handle << " -> " << vit->point() << std::endl; -#endif // DEBUG_TRACES + DBGMSG("Vertex insertion - ", vertex_handle); + DBGMSG(" -> ", vit->point()); + vertex_iterator_to_handle_.emplace(vit, vertex_handle); vertex_handle_to_iterator_.push_back(vit); vertex_handle++; @@ -227,21 +239,12 @@ class Alpha_complex : public Simplex_tree<> { // Simplex_tree construction from loop on 
triangulation finite full cells list for (auto cit = triangulation_->finite_full_cells_begin(); cit != triangulation_->finite_full_cells_end(); ++cit) { Vector_vertex vertexVector; -#ifdef DEBUG_TRACES - std::cout << "Simplex_tree insertion "; -#endif // DEBUG_TRACES for (auto vit = cit->vertices_begin(); vit != cit->vertices_end(); ++vit) { if (*vit != nullptr) { -#ifdef DEBUG_TRACES - std::cout << " " << vertex_iterator_to_handle_[*vit]; -#endif // DEBUG_TRACES // Vector of vertex construction for simplex_tree structure vertexVector.push_back(vertex_iterator_to_handle_[*vit]); } } -#ifdef DEBUG_TRACES - std::cout << std::endl; -#endif // DEBUG_TRACES // Insert each simplex and its subfaces in the simplex tree - filtration is NaN Simplex_result insert_result = insert_simplex_and_subfaces(vertexVector, std::numeric_limits::quiet_NaN()); @@ -256,18 +259,11 @@ class Alpha_complex : public Simplex_tree<> { int f_simplex_dim = dimension(f_simplex); if (decr_dim == f_simplex_dim) { Vector_of_CGAL_points pointVector; -#ifdef DEBUG_TRACES - std::cout << "Sigma of dim " << decr_dim << " is"; -#endif // DEBUG_TRACES + DBGMSG("Sigma of dim ", decr_dim); for (auto vertex : simplex_vertex_range(f_simplex)) { pointVector.push_back(get_point(vertex)); -#ifdef DEBUG_TRACES - std::cout << " " << vertex; -#endif // DEBUG_TRACES } -#ifdef DEBUG_TRACES - std::cout << std::endl; -#endif // DEBUG_TRACES + DBGCONT(simplex_vertex_range(f_simplex)); // ### If filt(Sigma) is NaN : filt(Sigma) = alpha(Sigma) if (isnan(filtration(f_simplex))) { Filtration_value alpha_complex_filtration = 0.0; @@ -279,9 +275,7 @@ class Alpha_complex : public Simplex_tree<> { alpha_complex_filtration = squared_radius(pointVector.begin(), pointVector.end()); } assign_filtration(f_simplex, alpha_complex_filtration); -#ifdef DEBUG_TRACES - std::cout << "filt(Sigma) is NaN : filt(Sigma) =" << filtration(f_simplex) << std::endl; -#endif // DEBUG_TRACES + DBGMSG("filt(Sigma) is NaN : filt(Sigma) =", filtration(f_simplex)); } propagate_alpha_filtration(f_simplex, decr_dim); } @@ -301,23 +295,16 @@ class Alpha_complex : public Simplex_tree<> { void propagate_alpha_filtration(Simplex_handle f_simplex, int decr_dim) { // ### Foreach Tau face of Sigma for (auto f_boundary : boundary_simplex_range(f_simplex)) { -#ifdef DEBUG_TRACES - std::cout << " | --------------------------------------------------\n"; - std::cout << " | Tau "; - for (auto vertex : simplex_vertex_range(f_boundary)) { - std::cout << vertex << " "; - } - std::cout << "is a face of Sigma\n"; - std::cout << " | isnan(filtration(Tau)=" << isnan(filtration(f_boundary)) << std::endl; -#endif // DEBUG_TRACES + DBG("------------- TAU -------------"); + DBGCONT(simplex_vertex_range(f_boundary)); + DBG("is a face of Sigma"); + DBGMSG("isnan(filtration(Tau)=", isnan(filtration(f_boundary))); // ### If filt(Tau) is not NaN if (!isnan(filtration(f_boundary))) { // ### filt(Tau) = fmin(filt(Tau), filt(Sigma)) Filtration_value alpha_complex_filtration = fmin(filtration(f_boundary), filtration(f_simplex)); assign_filtration(f_boundary, alpha_complex_filtration); -#ifdef DEBUG_TRACES - std::cout << " | filt(Tau) = fmin(filt(Tau), filt(Sigma)) = " << filtration(f_boundary) << std::endl; -#endif // DEBUG_TRACES + DBGMSG("filt(Tau) = fmin(filt(Tau), filt(Sigma)) = ", filtration(f_boundary)); // ### Else } else { // No need to compute is_gabriel for dimension <= 2 @@ -344,17 +331,14 @@ class Alpha_complex : public Simplex_tree<> { Is_Gabriel is_gabriel = kernel_.side_of_bounded_sphere_d_object(); 
bool is_gab = is_gabriel(pointVector.begin(), pointVector.end(), point_for_gabriel) != CGAL::ON_BOUNDED_SIDE; -#ifdef DEBUG_TRACES - std::cout << " | Tau is_gabriel(Sigma)=" << is_gab << " - vertexForGabriel=" << vertexForGabriel << std::endl; -#endif // DEBUG_TRACES + DBGMSG("Tau is_gabriel(Sigma)=", is_gab); + DBGMSG(" - vertexForGabriel=", vertexForGabriel); // ### If Tau is not Gabriel of Sigma if (false == is_gab) { // ### filt(Tau) = filt(Sigma) Filtration_value alpha_complex_filtration = filtration(f_simplex); assign_filtration(f_boundary, alpha_complex_filtration); -#ifdef DEBUG_TRACES - std::cout << " | filt(Tau) = filt(Sigma) = " << filtration(f_boundary) << std::endl; -#endif // DEBUG_TRACES + DBGMSG("filt(Tau) = filt(Sigma) = ", filtration(f_boundary)); } } } diff --git a/src/Alpha_complex/test/CMakeLists.txt b/src/Alpha_complex/test/CMakeLists.txt index 847581aa..fa24e1b1 100644 --- a/src/Alpha_complex/test/CMakeLists.txt +++ b/src/Alpha_complex/test/CMakeLists.txt @@ -14,10 +14,6 @@ if(CGAL_FOUND) message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") include( ${EIGEN3_USE_FILE} ) include_directories (BEFORE "../../include") - if (CMAKE_BUILD_TYPE MATCHES Debug) - # For programs to be more verbose - add_definitions(-DDEBUG_TRACES) - endif() add_executable ( AlphaComplexUT Alpha_complex_unit_test.cpp ) target_link_libraries(AlphaComplexUT ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index cd7f4991..0f946e3b 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -3,15 +3,22 @@ project(GUDHI) include("CMakeGUDHIVersion.txt") +if (NOT CMAKE_BUILD_TYPE) + # Set default build type to Release + set(CMAKE_BUILD_TYPE "Release") +endif() + +if (CMAKE_BUILD_TYPE MATCHES Debug) + # For programs to be more verbose + add_definitions(-DNDEBUG) +endif() + enable_testing() list(APPEND CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake/modules/") find_package(Boost REQUIRED COMPONENTS system filesystem program_options chrono timer REQUIRED) -if (NOT CMAKE_BUILD_TYPE) - set(CMAKE_BUILD_TYPE "Release") -endif() if(MSVC) SET (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4267 /wd4668 /wd4311 /wd4800 /wd4820 /wd4503 /wd4244 /wd4345 /wd4996 /wd4396 /wd4018") else() diff --git a/src/Contraction/include/gudhi/Contraction/policies/Link_condition_valid_contraction.h b/src/Contraction/include/gudhi/Contraction/policies/Link_condition_valid_contraction.h index 919df243..250bba27 100644 --- a/src/Contraction/include/gudhi/Contraction/policies/Link_condition_valid_contraction.h +++ b/src/Contraction/include/gudhi/Contraction/policies/Link_condition_valid_contraction.h @@ -23,8 +23,8 @@ #ifndef CONTRACTION_POLICIES_LINK_CONDITION_VALID_CONTRACTION_H_ #define CONTRACTION_POLICIES_LINK_CONDITION_VALID_CONTRACTION_H_ -#include #include +#include namespace Gudhi { diff --git a/src/Contraction/include/gudhi/Edge_contraction.h b/src/Contraction/include/gudhi/Edge_contraction.h index 349bb7d8..011ca9bd 100644 --- a/src/Contraction/include/gudhi/Edge_contraction.h +++ b/src/Contraction/include/gudhi/Edge_contraction.h @@ -30,7 +30,7 @@ #include #include #include -#include +#include namespace Gudhi { diff --git a/src/Contraction/include/gudhi/Skeleton_blocker_contractor.h b/src/Contraction/include/gudhi/Skeleton_blocker_contractor.h index 2759b540..47d798c0 100644 --- a/src/Contraction/include/gudhi/Skeleton_blocker_contractor.h +++ b/src/Contraction/include/gudhi/Skeleton_blocker_contractor.h @@ -37,7 +37,7 @@ #include #include -#include 
+#include #include diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree.h b/src/Simplex_tree/include/gudhi/Simplex_tree.h index 8c1beaef..dc8591fc 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree.h @@ -30,6 +30,7 @@ #include #include +#include #include #include @@ -39,7 +40,8 @@ #include #include #include // for greater<> -#include // for numeric_limits infinity +#include +#include // Inf namespace Gudhi { /** \defgroup simplex_tree Filtered Complexes @@ -717,7 +719,7 @@ class Simplex_tree { } else if (the_simplex.size() == 1) { // When reaching the end of recursivity, vector of simplices shall be empty and filled on back recursive if ((to_be_inserted.size() != 0) || (to_be_propagated.size() != 0)) { - std::cerr << "Simplex_tree::rec_insert_simplex_and_subfaces - Error vector not empty" << std::endl; + std::cerr << "Simplex_tree::rec_insert_simplex_and_subfaces - Error vector not empty"; exit(-1); } std::vector first_simplex(1, the_simplex.back()); @@ -726,7 +728,7 @@ class Simplex_tree { insert_result = insert_vertex_vector(first_simplex, filtration); } else { - std::cerr << "Simplex_tree::rec_insert_simplex_and_subfaces - Recursivity error" << std::endl; + std::cerr << "Simplex_tree::rec_insert_simplex_and_subfaces - Recursivity error"; exit(-1); } return insert_result; @@ -1099,22 +1101,23 @@ class Simplex_tree { os << filtration(sh) << " \n"; } } - + public: /** \brief Browse the simplex tree to ensure the filtration is not decreasing. - * @return The filtration modification information in order to trigger initialize_filtration. - * \warning initialize_filtration is launched again in case of filtration modification change. + * The simplex tree is browsed starting from the root until the leaf, and the filtration values are set with their + * parent value (increased), in case the values are decreasing. + * @return The filtration modification information. + * \warning Some simplex tree functions require the filtration to be valid. `make_filtration_non_decreasing()` + * function is not launching `initialize_filtration()` but returns the filtration modification information. If the + * complex has changed , please call `initialize_filtration()` to recompute it. */ bool make_filtration_non_decreasing() { bool modified = false; for (auto sh = root_.members().begin(); sh != root_.members().end(); ++sh) { if (has_children(sh)) { - modified = modified || rec_make_filtration_non_decreasing(sh->second.children(), sh->second.filtration()); + modified |= rec_make_filtration_non_decreasing(sh->second.children(), sh->second.filtration()); } } - if (modified) { - initialize_filtration(); - } return modified; } @@ -1134,7 +1137,7 @@ class Simplex_tree { sh->second.assign_filtration(upper_filtration); } if (has_children(sh)) { - modified = modified || rec_make_filtration_non_decreasing(sh->second.children(), sh->second.filtration()); + modified |= rec_make_filtration_non_decreasing(sh->second.children(), sh->second.filtration()); } } // Make the modified information to be traced by upper call @@ -1150,8 +1153,8 @@ class Simplex_tree { * call `initialize_filtration()` to recompute it. 
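The switch from `modified = modified || rec_make_filtration_non_decreasing(...)` to `modified |= rec_make_filtration_non_decreasing(...)` in the hunk above matters because `||` short-circuits: once one subtree reports a modification, the remaining recursive calls would be skipped entirely and those subtrees would never be repaired. A tiny self-contained illustration of the difference; the function name is a stand-in, not GUDHI code:

    #include <iostream>

    static int calls = 0;
    bool repair_subtree() { ++calls; return false; }  // stand-in for the recursive repair

    int main() {
      bool modified = true;
      modified = modified || repair_subtree();  // short-circuits: repair_subtree() is never called
      modified |= repair_subtree();             // always evaluated: the subtree does get visited
      std::cout << "repair_subtree() called " << calls << " time(s)\n";  // prints 1
      return 0;
    }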
*/ void prune_above_filtration(Filtration_value filtration) { - threshold_ = filtration; - if (filtration != std::numeric_limits::infinity()) { + if (filtration < threshold_) { + threshold_ = filtration; // Initialize filtration_vect_ if required if (filtration_vect_.empty()) { initialize_filtration(); @@ -1168,16 +1171,15 @@ class Simplex_tree { } } - private: /** \brief Remove a maximal simplex. * @param[in] sh Simplex handle on the maximal simplex to remove. - * \warning Exception std::invalid_argument is thrown in sh has children. + * \warning In debug mode, the exception std::invalid_argument is thrown if sh has children. */ void remove_maximal_simplex(Simplex_handle sh) { - // Guarantee the simplex is maximal - if (has_children(sh)) { - throw std::invalid_argument ("Simplex_tree::remove_maximal_simplex - argument is not a maximal simplex"); - } + // Guarantee the simplex has no children + GUDHI_CHECK(has_children(sh), + std::invalid_argument ("Simplex_tree::remove_maximal_simplex - argument is not a maximal simplex")); + // Simplex is a leaf, it means the child is the Siblings owning the leaf. Siblings* child = sh->second.children(); if (child->size() > 1) { diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h index 3be480fd..20df93eb 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h @@ -31,7 +31,7 @@ #include #include -#include // xxx +#include namespace Gudhi { diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_sub_complex.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_sub_complex.h index b33b9606..1b1fe3f0 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_sub_complex.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/Skeleton_blocker_sub_complex.h @@ -25,7 +25,7 @@ #include #include -#include +#include #include #include diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_simplices_iterators.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_simplices_iterators.h index 4d71b3f5..27411fc1 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_simplices_iterators.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker/iterators/Skeleton_blockers_simplices_iterators.h @@ -25,7 +25,7 @@ #include #include #include -#include +#include #include diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_complex.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_complex.h index d26d12b0..dc2d9e29 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_complex.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_complex.h @@ -33,7 +33,7 @@ #include #include -#include +#include #include #include diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_geometric_complex.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_geometric_complex.h index b8395251..3725b7a2 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_geometric_complex.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_geometric_complex.h @@ -22,9 +22,9 @@ #ifndef SKELETON_BLOCKER_GEOMETRIC_COMPLEX_H_ #define SKELETON_BLOCKER_GEOMETRIC_COMPLEX_H_ -#include #include #include +#include namespace Gudhi { diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_link_complex.h 
b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_link_complex.h index 95d8fa97..3d0039a1 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_link_complex.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker_link_complex.h @@ -22,8 +22,8 @@ #ifndef SKELETON_BLOCKER_LINK_COMPLEX_H_ #define SKELETON_BLOCKER_LINK_COMPLEX_H_ -#include #include +#include namespace Gudhi { diff --git a/src/Skeleton_blocker/test/TestSkeletonBlockerComplex.cpp b/src/Skeleton_blocker/test/TestSkeletonBlockerComplex.cpp index 319e3c43..d56a5c91 100644 --- a/src/Skeleton_blocker/test/TestSkeletonBlockerComplex.cpp +++ b/src/Skeleton_blocker/test/TestSkeletonBlockerComplex.cpp @@ -24,7 +24,7 @@ #include #include #include -#include "gudhi/Utils.h" +#include "gudhi/Debug_utils.h" #include "gudhi/Test.h" #include "gudhi/Skeleton_blocker.h" //#include "gudhi/Skeleton_blocker_link_complex.h" diff --git a/src/common/example/Delaunay_triangulation_off_rw.cpp b/src/common/example/Delaunay_triangulation_off_rw.cpp index 75e4fafb..12accd10 100644 --- a/src/common/example/Delaunay_triangulation_off_rw.cpp +++ b/src/common/example/Delaunay_triangulation_off_rw.cpp @@ -24,6 +24,11 @@ int main(int argc, char **argv) { usage(argv[0]); } + +#ifdef GUDHI_NDEBUG + std::cout << "pouet pouet !!" << std::endl; +#endif + std::string offInputFile(argv[1]); // Read the OFF file (input file name given as parameter) and triangulates points Gudhi::Delaunay_triangulation_off_reader off_reader(offInputFile); diff --git a/src/common/include/gudhi/Debug_utils.h b/src/common/include/gudhi/Debug_utils.h new file mode 100644 index 00000000..c479d435 --- /dev/null +++ b/src/common/include/gudhi/Debug_utils.h @@ -0,0 +1,53 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): David Salinas + * + * Copyright (C) 2014 INRIA Sophia Antipolis-Mediterranee (France) + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ +#ifndef DEBUG_UTILS_H_ +#define DEBUG_UTILS_H_ + +#include + +#ifdef NDEBUG + // GUDHI_NDEBUG is the Gudhi official flag for debug mode. 
+ #define GUDHI_NDEBUG +#endif + +#define PRINT(a) std::cerr << #a << ": " << (a) << " (DISP)" << std::endl + +#ifdef GUDHI_NDEBUG + #define DBG(a) std::cout << "DBG: " << (a) << std::endl + #define DBGMSG(a, b) std::cout << "DBG: " << a << b << std::endl + #define DBGVALUE(a) std::cout << "DBG: " << #a << ": " << a << std::endl + #define DBGCONT(a) std::cout << "DBG: container " << #a << " -> "; for (auto x : a) std::cout << x << ","; std::cout << std::endl +#else + #define DBG(a) (void) 0 + #define DBGMSG(a, b) (void) 0 + #define DBGVALUE(a) (void) 0 + #define DBGCONT(a) (void) 0 +#endif + +// GUDHI_CHECK throw an exception on condition in debug mode, but does nothing in release mode +#ifdef GUDHI_NDEBUG + #define GUDHI_CHECK(cond, excpt) if (cond) throw excpt +#else + #define GUDHI_CHECK(cond, excpt) (void) 0 +#endif + +#endif // DEBUG_UTILS_H_ diff --git a/src/common/include/gudhi/Delaunay_triangulation_off_io.h b/src/common/include/gudhi/Delaunay_triangulation_off_io.h index 4d26bb71..dfa70e40 100644 --- a/src/common/include/gudhi/Delaunay_triangulation_off_io.h +++ b/src/common/include/gudhi/Delaunay_triangulation_off_io.h @@ -22,6 +22,8 @@ #ifndef DELAUNAY_TRIANGULATION_OFF_IO_H_ #define DELAUNAY_TRIANGULATION_OFF_IO_H_ +#include + #include #include #include @@ -64,10 +66,10 @@ class Delaunay_triangulation_off_visitor_reader { * @param[in] num_edges number of edges in the OFF file (not used). */ void init(int dim, int num_vertices, int num_faces, int num_edges) { -#ifdef DEBUG_TRACES - std::cout << "Delaunay_triangulation_off_visitor_reader::init - dim=" << dim << " - num_vertices=" << - num_vertices << " - num_faces=" << num_faces << " - num_edges=" << num_edges << std::endl; -#endif // DEBUG_TRACES + DBGMSG("Delaunay_triangulation_off_visitor_reader::init - dim=", dim); + DBGMSG(" - num_vertices=", num_vertices); + DBGMSG(" - num_faces=", num_faces); + DBGMSG(" - num_edges=", num_edges); if (num_faces > 0) { std::cerr << "Delaunay_triangulation_off_visitor_reader::init faces are not taken into account from OFF " << "file for Delaunay triangulation - faces are computed." << std::endl; @@ -88,13 +90,8 @@ class Delaunay_triangulation_off_visitor_reader { * @param[in] point vector of vertex coordinates. */ void point(const std::vector& point) { -#ifdef DEBUG_TRACES - std::cout << "Delaunay_triangulation_off_visitor_reader::point "; - for (auto coordinate : point) { - std::cout << coordinate << " | "; - } - std::cout << std::endl; -#endif // DEBUG_TRACES + DBG("Delaunay_triangulation_off_visitor_reader::point"); + DBGCONT(point); complex_->insert(Point(point.size(), point.begin(), point.end())); } diff --git a/src/common/include/gudhi/Utils.h b/src/common/include/gudhi/Utils.h deleted file mode 100644 index 43916f11..00000000 --- a/src/common/include/gudhi/Utils.h +++ /dev/null @@ -1,46 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): David Salinas - * - * Copyright (C) 2014 INRIA Sophia Antipolis-Mediterranee (France) - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
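The new GUDHI_CHECK macro takes the error condition as its first argument, as in the empty-point-range and remove_maximal_simplex call sites earlier in this commit: the exception is thrown when the condition holds in builds where the check is compiled in, and the whole statement expands to nothing otherwise. A small sketch of the same pattern; the helper name check_not_empty is hypothetical and not part of the patch:

    #include <gudhi/Debug_utils.h>
    #include <iterator>
    #include <stdexcept>
    #include <vector>

    // Hypothetical helper: reject an empty range when the checks are compiled in.
    template <class Range>
    void check_not_empty(const Range& range) {
      GUDHI_CHECK(std::begin(range) == std::end(range),
                  std::invalid_argument("check_not_empty - empty input range"));
    }

    int main() {
      std::vector<int> values;   // empty on purpose
      check_not_empty(values);   // throws std::invalid_argument if the check is active, no-op otherwise
      return 0;
    }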
- * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ -#ifndef UTILS_H_ -#define UTILS_H_ - - -#define PRINT(a) std::cerr << #a << ": " << (a) << " (DISP)" << std::endl - -// #define DBG_VERBOSE -#ifdef DBG_VERBOSE -#define DBG(a) std::cerr << "DBG: " << (a) << std::endl -#define DBGMSG(a, b) std::cerr << "DBG: " << a << b << std::endl -#define DBGVALUE(a) std::cerr << "DBG: " << #a << ": " << a << std::endl -#define DBGCONT(a) std::cerr << "DBG: container " << #a << " -> "; for (auto x : a) std::cerr << x << ","; std::cerr << -std::endl -#else -// #define DBG(a) a -// #define DBGMSG(a,b) b -// #define DBGVALUE(a) a -// #define DBGCONT(a) a -#define DBG(a) -#define DBGMSG(a, b) -#define DBGVALUE(a) -#define DBGCONT(a) -#endif - -#endif // UTILS_H_ -- cgit v1.2.3 From fe78c077b00e91b3d316f1d64541008a615664e7 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Wed, 18 Nov 2015 12:03:43 +0000 Subject: rev911 rollback code review fix fix prune_above_filtration removing every simplices git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/alphashapes@927 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 1660da972bd5198214a0f0384be9be66a5312e74 --- CMakeLists.txt | 5 -- src/Alpha_complex/example/CMakeLists.txt | 6 +- src/Alpha_complex/include/gudhi/Alpha_complex.h | 67 ++++++++++++++++------ src/CMakeLists.txt | 5 -- src/Simplex_tree/include/gudhi/Simplex_tree.h | 10 ++-- src/common/example/CMakeLists.txt | 4 ++ .../example/Delaunay_triangulation_off_rw.cpp | 5 -- src/common/include/gudhi/Debug_utils.h | 24 ++++---- .../include/gudhi/Delaunay_triangulation_off_io.h | 19 +++--- 9 files changed, 86 insertions(+), 59 deletions(-) (limited to 'src/common') diff --git a/CMakeLists.txt b/CMakeLists.txt index b7fb4540..197b6f95 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -8,11 +8,6 @@ if (NOT CMAKE_BUILD_TYPE) set(CMAKE_BUILD_TYPE "Release") endif() -if (CMAKE_BUILD_TYPE MATCHES Debug) - # For programs to be more verbose - add_definitions(-DNDEBUG) -endif() - enable_testing() set(CMAKE_PREFIX_PATH "${CMAKE_SOURCE_DIR}/src/cmake/modules/") diff --git a/src/Alpha_complex/example/CMakeLists.txt b/src/Alpha_complex/example/CMakeLists.txt index 47e42b72..10b87f04 100644 --- a/src/Alpha_complex/example/CMakeLists.txt +++ b/src/Alpha_complex/example/CMakeLists.txt @@ -1,8 +1,6 @@ cmake_minimum_required(VERSION 2.6) project(GUDHIAlphaShapesExample) -add_executable ( flat flat.cpp ) - # need CGAL 4.7 # cmake -DCGAL_DIR=~/workspace/CGAL-4.7-Ic-41 ../../.. 
if(CGAL_FOUND) @@ -29,6 +27,10 @@ if(CGAL_FOUND) if (EIGEN3_FOUND) message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") include( ${EIGEN3_USE_FILE} ) + if (CMAKE_BUILD_TYPE MATCHES Debug) + # For programs to be more verbose + add_definitions(-DDEBUG_TRACES) + endif() add_executable ( alphaoffreader Alpha_complex_from_off.cpp ) target_link_libraries(alphaoffreader ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex.h b/src/Alpha_complex/include/gudhi/Alpha_complex.h index 2cc93a0a..6adfa2e6 100644 --- a/src/Alpha_complex/include/gudhi/Alpha_complex.h +++ b/src/Alpha_complex/include/gudhi/Alpha_complex.h @@ -114,7 +114,7 @@ class Alpha_complex : public Simplex_tree<> { : triangulation_(nullptr) { Gudhi::Delaunay_triangulation_off_reader off_reader(off_file_name); if (!off_reader.is_valid()) { - std::cerr << "Alpha_complex - Unable to read file " << off_file_name; + std::cerr << "Alpha_complex - Unable to read file " << off_file_name << "\n"; exit(-1); // ----- >> } triangulation_ = off_reader.get_complex(); @@ -161,7 +161,7 @@ class Alpha_complex : public Simplex_tree<> { size_type inserted = triangulation_->insert(first, last); if (inserted != (last -first)) { - std::cerr << "Alpha_complex - insertion failed " << inserted << " != " << (last -first); + std::cerr << "Alpha_complex - insertion failed " << inserted << " != " << (last -first) << "\n"; exit(-1); // ----- >> } init(max_alpha_square); @@ -198,19 +198,19 @@ class Alpha_complex : public Simplex_tree<> { */ void init(Filtration_value max_alpha_square) { if (triangulation_ == nullptr) { - std::cerr << "Alpha_complex init - Cannot init from a NULL triangulation"; + std::cerr << "Alpha_complex init - Cannot init from a NULL triangulation\n"; return; // ----- >> } if (triangulation_->number_of_vertices() < 1) { - std::cerr << "Alpha_complex init - Cannot init from a triangulation without vertices"; + std::cerr << "Alpha_complex init - Cannot init from a triangulation without vertices\n"; return; // ----- >> } if (triangulation_->maximal_dimension() < 1) { - std::cerr << "Alpha_complex init - Cannot init from a zero-dimension triangulation"; + std::cerr << "Alpha_complex init - Cannot init from a zero-dimension triangulation\n"; return; // ----- >> } if (num_vertices() > 0) { - std::cerr << "Alpha_complex init - Cannot init twice"; + std::cerr << "Alpha_complex init - Cannot init twice\n"; return; // ----- >> } @@ -225,8 +225,9 @@ class Alpha_complex : public Simplex_tree<> { // Loop on triangulation vertices list for (CGAL_vertex_iterator vit = triangulation_->vertices_begin(); vit != triangulation_->vertices_end(); ++vit) { if (!triangulation_->is_infinite(*vit)) { - DBGMSG("Vertex insertion - ", vertex_handle); - DBGMSG(" -> ", vit->point()); +#ifdef DEBUG_TRACES + std::cout << "Vertex insertion - " << vertex_handle << " -> " << vit->point() << std::endl; +#endif // DEBUG_TRACES vertex_iterator_to_handle_.emplace(vit, vertex_handle); vertex_handle_to_iterator_.push_back(vit); @@ -239,12 +240,21 @@ class Alpha_complex : public Simplex_tree<> { // Simplex_tree construction from loop on triangulation finite full cells list for (auto cit = triangulation_->finite_full_cells_begin(); cit != triangulation_->finite_full_cells_end(); ++cit) { Vector_vertex vertexVector; +#ifdef DEBUG_TRACES + std::cout << "Simplex_tree insertion "; +#endif // DEBUG_TRACES for (auto vit = cit->vertices_begin(); vit != cit->vertices_end(); ++vit) { if (*vit != nullptr) { +#ifdef DEBUG_TRACES + std::cout << " " << 
vertex_iterator_to_handle_[*vit]; +#endif // DEBUG_TRACES // Vector of vertex construction for simplex_tree structure vertexVector.push_back(vertex_iterator_to_handle_[*vit]); } } +#ifdef DEBUG_TRACES + std::cout << std::endl; +#endif // DEBUG_TRACES // Insert each simplex and its subfaces in the simplex tree - filtration is NaN Simplex_result insert_result = insert_simplex_and_subfaces(vertexVector, std::numeric_limits::quiet_NaN()); @@ -259,11 +269,18 @@ class Alpha_complex : public Simplex_tree<> { int f_simplex_dim = dimension(f_simplex); if (decr_dim == f_simplex_dim) { Vector_of_CGAL_points pointVector; - DBGMSG("Sigma of dim ", decr_dim); +#ifdef DEBUG_TRACES + std::cout << "Sigma of dim " << decr_dim << " is"; +#endif // DEBUG_TRACES for (auto vertex : simplex_vertex_range(f_simplex)) { pointVector.push_back(get_point(vertex)); +#ifdef DEBUG_TRACES + std::cout << " " << vertex; +#endif // DEBUG_TRACES } - DBGCONT(simplex_vertex_range(f_simplex)); +#ifdef DEBUG_TRACES + std::cout << std::endl; +#endif // DEBUG_TRACES // ### If filt(Sigma) is NaN : filt(Sigma) = alpha(Sigma) if (isnan(filtration(f_simplex))) { Filtration_value alpha_complex_filtration = 0.0; @@ -275,7 +292,9 @@ class Alpha_complex : public Simplex_tree<> { alpha_complex_filtration = squared_radius(pointVector.begin(), pointVector.end()); } assign_filtration(f_simplex, alpha_complex_filtration); - DBGMSG("filt(Sigma) is NaN : filt(Sigma) =", filtration(f_simplex)); +#ifdef DEBUG_TRACES + std::cout << "filt(Sigma) is NaN : filt(Sigma) =" << filtration(f_simplex) << std::endl; +#endif // DEBUG_TRACES } propagate_alpha_filtration(f_simplex, decr_dim); } @@ -295,16 +314,23 @@ class Alpha_complex : public Simplex_tree<> { void propagate_alpha_filtration(Simplex_handle f_simplex, int decr_dim) { // ### Foreach Tau face of Sigma for (auto f_boundary : boundary_simplex_range(f_simplex)) { - DBG("------------- TAU -------------"); - DBGCONT(simplex_vertex_range(f_boundary)); - DBG("is a face of Sigma"); - DBGMSG("isnan(filtration(Tau)=", isnan(filtration(f_boundary))); +#ifdef DEBUG_TRACES + std::cout << " | --------------------------------------------------\n"; + std::cout << " | Tau "; + for (auto vertex : simplex_vertex_range(f_boundary)) { + std::cout << vertex << " "; + } + std::cout << "is a face of Sigma\n"; + std::cout << " | isnan(filtration(Tau)=" << isnan(filtration(f_boundary)) << std::endl; +#endif // DEBUG_TRACES // ### If filt(Tau) is not NaN if (!isnan(filtration(f_boundary))) { // ### filt(Tau) = fmin(filt(Tau), filt(Sigma)) Filtration_value alpha_complex_filtration = fmin(filtration(f_boundary), filtration(f_simplex)); assign_filtration(f_boundary, alpha_complex_filtration); - DBGMSG("filt(Tau) = fmin(filt(Tau), filt(Sigma)) = ", filtration(f_boundary)); +#ifdef DEBUG_TRACES + std::cout << " | filt(Tau) = fmin(filt(Tau), filt(Sigma)) = " << filtration(f_boundary) << std::endl; +#endif // DEBUG_TRACES // ### Else } else { // No need to compute is_gabriel for dimension <= 2 @@ -331,14 +357,17 @@ class Alpha_complex : public Simplex_tree<> { Is_Gabriel is_gabriel = kernel_.side_of_bounded_sphere_d_object(); bool is_gab = is_gabriel(pointVector.begin(), pointVector.end(), point_for_gabriel) != CGAL::ON_BOUNDED_SIDE; - DBGMSG("Tau is_gabriel(Sigma)=", is_gab); - DBGMSG(" - vertexForGabriel=", vertexForGabriel); +#ifdef DEBUG_TRACES + std::cout << " | Tau is_gabriel(Sigma)=" << is_gab << " - vertexForGabriel=" << vertexForGabriel << std::endl; +#endif // DEBUG_TRACES // ### If Tau is not Gabriel of Sigma if (false 
== is_gab) { // ### filt(Tau) = filt(Sigma) Filtration_value alpha_complex_filtration = filtration(f_simplex); assign_filtration(f_boundary, alpha_complex_filtration); - DBGMSG("filt(Tau) = filt(Sigma) = ", filtration(f_boundary)); +#ifdef DEBUG_TRACES + std::cout << " | filt(Tau) = filt(Sigma) = " << filtration(f_boundary) << std::endl; +#endif // DEBUG_TRACES } } } diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 0f946e3b..9d1eac80 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -8,11 +8,6 @@ if (NOT CMAKE_BUILD_TYPE) set(CMAKE_BUILD_TYPE "Release") endif() -if (CMAKE_BUILD_TYPE MATCHES Debug) - # For programs to be more verbose - add_definitions(-DNDEBUG) -endif() - enable_testing() list(APPEND CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake/modules/") diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree.h b/src/Simplex_tree/include/gudhi/Simplex_tree.h index dc8591fc..9b4d6af6 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree.h @@ -1173,17 +1173,19 @@ class Simplex_tree { /** \brief Remove a maximal simplex. * @param[in] sh Simplex handle on the maximal simplex to remove. + * \pre Please check the simplex has no coface before removing it. * \warning In debug mode, the exception std::invalid_argument is thrown if sh has children. */ void remove_maximal_simplex(Simplex_handle sh) { // Guarantee the simplex has no children GUDHI_CHECK(has_children(sh), - std::invalid_argument ("Simplex_tree::remove_maximal_simplex - argument is not a maximal simplex")); - - // Simplex is a leaf, it means the child is the Siblings owning the leaf. + std::invalid_argument ("Simplex_tree::remove_maximal_simplex - argument has children")); + + // Simplex is a leaf, it means the child is the Siblings owning the leaf Siblings* child = sh->second.children(); - if (child->size() > 1) { + if ((child->size() > 1) || (child == root())) { // Not alone, just remove it from members + // Special case when child is the root of the simplex tree, just remove it from members child->members().erase(sh->first); } else { // Sibling is emptied : must be deleted, and its parent must point on his own Sibling diff --git a/src/common/example/CMakeLists.txt b/src/common/example/CMakeLists.txt index d29e31e7..089f0c04 100644 --- a/src/common/example/CMakeLists.txt +++ b/src/common/example/CMakeLists.txt @@ -26,6 +26,10 @@ if(CGAL_FOUND) if (EIGEN3_FOUND) message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") include( ${EIGEN3_USE_FILE} ) + if (CMAKE_BUILD_TYPE MATCHES Debug) + # For programs to be more verbose + add_definitions(-DDEBUG_TRACES) + endif() add_executable ( dtoffrw Delaunay_triangulation_off_rw.cpp ) target_link_libraries(dtoffrw ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) diff --git a/src/common/example/Delaunay_triangulation_off_rw.cpp b/src/common/example/Delaunay_triangulation_off_rw.cpp index 12accd10..75e4fafb 100644 --- a/src/common/example/Delaunay_triangulation_off_rw.cpp +++ b/src/common/example/Delaunay_triangulation_off_rw.cpp @@ -24,11 +24,6 @@ int main(int argc, char **argv) { usage(argv[0]); } - -#ifdef GUDHI_NDEBUG - std::cout << "pouet pouet !!" 
<< std::endl; -#endif - std::string offInputFile(argv[1]); // Read the OFF file (input file name given as parameter) and triangulates points Gudhi::Delaunay_triangulation_off_reader off_reader(offInputFile); diff --git a/src/common/include/gudhi/Debug_utils.h b/src/common/include/gudhi/Debug_utils.h index c479d435..48d61fef 100644 --- a/src/common/include/gudhi/Debug_utils.h +++ b/src/common/include/gudhi/Debug_utils.h @@ -24,14 +24,23 @@ #include -#ifdef NDEBUG - // GUDHI_NDEBUG is the Gudhi official flag for debug mode. - #define GUDHI_NDEBUG +#ifndef NDEBUG + // GUDHI_DEBUG is the Gudhi official flag for debug mode. + #define GUDHI_DEBUG +#endif + +// GUDHI_CHECK throw an exception on condition in debug mode, but does nothing in release mode +// Could assert in release mode, but cmake sets NDEBUG (for "NO DEBUG") in this mode, means assert does nothing. +#ifdef GUDHI_DEBUG + #define GUDHI_CHECK(cond, excpt) if (cond) throw excpt +#else + #define GUDHI_CHECK(cond, excpt) (void) 0 #endif #define PRINT(a) std::cerr << #a << ": " << (a) << " (DISP)" << std::endl -#ifdef GUDHI_NDEBUG +// #define DBG_VERBOSE +#ifdef DBG_VERBOSE #define DBG(a) std::cout << "DBG: " << (a) << std::endl #define DBGMSG(a, b) std::cout << "DBG: " << a << b << std::endl #define DBGVALUE(a) std::cout << "DBG: " << #a << ": " << a << std::endl @@ -43,11 +52,4 @@ #define DBGCONT(a) (void) 0 #endif -// GUDHI_CHECK throw an exception on condition in debug mode, but does nothing in release mode -#ifdef GUDHI_NDEBUG - #define GUDHI_CHECK(cond, excpt) if (cond) throw excpt -#else - #define GUDHI_CHECK(cond, excpt) (void) 0 -#endif - #endif // DEBUG_UTILS_H_ diff --git a/src/common/include/gudhi/Delaunay_triangulation_off_io.h b/src/common/include/gudhi/Delaunay_triangulation_off_io.h index dfa70e40..4d26bb71 100644 --- a/src/common/include/gudhi/Delaunay_triangulation_off_io.h +++ b/src/common/include/gudhi/Delaunay_triangulation_off_io.h @@ -22,8 +22,6 @@ #ifndef DELAUNAY_TRIANGULATION_OFF_IO_H_ #define DELAUNAY_TRIANGULATION_OFF_IO_H_ -#include - #include #include #include @@ -66,10 +64,10 @@ class Delaunay_triangulation_off_visitor_reader { * @param[in] num_edges number of edges in the OFF file (not used). */ void init(int dim, int num_vertices, int num_faces, int num_edges) { - DBGMSG("Delaunay_triangulation_off_visitor_reader::init - dim=", dim); - DBGMSG(" - num_vertices=", num_vertices); - DBGMSG(" - num_faces=", num_faces); - DBGMSG(" - num_edges=", num_edges); +#ifdef DEBUG_TRACES + std::cout << "Delaunay_triangulation_off_visitor_reader::init - dim=" << dim << " - num_vertices=" << + num_vertices << " - num_faces=" << num_faces << " - num_edges=" << num_edges << std::endl; +#endif // DEBUG_TRACES if (num_faces > 0) { std::cerr << "Delaunay_triangulation_off_visitor_reader::init faces are not taken into account from OFF " << "file for Delaunay triangulation - faces are computed." << std::endl; @@ -90,8 +88,13 @@ class Delaunay_triangulation_off_visitor_reader { * @param[in] point vector of vertex coordinates. 
*/ void point(const std::vector& point) { - DBG("Delaunay_triangulation_off_visitor_reader::point"); - DBGCONT(point); +#ifdef DEBUG_TRACES + std::cout << "Delaunay_triangulation_off_visitor_reader::point "; + for (auto coordinate : point) { + std::cout << coordinate << " | "; + } + std::cout << std::endl; +#endif // DEBUG_TRACES complex_->insert(Point(point.size(), point.begin(), point.end())); } -- cgit v1.2.3 From 061e43a2a48525bc5a69482a1ea80f20ff505e55 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Fri, 20 Nov 2015 16:13:33 +0000 Subject: Bug on gudhi installation doc page git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/trunk@928 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 7fb83a82e11210bce9842a80779455681d6508f3 --- src/common/doc/main_page.h | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) (limited to 'src/common') diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h index 689e7a4d..41b8ba1e 100644 --- a/src/common/doc/main_page.h +++ b/src/common/doc/main_page.h @@ -56,6 +56,7 @@ * The following example requires the GNU Multiple Precision Arithmetic * Library (GMP) and will not be built if GMP is not installed: * \li Persistent_cohomology/rips_multifield_persistence + * * Having GMP version 4.2 or higher installed is recommended. * * \subsection cgal CGAL: @@ -72,19 +73,15 @@ * * \subsection demos Demos and examples * To build the demos and libraries, run the following commands in a terminal: - * \verbatim - * cd /path-to-gudhi/ - * mkdir build - * cd build/ - * cmake .. - * make - * \endverbatim +\verbatim cd /path-to-gudhi/ +mkdir build +cd build/ +cmake .. +make \endverbatim * * \subsection testsuites Test suites * To test your build, run the following command in a terminal: - * \verbatim - * make test - * \endverbatim + * \verbatim make test \endverbatim * * \section Contributions Bug reports and contributions * Please help us improving the quality of the GUDHI library. 
You may report bugs or suggestions to: -- cgit v1.2.3 From c79ddda239336378d50255ef99ea6c34ceefbb47 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Fri, 27 Nov 2015 17:05:22 +0000 Subject: After doc review git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/alphashapes@931 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 578da0e6fff453560e666e8f00147f9e10cb6de6 --- CMakeLists.txt | 6 + src/Alpha_complex/doc/Intro_alpha_complex.h | 91 +++- .../doc/alpha_complex_doc_alpha_shape.ipe | 482 --------------------- .../doc/alpha_complex_doc_alpha_shape.png | Bin 62759 -> 0 bytes .../doc/alpha_complex_representation.ipe | 321 ++++++++++++++ .../example/Alpha_complex_from_off.cpp | 16 +- .../example/Alpha_complex_from_points.cpp | 36 +- src/Alpha_complex/example/CMakeLists.txt | 4 - .../example/alphaoffreader_for_doc.txt | 27 -- .../example/alphaoffreader_for_doc_32.txt | 22 + .../example/alphaoffreader_for_doc_60.txt | 27 ++ src/Alpha_complex/include/gudhi/Alpha_complex.h | 2 + src/CMakeLists.txt | 6 + src/Doxyfile | 3 +- src/GudhUI/CMakeLists.txt | 4 - src/Persistent_cohomology/example/CMakeLists.txt | 8 - src/common/doc/main_page.h | 2 +- src/common/example/CMakeLists.txt | 4 - 18 files changed, 486 insertions(+), 575 deletions(-) delete mode 100644 src/Alpha_complex/doc/alpha_complex_doc_alpha_shape.ipe delete mode 100644 src/Alpha_complex/doc/alpha_complex_doc_alpha_shape.png create mode 100644 src/Alpha_complex/doc/alpha_complex_representation.ipe delete mode 100644 src/Alpha_complex/example/alphaoffreader_for_doc.txt create mode 100644 src/Alpha_complex/example/alphaoffreader_for_doc_32.txt create mode 100644 src/Alpha_complex/example/alphaoffreader_for_doc_60.txt (limited to 'src/common') diff --git a/CMakeLists.txt b/CMakeLists.txt index 197b6f95..d42f7af7 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -69,6 +69,12 @@ else() message(STATUS "boost include dirs:" ${Boost_INCLUDE_DIRS}) message(STATUS "boost library dirs:" ${Boost_LIBRARY_DIRS}) + if (DEBUG_TRACES) + # For programs to be more verbose + message(STATUS "DEBUG_TRACES are activated") + add_definitions(-DDEBUG_TRACES) + endif() + include_directories(src/common/include/) include_directories(src/Alpha_complex/include/) include_directories(src/Bottleneck/include/) diff --git a/src/Alpha_complex/doc/Intro_alpha_complex.h b/src/Alpha_complex/doc/Intro_alpha_complex.h index 1fb8fdee..685a4c2f 100644 --- a/src/Alpha_complex/doc/Intro_alpha_complex.h +++ b/src/Alpha_complex/doc/Intro_alpha_complex.h @@ -36,48 +36,58 @@ namespace alphacomplex { * * \section definition Definition * - * Alpha_complex is a Simplex_tree constructed from each finite cell of a Delaunay Triangulation. + * Alpha_complex is a simplicial complex + * constructed from each finite cell of a Delaunay Triangulation. * * The filtration value of each simplex is computed from the alpha square value of the simplex if it is Gabriel or * from the alpha value of the simplex coface that makes the simplex not Gabriel. * - * Please refer to \cite AlphaShapesDefinition for a more complete alpha complex definition. + * All simplices that have a filtration value strictly greater than a given alpha square value are not inserted into + * the simplex. * - * Alpha complex are interesting because it looks like an \ref alpha-shape "Alpha shape" as described in - * \cite AlphaShapesIntroduction (an alpha complex concept vulgarization). 
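Restated as a formula, this is only a compact rewrite of the definition given above, where \f$ \alpha^2(\sigma) \f$ denotes the squared radius of the smallest circumscribing ball of \f$ \sigma \f$:

    \f$ filtration(\sigma) = \alpha^2(\sigma) \f$ if \f$ \sigma \f$ is Gabriel,
    \f$ filtration(\sigma) = \alpha^2(\tau) \f$ otherwise, \f$ \tau \f$ being the coface of \f$ \sigma \f$ that makes it not Gabriel.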
+ * \image html "alpha_complex_representation.png" "Alpha simplicial complex representation" * - * \section example Example + * Alpha_complex is constructing a `Simplex_tree` using Delaunay Triangulation + * \cite cgal:hdj-t-15b from CGAL (the Computational Geometry + * Algorithms Library \cite cgal:eb-15b). * - * This example loads points from an OFF file, builds the Delaunay triangulation from the points, and finally - * initialize the alpha complex with it. + * The complex is a template class requiring a dD Geometry Kernel + * \cite cgal:s-gkd-15b from CGAL as template. + * + * \section pointsexample Example from points + * + * This example builds the Delaunay triangulation from the given points in a 2D static kernel, and initializes the + * alpha complex with it. * * Then, it is asked to display information about the alpha complex. * - * \include Alpha_complex_from_off.cpp + * \include Alpha_complex_from_points.cpp * * When launching: * - * \code $> ./alphaoffreader ../../data/points/alphacomplexdoc.off 60.0 + * \code $> ./alphapoints 60.0 * \endcode * * the program output is: * - * \include alphaoffreader_for_doc.txt + * \include alphaoffreader_for_doc_60.txt * * \section algorithm Algorithm * - * Data structure + * \subsection datastructure Data structure * * In order to build the alpha complex, first, a Simplex tree is build from the cells of a Delaunay Triangulation. * (The filtration value is set to NaN, which stands for unknown value): * \image html "alpha_complex_doc.png" "Simplex tree structure construction example" * - * Filtration value computation algorithm - * + * \subsection filtrationcomputation Filtration value computation algorithm + * * \f{algorithm}{ * \caption{Filtration value computation algorithm}\label{alpha} * \begin{algorithmic} - * \For{i : dimension $\rightarrow$ 1} + * \For{i : dimension $\rightarrow$ 0} * \ForAll{$\sigma$ of dimension i} * \If {filtration($\sigma$) is NaN} * \State filtration($\sigma$) = $\alpha^2(\sigma)$ @@ -93,25 +103,58 @@ namespace alphacomplex { * \EndFor * \EndFor * \EndFor + * \State make\_filtration\_non\_decreasing() + * \State prune\_above\_filtration() * \end{algorithmic} * \f} * - * From the example above, it means the algorithm will look into each triangle ([1,2,3], [2,3,4], [1,3,5], ...), - * will compute the filtration value of the triangle, and then will propagate the filtration value as described + * \subsubsection dimension2 Dimension 2 + * + * From the example above, it means the algorithm looks into each triangle ([1,2,3], [2,3,4], [1,3,5], ...), + * computes the filtration value of the triangle, and then propagates the filtration value as described * here : * \image html "alpha_complex_doc_135.png" "Filtration value propagation example" - * Then, the algorithm will look into each edge ([1,2], [2,3], [1,3], ...), - * will compute the filtration value of the edge (in this case, propagation will have no effect). * - * Finally, the algorithm will look into each vertex ([1], [2], [3], [4], [5], [6] and [7]), - * will set the filtration value (0 in case of a vertex - propagation will have no effect). + * \subsubsection dimension1 Dimension 1 + * + * Then, the algorithm looks into each edge ([1,2], [2,3], [1,3], ...), + * computes the filtration value of the edge (in this case, propagation will have no effect). 
+ * + * \subsubsection dimension0 Dimension 0 + * + * Finally, the algorithm looks into each vertex ([1], [2], [3], [4], [5], [6] and [7]) and + * sets the filtration value (0 in case of a vertex - propagation will have no effect). + * + * \subsubsection nondecreasing Non decreasing filtration values + * + * As Alpha square value computed from CGAL is an approximation, we have to make filtration non decreasing while + * increasing the dimension for our simplicial complex to be valid (cf. + * `Simplex_tree::make_filtration_non_decreasing()`). + * + * \subsubsection pruneabove Prune above given filtration value + * + * The simplex tree is pruned from the given maximum alpha square value (cf. `Simplex_tree::prune_above_filtration()`). + * In this example, the value is given by the user as argument of the program. * - * \section alpha-shape Alpha shape * - * In the example above, the alpha shape of \f$\alpha^2_{63} < \alpha^2 < \alpha^2_{62}\f$ is the alpha complex where the - * \f$\alpha^2_{63} <\f$ filtration value \f$< \alpha^2_{62}\f$ as described in \cite AlphaShapesIntroduction + * \section offexample Example from OFF file + * + * This example builds the Delaunay triangulation in a dynamic kernel, and initializes the alpha complex with it. + * + * + * Then, it is asked to display information about the alpha complex. + * + * \include Alpha_complex_from_off.cpp + * + * When launching: + * + * \code $> ./alphaoffreader ../../data/points/alphacomplexdoc.off 32.0 + * \endcode + * + * the program output is: + * + * \include alphaoffreader_for_doc_32.txt * - * \image html "alpha_complex_doc_alpha_shape.png" "Alpha shape example" * \copyright GNU General Public License v3. * \verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim */ diff --git a/src/Alpha_complex/doc/alpha_complex_doc_alpha_shape.ipe b/src/Alpha_complex/doc/alpha_complex_doc_alpha_shape.ipe deleted file mode 100644 index 7563cf3d..00000000 --- a/src/Alpha_complex/doc/alpha_complex_doc_alpha_shape.ipe +++ /dev/null @@ -1,482 +0,0 @@ - - - - - - - -0 0 m --1 0.333 l --1 -0.333 l -h - - - - -0 0 m --1 0.333 l --1 -0.333 l -h - - - - -0.6 0 0 0.6 0 0 e -0.4 0 0 0.4 0 0 e - - - - -0.6 0 0 0.6 0 0 e - - - - - -0.5 0 0 0.5 0 0 e - - -0.6 0 0 0.6 0 0 e -0.4 0 0 0.4 0 0 e - - - - - --0.6 -0.6 m -0.6 -0.6 l -0.6 0.6 l --0.6 0.6 l -h --0.4 -0.4 m -0.4 -0.4 l -0.4 0.4 l --0.4 0.4 l -h - - - - --0.6 -0.6 m -0.6 -0.6 l -0.6 0.6 l --0.6 0.6 l -h - - - - - --0.5 -0.5 m -0.5 -0.5 l -0.5 0.5 l --0.5 0.5 l -h - - --0.6 -0.6 m -0.6 -0.6 l -0.6 0.6 l --0.6 0.6 l -h --0.4 -0.4 m -0.4 -0.4 l -0.4 0.4 l --0.4 0.4 l -h - - - - - - --0.43 -0.57 m -0.57 0.43 l -0.43 0.57 l --0.57 -0.43 l -h - - --0.43 0.57 m -0.57 -0.43 l -0.43 -0.57 l --0.57 0.43 l -h - - - - - -0 0 m --1 0.333 l --1 -0.333 l -h - - - - -0 0 m --1 0.333 l --0.8 0 l --1 -0.333 l -h - - - - -0 0 m --1 0.333 l --0.8 0 l --1 -0.333 l -h - - - - --1 0.333 m -0 0 l --1 -0.333 l - - - - -0 0 m --1 0.333 l --1 -0.333 l -h --1 0 m --2 0.333 l --2 -0.333 l -h - - - - -0 0 m --1 0.333 l --1 -0.333 l -h --1 0 m --2 0.333 l --2 -0.333 l -h - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -Alpha shape -0 -1 -2 -3 -4 -5 -6 -0 -1 -2 -2 -1 -2 -2 -3 -3 -3 -3 -4 -4 -4 -4 -6 -6 -6 -6 -6 -6 -5 -6 -5 - -4 0 0 4 320 704 e - - -322.919 706.788 m -317.189 701.058 l -317.189 701.203 l - - -317.551 706.934 m -322.629 701.058 l - - -230 680 m -240 670 l - - -230 680 m -240 670 l - - -230 680 m -240 
670 l - - -230 680 m -240 670 l - - -230 680 m -220 670 l - - -230 680 m -230 670 l - - -220 660 m -220 650 l - - -230 660 m -230 650 l - - -260 680 m -260 670 l - - -260 660 m -260 650 l - - -300 680 m -300 670 l - - -300 680 m -290 670 l - - -290 660 m -290 650 l - - -300 660 m -300 650 l - - -330 680 m -330 670 l - - -350 680 m -350 670 l - - -350 660 m -350 650 l - - -320 700 m -240 690 l - - -320 700 m -270 690 l - - -320 700 m -310 690 l - - -320 700 m -330 690 l - - -320 700 m -350 690 l - - -320 700 m -380 690 l - - -320 700 m -400 690 l - -Alpha complex structure - -58.1341 0 0 58.1341 218.925 692.601 e - - -58.1341 0 0 58.1341 218.925 692.601 e - - -58.1341 0 0 58.1341 218.925 692.601 e - - -58.1341 0 0 58.1341 218.925 692.601 e - - -58.1341 0 0 58.1341 218.925 692.601 e - - -58.1341 0 0 58.1341 218.925 692.601 e - - -58.1341 0 0 58.1341 218.925 692.601 e - - -58.1341 0 0 58.1341 218.925 692.601 e - - -58.1341 0 0 58.1341 218.925 692.601 e - - -60 710 m -40 660 l - - -40 660 m -130 690 l - - -130 690 m -60 710 l - - -40 660 m -80 580 l - - -80 580 m -130 580 l -130 580 l - - -130 580 m -110 520 l - - -110 520 m -50 530 l -50 530 l -50 530 l - - -50 530 m -80 580 l - - -130 580 m -130 690 l - - - -108.275 743.531 m -166.45 743.531 l - -$\alpha$ -filtration value $> \alpha$ are greyed - -280 660 m -300 680 l - - -280 660 m -300 640 l - - -370 660 m -350 680 l - - -370 660 m -350 640 l - - -290 670 m -360 670 l - - -290 650 m -360 650 l - -equivalent - - diff --git a/src/Alpha_complex/doc/alpha_complex_doc_alpha_shape.png b/src/Alpha_complex/doc/alpha_complex_doc_alpha_shape.png deleted file mode 100644 index 2d5f59a3..00000000 Binary files a/src/Alpha_complex/doc/alpha_complex_doc_alpha_shape.png and /dev/null differ diff --git a/src/Alpha_complex/doc/alpha_complex_representation.ipe b/src/Alpha_complex/doc/alpha_complex_representation.ipe new file mode 100644 index 00000000..8687d694 --- /dev/null +++ b/src/Alpha_complex/doc/alpha_complex_representation.ipe @@ -0,0 +1,321 @@ + + + + + + + +0 0 m +-1 0.333 l +-1 -0.333 l +h + + + + +0 0 m +-1 0.333 l +-1 -0.333 l +h + + + + +0.6 0 0 0.6 0 0 e +0.4 0 0 0.4 0 0 e + + + + +0.6 0 0 0.6 0 0 e + + + + + +0.5 0 0 0.5 0 0 e + + +0.6 0 0 0.6 0 0 e +0.4 0 0 0.4 0 0 e + + + + + +-0.6 -0.6 m +0.6 -0.6 l +0.6 0.6 l +-0.6 0.6 l +h +-0.4 -0.4 m +0.4 -0.4 l +0.4 0.4 l +-0.4 0.4 l +h + + + + +-0.6 -0.6 m +0.6 -0.6 l +0.6 0.6 l +-0.6 0.6 l +h + + + + + +-0.5 -0.5 m +0.5 -0.5 l +0.5 0.5 l +-0.5 0.5 l +h + + +-0.6 -0.6 m +0.6 -0.6 l +0.6 0.6 l +-0.6 0.6 l +h +-0.4 -0.4 m +0.4 -0.4 l +0.4 0.4 l +-0.4 0.4 l +h + + + + + + +-0.43 -0.57 m +0.57 0.43 l +0.43 0.57 l +-0.57 -0.43 l +h + + +-0.43 0.57 m +0.57 -0.43 l +0.43 -0.57 l +-0.57 0.43 l +h + + + + + +0 0 m +-1 0.333 l +-1 -0.333 l +h + + + + +0 0 m +-1 0.333 l +-0.8 0 l +-1 -0.333 l +h + + + + +0 0 m +-1 0.333 l +-0.8 0 l +-1 -0.333 l +h + + + + +-1 0.333 m +0 0 l +-1 -0.333 l + + + + +0 0 m +-1 0.333 l +-1 -0.333 l +h +-1 0 m +-2 0.333 l +-2 -0.333 l +h + + + + +0 0 m +-1 0.333 l +-1 -0.333 l +h +-1 0 m +-2 0.333 l +-2 -0.333 l +h + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +109.771 601.912 m +159.595 601.797 l +140.058 541.915 l +h + + +79.8776 552.169 m +109.756 601.699 l +139.812 542.209 l +h + + +69.8453 682.419 m +159.925 712.208 l +90.12 732.039 l +h + +Alpha complex +0 +1 +2 +3 +4 +5 +6 + +58.1341 0 0 58.1341 218.925 692.601 e + + +60 710 m +40 660 l + + +40 660 m +130 690 l + + +130 690 m +60 
710 l + + +40 660 m +80 580 l + + +80 580 m +130 580 l +130 580 l + + +130 580 m +110 520 l + + +110 520 m +50 530 l +50 530 l +50 530 l + + +50 530 m +80 580 l + + +130 580 m +130 690 l + + + +108.275 743.531 m +166.45 743.531 l + +$\alpha$ + + + + + + +150.038 609.9 m +179.929 549.727 l + + + + + diff --git a/src/Alpha_complex/example/Alpha_complex_from_off.cpp b/src/Alpha_complex/example/Alpha_complex_from_off.cpp index cd6f5a4b..4f381892 100644 --- a/src/Alpha_complex/example/Alpha_complex_from_off.cpp +++ b/src/Alpha_complex/example/Alpha_complex_from_off.cpp @@ -1,8 +1,6 @@ #include #include -// to construct a Delaunay_triangulation from a OFF file -#include #include void usage(char * const progName) { @@ -35,14 +33,12 @@ int main(int argc, char **argv) { std::cout << "Iterator on alpha complex simplices in the filtration order, with [filtration value]:" << std::endl; for (auto f_simplex : alpha_complex_from_file.filtration_simplex_range()) { - //if (alpha_complex_from_file.filtration(f_simplex) <= alpha_complex_from_file.filtration()) { - std::cout << " ( "; - for (auto vertex : alpha_complex_from_file.simplex_vertex_range(f_simplex)) { - std::cout << vertex << " "; - } - std::cout << ") -> " << "[" << alpha_complex_from_file.filtration(f_simplex) << "] "; - std::cout << std::endl; - //} + std::cout << " ( "; + for (auto vertex : alpha_complex_from_file.simplex_vertex_range(f_simplex)) { + std::cout << vertex << " "; + } + std::cout << ") -> " << "[" << alpha_complex_from_file.filtration(f_simplex) << "] "; + std::cout << std::endl; } return 0; } diff --git a/src/Alpha_complex/example/Alpha_complex_from_points.cpp b/src/Alpha_complex/example/Alpha_complex_from_points.cpp index e460f177..62f594d1 100644 --- a/src/Alpha_complex/example/Alpha_complex_from_points.cpp +++ b/src/Alpha_complex/example/Alpha_complex_from_points.cpp @@ -6,34 +6,50 @@ #include #include -// to construct a Delaunay_triangulation from a OFF file -#include "gudhi/Delaunay_triangulation_off_io.h" -#include "gudhi/Alpha_complex.h" +#include -typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Kernel; +typedef CGAL::Epick_d< CGAL::Dimension_tag<2> > Kernel; typedef Kernel::Point_d Point; typedef std::vector Vector_of_points; +void usage(char * const progName) { + std::cerr << "Usage: " << progName << " alpha_square_max_value" << std::endl; + std::cerr << " i.e.: " << progName << " 32.0" << std::endl; + exit(-1); // ----- >> +} + int main(int argc, char **argv) { + if (argc != 2) { + std::cerr << "Error: Number of arguments (" << argc << ") is not correct" << std::endl; + usage(argv[0]); + } + + double alpha_square_max_value = atof(argv[1]); + // ---------------------------------------------------------------------------- // Init of a list of points // ---------------------------------------------------------------------------- Vector_of_points points; - std::vector coords = { 0.0, 0.0, 0.0, 1.0 }; + std::vector coords = { 1.0, 1.0 }; + points.push_back(Point(coords.begin(), coords.end())); + coords = { 7.0, 0.0 }; + points.push_back(Point(coords.begin(), coords.end())); + coords = { 4.0, 6.0 }; + points.push_back(Point(coords.begin(), coords.end())); + coords = { 9.0, 6.0 }; points.push_back(Point(coords.begin(), coords.end())); - coords = { 0.0, 0.0, 1.0, 0.0 }; + coords = { 0.0, 14.0 }; points.push_back(Point(coords.begin(), coords.end())); - coords = { 0.0, 1.0, 0.0, 0.0 }; + coords = { 2.0, 19.0 }; points.push_back(Point(coords.begin(), coords.end())); - coords = { 1.0, 0.0, 0.0, 0.0 }; + coords = { 9.0, 17.0 
}; points.push_back(Point(coords.begin(), coords.end())); // ---------------------------------------------------------------------------- // Init of an alpha complex from the list of points // ---------------------------------------------------------------------------- - double max_alpha_square_value = 1e10; - Gudhi::alphacomplex::Alpha_complex alpha_complex_from_points(points, max_alpha_square_value); + Gudhi::alphacomplex::Alpha_complex alpha_complex_from_points(points, alpha_square_max_value); // ---------------------------------------------------------------------------- // Display information about the alpha complex diff --git a/src/Alpha_complex/example/CMakeLists.txt b/src/Alpha_complex/example/CMakeLists.txt index 10b87f04..33ff6805 100644 --- a/src/Alpha_complex/example/CMakeLists.txt +++ b/src/Alpha_complex/example/CMakeLists.txt @@ -27,10 +27,6 @@ if(CGAL_FOUND) if (EIGEN3_FOUND) message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") include( ${EIGEN3_USE_FILE} ) - if (CMAKE_BUILD_TYPE MATCHES Debug) - # For programs to be more verbose - add_definitions(-DDEBUG_TRACES) - endif() add_executable ( alphaoffreader Alpha_complex_from_off.cpp ) target_link_libraries(alphaoffreader ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) diff --git a/src/Alpha_complex/example/alphaoffreader_for_doc.txt b/src/Alpha_complex/example/alphaoffreader_for_doc.txt deleted file mode 100644 index 71f29a00..00000000 --- a/src/Alpha_complex/example/alphaoffreader_for_doc.txt +++ /dev/null @@ -1,27 +0,0 @@ -Alpha complex is of dimension 2 - 25 simplices - 7 vertices. -Iterator on alpha complex simplices in the filtration order, with [filtration value]: - ( 0 ) -> [0] - ( 1 ) -> [0] - ( 2 ) -> [0] - ( 3 ) -> [0] - ( 4 ) -> [0] - ( 5 ) -> [0] - ( 6 ) -> [0] - ( 3 2 ) -> [6.25] - ( 5 4 ) -> [7.25] - ( 2 0 ) -> [8.5] - ( 1 0 ) -> [9.25] - ( 3 1 ) -> [10] - ( 2 1 ) -> [11.25] - ( 3 2 1 ) -> [12.5] - ( 2 1 0 ) -> [12.9959] - ( 6 5 ) -> [13.25] - ( 4 2 ) -> [20] - ( 6 4 ) -> [22.7367] - ( 6 5 4 ) -> [22.7367] - ( 6 3 ) -> [30.25] - ( 6 2 ) -> [36.5] - ( 6 3 2 ) -> [36.5] - ( 6 4 2 ) -> [37.2449] - ( 4 0 ) -> [59.7107] - ( 4 2 0 ) -> [59.7107] diff --git a/src/Alpha_complex/example/alphaoffreader_for_doc_32.txt b/src/Alpha_complex/example/alphaoffreader_for_doc_32.txt new file mode 100644 index 00000000..553431a9 --- /dev/null +++ b/src/Alpha_complex/example/alphaoffreader_for_doc_32.txt @@ -0,0 +1,22 @@ +Alpha complex is of dimension 2 - 20 simplices - 7 vertices. +Iterator on alpha complex simplices in the filtration order, with [filtration value]: + ( 0 ) -> [0] + ( 1 ) -> [0] + ( 2 ) -> [0] + ( 3 ) -> [0] + ( 4 ) -> [0] + ( 5 ) -> [0] + ( 6 ) -> [0] + ( 5 4 ) -> [6.25] + ( 4 1 ) -> [20] + ( 4 2 ) -> [8.5] + ( 6 2 ) -> [9.25] + ( 6 5 ) -> [10] + ( 6 4 ) -> [11.25] + ( 6 5 4 ) -> [12.5] + ( 6 4 2 ) -> [12.9959] + ( 3 0 ) -> [13.25] + ( 4 1 ) -> [20] + ( 1 0 ) -> [22.7367] + ( 3 1 0 ) -> [22.7367] + ( 5 0 ) -> [30.25] diff --git a/src/Alpha_complex/example/alphaoffreader_for_doc_60.txt b/src/Alpha_complex/example/alphaoffreader_for_doc_60.txt new file mode 100644 index 00000000..71f29a00 --- /dev/null +++ b/src/Alpha_complex/example/alphaoffreader_for_doc_60.txt @@ -0,0 +1,27 @@ +Alpha complex is of dimension 2 - 25 simplices - 7 vertices. 
+Iterator on alpha complex simplices in the filtration order, with [filtration value]: + ( 0 ) -> [0] + ( 1 ) -> [0] + ( 2 ) -> [0] + ( 3 ) -> [0] + ( 4 ) -> [0] + ( 5 ) -> [0] + ( 6 ) -> [0] + ( 3 2 ) -> [6.25] + ( 5 4 ) -> [7.25] + ( 2 0 ) -> [8.5] + ( 1 0 ) -> [9.25] + ( 3 1 ) -> [10] + ( 2 1 ) -> [11.25] + ( 3 2 1 ) -> [12.5] + ( 2 1 0 ) -> [12.9959] + ( 6 5 ) -> [13.25] + ( 4 2 ) -> [20] + ( 6 4 ) -> [22.7367] + ( 6 5 4 ) -> [22.7367] + ( 6 3 ) -> [30.25] + ( 6 2 ) -> [36.5] + ( 6 3 2 ) -> [36.5] + ( 6 4 2 ) -> [37.2449] + ( 4 0 ) -> [59.7107] + ( 4 2 0 ) -> [59.7107] diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex.h b/src/Alpha_complex/include/gudhi/Alpha_complex.h index 6adfa2e6..2dae4028 100644 --- a/src/Alpha_complex/include/gudhi/Alpha_complex.h +++ b/src/Alpha_complex/include/gudhi/Alpha_complex.h @@ -27,6 +27,8 @@ #include #include #include +// to construct a Delaunay_triangulation from a OFF file +#include #include #include // isnan, fmax diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 9d1eac80..3a75527e 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -42,6 +42,12 @@ else() INCLUDE_DIRECTORIES(${Boost_INCLUDE_DIRS}) LINK_DIRECTORIES(${Boost_LIBRARY_DIRS}) + if (DEBUG_TRACES) + message(STATUS "DEBUG_TRACES are activated") + # For programs to be more verbose + add_definitions(-DDEBUG_TRACES) + endif() + #--------------------------------------------------------------------------------------- # Gudhi compilation part include_directories(include) diff --git a/src/Doxyfile b/src/Doxyfile index 81f55e29..9cd73444 100644 --- a/src/Doxyfile +++ b/src/Doxyfile @@ -673,6 +673,7 @@ LAYOUT_FILE = # also \cite for info how to create references. CITE_BIB_FILES = biblio/bibliography.bib \ + biblio/how_to_cite_cgal.bib \ biblio/how_to_cite_gudhi.bib #--------------------------------------------------------------------------- @@ -812,7 +813,7 @@ EXCLUDE_SYMBOLS = # that contain example code fragments that are included (see the \include # command). 
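Two quick consistency checks on the alphaoffreader_for_doc_60.txt listing above, using only values already printed there and the seven input points:

- Propagation: the edge ( 4 0 ) and the triangle ( 4 2 0 ) share the value 59.7107. With vertex 0 = (1, 1), vertex 2 = (4, 6) and vertex 4 = (0, 14), the smallest ball around 0 and 4 has center (0.5, 7.5) and squared radius (1 + 169) / 4 = 42.5, while (4 - 0.5)^2 + (6 - 7.5)^2 = 14.5 < 42.5; vertex 2 therefore lies inside it, the edge [0, 4] is not Gabriel, and it inherits the filtration value of its coface [0, 2, 4], as the propagation step of the algorithm above prescribes.

- Pruning: 5 of the 25 simplices listed for the 60.0 run have a value above 32 (36.5, 36.5, 37.2449, 59.7107 and 59.7107); removing them leaves the 20 simplices announced in alphaoffreader_for_doc_32.txt for the 32.0 run.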
-EXAMPLE_PATH = biblio/ +EXAMPLE_PATH = biblio/ \ example/common/ \ example/Alpha_complex/ diff --git a/src/GudhUI/CMakeLists.txt b/src/GudhUI/CMakeLists.txt index 5c2afdd0..75adba08 100644 --- a/src/GudhUI/CMakeLists.txt +++ b/src/GudhUI/CMakeLists.txt @@ -102,10 +102,6 @@ if ( CGAL_FOUND AND QT4_FOUND AND OPENGL_FOUND AND QGLVIEWER_FOUND ) if (EIGEN3_FOUND) message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") include( ${EIGEN3_USE_FILE} ) - if (CMAKE_BUILD_TYPE MATCHES Debug) - # For programs to be more verbose - add_definitions(-DDEBUG_TRACES) - endif() add_executable (acp alpha_complex_persistence.cpp) target_link_libraries(acp ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY} ${QT_LIBRARIES}) diff --git a/src/Persistent_cohomology/example/CMakeLists.txt b/src/Persistent_cohomology/example/CMakeLists.txt index 8caf7d8b..eb4ee3e3 100644 --- a/src/Persistent_cohomology/example/CMakeLists.txt +++ b/src/Persistent_cohomology/example/CMakeLists.txt @@ -35,10 +35,6 @@ if(GMPXX_FOUND AND GMP_FOUND) target_link_libraries(performance_rips_persistence ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY} ${GMPXX_LIBRARIES} ${GMP_LIBRARIES}) if(CGAL_FOUND) - if (CMAKE_BUILD_TYPE MATCHES Debug) - # For programs to be more verbose - add_definitions(-DDEBUG_TRACES) - endif() add_executable(alpha_shapes_persistence alpha_shapes_persistence.cpp) target_link_libraries(alpha_shapes_persistence ${Boost_SYSTEM_LIBRARY} ${GMPXX_LIBRARIES} ${GMP_LIBRARIES} ${CGAL_LIBRARY}) add_test(alpha_shapes_persistence_2_0_5 ${CMAKE_CURRENT_BINARY_DIR}/alpha_shapes_persistence ${CMAKE_SOURCE_DIR}/data/points/bunny_5000 2 0.5) @@ -70,10 +66,6 @@ if(GMPXX_FOUND AND GMP_FOUND) if (EIGEN3_FOUND) message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") include( ${EIGEN3_USE_FILE} ) - if (CMAKE_BUILD_TYPE MATCHES Debug) - # For programs to be more verbose - add_definitions(-DDEBUG_TRACES) - endif() add_executable (alphacomplexpersistence alpha_complex_persistence.cpp) target_link_libraries(alphacomplexpersistence ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h index 3edcbd0b..3c42f72d 100644 --- a/src/common/doc/main_page.h +++ b/src/common/doc/main_page.h @@ -63,7 +63,7 @@ * CGAL is a C++ library which provides easy access to efficient and reliable geometric algorithms. 
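For reference, the two CGAL kernels exercised by the Alpha complex code in this patch set; the typedefs below mirror the ones appearing in Alpha_complex_from_points.cpp (before and after its switch to a static 2D kernel) and are only a restatement, not new API:

    #include <CGAL/Epick_d.h>

    // Static 2D kernel, as used by Alpha_complex_from_points.cpp after this change.
    typedef CGAL::Epick_d< CGAL::Dimension_tag<2> >      Static_kernel;
    typedef Static_kernel::Point_d                       Static_point;

    // Dynamic-dimension kernel, the variant described for the OFF-file example.
    typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Dynamic_kernel;
    typedef Dynamic_kernel::Point_d                      Dynamic_point;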
* * The following examples require the Computational Geometry Algorithms - * Library (CGAL) and will not be built if CGAL is not installed: + * Library (CGAL \cite cgal:eb-15b) and will not be built if CGAL is not installed: * \li GudhUI * \li Persistent_cohomology/alpha_shapes_persistence * \li Simplex_tree/simplex_tree_from_alpha_shapes_3 diff --git a/src/common/example/CMakeLists.txt b/src/common/example/CMakeLists.txt index 089f0c04..d29e31e7 100644 --- a/src/common/example/CMakeLists.txt +++ b/src/common/example/CMakeLists.txt @@ -26,10 +26,6 @@ if(CGAL_FOUND) if (EIGEN3_FOUND) message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") include( ${EIGEN3_USE_FILE} ) - if (CMAKE_BUILD_TYPE MATCHES Debug) - # For programs to be more verbose - add_definitions(-DDEBUG_TRACES) - endif() add_executable ( dtoffrw Delaunay_triangulation_off_rw.cpp ) target_link_libraries(dtoffrw ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) -- cgit v1.2.3 From f9b5b9b3306f3f00f5bfa2724cbfa087d5161fcb Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Fri, 11 Dec 2015 15:41:39 +0000 Subject: Commit code and doc review Still issue and lot of logs in simplex_tree::prune_above_filtration git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/alphashapes@945 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: cd4f60ddcacb0444e0eb3b9323d8042eb49b132e --- CMakeLists.txt | 4 +- biblio/bibliography.bib | 16 -- src/Alpha_complex/doc/Intro_alpha_complex.h | 8 +- src/Alpha_complex/doc/alpha_complex_doc.ipe | 24 +- src/Alpha_complex/doc/alpha_complex_doc.png | Bin 46746 -> 49973 bytes src/Alpha_complex/doc/alpha_complex_doc_135.ipe | 88 +++---- src/Alpha_complex/doc/alpha_complex_doc_135.png | Bin 127130 -> 80794 bytes .../doc/alpha_complex_representation.png | Bin 0 -> 16737 bytes .../example/Alpha_complex_from_points.cpp | 22 +- src/Alpha_complex/include/gudhi/Alpha_complex.h | 6 +- src/Alpha_complex/test/CMakeLists.txt | 2 + src/Simplex_tree/include/gudhi/Simplex_tree.h | 95 ++++++-- .../gudhi/Simplex_tree/Simplex_tree_siblings.h | 4 + src/Simplex_tree/test/simplex_tree_unit_test.cpp | 266 ++++++++++++++++++++- .../example/dtoffrw_alphashapedoc_result.off | 15 ++ .../example/dtoffrw_alphashapedoc_result.txt | 1 - .../include/gudhi/Delaunay_triangulation_off_io.h | 2 +- src/common/test/dtoffrw_unit_test.cpp | 4 +- 18 files changed, 436 insertions(+), 121 deletions(-) create mode 100644 src/Alpha_complex/doc/alpha_complex_representation.png create mode 100644 src/common/example/dtoffrw_alphashapedoc_result.off (limited to 'src/common') diff --git a/CMakeLists.txt b/CMakeLists.txt index d42f7af7..d0770dd7 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -22,8 +22,8 @@ if(MSVC) # Turn off some VC++ warnings SET (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4267 /wd4668 /wd4311 /wd4800 /wd4820 /wd4503 /wd4244 /wd4345 /wd4996 /wd4396 /wd4018") else() - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O2 -std=c++11 -Wall -Wpedantic -Wsign-compare") - set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -ggdb -O0") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O2 -std=c++11 -fsanitize=memory -fno-omit-frame-pointer -Wall -Wpedantic -Wsign-compare") + set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -ggdb -O1") set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE}") endif() diff --git a/biblio/bibliography.bib b/biblio/bibliography.bib index 859696b4..3fd1c10a 100644 --- a/biblio/bibliography.bib +++ b/biblio/bibliography.bib @@ -897,22 +897,6 @@ language={English} bibsource = {DBLP, http://dblp.uni-trier.de} } 
-@ARTICLE{AlphaShapesDefinition, - author = {N. Akkiraju, H. Edelsbrunner, M. Facello, P. Fu, E. P. Mucke, and C. Varela}, - title = {\href{http://pub.ist.ac.at/~edels/Papers/1995-P-06-AlphaShapesSoftware.pdf}{Alpha shapes: definition and software}}, - journal = {Proc. Internat. Comput. Geom. Software Workshop 1995}, - year = {1995}, - bibsource = {http://pub.ist.ac.at} -} - -@ARTICLE{AlphaShapesIntroduction, - author = {Kaspar Fischer}, - title = {\href{http://www.cs.uu.nl/docs/vakken/ddm/texts/Delaunay/alphashapes.pdf}{Introduction to Alpha Shapes}}, - journal = {Unknown}, - year = {Unknown}, - bibsource = {http://www.cs.uu.nl} -} - misc{buddha_stanford_scan, author = "", title = "The Stanford 3D Scanning Repository", diff --git a/src/Alpha_complex/doc/Intro_alpha_complex.h b/src/Alpha_complex/doc/Intro_alpha_complex.h index 685a4c2f..12d62ac0 100644 --- a/src/Alpha_complex/doc/Intro_alpha_complex.h +++ b/src/Alpha_complex/doc/Intro_alpha_complex.h @@ -37,10 +37,10 @@ namespace alphacomplex { * \section definition Definition * * Alpha_complex is a simplicial complex - * constructed from each finite cell of a Delaunay Triangulation. + * constructed from the finite cells of a Delaunay Triangulation. * - * The filtration value of each simplex is computed from the alpha square value of the simplex if it is Gabriel or - * from the alpha value of the simplex coface that makes the simplex not Gabriel. + * The filtration value of each simplex is computed from the circumradius of the simplex if it is Gabriel or + * from the alpha value of the simplex cofaces that make it not Gabriel. * * All simplices that have a filtration value strictly greater than a given alpha square value are not inserted into * the simplex. @@ -78,7 +78,7 @@ namespace alphacomplex { * * \subsection datastructure Data structure * - * In order to build the alpha complex, first, a Simplex tree is build from the cells of a Delaunay Triangulation. + * In order to build the alpha complex, first, a Simplex tree is built from the cells of a Delaunay Triangulation. 
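A minimal sketch of that first step, using only Simplex_tree calls that appear elsewhere in this patch set. How the finite cells are extracted from the CGAL triangulation is elided here; the hard-coded cell list stands in for them, and NaN is the "unknown value" filtration mentioned in the documentation:

    #include <gudhi/Simplex_tree.h>

    #include <cmath>   // std::nan
    #include <vector>

    int main() {
      using Simplex_tree = Gudhi::Simplex_tree<>;
      Simplex_tree simplex_tree;

      // Stand-in for the finite full cells of the Delaunay triangulation,
      // each given by its vertex ids (illustrative values).
      std::vector<std::vector<Simplex_tree::Vertex_handle>> cells =
          { {0, 1, 2}, {1, 2, 3}, {0, 2, 4} };

      for (const auto& cell : cells)
        simplex_tree.insert_simplex_and_subfaces(cell, std::nan(""));

      simplex_tree.set_dimension(2);
      return 0;
    }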
* (The filtration value is set to NaN, which stands for unknown value): * \image html "alpha_complex_doc.png" "Simplex tree structure construction example" * diff --git a/src/Alpha_complex/doc/alpha_complex_doc.ipe b/src/Alpha_complex/doc/alpha_complex_doc.ipe index b5601143..e74f9bc4 100644 --- a/src/Alpha_complex/doc/alpha_complex_doc.ipe +++ b/src/Alpha_complex/doc/alpha_complex_doc.ipe @@ -1,7 +1,7 @@ - - + + @@ -202,13 +202,13 @@ h + + - - @@ -232,14 +232,7 @@ h - - - - - - - - + 320 580 m 350 520 l 290 530 l @@ -434,5 +427,12 @@ h 280 610 m 170 610 l + + + + + + + diff --git a/src/Alpha_complex/doc/alpha_complex_doc.png b/src/Alpha_complex/doc/alpha_complex_doc.png index 601ac051..c9eab275 100644 Binary files a/src/Alpha_complex/doc/alpha_complex_doc.png and b/src/Alpha_complex/doc/alpha_complex_doc.png differ diff --git a/src/Alpha_complex/doc/alpha_complex_doc_135.ipe b/src/Alpha_complex/doc/alpha_complex_doc_135.ipe index 28b893b8..5d1d29d4 100644 --- a/src/Alpha_complex/doc/alpha_complex_doc_135.ipe +++ b/src/Alpha_complex/doc/alpha_complex_doc_135.ipe @@ -1,7 +1,7 @@ - - + + @@ -202,13 +202,13 @@ h + + - - @@ -232,14 +232,7 @@ h - - - - - - - - + 320 580 m 350 520 l 290 530 l @@ -288,19 +281,11 @@ h 77.2727 0 0 77.2727 243.636 591.818 e - 243.428 591.569 m 186.061 643.28 l $\alpha_{420}$ - - - - - - - 320 580 m 350 520 l @@ -325,7 +310,6 @@ h modified (NaN) 0 -1 2 3 4 @@ -357,18 +341,10 @@ modified (NaN) 29.1548 0 0 29.1548 305 555 e - 304.883 555.015 m 334.509 555.015 l - - - - - - - 320 580 m 350 520 l @@ -391,10 +367,7 @@ modified (NaN) [0,4] is not Gabriel $\rightarrow$ $\alpha_{40} = \alpha_{420}$ 0 -1 -2 3 -4 5 6 @@ -420,16 +393,6 @@ modified (NaN) 290 530 m 280 660 l - -65.192 0 0 65.192 285 595 e - - - - - - - - 320 580 m 350 520 l @@ -483,7 +446,6 @@ modified (NaN) 44.5799 0 0 44.5799 425.934 457.774 e - 425.854 457.774 m 470.795 457.774 l @@ -505,10 +467,48 @@ modified (NaN) For all faces of [4,2,0] N.B. : is Gabriel on a single point has no sense. 
Dimension =2 - $\sigma$ = [4,2,0] - 247.333 430.892 m 311.764 430.892 l + + + + + + + + + + + + + +1 + + + + + +4 + + +1 + + +2 + +65.192 0 0 65.192 285 595 e + + + + + + + + + + + diff --git a/src/Alpha_complex/doc/alpha_complex_doc_135.png b/src/Alpha_complex/doc/alpha_complex_doc_135.png index 5dce5edd..ef7187f7 100644 Binary files a/src/Alpha_complex/doc/alpha_complex_doc_135.png and b/src/Alpha_complex/doc/alpha_complex_doc_135.png differ diff --git a/src/Alpha_complex/doc/alpha_complex_representation.png b/src/Alpha_complex/doc/alpha_complex_representation.png new file mode 100644 index 00000000..06e54c06 Binary files /dev/null and b/src/Alpha_complex/doc/alpha_complex_representation.png differ diff --git a/src/Alpha_complex/example/Alpha_complex_from_points.cpp b/src/Alpha_complex/example/Alpha_complex_from_points.cpp index 62f594d1..00e988a6 100644 --- a/src/Alpha_complex/example/Alpha_complex_from_points.cpp +++ b/src/Alpha_complex/example/Alpha_complex_from_points.cpp @@ -30,21 +30,13 @@ int main(int argc, char **argv) { // Init of a list of points // ---------------------------------------------------------------------------- Vector_of_points points; - - std::vector coords = { 1.0, 1.0 }; - points.push_back(Point(coords.begin(), coords.end())); - coords = { 7.0, 0.0 }; - points.push_back(Point(coords.begin(), coords.end())); - coords = { 4.0, 6.0 }; - points.push_back(Point(coords.begin(), coords.end())); - coords = { 9.0, 6.0 }; - points.push_back(Point(coords.begin(), coords.end())); - coords = { 0.0, 14.0 }; - points.push_back(Point(coords.begin(), coords.end())); - coords = { 2.0, 19.0 }; - points.push_back(Point(coords.begin(), coords.end())); - coords = { 9.0, 17.0 }; - points.push_back(Point(coords.begin(), coords.end())); + points.push_back(Point(1.0, 1.0)); + points.push_back(Point(7.0, 0.0)); + points.push_back(Point(4.0, 6.0)); + points.push_back(Point(9.0, 6.0)); + points.push_back(Point(0.0, 14.0)); + points.push_back(Point(2.0, 19.0)); + points.push_back(Point(9.0, 17.0)); // ---------------------------------------------------------------------------- // Init of an alpha complex from the list of points diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex.h b/src/Alpha_complex/include/gudhi/Alpha_complex.h index 2dae4028..9f931066 100644 --- a/src/Alpha_complex/include/gudhi/Alpha_complex.h +++ b/src/Alpha_complex/include/gudhi/Alpha_complex.h @@ -125,7 +125,9 @@ class Alpha_complex : public Simplex_tree<> { /** \brief Alpha_complex constructor from a Delaunay triangulation. * - * @param[in] triangulation_ptr Pointer on a Delaunay triangulation. + * @param[in] triangulation_ptr Pointer on a + * CGAL::Delaunay_triangulation \cite cgal:hdj-t-15b. * @param[in] max_alpha_square maximum for alpha square value. Default value is +\f$\infty\f$. */ Alpha_complex(Delaunay_triangulation* triangulation_ptr, @@ -170,7 +172,7 @@ class Alpha_complex : public Simplex_tree<> { } } - /** \brief Alpha_complex destructor from a Delaunay triangulation. + /** \brief Alpha_complex destructor. * * @warning Deletes the Delaunay triangulation. 
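The constructor and destructor documentation above implies a small ownership contract; it is sketched here only as comments, since the triangulation construction itself is not shown in this hunk:

    // Hypothetical caller-side view of the rule documented above:
    //
    //   Delaunay_triangulation* dt = ...;    // heap-allocated by the caller
    //   Alpha_complex alpha_complex(dt);     // the complex keeps the pointer
    //   ... use alpha_complex ...
    //   // no `delete dt;` here: the Alpha_complex destructor deletes the
    //   // triangulation, as the warning above states.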
*/ diff --git a/src/Alpha_complex/test/CMakeLists.txt b/src/Alpha_complex/test/CMakeLists.txt index fa24e1b1..d7c13da0 100644 --- a/src/Alpha_complex/test/CMakeLists.txt +++ b/src/Alpha_complex/test/CMakeLists.txt @@ -18,6 +18,8 @@ if(CGAL_FOUND) add_executable ( AlphaComplexUT Alpha_complex_unit_test.cpp ) target_link_libraries(AlphaComplexUT ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) + add_executable ( cerr cerr.cpp ) + # Do not forget to copy test files in current binary dir file(COPY "${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree.h b/src/Simplex_tree/include/gudhi/Simplex_tree.h index 3adf06d3..4b04e75a 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree.h @@ -530,7 +530,7 @@ class Simplex_tree { return dimension_; } - /** \brief Returns true iff the node in the simplex tree pointed by + /** \brief Returns true if the node in the simplex tree pointed by * sh has children.*/ bool has_children(Simplex_handle sh) const { return (sh->second.children()->parent() == sh->first); @@ -1134,7 +1134,6 @@ class Simplex_tree { if (sh->second.filtration() < upper_filtration) { // Store the filtration modification information modified = true; - std::cout << "modified" << std::endl; sh->second.assign_filtration(upper_filtration); } if (has_children(sh)) { @@ -1154,21 +1153,51 @@ class Simplex_tree { * call `initialize_filtration()` to recompute it. */ void prune_above_filtration(Filtration_value filtration) { - if (filtration < threshold_) { - threshold_ = filtration; - // Initialize filtration_vect_ if required - if (filtration_vect_.empty()) { - initialize_filtration(); - } +std::cout << "prune_above_filtration - filtration=" << filtration << std::endl; + // No action if filtration is not stored + if (Options::store_filtration) { + if (filtration < threshold_) { + threshold_ = filtration; + // Initialize filtration_vect_ if required + if (filtration_vect_.empty()) { +std::cout << "prune_above_filtration - initialize_filtration" << std::endl; + initialize_filtration(); + } + +std::cout << "prune_above_filtration - after initialize_filtration "; +for(auto sh : filtration_vect_) { +for (auto vertex : simplex_vertex_range(sh)) { +std::cout << (int) vertex << ", "; +} +std::cout << " - filtration=" << sh->second.filtration() << " - " << &(sh->second) << std::endl; +} - // Loop in reverse mode until threshold is reached - auto f_simplex = filtration_vect_.rbegin(); - for (; f_simplex != filtration_vect_.rend() && ((*f_simplex)->second.filtration() > threshold_); f_simplex++) { - remove_maximal_simplex(*f_simplex); + + // Loop in reverse mode until threshold is reached + auto f_simplex = filtration_vect_.rbegin(); + for (; (f_simplex != filtration_vect_.rend()) && ((*f_simplex)->second.filtration() > threshold_); f_simplex++) { + +std::cout << "prune_above_filtration - remove "; +for (auto vertex : simplex_vertex_range(*f_simplex)) { +std::cout << (int) vertex << ", "; +} +std::cout << " - " << &((*f_simplex)->second) << std::endl; + + remove_maximal_simplex(*f_simplex); + } +std::cout << "prune_above_filtration - remove STOPPED ON "; +for (auto vertex : simplex_vertex_range(*f_simplex)) { +std::cout << (int) vertex << ", "; +} +std::cout << " - filtration=" << (*f_simplex)->second.filtration() << " - " << &(*f_simplex->second) << std::endl; + if (f_simplex != filtration_vect_.rbegin()) { + // Do not forget to 
update filtration_vect_ - resize is enough + std::size_t new_size = filtration_vect_.size() - (f_simplex - filtration_vect_.rbegin()); +std::cout << "prune_above_filtration - resize" << new_size << std::endl; + filtration_vect_.resize(new_size); + } + } - // Do not forget to update filtration_vect_ - resize is enough - std::size_t new_size = filtration_vect_.size() - (f_simplex - filtration_vect_.rbegin()); - filtration_vect_.resize(new_size); } } @@ -1187,14 +1216,46 @@ class Simplex_tree { if ((child->size() > 1) || (child == root())) { // Not alone, just remove it from members // Special case when child is the root of the simplex tree, just remove it from members - child->members().erase(sh->first); +std::cout << "remove_maximal_simplex - members removal" << std::endl; + child->erase(sh->first); } else { // Sibling is emptied : must be deleted, and its parent must point on his own Sibling +std::cout << "remove_maximal_simplex - members is empty" << std::endl; child->oncles()->members().at(child->parent()).assign_children(child->oncles()); delete child; } } - +/***************************************************************************************************************/ + public: + /** \brief Prints the simplex_tree hierarchically. + * Since it prints the vertices recursively, one can watch its tree shape. + */ + void print_tree() { + for (auto sh = root_.members().begin(); sh != root_.members().end(); ++sh) { + std::cout << sh->first << " "; + if (has_children(sh)) { + std::cout << "("; + rec_print(sh->second.children()); + std::cout << ")"; + } + std::cout << std::endl; + } + } + + + /** \brief Recursively prints the simplex_tree, using depth first search. */ + private: + void rec_print(Siblings * sib) { + for (auto sh = sib->members().begin(); sh != sib->members().end(); ++sh) { + std::cout << " " << sh->first << " "; + if (has_children(sh)) { + std::cout << "("; + rec_print(sh->second.children()); + std::cout << ")"; + } + } + } +/*****************************************************************************************************************/ private: Vertex_handle null_vertex_; /** \brief Upper bound on the filtration values of the simplices.*/ diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_siblings.h b/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_siblings.h index 158ee1f7..c1ff8bf2 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_siblings.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_siblings.h @@ -116,6 +116,10 @@ class Simplex_tree_siblings { return members_.size(); } + void erase(const Vertex_handle vh) { + members_.erase(vh); + } + Simplex_tree_siblings * oncles_; Vertex_handle parent_; Dictionary members_; diff --git a/src/Simplex_tree/test/simplex_tree_unit_test.cpp b/src/Simplex_tree/test/simplex_tree_unit_test.cpp index 00cf69bc..f6bd5411 100644 --- a/src/Simplex_tree/test/simplex_tree_unit_test.cpp +++ b/src/Simplex_tree/test/simplex_tree_unit_test.cpp @@ -362,7 +362,9 @@ BOOST_AUTO_TEST_CASE(simplex_tree_insertion) { } -bool sort_in_decr_order (Vertex_handle i,Vertex_handle j) { return (i>j); } +bool sort_in_decr_order(Vertex_handle i, Vertex_handle j) { + return (i > j); +} BOOST_AUTO_TEST_CASE(NSimplexAndSubfaces_tree_insertion) { std::cout << "********************************************************************" << std::endl; @@ -476,7 +478,7 @@ BOOST_AUTO_TEST_CASE(NSimplexAndSubfaces_tree_insertion) { BOOST_CHECK(vertex == SimplexVector6[position]); position++; } - + /* Inserted simplex: */ 
/* 1 6 */ /* o---o */ @@ -720,14 +722,268 @@ BOOST_AUTO_TEST_CASE(copy_move_on_simplex_tree) { // Check there is a new simplex tree reference BOOST_CHECK(&st_move != &st_copy); BOOST_CHECK(&st_move != &st); - + typeST st_empty; // Check st has been emptied by the move BOOST_CHECK(st == st_empty); BOOST_CHECK(st.filtration() == 0); BOOST_CHECK(st.dimension() == -1); BOOST_CHECK(st.num_simplices() == 0); - BOOST_CHECK(st.num_vertices() == (size_t)0); - + BOOST_CHECK(st.num_vertices() == (size_t) 0); + std::cout << "Printing st once again- address = " << &st << std::endl; } + +BOOST_AUTO_TEST_CASE(make_filtration_non_decreasing) { + std::cout << "********************************************************************" << std::endl; + std::cout << "MAKE FILTRATION NON DECREASING" << std::endl; + typeST st; + + st.insert_simplex_and_subfaces({2, 1, 0}, 4.0); + st.insert_simplex_and_subfaces({3, 0}, 3.0); + st.insert_simplex_and_subfaces({3, 4, 5}, 2.0); + // Because of non decreasing property of simplex tree, { 0 } , { 1 } and { 0, 1 } are going to be set from value 4.0 + // to 1.0 + st.insert_simplex_and_subfaces({0, 1, 6, 7}, 1.0); + + /* Inserted simplex: */ + /* 1 6 */ + /* o---o */ + /* /X\7/ */ + /* o---o---o---o */ + /* 2 0 3\X/4 */ + /* o */ + /* 5 */ + + // FIXME + st.set_dimension(3); + + // Copy constructor + typeST st_copy = st; + + // Check default insertion ensures the filtration values are non decreasing + BOOST_CHECK(!st.make_filtration_non_decreasing()); + // Check the simplex tree is not modified by the function + BOOST_CHECK(st == st_copy); + + // Make { 0, 1 } decreasing + st.assign_filtration(st.find({0, 1}), 0.5); + // Make { 1, 6, 7 } decreasing + st.assign_filtration(st.find({1, 6, 7}), 0.4); + // Make { 3, 4 } decreasing + st.assign_filtration(st.find({3, 4}), 0.3); + // Make { 4, 5 } decreasing + st.assign_filtration(st.find({4, 5}), 0.1); + + // Check the filtration values were decreasing + BOOST_CHECK(st.make_filtration_non_decreasing()); + // Check the simplex tree has been modified by the function to retrieve the initial simplex tree + BOOST_CHECK(st == st_copy); + + // Change { 0, 3 }, but still non decreasing + st.assign_filtration(st.find({0, 3}), 1.01); + // Change { 0, 1, 6, 7 }, but still non decreasing + st.assign_filtration(st.find({0, 1, 6, 7}), 1.201); + // Change { 1, 2 }, but still non decreasing + st.assign_filtration(st.find({1, 2}), 1.05); + // Change { 4 }, but still non decreasing + st.assign_filtration(st.find({4}), 1.123); + + // Check the filtration values are non decreasing + BOOST_CHECK(!st.make_filtration_non_decreasing()); + // Check the simplex tree has been modified from the original + BOOST_CHECK(st != st_copy); + +} + +struct MyOptions : Simplex_tree_options_full_featured { + // Not doing persistence, so we don't need those + static const bool store_key = false; + static const bool store_filtration = false; + // I have few vertices + typedef short Vertex_handle; +}; +typedef Simplex_tree miniST; + +/*BOOST_AUTO_TEST_CASE(remove_maximal_simplex) { + std::cout << "********************************************************************" << std::endl; + std::cout << "REMOVE MAXIMAL SIMPLEX" << std::endl; + + miniST st; + + // FIXME + st.set_dimension(3); + + st.insert_simplex_and_subfaces({0, 1, 6, 7}); + st.insert_simplex_and_subfaces({3, 4, 5}); + + // Constructs a copy at this state for further test purpose + miniST st_pruned = st; + + st.insert_simplex_and_subfaces({3, 0}); + st.insert_simplex_and_subfaces({2, 1, 0}); + + // Constructs a copy 
at this state for further test purpose + miniST st_complete = st; + // st_complete and st: + // 1 6 + // o---o + // /X\7/ + // o---o---o---o + // 2 0 3\X/4 + // o + // 5 + // st_pruned: + // 1 6 + // o---o + // \7/ + // o o---o + // 0 3\X/4 + // o + // 5 + +#ifdef GUDHI_DEBUG + std::cout << "Check exception throw in debug mode" << std::endl; + // throw excpt because sh has children + BOOST_CHECK_THROW (st.remove_maximal_simplex(st.find({0, 1, 6})), std::invalid_argument); + BOOST_CHECK_THROW (st.remove_maximal_simplex(st.find({3})), std::invalid_argument); + BOOST_CHECK(st == st_complete); +#endif + + st.remove_maximal_simplex(st.find({0, 2})); + st.remove_maximal_simplex(st.find({0, 1, 2})); + st.remove_maximal_simplex(st.find({1, 2})); + st.remove_maximal_simplex(st.find({2})); + st.remove_maximal_simplex(st.find({0, 3})); + + BOOST_CHECK(st == st_pruned); + // Remove all, but as the simplex tree is not storing filtration, there is no modification + st.prune_above_filtration(0.0); + BOOST_CHECK(st == st_pruned); + + miniST st_wo_seven; + // FIXME + st_wo_seven.set_dimension(3); + + st_wo_seven.insert_simplex_and_subfaces({0, 1, 6}); + st_wo_seven.insert_simplex_and_subfaces({3, 4, 5}); + // st_wo_seven: + // 1 6 + // o---o + // \X/ + // o o---o + // 0 3\X/4 + // o + // 5 + + // Remove all 7 to test the both remove_maximal_simplex cases (when _members is empty or not) + st.remove_maximal_simplex(st.find({0, 1, 6, 7})); + st.remove_maximal_simplex(st.find({0, 1, 7})); + st.remove_maximal_simplex(st.find({0, 6, 7})); + st.remove_maximal_simplex(st.find({0, 7})); + st.remove_maximal_simplex(st.find({1, 6, 7})); + st.remove_maximal_simplex(st.find({1, 7})); + st.remove_maximal_simplex(st.find({6, 7})); + st.remove_maximal_simplex(st.find({7})); + + BOOST_CHECK(st == st_wo_seven); +}*/ + +BOOST_AUTO_TEST_CASE(prune_above_filtration) { + std::cout << "********************************************************************" << std::endl; + std::cout << "PRUNE ABOVE FILTRATION" << std::endl; + typeST st; + + // FIXME + st.set_dimension(3); + + st.insert_simplex_and_subfaces({0, 1, 6, 7}, 1.0); + st.insert_simplex_and_subfaces({3, 4, 5}, 2.0); + st.set_filtration(6.0); + + // Constructs a copy at this state for further test purpose + typeST st_pruned = st; + st_pruned.initialize_filtration(); // reset + + st.insert_simplex_and_subfaces({3, 0}, 3.0); + st.insert_simplex_and_subfaces({2, 1, 0}, 4.0); + + // Constructs a copy at this state for further test purpose + typeST st_complete = st; + // st_complete and st: + // 1 6 + // o---o + // /X\7/ + // o---o---o---o + // 2 0 3\X/4 + // o + // 5 + // st_pruned: + // 1 6 + // o---o + // \7/ + // o o---o + // 0 3\X/4 + // o + // 5 + + // Check the no action cases + // greater than initial filtration value + st.prune_above_filtration(10.0); + BOOST_CHECK(st == st_complete); + // equal to initial filtration value + st.prune_above_filtration(6.0); + BOOST_CHECK(st == st_complete); + // lower than initial filtration value, but still greater than the maximum filtration value + st_complete.set_filtration(5.0); + st.prune_above_filtration(5.0); + BOOST_CHECK(st == st_complete); + + // Display the Simplex_tree - Can not be done in the middle of 2 inserts + std::cout << "The complex contains " << st.num_simplices() << " simplices" << std::endl; + std::cout << " - dimension " << st.dimension() << " - filtration " << st.filtration() << std::endl; + std::cout << std::endl << std::endl << "Iterator on Simplices in the filtration, with [filtration value]:" << std::endl; + 
for (auto f_simplex : st.filtration_simplex_range()) { + std::cout << " " << "[" << st.filtration(f_simplex) << "] "; + for (auto vertex : st.simplex_vertex_range(f_simplex)) { + std::cout << (int) vertex << " "; + } + std::cout << std::endl; + } + + // Check the pruned cases + // Set the st_pruned filtration for operator== + st_pruned.set_filtration(2.5); + st.prune_above_filtration(2.5); + /*BOOST_CHECK(st == st_pruned); + + st_pruned.set_filtration(2.0); + st.prune_above_filtration(2.0); + BOOST_CHECK(st == st_pruned); +*/ +/* std::cout << "The complex contains " << st.num_simplices() << " simplices --------------------------" << std::endl; + std::cout << " - dimension " << st.dimension() << " - filtration " << st.filtration() << std::endl; + st.print_tree(); + + std::cout << "The pruned complex contains " << st_pruned.num_simplices() << " simplices --------------------------" << std::endl; + std::cout << " - dimension " << st_pruned.dimension() << " - filtration " << st_pruned.filtration() << std::endl; + st_pruned.print_tree(); + + typeST st_empty; + // FIXME + st_empty.set_dimension(3); + st.prune_above_filtration(0.0); + */ + /*BOOST_CHECK(st == st_empty); + + // Test case to the limit + st.prune_above_filtration(-1.0); + st_empty.set_filtration(-1.0); + BOOST_CHECK(st == st_empty); +*/ +} + +/*BOOST_AUTO_TEST_CASE(sanitizer) { + int a[2] = {1, 0}; + int b=a[2]; +}*/ diff --git a/src/common/example/dtoffrw_alphashapedoc_result.off b/src/common/example/dtoffrw_alphashapedoc_result.off new file mode 100644 index 00000000..03b7ca75 --- /dev/null +++ b/src/common/example/dtoffrw_alphashapedoc_result.off @@ -0,0 +1,15 @@ +nOFF +2 7 6 0 +1 1 +7 0 +4 6 +9 6 +0 14 +2 19 +9 17 +3 0 1 2 +3 3 2 1 +3 4 0 2 +3 4 2 6 +3 6 2 3 +3 5 4 6 diff --git a/src/common/example/dtoffrw_alphashapedoc_result.txt b/src/common/example/dtoffrw_alphashapedoc_result.txt index 57761d14..8e659740 100644 --- a/src/common/example/dtoffrw_alphashapedoc_result.txt +++ b/src/common/example/dtoffrw_alphashapedoc_result.txt @@ -1,3 +1,2 @@ Number of vertices= 7 Number of finite full cells= 6 - diff --git a/src/common/include/gudhi/Delaunay_triangulation_off_io.h b/src/common/include/gudhi/Delaunay_triangulation_off_io.h index 6335d243..b3f4a299 100644 --- a/src/common/include/gudhi/Delaunay_triangulation_off_io.h +++ b/src/common/include/gudhi/Delaunay_triangulation_off_io.h @@ -135,7 +135,7 @@ class Delaunay_triangulation_off_visitor_reader { * * When launching: * - * \code $> ./dtoffrw ../../data/points/alphashapedoc.off triangulated.off + * \code $> ./dtoffrw ../../data/points/alphacomplexdoc triangulated.off * \endcode * * the program output is: diff --git a/src/common/test/dtoffrw_unit_test.cpp b/src/common/test/dtoffrw_unit_test.cpp index 20094229..f682df1a 100644 --- a/src/common/test/dtoffrw_unit_test.cpp +++ b/src/common/test/dtoffrw_unit_test.cpp @@ -64,7 +64,7 @@ BOOST_AUTO_TEST_CASE( Delaunay_triangulation_doc_test ) BOOST_AUTO_TEST_CASE( Delaunay_triangulation_unexisting_file_read_test ) { - Gudhi::Delaunay_triangulation_off_reader off_reader("pouetpouet_tralala.off"); + Gudhi::Delaunay_triangulation_off_reader off_reader("some_impossible_weird_file_name.off"); // Check the read operation was correct BOOST_CHECK(!off_reader.is_valid()); T* triangulation = off_reader.get_complex(); @@ -80,7 +80,7 @@ BOOST_AUTO_TEST_CASE( Delaunay_triangulation_unexisting_file_write_test ) T* triangulation = off_reader.get_complex(); // Write the OFF file (output file name given as parameter) with the points and triangulated 
cells as faces - Gudhi::Delaunay_triangulation_off_writer off_writer("/pouetpouet_tralala/pouetpouet_tralala/pouetpouet_tralala.off", triangulation); + Gudhi::Delaunay_triangulation_off_writer off_writer("/some_impossible_weird_directory_name/another_weird_directory_name/some_impossible_weird_file_name.off", triangulation); // Check the write operation was correct BOOST_CHECK(!off_writer.is_valid()); -- cgit v1.2.3 From 9aa92aa2b504d9530125a6a164f76c11f45d8bb5 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Fri, 8 Jan 2016 15:03:16 +0000 Subject: cpplint and cppcheck fixes git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/trunk@956 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: f54c0a2a375e8818c7162aec05249e446361834b --- src/Hasse_complex/include/gudhi/Hasse_complex.h | 10 ++-- .../example/parallel_rips_persistence.cpp | 2 +- src/Simplex_tree/include/gudhi/Simplex_tree.h | 4 +- .../gudhi/Simplex_tree/Simplex_tree_iterators.h | 60 ++++++++++++---------- .../gudhi/Simplex_tree/Simplex_tree_siblings.h | 2 +- src/common/include/gudhi/allocator.h | 10 ++-- 6 files changed, 47 insertions(+), 41 deletions(-) (limited to 'src/common') diff --git a/src/Hasse_complex/include/gudhi/Hasse_complex.h b/src/Hasse_complex/include/gudhi/Hasse_complex.h index 38887264..8b06b771 100644 --- a/src/Hasse_complex/include/gudhi/Hasse_complex.h +++ b/src/Hasse_complex/include/gudhi/Hasse_complex.h @@ -23,14 +23,14 @@ #ifndef HASSE_COMPLEX_H_ #define HASSE_COMPLEX_H_ +#include + #include #include #include // for pair #include -#include - #ifdef GUDHI_USE_TBB #include #endif @@ -109,12 +109,12 @@ class Hasse_complex { , dim_max_(cpx.dimension()) { int size = complex_.size(); #ifdef GUDHI_USE_TBB - tbb::parallel_for(0,size,[&](int idx){new (&complex_[idx]) Hasse_simp(cpx, cpx.simplex(idx));}); - for (int idx=0; idx #include #include -#include "gudhi/Hasse_complex.h" +#include #include diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree.h b/src/Simplex_tree/include/gudhi/Simplex_tree.h index 356deb3a..708cdef9 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree.h @@ -528,7 +528,7 @@ class Simplex_tree { * The type InputVertexRange must be a range of Vertex_handle * on which we can call std::begin() function */ - template> + template> Simplex_handle find(const InputVertexRange & s) { auto first = std::begin(s); auto last = std::end(s); @@ -635,7 +635,7 @@ class Simplex_tree { * * The type InputVertexRange must be a range for which .begin() and * .end() return input iterators, with 'value_type' Vertex_handle. 
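The two Simplex_tree hunks here touch the InputVertexRange template parameter of find() and insert_simplex(). What the unit tests in this patch set rely on is that both a brace-enclosed vertex list and an ordinary range are accepted; a small sketch of that usage (the vertices are arbitrary):

    #include <gudhi/Simplex_tree.h>

    #include <iostream>
    #include <vector>

    int main() {
      Gudhi::Simplex_tree<> st;
      st.insert_simplex_and_subfaces({0, 1, 2}, 1.0);

      // Brace-enclosed list, exactly as st.find({0, 1}) is written in the tests.
      auto sh_list = st.find({0, 1});
      // Any range providing begin()/end() over Vertex_handle values.
      std::vector<Gudhi::Simplex_tree<>::Vertex_handle> edge = {1, 2};
      auto sh_range = st.find(edge);

      if (sh_list != st.null_simplex() && sh_range != st.null_simplex())
        std::cout << "both simplices were found" << std::endl;
      return 0;
    }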
*/ - template> + template> std::pair insert_simplex(const InputVertexRange & simplex, Filtration_value filtration = 0) { auto first = std::begin(simplex); diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_iterators.h b/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_iterators.h index 794060ee..936b7a1f 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_iterators.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_iterators.h @@ -54,7 +54,7 @@ class Simplex_tree_simplex_vertex_iterator : public boost::iterator_facade< explicit Simplex_tree_simplex_vertex_iterator(SimplexTree * st) : // any end() iterator - sib_(NULL), + sib_(nullptr), v_(st->null_vertex()) { } @@ -99,19 +99,19 @@ class Simplex_tree_boundary_simplex_iterator : public boost::iterator_facade< // any end() iterator explicit Simplex_tree_boundary_simplex_iterator(SimplexTree * st) - : sib_(NULL), - sh_(st->null_simplex()) { + : sib_(nullptr), + sh_(st->null_simplex()), + st_(st) { } Simplex_tree_boundary_simplex_iterator(SimplexTree * st, Simplex_handle sh) - : suffix_(), - sib_(st->self_siblings(sh)), + : last_(sh->first), + sib_(nullptr), st_(st) { - last_ = sh->first; Siblings * sib = st->self_siblings(sh); next_ = sib->parent(); - sib_ = sib->oncles(); /* \todo check if NULL*/ - if (sib_ != NULL) { + sib_ = sib->oncles(); + if (sib_ != nullptr) { sh_ = sib_->find(next_); } else { sh_ = st->null_simplex(); @@ -131,7 +131,7 @@ class Simplex_tree_boundary_simplex_iterator : public boost::iterator_facade< } void increment() { - if (sib_ == NULL) { + if (sib_ == nullptr) { sh_ = st_->null_simplex(); return; } @@ -189,13 +189,15 @@ class Simplex_tree_complex_simplex_iterator : public boost::iterator_facade< // any end() iterator Simplex_tree_complex_simplex_iterator() - : st_(NULL) { + : sib_(nullptr), + st_(nullptr) { } explicit Simplex_tree_complex_simplex_iterator(SimplexTree * st) - : st_(st) { - if (st == NULL || st->root() == NULL || st->root()->members().empty()) { - st_ = NULL; + : sib_(nullptr), + st_(st) { + if (st == nullptr || st->root() == nullptr || st->root()->members().empty()) { + st_ = nullptr; } else { sh_ = st->root()->members().begin(); sib_ = st->root(); @@ -210,10 +212,10 @@ class Simplex_tree_complex_simplex_iterator : public boost::iterator_facade< // valid when iterating along the SAME boundary. 
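These iterator hunks replace NULL with nullptr throughout. A generic illustration, not taken from the patch, of why the typed null pointer constant is the safer spelling in overload resolution:

    #include <iostream>

    struct Node {};

    void report(int)   { std::cout << "int overload" << std::endl; }
    void report(Node*) { std::cout << "pointer overload" << std::endl; }

    int main() {
      // report(NULL) may pick the int overload (or be ambiguous), because NULL
      // is an integral constant in many implementations; nullptr cannot.
      report(nullptr);   // prints "pointer overload"
      return 0;
    }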
bool equal(Simplex_tree_complex_simplex_iterator const& other) const { - if (other.st_ == NULL) { - return (st_ == NULL); + if (other.st_ == nullptr) { + return (st_ == nullptr); } - if (st_ == NULL) { + if (st_ == nullptr) { return false; } return (&(sh_->second) == &(other.sh_->second)); @@ -227,8 +229,8 @@ class Simplex_tree_complex_simplex_iterator : public boost::iterator_facade< void increment() { ++sh_; if (sh_ == sib_->members().end()) { - if (sib_->oncles() == NULL) { - st_ = NULL; + if (sib_->oncles() == nullptr) { + st_ = nullptr; return; } // reach the end sh_ = sib_->oncles()->members().find(sib_->parent()); @@ -261,15 +263,19 @@ class Simplex_tree_skeleton_simplex_iterator : public boost::iterator_facade< // any end() iterator Simplex_tree_skeleton_simplex_iterator() - : st_(NULL) { + : sib_(nullptr), + st_(nullptr), + dim_skel_(0), + curr_dim_(0) { } Simplex_tree_skeleton_simplex_iterator(SimplexTree * st, int dim_skel) - : st_(st), + : sib_(nullptr), + st_(st), dim_skel_(dim_skel), curr_dim_(0) { - if (st == NULL || st->root() == NULL || st->root()->members().empty()) { - st_ = NULL; + if (st == nullptr || st->root() == nullptr || st->root()->members().empty()) { + st_ = nullptr; } else { sh_ = st->root()->members().begin(); sib_ = st->root(); @@ -285,10 +291,10 @@ class Simplex_tree_skeleton_simplex_iterator : public boost::iterator_facade< // valid when iterating along the SAME boundary. bool equal(Simplex_tree_skeleton_simplex_iterator const& other) const { - if (other.st_ == NULL) { - return (st_ == NULL); + if (other.st_ == nullptr) { + return (st_ == nullptr); } - if (st_ == NULL) { + if (st_ == nullptr) { return false; } return (&(sh_->second) == &(other.sh_->second)); @@ -302,8 +308,8 @@ class Simplex_tree_skeleton_simplex_iterator : public boost::iterator_facade< void increment() { ++sh_; if (sh_ == sib_->members().end()) { - if (sib_->oncles() == NULL) { - st_ = NULL; + if (sib_->oncles() == nullptr) { + st_ = nullptr; return; } // reach the end sh_ = sib_->oncles()->members().find(sib_->parent()); diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_siblings.h b/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_siblings.h index 158ee1f7..072afc8d 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_siblings.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree/Simplex_tree_siblings.h @@ -57,7 +57,7 @@ class Simplex_tree_siblings { /* Default constructor.*/ Simplex_tree_siblings() - : oncles_(NULL), + : oncles_(nullptr), parent_(-1), members_() { } diff --git a/src/common/include/gudhi/allocator.h b/src/common/include/gudhi/allocator.h index b825173b..4ede14e4 100644 --- a/src/common/include/gudhi/allocator.h +++ b/src/common/include/gudhi/allocator.h @@ -20,8 +20,8 @@ * along with this program. If not, see . */ -#ifndef GUDHI_ALLOCATOR_H_ -#define GUDHI_ALLOCATOR_H_ +#ifndef ALLOCATOR_H_ +#define ALLOCATOR_H_ #include #include @@ -43,13 +43,13 @@ struct no_init_allocator : Base { // Do nothing: that's the whole point! 
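  // (Aside for this log, not part of the patch: the point of this allocator is that,
  //  for example, std::vector<int, Gudhi::no_init_allocator<int>> v; v.resize(n);
  //  is expected to leave the n elements uninitialized instead of zeroing them,
  //  because the no-argument construct() below intentionally does nothing.
  //  The single template argument used here is an assumption; the angle-bracket
  //  contents of this hunk were stripped in rendering.)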
template - void construct(P*)noexcept{} + void construct(P*) noexcept {} - template void construct(P*p, U&&...u){ + template void construct(P*p, U&&...u) { Base_traits::construct(*(Base*)this, p, std::forward(u)...); } }; } // namespace Gudhi -#endif // GUDHI_ALLOCATOR_H_ +#endif // ALLOCATOR_H_ -- cgit v1.2.3 From 11b195d4e26d48cdc56883957cbad16e298e43ca Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Tue, 12 Jan 2016 16:07:10 +0000 Subject: Fix alpha complex remarks and bugs git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/alphashapes@957 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: fa837fd1a4373c2322db16353d98767907f34c79 --- CMakeLists.txt | 4 +- biblio/how_to_cite_cgal.bib | 947 +++++++++++++++++++++ src/Alpha_complex/test/CMakeLists.txt | 2 - src/GudhUI/alpha_complex_persistence.cpp | 78 -- src/GudhUI/utils/Bar_code_persistence.h | 3 +- src/GudhUI/utils/Persistence_compute.h | 15 +- src/Persistent_cohomology/example/CMakeLists.txt | 100 ++- .../example/alpha_complex_persistence.cpp | 92 +- .../example/rips_persistence.cpp | 3 +- src/Simplex_tree/include/gudhi/Simplex_tree.h | 81 +- src/Simplex_tree/test/simplex_tree_unit_test.cpp | 56 +- src/common/include/gudhi/distance_functions.h | 4 +- src/common/include/gudhi/reader_utils.h | 4 +- 13 files changed, 1140 insertions(+), 249 deletions(-) create mode 100644 biblio/how_to_cite_cgal.bib delete mode 100644 src/GudhUI/alpha_complex_persistence.cpp (limited to 'src/common') diff --git a/CMakeLists.txt b/CMakeLists.txt index d0770dd7..54e86f72 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -10,7 +10,7 @@ endif() enable_testing() -set(CMAKE_PREFIX_PATH "${CMAKE_SOURCE_DIR}/src/cmake/modules/") +set(CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/src/cmake/modules/") message("CMAKE_MODULE_PATH = ${CMAKE_MODULE_PATH}") # Generate GUDHI official version file @@ -22,7 +22,7 @@ if(MSVC) # Turn off some VC++ warnings SET (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4267 /wd4668 /wd4311 /wd4800 /wd4820 /wd4503 /wd4244 /wd4345 /wd4996 /wd4396 /wd4018") else() - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O2 -std=c++11 -fsanitize=memory -fno-omit-frame-pointer -Wall -Wpedantic -Wsign-compare") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O2 -std=c++11 -Wall -Wpedantic -Wsign-compare") set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -ggdb -O1") set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE}") endif() diff --git a/biblio/how_to_cite_cgal.bib b/biblio/how_to_cite_cgal.bib new file mode 100644 index 00000000..7336ee81 --- /dev/null +++ b/biblio/how_to_cite_cgal.bib @@ -0,0 +1,947 @@ +@book{ cgal:eb-15b +, title = "{CGAL} User and Reference Manual" +, author = "{The CGAL Project}" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, year = 2015 +, url = "http://doc.cgal.org/4.7/Manual/packages.html" +} +@incollection{cgal:h-af-15b +, author = "Michael Hemmer" +, title = "Algebraic Foundations" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgAlgebraicFoundationsSummary" +, year = 2015 +} + +@incollection{cgal:hhkps-nt-15b +, author = "Michael Hemmer and Susan Hert and Sylvain Pion and Stefan Schirra" +, title = "Number Types" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgNumberTypesSummary" +, year = 2015 +} + +@incollection{cgal:h-ma-15b +, author = "Michael Hemmer and Sylvain 
Pion" +, title = "Modular Arithmetic" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgModularArithmeticSummary" +, year = 2015 +} + +@incollection{cgal:h-p-15b +, author = "Michael Hemmer" +, title = "Polynomial" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgPolynomialSummary" +, year = 2015 +} + +@incollection{cgal:bht-ak-15b +, author = "Eric Berberich and Michael Hemmer and Michael Kerber and Sylvain Lazard and Luis Pe{\~n}aranda and Monique Teillaud" +, title = "Algebraic Kernel" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgAlgebraicKerneldSummary" +, year = 2015 +} + +@incollection{cgal:h-msms-15b +, author = "Michael Hoffmann" +, title = "Monotone and Sorted Matrix Search" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgMatrixSearchSummary" +, year = 2015 +} + +@incollection{cgal:fgsw-lqps-15b +, author = "Kaspar Fischer and Bernd G{\"a}rtner and Sven Sch{\"o}nherr and Frans Wessendorp" +, title = "Linear and Quadratic Programming Solver" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgQPSolverSummary" +, year = 2015 +} + +@incollection{cgal:bfghhkps-lgk23-15b +, author = "Herv{\'e} Br{\"o}nnimann and Andreas Fabri and Geert-Jan Giezeman and Susan Hert and Michael Hoffmann and Lutz Kettner and Sylvain Pion and Stefan Schirra" +, title = "{2D} and {3D} Linear Geometry Kernel" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgKernel23Summary" +, year = 2015 +} + +@incollection{cgal:s-gkd-15b +, author = "Michael Seel" +, title = "{dD} Geometry Kernel" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgKernelDSummary" +, year = 2015 +} + +@incollection{cgal:cpt-cgk2-15b +, author = "Pedro Machado Manh{\~a}es de Castro and Sylvain Pion and Monique Teillaud" +, title = "{2D} Circular Geometry Kernel" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgCircularKernel2Summary" +, year = 2015 +} + +@incollection{cgal:cclt-sgk3-15b +, author = "Pedro Machado Manh{\~a}es de Castro and Fr{\'e}d{\'e}ric Cazals and S{\'e}bastien Loriot and Monique Teillaud" +, title = "{3D} Spherical Geometry Kernel" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgSphericalKernel3Summary" +, year = 2015 +} + +@incollection{cgal:hs-chep2-15b +, author = "Susan Hert and Stefan Schirra" +, title = "{2D} Convex Hulls and Extreme Points" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgConvexHull2Summary" +, year = 2015 +} + 
+@incollection{cgal:hs-ch3-15b +, author = "Susan Hert and Stefan Schirra" +, title = "{3D} Convex Hulls" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgConvexHull3Summary" +, year = 2015 +} + +@incollection{cgal:hs-chdt3-15b +, author = "Susan Hert and Michael Seel" +, title = "{dD} Convex Hulls and Delaunay Triangulations" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgConvexHullDSummary" +, year = 2015 +} + +@incollection{cgal:gw-p2-15b +, author = "Geert-Jan Giezeman and Wieger Wesselink" +, title = "{2D} Polygons" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgPolygon2Summary" +, year = 2015 +} + +@incollection{cgal:fwzh-rbso2-15b +, author = "Efi Fogel and Ophir Setter and Ron Wein and Guy Zucker and Baruch Zukerman and Dan Halperin" +, title = "{2D} Regularized Boolean Set-Operations" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgBooleanSetOperations2Summary" +, year = 2015 +} + +@incollection{cgal:s-bonp2-15b +, author = "Michael Seel" +, title = "{2D} Boolean Operations on Nef Polygons" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgNef2Summary" +, year = 2015 +} + +@incollection{cgal:hk-bonpes2-15b +, author = "Peter Hachenberger and Lutz Kettner" +, title = "{2D} Boolean Operations on Nef Polygons Embedded on the Sphere" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgNefS2Summary" +, year = 2015 +} + +@incollection{cgal:h-pp2-15b +, author = "Susan Hert" +, title = "{2D} Polygon Partitioning" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgPolygonPartitioning2Summary" +, year = 2015 +} + +@incollection{cgal:c-sspo2-15b +, author = "Fernando Cacciola" +, title = "{2D} Straight Skeleton and Polygon Offsetting" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgStraightSkeleton2Summary" +, year = 2015 +} + +@incollection{cgal:w-rms2-15b +, author = "Ron Wein and Alon Baram and Eyal Flato and Efi Fogel and Michael Hemmer and Sebastian Morr" +, title = "{2D} Minkowski Sums" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgMinkowskiSum2Summary" +, year = 2015 +} + +@incollection{cgal:f-ps2-15b +, author = "Andreas Fabri" +, title = "{2D} Polyline Simplification" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgPolylineSimplification2Summary" +, year = 2015 +} + +@incollection{hhb-visibility-2-15b +, author = "Michael Hemmer and Kan Huang and Francisc Bungiu and Ning Xu" +, title = "{2D} 
Visibility Computation" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgVisibility_2Summary" +, year = 2015 +} + +@incollection{cgal:k-ps-15b +, author = "Lutz Kettner" +, title = "{3D} Polyhedral Surface" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgPolyhedronSummary" +, year = 2015 +} + +@incollection{cgal:k-hds-15b +, author = "Lutz Kettner" +, title = "Halfedge Data Structures" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgHDSSummary" +, year = 2015 +} + +@incollection{cgal:bsmf-sm-15b +, author = "Mario Botsch and Daniel Sieger and Philipp Moeller and Andreas Fabri" +, title = "Surface Mesh" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgSurfaceMeshSummary" +, year = 2015 +} + +@incollection{cgal:d-cm-15b +, author = "Guillaume Damiand" +, title = "Combinatorial Maps" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgCombinatorialMapsSummary" +, year = 2015 +} + +@incollection{cgal:d-lcc-12-15b +, author = "Guillaume Damiand" +, title = "Linear Cell Complex" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgLinearCellComplexSummary" +, year = 2015 +} + +@incollection{cgal:hk-bonp3-15b +, author = "Peter Hachenberger and Lutz Kettner" +, title = "{3D} Boolean Operations on Nef Polyhedra" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgNef3Summary" +, year = 2015 +} + +@incollection{cgal:h-emspe-15b +, author = "Peter Hachenberger" +, title = "Convex Decomposition of Polyhedra" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgConvexDecomposition3Summary" +, year = 2015 +} + +@incollection{cgal:h-msp3-15b +, author = "Peter Hachenberger" +, title = "{3D} Minkowski Sum of Polyhedra" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgMinkowskiSum3Summary" +, year = 2015 +} + +@incollection{cgal:wfzh-a2-15b +, author = "Ron Wein and Eric Berberich and Efi Fogel and Dan Halperin and Michael Hemmer and Oren Salzman and Baruch Zukerman" +, title = "{2D} Arrangements" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgArrangement2Summary" +, year = 2015 +} + +@incollection{cgal:wfz-ic2-15b +, author = "Baruch Zukerman and Ron Wein and Efi Fogel" +, title = "{2D} Intersection of Curves" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgIntersectionOfCurves2Summary" +, year = 2015 +} + 
+@incollection{cgal:p-sr2-15b +, author = "Eli Packer" +, title = "{2D} Snap Rounding" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgSnapRounding2Summary" +, year = 2015 +} + +@incollection{cgal:w-e2-15b +, author = "Ron Wein" +, title = "{2D} Envelopes" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgEnvelope2Summary" +, year = 2015 +} + +@incollection{cgal:mwz-e3-15b +, author = "Dan Halperin and Michal Meyerovitch and Ron Wein and Baruch Zukerman" +, title = "{3D} Envelopes" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgEnvelope3Summary" +, year = 2015 +} + +@incollection{cgal:y-t2-15b +, author = "Mariette Yvinec" +, title = "{2D} Triangulation" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgTriangulation2Summary" +, year = 2015 +} + +@incollection{cgal:py-tds2-15b +, author = "Sylvain Pion and Mariette Yvinec" +, title = "{2D} Triangulation Data Structure" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgTDS2Summary" +, year = 2015 +} + +@incollection{cgal:k-pt2-13-15b +, author = "Nico Kruithof" +, title = "{2D} Periodic Triangulations" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgPeriodic2Triangulation2Summary" +, year = 2015 +} + +@incollection{cgal:pt-t3-15b +, author = "Cl{\'e}ment Jamin and Sylvain Pion and Monique Teillaud" +, title = "{3D} Triangulations" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgTriangulation3Summary" +, year = 2015 +} + +@incollection{cgal:pt-tds3-15b +, author = "Cl{\'e}ment Jamin and Sylvain Pion and Monique Teillaud" +, title = "{3D} Triangulation Data Structure" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgTDS3Summary" +, year = 2015 +} + +@incollection{cgal:ct-pt3-15b +, author = "Manuel Caroli and Monique Teillaud" +, title = "{3D} Periodic Triangulations" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgPeriodic3Triangulation3Summary" +, year = 2015 +} + +@incollection{cgal:hdj-t-15b +, author = "Samuel Hornus and Olivier Devillers and Cl{\'e}ment Jamin" +, title = "{dD} Triangulations" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgTriangulationsSummary" +, year = 2015 +} + +@incollection{cgal:d-as2-15b +, author = "Tran Kai Frank Da" +, title = "{2D} Alpha Shapes" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = 
"http://doc.cgal.org/4.7/Manual/packages.html#PkgAlphaShape2Summary" +, year = 2015 +} + +@incollection{cgal:dy-as3-15b +, author = "Tran Kai Frank Da and S{\'e}bastien Loriot and Mariette Yvinec" +, title = "{3D} Alpha Shapes" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgAlphaShapes3Summary" +, year = 2015 +} + +@incollection{cgal:k-sdg2-15b +, author = "Menelaos Karavelas" +, title = "{2D} Segment Delaunay Graphs" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgSegmentDelaunayGraph2Summary" +, year = 2015 +} + +@incollection{cgal:cdp-sdglinf2-15b +, author = "Panagiotis Cheilaris and Sandeep Kumar Dey and Evanthia Papadopoulou" +, title = "L Infinity Segment Delaunay Graphs" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgSDGLinfSummary" +, year = 2015 +} + +@incollection{cgal:ky-ag2-15b +, author = "Menelaos Karavelas and Mariette Yvinec" +, title = "{2D} Apollonius Graphs (Delaunay Graphs of Disks)" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgApolloniusGraph2Summary" +, year = 2015 +} + +@incollection{cgal:k-vda2-15b +, author = "Menelaos Karavelas" +, title = "{2D} Voronoi Diagram Adaptor" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgVoronoiDiagramAdaptor2Summary" +, year = 2015 +} + +@incollection{cgal:r-ctm2-15b +, author = "Laurent Rineau" +, title = "{2D} Conforming Triangulations and Meshes" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgMesh2Summary" +, year = 2015 +} + +@incollection{cgal:ry-smg-15b +, author = "Laurent Rineau and Mariette Yvinec" +, title = "{3D} Surface Mesh Generation" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgSurfaceMesher3Summary" +, year = 2015 +} + +@incollection{cgal:asg-srps-15b +, author = "Pierre Alliez and Laurent Saboret and Ga{\"e}l Guennebaud" +, title = "Surface Reconstruction from Point Sets" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgSurfaceReconstructionFromPointSetsSummary" +, year = 2015 +} + +@incollection{cgal:ssr3-15b +, author = "Thijs van Lankveld" +, title = "Scale-Space Surface Reconstruction" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgScaleSpaceReconstruction3Summary" +, year = 2015 +} + +@incollection{cgal:dc-afsr-15b +, author = "Tran Kai Frank Da and David Cohen-Steiner" +, title = "Advancing Front Surface Reconstruction" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgAdvancingFrontSurfaceReconstructionSummary" +, 
year = 2015 +} + +@incollection{cgal:k-ssm3-15b +, author = "Nico Kruithof" +, title = "{3D} Skin Surface Meshing" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgSkinSurface3Summary" +, year = 2015 +} + +@incollection{cgal:rty-m3-15b +, author = "Pierre Alliez and Cl{\'e}ment Jamin and Laurent Rineau and St{\'e}phane Tayeb and Jane Tournois and Mariette Yvinec" +, title = "{3D} Mesh Generation" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgMesh_3Summary" +, year = 2015 +} + +@incollection{cgal:lty-pmp-15b +, author = "S{\'e}bastien Loriot and Jane Tournois and Ilker O. Yaz" +, title = "Polygon Mesh Processing" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgPolygonMeshProcessingSummary" +, year = 2015 +} + +@incollection{cgal:s-ssm2-15b +, author = "Le-Jeng Andy Shiue" +, title = "{3D} Surface Subdivision Methods" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgSurfaceSubdivisionMethods3Summary" +, year = 2015 +} + +@incollection{cgal:y-smsimpl-15b +, author = "Ilker O. Yaz and S{\'e}bastien Loriot" +, title = "Triangulated Surface Mesh Segmentation" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgSurfaceSegmentationSummary" +, year = 2015 +} + +@incollection{cgal:c-tsms-12-15b +, author = "Fernando Cacciola" +, title = "Triangulated Surface Mesh Simplification" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgSurfaceMeshSimplificationSummary" +, year = 2015 +} + +@incollection{cgal:lsxy-tsmd-15b +, author = "S{\'e}bastien Loriot and Olga Sorkine-Hornung and Yin Xu and Ilker O. 
Yaz" +, title = "Triangulated Surface Mesh Deformation" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgSurfaceModelingSummary" +, year = 2015 +} + +@incollection{cgal:sal-pptsm2-15b +, author = "Laurent Saboret and Pierre Alliez and Bruno L{\'e}vy" +, title = "Planar Parameterization of Triangulated Surface Meshes" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgSurfaceParameterizationSummary" +, year = 2015 +} + +@incollection{cgal:klcdv-tsmsp-15b +, author = "Stephen Kiazyk and S{\'e}bastien Loriot and {\'E}ric Colin de Verdi{\`e}re" +, title = "Triangulated Surface Mesh Shortest Paths" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgSurfaceMeshShortestPathSummary" +, year = 2015 +} + +@incollection{cgal:glt-tsms-15b +, author = "Xiang Gao and S{\'e}bastien Loriot and Andrea Tagliasacchi" +, title = "Triangulated Surface Mesh Skeletonization" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgMeanCurvatureSkeleton3Summary" +, year = 2015 +} + +@incollection{cgal:cp-arutsm-15b +, author = "Marc Pouget and Fr{\'e}d{\'e}ric Cazals" +, title = "Approximation of Ridges and Umbilics on Triangulated Surface Meshes" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgRidges_3Summary" +, year = 2015 +} + +@incollection{cgal:pc-eldp-15b +, author = "Marc Pouget and Fr{\'e}d{\'e}ric Cazals" +, title = "Estimation of Local Differential Properties of Point-Sampled Surfaces" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgJet_fitting_3Summary" +, year = 2015 +} + +@incollection{cgal:ass-psp-15b +, author = "Pierre Alliez and Cl{\'e}ment Jamin and Quentin M{\'e}rigot and Jocelyn Meyron and Laurent Saboret and Nader Salman and Shihao Wu" +, title = "Point Set Processing" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgPointSetProcessingSummary" +, year = 2015 +} + +@incollection{cgal:ovja-pssd-15b +, author = "Sven Oesau and Yannick Verdie and Cl{\'e}ment Jamin and Pierre Alliez" +, title = "Point Set Shape Detection" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgPointSetShapeDetection3Summary" +, year = 2015 +} + +@incollection{cgal:m-ps-15b +, author = "Abdelkrim Mebarki" +, title = "{2D} Placement of Streamlines" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgPlacementOfStreamlines2Summary" +, year = 2015 +} + +@incollection{cgal:b-ss2-15b +, author = "Matthias B{\"a}sken" +, title = "{2D} Range and Neighbor Search" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = 
"http://doc.cgal.org/4.7/Manual/packages.html#PkgPointSet2Summary" +, year = 2015 +} + +@incollection{cgal:f-isl-15b +, author = "Andreas Fabri" +, title = "Interval Skip List" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgIntervalSkipListSummary" +, year = 2015 +} + +@incollection{cgal:tf-ssd-15b +, author = "Hans Tangelder and Andreas Fabri" +, title = "{dD} Spatial Searching" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgSpatialSearchingDSummary" +, year = 2015 +} + +@incollection{cgal:n-rstd-15b +, author = "Gabriele Neyer" +, title = "{dD} Range and Segment Trees" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgRangeSegmentTreesDSummary" +, year = 2015 +} + +@incollection{cgal:kmz-isiobd-15b +, author = "Lutz Kettner and Andreas Meyer and Afra Zomorodian" +, title = "Intersecting Sequences of {dD} Iso-oriented Boxes" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgBoxIntersectionDSummary" +, year = 2015 +} + +@incollection{cgal:atw-aabb-15b +, author = "Pierre Alliez and St{\'e}phane Tayeb and Camille Wormser" +, title = "{3D} Fast Intersection and Distance Computation" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgAABB_treeSummary" +, year = 2015 +} + +@incollection{cgal:dd-ss-15b +, author = "Christophe Delage and Olivier Devillers" +, title = "Spatial Sorting" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgSpatialSortingSummary" +, year = 2015 +} + +@incollection{cgal:fghhs-bv-15b +, author = "Kaspar Fischer and Bernd G{\"a}rtner and Thomas Herrmann and Michael Hoffmann and Sven Sch{\"o}nherr" +, title = "Bounding Volumes" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgBoundingVolumesSummary" +, year = 2015 +} + +@incollection{cgal:hp-ia-15b +, author = "Michael Hoffmann and Eli Packer" +, title = "Inscribed Areas" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgInscribedAreasSummary" +, year = 2015 +} + +@incollection{cgal:fghhs-od-15b +, author = "Kaspar Fischer and Bernd G{\"a}rtner and Thomas Herrmann and Michael Hoffmann and Sven Sch{\"o}nherr" +, title = "Optimal Distances" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgOptimalDistancesSummary" +, year = 2015 +} + +@incollection{cgal:ap-pcad-15b +, author = "Pierre Alliez and Sylvain Pion and Ankit Gupta" +, title = "Principal Component Analysis" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgPrincipalComponentAnalysisDSummary" 
+, year = 2015 +} + +@incollection{cgal:f-i-15b +, author = "Julia Fl{\"o}totto" +, title = "{2D} and Surface Function Interpolation" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgInterpolation2Summary" +, year = 2015 +} + +@incollection{cgal:abha-gbc-15b +, author = "Dmitry Anisimov and David Bommes and Kai Hormann and Pierre Alliez" +, title = "{2D} Generalized Barycentric Coordinates" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgBarycentric_coordinates_2Summary" +, year = 2015 +} + +@incollection{cgal:r-kds-15b +, author = "Daniel Russel" +, title = "Kinetic Data Structures" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgKdsSummary" +, year = 2015 +} + +@incollection{cgal:r-kdsf-15b +, author = "Daniel Russel" +, title = "Kinetic Framework" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgKdsFrameworkSummary" +, year = 2015 +} + +@incollection{cgal:hkpw-se-15b +, author = "Michael Hoffmann and Lutz Kettner and Sylvain Pion and Ron Wein" +, title = "STL Extensions for {CGAL}" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgStlExtensionSummary" +, year = 2015 +} + +@incollection{cgal:cfw-cbgl-15b +, author = "Andreas Fabri and Fernando Cacciola and Philipp Moeller and Ron Wein" +, title = "{CGAL} and the {Boost} Graph Library" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgBGLSummary" +, year = 2015 +} + +@incollection{cgal:fs-cbpm-15b +, author = "Andreas Fabri and Laurent Saboret" +, title = "{CGAL} and {Boost} Property Maps" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgProperty_mapSummary" +, year = 2015 +} + +@incollection{cgal:dksy-hc-15b +, author = "Olivier Devillers and Lutz Kettner and Sylvain Pion and Michael Seel and Mariette Yvinec" +, title = "Handles and Circulators" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgHandlesAndCirculatorsSummary" +, year = 2015 +} + +@incollection{cgal:dhhk-gog-15b +, author = "Pedro M. M. 
de Castro and Olivier Devillers and Susan Hert and Michael Hoffmann and Lutz Kettner and Sven Sch{\"o}nherr and Alexandru Tifrea" +, title = "Geometric Object Generators" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgGeneratorsSummary" +, year = 2015 +} + +@incollection{cgal:kps-pthum-15b +, author = "Lutz Kettner and Sylvain Pion and Michael Seel" +, title = "Profiling tools, Hash Map, Union-find, Modifiers" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgProfilingToolsSummary" +, year = 2015 +} + +@incollection{cgal:fgk-ios-12-15b +, author = "Andreas Fabri and Geert-Jan Giezeman and Lutz Kettner" +, title = "IO Streams" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgIOstreamsSummary" +, year = 2015 +} + +@incollection{cgal:fp-gv-15b +, author = "Andreas Fabri and Sylvain Pion" +, title = "Geomview" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgGeomviewSummary" +, year = 2015 +} + +@incollection{cgal:fr-cqgvf-15b +, author = "Andreas Fabri and Laurent Rineau" +, title = "{CGAL} and the {Qt} Graphics View Framework" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgGraphicsViewSummary" +, year = 2015 +} + +@incollection{cgal:lp-gi-15b +, author = "Olivier Devillers and S{\'e}bastien Loriot and Sylvain Pion" +, title = "{CGAL} Ipelets" +, publisher = "{CGAL Editorial Board}" +, edition = "{4.7}" +, booktitle = "{CGAL} User and Reference Manual" +, url = "http://doc.cgal.org/4.7/Manual/packages.html#PkgCGALIpeletsSummary" +, year = 2015 +} diff --git a/src/Alpha_complex/test/CMakeLists.txt b/src/Alpha_complex/test/CMakeLists.txt index d7c13da0..fa24e1b1 100644 --- a/src/Alpha_complex/test/CMakeLists.txt +++ b/src/Alpha_complex/test/CMakeLists.txt @@ -18,8 +18,6 @@ if(CGAL_FOUND) add_executable ( AlphaComplexUT Alpha_complex_unit_test.cpp ) target_link_libraries(AlphaComplexUT ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) - add_executable ( cerr cerr.cpp ) - # Do not forget to copy test files in current binary dir file(COPY "${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) diff --git a/src/GudhUI/alpha_complex_persistence.cpp b/src/GudhUI/alpha_complex_persistence.cpp deleted file mode 100644 index 4f85459a..00000000 --- a/src/GudhUI/alpha_complex_persistence.cpp +++ /dev/null @@ -1,78 +0,0 @@ -#include -#include - - -#include - -// to construct a Delaunay_triangulation from a OFF file -#include -#include -#include - -#include "utils/Bar_code_persistence.h" - -void usage(char * const progName) { - std::cerr << "Usage: " << progName << " filename.off " << // alpha_square_max_value[double] " << - "coeff_field_characteristic[integer > 0] min_persistence[double >= -1.0]" << std::endl; - std::cerr << " i.e.: " << progName << " ../../data/points/alphacomplexdoc.off 60.0 2 0.02" << std::endl; - exit(-1); // ----- >> -} - -int main(int argc, char **argv) { - if (argc != 4) { - std::cerr << "Error: Number of arguments (" << 
argc << ") is not correct" << std::endl; - usage(argv[0]); - } - - QApplication qtapp(argc, argv); - - std::string off_file_name(argv[1]); - // double alpha_square_max_value = atof(argv[2]); - double alpha_square_max_value = 1e20; - int coeff_field_characteristic = atoi(argv[2]); // argv[3] - double min_persistence = atof(argv[3]); // argv[4] - - // ---------------------------------------------------------------------------- - // Init of an alpha complex from an OFF file - // ---------------------------------------------------------------------------- - typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Kernel; - Gudhi::alphacomplex::Alpha_complex alpha_complex_from_file(off_file_name, alpha_square_max_value); - - // ---------------------------------------------------------------------------- - // Display information about the alpha complex - // ---------------------------------------------------------------------------- - std::cout << "Alpha complex is of dimension " << alpha_complex_from_file.dimension() << - " - " << alpha_complex_from_file.num_simplices() << " simplices - " << - alpha_complex_from_file.num_vertices() << " vertices." << std::endl; - - // Sort the simplices in the order of the filtration - alpha_complex_from_file.initialize_filtration(); - - std::cout << "Simplex_tree dim: " << alpha_complex_from_file.dimension() << std::endl; - // Compute the persistence diagram of the complex - Gudhi::persistent_cohomology::Persistent_cohomology< Gudhi::alphacomplex::Alpha_complex, - Gudhi::persistent_cohomology::Field_Zp > pcoh(alpha_complex_from_file); - - std::cout << "coeff_field_characteristic " << coeff_field_characteristic << - " - min_persistence " << min_persistence << std::endl; - - // initializes the coefficient field for homology - pcoh.init_coefficients(coeff_field_characteristic); - - pcoh.compute_persistent_cohomology(min_persistence); - - pcoh.output_diagram(); - - std::vector> persistence_vector; - pcoh.get_persistence(persistence_vector); - - Bar_code_persistence bc_persistence; - - for (auto persistence : persistence_vector) { - bc_persistence.insert(persistence.first, persistence.second); - } - - bc_persistence.show(); - - return qtapp.exec(); -} diff --git a/src/GudhUI/utils/Bar_code_persistence.h b/src/GudhUI/utils/Bar_code_persistence.h index a1a46ea8..a4cd8156 100644 --- a/src/GudhUI/utils/Bar_code_persistence.h +++ b/src/GudhUI/utils/Bar_code_persistence.h @@ -39,7 +39,7 @@ class Bar_code_persistence { max_death = death; } - void show() { + void show(const std::string& window_title) { // Create a view, put a scene in it QGraphicsView * view = new QGraphicsView(); QGraphicsScene * scene = new QGraphicsScene(); @@ -78,6 +78,7 @@ class Bar_code_persistence { QGraphicsTextItem* dimText = scene->addText(scale_value, QFont("Helvetica", 8)); dimText->setPos(scale - (3.0 * scale_value.size()), height + 9.0 * (modulo % 2)); } + view->setWindowTitle(window_title.c_str()); // Show the view view->show(); } diff --git a/src/GudhUI/utils/Persistence_compute.h b/src/GudhUI/utils/Persistence_compute.h index 0b9961d3..1f04cc6b 100644 --- a/src/GudhUI/utils/Persistence_compute.h +++ b/src/GudhUI/utils/Persistence_compute.h @@ -46,10 +46,6 @@ struct Persistence_params { * Show persistence into output stream */ template class Persistence_compute { - private: - SkBlComplex& complex_; - std::ostream& stream_; - public: typedef typename SkBlComplex::Vertex_handle Vertex_handle; typedef typename SkBlComplex::Edge_handle Edge_handle; @@ -61,9 +57,7 @@ template class 
Persistence_compute { * double threshold * int p for coefficient Z_p */ - Persistence_compute(SkBlComplex& complex, std::ostream& stream, const Persistence_params& params) : - // double threshold = 0.5,unsigned dim_max = 8): - complex_(complex), stream_(stream) { + Persistence_compute(SkBlComplex& complex, std::ostream& stream, const Persistence_params& params) { // for now everything is copied, todo boost adapt iterators to points of SkBlComplex instead of copying to an // initial vector typedef std::vector Point_t; @@ -87,10 +81,11 @@ template class Persistence_compute { pcoh.init_coefficients(params.p); // put params.min_pers pcoh.compute_persistent_cohomology(params.min_pers); - stream_ << "persistence: \n"; - stream_ << "p dimension birth death: \n"; + stream << "persistence: \n"; + stream << "p dimension birth death: \n"; - pcoh.output_diagram(stream_); + pcoh.output_diagram(stream); + } }; diff --git a/src/Persistent_cohomology/example/CMakeLists.txt b/src/Persistent_cohomology/example/CMakeLists.txt index eb4ee3e3..9e96adc0 100644 --- a/src/Persistent_cohomology/example/CMakeLists.txt +++ b/src/Persistent_cohomology/example/CMakeLists.txt @@ -22,63 +22,61 @@ add_executable(persistence_from_file persistence_from_file.cpp) target_link_libraries(persistence_from_file ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY}) add_test(persistence_from_file_3_2_0 ${CMAKE_CURRENT_BINARY_DIR}/persistence_from_file ${CMAKE_SOURCE_DIR}/data/points/bunny_5000.st -p 2 -m 0) add_test(persistence_from_file_3_3_100 ${CMAKE_CURRENT_BINARY_DIR}/persistence_from_file ${CMAKE_SOURCE_DIR}/data/points/bunny_5000.st -p 3 -m 100) - -if(GMPXX_FOUND AND GMP_FOUND) - message("GMPXX_LIBRARIES = ${GMPXX_LIBRARIES}") - message("GMP_LIBRARIES = ${GMP_LIBRARIES}") - - add_executable(rips_multifield_persistence rips_multifield_persistence.cpp ) - target_link_libraries(rips_multifield_persistence ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY} ${GMPXX_LIBRARIES} ${GMP_LIBRARIES}) - add_test(rips_multifield_persistence_2_71 ${CMAKE_CURRENT_BINARY_DIR}/rips_multifield_persistence ${CMAKE_SOURCE_DIR}/data/points/Kl.txt -r 0.25 -d 3 -p 2 -q 71 -m 100) - - add_executable ( performance_rips_persistence performance_rips_persistence.cpp ) - target_link_libraries(performance_rips_persistence ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY} ${GMPXX_LIBRARIES} ${GMP_LIBRARIES}) - - if(CGAL_FOUND) - add_executable(alpha_shapes_persistence alpha_shapes_persistence.cpp) - target_link_libraries(alpha_shapes_persistence ${Boost_SYSTEM_LIBRARY} ${GMPXX_LIBRARIES} ${GMP_LIBRARIES} ${CGAL_LIBRARY}) - add_test(alpha_shapes_persistence_2_0_5 ${CMAKE_CURRENT_BINARY_DIR}/alpha_shapes_persistence ${CMAKE_SOURCE_DIR}/data/points/bunny_5000 2 0.5) - #add_test(alpha_shapes_persistence_3_3_100 ${CMAKE_CURRENT_BINARY_DIR}/alpha_shapes_persistence ${CMAKE_SOURCE_DIR}/data/points/bunny_5000.st -p 3 -m 100) - - - - if (NOT CGAL_VERSION VERSION_LESS 4.7.0) - message(STATUS "CGAL version: ${CGAL_VERSION}.") - - include( ${CGAL_USE_FILE} ) - # In CMakeLists.txt, when include(${CGAL_USE_FILE}), CXX_FLAGS are overwritten. - # cf. http://doc.cgal.org/latest/Manual/installation.html#title40 - # A workaround is to add "-std=c++11" again. - # A fix would be to use https://cmake.org/cmake/help/v3.1/prop_gbl/CMAKE_CXX_KNOWN_FEATURES.html - # or even better https://cmake.org/cmake/help/v3.1/variable/CMAKE_CXX_STANDARD.html - # but it implies to use cmake version 3.1 at least. 
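  # (Aside for this log, not part of the patch: on CMake >= 3.1 the cleaner fix alluded
  #  to above is to set CMAKE_CXX_STANDARD to 11, together with CMAKE_CXX_STANDARD_REQUIRED,
  #  once near the top of the project, so that -std=c++11 no longer has to be re-appended
  #  after include(${CGAL_USE_FILE}) overwrites CMAKE_CXX_FLAGS.)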
- if(NOT MSVC) - include(CheckCXXCompilerFlag) - CHECK_CXX_COMPILER_FLAG(-std=c++11 COMPILER_SUPPORTS_CXX11) - if(COMPILER_SUPPORTS_CXX11) - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11") +if(GMPXX_FOUND AND GMP_FOUND) + message("GMPXX_LIBRARIES = ${GMPXX_LIBRARIES}") + message("GMP_LIBRARIES = ${GMP_LIBRARIES}") + + add_executable(rips_multifield_persistence rips_multifield_persistence.cpp ) + target_link_libraries(rips_multifield_persistence ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY} ${GMPXX_LIBRARIES} ${GMP_LIBRARIES}) + add_test(rips_multifield_persistence_2_71 ${CMAKE_CURRENT_BINARY_DIR}/rips_multifield_persistence ${CMAKE_SOURCE_DIR}/data/points/Kl.txt -r 0.25 -d 3 -p 2 -q 71 -m 100) + + add_executable ( performance_rips_persistence performance_rips_persistence.cpp ) + target_link_libraries(performance_rips_persistence ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY} ${GMPXX_LIBRARIES} ${GMP_LIBRARIES}) + + if(CGAL_FOUND) + add_executable(alpha_shapes_persistence alpha_shapes_persistence.cpp) + target_link_libraries(alpha_shapes_persistence ${Boost_SYSTEM_LIBRARY} ${GMPXX_LIBRARIES} ${GMP_LIBRARIES} ${CGAL_LIBRARY}) + add_test(alpha_shapes_persistence_2_0_5 ${CMAKE_CURRENT_BINARY_DIR}/alpha_shapes_persistence ${CMAKE_SOURCE_DIR}/data/points/bunny_5000 2 0.5) + #add_test(alpha_shapes_persistence_3_3_100 ${CMAKE_CURRENT_BINARY_DIR}/alpha_shapes_persistence ${CMAKE_SOURCE_DIR}/data/points/bunny_5000.st -p 3 -m 100) + + if (NOT CGAL_VERSION VERSION_LESS 4.7.0) + message(STATUS "CGAL version: ${CGAL_VERSION}.") + + include( ${CGAL_USE_FILE} ) + # In CMakeLists.txt, when include(${CGAL_USE_FILE}), CXX_FLAGS are overwritten. + # cf. http://doc.cgal.org/latest/Manual/installation.html#title40 + # A workaround is to add "-std=c++11" again. + # A fix would be to use https://cmake.org/cmake/help/v3.1/prop_gbl/CMAKE_CXX_KNOWN_FEATURES.html + # or even better https://cmake.org/cmake/help/v3.1/variable/CMAKE_CXX_STANDARD.html + # but it implies to use cmake version 3.1 at least. + if(NOT MSVC) + include(CheckCXXCompilerFlag) + CHECK_CXX_COMPILER_FLAG(-std=c++11 COMPILER_SUPPORTS_CXX11) + if(COMPILER_SUPPORTS_CXX11) + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11") + endif() endif() - endif() - # - End of workaround + # - End of workaround - find_package(Eigen3 3.1.0) - if (EIGEN3_FOUND) - message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") - include( ${EIGEN3_USE_FILE} ) + find_package(Eigen3 3.1.0) + if (EIGEN3_FOUND) + message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") + include( ${EIGEN3_USE_FILE} ) - add_executable (alphacomplexpersistence alpha_complex_persistence.cpp) - target_link_libraries(alphacomplexpersistence ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) + add_executable (alpha_complex_persistence alpha_complex_persistence.cpp) + target_link_libraries(alpha_complex_persistence ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY}) + else() + message(WARNING "Eigen3 not found. Version 3.1.0 is required for Alpha shapes feature.") + endif() else() - message(WARNING "Eigen3 not found. Version 3.1.0 is required for Alpha shapes feature.") - endif() + message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile Alpha shapes feature. Version 4.6.0 is required.") + endif () else() - message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile Alpha shapes feature. 
Version 4.6.0 is required.") - endif () - - - - endif() + # message(WARNING "CGAL not found.") + endif() +else() + # message(WARNING "GMP not found.") endif() diff --git a/src/Persistent_cohomology/example/alpha_complex_persistence.cpp b/src/Persistent_cohomology/example/alpha_complex_persistence.cpp index fbadf673..0dabdeac 100644 --- a/src/Persistent_cohomology/example/alpha_complex_persistence.cpp +++ b/src/Persistent_cohomology/example/alpha_complex_persistence.cpp @@ -1,34 +1,35 @@ #include #include +#include + // to construct a Delaunay_triangulation from a OFF file #include #include #include -void usage(char * const progName) { - std::cerr << "Usage: " << progName << " filename.off alpha_square_max_value[double] " << - "coeff_field_characteristic[integer > 0] min_persistence[double >= -1.0]" << std::endl; - std::cerr << " i.e.: " << progName << " ../../data/points/alphacomplexdoc.off 60.0 2 0.02" << std::endl; - exit(-1); // ----- >> -} +void program_options(int argc, char * argv[] + , std::string & off_file_points + , std::string & output_file_diag + , Filtration_value & alpha_square_max_value + , int & coeff_field_characteristic + , Filtration_value & min_persistence); int main(int argc, char **argv) { - if (argc != 5) { - std::cerr << "Error: Number of arguments (" << argc << ") is not correct" << std::endl; - usage(argv[0]); - } + std::string off_file_points; + std::string output_file_diag; + Filtration_value alpha_square_max_value; + int coeff_field_characteristic; + Filtration_value min_persistence; + + program_options(argc, argv, off_file_points, output_file_diag, alpha_square_max_value, coeff_field_characteristic, min_persistence); - std::string off_file_name(argv[1]); - double alpha_square_max_value = atof(argv[2]); - int coeff_field_characteristic = atoi(argv[3]); - double min_persistence = atof(argv[4]); // ---------------------------------------------------------------------------- // Init of an alpha complex from an OFF file // ---------------------------------------------------------------------------- typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Kernel; - Gudhi::alphacomplex::Alpha_complex alpha_complex_from_file(off_file_name, alpha_square_max_value); + Gudhi::alphacomplex::Alpha_complex alpha_complex_from_file(off_file_points, alpha_square_max_value); // ---------------------------------------------------------------------------- // Display information about the alpha complex @@ -49,7 +50,66 @@ int main(int argc, char **argv) { pcoh.compute_persistent_cohomology(min_persistence); - pcoh.output_diagram(); + // Output the diagram in filediag + if (output_file_diag.empty()) { + pcoh.output_diagram(); + } else { + std::cout << "Result in file: " << output_file_diag << std::endl; + std::ofstream out(output_file_diag); + pcoh.output_diagram(out); + out.close(); + } return 0; } + +void program_options(int argc, char * argv[] + , std::string & off_file_points + , std::string & output_file_diag + , Filtration_value & alpha_square_max_value + , int & coeff_field_characteristic + , Filtration_value & min_persistence) { + namespace po = boost::program_options; + po::options_description hidden("Hidden options"); + hidden.add_options() + ("input-file", po::value(&off_file_points), + "Name of file containing a point set. Format is one point per line: X1 ... 
Xd "); + + po::options_description visible("Allowed options", 100); + visible.add_options() + ("help,h", "produce help message") + ("output-file,o", po::value(&output_file_diag)->default_value(std::string()), + "Name of file in which the persistence diagram is written. Default print in std::cout") + ("max-alpha-square-value,r", po::value(&alpha_square_max_value)->default_value(std::numeric_limits::infinity()), + "Maximal alpha square value for the Alpha complex construction.") + ("field-charac,p", po::value(&coeff_field_characteristic)->default_value(11), + "Characteristic p of the coefficient field Z/pZ for computing homology.") + ("min-persistence,m", po::value(&min_persistence), + "Minimal lifetime of homology feature to be recorded. Default is 0. Enter a negative value to see zero length intervals"); + + po::positional_options_description pos; + pos.add("input-file", 1); + + po::options_description all; + all.add(visible).add(hidden); + + po::variables_map vm; + po::store(po::command_line_parser(argc, argv). + options(all).positional(pos).run(), vm); + po::notify(vm); + + if (vm.count("help") || !vm.count("input-file")) { + std::cout << std::endl; + std::cout << "Compute the persistent homology with coefficient field Z/pZ \n"; + std::cout << "of an Alpha complex defined on a set of input points.\n \n"; + std::cout << "The output diagram contains one bar per line, written with the convention: \n"; + std::cout << " p dim b d \n"; + std::cout << "where dim is the dimension of the homological feature,\n"; + std::cout << "b and d are respectively the birth and death of the feature and \n"; + std::cout << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl; + + std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl; + std::cout << visible << std::endl; + std::abort(); + } +} diff --git a/src/Persistent_cohomology/example/rips_persistence.cpp b/src/Persistent_cohomology/example/rips_persistence.cpp index 9b1ef42f..fa0449a8 100644 --- a/src/Persistent_cohomology/example/rips_persistence.cpp +++ b/src/Persistent_cohomology/example/rips_persistence.cpp @@ -30,6 +30,7 @@ #include #include +#include // infinity using namespace Gudhi; using namespace Gudhi::persistent_cohomology; @@ -114,7 +115,7 @@ void program_options(int argc, char * argv[] ("help,h", "produce help message") ("output-file,o", po::value(&filediag)->default_value(std::string()), "Name of file in which the persistence diagram is written. 
Default print in std::cout") - ("max-edge-length,r", po::value(&threshold)->default_value(0), + ("max-edge-length,r", po::value(&threshold)->default_value(std::numeric_limits::infinity()), "Maximal length of an edge for the Rips complex construction.") ("cpx-dimension,d", po::value(&dim_max)->default_value(1), "Maximal dimension of the Rips complex we want to compute.") diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree.h b/src/Simplex_tree/include/gudhi/Simplex_tree.h index 4b04e75a..d4f9aeae 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree.h @@ -1160,43 +1160,28 @@ std::cout << "prune_above_filtration - filtration=" << filtration << std::endl; threshold_ = filtration; // Initialize filtration_vect_ if required if (filtration_vect_.empty()) { -std::cout << "prune_above_filtration - initialize_filtration" << std::endl; initialize_filtration(); } - -std::cout << "prune_above_filtration - after initialize_filtration "; -for(auto sh : filtration_vect_) { -for (auto vertex : simplex_vertex_range(sh)) { -std::cout << (int) vertex << ", "; -} -std::cout << " - filtration=" << sh->second.filtration() << " - " << &(sh->second) << std::endl; -} - + std::vector> simplex_list_to_removed; // Loop in reverse mode until threshold is reached - auto f_simplex = filtration_vect_.rbegin(); - for (; (f_simplex != filtration_vect_.rend()) && ((*f_simplex)->second.filtration() > threshold_); f_simplex++) { - -std::cout << "prune_above_filtration - remove "; -for (auto vertex : simplex_vertex_range(*f_simplex)) { -std::cout << (int) vertex << ", "; -} -std::cout << " - " << &((*f_simplex)->second) << std::endl; - - remove_maximal_simplex(*f_simplex); + // Do not erase while looping, because removing is shifting data in a flat_map + for (auto f_simplex = filtration_vect_.rbegin(); + (f_simplex != filtration_vect_.rend()) && ((*f_simplex)->second.filtration() > threshold_); + f_simplex++) { + std::vector simplex_to_remove; + for (auto vertex : simplex_vertex_range(*f_simplex)) + simplex_to_remove.insert(simplex_to_remove.begin(), vertex); + simplex_list_to_removed.push_back(simplex_to_remove); } -std::cout << "prune_above_filtration - remove STOPPED ON "; -for (auto vertex : simplex_vertex_range(*f_simplex)) { -std::cout << (int) vertex << ", "; -} -std::cout << " - filtration=" << (*f_simplex)->second.filtration() << " - " << &(*f_simplex->second) << std::endl; - if (f_simplex != filtration_vect_.rbegin()) { - // Do not forget to update filtration_vect_ - resize is enough - std::size_t new_size = filtration_vect_.size() - (f_simplex - filtration_vect_.rbegin()); -std::cout << "prune_above_filtration - resize" << new_size << std::endl; - filtration_vect_.resize(new_size); + for (auto simplex_to_remove : simplex_list_to_removed) { + Simplex_handle sh = find_simplex(simplex_to_remove); + if (sh != null_simplex()) + remove_maximal_simplex(sh); } - + // Re-initialize filtration_vect_ if dta were removed, because removing is shifting data in a flat_map + if (simplex_list_to_removed.size() > 0) + initialize_filtration(); } } } @@ -1205,6 +1190,7 @@ std::cout << "prune_above_filtration - resize" << new_size << std::endl; * @param[in] sh Simplex handle on the maximal simplex to remove. * \pre Please check the simplex has no coface before removing it. * \warning In debug mode, the exception std::invalid_argument is thrown if sh has children. + * \warning Be aware that removing is shifting data in a flat_map (initialize_filtration to be done). 
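To make this warning concrete, a minimal sketch for readers of this log (not part of the patch; it assumes the default Simplex_tree options and reuses the brace-list call style of the unit tests below):

    // given #include <gudhi/Simplex_tree.h> and #include <iostream>
    Gudhi::Simplex_tree<> st;
    st.insert_simplex({0}, 0.0);
    st.insert_simplex({1}, 0.0);
    st.insert_simplex({0, 1}, 2.0);
    st.remove_maximal_simplex(st.find({0, 1}));   // the edge has no coface, so removal is allowed
    st.initialize_filtration();                   // per the warning above: removal shifts data in the flat_map
    for (auto sh : st.filtration_simplex_range())
      std::cout << st.filtration(sh) << std::endl;  // only the two vertices remain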
*/ void remove_maximal_simplex(Simplex_handle sh) { // Guarantee the simplex has no children @@ -1213,49 +1199,18 @@ std::cout << "prune_above_filtration - resize" << new_size << std::endl; // Simplex is a leaf, it means the child is the Siblings owning the leaf Siblings* child = sh->second.children(); + if ((child->size() > 1) || (child == root())) { // Not alone, just remove it from members // Special case when child is the root of the simplex tree, just remove it from members -std::cout << "remove_maximal_simplex - members removal" << std::endl; child->erase(sh->first); } else { // Sibling is emptied : must be deleted, and its parent must point on his own Sibling -std::cout << "remove_maximal_simplex - members is empty" << std::endl; child->oncles()->members().at(child->parent()).assign_children(child->oncles()); delete child; } } -/***************************************************************************************************************/ - public: - /** \brief Prints the simplex_tree hierarchically. - * Since it prints the vertices recursively, one can watch its tree shape. - */ - void print_tree() { - for (auto sh = root_.members().begin(); sh != root_.members().end(); ++sh) { - std::cout << sh->first << " "; - if (has_children(sh)) { - std::cout << "("; - rec_print(sh->second.children()); - std::cout << ")"; - } - std::cout << std::endl; - } - } - - /** \brief Recursively prints the simplex_tree, using depth first search. */ - private: - void rec_print(Siblings * sib) { - for (auto sh = sib->members().begin(); sh != sib->members().end(); ++sh) { - std::cout << " " << sh->first << " "; - if (has_children(sh)) { - std::cout << "("; - rec_print(sh->second.children()); - std::cout << ")"; - } - } - } -/*****************************************************************************************************************/ private: Vertex_handle null_vertex_; /** \brief Upper bound on the filtration values of the simplices.*/ diff --git a/src/Simplex_tree/test/simplex_tree_unit_test.cpp b/src/Simplex_tree/test/simplex_tree_unit_test.cpp index f6bd5411..0d73d347 100644 --- a/src/Simplex_tree/test/simplex_tree_unit_test.cpp +++ b/src/Simplex_tree/test/simplex_tree_unit_test.cpp @@ -351,7 +351,7 @@ BOOST_AUTO_TEST_CASE(simplex_tree_insertion) { // Display the Simplex_tree - Can not be done in the middle of 2 inserts std::cout << "The complex contains " << st.num_simplices() << " simplices" << std::endl; std::cout << " - dimension " << st.dimension() << " - filtration " << st.filtration() << std::endl; - std::cout << std::endl << std::endl << "Iterator on Simplices in the filtration, with [filtration value]:" << std::endl; + std::cout << "Iterator on Simplices in the filtration, with [filtration value]:" << std::endl; for (auto f_simplex : st.filtration_simplex_range()) { std::cout << " " << "[" << st.filtration(f_simplex) << "] "; for (auto vertex : st.simplex_vertex_range(f_simplex)) { @@ -549,7 +549,7 @@ BOOST_AUTO_TEST_CASE(NSimplexAndSubfaces_tree_insertion) { // Display the Simplex_tree - Can not be done in the middle of 2 inserts std::cout << "The complex contains " << st.num_simplices() << " simplices" << std::endl; std::cout << " - dimension " << st.dimension() << " - filtration " << st.filtration() << std::endl; - std::cout << std::endl << std::endl << "Iterator on Simplices in the filtration, with [filtration value]:" << std::endl; + std::cout << "Iterator on Simplices in the filtration, with [filtration value]:" << std::endl; for (auto f_simplex : st.filtration_simplex_range()) { 
std::cout << " " << "[" << st.filtration(f_simplex) << "] "; for (auto vertex : st.simplex_vertex_range(f_simplex)) { @@ -805,7 +805,7 @@ struct MyOptions : Simplex_tree_options_full_featured { }; typedef Simplex_tree miniST; -/*BOOST_AUTO_TEST_CASE(remove_maximal_simplex) { +BOOST_AUTO_TEST_CASE(remove_maximal_simplex) { std::cout << "********************************************************************" << std::endl; std::cout << "REMOVE MAXIMAL SIMPLEX" << std::endl; @@ -887,7 +887,7 @@ typedef Simplex_tree miniST; st.remove_maximal_simplex(st.find({7})); BOOST_CHECK(st == st_wo_seven); -}*/ +} BOOST_AUTO_TEST_CASE(prune_above_filtration) { std::cout << "********************************************************************" << std::endl; @@ -939,10 +939,10 @@ BOOST_AUTO_TEST_CASE(prune_above_filtration) { st.prune_above_filtration(5.0); BOOST_CHECK(st == st_complete); - // Display the Simplex_tree - Can not be done in the middle of 2 inserts + // Display the Simplex_tree std::cout << "The complex contains " << st.num_simplices() << " simplices" << std::endl; std::cout << " - dimension " << st.dimension() << " - filtration " << st.filtration() << std::endl; - std::cout << std::endl << std::endl << "Iterator on Simplices in the filtration, with [filtration value]:" << std::endl; + std::cout << "Iterator on Simplices in the filtration, with [filtration value]:" << std::endl; for (auto f_simplex : st.filtration_simplex_range()) { std::cout << " " << "[" << st.filtration(f_simplex) << "] "; for (auto vertex : st.simplex_vertex_range(f_simplex)) { @@ -955,35 +955,45 @@ BOOST_AUTO_TEST_CASE(prune_above_filtration) { // Set the st_pruned filtration for operator== st_pruned.set_filtration(2.5); st.prune_above_filtration(2.5); - /*BOOST_CHECK(st == st_pruned); + BOOST_CHECK(st == st_pruned); + + // Display the Simplex_tree + std::cout << "The complex pruned at 2.5 contains " << st.num_simplices() << " simplices" << std::endl; + std::cout << " - dimension " << st.dimension() << " - filtration " << st.filtration() << std::endl; + std::cout << "Iterator on Simplices in the filtration, with [filtration value]:" << std::endl; + for (auto f_simplex : st.filtration_simplex_range()) { + std::cout << " " << "[" << st.filtration(f_simplex) << "] "; + for (auto vertex : st.simplex_vertex_range(f_simplex)) { + std::cout << (int) vertex << " "; + } + std::cout << std::endl; + } st_pruned.set_filtration(2.0); st.prune_above_filtration(2.0); BOOST_CHECK(st == st_pruned); -*/ -/* std::cout << "The complex contains " << st.num_simplices() << " simplices --------------------------" << std::endl; - std::cout << " - dimension " << st.dimension() << " - filtration " << st.filtration() << std::endl; - st.print_tree(); - std::cout << "The pruned complex contains " << st_pruned.num_simplices() << " simplices --------------------------" << std::endl; - std::cout << " - dimension " << st_pruned.dimension() << " - filtration " << st_pruned.filtration() << std::endl; - st_pruned.print_tree(); - typeST st_empty; // FIXME st_empty.set_dimension(3); st.prune_above_filtration(0.0); - */ - /*BOOST_CHECK(st == st_empty); + + // Display the Simplex_tree + std::cout << "The complex pruned at 0.0 contains " << st.num_simplices() << " simplices" << std::endl; + std::cout << " - dimension " << st.dimension() << " - filtration " << st.filtration() << std::endl; + std::cout << "Iterator on Simplices in the filtration, with [filtration value]:" << std::endl; + for (auto f_simplex : st.filtration_simplex_range()) { + std::cout << " " << "[" 
<< st.filtration(f_simplex) << "] "; + for (auto vertex : st.simplex_vertex_range(f_simplex)) { + std::cout << (int) vertex << " "; + } + std::cout << std::endl; + } + + BOOST_CHECK(st == st_empty); // Test case to the limit st.prune_above_filtration(-1.0); st_empty.set_filtration(-1.0); BOOST_CHECK(st == st_empty); -*/ } - -/*BOOST_AUTO_TEST_CASE(sanitizer) { - int a[2] = {1, 0}; - int b=a[2]; -}*/ diff --git a/src/common/include/gudhi/distance_functions.h b/src/common/include/gudhi/distance_functions.h index e5c79ded..cd518581 100644 --- a/src/common/include/gudhi/distance_functions.h +++ b/src/common/include/gudhi/distance_functions.h @@ -23,6 +23,8 @@ #ifndef DISTANCE_FUNCTIONS_H_ #define DISTANCE_FUNCTIONS_H_ +#include // for std::sqrt + /* Compute the Euclidean distance between two Points given * by a range of coordinates. The points are assumed to have * the same dimension. */ @@ -35,7 +37,7 @@ double euclidean_distance(Point &p1, Point &p2) { double tmp = *it1 - *it2; dist += tmp*tmp; } - return sqrt(dist); + return std::sqrt(dist); } #endif // DISTANCE_FUNCTIONS_H_ diff --git a/src/common/include/gudhi/reader_utils.h b/src/common/include/gudhi/reader_utils.h index e05714c7..da2c2c36 100644 --- a/src/common/include/gudhi/reader_utils.h +++ b/src/common/include/gudhi/reader_utils.h @@ -58,7 +58,9 @@ inline void read_points(std::string file_name, std::vector< std::vector< double while (iss >> x) { point.push_back(x); } - points.push_back(point); + // Check for empty lines + if (!point.empty()) + points.push_back(point); } in_file.close(); } -- cgit v1.2.3 From 1967dc923b9bb24edd52a848b7991539779dbe8b Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Wed, 9 Mar 2016 07:01:55 +0000 Subject: Add header and footer to generated documentation in order to fit with the web site. Add a package overview on top page. Remove text that was redundant with the web site. 
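A minimal sketch tying together the two helpers touched in this hunk, assuming euclidean_distance and read_points are callable unqualified as they appear in the headers above; "points.txt" is a made-up file name (one point per line, coordinates separated by whitespace):

#include <gudhi/distance_functions.h>
#include <gudhi/reader_utils.h>
#include <iostream>
#include <vector>

int main() {
  std::vector<double> p1 = {0.0, 0.0};
  std::vector<double> p2 = {3.0, 4.0};
  std::cout << "d(p1, p2) = " << euclidean_distance(p1, p2) << std::endl;  // prints 5

  // "points.txt" is a placeholder name; blank lines in the file are now skipped.
  std::vector<std::vector<double>> points;
  read_points("points.txt", points);
  std::cout << points.size() << " points read" << std::endl;
  return 0;
}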
git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/Doxygen_for_GUDHI_1.3.0@1035 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 95d6c0e2f0ed8389620c27c03e37d441f93787ee --- CMakeGUDHIVersion.txt | 2 +- .../doc/sphere_contraction_representation.png | Bin 0 -> 44839 bytes src/Contraction/include/gudhi/Edge_contraction.h | 2 +- src/Doxyfile | 26 +- .../include/gudhi/Persistent_cohomology.h | 9 - .../doc/Simplex_tree_representation.png | Bin 0 -> 39217 bytes src/Simplex_tree/include/gudhi/Simplex_tree.h | 1 + src/common/doc/footer.html | 29 + src/common/doc/header.html | 82 ++ src/common/doc/main_page.h | 109 +- src/common/doc/stylesheet.css | 1367 ++++++++++++++++++++ 11 files changed, 1580 insertions(+), 47 deletions(-) create mode 100644 src/Contraction/doc/sphere_contraction_representation.png create mode 100644 src/Simplex_tree/doc/Simplex_tree_representation.png create mode 100644 src/common/doc/footer.html create mode 100644 src/common/doc/header.html create mode 100644 src/common/doc/stylesheet.css (limited to 'src/common') diff --git a/CMakeGUDHIVersion.txt b/CMakeGUDHIVersion.txt index 19d10535..20767813 100644 --- a/CMakeGUDHIVersion.txt +++ b/CMakeGUDHIVersion.txt @@ -1,5 +1,5 @@ set (GUDHI_MAJOR_VERSION 1) -set (GUDHI_MINOR_VERSION 2) +set (GUDHI_MINOR_VERSION 3) set (GUDHI_PATCH_VERSION 0) set(GUDHI_VERSION ${GUDHI_MAJOR_VERSION}.${GUDHI_MINOR_VERSION}.${GUDHI_PATCH_VERSION}) diff --git a/src/Contraction/doc/sphere_contraction_representation.png b/src/Contraction/doc/sphere_contraction_representation.png new file mode 100644 index 00000000..edf37bf3 Binary files /dev/null and b/src/Contraction/doc/sphere_contraction_representation.png differ diff --git a/src/Contraction/include/gudhi/Edge_contraction.h b/src/Contraction/include/gudhi/Edge_contraction.h index ee3e3de1..73236db9 100644 --- a/src/Contraction/include/gudhi/Edge_contraction.h +++ b/src/Contraction/include/gudhi/Edge_contraction.h @@ -46,7 +46,7 @@ namespace contraction { The purpose of this package is to offer a user-friendly interface for edge contraction simplification of huge simplicial complexes. It uses the \ref skbl data-structure whose size remains small during simplification of most used geometrical complexes of topological data analysis such as the Rips or the Delaunay complexes. In practice, the -size of this data-structure is even uch lower than the total number of simplices. +size of this data-structure is even much lower than the total number of simplices. The edge contraction operation consists in identifying two vertices of a simplicial complex. A lot of algorithms have been developed in computer graphics that allows to reduce efficiently the size of 2-dimensional simplicial complexes diff --git a/src/Doxyfile b/src/Doxyfile index faa0d3fe..25f4f1ac 100644 --- a/src/Doxyfile +++ b/src/Doxyfile @@ -32,19 +32,19 @@ DOXYFILE_ENCODING = UTF-8 # title of most generated pages and in a few other places. # The default value is: My Project. -PROJECT_NAME = "Gudhi" +PROJECT_NAME = "GUDHI" # The PROJECT_NUMBER tag can be used to enter a project or revision number. This # could be handy for archiving the generated documentation or if some version # control system is used. -PROJECT_NUMBER = "1.2.0" +PROJECT_NUMBER = "1.3.0" # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a # quick idea about the purpose of the project. Keep the description short. 
-PROJECT_BRIEF = +PROJECT_BRIEF = "C++ library for Topological Data Analysis (TDA) and Higher Dimensional Geometry Understanding." # With the PROJECT_LOGO tag one can specify an logo or icon that is included in # the documentation. The maximum height of the logo should not exceed 55 pixels @@ -580,27 +580,27 @@ STRICT_PROTO_MATCHING = NO # documentation. # The default value is: YES. -GENERATE_TODOLIST = YES +GENERATE_TODOLIST = NO # The GENERATE_TESTLIST tag can be used to enable ( YES) or disable ( NO) the # test list. This list is created by putting \test commands in the # documentation. # The default value is: YES. -GENERATE_TESTLIST = YES +GENERATE_TESTLIST = NO # The GENERATE_BUGLIST tag can be used to enable ( YES) or disable ( NO) the bug # list. This list is created by putting \bug commands in the documentation. # The default value is: YES. -GENERATE_BUGLIST = YES +GENERATE_BUGLIST = NO # The GENERATE_DEPRECATEDLIST tag can be used to enable ( YES) or disable ( NO) # the deprecated list. This list is created by putting \deprecated commands in # the documentation. # The default value is: YES. -GENERATE_DEPRECATEDLIST= YES +GENERATE_DEPRECATEDLIST= NO # The ENABLED_SECTIONS tag can be used to enable conditional documentation # sections, marked by \if ... \endif and \cond @@ -834,7 +834,9 @@ EXAMPLE_RECURSIVE = NO IMAGE_PATH = doc/Skeleton_blocker/ \ doc/common/ \ - doc/Contraction/ + doc/Contraction/ \ + doc/Simplex_tree/ \ + doc/Persistent_cohomology/ \ # The INPUT_FILTER tag can be used to specify a program that doxygen should @@ -1039,7 +1041,7 @@ HTML_FILE_EXTENSION = .html # of the possible markers and block names see the documentation. # This tag requires that the tag GENERATE_HTML is set to YES. -HTML_HEADER = +HTML_HEADER = doc/common/header.html # The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each # generated HTML page. If the tag is left blank doxygen will generate a standard @@ -1049,7 +1051,7 @@ HTML_HEADER = # that doxygen normally uses. # This tag requires that the tag GENERATE_HTML is set to YES. -HTML_FOOTER = +HTML_FOOTER = doc/common/footer.html # The HTML_STYLESHEET tag can be used to specify a user-defined cascading style # sheet that is used by each HTML page. It can be used to fine-tune the look of @@ -1061,7 +1063,7 @@ HTML_FOOTER = # obsolete. # This tag requires that the tag GENERATE_HTML is set to YES. -HTML_STYLESHEET = +HTML_STYLESHEET = doc/common/stylesheet.css # The HTML_EXTRA_STYLESHEET tag can be used to specify an additional user- # defined cascading style sheet that is included after the standard style sheets @@ -1544,7 +1546,7 @@ EXTRA_SEARCH_MAPPINGS = # If the GENERATE_LATEX tag is set to YES doxygen will generate LaTeX output. # The default value is: YES. -GENERATE_LATEX = YES +GENERATE_LATEX = NO # The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. If a # relative path is entered the value of OUTPUT_DIRECTORY will be put in front of diff --git a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h index 643b810c..3c331f0f 100644 --- a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h +++ b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h @@ -63,15 +63,6 @@ namespace persistent_cohomology { composed of three elements: topological spaces, their homology groups and an evolution scheme. 
- The theory of homology consists in attaching to a topological space a sequence of - (homology) groups, - capturing global topological features - like connected components, holes, cavities, etc. Persistent homology studies the evolution - -- birth, life and death -- of - these features when the topological space is changing. Consequently, the theory is essentially - composed of three elements: - topological spaces, their homology groups and an evolution scheme. -
Topological Spaces:
Topological spaces are represented by simplicial complexes. Let \f$V = \{1, \cdots ,|V|\}\f$ be a set of vertices. diff --git a/src/Simplex_tree/doc/Simplex_tree_representation.png b/src/Simplex_tree/doc/Simplex_tree_representation.png new file mode 100644 index 00000000..9d401520 Binary files /dev/null and b/src/Simplex_tree/doc/Simplex_tree_representation.png differ diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree.h b/src/Simplex_tree/include/gudhi/Simplex_tree.h index 096270ee..53f6fec7 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree.h @@ -68,6 +68,7 @@ namespace Gudhi { The simplex tree is an efficient and flexible data structure for representing general (filtered) simplicial complexes. The data structure is described in \cite boissonnatmariasimplextreealgorithmica + \image html "Simplex_tree_representation.png" "Simplex tree representation" The second one is the Hasse_complex. The Hasse complex is a data structure representing explicitly all co-dimension 1 incidence relations in a complex. It is consequently faster diff --git a/src/common/doc/footer.html b/src/common/doc/footer.html new file mode 100644 index 00000000..7b4cdc5c --- /dev/null +++ b/src/common/doc/footer.html @@ -0,0 +1,29 @@ + + + + + + + +
+ $projectname + Version $projectnumber + - $projectbrief + + + + $generatedby + + doxygen $doxygenversion + +
+ + + + + + diff --git a/src/common/doc/header.html b/src/common/doc/header.html new file mode 100644 index 00000000..c958390c --- /dev/null +++ b/src/common/doc/header.html @@ -0,0 +1,82 @@ + + + + + + + + +$projectname: $title +$title + + + + + + +$treeview +$search +$mathjax + +$extrastylesheet + + + + + + + +
+ + +
+ + + + + + + + + + +
$searchbox
+
+ + diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h index 41b8ba1e..5661c969 100644 --- a/src/common/doc/main_page.h +++ b/src/common/doc/main_page.h @@ -1,15 +1,14 @@ /*! \mainpage + * \tableofcontents * \image html "Gudhi_banner.jpg" "" width=20cm * * \section Introduction Introduction * The Gudhi library (Geometric Understanding in Higher Dimensions) is a generic open source C++ library for * Computational Topology and Topological Data Analysis * (TDA). - * The GUDHI library is developed as part of the - * GUDHI project supported by the European - * Research Council. The GUDHI library intends to help the development of new algorithmic solutions in TDA and their - * transfer to applications. It provides robust, efficient, flexible and easy to use implementations of - * state-of-the-art algorithms and data structures. + * The GUDHI library intends to help the development of new algorithmic solutions in TDA and their transfer to + * applications. It provides robust, efficient, flexible and easy to use implementations of state-of-the-art + * algorithms and data structures. * * The current release of the GUDHI library includes: * @@ -20,24 +19,86 @@ * All data-structures are generic and several of their aspects can be parameterized via template classes. * We refer to \cite gudhilibrary_ICMS14 for a detailed description of the design of the library. * - * The library is available here - * and the documentation is available at this - * webpage. - * - * The library comes with data sets, \ref demos and \ref testsuites. - * - * Gudhi is also accessible though the - * R package TDA - * (Statistical Tools for Topological Data Analysis). - * - * The development of the GUDHI library is steered by an Editorial Board composed of: - * - * \li - * Jean-Daniel Boissonnat | INRIA Sophia Antipolis - Méditerranée - * \li Marc Glisse | INRIA Saclay - Ile de France - * \li Clément Jamin | INRIA Sophia Antipolis - Méditerranée - * \li Vincent Rouvreau | INRIA Saclay - Ile de France - * + \section DataStructures Data structures + \subsection SimplexTreeDataStructure Simplex tree + \image html "Simplex_tree_representation.png" "Simplex tree representation" + + + + + +
+ Introduced in: GUDHI 1.0.0
+ Copyright: GPL v3
+
+ Clément Maria
+ The simplex tree is an efficient and flexible + data structure for representing general (filtered) simplicial complexes. The data structure + is described in \cite boissonnatmariasimplextreealgorithmica .
+ User manual: \ref simplex_tree - Reference manual: Gudhi::Simplex_tree +
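A minimal sketch of building a filtered complex with the simplex tree and iterating its filtration, assuming the default Gudhi::Simplex_tree<> template and insert_simplex_and_subfaces; the simplices and filtration values are arbitrary:

#include <gudhi/Simplex_tree.h>
#include <iostream>
#include <vector>

int main() {
  Gudhi::Simplex_tree<> st;
  std::vector<int> triangle = {0, 1, 2};
  std::vector<int> edge = {2, 3};
  st.insert_simplex_and_subfaces(triangle, 0.3);  // inserts {0,1,2} and all of its faces
  st.insert_simplex_and_subfaces(edge, 0.2);
  st.set_dimension(2);
  st.initialize_filtration();

  std::cout << "The complex contains " << st.num_simplices() << " simplices"
            << " - dimension " << st.dimension() << std::endl;
  for (auto f_simplex : st.filtration_simplex_range()) {
    std::cout << "  [" << st.filtration(f_simplex) << "] ";
    for (auto vertex : st.simplex_vertex_range(f_simplex))
      std::cout << vertex << " ";
    std::cout << std::endl;
  }
  return 0;
}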
+ \subsection SkeletonBlockerDataStructure Skeleton blocker + \image html "ds_representation.png" "Skeleton blocker representation" + + + + + +
+ Introduced in: GUDHI 1.1.0
+ Copyright: GPL v3
+
+ David Salinas
+ The Skeleton-Blocker data-structure proposes a light encoding for simplicial complexes by storing only an *implicit* + representation of its simplices \cite socg_blockers_2011,\cite blockers2012. Intuitively, it just stores the + 1-skeleton of a simplicial complex with a graph and the set of its "missing faces" that is very small in practice. + This data-structure handles all simplicial complexes operations such as simplex enumeration or simplex removal but + operations that are particularly efficient are operations that do not require simplex enumeration such as edge + iteration, link computation or simplex contraction.
+ User manual: \ref skbl - Reference manual: Gudhi::skbl::Skeleton_blocker_complex +
+ + \section Toolbox Toolbox + \subsection PersistentCohomologyToolbox Persistent Cohomology + \image html "barcode_poch.png" "Persistent Cohomology represented as a barcode" + + + + + +
+ Introduced in: GUDHI 1.0.0
+ Copyright: GPL v3
+
+ Clément Maria
+ The theory of homology consists in attaching to a topological space a sequence of (homology) groups, capturing + global topological features like connected components, holes, cavities, etc. Persistent homology studies the + evolution -- birth, life and death -- of these features when the topological space is changing. Consequently, the + theory is essentially composed of three elements: topological spaces, their homology groups and an evolution + scheme. + Computation of persistent cohomology using the algorithm of \cite DBLP:journals/dcg/SilvaMV11 and + \cite DBLP:journals/corr/abs-1208-5018 and the Compressed Annotation Matrix implementation of + \cite DBLP:conf/esa/BoissonnatDM13 .
+ User manual: \ref persistent_cohomology +
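A minimal sketch of this computation, assuming Persistent_cohomology is instantiated on a Simplex_tree with the Field_Zp coefficient field and that gudhi/Persistent_cohomology.h provides Field_Zp; the toy complex and the characteristic 11 are illustration values only:

#include <gudhi/Simplex_tree.h>
#include <gudhi/Persistent_cohomology.h>
#include <vector>

using Simplex_tree = Gudhi::Simplex_tree<>;
using Field_Zp = Gudhi::persistent_cohomology::Field_Zp;
using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology<Simplex_tree, Field_Zp>;

int main() {
  Simplex_tree st;
  // A filled triangle appearing at 0.3, glued to an edge appearing at 0.2.
  st.insert_simplex_and_subfaces(std::vector<int>{0, 1, 2}, 0.3);
  st.insert_simplex_and_subfaces(std::vector<int>{2, 3}, 0.2);
  st.set_dimension(2);
  st.initialize_filtration();

  Persistent_cohomology pcoh(st);
  pcoh.init_coefficients(11);              // homology coefficients in Z/11Z
  pcoh.compute_persistent_cohomology(0.);  // keep all intervals, even of length zero
  pcoh.output_diagram();                   // one bar per line: p dim birth death
  return 0;
}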
+ \subsection ContractionToolbox Contraction + \image html "sphere_contraction_representation.png" "Sphere contraction example" + + + + + +
+ Introduced in: GUDHI 1.1.0
+ Copyright: GPL v3
+
+ David Salinas
+ The purpose of this package is to offer a user-friendly interface for edge contraction simplification of huge + simplicial complexes. It uses the \ref skbl data-structure whose size remains small during simplification of most + used geometrical complexes of topological data analysis such as the Rips or the Delaunay complexes. In practice, + the size of this data-structure is even much lower than the total number of simplices.
+ User manual: \ref contr +
*/ /*! \page installation Gudhi installation @@ -47,7 +108,7 @@ * * \section compiling Compiling * The library uses c++11 and requires Boost with version 1.48.0 or - * more recent. It is a multi-platform library and compiles on Linux, Mac OSX and Visual Studio 2013. + * more recent. It is a multi-platform library and compiles on Linux, Mac OSX and Visual Studio 2015. * * \subsection gmp GMP: * The multi-field persistent homology algorithm requires GMP which is a free library for arbitrary-precision diff --git a/src/common/doc/stylesheet.css b/src/common/doc/stylesheet.css new file mode 100644 index 00000000..1df177a4 --- /dev/null +++ b/src/common/doc/stylesheet.css @@ -0,0 +1,1367 @@ +/* The standard CSS for doxygen 1.8.6 */ + +body, table, div, p, dl { + font: 400 14px/22px Roboto,sans-serif; +} + +/* @group Heading Levels */ + +h1.groupheader { + font-size: 150%; +} + +.title { + font: 400 14px/28px Roboto,sans-serif; + font-size: 150%; + font-weight: bold; + margin: 10px 2px; +} + +h2.groupheader { + border-bottom: 1px solid #879ECB; + color: #354C7B; + font-size: 150%; + font-weight: normal; + margin-top: 1.75em; + padding-top: 8px; + padding-bottom: 4px; + width: 100%; +} + +h3.groupheader { + font-size: 100%; +} + +h1, h2, h3, h4, h5, h6 { + -webkit-transition: text-shadow 0.5s linear; + -moz-transition: text-shadow 0.5s linear; + -ms-transition: text-shadow 0.5s linear; + -o-transition: text-shadow 0.5s linear; + transition: text-shadow 0.5s linear; + margin-right: 15px; +} + +h1.glow, h2.glow, h3.glow, h4.glow, h5.glow, h6.glow { + text-shadow: 0 0 15px cyan; +} + +dt { + font-weight: bold; +} + +div.multicol { + -moz-column-gap: 1em; + -webkit-column-gap: 1em; + -moz-column-count: 3; + -webkit-column-count: 3; +} + +p.startli, p.startdd { + margin-top: 2px; +} + +p.starttd { + margin-top: 0px; +} + +p.endli { + margin-bottom: 0px; +} + +p.enddd { + margin-bottom: 4px; +} + +p.endtd { + margin-bottom: 2px; +} + +/* @end */ + +caption { + font-weight: bold; +} + +span.legend { + font-size: 70%; + text-align: center; +} + +h3.version { + font-size: 90%; + text-align: center; +} + +div.qindex, div.navtab{ + background-color: #EBEFF6; + border: 1px solid #A3B4D7; + text-align: center; +} + +div.qindex, div.navpath { + width: 100%; + line-height: 140%; +} + +div.navtab { + margin-right: 15px; +} + +/* @group Link Styling */ + +a { + color: #3D578C; + font-weight: normal; + text-decoration: none; +} + +.contents a:visited { + color: #4665A2; +} + +a:hover { + text-decoration: underline; +} + +a.qindex { + font-weight: bold; +} + +a.qindexHL { + font-weight: bold; + background-color: #9CAFD4; + color: #ffffff; + border: 1px double #869DCA; +} + +.contents a.qindexHL:visited { + color: #ffffff; +} + +a.el { + font-weight: bold; +} + +a.elRef { +} + +a.code, a.code:visited, a.line, a.line:visited { + color: #4665A2; +} + +a.codeRef, a.codeRef:visited, a.lineRef, a.lineRef:visited { + color: #4665A2; +} + +/* @end */ + +dl.el { + margin-left: -1cm; +} + +pre.fragment { + border: 1px solid #C4CFE5; + background-color: #FBFCFD; + padding: 4px 6px; + margin: 4px 8px 4px 2px; + overflow: auto; + word-wrap: break-word; + font-size: 9pt; + line-height: 125%; + font-family: monospace, fixed; + font-size: 105%; +} + +div.fragment { + padding: 4px 6px; + margin: 4px 8px 4px 2px; + background-color: #FBFCFD; + border: 1px solid #C4CFE5; +} + +div.line { + font-family: monospace, fixed; + font-size: 13px; + min-height: 13px; + line-height: 1.0; + text-wrap: unrestricted; + white-space: -moz-pre-wrap; 
/* Moz */ + white-space: -pre-wrap; /* Opera 4-6 */ + white-space: -o-pre-wrap; /* Opera 7 */ + white-space: pre-wrap; /* CSS3 */ + word-wrap: break-word; /* IE 5.5+ */ + text-indent: -53px; + padding-left: 53px; + padding-bottom: 0px; + margin: 0px; + -webkit-transition-property: background-color, box-shadow; + -webkit-transition-duration: 0.5s; + -moz-transition-property: background-color, box-shadow; + -moz-transition-duration: 0.5s; + -ms-transition-property: background-color, box-shadow; + -ms-transition-duration: 0.5s; + -o-transition-property: background-color, box-shadow; + -o-transition-duration: 0.5s; + transition-property: background-color, box-shadow; + transition-duration: 0.5s; +} + +div.line.glow { + background-color: cyan; + box-shadow: 0 0 10px cyan; +} + + +span.lineno { + padding-right: 4px; + text-align: right; + border-right: 2px solid #0F0; + background-color: #E8E8E8; + white-space: pre; +} +span.lineno a { + background-color: #D8D8D8; +} + +span.lineno a:hover { + background-color: #C8C8C8; +} + +div.ah { + background-color: black; + font-weight: bold; + color: #ffffff; + margin-bottom: 3px; + margin-top: 3px; + padding: 0.2em; + border: solid thin #333; + border-radius: 0.5em; + -webkit-border-radius: .5em; + -moz-border-radius: .5em; + box-shadow: 2px 2px 3px #999; + -webkit-box-shadow: 2px 2px 3px #999; + -moz-box-shadow: rgba(0, 0, 0, 0.15) 2px 2px 2px; + background-image: -webkit-gradient(linear, left top, left bottom, from(#eee), to(#000),color-stop(0.3, #444)); + background-image: -moz-linear-gradient(center top, #eee 0%, #444 40%, #000); +} + +div.groupHeader { + margin-left: 16px; + margin-top: 12px; + font-weight: bold; +} + +div.groupText { + margin-left: 16px; + font-style: italic; +} + +body { + background-color: white; + color: black; + margin: 0; +} + +div.contents { + margin-top: 10px; + margin-left: 12px; + margin-right: 8px; +} + +td.indexkey { + background-color: #EBEFF6; + font-weight: bold; + border: 1px solid #C4CFE5; + margin: 2px 0px 2px 0; + padding: 2px 10px; + white-space: nowrap; + vertical-align: top; +} + +td.indexvalue { + background-color: #EBEFF6; + border: 1px solid #C4CFE5; + padding: 2px 10px; + margin: 2px 0px; +} + +tr.memlist { + background-color: #EEF1F7; +} + +p.formulaDsp { + text-align: center; +} + +img.formulaDsp { + +} + +img.formulaInl { + vertical-align: middle; +} + +div.center { + text-align: center; + margin-top: 0px; + margin-bottom: 0px; + padding: 0px; +} + +div.center img { + border: 0px; +} + +address.footer { + text-align: right; + padding-right: 12px; +} + +img.footer { + border: 0px; + vertical-align: middle; +} + +/* @group Code Colorization */ + +span.keyword { + color: #008000 +} + +span.keywordtype { + color: #604020 +} + +span.keywordflow { + color: #e08000 +} + +span.comment { + color: #800000 +} + +span.preprocessor { + color: #806020 +} + +span.stringliteral { + color: #002080 +} + +span.charliteral { + color: #008080 +} + +span.vhdldigit { + color: #ff00ff +} + +span.vhdlchar { + color: #000000 +} + +span.vhdlkeyword { + color: #700070 +} + +span.vhdllogic { + color: #ff0000 +} + +blockquote { + background-color: #F7F8FB; + border-left: 2px solid #9CAFD4; + margin: 0 24px 0 4px; + padding: 0 12px 0 16px; +} + +/* @end */ + +/* +.search { + color: #003399; + font-weight: bold; +} + +form.search { + margin-bottom: 0px; + margin-top: 0px; +} + +input.search { + font-size: 75%; + color: #000080; + font-weight: normal; + background-color: #e8eef2; +} +*/ + +td.tiny { + font-size: 75%; +} + +.dirtab { + 
padding: 4px; + border-collapse: collapse; + border: 1px solid #A3B4D7; +} + +th.dirtab { + background: #EBEFF6; + font-weight: bold; +} + +hr { + height: 0px; + border: none; + border-top: 1px solid #4A6AAA; +} + +hr.footer { + height: 1px; +} + +/* @group Member Descriptions */ + +table.memberdecls { + border-spacing: 0px; + padding: 0px; +} + +.memberdecls td, .fieldtable tr { + -webkit-transition-property: background-color, box-shadow; + -webkit-transition-duration: 0.5s; + -moz-transition-property: background-color, box-shadow; + -moz-transition-duration: 0.5s; + -ms-transition-property: background-color, box-shadow; + -ms-transition-duration: 0.5s; + -o-transition-property: background-color, box-shadow; + -o-transition-duration: 0.5s; + transition-property: background-color, box-shadow; + transition-duration: 0.5s; +} + +.memberdecls td.glow, .fieldtable tr.glow { + background-color: cyan; + box-shadow: 0 0 15px cyan; +} + +.mdescLeft, .mdescRight, +.memItemLeft, .memItemRight, +.memTemplItemLeft, .memTemplItemRight, .memTemplParams { + background-color: #F9FAFC; + border: none; + margin: 4px; + padding: 1px 0 0 8px; +} + +.mdescLeft, .mdescRight { + padding: 0px 8px 4px 8px; + color: #555; +} + +.memSeparator { + border-bottom: 1px solid #DEE4F0; + line-height: 1px; + margin: 0px; + padding: 0px; +} + +.memItemLeft, .memTemplItemLeft { + white-space: nowrap; +} + +.memItemRight { + width: 100%; +} + +.memTemplParams { + color: #4665A2; + white-space: nowrap; + font-size: 80%; +} + +/* @end */ + +/* @group Member Details */ + +/* Styles for detailed member documentation */ + +.memtemplate { + font-size: 80%; + color: #4665A2; + font-weight: normal; + margin-left: 9px; +} + +.memnav { + background-color: #EBEFF6; + border: 1px solid #A3B4D7; + text-align: center; + margin: 2px; + margin-right: 15px; + padding: 2px; +} + +.mempage { + width: 100%; +} + +.memitem { + padding: 0; + margin-bottom: 10px; + margin-right: 5px; + -webkit-transition: box-shadow 0.5s linear; + -moz-transition: box-shadow 0.5s linear; + -ms-transition: box-shadow 0.5s linear; + -o-transition: box-shadow 0.5s linear; + transition: box-shadow 0.5s linear; + display: table !important; + width: 100%; +} + +.memitem.glow { + box-shadow: 0 0 15px cyan; +} + +.memname { + font-weight: bold; + margin-left: 6px; +} + +.memname td { + vertical-align: bottom; +} + +.memproto, dl.reflist dt { + border-top: 1px solid #A8B8D9; + border-left: 1px solid #A8B8D9; + border-right: 1px solid #A8B8D9; + padding: 6px 0px 6px 0px; + color: #253555; + font-weight: bold; + text-shadow: 0px 1px 1px rgba(255, 255, 255, 0.9); + background-image:url('nav_f.png'); + background-repeat:repeat-x; + background-color: #E2E8F2; + /* opera specific markup */ + box-shadow: 5px 5px 5px rgba(0, 0, 0, 0.15); + border-top-right-radius: 4px; + border-top-left-radius: 4px; + /* firefox specific markup */ + -moz-box-shadow: rgba(0, 0, 0, 0.15) 5px 5px 5px; + -moz-border-radius-topright: 4px; + -moz-border-radius-topleft: 4px; + /* webkit specific markup */ + -webkit-box-shadow: 5px 5px 5px rgba(0, 0, 0, 0.15); + -webkit-border-top-right-radius: 4px; + -webkit-border-top-left-radius: 4px; + +} + +.memdoc, dl.reflist dd { + border-bottom: 1px solid #A8B8D9; + border-left: 1px solid #A8B8D9; + border-right: 1px solid #A8B8D9; + padding: 6px 10px 2px 10px; + background-color: #FBFCFD; + border-top-width: 0; + background-image:url('nav_g.png'); + background-repeat:repeat-x; + background-color: #FFFFFF; + /* opera specific markup */ + border-bottom-left-radius: 
4px; + border-bottom-right-radius: 4px; + box-shadow: 5px 5px 5px rgba(0, 0, 0, 0.15); + /* firefox specific markup */ + -moz-border-radius-bottomleft: 4px; + -moz-border-radius-bottomright: 4px; + -moz-box-shadow: rgba(0, 0, 0, 0.15) 5px 5px 5px; + /* webkit specific markup */ + -webkit-border-bottom-left-radius: 4px; + -webkit-border-bottom-right-radius: 4px; + -webkit-box-shadow: 5px 5px 5px rgba(0, 0, 0, 0.15); +} + +dl.reflist dt { + padding: 5px; +} + +dl.reflist dd { + margin: 0px 0px 10px 0px; + padding: 5px; +} + +.paramkey { + text-align: right; +} + +.paramtype { + white-space: nowrap; +} + +.paramname { + color: #602020; + white-space: nowrap; +} +.paramname em { + font-style: normal; +} +.paramname code { + line-height: 14px; +} + +.params, .retval, .exception, .tparams { + margin-left: 0px; + padding-left: 0px; +} + +.params .paramname, .retval .paramname { + font-weight: bold; + vertical-align: top; +} + +.params .paramtype { + font-style: italic; + vertical-align: top; +} + +.params .paramdir { + font-family: "courier new",courier,monospace; + vertical-align: top; +} + +table.mlabels { + border-spacing: 0px; +} + +td.mlabels-left { + width: 100%; + padding: 0px; +} + +td.mlabels-right { + vertical-align: bottom; + padding: 0px; + white-space: nowrap; +} + +span.mlabels { + margin-left: 8px; +} + +span.mlabel { + background-color: #728DC1; + border-top:1px solid #5373B4; + border-left:1px solid #5373B4; + border-right:1px solid #C4CFE5; + border-bottom:1px solid #C4CFE5; + text-shadow: none; + color: white; + margin-right: 4px; + padding: 2px 3px; + border-radius: 3px; + font-size: 7pt; + white-space: nowrap; + vertical-align: middle; +} + + + +/* @end */ + +/* these are for tree view when not used as main index */ + +div.directory { + margin: 10px 0px; + border-top: 1px solid #A8B8D9; + border-bottom: 1px solid #A8B8D9; + width: 100%; +} + +.directory table { + border-collapse:collapse; +} + +.directory td { + margin: 0px; + padding: 0px; + vertical-align: top; +} + +.directory td.entry { + white-space: nowrap; + padding-right: 6px; + padding-top: 3px; +} + +.directory td.entry a { + outline:none; +} + +.directory td.entry a img { + border: none; +} + +.directory td.desc { + width: 100%; + padding-left: 6px; + padding-right: 6px; + padding-top: 3px; + border-left: 1px solid rgba(0,0,0,0.05); +} + +.directory tr.even { + padding-left: 6px; + background-color: #F7F8FB; +} + +.directory img { + vertical-align: -30%; +} + +.directory .levels { + white-space: nowrap; + width: 100%; + text-align: right; + font-size: 9pt; +} + +.directory .levels span { + cursor: pointer; + padding-left: 2px; + padding-right: 2px; + color: #3D578C; +} + +div.dynheader { + margin-top: 8px; + -webkit-touch-callout: none; + -webkit-user-select: none; + -khtml-user-select: none; + -moz-user-select: none; + -ms-user-select: none; + user-select: none; +} + +address { + font-style: normal; + color: #2A3D61; +} + +table.doxtable { + border-collapse:collapse; + margin-top: 4px; + margin-bottom: 4px; +} + +table.doxtable td, table.doxtable th { + border: 1px solid #2D4068; + padding: 3px 7px 2px; +} + +table.doxtable th { + background-color: #374F7F; + color: #FFFFFF; + font-size: 110%; + padding-bottom: 4px; + padding-top: 5px; +} + +table.fieldtable { + /*width: 100%;*/ + margin-bottom: 10px; + border: 1px solid #A8B8D9; + border-spacing: 0px; + -moz-border-radius: 4px; + -webkit-border-radius: 4px; + border-radius: 4px; + -moz-box-shadow: rgba(0, 0, 0, 0.15) 2px 2px 2px; + -webkit-box-shadow: 2px 2px 2px 
rgba(0, 0, 0, 0.15); + box-shadow: 2px 2px 2px rgba(0, 0, 0, 0.15); +} + +.fieldtable td, .fieldtable th { + padding: 3px 7px 2px; +} + +.fieldtable td.fieldtype, .fieldtable td.fieldname { + white-space: nowrap; + border-right: 1px solid #A8B8D9; + border-bottom: 1px solid #A8B8D9; + vertical-align: top; +} + +.fieldtable td.fieldname { + padding-top: 3px; +} + +.fieldtable td.fielddoc { + border-bottom: 1px solid #A8B8D9; + /*width: 100%;*/ +} + +.fieldtable td.fielddoc p:first-child { + margin-top: 0px; +} + +.fieldtable td.fielddoc p:last-child { + margin-bottom: 2px; +} + +.fieldtable tr:last-child td { + border-bottom: none; +} + +.fieldtable th { + background-image:url('nav_f.png'); + background-repeat:repeat-x; + background-color: #E2E8F2; + font-size: 90%; + color: #253555; + padding-bottom: 4px; + padding-top: 5px; + text-align:left; + -moz-border-radius-topleft: 4px; + -moz-border-radius-topright: 4px; + -webkit-border-top-left-radius: 4px; + -webkit-border-top-right-radius: 4px; + border-top-left-radius: 4px; + border-top-right-radius: 4px; + border-bottom: 1px solid #A8B8D9; +} + + +.tabsearch { + top: 0px; + left: 10px; + height: 36px; + background-image: url('tab_b.png'); + z-index: 101; + overflow: hidden; + font-size: 13px; +} + +.navpath ul +{ + font-size: 11px; + background-image:url('tab_b.png'); + background-repeat:repeat-x; + background-position: 0 -5px; + height:30px; + line-height:30px; + color:#8AA0CC; + border:solid 1px #C2CDE4; + overflow:hidden; + margin:0px; + padding:0px; +} + +.navpath li +{ + list-style-type:none; + float:left; + padding-left:10px; + padding-right:15px; + background-image:url('bc_s.png'); + background-repeat:no-repeat; + background-position:right; + color:#364D7C; +} + +.navpath li.navelem a +{ + height:32px; + display:block; + text-decoration: none; + outline: none; + color: #283A5D; + font-family: 'Lucida Grande',Geneva,Helvetica,Arial,sans-serif; + text-shadow: 0px 1px 1px rgba(255, 255, 255, 0.9); + text-decoration: none; +} + +.navpath li.navelem a:hover +{ + color:#6884BD; +} + +.navpath li.footer +{ + list-style-type:none; + float:right; + padding-left:10px; + padding-right:15px; + background-image:none; + background-repeat:no-repeat; + background-position:right; + color:#364D7C; + font-size: 8pt; +} + + +div.summary +{ + float: right; + font-size: 8pt; + padding-right: 5px; + width: 50%; + text-align: right; +} + +div.summary a +{ + white-space: nowrap; +} + +div.ingroups +{ + font-size: 8pt; + width: 50%; + text-align: left; +} + +div.ingroups a +{ + white-space: nowrap; +} + +div.header +{ + background-image:url('nav_h.png'); + background-repeat:repeat-x; + background-color: #F9FAFC; + margin: 0px; + border-bottom: 1px solid #C4CFE5; +} + +div.headertitle +{ + padding: 5px 5px 5px 10px; +} + +dl +{ + padding: 0 0 0 10px; +} + +/* dl.note, dl.warning, dl.attention, dl.pre, dl.post, dl.invariant, dl.deprecated, dl.todo, dl.test, dl.bug */ +dl.section +{ + margin-left: 0px; + padding-left: 0px; +} + +dl.note +{ + margin-left:-7px; + padding-left: 3px; + border-left:4px solid; + border-color: #D0C000; +} + +dl.warning, dl.attention +{ + margin-left:-7px; + padding-left: 3px; + border-left:4px solid; + border-color: #FF0000; +} + +dl.pre, dl.post, dl.invariant +{ + margin-left:-7px; + padding-left: 3px; + border-left:4px solid; + border-color: #00D000; +} + +dl.deprecated +{ + margin-left:-7px; + padding-left: 3px; + border-left:4px solid; + border-color: #505050; +} + +dl.todo +{ + margin-left:-7px; + padding-left: 3px; + 
border-left:4px solid; + border-color: #00C0E0; +} + +dl.test +{ + margin-left:-7px; + padding-left: 3px; + border-left:4px solid; + border-color: #3030E0; +} + +dl.bug +{ + margin-left:-7px; + padding-left: 3px; + border-left:4px solid; + border-color: #C08050; +} + +dl.section dd { + margin-bottom: 6px; +} + + +#projectlogo +{ + text-align: center; + vertical-align: bottom; + border-collapse: separate; +} + +#projectlogo img +{ + border: 0px none; +} + +#projectname +{ + border: 0px none; + font: 300% Tahoma, Arial,sans-serif; + margin: 0px; + padding: 2px 0px; +} + +#projectbrief +{ + font: 60% Tahoma, Arial,sans-serif; + margin: 0px; + padding: 0px; +} + +#projectnumber +{ + font: 80% Tahoma, Arial,sans-serif; + margin: 0px; + padding: 0px; +} + +#titlearea +{ + padding: 0px; + margin: 0px; + width: 100%; + border-bottom: 1px solid #5373B4; +} + +.image +{ + text-align: center; +} + +.dotgraph +{ + text-align: center; +} + +.mscgraph +{ + text-align: center; +} + +.diagraph +{ + text-align: center; +} + +.caption +{ + font-weight: bold; +} + +div.zoom +{ + border: 1px solid #90A5CE; +} + +dl.citelist { + margin-bottom:50px; +} + +dl.citelist dt { + color:#334975; + float:left; + font-weight:bold; + margin-right:10px; + padding:5px; +} + +dl.citelist dd { + margin:2px 0; + padding:5px 0; +} + +div.toc { + padding: 14px 25px; + background-color: #F4F6FA; + border: 1px solid #D8DFEE; + border-radius: 7px 7px 7px 7px; + float: right; + height: auto; + margin: 0 20px 10px 10px; + width: 200px; +} + +div.toc li { + background: url("bdwn.png") no-repeat scroll 0 5px transparent; + font: 10px/1.2 Verdana,DejaVu Sans,Geneva,sans-serif; + margin-top: 5px; + padding-left: 10px; + padding-top: 2px; +} + +div.toc h3 { + font: bold 12px/1.2 Arial,FreeSans,sans-serif; + color: #4665A2; + border-bottom: 0 none; + margin: 0; +} + +div.toc ul { + list-style: none outside none; + border: medium none; + padding: 0px; +} + +div.toc li.level1 { + margin-left: 0px; +} + +div.toc li.level2 { + margin-left: 15px; +} + +div.toc li.level3 { + margin-left: 30px; +} + +div.toc li.level4 { + margin-left: 45px; +} + +.inherit_header { + font-weight: bold; + color: gray; + cursor: pointer; + -webkit-touch-callout: none; + -webkit-user-select: none; + -khtml-user-select: none; + -moz-user-select: none; + -ms-user-select: none; + user-select: none; +} + +.inherit_header td { + padding: 6px 0px 2px 5px; +} + +.inherit { + display: none; +} + +tr.heading h2 { + margin-top: 12px; + margin-bottom: 4px; +} + +/* tooltip related style info */ + +.ttc { + position: absolute; + display: none; +} + +#powerTip { + cursor: default; + white-space: nowrap; + background-color: white; + border: 1px solid gray; + border-radius: 4px 4px 4px 4px; + box-shadow: 1px 1px 7px gray; + display: none; + font-size: smaller; + max-width: 80%; + opacity: 0.9; + padding: 1ex 1em 1em; + position: absolute; + z-index: 2147483647; +} + +#powerTip div.ttdoc { + color: grey; + font-style: italic; +} + +#powerTip div.ttname a { + font-weight: bold; +} + +#powerTip div.ttname { + font-weight: bold; +} + +#powerTip div.ttdeci { + color: #006318; +} + +#powerTip div { + margin: 0px; + padding: 0px; + font: 12px/16px Roboto,sans-serif; +} + +#powerTip:before, #powerTip:after { + content: ""; + position: absolute; + margin: 0px; +} + +#powerTip.n:after, #powerTip.n:before, +#powerTip.s:after, #powerTip.s:before, +#powerTip.w:after, #powerTip.w:before, +#powerTip.e:after, #powerTip.e:before, +#powerTip.ne:after, #powerTip.ne:before, +#powerTip.se:after, 
#powerTip.se:before, +#powerTip.nw:after, #powerTip.nw:before, +#powerTip.sw:after, #powerTip.sw:before { + border: solid transparent; + content: " "; + height: 0; + width: 0; + position: absolute; +} + +#powerTip.n:after, #powerTip.s:after, +#powerTip.w:after, #powerTip.e:after, +#powerTip.nw:after, #powerTip.ne:after, +#powerTip.sw:after, #powerTip.se:after { + border-color: rgba(255, 255, 255, 0); +} + +#powerTip.n:before, #powerTip.s:before, +#powerTip.w:before, #powerTip.e:before, +#powerTip.nw:before, #powerTip.ne:before, +#powerTip.sw:before, #powerTip.se:before { + border-color: rgba(128, 128, 128, 0); +} + +#powerTip.n:after, #powerTip.n:before, +#powerTip.ne:after, #powerTip.ne:before, +#powerTip.nw:after, #powerTip.nw:before { + top: 100%; +} + +#powerTip.n:after, #powerTip.ne:after, #powerTip.nw:after { + border-top-color: #ffffff; + border-width: 10px; + margin: 0px -10px; +} +#powerTip.n:before { + border-top-color: #808080; + border-width: 11px; + margin: 0px -11px; +} +#powerTip.n:after, #powerTip.n:before { + left: 50%; +} + +#powerTip.nw:after, #powerTip.nw:before { + right: 14px; +} + +#powerTip.ne:after, #powerTip.ne:before { + left: 14px; +} + +#powerTip.s:after, #powerTip.s:before, +#powerTip.se:after, #powerTip.se:before, +#powerTip.sw:after, #powerTip.sw:before { + bottom: 100%; +} + +#powerTip.s:after, #powerTip.se:after, #powerTip.sw:after { + border-bottom-color: #ffffff; + border-width: 10px; + margin: 0px -10px; +} + +#powerTip.s:before, #powerTip.se:before, #powerTip.sw:before { + border-bottom-color: #808080; + border-width: 11px; + margin: 0px -11px; +} + +#powerTip.s:after, #powerTip.s:before { + left: 50%; +} + +#powerTip.sw:after, #powerTip.sw:before { + right: 14px; +} + +#powerTip.se:after, #powerTip.se:before { + left: 14px; +} + +#powerTip.e:after, #powerTip.e:before { + left: 100%; +} +#powerTip.e:after { + border-left-color: #ffffff; + border-width: 10px; + top: 50%; + margin-top: -10px; +} +#powerTip.e:before { + border-left-color: #808080; + border-width: 11px; + top: 50%; + margin-top: -11px; +} + +#powerTip.w:after, #powerTip.w:before { + right: 100%; +} +#powerTip.w:after { + border-right-color: #ffffff; + border-width: 10px; + top: 50%; + margin-top: -10px; +} +#powerTip.w:before { + border-right-color: #808080; + border-width: 11px; + top: 50%; + margin-top: -11px; +} + +@media print +{ + #top { display: none; } + #side-nav { display: none; } + #nav-path { display: none; } + body { overflow:visible; } + h1, h2, h3, h4, h5, h6 { page-break-after: avoid; } + .summary { display: none; } + .memitem { page-break-inside: avoid; } + #doc-content + { + margin-left:0 !important; + height:auto !important; + width:auto !important; + overflow:inherit; + display:inline; + } +} + -- cgit v1.2.3 From 5478241247f6fb7bcbc57f86050205e4fa8c2737 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Wed, 9 Mar 2016 10:53:36 +0000 Subject: Add of persistence doc image for package overview git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/Doxygen_for_GUDHI_1.3.0@1036 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: ab1ca8902d6e5651cdfd9957d9b261b6874bae8d --- src/Persistent_cohomology/doc/3DTorus_poch.png | Bin 0 -> 33733 bytes src/common/doc/main_page.h | 2 +- 2 files changed, 1 insertion(+), 1 deletion(-) create mode 100644 src/Persistent_cohomology/doc/3DTorus_poch.png (limited to 'src/common') diff --git a/src/Persistent_cohomology/doc/3DTorus_poch.png b/src/Persistent_cohomology/doc/3DTorus_poch.png new file mode 100644 index 00000000..293c1b8c 
Binary files /dev/null and b/src/Persistent_cohomology/doc/3DTorus_poch.png differ diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h index 5661c969..35313409 100644 --- a/src/common/doc/main_page.h +++ b/src/common/doc/main_page.h @@ -60,7 +60,7 @@ \section Toolbox Toolbox \subsection PersistentCohomologyToolbox Persistent Cohomology - \image html "barcode_poch.png" "Persistent Cohomology represented as a barcode" + \image html "3DTorus_poch.png" "Rips Persistent Cohomology on a 3D Torus"
-- cgit v1.2.3 From a28d3c6ba5ab9687626992cc0402c4399c76ecaf Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Tue, 15 Mar 2016 14:23:19 +0000 Subject: In Delaunay_triangulation_off_io.h, constructing the Triangulation from a vector of Points is a more efficient way (instead of inserting points on the fly). As consequence, the documentation needs to be rewritten as the points index are changed. git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/alphashapes@1046 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: bdb0196b61418a983cfaf7f6781a3a2906e8b136 --- src/Alpha_complex/doc/Intro_alpha_complex.h | 8 +- src/Alpha_complex/doc/alpha_complex_doc.ipe | 371 ++++++++++----- src/Alpha_complex/doc/alpha_complex_doc.png | Bin 49973 -> 25150 bytes src/Alpha_complex/doc/alpha_complex_doc_135.ipe | 514 --------------------- src/Alpha_complex/doc/alpha_complex_doc_135.png | Bin 80794 -> 0 bytes src/Alpha_complex/doc/alpha_complex_doc_421.ipe | 514 +++++++++++++++++++++ src/Alpha_complex/doc/alpha_complex_doc_421.png | Bin 0 -> 100798 bytes .../doc/alpha_complex_representation.ipe | 14 +- .../doc/alpha_complex_representation.png | Bin 16737 -> 14628 bytes src/Alpha_complex/example/CMakeLists.txt | 8 +- .../example/alphaoffreader_for_doc_32.txt | 26 +- .../example/alphaoffreader_for_doc_60.txt | 36 +- src/Alpha_complex/include/gudhi/Alpha_complex.h | 1 + src/common/example/CMakeLists.txt | 6 + .../example/dtoffrw_alphashapedoc_result.off | 18 +- .../include/gudhi/Delaunay_triangulation_off_io.h | 26 +- src/common/test/CMakeLists.txt | 4 +- src/common/test/dtoffrw_alphashapedoc_result.off | 18 +- 18 files changed, 851 insertions(+), 713 deletions(-) delete mode 100644 src/Alpha_complex/doc/alpha_complex_doc_135.ipe delete mode 100644 src/Alpha_complex/doc/alpha_complex_doc_135.png create mode 100644 src/Alpha_complex/doc/alpha_complex_doc_421.ipe create mode 100644 src/Alpha_complex/doc/alpha_complex_doc_421.png (limited to 'src/common') diff --git a/src/Alpha_complex/doc/Intro_alpha_complex.h b/src/Alpha_complex/doc/Intro_alpha_complex.h index 0987e0e7..deecf93f 100644 --- a/src/Alpha_complex/doc/Intro_alpha_complex.h +++ b/src/Alpha_complex/doc/Intro_alpha_complex.h @@ -112,19 +112,19 @@ namespace alphacomplex { * * \subsubsection dimension2 Dimension 2 * - * From the example above, it means the algorithm looks into each triangle ([1,2,3], [2,3,4], [1,3,5], ...), + * From the example above, it means the algorithm looks into each triangle ([4,2,1], [2,4,6], [4,5,6], ...), * computes the filtration value of the triangle, and then propagates the filtration value as described * here : - * \image html "alpha_complex_doc_135.png" "Filtration value propagation example" + * \image html "alpha_complex_doc_421.png" "Filtration value propagation example" * * \subsubsection dimension1 Dimension 1 * - * Then, the algorithm looks into each edge ([1,2], [2,3], [1,3], ...), + * Then, the algorithm looks into each edge ([1,2], [4,2], [4,1], ...), * computes the filtration value of the edge (in this case, propagation will have no effect). * * \subsubsection dimension0 Dimension 0 * - * Finally, the algorithm looks into each vertex ([1], [2], [3], [4], [5], [6] and [7]) and + * Finally, the algorithm looks into each vertex ([0], [1], [2], [3], [4], [5] and [6]) and * sets the filtration value (0 in case of a vertex - propagation will have no effect). 
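A schematic of the dimension-2 propagation step described above, reusing the triangles [4,2,1], [2,4,6] and [4,5,6] from this example; squared_circumradius and is_gabriel are placeholders for the CGAL computations performed in Alpha_complex.h, and a plain std::map stands in for the simplex tree (a missing key plays the role of NaN):

#include <algorithm>
#include <cstddef>
#include <iostream>
#include <map>
#include <vector>

using Simplex = std::vector<int>;  // sorted list of vertices

// Placeholders for the geometric predicates; the real values come from CGAL.
double squared_circumradius(const Simplex&) { return 1.0; }
bool is_gabriel(const Simplex& /*facet*/, const Simplex& /*cell*/) { return false; }

int main() {
  std::map<Simplex, double> filtration;
  const std::vector<Simplex> triangles = {{1, 2, 4}, {2, 4, 6}, {4, 5, 6}};

  // Dimension 2: assign each triangle its (squared) circumradius, then propagate
  // that value to every facet that is not Gabriel.
  for (const Simplex& cell : triangles) {
    const double alpha = squared_circumradius(cell);
    filtration[cell] = alpha;
    for (std::size_t skip = 0; skip < cell.size(); ++skip) {
      Simplex facet;
      for (std::size_t i = 0; i < cell.size(); ++i)
        if (i != skip) facet.push_back(cell[i]);
      if (!is_gabriel(facet, cell)) {
        auto inserted = filtration.insert({facet, alpha});
        if (!inserted.second)  // facet already has a value: keep the smaller one
          inserted.first->second = std::min(inserted.first->second, alpha);
      }
    }
  }

  for (const auto& simplex_and_value : filtration) {
    for (int v : simplex_and_value.first) std::cout << v << ' ';
    std::cout << "-> " << simplex_and_value.second << '\n';
  }
  return 0;
}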
* * \subsubsection nondecreasing Non decreasing filtration values diff --git a/src/Alpha_complex/doc/alpha_complex_doc.ipe b/src/Alpha_complex/doc/alpha_complex_doc.ipe index e74f9bc4..99bd05af 100644 --- a/src/Alpha_complex/doc/alpha_complex_doc.ipe +++ b/src/Alpha_complex/doc/alpha_complex_doc.ipe @@ -1,7 +1,7 @@ - + @@ -253,13 +253,13 @@ h 320 580 l Delaunay triangulation -0 -1 -2 -3 -4 -5 -6 +2 +6 +4 +5 +1 +3 +0 280 660 m 300 710 l @@ -278,161 +278,282 @@ h 320 580 l 280 660 l -0 -1 -2 -2 -1 -2 -2 -3 -3 -3 -3 -4 -4 -4 -4 -6 -6 -6 -6 -6 -6 -5 -6 -5 - + 4 0 0 4 320 704 e - + 322.919 706.788 m 317.189 701.058 l 317.189 701.203 l - + 317.551 706.934 m 322.629 701.058 l - -230 680 m -240 670 l + +240 620 m +220 600 l - -230 680 m -240 670 l + +240 620 m +220 640 l - -230 680 m -240 670 l +Simplex tree structure + +280 630 m +170 630 l - -230 680 m -240 670 l + +280 610 m +170 610 l - -230 680 m -220 670 l + + + + + + + +3 + +300 688 m +300 676 l +312 676 l +312 688 l +h - -230 680 m -230 670 l + +300 688 m +300 676 l +312 676 l +312 688 l +h - -220 660 m -220 650 l +4 +3 + +300 688 m +300 676 l +312 676 l +312 688 l +h - -230 660 m -230 650 l + +300 688 m +300 676 l +312 676 l +312 688 l +h - -260 680 m -260 670 l +4 +1 + +300 688 m +300 676 l +312 676 l +312 688 l +h - -260 660 m -260 650 l + +300 688 m +300 676 l +312 676 l +312 688 l +h - -300 680 m -300 670 l +5 + +300 688 m +300 676 l +312 676 l +312 688 l +h - -300 680 m -290 670 l +5 +3 + +300 688 m +300 676 l +312 676 l +312 688 l +h - -290 660 m -290 650 l + +300 688 m +300 676 l +312 676 l +312 688 l +h - -300 660 m -300 650 l +4 +2 + +300 688 m +300 676 l +312 676 l +312 688 l +h - -330 680 m -330 670 l + +300 688 m +300 676 l +312 676 l +312 688 l +h - -350 680 m -350 670 l +4 +6 + +300 688 m +300 676 l +312 676 l +312 688 l +h - -350 660 m -350 650 l +4 + +300 688 m +300 676 l +312 676 l +312 688 l +h - -320 700 m -240 690 l + +300 688 m +300 676 l +312 676 l +312 688 l +h - -320 700 m -270 690 l +6 +6 + +300 688 m +300 676 l +312 676 l +312 688 l +h - -320 700 m -310 690 l +5 + +300 688 m +300 676 l +312 676 l +312 688 l +h - -320 700 m -330 690 l + +300 688 m +300 676 l +312 676 l +312 688 l +h - -320 700 m -350 690 l +6 + +300 688 m +300 676 l +312 676 l +312 688 l +h - -320 700 m -380 690 l +6 + +292 716 m +292 728 l +316 728 l +316 716 l +h - -320 700 m -400 690 l + +316 716 m +316 728 l +340 728 l +340 716 l +h - -240 620 m -220 600 l + +340 716 m +340 728 l +364 728 l +364 716 l +h - -240 620 m -220 640 l + +364 716 m +364 728 l +388 728 l +388 716 l +h + + +388 716 m +388 728 l +412 728 l +412 716 l +h + + +412 716 m +412 728 l +436 728 l +436 716 l +h + + +436 716 m +436 728 l +460 728 l +460 716 l +h + +0 +1 +2 +3 +4 +5 +6 + +436 708 m +436 716 l + + +364 708 m +364 716 l + + +364 688 m +364 696 l + + +320 688 m +320 696 l + + +296 708 m +308 716 l +308 716 l + + +264 688 m +268 696 l + + +292 688 m +292 696 l + + +388 736 m +388 728 l -Simplex tree structure -280 630 m -170 630 l +372 612 m +376 620 l -280 610 m -170 610 l +448 612 m +448 620 l - - - - - - - diff --git a/src/Alpha_complex/doc/alpha_complex_doc.png b/src/Alpha_complex/doc/alpha_complex_doc.png index c9eab275..cfe3ede6 100644 Binary files a/src/Alpha_complex/doc/alpha_complex_doc.png and b/src/Alpha_complex/doc/alpha_complex_doc.png differ diff --git a/src/Alpha_complex/doc/alpha_complex_doc_135.ipe b/src/Alpha_complex/doc/alpha_complex_doc_135.ipe deleted file mode 100644 index 5d1d29d4..00000000 --- a/src/Alpha_complex/doc/alpha_complex_doc_135.ipe 
+++ /dev/null @@ -1,514 +0,0 @@ - - - - - - - -0 0 m --1 0.333 l --1 -0.333 l -h - - - - -0 0 m --1 0.333 l --1 -0.333 l -h - - - - -0.6 0 0 0.6 0 0 e -0.4 0 0 0.4 0 0 e - - - - -0.6 0 0 0.6 0 0 e - - - - - -0.5 0 0 0.5 0 0 e - - -0.6 0 0 0.6 0 0 e -0.4 0 0 0.4 0 0 e - - - - - --0.6 -0.6 m -0.6 -0.6 l -0.6 0.6 l --0.6 0.6 l -h --0.4 -0.4 m -0.4 -0.4 l -0.4 0.4 l --0.4 0.4 l -h - - - - --0.6 -0.6 m -0.6 -0.6 l -0.6 0.6 l --0.6 0.6 l -h - - - - - --0.5 -0.5 m -0.5 -0.5 l -0.5 0.5 l --0.5 0.5 l -h - - --0.6 -0.6 m -0.6 -0.6 l -0.6 0.6 l --0.6 0.6 l -h --0.4 -0.4 m -0.4 -0.4 l -0.4 0.4 l --0.4 0.4 l -h - - - - - - --0.43 -0.57 m -0.57 0.43 l -0.43 0.57 l --0.57 -0.43 l -h - - --0.43 0.57 m -0.57 -0.43 l -0.43 -0.57 l --0.57 0.43 l -h - - - - - -0 0 m --1 0.333 l --1 -0.333 l -h - - - - -0 0 m --1 0.333 l --0.8 0 l --1 -0.333 l -h - - - - -0 0 m --1 0.333 l --0.8 0 l --1 -0.333 l -h - - - - --1 0.333 m -0 0 l --1 -0.333 l - - - - -0 0 m --1 0.333 l --1 -0.333 l -h --1 0 m --2 0.333 l --2 -0.333 l -h - - - - -0 0 m --1 0.333 l --1 -0.333 l -h --1 0 m --2 0.333 l --2 -0.333 l -h - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -320 580 m -350 520 l -290 530 l -320 580 l -320 580 l - - -320 580 m -280 660 l -290 530 l -320 580 l -320 580 l - - -320 580 m -370 580 l -350 520 l -320 580 l - -Cell [4,2,0] -0 -1 -2 -3 -4 -5 -6 - -280 660 m -300 710 l -370 690 l -280 660 l - - -320 580 m -370 690 l -370 580 l -320 580 l - - -280 660 m -370 690 l -320 580 l -280 660 l - - -77.2727 0 0 77.2727 243.636 591.818 e - - -243.428 591.569 m -186.061 643.28 l - -$\alpha_{420}$ - -320 580 m -350 520 l -290 530 l -320 580 l -320 580 l - - -320 580 m -280 660 l -290 530 l -320 580 l -320 580 l - - -320 580 m -370 580 l -350 520 l -320 580 l - -[2,0] is Gabriel $\rightarrow$ $\alpha_{20}$ is not$\\$ -modified (NaN) - -0 -2 -3 -4 -5 -6 - -280 660 m -300 710 l -370 690 l -280 660 l - - -320 580 m -370 690 l -370 580 l -320 580 l - - -280 660 m -370 690 l -320 580 l -280 660 l - -$\alpha_{20}$ - -290 530 m -320 580 l - - -29.1548 0 0 29.1548 305 555 e - - -304.883 555.015 m -334.509 555.015 l - - -320 580 m -350 520 l -290 530 l -320 580 l -320 580 l - - -320 580 m -280 660 l -290 530 l -320 580 l -320 580 l - - -320 580 m -370 580 l -350 520 l -320 580 l - -[0,4] is not Gabriel $\rightarrow$ $\alpha_{40} = \alpha_{420}$ -0 -3 -5 -6 - -280 660 m -300 710 l -370 690 l -280 660 l - - -320 580 m -370 690 l -370 580 l -320 580 l - - -280 660 m -370 690 l -320 580 l -280 660 l - -$\alpha_{40}$ - -290 530 m -280 660 l - - -320 580 m -350 520 l -290 530 l -320 580 l -320 580 l - - -320 580 m -280 660 l -290 530 l -320 580 l -320 580 l - - -320 580 m -370 580 l -350 520 l -320 580 l - -0 -1 -2 -3 -5 -6 - -280 660 m -300 710 l -370 690 l -280 660 l - - -320 580 m -370 690 l -370 580 l -320 580 l - - -280 660 m -370 690 l -320 580 l -280 660 l - -$\alpha_{42}$ -4 - -406.093 497.775 m -446.094 418.092 l - - -44.5799 0 0 44.5799 425.934 457.774 e - - -425.854 457.774 m -470.795 457.774 l - -[2,4] is Gabriel $\rightarrow$ $\alpha_{42}$ is not modified (NaN) - - -205.028 596.091 m -110.946 544.02 l - - -280.768 588.99 m -280.768 547.57 l - - -341.123 594.316 m -413.904 554.079 l - -For all faces of [4,2,0] -N.B. : is Gabriel on a single point has no sense. 
-Dimension =2 - $\sigma$ = [4,2,0] - -247.333 430.892 m -311.764 430.892 l - - - - - - - - - - - - - - -1 - - - - - -4 - - -1 - - -2 - -65.192 0 0 65.192 285 595 e - - - - - - - - - - - - - diff --git a/src/Alpha_complex/doc/alpha_complex_doc_135.png b/src/Alpha_complex/doc/alpha_complex_doc_135.png deleted file mode 100644 index ef7187f7..00000000 Binary files a/src/Alpha_complex/doc/alpha_complex_doc_135.png and /dev/null differ diff --git a/src/Alpha_complex/doc/alpha_complex_doc_421.ipe b/src/Alpha_complex/doc/alpha_complex_doc_421.ipe new file mode 100644 index 00000000..727816c5 --- /dev/null +++ b/src/Alpha_complex/doc/alpha_complex_doc_421.ipe @@ -0,0 +1,514 @@ + + + + + + + +0 0 m +-1 0.333 l +-1 -0.333 l +h + + + + +0 0 m +-1 0.333 l +-1 -0.333 l +h + + + + +0.6 0 0 0.6 0 0 e +0.4 0 0 0.4 0 0 e + + + + +0.6 0 0 0.6 0 0 e + + + + + +0.5 0 0 0.5 0 0 e + + +0.6 0 0 0.6 0 0 e +0.4 0 0 0.4 0 0 e + + + + + +-0.6 -0.6 m +0.6 -0.6 l +0.6 0.6 l +-0.6 0.6 l +h +-0.4 -0.4 m +0.4 -0.4 l +0.4 0.4 l +-0.4 0.4 l +h + + + + +-0.6 -0.6 m +0.6 -0.6 l +0.6 0.6 l +-0.6 0.6 l +h + + + + + +-0.5 -0.5 m +0.5 -0.5 l +0.5 0.5 l +-0.5 0.5 l +h + + +-0.6 -0.6 m +0.6 -0.6 l +0.6 0.6 l +-0.6 0.6 l +h +-0.4 -0.4 m +0.4 -0.4 l +0.4 0.4 l +-0.4 0.4 l +h + + + + + + +-0.43 -0.57 m +0.57 0.43 l +0.43 0.57 l +-0.57 -0.43 l +h + + +-0.43 0.57 m +0.57 -0.43 l +0.43 -0.57 l +-0.57 0.43 l +h + + + + + +0 0 m +-1 0.333 l +-1 -0.333 l +h + + + + +0 0 m +-1 0.333 l +-0.8 0 l +-1 -0.333 l +h + + + + +0 0 m +-1 0.333 l +-0.8 0 l +-1 -0.333 l +h + + + + +-1 0.333 m +0 0 l +-1 -0.333 l + + + + +0 0 m +-1 0.333 l +-1 -0.333 l +h +-1 0 m +-2 0.333 l +-2 -0.333 l +h + + + + +0 0 m +-1 0.333 l +-1 -0.333 l +h +-1 0 m +-2 0.333 l +-2 -0.333 l +h + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +320 580 m +350 520 l +290 530 l +320 580 l +320 580 l + + +320 580 m +280 660 l +290 530 l +320 580 l +320 580 l + + +320 580 m +370 580 l +350 520 l +320 580 l + +Cell [4,2,1] +2 +6 +4 +5 +1 +3 +0 + +280 660 m +300 710 l +370 690 l +280 660 l + + +320 580 m +370 690 l +370 580 l +320 580 l + + +280 660 m +370 690 l +320 580 l +280 660 l + + +77.2727 0 0 77.2727 243.636 591.818 e + + +243.428 591.569 m +186.061 643.28 l + +$\alpha_{421}$ + +320 580 m +350 520 l +290 530 l +320 580 l +320 580 l + + +320 580 m +280 660 l +290 530 l +320 580 l +320 580 l + + +320 580 m +370 580 l +350 520 l +320 580 l + +[4,2] is Gabriel $\rightarrow$ $\alpha_{42}$ is not$\\$ +modified (NaN) + +2 +4 +5 +1 +3 +0 + +280 660 m +300 710 l +370 690 l +280 660 l + + +320 580 m +370 690 l +370 580 l +320 580 l + + +280 660 m +370 690 l +320 580 l +280 660 l + +$\alpha_{42}$ + +290 530 m +320 580 l + + +29.1548 0 0 29.1548 305 555 e + + +304.883 555.015 m +334.509 555.015 l + + +320 580 m +350 520 l +290 530 l +320 580 l +320 580 l + + +320 580 m +280 660 l +290 530 l +320 580 l +320 580 l + + +320 580 m +370 580 l +350 520 l +320 580 l + +[2,1] is not Gabriel $\rightarrow$ $\alpha_{21} = \alpha_{421}$ +2 +5 +3 +0 + +280 660 m +300 710 l +370 690 l +280 660 l + + +320 580 m +370 690 l +370 580 l +320 580 l + + +280 660 m +370 690 l +320 580 l +280 660 l + +$\alpha_{12}$ + +290 530 m +280 660 l + + +320 580 m +350 520 l +290 530 l +320 580 l +320 580 l + + +320 580 m +280 660 l +290 530 l +320 580 l +320 580 l + + +320 580 m +370 580 l +350 520 l +320 580 l + +2 +6 +4 +5 +3 +0 + +280 660 m +300 710 l +370 690 l +280 660 l + + +320 580 m +370 690 l +370 580 l +320 580 l + + 
+280 660 m +370 690 l +320 580 l +280 660 l + +$\alpha_{41}$ +1 + +406.093 497.775 m +446.094 418.092 l + + +44.5799 0 0 44.5799 425.934 457.774 e + + +425.854 457.774 m +470.795 457.774 l + +[4,1] is Gabriel $\rightarrow$ $\alpha_{41}$ is not modified (NaN) + + +205.028 596.091 m +110.946 544.02 l + + +280.768 588.99 m +280.768 547.57 l + + +341.123 594.316 m +413.904 554.079 l + +For all faces of [4,2,1] +N.B. : is Gabriel on a single point has no sense. +Dimension =2 - $\sigma$ = [4,2,1] + +247.333 430.892 m +311.764 430.892 l + + + + + + + + + + + + + + +6 + + + + + +1 + + +6 + + +4 + +65.192 0 0 65.192 285 595 e + + + + + + + + + + + + + diff --git a/src/Alpha_complex/doc/alpha_complex_doc_421.png b/src/Alpha_complex/doc/alpha_complex_doc_421.png new file mode 100644 index 00000000..1cce4402 Binary files /dev/null and b/src/Alpha_complex/doc/alpha_complex_doc_421.png differ diff --git a/src/Alpha_complex/doc/alpha_complex_representation.ipe b/src/Alpha_complex/doc/alpha_complex_representation.ipe index 8687d694..fead1661 100644 --- a/src/Alpha_complex/doc/alpha_complex_representation.ipe +++ b/src/Alpha_complex/doc/alpha_complex_representation.ipe @@ -251,13 +251,13 @@ h h Alpha complex -0 -1 -2 -3 -4 -5 -6 +2 +6 +4 +5 +1 +3 +0 58.1341 0 0 58.1341 218.925 692.601 e diff --git a/src/Alpha_complex/doc/alpha_complex_representation.png b/src/Alpha_complex/doc/alpha_complex_representation.png index 06e54c06..9833bff3 100644 Binary files a/src/Alpha_complex/doc/alpha_complex_representation.png and b/src/Alpha_complex/doc/alpha_complex_representation.png differ diff --git a/src/Alpha_complex/example/CMakeLists.txt b/src/Alpha_complex/example/CMakeLists.txt index e904133b..debb440d 100644 --- a/src/Alpha_complex/example/CMakeLists.txt +++ b/src/Alpha_complex/example/CMakeLists.txt @@ -18,15 +18,15 @@ if(CGAL_FOUND) # Do not forget to copy test files in current binary dir file(COPY "${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) - # Do not forget to copy test results files in current binary dir - file(COPY "alphaoffreader_for_doc_32.txt" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) - file(COPY "alphaoffreader_for_doc_60.txt" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) - add_executable ( alphaoffreader Alpha_complex_from_off.cpp ) target_link_libraries(alphaoffreader ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) add_test(alphaoffreader_doc_60 ${CMAKE_CURRENT_BINARY_DIR}/alphaoffreader alphacomplexdoc.off 60.0 ${CMAKE_CURRENT_BINARY_DIR}/alphaoffreader_result_60.txt) add_test(alphaoffreader_doc_32 ${CMAKE_CURRENT_BINARY_DIR}/alphaoffreader alphacomplexdoc.off 32.0 ${CMAKE_CURRENT_BINARY_DIR}/alphaoffreader_result_32.txt) if (DIFF_PATH) + # Do not forget to copy test results files in current binary dir + file(COPY "alphaoffreader_for_doc_32.txt" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) + file(COPY "alphaoffreader_for_doc_60.txt" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) + add_test(alphaoffreader_doc_60_diff_files ${DIFF_PATH} ${CMAKE_CURRENT_BINARY_DIR}/alphaoffreader_result_60.txt ${CMAKE_CURRENT_BINARY_DIR}/alphaoffreader_for_doc_60.txt) add_test(alphaoffreader_doc_32_diff_files ${DIFF_PATH} ${CMAKE_CURRENT_BINARY_DIR}/alphaoffreader_result_32.txt ${CMAKE_CURRENT_BINARY_DIR}/alphaoffreader_for_doc_32.txt) endif() diff --git a/src/Alpha_complex/example/alphaoffreader_for_doc_32.txt b/src/Alpha_complex/example/alphaoffreader_for_doc_32.txt index 13183e86..5869fdff 100644 --- a/src/Alpha_complex/example/alphaoffreader_for_doc_32.txt +++ 
b/src/Alpha_complex/example/alphaoffreader_for_doc_32.txt @@ -7,16 +7,16 @@ Iterator on alpha complex simplices in the filtration order, with [filtration va ( 4 ) -> [0] ( 5 ) -> [0] ( 6 ) -> [0] - ( 3 2 ) -> [6.25] - ( 5 4 ) -> [7.25] - ( 2 0 ) -> [8.5] - ( 1 0 ) -> [9.25] - ( 3 1 ) -> [10] - ( 2 1 ) -> [11.25] - ( 3 2 1 ) -> [12.5] - ( 2 1 0 ) -> [12.9959] - ( 6 5 ) -> [13.25] - ( 4 2 ) -> [20] - ( 6 4 ) -> [22.7367] - ( 6 5 4 ) -> [22.7367] - ( 6 3 ) -> [30.25] + ( 5 4 ) -> [6.25] + ( 3 1 ) -> [7.25] + ( 4 2 ) -> [8.5] + ( 6 2 ) -> [9.25] + ( 6 5 ) -> [10] + ( 6 4 ) -> [11.25] + ( 6 5 4 ) -> [12.5] + ( 6 4 2 ) -> [12.9959] + ( 3 0 ) -> [13.25] + ( 4 1 ) -> [20] + ( 1 0 ) -> [22.7367] + ( 3 1 0 ) -> [22.7367] + ( 5 0 ) -> [30.25] diff --git a/src/Alpha_complex/example/alphaoffreader_for_doc_60.txt b/src/Alpha_complex/example/alphaoffreader_for_doc_60.txt index 71f29a00..1d17a58a 100644 --- a/src/Alpha_complex/example/alphaoffreader_for_doc_60.txt +++ b/src/Alpha_complex/example/alphaoffreader_for_doc_60.txt @@ -7,21 +7,21 @@ Iterator on alpha complex simplices in the filtration order, with [filtration va ( 4 ) -> [0] ( 5 ) -> [0] ( 6 ) -> [0] - ( 3 2 ) -> [6.25] - ( 5 4 ) -> [7.25] - ( 2 0 ) -> [8.5] - ( 1 0 ) -> [9.25] - ( 3 1 ) -> [10] - ( 2 1 ) -> [11.25] - ( 3 2 1 ) -> [12.5] - ( 2 1 0 ) -> [12.9959] - ( 6 5 ) -> [13.25] - ( 4 2 ) -> [20] - ( 6 4 ) -> [22.7367] - ( 6 5 4 ) -> [22.7367] - ( 6 3 ) -> [30.25] - ( 6 2 ) -> [36.5] - ( 6 3 2 ) -> [36.5] - ( 6 4 2 ) -> [37.2449] - ( 4 0 ) -> [59.7107] - ( 4 2 0 ) -> [59.7107] + ( 5 4 ) -> [6.25] + ( 3 1 ) -> [7.25] + ( 4 2 ) -> [8.5] + ( 6 2 ) -> [9.25] + ( 6 5 ) -> [10] + ( 6 4 ) -> [11.25] + ( 6 5 4 ) -> [12.5] + ( 6 4 2 ) -> [12.9959] + ( 3 0 ) -> [13.25] + ( 4 1 ) -> [20] + ( 1 0 ) -> [22.7367] + ( 3 1 0 ) -> [22.7367] + ( 5 0 ) -> [30.25] + ( 4 0 ) -> [36.5] + ( 5 4 0 ) -> [36.5] + ( 4 1 0 ) -> [37.2449] + ( 2 1 ) -> [59.7107] + ( 4 2 1 ) -> [59.7107] diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex.h b/src/Alpha_complex/include/gudhi/Alpha_complex.h index 5183828f..a69afb51 100644 --- a/src/Alpha_complex/include/gudhi/Alpha_complex.h +++ b/src/Alpha_complex/include/gudhi/Alpha_complex.h @@ -48,6 +48,7 @@ namespace Gudhi { namespace alphacomplex { /** + * \class Alpha_complex Alpha_complex.h gudhi/Alpha_complex.h * \brief Alpha complex data structure. * * \ingroup alpha_complex diff --git a/src/common/example/CMakeLists.txt b/src/common/example/CMakeLists.txt index 6c2e7669..91e78ea2 100644 --- a/src/common/example/CMakeLists.txt +++ b/src/common/example/CMakeLists.txt @@ -13,6 +13,12 @@ if(CGAL_FOUND) target_link_libraries(dtoffrw ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) add_test(dtoffrw ${CMAKE_CURRENT_BINARY_DIR}/dtoffrw ${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off ${CMAKE_CURRENT_BINARY_DIR}/result.off) + if (DIFF_PATH) + # Do not forget to copy test results files in current binary dir + file(COPY "dtoffrw_alphashapedoc_result.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) + add_test(dtoffrw_result_off_diff_files ${DIFF_PATH} ${CMAKE_CURRENT_BINARY_DIR}/dtoffrw_alphashapedoc_result.off ${CMAKE_CURRENT_BINARY_DIR}/result.off) + endif() + else() message(WARNING "Eigen3 not found. 
Version 3.1.0 is required for Alpha shapes feature.") endif() diff --git a/src/common/example/dtoffrw_alphashapedoc_result.off b/src/common/example/dtoffrw_alphashapedoc_result.off index 03b7ca75..d1839a43 100644 --- a/src/common/example/dtoffrw_alphashapedoc_result.off +++ b/src/common/example/dtoffrw_alphashapedoc_result.off @@ -1,15 +1,15 @@ nOFF 2 7 6 0 +9 17 +0 14 1 1 -7 0 +2 19 4 6 9 6 -0 14 -2 19 -9 17 -3 0 1 2 -3 3 2 1 -3 4 0 2 -3 4 2 6 -3 6 2 3 +7 0 +3 5 0 4 +3 0 1 4 +3 3 1 0 +3 4 1 2 3 5 4 6 +3 6 4 2 diff --git a/src/common/include/gudhi/Delaunay_triangulation_off_io.h b/src/common/include/gudhi/Delaunay_triangulation_off_io.h index b3f4a299..529774f0 100644 --- a/src/common/include/gudhi/Delaunay_triangulation_off_io.h +++ b/src/common/include/gudhi/Delaunay_triangulation_off_io.h @@ -31,7 +31,9 @@ namespace Gudhi { -/** \brief OFF file visitor implementation according to Off_reader in order to construct a CGAL Delaunay triangulation. +/** + * \class Delaunay_triangulation_off_visitor_reader Delaunay_triangulation_off_io.h gudhi/Delaunay_triangulation_off_io.h + * \brief OFF file visitor implementation according to Off_reader in order to construct a CGAL Delaunay triangulation. * * For more informations on CGAL Delaunay triangulation, please refer to the corresponding chapter in page * http://doc.cgal.org/latest/Triangulation/ @@ -41,7 +43,7 @@ class Delaunay_triangulation_off_visitor_reader { private: Complex* complex_; typedef typename Complex::Point Point; - + std::vector point_cloud; public: // TODO(VR) : Pass a Complex as a parameter is required, even if not used. Otherwise, compilation is KO. @@ -95,7 +97,9 @@ class Delaunay_triangulation_off_visitor_reader { } std::cout << std::endl; #endif // DEBUG_TRACES - complex_->insert(Point(point.size(), point.begin(), point.end())); + // Fill the point cloud + // VR: complex_->insert(Point(point.size(), point.begin(), point.end())); + point_cloud.push_back(Point(point.size(), point.begin(), point.end())); } // Off_reader visitor maximal_face implementation - not used @@ -103,9 +107,11 @@ class Delaunay_triangulation_off_visitor_reader { // For Delaunay Triangulation, only points are read } - // Off_reader visitor done implementation - not used + // Off_reader visitor done implementation void done() { - // Nothing to be done on end of OFF file read + // It is advised to insert all the points at a time in a Delaunay Triangulation because points are sorted at the + // beginning of the insertion + complex_->insert(point_cloud.begin(), point_cloud.end()); } /** \brief Returns the constructed Delaunay triangulation. @@ -119,7 +125,9 @@ class Delaunay_triangulation_off_visitor_reader { } }; -/** \brief OFF file reader implementation in order to construct a Delaunay triangulation. +/** + * \class Delaunay_triangulation_off_reader Delaunay_triangulation_off_io.h gudhi/Delaunay_triangulation_off_io.h + * \brief OFF file reader implementation in order to construct a Delaunay triangulation. * * This class is using the Delaunay_triangulation_off_visitor_reader to visit the OFF file according to Off_reader. * @@ -135,7 +143,7 @@ class Delaunay_triangulation_off_visitor_reader { * * When launching: * - * \code $> ./dtoffrw ../../data/points/alphacomplexdoc triangulated.off + * \code $> ./dtoffrw ../../data/points/alphacomplexdoc.off triangulated.off * \endcode * * the program output is: @@ -203,7 +211,9 @@ class Delaunay_triangulation_off_reader { Complex* complex_; }; -/** \brief OFF file writer from a Delaunay triangulation. 
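The new done() above delays triangulation construction until every point has been read, then performs a single range insertion. The same pattern in isolation, assuming CGAL's dD Delaunay_triangulation with the dynamic-dimension Epick_d kernel used elsewhere in this series:

#include <CGAL/Epick_d.h>
#include <CGAL/Delaunay_triangulation.h>
#include <vector>

typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > K;
typedef CGAL::Delaunay_triangulation<K> DT;
typedef K::Point_d Point;

// point_cloud is what the vertex() callbacks have accumulated.
void bulk_insert(DT& triangulation, const std::vector<Point>& point_cloud) {
  // One range insertion: CGAL sorts the whole range before locating and inserting,
  // which is usually much faster than inserting the points one by one as they are read.
  triangulation.insert(point_cloud.begin(), point_cloud.end());
}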
+/** + * \class Delaunay_triangulation_off_writer Delaunay_triangulation_off_io.h gudhi/Delaunay_triangulation_off_io.h + * \brief OFF file writer from a Delaunay triangulation. * * This class constructs the OFF file header according to http://www.geomview.org/docs/html/OFF.html * diff --git a/src/common/test/CMakeLists.txt b/src/common/test/CMakeLists.txt index 50655a93..12eecda8 100644 --- a/src/common/test/CMakeLists.txt +++ b/src/common/test/CMakeLists.txt @@ -21,8 +21,6 @@ if(CGAL_FOUND) add_executable ( dtoffrw_UT dtoffrw_unit_test.cpp ) target_link_libraries(dtoffrw_UT ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) - # Do not forget to copy test files in current binary dir - file(COPY "dtoffrw_alphashapedoc_result.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) # Do not forget to copy test files in current binary dir file(COPY "${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) @@ -32,6 +30,8 @@ if(CGAL_FOUND) --log_format=XML --log_sink=${CMAKE_SOURCE_DIR}/dtoffrw_UT.xml --log_level=test_suite --report_level=no) if (DIFF_PATH) + # Do not forget to copy test result files in current binary dir + file(COPY "dtoffrw_alphashapedoc_result.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) add_test(dtoffrw_diff_files_UT ${DIFF_PATH} ${CMAKE_CURRENT_BINARY_DIR}/UT.off ${CMAKE_CURRENT_BINARY_DIR}/dtoffrw_alphashapedoc_result.off) endif() diff --git a/src/common/test/dtoffrw_alphashapedoc_result.off b/src/common/test/dtoffrw_alphashapedoc_result.off index 03b7ca75..d1839a43 100644 --- a/src/common/test/dtoffrw_alphashapedoc_result.off +++ b/src/common/test/dtoffrw_alphashapedoc_result.off @@ -1,15 +1,15 @@ nOFF 2 7 6 0 +9 17 +0 14 1 1 -7 0 +2 19 4 6 9 6 -0 14 -2 19 -9 17 -3 0 1 2 -3 3 2 1 -3 4 0 2 -3 4 2 6 -3 6 2 3 +7 0 +3 5 0 4 +3 0 1 4 +3 3 1 0 +3 4 1 2 3 5 4 6 +3 6 4 2 -- cgit v1.2.3 From 298c080b45250f2b8f16a0c31ace9bb6fc666c93 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Wed, 16 Mar 2016 08:33:04 +0000 Subject: CppCheck and CppLint fixes for alpha complexes git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/alphashapes@1047 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: a9b42ad2f8751384b5227fa3da8f380cda9b3696 --- src/Alpha_complex/doc/Intro_alpha_complex.h | 4 ++-- .../example/Alpha_complex_from_off.cpp | 2 +- src/Alpha_complex/include/gudhi/Alpha_complex.h | 21 ++++++++++----------- src/GudhUI/model/Model.h | 2 +- src/GudhUI/utils/Bar_code_persistence.h | 11 ++++++++--- src/GudhUI/utils/Persistence_compute.h | 1 - src/GudhUI/view/FirstCoordProjector.h | 1 - .../example/alpha_complex_3d_persistence.cpp | 15 ++++----------- .../example/Delaunay_triangulation_off_rw.cpp | 14 +++++++------- .../include/gudhi/Delaunay_triangulation_off_io.h | 10 +++------- src/common/include/gudhi/Off_reader.h | 2 +- 11 files changed, 37 insertions(+), 46 deletions(-) (limited to 'src/common') diff --git a/src/Alpha_complex/doc/Intro_alpha_complex.h b/src/Alpha_complex/doc/Intro_alpha_complex.h index deecf93f..f55d7029 100644 --- a/src/Alpha_complex/doc/Intro_alpha_complex.h +++ b/src/Alpha_complex/doc/Intro_alpha_complex.h @@ -161,8 +161,8 @@ namespace alphacomplex { */ /** @} */ // end defgroup alpha_complex -} // namespace alphacomplex +} // namespace alphacomplex -} // namespace Gudhi +} // namespace Gudhi #endif // INTRO_ALPHA_COMPLEX_H_ diff --git a/src/Alpha_complex/example/Alpha_complex_from_off.cpp b/src/Alpha_complex/example/Alpha_complex_from_off.cpp index 80445a22..18a1a20d 100644 --- 
a/src/Alpha_complex/example/Alpha_complex_from_off.cpp +++ b/src/Alpha_complex/example/Alpha_complex_from_off.cpp @@ -27,7 +27,7 @@ int main(int argc, char **argv) { std::streambuf* streambufffer; std::ofstream ouput_file_stream; - + if (argc == 4) { ouput_file_stream.open(std::string(argv[3])); streambufffer = ouput_file_stream.rdbuf(); diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex.h b/src/Alpha_complex/include/gudhi/Alpha_complex.h index a69afb51..eab66384 100644 --- a/src/Alpha_complex/include/gudhi/Alpha_complex.h +++ b/src/Alpha_complex/include/gudhi/Alpha_complex.h @@ -20,8 +20,8 @@ * along with this program. If not, see . */ -#ifndef ALPHA_COMPLEX_H_ -#define ALPHA_COMPLEX_H_ +#ifndef DOC_ALPHA_COMPLEX_INTRO_ALPHA_COMPLEX_H_ +#define DOC_ALPHA_COMPLEX_INTRO_ALPHA_COMPLEX_H_ // to construct a simplex_tree from Delaunay_triangulation #include @@ -97,7 +97,6 @@ class Alpha_complex : public Simplex_tree<> { // Double map type to switch from CGAL vertex iterator to simplex tree vertex handle and vice versa. typedef typename std::map< CGAL_vertex_iterator, Vertex_handle > Map_vertex_iterator_to_handle; - //typedef typename std::map< Vertex_handle, CGAL_vertex_iterator > Map_vertex_handle_to_iterator; typedef typename std::vector< CGAL_vertex_iterator > Vector_vertex_iterator; private: @@ -141,7 +140,7 @@ class Alpha_complex : public Simplex_tree<> { Alpha_complex(Delaunay_triangulation* triangulation_ptr, Filtration_value max_alpha_square = std::numeric_limits::infinity()) : triangulation_(triangulation_ptr) { - init(max_alpha_square); + init(max_alpha_square); } /** \brief Alpha_complex constructor from a list of points. @@ -160,17 +159,17 @@ class Alpha_complex : public Simplex_tree<> { : triangulation_(nullptr) { auto first = std::begin(points); auto last = std::end(points); - + GUDHI_CHECK((first == last), - std::invalid_argument ("Alpha_complex::Alpha_complex(InputPointRange) - Empty input point range")); - + std::invalid_argument("Alpha_complex::Alpha_complex(InputPointRange) - Empty input point range")); + if (first != last) { // point_dimension function initialization Point_Dimension point_dimension = kernel_.point_dimension_d_object(); // Delaunay triangulation is point dimension minus one. 
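As an aside on the constructor reworked in this hunk, a minimal caller sketch for building an alpha complex from a point range (kernel, namespace and 2D coordinates follow this branch and data/points/alphashapedoc.off; treat the snippet as illustrative rather than as the shipped example):

#include <gudhi/Alpha_complex.h>
#include <CGAL/Epick_d.h>
#include <iostream>
#include <vector>

typedef CGAL::Epick_d< CGAL::Dimension_tag<2> > Kernel;
typedef Kernel::Point_d Point;

int main() {
  std::vector<Point> points;
  points.push_back(Point(1.0, 1.0));
  points.push_back(Point(7.0, 0.0));
  points.push_back(Point(4.0, 6.0));
  // Simplices whose squared alpha value exceeds 60.0 are not kept.
  Gudhi::alphacomplex::Alpha_complex<Kernel> alpha_complex(points, 60.0);
  for (auto simplex : alpha_complex.filtration_simplex_range()) {
    std::cout << alpha_complex.filtration(simplex) << std::endl;
  }
  return 0;
}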
triangulation_ = new Delaunay_triangulation(point_dimension(*first) - 1); - + size_type inserted = triangulation_->insert(first, last); if (inserted != (last -first)) { std::cerr << "Alpha_complex - insertion failed " << inserted << " != " << (last -first) << "\n"; @@ -227,7 +226,7 @@ class Alpha_complex : public Simplex_tree<> { } set_dimension(triangulation_->maximal_dimension()); - // set_filtration to +inf for prune_above_filtration to be done (if necessary) + // set_filtration to +inf for prune_above_filtration to be done (if necessary) set_filtration(std::numeric_limits::infinity()); // -------------------------------------------------------------------------------------------- @@ -247,7 +246,7 @@ class Alpha_complex : public Simplex_tree<> { } } // -------------------------------------------------------------------------------------------- - + // -------------------------------------------------------------------------------------------- // Simplex_tree construction from loop on triangulation finite full cells list for (auto cit = triangulation_->finite_full_cells_begin(); cit != triangulation_->finite_full_cells_end(); ++cit) { @@ -393,4 +392,4 @@ class Alpha_complex : public Simplex_tree<> { } // namespace Gudhi -#endif // ALPHA_COMPLEX_H_ +#endif // DOC_ALPHA_COMPLEX_INTRO_ALPHA_COMPLEX_H_ diff --git a/src/GudhUI/model/Model.h b/src/GudhUI/model/Model.h index 99a82eba..1c39c0d7 100644 --- a/src/GudhUI/model/Model.h +++ b/src/GudhUI/model/Model.h @@ -71,7 +71,7 @@ class CGAL_geometric_flag_complex_wrapper { void maximal_face(std::vector vertices) { if (!load_only_points_) { - //std::cout << "size:" << vertices.size() << std::endl; + // std::cout << "size:" << vertices.size() << std::endl; for (int i = 0; i < vertices.size(); ++i) for (int j = i + 1; j < vertices.size(); ++j) complex_.add_edge(Vertex_handle(vertices[i]), Vertex_handle(vertices[j])); diff --git a/src/GudhUI/utils/Bar_code_persistence.h b/src/GudhUI/utils/Bar_code_persistence.h index a4cd8156..b527d684 100644 --- a/src/GudhUI/utils/Bar_code_persistence.h +++ b/src/GudhUI/utils/Bar_code_persistence.h @@ -12,6 +12,10 @@ #include #include // NaN, infinity #include // for pair +#include + +#ifndef UTILS_BAR_CODE_PERSISTENCE_H_ +#define UTILS_BAR_CODE_PERSISTENCE_H_ class Bar_code_persistence { private: @@ -21,7 +25,6 @@ class Bar_code_persistence { double max_death; public: - Bar_code_persistence() : min_birth(std::numeric_limits::quiet_NaN()), max_death(std::numeric_limits::quiet_NaN()) { } @@ -45,13 +48,13 @@ class Bar_code_persistence { QGraphicsScene * scene = new QGraphicsScene(); view->setScene(scene); double ratio = 600.0 / (max_death - min_birth); - //std::cout << "min_birth=" << min_birth << " - max_death=" << max_death << " - ratio=" << ratio << std::endl; + // std::cout << "min_birth=" << min_birth << " - max_death=" << max_death << " - ratio=" << ratio << std::endl; double height = 0.0, birth = 0.0, death = 0.0; int pers_num = 1; for (auto& persistence : persistence_vector) { height = 5.0 * pers_num; - //std::cout << "[" << pers_num << "] birth=" << persistence.first << " - death=" << persistence.second << std::endl; + // std::cout << "[" << pers_num << "] birth=" << persistence.first << " - death=" << persistence.second << std::endl; if (std::isfinite(persistence.first)) birth = ((persistence.first - min_birth) * ratio) + 50.0; else @@ -83,3 +86,5 @@ class Bar_code_persistence { view->show(); } }; + +#endif // UTILS_BAR_CODE_PERSISTENCE_H_ diff --git a/src/GudhUI/utils/Persistence_compute.h 
b/src/GudhUI/utils/Persistence_compute.h index 1f04cc6b..97165490 100644 --- a/src/GudhUI/utils/Persistence_compute.h +++ b/src/GudhUI/utils/Persistence_compute.h @@ -85,7 +85,6 @@ template class Persistence_compute { stream << "p dimension birth death: \n"; pcoh.output_diagram(stream); - } }; diff --git a/src/GudhUI/view/FirstCoordProjector.h b/src/GudhUI/view/FirstCoordProjector.h index 3ceda3f5..a4027b7f 100644 --- a/src/GudhUI/view/FirstCoordProjector.h +++ b/src/GudhUI/view/FirstCoordProjector.h @@ -36,7 +36,6 @@ class FirstCoordProjector3D : public Projector3D { return Point_3(p.x(), p.y(), p.z()); else if (p.dimension() >= 2) return Point_3(p.x(), p.y(), 0.0); - } }; diff --git a/src/Persistent_cohomology/example/alpha_complex_3d_persistence.cpp b/src/Persistent_cohomology/example/alpha_complex_3d_persistence.cpp index ac208957..f81951ce 100644 --- a/src/Persistent_cohomology/example/alpha_complex_3d_persistence.cpp +++ b/src/Persistent_cohomology/example/alpha_complex_3d_persistence.cpp @@ -39,9 +39,6 @@ #include #include -using namespace Gudhi; -using namespace Gudhi::persistent_cohomology; - // Alpha_shape_3 templates type definitions typedef CGAL::Exact_predicates_inexact_constructions_kernel Kernel; typedef CGAL::Alpha_shape_vertex_base_3 Vb; @@ -66,11 +63,12 @@ typedef Alpha_shape_3::Edge Edge_3; typedef std::list Vertex_list; // gudhi type definition -typedef Simplex_tree ST; +typedef Gudhi::Simplex_tree ST; typedef ST::Vertex_handle Simplex_tree_vertex; typedef std::map Alpha_shape_simplex_tree_map; typedef std::pair Alpha_shape_simplex_tree_pair; typedef std::vector< Simplex_tree_vertex > Simplex_tree_vector_vertex; +typedef Gudhi::persistent_cohomology::Persistent_cohomology< ST, Gudhi::persistent_cohomology::Field_Zp > PCOH; Vertex_list from(const Cell_handle& ch) { Vertex_list the_list; @@ -131,12 +129,7 @@ int main(int argc, char * const argv[]) { usage(argv[0]); } - int coeff_field_characteristic = 0; - int returnedScanValue = sscanf(argv[2], "%d", &coeff_field_characteristic); - if ((returnedScanValue == EOF) || (coeff_field_characteristic <= 0)) { - std::cerr << "Error: " << argv[2] << " is not correct\n"; - usage(argv[0]); - } + int coeff_field_characteristic = atoi(argv[2]); Filtration_value min_persistence = 0.0; returnedScanValue = sscanf(argv[3], "%lf", &min_persistence); @@ -282,7 +275,7 @@ int main(int argc, char * const argv[]) { std::cout << "Simplex_tree dim: " << simplex_tree.dimension() << std::endl; // Compute the persistence diagram of the complex - Persistent_cohomology< ST, Field_Zp > pcoh(simplex_tree); + PCOH pcoh(simplex_tree); // initializes the coefficient field for homology pcoh.init_coefficients(coeff_field_characteristic); diff --git a/src/common/example/Delaunay_triangulation_off_rw.cpp b/src/common/example/Delaunay_triangulation_off_rw.cpp index 75e4fafb..4c7a9aaf 100644 --- a/src/common/example/Delaunay_triangulation_off_rw.cpp +++ b/src/common/example/Delaunay_triangulation_off_rw.cpp @@ -10,12 +10,12 @@ // Use dynamic_dimension_tag for the user to be able to set dimension typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > K; typedef CGAL::Delaunay_triangulation T; -// The triangulation uses the default instantiation of the +// The triangulation uses the default instantiation of the // TriangulationDataStructure template parameter void usage(char * const progName) { std::cerr << "Usage: " << progName << " inputFile.off outputFile.off" << std::endl; - exit(-1); // ----- >> + exit(-1); } int main(int argc, char **argv) { @@ -30,9 +30,9 @@ 
int main(int argc, char **argv) { // Check the read operation was correct if (!off_reader.is_valid()) { std::cerr << "Unable to read file " << offInputFile << std::endl; - exit(-1); // ----- >> + exit(-1); } - + // Retrieve the triangulation T* triangulation = off_reader.get_complex(); // Operations on triangulation @@ -47,8 +47,8 @@ int main(int argc, char **argv) { // Check the write operation was correct if (!off_writer.is_valid()) { std::cerr << "Unable to write file " << offOutputFile << std::endl; - exit(-1); // ----- >> + exit(-1); } - + return 0; -} \ No newline at end of file +} diff --git a/src/common/include/gudhi/Delaunay_triangulation_off_io.h b/src/common/include/gudhi/Delaunay_triangulation_off_io.h index 529774f0..e623cf7b 100644 --- a/src/common/include/gudhi/Delaunay_triangulation_off_io.h +++ b/src/common/include/gudhi/Delaunay_triangulation_off_io.h @@ -44,8 +44,8 @@ class Delaunay_triangulation_off_visitor_reader { Complex* complex_; typedef typename Complex::Point Point; std::vector point_cloud; - public: + public: // TODO(VR) : Pass a Complex as a parameter is required, even if not used. Otherwise, compilation is KO. /** \brief Delaunay_triangulation_off_visitor_reader constructor @@ -153,7 +153,6 @@ class Delaunay_triangulation_off_visitor_reader { template class Delaunay_triangulation_off_reader { public: - /** \brief Reads the OFF file and constructs the Delaunay triangulation from the points * that are in the OFF file. * @@ -180,7 +179,6 @@ class Delaunay_triangulation_off_reader { std::cerr << "Delaunay_triangulation_off_reader::Delaunay_triangulation_off_reader could not open file " << name_file << "\n"; } - } /** \brief Returns if the OFF file read operation was successful or not. @@ -201,7 +199,6 @@ class Delaunay_triangulation_off_reader { if (valid_) return complex_; return nullptr; - } private: @@ -285,7 +282,6 @@ class Delaunay_triangulation_off_writer { } for (auto cit = complex_ptr->finite_full_cells_begin(); cit != complex_ptr->finite_full_cells_end(); ++cit) { - std::vector vertexVector; stream << std::distance(cit->vertices_begin(), cit->vertices_end()) << " "; for (auto vit = cit->vertices_begin(); vit != cit->vertices_end(); ++vit) { stream << points_to_vh[(*vit)->point()] - 1 << " "; @@ -299,7 +295,7 @@ class Delaunay_triangulation_off_writer { name_file << "\n"; } } - + /** \brief Returns if the OFF write operation was successful or not. * * @return OFF file write status. @@ -313,6 +309,6 @@ class Delaunay_triangulation_off_writer { bool valid_; }; -} // namespace Gudhi +} // namespace Gudhi #endif // DELAUNAY_TRIANGULATION_OFF_IO_H_ diff --git a/src/common/include/gudhi/Off_reader.h b/src/common/include/gudhi/Off_reader.h index e45a7600..2420ae72 100644 --- a/src/common/include/gudhi/Off_reader.h +++ b/src/common/include/gudhi/Off_reader.h @@ -160,7 +160,7 @@ class Off_reader { iss >> num_face_vertices; std::vector face; face.assign(std::istream_iterator(iss), std::istream_iterator()); - //if (face.size() != (off_info_.dim + 1)) return false; + // if (face.size() != (off_info_.dim + 1)) return false; visitor.maximal_face(face); } return true; -- cgit v1.2.3 From 26e05a189a31846cd1aa97e055ff3595d9dddca1 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Thu, 17 Mar 2016 13:44:28 +0000 Subject: Use post and exception doxygen tag instead of warnings everywhere. 
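Condensed, the cleaned-up dtoffrw example above boils down to the following (error handling trimmed; T is the CGAL Delaunay triangulation typedef declared at the top of the example):

  Gudhi::Delaunay_triangulation_off_reader<T> off_reader(offInputFile);
  if (!off_reader.is_valid()) return -1;   // read failed
  T* triangulation = off_reader.get_complex();
  std::cout << "Number of vertices= " << triangulation->number_of_vertices() << std::endl;
  Gudhi::Delaunay_triangulation_off_writer<T> off_writer(offOutputFile, triangulation);
  return off_writer.is_valid() ? 0 : -1;   // write failed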
git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/alphashapes@1053 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 5e263835e662d66ef068c73d3efe71f72f90de10 --- src/Alpha_complex/include/gudhi/Alpha_complex.h | 5 +- src/Simplex_tree/include/gudhi/Simplex_tree.h | 79 ++++++++-------------- .../include/gudhi/Delaunay_triangulation_off_io.h | 19 ++---- 3 files changed, 37 insertions(+), 66 deletions(-) (limited to 'src/common') diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex.h b/src/Alpha_complex/include/gudhi/Alpha_complex.h index 415aa032..2d2d77a5 100644 --- a/src/Alpha_complex/include/gudhi/Alpha_complex.h +++ b/src/Alpha_complex/include/gudhi/Alpha_complex.h @@ -150,8 +150,7 @@ class Alpha_complex : public Simplex_tree<> { * * The type InputPointRange must be a range for which std::begin and * std::end return input iterators on a Kernel::Point_d. - * \warning In debug mode, the exception std::invalid_argument is thrown if an empty input point range is passed as - * argument. + * \exception std::invalid_argument In debug mode, if an empty input point range is passed as argument. */ template Alpha_complex(const InputPointRange& points, @@ -191,7 +190,7 @@ class Alpha_complex : public Simplex_tree<> { * * @param[in] vertex Vertex handle of the point to retrieve. * @return The point found. - * @warning Exception std::out_of_range is thrown in case vertex is not found. + * @exception std::out_of_range In case vertex is not found (cf. std::vector::at). */ Point_d get_point(Vertex_handle vertex) const { return vertex_handle_to_iterator_.at(vertex)->point(); diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree.h b/src/Simplex_tree/include/gudhi/Simplex_tree.h index b7b4b8b2..7b55df11 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree.h @@ -47,7 +47,7 @@ #include #include // Inf #include -#include // for std::max +#include // for std::max namespace Gudhi { /** \defgroup simplex_tree Filtered Complexes @@ -141,7 +141,8 @@ class Simplex_tree { void assign_key(Simplex_key) { } Simplex_key key() const { assert(false); return -1; } }; - typedef typename std::conditional::type Key_simplex_base; + typedef typename std::conditional::type + Key_simplex_base; struct Filtration_simplex_base_real { Filtration_simplex_base_real() : filt_(0) {} @@ -450,7 +451,7 @@ class Simplex_tree { } /** \brief Sets the filtration value of a simplex. - * \warning In debug mode, the exception std::invalid_argument is thrown if sh is a null_simplex. + * \exception std::invalid_argument In debug mode, if sh is a null_simplex. */ void assign_filtration(Simplex_handle sh, Filtration_value fv) { GUDHI_CHECK(sh == null_simplex(), @@ -596,7 +597,19 @@ class Simplex_tree { private: /** \brief Inserts a simplex represented by a vector of vertex. - \warning the vector must be sorted by increasing vertex handle order */ + * @param[in] simplex vector of Vertex_handles, representing the vertices of the new simplex. The vector must be + * sorted by increasing vertex handle order. + * @param[in] filtration the filtration value assigned to the new simplex. + * @return If the new simplex is inserted successfully (i.e. it was not in the + * simplicial complex yet) the bool is set to true and the Simplex_handle is the handle assigned + * to the new simplex. + * If the insertion fails (the simplex is already there), the bool is set to false. 
If the insertion + * fails and the simplex already in the complex has a filtration value strictly bigger than 'filtration', + * we assign this simplex with the new value 'filtration', and set the Simplex_handle field of the + * output pair to the Simplex_handle of the simplex. Otherwise, we set the Simplex_handle part to + * null_simplex. + * + */ std::pair insert_vertex_vector(const std::vector& simplex, Filtration_value filtration) { Siblings * curr_sib = &root_; @@ -629,7 +642,7 @@ class Simplex_tree { * * @param[in] simplex range of Vertex_handles, representing the vertices of the new simplex * @param[in] filtration the filtration value assigned to the new simplex. - * The return type is a pair. If the new simplex is inserted successfully (i.e. it was not in the + * @return If the new simplex is inserted successfully (i.e. it was not in the * simplicial complex yet) the bool is set to true and the Simplex_handle is the handle assigned * to the new simplex. * If the insertion fails (the simplex is already there), the bool is set to false. If the insertion @@ -668,7 +681,7 @@ class Simplex_tree { * * @param[in] Nsimplex range of Vertex_handles, representing the vertices of the new N-simplex * @param[in] filtration the filtration value assigned to the new N-simplex. - * The return type is a pair. If the new simplex is inserted successfully (i.e. it was not in the + * @return If the new simplex is inserted successfully (i.e. it was not in the * simplicial complex yet) the bool is set to true and the Simplex_handle is the handle assigned * to the new simplex. * If the insertion fails (the simplex is already there), the bool is set to false. If the insertion @@ -677,7 +690,7 @@ class Simplex_tree { * output pair to the Simplex_handle of the simplex. Otherwise, we set the Simplex_handle part to * null_simplex. */ - template> + template> std::pair insert_simplex_and_subfaces(const InputVertexRange& Nsimplex, Filtration_value filtration = 0) { auto first = std::begin(Nsimplex); @@ -1124,7 +1137,7 @@ class Simplex_tree { * The simplex tree is browsed starting from the root until the leaf, and the filtration values are set with their * parent value (increased), in case the values are decreasing. * @return The filtration modification information. - * \warning Some simplex tree functions require the filtration to be valid. `make_filtration_non_decreasing()` + * \post Some simplex tree functions require the filtration to be valid. `make_filtration_non_decreasing()` * function is not launching `initialize_filtration()` but returns the filtration modification information. If the * complex has changed , please call `initialize_filtration()` to recompute it. */ @@ -1154,7 +1167,7 @@ class Simplex_tree { Boundary_simplex_iterator max_border = std::max_element(std::begin(boundary), std::end(boundary), [](Simplex_handle sh1, Simplex_handle sh2) { return filtration(sh1) < filtration(sh2); - } ); + }); Filtration_value max_filt_border_value = filtration(*max_border); if (simplex.second.filtration() < max_filt_border_value) { @@ -1173,7 +1186,7 @@ class Simplex_tree { public: /** \brief Prune above filtration value given as parameter. * @param[in] filtration Maximum threshold value. - * \warning The filtration must be valid. If the filtration has not been initialized yet, the method initializes it + * \post The filtration must be valid. If the filtration has not been initialized yet, the method initializes it * (i.e. order the simplices). 
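A short usage sketch of the insertion semantics documented above (values illustrative):

#include <gudhi/Simplex_tree.h>

void insertion_sketch() {
  Gudhi::Simplex_tree<> st;
  // Insert the triangle {0,1,2} together with all its subfaces at filtration value 0.3.
  auto result = st.insert_simplex_and_subfaces({0, 1, 2}, 0.3);
  // result.second is true here: the 2-simplex was not in the complex before.
  // Inserting it again returns false and, because 0.3 is not strictly smaller than
  // the stored value, the Simplex_handle part of the returned pair is null_simplex().
  auto again = st.insert_simplex_and_subfaces({0, 1, 2}, 0.3);
  (void) result;
  (void) again;
}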
If the complex has changed since the last time the filtration was initialized, please * call `initialize_filtration()` to recompute it. */ @@ -1182,7 +1195,7 @@ class Simplex_tree { if (filtration_vect_.empty()) { initialize_filtration(); } - + std::vector> simplex_list_to_removed; // Loop in reverse mode until threshold is reached // Do not erase while looping, because removing is shifting data in a flat_map @@ -1234,18 +1247,18 @@ class Simplex_tree { rec_prune_above_filtration(simplex.second.children(), filt); } }*/ - + /** \brief Remove a maximal simplex. * @param[in] sh Simplex handle on the maximal simplex to remove. * \pre Please check the simplex has no coface before removing it. - * \warning In debug mode, the exception std::invalid_argument is thrown if sh has children. - * \warning Be aware that removing is shifting data in a flat_map (initialize_filtration to be done). + * \exception std::invalid_argument In debug mode, if sh has children. + * \post Be aware that removing is shifting data in a flat_map (initialize_filtration to be done). */ void remove_maximal_simplex(Simplex_handle sh) { // Guarantee the simplex has no children GUDHI_CHECK(has_children(sh), std::invalid_argument("Simplex_tree::remove_maximal_simplex - argument has children")); - + // Simplex is a leaf, it means the child is the Siblings owning the leaf Siblings* child = sh->second.children(); @@ -1259,42 +1272,6 @@ class Simplex_tree { delete child; } } -/***************************************************************************************************************/ - public: - /** \brief Prints the simplex_tree hierarchically. - * Since it prints the vertices recursively, one can watch its tree shape. - */ - void debug_tree() { - std::cout << "{" << &root_ << "} -------------------------------------------------------------------" << std::endl; - for (auto sh = root_.members().begin(); sh != root_.members().end(); ++sh) { - std::cout << sh->first << " [" << sh->second.filtration() << "] "; - if (has_children(sh)) { - rec_debug_tree(sh->second.children()); - } else { - std::cout << " {- " << sh->second.children() << "} "; - } - std::cout << std::endl; - } - std::cout << "--------------------------------------------------------------------------------------" << std::endl; - } - - - /** \brief Recursively prints the simplex_tree, using depth first search. 
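The two removal operations documented above compose as follows; a hedged sketch that respects the stated pre- and post-conditions:

#include <gudhi/Simplex_tree.h>
#include <vector>

void prune_and_remove_sketch() {
  Gudhi::Simplex_tree<> st;
  st.insert_simplex_and_subfaces({0, 1, 2}, 2.0);
  st.insert_simplex_and_subfaces({3}, 5.0);

  st.prune_above_filtration(4.0);   // removes the vertex {3}, whose value 5.0 exceeds the threshold
  st.initialize_filtration();       // recompute the filtration order after the removal

  std::vector<Gudhi::Simplex_tree<>::Vertex_handle> triangle = {0, 1, 2};
  auto sh = st.find(triangle);
  // {0,1,2} is maximal here (it has no coface), so removing it is allowed.
  if (sh != st.null_simplex()) {
    st.remove_maximal_simplex(sh);
    st.initialize_filtration();
  }
}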
*/ - private: - void rec_debug_tree(Siblings * sib) { - std::cout << " {" << sib << "} ("; - for (auto sh = sib->members().begin(); sh != sib->members().end(); ++sh) { - std::cout << " " << sh->first << " [" << sh->second.filtration() << "] "; - if (has_children(sh)) { - rec_debug_tree(sh->second.children()); - } else { - std::cout << " {- " << sh->second.children() << "} "; - } - } - std::cout << ")"; - } - -/*****************************************************************************************************************/ private: Vertex_handle null_vertex_; diff --git a/src/common/include/gudhi/Delaunay_triangulation_off_io.h b/src/common/include/gudhi/Delaunay_triangulation_off_io.h index e623cf7b..7bf5569e 100644 --- a/src/common/include/gudhi/Delaunay_triangulation_off_io.h +++ b/src/common/include/gudhi/Delaunay_triangulation_off_io.h @@ -98,7 +98,6 @@ class Delaunay_triangulation_off_visitor_reader { std::cout << std::endl; #endif // DEBUG_TRACES // Fill the point cloud - // VR: complex_->insert(Point(point.size(), point.begin(), point.end())); point_cloud.push_back(Point(point.size(), point.begin(), point.end())); } @@ -116,9 +115,7 @@ class Delaunay_triangulation_off_visitor_reader { /** \brief Returns the constructed Delaunay triangulation. * - * @return A pointer on the Delaunay triangulation. - * - * @warning The returned pointer can be nullptr. + * @return A pointer on the Delaunay triangulation. Default value is nullptr. */ Complex* get_complex() const { return complex_; @@ -157,8 +154,8 @@ class Delaunay_triangulation_off_reader { * that are in the OFF file. * * @param[in] name_file OFF file to read. - * - * @warning Check with is_valid() function to see if read operation was successful. + * + * \post Check with is_valid() function to see if read operation was successful. */ Delaunay_triangulation_off_reader(const std::string & name_file) : valid_(false) { @@ -191,9 +188,7 @@ class Delaunay_triangulation_off_reader { /** \brief Returns the constructed Delaunay triangulation. * - * @return A pointer on the Delaunay triangulation. - * - * @warning The returned pointer can be nullptr. + * @return A pointer on the Delaunay triangulation. Default value is nullptr. */ Complex* get_complex() const { if (valid_) @@ -242,12 +237,12 @@ class Delaunay_triangulation_off_writer { public: typedef typename Complex::Point Point; - /** \brief Writes the OFF file from the Delaunay triangulation + /** \brief Writes the OFF file from the Delaunay triangulation. * * @param[in] name_file OFF file to write. * @param[in] complex_ptr pointer on a Delaunay triangulation. - * - * @warning Check with is_valid() function to see if write operation was successful. + * + * \post Check with is_valid() function to see if write operation was successful. */ Delaunay_triangulation_off_writer(const std::string & name_file, Complex* complex_ptr) : valid_(false) { -- cgit v1.2.3 From 5276b0a9e344ed0bb4fdb8b079f2ce86649d12a4 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Tue, 22 Mar 2016 16:07:08 +0000 Subject: GUDHI_CHECK was not intuitive. Reverse GUDHI_CHECK calls. No exception when no point is given in contrction. 
git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/alphashapes@1070 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: bf6a08d48b5b29a5b24bf5107116e399dfc0a6bc --- src/Alpha_complex/include/gudhi/Alpha_complex.h | 4 ---- src/Simplex_tree/include/gudhi/Simplex_tree.h | 4 ++-- src/common/include/gudhi/Debug_utils.h | 6 +++--- 3 files changed, 5 insertions(+), 9 deletions(-) (limited to 'src/common') diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex.h b/src/Alpha_complex/include/gudhi/Alpha_complex.h index 7c64b53e..330b3b34 100644 --- a/src/Alpha_complex/include/gudhi/Alpha_complex.h +++ b/src/Alpha_complex/include/gudhi/Alpha_complex.h @@ -159,7 +159,6 @@ class Alpha_complex : public Simplex_tree<> { * * The type InputPointRange must be a range for which std::begin and * std::end return input iterators on a Kernel::Point_d. - * \exception std::invalid_argument In debug mode, if an empty input point range is passed as argument. */ template Alpha_complex(const InputPointRange& points, @@ -168,9 +167,6 @@ class Alpha_complex : public Simplex_tree<> { auto first = std::begin(points); auto last = std::end(points); - GUDHI_CHECK((first == last), - std::invalid_argument("Alpha_complex::Alpha_complex(InputPointRange) - Empty input point range")); - if (first != last) { // point_dimension function initialization Point_Dimension point_dimension = kernel_.point_dimension_d_object(); diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree.h b/src/Simplex_tree/include/gudhi/Simplex_tree.h index 7b55df11..aa8f059e 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree.h @@ -454,7 +454,7 @@ class Simplex_tree { * \exception std::invalid_argument In debug mode, if sh is a null_simplex. */ void assign_filtration(Simplex_handle sh, Filtration_value fv) { - GUDHI_CHECK(sh == null_simplex(), + GUDHI_CHECK(sh != null_simplex(), std::invalid_argument("Simplex_tree::assign_filtration - cannot assign filtration on null_simplex")); sh->second.assign_filtration(fv); } @@ -1256,7 +1256,7 @@ class Simplex_tree { */ void remove_maximal_simplex(Simplex_handle sh) { // Guarantee the simplex has no children - GUDHI_CHECK(has_children(sh), + GUDHI_CHECK(!has_children(sh), std::invalid_argument("Simplex_tree::remove_maximal_simplex - argument has children")); // Simplex is a leaf, it means the child is the Siblings owning the leaf diff --git a/src/common/include/gudhi/Debug_utils.h b/src/common/include/gudhi/Debug_utils.h index 48d61fef..7573a9db 100644 --- a/src/common/include/gudhi/Debug_utils.h +++ b/src/common/include/gudhi/Debug_utils.h @@ -29,12 +29,12 @@ #define GUDHI_DEBUG #endif -// GUDHI_CHECK throw an exception on condition in debug mode, but does nothing in release mode +// GUDHI_CHECK throw an exception if expression is false in debug mode, but does nothing in release mode // Could assert in release mode, but cmake sets NDEBUG (for "NO DEBUG") in this mode, means assert does nothing. 
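After this reversal the first argument of GUDHI_CHECK is the condition that must hold, exactly as with assert(), and the exception is raised when it is false; the macro body, shown just below, reduces to (void) 0 in release builds where cmake defines NDEBUG and GUDHI_DEBUG is left unset. A caller-side sketch (hypothetical helper, not part of the patch):

#include <gudhi/Debug_utils.h>
#include <stdexcept>

template <class SimplicialComplex>
void set_value_checked(SimplicialComplex& st,
                       typename SimplicialComplex::Simplex_handle sh,
                       typename SimplicialComplex::Filtration_value fv) {
  // Condition first, exception second: throws only when sh is the null simplex.
  GUDHI_CHECK(sh != st.null_simplex(),
              std::invalid_argument("cannot assign a filtration value to the null simplex"));
  st.assign_filtration(sh, fv);
}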
#ifdef GUDHI_DEBUG - #define GUDHI_CHECK(cond, excpt) if (cond) throw excpt + #define GUDHI_CHECK(expression, excpt) if ((expression) == 0) throw excpt #else - #define GUDHI_CHECK(cond, excpt) (void) 0 + #define GUDHI_CHECK(expression, excpt) (void) 0 #endif #define PRINT(a) std::cerr << #a << ": " << (a) << " (DISP)" << std::endl -- cgit v1.2.3 From 19fb1ba90b56e120514c98e87fc59bb1635eed29 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Wed, 30 Mar 2016 09:26:51 +0000 Subject: Cubical complex for new doxygen version git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/Doxygen_for_GUDHI_1.3.0@1083 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 77e7fa96f9ed2f2ccd9f65bb1f6b325737f863f5 --- .../doc/Cubical_complex_representation.ipe | 732 +++++++++++++++++++++ .../doc/Cubical_complex_representation.png | Bin 0 -> 19167 bytes .../doc/Gudhi_Cubical_Complex_doc.h | 2 +- src/common/doc/main_page.h | 62 +- 4 files changed, 772 insertions(+), 24 deletions(-) create mode 100644 src/Bitmap_cubical_complex/doc/Cubical_complex_representation.ipe create mode 100644 src/Bitmap_cubical_complex/doc/Cubical_complex_representation.png (limited to 'src/common') diff --git a/src/Bitmap_cubical_complex/doc/Cubical_complex_representation.ipe b/src/Bitmap_cubical_complex/doc/Cubical_complex_representation.ipe new file mode 100644 index 00000000..bec245e7 --- /dev/null +++ b/src/Bitmap_cubical_complex/doc/Cubical_complex_representation.ipe @@ -0,0 +1,732 @@ + + + + + + + +0 0 m +-1 0.333 l +-1 -0.333 l +h + + + + +0 0 m +-1 0.333 l +-1 -0.333 l +h + + + + +0 0 m +-1 0.333 l +-0.8 0 l +-1 -0.333 l +h + + + + +0 0 m +-1 0.333 l +-0.8 0 l +-1 -0.333 l +h + + + + +0.6 0 0 0.6 0 0 e +0.4 0 0 0.4 0 0 e + + + + +0.6 0 0 0.6 0 0 e + + + + + +0.5 0 0 0.5 0 0 e + + +0.6 0 0 0.6 0 0 e +0.4 0 0 0.4 0 0 e + + + + + +-0.6 -0.6 m +0.6 -0.6 l +0.6 0.6 l +-0.6 0.6 l +h +-0.4 -0.4 m +0.4 -0.4 l +0.4 0.4 l +-0.4 0.4 l +h + + + + +-0.6 -0.6 m +0.6 -0.6 l +0.6 0.6 l +-0.6 0.6 l +h + + + + + +-0.5 -0.5 m +0.5 -0.5 l +0.5 0.5 l +-0.5 0.5 l +h + + +-0.6 -0.6 m +0.6 -0.6 l +0.6 0.6 l +-0.6 0.6 l +h +-0.4 -0.4 m +0.4 -0.4 l +0.4 0.4 l +-0.4 0.4 l +h + + + + + + +-0.43 -0.57 m +0.57 0.43 l +0.43 0.57 l +-0.57 -0.43 l +h + + +-0.43 0.57 m +0.57 -0.43 l +0.43 -0.57 l +-0.57 0.43 l +h + + + + + +0 0 m +-1 0.333 l +-1 -0.333 l +h + + + + +0 0 m +-1 0.333 l +-0.8 0 l +-1 -0.333 l +h + + + + +0 0 m +-1 0.333 l +-0.8 0 l +-1 -0.333 l +h + + + + +-1 0.333 m +0 0 l +-1 -0.333 l + + + + +0 0 m +-1 0.333 l +-1 -0.333 l +h +-1 0 m +-2 0.333 l +-2 -0.333 l +h + + + + +0 0 m +-1 0.333 l +-1 -0.333 l +h +-1 0 m +-2 0.333 l +-2 -0.333 l +h + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +176 496 m +176 480 l +192 480 l +192 496 l +h + +0 + +192 496 m +192 480 l +240 480 l +240 496 l +h + +1 + +176 496 m +176 480 l +192 480 l +192 496 l +h + +2 + +192 496 m +192 480 l +240 480 l +240 496 l +h + +3 + +176 496 m +176 480 l +192 480 l +192 496 l +h + +4 + +192 496 m +192 480 l +240 480 l +240 496 l +h + +5 + +176 496 m +176 480 l +192 480 l +192 496 l +h + +6 + +192 496 m +192 480 l +240 480 l +240 496 l +h + +7 + +176 496 m +176 480 l +192 480 l +192 496 l +h + +8 + +176 496 m +176 480 l +192 480 l +192 496 l +h + +1 + +192 496 m +192 480 l +240 480 l +240 496 l +h + + +176 496 m +176 480 l +192 480 l +192 496 l +h + +2 + +192 496 m +192 480 l +240 480 l +240 496 l +h + +3 + +176 496 m +176 480 l +192 480 l +192 496 l +h + +4 + +192 496 
m +192 480 l +240 480 l +240 496 l +h + +5 + +176 496 m +176 480 l +192 480 l +192 496 l +h + + +192 496 m +192 480 l +240 480 l +240 496 l +h + + +176 496 m +176 480 l +192 480 l +192 496 l +h + + +176 496 m +176 480 l +192 480 l +192 496 l +h + + +160 496 m +160 544 l +144 544 l +144 496 l +h + + +176 496 m +176 480 l +192 480 l +192 496 l +h + + +160 496 m +160 544 l +144 544 l +144 496 l +h + + +176 496 m +176 480 l +192 480 l +192 496 l +h + +1 +2 +3 + +160 496 m +160 544 l +144 544 l +144 496 l +h + +9 + +160 496 m +160 544 l +144 544 l +144 496 l +h + +11 + +160 496 m +160 544 l +144 544 l +144 496 l +h + + +160 496 m +160 544 l +144 544 l +144 496 l +h + + +160 496 m +160 544 l +144 544 l +144 496 l +h + + +176 496 m +176 480 l +192 480 l +192 496 l +h + + +192 496 m +192 480 l +240 480 l +240 496 l +h + + +176 496 m +176 480 l +192 480 l +192 496 l +h + + +192 496 m +192 480 l +240 480 l +240 496 l +h + + +176 496 m +176 480 l +192 480 l +192 496 l +h + + +192 496 m +192 480 l +240 480 l +240 496 l +h + + +176 496 m +176 480 l +192 480 l +192 496 l +h + + +192 496 m +192 480 l +240 480 l +240 496 l +h + + +176 496 m +176 480 l +192 480 l +192 496 l +h + + +176 496 m +176 480 l +192 480 l +192 496 l +h + + +192 496 m +192 480 l +240 480 l +240 496 l +h + + +176 496 m +176 480 l +192 480 l +192 496 l +h + + +192 496 m +192 480 l +240 480 l +240 496 l +h + + +176 496 m +176 480 l +192 480 l +192 496 l +h + + +192 496 m +192 480 l +240 480 l +240 496 l +h + + +176 496 m +176 480 l +192 480 l +192 496 l +h + + +192 496 m +192 480 l +240 480 l +240 496 l +h + + +176 496 m +176 480 l +192 480 l +192 496 l +h + + +160 496 m +160 544 l +144 544 l +144 496 l +h + + +160 496 m +160 544 l +144 544 l +144 496 l +h + + +160 496 m +160 544 l +144 544 l +144 496 l +h + + +160 496 m +160 544 l +144 544 l +144 496 l +h + + +160 496 m +160 544 l +144 544 l +144 496 l +h + + +192 544 m +192 496 l +240 496 l +240 544 l +h + + +192 544 m +192 496 l +240 496 l +240 544 l +h + + +192 544 m +192 496 l +240 496 l +240 544 l +h + + +192 544 m +192 496 l +240 496 l +240 544 l +h + + +192 544 m +192 496 l +240 496 l +240 544 l +h + + +192 544 m +192 496 l +240 496 l +240 544 l +h + + +192 544 m +192 496 l +240 496 l +240 544 l +h + + +192 544 m +192 496 l +240 496 l +240 544 l +h + +10 +12 +13 +14 +15 +16 +17 +18 +19 +20 +21 +22 +23 +24 +25 +26 +27 +28 +29 +30 +31 +32 +33 +34 +35 +36 +37 +38 +39 +40 +41 +42 +43 +44 + +152 456 m +472 456 l + + +152 456 m +152 648 l + + + diff --git a/src/Bitmap_cubical_complex/doc/Cubical_complex_representation.png b/src/Bitmap_cubical_complex/doc/Cubical_complex_representation.png new file mode 100644 index 00000000..afb2a75e Binary files /dev/null and b/src/Bitmap_cubical_complex/doc/Cubical_complex_representation.png differ diff --git a/src/Bitmap_cubical_complex/doc/Gudhi_Cubical_Complex_doc.h b/src/Bitmap_cubical_complex/doc/Gudhi_Cubical_Complex_doc.h index cde0b2fc..be4caaad 100644 --- a/src/Bitmap_cubical_complex/doc/Gudhi_Cubical_Complex_doc.h +++ b/src/Bitmap_cubical_complex/doc/Gudhi_Cubical_Complex_doc.h @@ -76,7 +76,7 @@ namespace Cubical_complex { * directions, allows to determine, dimension, neighborhood, boundary and coboundary of every cube \f$C \in * \mathcal{K}\f$. * - * \image html "bitmapAllCubes.png" "Cubical complex. + * \image html "Cubical_complex_representation.png" Cubical complex. * * Note that the cubical complex in the figure above is, in a natural way, a product of one dimensional cubical * complexes in \f$\mathbb{R}\f$. 
The number of all cubes in each direction is equal \f$2n+1\f$, where \f$n\f$ is the diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h index 1db1ea8a..56cb82bb 100644 --- a/src/common/doc/main_page.h +++ b/src/common/doc/main_page.h @@ -3,7 +3,7 @@ * \image html "Gudhi_banner.jpg" "" width=20cm * * \section Introduction Introduction - * The Gudhi library (Geometric Understanding in Higher Dimensions) is a generic open source C++ library for + * The Gudhi library (Geometry Understanding in Higher Dimensions) is a generic open source C++ library for * Computational Topology and Topological Data Analysis * (TDA). * The GUDHI library intends to help the development of new algorithmic solutions in TDA and their transfer to @@ -20,16 +20,32 @@ * We refer to \cite gudhilibrary_ICMS14 for a detailed description of the design of the library. * \section DataStructures Data structures + \subsection CubicalComplexDataStructure Cubical complex + \image html "Cubical_complex_representation.png" "Cubical complex representation" + + + + + +
+ Author: Pawel Dlotko
+ Introduced in: GUDHI 1.3.0
+ Copyright: GPL v3
+
+ The cubical complex is an example of a structured complex useful in computational mathematics (specially + rigorous numerics) and image analysis.
+ User manual: \ref cubical_complex - Reference manual: Gudhi::Cubical_complex::Bitmap_cubical_complex +
\subsection SimplexTreeDataStructure Simplex tree \image html "Simplex_tree_representation.png" "Simplex tree representation"
+ Author: Clément Maria
Introduced in: GUDHI 1.0.0
Copyright: GPL v3
- Clément Maria
The simplex tree is an efficient and flexible data structure for representing general (filtered) simplicial complexes. The data structure is described in \cite boissonnatmariasimplextreealgorithmica .
@@ -42,11 +58,11 @@ +
+ Author: David Salinas
Introduced in: GUDHI 1.1.0
Copyright: GPL v3
- David Salinas
The Skeleton-Blocker data-structure proposes a light encoding for simplicial complexes by storing only an *implicit* representation of its simplices \cite socg_blockers_2011,\cite blockers2012. Intuitively, it just stores the 1-skeleton of a simplicial complex with a graph and the set of its "missing faces" that is very small in practice. @@ -62,11 +78,11 @@
+ Author: Siargey Kachanovich
Introduced in: GUDHI 1.3.0
Copyright: GPL v3
- Siargey Kachanovich
Witness complex \f$ Wit(W,L) \f$ is a simplicial complex defined on two sets of points in \f$\mathbb{R}^D\f$. The data structure is described in \cite boissonnatmariasimplextreealgorithmica .
User manual: \ref witness_complex - Reference manual: Gudhi::witness_complex::SimplicialComplexForWitness @@ -75,16 +91,34 @@
\section Toolbox Toolbox + \subsection ContractionToolbox Contraction + \image html "sphere_contraction_representation.png" "Sphere contraction example" + + + + + +
+ Author: David Salinas
+ Introduced in: GUDHI 1.1.0
+ Copyright: GPL v3
+
+ The purpose of this package is to offer a user-friendly interface for edge contraction simplification of huge + simplicial complexes. It uses the \ref skbl data-structure whose size remains small during simplification of most + used geometrical complexes of topological data analysis such as the Rips or the Delaunay complexes. In practice, + the size of this data-structure is even much lower than the total number of simplices.
+ User manual: \ref contr +
\subsection PersistentCohomologyToolbox Persistent Cohomology \image html "3DTorus_poch.png" "Rips Persistent Cohomology on a 3D Torus" -
+ Author: Clément Maria
Introduced in: GUDHI 1.0.0
Copyright: GPL v3
- Clément Maria
The theory of homology consists in attaching to a topological space a sequence of (homology) groups, capturing global topological features like connected components, holes, cavities, etc. Persistent homology studies the evolution -- birth, life and death -- of these features when the topological space is changing. Consequently, the @@ -96,24 +130,6 @@ User manual: \ref persistent_cohomology
- \subsection ContractionToolbox Contraction - \image html "sphere_contraction_representation.png" "Sphere contraction example" - - - - -
- Introduced in: GUDHI 1.1.0
- Copyright: GPL v3
-
- David Salinas
- The purpose of this package is to offer a user-friendly interface for edge contraction simplification of huge
- simplicial complexes. It uses the \ref skbl data-structure whose size remains small during simplification of most
- used geometrical complexes of topological data analysis such as the Rips or the Delaunay complexes. In practice,
- the size of this data-structure is even much lower than the total number of simplices.
- User manual: \ref contr
-
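Tying the Filtered Complexes and Persistent Cohomology packages listed above together, a minimal sketch of the usual pipeline (illustrative only, not part of the patch; the example programs shipped with the library, such as plain_homology.cpp, are the reference versions):

#include <gudhi/Simplex_tree.h>
#include <gudhi/Persistent_cohomology.h>
#include <gudhi/Persistent_cohomology/Field_Zp.h>
#include <vector>

int main() {
  using Simplex_tree = Gudhi::Simplex_tree<>;
  using Field_Zp = Gudhi::persistent_cohomology::Field_Zp;
  using Persistent_cohomology =
      Gudhi::persistent_cohomology::Persistent_cohomology<Simplex_tree, Field_Zp>;

  // A small filtered complex: two triangles glued along the edge {1,2}.
  Simplex_tree st;
  st.insert_simplex_and_subfaces(std::vector<int>{0, 1, 2}, 0.0);
  st.insert_simplex_and_subfaces(std::vector<int>{1, 2, 3}, 0.0);
  st.initialize_filtration();

  Persistent_cohomology pcoh(st);
  pcoh.init_coefficients(2);              // coefficients in Z/2Z
  pcoh.compute_persistent_cohomology(0);  // min_persistence = 0
  pcoh.output_diagram();                  // one persistence pair per line
  return 0;
}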
*/ -- cgit v1.2.3 From 001f501ffa371fc810d4f24812d26845bd61f349 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Wed, 30 Mar 2016 10:43:10 +0000 Subject: Add cubical complex to main page Homogeneize the author/copyright sections of modules Exclude data and GudhUI from doxygen git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/Doxygen_for_GUDHI_1.3.0@1085 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 1b89161f3253553a02935885334b2fc6cf58055e --- src/Bitmap_cubical_complex/doc/Gudhi_Cubical_Complex_doc.h | 3 ++- .../include/gudhi/Bitmap_cubical_complex.h | 11 ++++------- .../include/gudhi/Bitmap_cubical_complex/counter.h | 2 +- .../include/gudhi/Bitmap_cubical_complex_base.h | 7 ++----- ...Bitmap_cubical_complex_periodic_boundary_conditions_base.h | 6 ++---- src/Contraction/include/gudhi/Edge_contraction.h | 1 - src/Doxyfile | 3 ++- .../include/gudhi/Persistent_cohomology.h | 6 +----- src/Simplex_tree/include/gudhi/Simplex_tree.h | 5 +---- src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h | 1 - src/common/doc/main_page.h | 4 ++-- 11 files changed, 17 insertions(+), 32 deletions(-) (limited to 'src/common') diff --git a/src/Bitmap_cubical_complex/doc/Gudhi_Cubical_Complex_doc.h b/src/Bitmap_cubical_complex/doc/Gudhi_Cubical_Complex_doc.h index be4caaad..83921427 100644 --- a/src/Bitmap_cubical_complex/doc/Gudhi_Cubical_Complex_doc.h +++ b/src/Bitmap_cubical_complex/doc/Gudhi_Cubical_Complex_doc.h @@ -145,7 +145,8 @@ namespace Cubical_complex { * \section BitmapExamples Examples * End user programs are available in example/Bitmap_cubical_complex folder. - + * + * \copyright GNU General Public License v3. */ /** @} */ // end defgroup cubical_complex diff --git a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h index 67e1fed3..54ae47d0 100644 --- a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h +++ b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h @@ -46,18 +46,15 @@ const bool globalDbg = false; template class is_before_in_filtration; /** - * This is a Bitmap_cubical_complex class. It joints a functionalities of Bitmap_cubical_complex_base and + * @class Bitmap_cubical_complex Bitmap_cubical_complex.h gudhi/Bitmap_cubical_complex.h + * @brief Cubical complex represented as a bitmap. + * @ingroup cubical_complex + * @details This is a Bitmap_cubical_complex class. It joints a functionalities of Bitmap_cubical_complex_base and * Bitmap_cubical_complex_periodic_boundary_conditions_base classes into * Gudhi persistent homology engine. It is a template class that inherit from its template parameter. The template * parameter is supposed to be either Bitmap_cubical_complex_base or * Bitmap_cubical_complex_periodic_boundary_conditions_base class. **/ - -/** - *@class Bitmap_cubical_complex - *@brief Cubical complex represented as a bitmap. 
- *@ingroup cubical_complex - */ template class Bitmap_cubical_complex : public T { public: diff --git a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex/counter.h b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex/counter.h index 266ce051..bee19344 100644 --- a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex/counter.h +++ b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex/counter.h @@ -31,6 +31,7 @@ namespace Gudhi { namespace Cubical_complex { /** + * @class counter counter.h gudhi/Bitmap_cubical_complex/counter.h * This is an implementation of a counter being a vector of integers. * The constructor of the class takes as an input two vectors W and V. * It assumes that W < V coordinatewise. @@ -40,7 +41,6 @@ namespace Cubical_complex { * The current counter reach the end counter V if the value returned by the increment function is FALSE. * This class is needed for the implementation of a bitmapCubicalComplex. **/ - class counter { public: /** diff --git a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h index 7294da98..4bdc94ef 100644 --- a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h +++ b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h @@ -39,13 +39,10 @@ namespace Gudhi { namespace Cubical_complex { /** - * @class Bitmap_cubical_complex_base + * @class Bitmap_cubical_complex_base Bitmap_cubical_complex_base.h gudhi/Bitmap_cubical_complex_base.h * @brief Cubical complex represented as a bitmap, class with basic implementation. * @ingroup cubical_complex - */ - -/** - * This is a class implementing a basic bitmap data structure to store cubical complexes. + * @details This is a class implementing a basic bitmap data structure to store cubical complexes. * It implements only the most basic subroutines. * The idea of the bitmap is the following. Our aim is to have a memory efficient * data structure to store d-dimensional cubical complex diff --git a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h index a446c0e8..5b7e4115 100644 --- a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h +++ b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h @@ -39,12 +39,10 @@ namespace Cubical_complex { // the cells on the right / top are not in the Bitmap_cubical_complex_periodic_boundary_conditions_base /** - * @class Bitmap_cubical_complex_periodic_boundary_conditions_base + * @class Bitmap_cubical_complex_periodic_boundary_conditions_base Bitmap_cubical_complex_periodic_boundary_conditions_base.h gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h * @brief Cubical complex with periodic boundary conditions represented as a bitmap. * @ingroup cubical_complex - */ -/** - * This is a class implementing a bitmap data structure with periodic boundary conditions. Most of the functions are + * @details This is a class implementing a bitmap data structure with periodic boundary conditions. Most of the functions are * identical to the functions from Bitmap_cubical_complex_base. * The ones that needed to be updated are the constructors and get_boundary_of_a_cell and get_coboundary_of_a_cell. 
*/ diff --git a/src/Contraction/include/gudhi/Edge_contraction.h b/src/Contraction/include/gudhi/Edge_contraction.h index 73236db9..1fe563d6 100644 --- a/src/Contraction/include/gudhi/Edge_contraction.h +++ b/src/Contraction/include/gudhi/Edge_contraction.h @@ -226,7 +226,6 @@ Time to simplify and enumerate simplices: \copyright GNU General Public License v3. -\verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim */ /** @} */ // end defgroup } // namespace contraction diff --git a/src/Doxyfile b/src/Doxyfile index 8fe41755..f7fba3e6 100644 --- a/src/Doxyfile +++ b/src/Doxyfile @@ -779,7 +779,8 @@ RECURSIVE = YES # Note that relative paths are relative to the directory from which doxygen is # run. -EXCLUDE = +EXCLUDE = data/ \ + GudhUI/ # The EXCLUDE_SYMLINKS tag can be used to select whether or not files or # directories that are symbolic links (a Unix file system feature) are excluded diff --git a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h index 3c331f0f..1b86f1f9 100644 --- a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h +++ b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h @@ -48,6 +48,7 @@ namespace persistent_cohomology { /** \defgroup persistent_cohomology Persistent Cohomology * + \author Clément Maria Computation of persistent cohomology using the algorithm of \cite DBLP:journals/dcg/SilvaMV11 and \cite DBLP:journals/corr/abs-1208-5018 @@ -169,11 +170,6 @@ points sampling a Klein bottle in \f$\mathbb{R}^5\f$ with a simplex tree, its co Hasse diagram and the computation of persistent homology and multi-field persistent homology for the different representations. - - - \author Clément Maria - \version 1.0 - \date 2014 \copyright GNU General Public License v3. @{ */ diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree.h b/src/Simplex_tree/include/gudhi/Simplex_tree.h index 46670b85..ba09db63 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree.h @@ -48,6 +48,7 @@ namespace Gudhi { /** \defgroup simplex_tree Filtered Complexes + * \author Clément Maria * * A simplicial complex \f$\mathbf{K}\f$ * on a set of vertices \f$V = \{1, \cdots ,|V|\}\f$ is a collection of simplices @@ -75,10 +76,6 @@ namespace Gudhi { when accessing the boundary of a simplex, but is less compact and harder to construct from scratch. - - * \author Clément Maria - * \version 1.0 - * \date 2014 * \copyright GNU General Public License v3. * @{ */ diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h index 615b3a81..74c6a71d 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h @@ -240,7 +240,6 @@ their collaboration to write the two initial papers \copyright GNU General Public License v3. -\verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim */ /** @} */ // end defgroup diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h index 56cb82bb..6f3d8603 100644 --- a/src/common/doc/main_page.h +++ b/src/common/doc/main_page.h @@ -41,7 +41,7 @@ @@ -114,7 +114,7 @@
- Author: Clément Maria
+ Author: Clément Maria
Introduced in: GUDHI 1.0.0
Copyright: GPL v3
-- cgit v1.2.3 From d29bdae1281bb2b9b9778365c5dc3d44cffa4de4 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Thu, 31 Mar 2016 13:03:58 +0000 Subject: Fix TBB issue in CMake Add TBB in Doxygen git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/Doxygen_for_GUDHI_1.3.0@1086 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: e3d3251431193d21ebf9344c230d12df624cd905 --- CMakeLists.txt | 4 + src/Bitmap_cubical_complex/example/CMakeLists.txt | 15 +++- .../include/gudhi/Bitmap_cubical_complex.h | 3 +- src/Bitmap_cubical_complex/test/CMakeLists.txt | 3 + src/CMakeLists.txt | 4 + src/Doxyfile | 8 +- src/GudhUI/CMakeLists.txt | 3 + src/Persistent_cohomology/example/CMakeLists.txt | 26 ++++++- src/Persistent_cohomology/test/CMakeLists.txt | 8 +- src/Simplex_tree/example/CMakeLists.txt | 9 +++ src/Simplex_tree/include/gudhi/Simplex_tree.h | 7 +- src/Simplex_tree/test/CMakeLists.txt | 3 + src/common/doc/main_page.h | 89 ++++++++++++++++++++-- 13 files changed, 163 insertions(+), 19 deletions(-) (limited to 'src/common') diff --git a/CMakeLists.txt b/CMakeLists.txt index abb66ec2..9cb8b86b 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -58,6 +58,10 @@ else() # Find TBB package for parallel sort - not mandatory, just optional. set(TBB_FIND_QUIETLY ON) find_package(TBB) + if (TBB_FOUND) + message("TBB found in ${TBB_LIBRARY_DIRS}") + add_definitions(-DGUDHI_USE_TBB) + endif() # Required programs for unitary tests purpose FIND_PROGRAM( GCOVR_PATH gcovr ) diff --git a/src/Bitmap_cubical_complex/example/CMakeLists.txt b/src/Bitmap_cubical_complex/example/CMakeLists.txt index 8f9cfa80..ad86b763 100644 --- a/src/Bitmap_cubical_complex/example/CMakeLists.txt +++ b/src/Bitmap_cubical_complex/example/CMakeLists.txt @@ -2,16 +2,25 @@ cmake_minimum_required(VERSION 2.6) project(GUDHIBitmap) add_executable ( Bitmap_cubical_complex Bitmap_cubical_complex.cpp ) -target_link_libraries(Bitmap_cubical_complex ${Boost_SYSTEM_LIBRARY}) +target_link_libraries(Bitmap_cubical_complex ${Boost_SYSTEM_LIBRARY}) +if (TBB_FOUND) + target_link_libraries(Bitmap_cubical_complex ${TBB_RELEASE_LIBRARY}) +endif() add_test(Bitmap_cubical_complex_one_sphere ${CMAKE_CURRENT_BINARY_DIR}/Bitmap_cubical_complex ${CMAKE_SOURCE_DIR}/data/bitmap/CubicalOneSphere.txt) add_test(Bitmap_cubical_complex_two_sphere ${CMAKE_CURRENT_BINARY_DIR}/Bitmap_cubical_complex ${CMAKE_SOURCE_DIR}/data/bitmap/CubicalTwoSphere.txt) add_executable ( Random_bitmap_cubical_complex Random_bitmap_cubical_complex.cpp ) -target_link_libraries(Random_bitmap_cubical_complex ${Boost_SYSTEM_LIBRARY}) +target_link_libraries(Random_bitmap_cubical_complex ${Boost_SYSTEM_LIBRARY}) +if (TBB_FOUND) + target_link_libraries(Random_bitmap_cubical_complex ${TBB_RELEASE_LIBRARY}) +endif() add_test(Random_bitmap_cubical_complex ${CMAKE_CURRENT_BINARY_DIR}/Random_bitmap_cubical_complex 2 100 100) add_executable ( Bitmap_cubical_complex_periodic_boundary_conditions Bitmap_cubical_complex_periodic_boundary_conditions.cpp ) -target_link_libraries(Bitmap_cubical_complex_periodic_boundary_conditions ${Boost_SYSTEM_LIBRARY}) +target_link_libraries(Bitmap_cubical_complex_periodic_boundary_conditions ${Boost_SYSTEM_LIBRARY}) +if (TBB_FOUND) + target_link_libraries(Bitmap_cubical_complex_periodic_boundary_conditions ${TBB_RELEASE_LIBRARY}) +endif() add_test(Bitmap_cubical_complex_periodic_2d_torus ${CMAKE_CURRENT_BINARY_DIR}/Bitmap_cubical_complex_periodic_boundary_conditions ${CMAKE_SOURCE_DIR}/data/bitmap/2d_torus.txt) add_test(Bitmap_cubical_complex_periodic_3d_torus 
${CMAKE_CURRENT_BINARY_DIR}/Bitmap_cubical_complex_periodic_boundary_conditions ${CMAKE_SOURCE_DIR}/data/bitmap/3d_torus.txt) diff --git a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h index 54ae47d0..a6ba60d3 100644 --- a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h +++ b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h @@ -546,7 +546,8 @@ void Bitmap_cubical_complex::initialize_simplex_associated_to_key() { this->simplex_associated_to_key = std::vector(this->data.size()); std::iota(std::begin(simplex_associated_to_key), std::end(simplex_associated_to_key), 0); #ifdef GUDHI_USE_TBB - tbb::parallel_sort(simplex_associated_to_key, is_before_in_filtration(this)); + tbb::parallel_sort(simplex_associated_to_key.begin(), simplex_associated_to_key.end(), + is_before_in_filtration(this)); #else std::sort(simplex_associated_to_key.begin(), simplex_associated_to_key.end(), is_before_in_filtration(this)); #endif diff --git a/src/Bitmap_cubical_complex/test/CMakeLists.txt b/src/Bitmap_cubical_complex/test/CMakeLists.txt index 97c374e6..96a41c62 100644 --- a/src/Bitmap_cubical_complex/test/CMakeLists.txt +++ b/src/Bitmap_cubical_complex/test/CMakeLists.txt @@ -16,6 +16,9 @@ endif() add_executable ( BitmapCCUT Bitmap_test.cpp ) target_link_libraries(BitmapCCUT ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) +if (TBB_FOUND) + target_link_libraries(BitmapCCUT ${TBB_RELEASE_LIBRARY}) +endif() # Unitary tests add_test(NAME BitmapCCUT diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 40b7dd58..d4abe96d 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -53,6 +53,10 @@ else() # Find TBB package for parallel sort - not mandatory, just optional. set(TBB_FIND_QUIETLY ON) find_package(TBB) + if (TBB_FOUND) + message("TBB found in ${TBB_LIBRARY_DIRS}") + add_definitions(-DGUDHI_USE_TBB) + endif() # BOOST ISSUE result_of vs C++11 add_definitions(-DBOOST_RESULT_OF_USE_DECLTYPE) diff --git a/src/Doxyfile b/src/Doxyfile index f7fba3e6..63e7145c 100644 --- a/src/Doxyfile +++ b/src/Doxyfile @@ -780,7 +780,8 @@ RECURSIVE = YES # run. EXCLUDE = data/ \ - GudhUI/ + example/ \ + GudhUI/ # The EXCLUDE_SYMLINKS tag can be used to select whether or not files or # directories that are symbolic links (a Unix file system feature) are excluded @@ -813,14 +814,15 @@ EXCLUDE_SYMBOLS = # that contain example code fragments that are included (see the \include # command). -EXAMPLE_PATH = biblio/ +EXAMPLE_PATH = biblio/ \ + example/ # If the value of the EXAMPLE_PATH tag contains directories, you can use the # EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and # *.h) to filter out the source-files in the directories. If left blank all # files are included. 
-EXAMPLE_PATTERNS = +EXAMPLE_PATTERNS = # If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be # searched for input files to be used with the \include or \dontinclude commands diff --git a/src/GudhUI/CMakeLists.txt b/src/GudhUI/CMakeLists.txt index 1ee43d91..fd7397ea 100644 --- a/src/GudhUI/CMakeLists.txt +++ b/src/GudhUI/CMakeLists.txt @@ -57,6 +57,9 @@ if ( CGAL_FOUND AND QT4_FOUND AND OPENGL_FOUND AND QGLVIEWER_FOUND ) target_link_libraries( GudhUI ${QT_LIBRARIES} ${QGLVIEWER_LIBRARIES} ) target_link_libraries( GudhUI ${OPENGL_gl_LIBRARY} ${OPENGL_glu_LIBRARY} ) +if (TBB_FOUND) + target_link_libraries( GudhUI ${TBB_RELEASE_LIBRARY}) +endif() else() message(STATUS "NOTICE: GudhUI requires CGAL, the QGLViewer, OpenGL and Qt4, and will not be compiled.") diff --git a/src/Persistent_cohomology/example/CMakeLists.txt b/src/Persistent_cohomology/example/CMakeLists.txt index 95506631..9702a5f9 100644 --- a/src/Persistent_cohomology/example/CMakeLists.txt +++ b/src/Persistent_cohomology/example/CMakeLists.txt @@ -7,24 +7,37 @@ add_definitions( -DBOOST_ALL_DYN_LINK ) add_executable(plain_homology plain_homology.cpp) target_link_libraries(plain_homology ${Boost_SYSTEM_LIBRARY}) +if (TBB_FOUND) + target_link_libraries(plain_homology ${TBB_RELEASE_LIBRARY}) +endif() add_test(plain_homology ${CMAKE_CURRENT_BINARY_DIR}/plain_homology) add_executable(persistence_from_simple_simplex_tree persistence_from_simple_simplex_tree.cpp) target_link_libraries(persistence_from_simple_simplex_tree ${Boost_SYSTEM_LIBRARY}) +if (TBB_FOUND) + target_link_libraries(persistence_from_simple_simplex_tree ${TBB_RELEASE_LIBRARY}) +endif() add_test(persistence_from_simple_simplex_tree ${CMAKE_CURRENT_BINARY_DIR}/persistence_from_simple_simplex_tree 1 0) add_executable(rips_persistence rips_persistence.cpp) target_link_libraries(rips_persistence ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY}) - +if (TBB_FOUND) + target_link_libraries(rips_persistence ${TBB_RELEASE_LIBRARY}) +endif() add_test(rips_persistence_3 ${CMAKE_CURRENT_BINARY_DIR}/rips_persistence ${CMAKE_SOURCE_DIR}/data/points/Kl.txt -r 0.2 -d 3 -p 3 -m 100) add_executable(parallel_rips_persistence parallel_rips_persistence.cpp) target_link_libraries(parallel_rips_persistence ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY}) - +if (TBB_FOUND) + target_link_libraries(parallel_rips_persistence ${TBB_RELEASE_LIBRARY}) +endif() add_test(parallel_rips_persistence_3 ${CMAKE_CURRENT_BINARY_DIR}/parallel_rips_persistence ${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.txt -r 0.3 -d 3 -p 3 -m 100) add_executable(persistence_from_file persistence_from_file.cpp) target_link_libraries(persistence_from_file ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY}) +if (TBB_FOUND) + target_link_libraries(persistence_from_file ${TBB_RELEASE_LIBRARY}) +endif() add_test(persistence_from_file_3_2_0 ${CMAKE_CURRENT_BINARY_DIR}/persistence_from_file ${CMAKE_SOURCE_DIR}/data/points/bunny_5000.st -p 2 -m 0) add_test(persistence_from_file_3_3_100 ${CMAKE_CURRENT_BINARY_DIR}/persistence_from_file ${CMAKE_SOURCE_DIR}/data/points/bunny_5000.st -p 3 -m 100) @@ -34,10 +47,16 @@ if(GMPXX_FOUND AND GMP_FOUND) add_executable(rips_multifield_persistence rips_multifield_persistence.cpp ) target_link_libraries(rips_multifield_persistence ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY} ${GMPXX_LIBRARIES} ${GMP_LIBRARIES}) + if (TBB_FOUND) + target_link_libraries(rips_multifield_persistence ${TBB_RELEASE_LIBRARY}) + endif() 
add_test(rips_multifield_persistence_2_71 ${CMAKE_CURRENT_BINARY_DIR}/rips_multifield_persistence ${CMAKE_SOURCE_DIR}/data/points/Kl.txt -r 0.2 -d 3 -p 2 -q 71 -m 100) add_executable ( performance_rips_persistence performance_rips_persistence.cpp ) target_link_libraries(performance_rips_persistence ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY} ${GMPXX_LIBRARIES} ${GMP_LIBRARIES}) + if (TBB_FOUND) + target_link_libraries(performance_rips_persistence ${TBB_RELEASE_LIBRARY}) + endif() if(CGAL_FOUND) if (CMAKE_BUILD_TYPE MATCHES Debug) @@ -46,6 +65,9 @@ if(GMPXX_FOUND AND GMP_FOUND) endif() add_executable(alpha_shapes_persistence alpha_shapes_persistence.cpp) target_link_libraries(alpha_shapes_persistence ${Boost_SYSTEM_LIBRARY} ${GMPXX_LIBRARIES} ${GMP_LIBRARIES} ${CGAL_LIBRARY}) + if (TBB_FOUND) + target_link_libraries(alpha_shapes_persistence ${TBB_RELEASE_LIBRARY}) + endif() add_test(alpha_shapes_persistence_2_0_5 ${CMAKE_CURRENT_BINARY_DIR}/alpha_shapes_persistence ${CMAKE_SOURCE_DIR}/data/points/bunny_5000 2 0.5) #add_test(alpha_shapes_persistence_3_3_100 ${CMAKE_CURRENT_BINARY_DIR}/alpha_shapes_persistence ${CMAKE_SOURCE_DIR}/data/points/bunny_5000.st -p 3 -m 100) endif() diff --git a/src/Persistent_cohomology/test/CMakeLists.txt b/src/Persistent_cohomology/test/CMakeLists.txt index d16be5be..459cc000 100644 --- a/src/Persistent_cohomology/test/CMakeLists.txt +++ b/src/Persistent_cohomology/test/CMakeLists.txt @@ -12,6 +12,9 @@ endif() add_executable ( PersistentCohomologyUT persistent_cohomology_unit_test.cpp ) target_link_libraries(PersistentCohomologyUT ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) +if (TBB_FOUND) + target_link_libraries(PersistentCohomologyUT ${TBB_RELEASE_LIBRARY}) +endif() # Unitary tests add_test(NAME PersistentCohomologyUT @@ -22,7 +25,10 @@ add_test(NAME PersistentCohomologyUT if(GMPXX_FOUND AND GMP_FOUND) add_executable ( PersistentCohomologyMultiFieldUT persistent_cohomology_unit_test_multi_field.cpp ) - target_link_libraries(PersistentCohomologyMultiFieldUT ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY} ${GMPXX_LIBRARIES} ${GMP_LIBRARIES}) +target_link_libraries(PersistentCohomologyMultiFieldUT ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY} ${GMPXX_LIBRARIES} ${GMP_LIBRARIES}) +if (TBB_FOUND) + target_link_libraries(PersistentCohomologyMultiFieldUT ${TBB_RELEASE_LIBRARY}) +endif() # Unitary tests add_test(NAME PersistentCohomologyMultiFieldUT diff --git a/src/Simplex_tree/example/CMakeLists.txt b/src/Simplex_tree/example/CMakeLists.txt index 200161a6..89a4e053 100644 --- a/src/Simplex_tree/example/CMakeLists.txt +++ b/src/Simplex_tree/example/CMakeLists.txt @@ -2,10 +2,16 @@ cmake_minimum_required(VERSION 2.6) project(GUDHISimplexTreeFromFile) add_executable ( simplex_tree_from_cliques_of_graph simplex_tree_from_cliques_of_graph.cpp ) +if (TBB_FOUND) + target_link_libraries(simplex_tree_from_cliques_of_graph ${TBB_RELEASE_LIBRARY}) +endif() add_test(simplex_tree_from_cliques_of_graph_2 ${CMAKE_CURRENT_BINARY_DIR}/simplex_tree_from_cliques_of_graph ${CMAKE_SOURCE_DIR}/data/points/Klein_bottle_complex.txt 2) add_test(simplex_tree_from_cliques_of_graph_3 ${CMAKE_CURRENT_BINARY_DIR}/simplex_tree_from_cliques_of_graph ${CMAKE_SOURCE_DIR}/data/points/Klein_bottle_complex.txt 3) add_executable ( simple_simplex_tree simple_simplex_tree.cpp ) +if (TBB_FOUND) + target_link_libraries(simple_simplex_tree ${TBB_RELEASE_LIBRARY}) +endif() add_test(simple_simplex_tree ${CMAKE_CURRENT_BINARY_DIR}/simple_simplex_tree) 
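On the C++ side, the GUDHI_USE_TBB definition added by the CMake fragments above is what selects the parallel code path; a stripped-down sketch of the pattern (illustrative, not part of the patch):

#include <algorithm>
#include <iostream>
#include <vector>
#ifdef GUDHI_USE_TBB
#include <tbb/parallel_sort.h>
#endif

int main() {
  std::vector<double> filtration_values = {0.4, 0.1, 0.3, 0.2};
#ifdef GUDHI_USE_TBB
  // CMake links the target against TBB and defines GUDHI_USE_TBB.
  tbb::parallel_sort(filtration_values.begin(), filtration_values.end());
#else
  // Sequential fallback when TBB is not found.
  std::sort(filtration_values.begin(), filtration_values.end());
#endif
  for (double f : filtration_values) std::cout << f << " ";
  std::cout << std::endl;
  return 0;
}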
add_executable ( mini_simplex_tree mini_simplex_tree.cpp ) @@ -16,5 +22,8 @@ add_test(mini_simplex_tree ${CMAKE_CURRENT_BINARY_DIR}/mini_simplex_tree) if(GMP_FOUND AND CGAL_FOUND) add_executable ( simplex_tree_from_alpha_shapes_3 simplex_tree_from_alpha_shapes_3.cpp ) target_link_libraries(simplex_tree_from_alpha_shapes_3 ${GMP_LIBRARIES} ${CGAL_LIBRARY} ${Boost_SYSTEM_LIBRARY}) + if (TBB_FOUND) + target_link_libraries(simplex_tree_from_alpha_shapes_3 ${TBB_RELEASE_LIBRARY}) + endif() add_test(simplex_tree_from_alpha_shapes_3 ${CMAKE_CURRENT_BINARY_DIR}/simplex_tree_from_alpha_shapes_3 ${CMAKE_SOURCE_DIR}/data/points/bunny_5000) endif() diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree.h b/src/Simplex_tree/include/gudhi/Simplex_tree.h index ba09db63..dea475ac 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree.h @@ -797,10 +797,11 @@ class Simplex_tree { * possible. */ #ifdef GUDHI_USE_TBB - tbb::parallel_sort(filtration_vect_, is_before_in_filtration(this)); + std::cout << "TBB is ON !!!" << std::endl; + tbb::parallel_sort(filtration_vect_.begin(), filtration_vect_.end(), is_before_in_filtration(this)); #else - std::stable_sort(filtration_vect_.begin(), filtration_vect_.end(), - is_before_in_filtration(this)); + std::cout << "TBB is OFF..." << std::endl; + std::stable_sort(filtration_vect_.begin(), filtration_vect_.end(), is_before_in_filtration(this)); #endif } diff --git a/src/Simplex_tree/test/CMakeLists.txt b/src/Simplex_tree/test/CMakeLists.txt index 1f2f7d33..609d8669 100644 --- a/src/Simplex_tree/test/CMakeLists.txt +++ b/src/Simplex_tree/test/CMakeLists.txt @@ -12,6 +12,9 @@ endif() add_executable ( SimplexTreeUT simplex_tree_unit_test.cpp ) target_link_libraries(SimplexTreeUT ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) +if (TBB_FOUND) + target_link_libraries(SimplexTreeUT ${TBB_RELEASE_LIBRARY}) +endif() # Do not forget to copy test files in current binary dir file(COPY "simplex_tree_for_unit_test.txt" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h index 6f3d8603..1e088552 100644 --- a/src/common/doc/main_page.h +++ b/src/common/doc/main_page.h @@ -148,21 +148,72 @@ * * The following example requires the GNU Multiple Precision Arithmetic * Library (GMP) and will not be built if GMP is not installed: - * \li Persistent_cohomology/rips_multifield_persistence + * \li + * Persistent_cohomology/alpha_shapes_persistence.cpp + * \li + * Persistent_cohomology/performance_rips_persistence.cpp + * \li + * Persistent_cohomology/rips_multifield_persistence.cpp + * \li + * Simplex_tree/simplex_tree_from_alpha_shapes_3.cpp * * Having GMP version 4.2 or higher installed is recommended. * * \subsection cgal CGAL: * CGAL is a C++ library which provides easy access to efficient and reliable geometric algorithms. * + * Having CGAL version 4.4 or higher installed is recommended. 
The procedure to install this library according to + * your operating system is detailed here http://doc.cgal.org/latest/Manual/installation.html + * * The following examples require the Computational Geometry Algorithms * Library (CGAL) and will not be built if CGAL is not installed: - * \li GudhUI - * \li Persistent_cohomology/alpha_shapes_persistence - * \li Simplex_tree/simplex_tree_from_alpha_shapes_3 + * \li + * Persistent_cohomology/alpha_shapes_persistence.cpp + * \li + * Simplex_tree/simplex_tree_from_alpha_shapes_3.cpp * - * Having CGAL version 4.4 or higher installed is recommended. The procedure to install this library according to - * your operating system is detailed here http://doc.cgal.org/latest/Manual/installation.html + * The following example requires CGAL version ≥ 4.6: + * \li + * Witness_complex/witness_complex_sphere.cpp + * + * \subsection tbb Threading Building Blocks: + * Intel® TBB lets you easily write parallel + * C++ programs that take full advantage of multicore performance, that are portable and composable, and that have + * future-proof scalability. + * + * Having Intel® TBB installed is recommended to parallelize and accelerate some GUDHI computations. + * + * The following examples are using Intel® TBB if installed: + * \li + * Bitmap_cubical_complex/Bitmap_cubical_complex.cpp + * \li + * Bitmap_cubical_complex/Bitmap_cubical_complex_periodic_boundary_conditions.cpp + * \li + * Bitmap_cubical_complex/Random_bitmap_cubical_complex.cpp + * \li + * Persistent_cohomology/alpha_shapes_persistence.cpp + * \li + * Simplex_tree/simple_simplex_tree.cpp + * \li + * Simplex_tree/simplex_tree_from_alpha_shapes_3.cpp + * \li + * Simplex_tree/simplex_tree_from_cliques_of_graph.cpp + * \li + * Persistent_cohomology/alpha_shapes_persistence.cpp + * \li + * Persistent_cohomology/parallel_rips_persistence.cpp + * \li + * Persistent_cohomology/performance_rips_persistence.cpp + * \li + * Persistent_cohomology/persistence_from_file.cpp + * \li + * Persistent_cohomology/persistence_from_simple_simplex_tree.cpp + * \li + * Persistent_cohomology/plain_homology.cpp + * \li + * Persistent_cohomology/rips_multifield_persistence.cpp + * \li + * Persistent_cohomology/rips_persistence.cpp * * \subsection demos Demos and examples * To build the demos and libraries, run the following commands in a terminal: @@ -206,3 +257,29 @@ make \endverbatim * \verbinclude biblio/how_to_cite_gudhi.bib */ +// List of Gudhi examples - Doxygen needs at least a file tag to analyse comments +/*! 
@file Examples + * @example Bitmap_cubical_complex/Bitmap_cubical_complex.cpp + * @example Bitmap_cubical_complex/Bitmap_cubical_complex_periodic_boundary_conditions.cpp + * @example Bitmap_cubical_complex/Random_bitmap_cubical_complex.cpp + * @example Contraction/Garland_heckbert.cpp + * @example Contraction/Rips_contraction.cpp + * @example Persistent_cohomology/alpha_shapes_persistence.cpp + * @example Persistent_cohomology/parallel_rips_persistence.cpp + * @example Persistent_cohomology/performance_rips_persistence.cpp + * @example Persistent_cohomology/persistence_from_file.cpp + * @example Persistent_cohomology/persistence_from_simple_simplex_tree.cpp + * @example Persistent_cohomology/plain_homology.cpp + * @example Persistent_cohomology/rips_multifield_persistence.cpp + * @example Persistent_cohomology/rips_persistence.cpp + * @example Simplex_tree/mini_simplex_tree.cpp + * @example Simplex_tree/simple_simplex_tree.cpp + * @example Simplex_tree/simplex_tree_from_alpha_shapes_3.cpp + * @example Simplex_tree/simplex_tree_from_cliques_of_graph.cpp + * @example Skeleton_blocker/Skeleton_blocker_from_simplices.cpp + * @example Skeleton_blocker/Skeleton_blocker_iteration.cpp + * @example Skeleton_blocker/Skeleton_blocker_link.cpp + * @example Witness_complex/witness_complex_from_file.cpp + * @example Witness_complex/witness_complex_sphere.cpp + */ + -- cgit v1.2.3 From 1233e7dc721673853c9484569e0828b92cb06f09 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Thu, 31 Mar 2016 14:46:01 +0000 Subject: Fix #include issue in generated documentation git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/Doxygen_for_GUDHI_1.3.0@1087 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 2b837671f261bb093f599f6c421a3cf21f52b423 --- .../include/gudhi/Bitmap_cubical_complex.h | 6 ++++-- .../include/gudhi/Bitmap_cubical_complex_base.h | 12 ++++++++---- .../include/gudhi/Persistent_cohomology.h | 3 ++- .../include/gudhi/Persistent_cohomology/Field_Zp.h | 3 ++- .../include/gudhi/Persistent_cohomology/Multi_field.h | 3 ++- src/common/doc/main_page.h | 5 +++-- 6 files changed, 21 insertions(+), 11 deletions(-) (limited to 'src/common') diff --git a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h index a6ba60d3..c1a3af5c 100644 --- a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h +++ b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h @@ -318,7 +318,8 @@ class Bitmap_cubical_complex : public T { }; /** - * Filtration_simplex_range provides the ranges for Filtration_simplex_iterator. + * @class Filtration_simplex_range Bitmap_cubical_complex.h gudhi/Bitmap_cubical_complex.h + * @brief Filtration_simplex_range provides the ranges for Filtration_simplex_iterator. **/ class Filtration_simplex_range { // Range over the simplices of the complex in the order of the filtration. @@ -489,7 +490,8 @@ class Bitmap_cubical_complex : public T { }; /** - * Class needed for compatibility with Gudhi. Not useful for other purposes. + * @class Skeleton_simplex_range Bitmap_cubical_complex.h gudhi/Bitmap_cubical_complex.h + * @brief Class needed for compatibility with Gudhi. Not useful for other purposes. **/ class Skeleton_simplex_range { // Range over the simplices of the complex in the order of the filtration. 
diff --git a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h index 4bdc94ef..7904eb5b 100644 --- a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h +++ b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h @@ -190,7 +190,8 @@ class Bitmap_cubical_complex_base { // ITERATORS /** - * Iterator through all cells in the complex (in order they appear in the structure -- i.e. + * @class All_cells_iterator Bitmap_cubical_complex_base.h gudhi/Bitmap_cubical_complex_base.h + * @brief Iterator through all cells in the complex (in order they appear in the structure -- i.e. * in lexicographical order). **/ class All_cells_iterator : std::iterator< std::input_iterator_tag, T > { @@ -258,7 +259,8 @@ class Bitmap_cubical_complex_base { } /** - * All_cells_range class provides ranges for All_cells_iterator + * @class All_cells_range Bitmap_cubical_complex_base.h gudhi/Bitmap_cubical_complex_base.h + * @brief All_cells_range class provides ranges for All_cells_iterator **/ class All_cells_range { public: @@ -309,7 +311,8 @@ class Bitmap_cubical_complex_base { } /** - * Iterator through top dimensional cells of the complex. The cells appear in order they are stored + * @class Top_dimensional_cells_iterator Bitmap_cubical_complex_base.h gudhi/Bitmap_cubical_complex_base.h + * @brief Iterator through top dimensional cells of the complex. The cells appear in order they are stored * in the structure (i.e. in lexicographical order) **/ class Top_dimensional_cells_iterator : std::iterator< std::input_iterator_tag, T > { @@ -411,7 +414,8 @@ class Bitmap_cubical_complex_base { } /** - * Top_dimensional_cells_iterator_range class provides ranges for Top_dimensional_cells_iterator_range + * @class Top_dimensional_cells_range Bitmap_cubical_complex_base.h gudhi/Bitmap_cubical_complex_base.h + * @brief Top_dimensional_cells_iterator_range class provides ranges for Top_dimensional_cells_iterator_range **/ class Top_dimensional_cells_range { public: diff --git a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h index 1b86f1f9..d77b993e 100644 --- a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h +++ b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h @@ -174,7 +174,8 @@ different representations. @{ */ -/** \brief Computes the persistent cohomology of a filtered complex. +/** \class Persistent_cohomology Persistent_cohomology.h gudhi/Persistent_cohomology.h + * \brief Computes the persistent cohomology of a filtered complex. 
* * The computation is implemented with a Compressed Annotation Matrix * (CAM)\cite DBLP:conf/esa/BoissonnatDM13, diff --git a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Field_Zp.h b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Field_Zp.h index 6db16e69..458dff98 100644 --- a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Field_Zp.h +++ b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Field_Zp.h @@ -30,7 +30,8 @@ namespace Gudhi { namespace persistent_cohomology { -/** \brief Structure representing the coefficient field \f$\mathbb{Z}/p\mathbb{Z}\f$ +/** \class Field_Zp Field_Zp.h gudhi/Persistent_cohomology/Field_Zp.h + * \brief Structure representing the coefficient field \f$\mathbb{Z}/p\mathbb{Z}\f$ * * \implements CoefficientField * \ingroup persistent_cohomology diff --git a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Multi_field.h b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Multi_field.h index 38bc08d1..5f987545 100644 --- a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Multi_field.h +++ b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Multi_field.h @@ -32,7 +32,8 @@ namespace Gudhi { namespace persistent_cohomology { -/** \brief Structure representing coefficients in a set of finite fields simultaneously +/** \class Multi_field Multi_field.h gudhi/Persistent_cohomology/Multi_field.h + * \brief Structure representing coefficients in a set of finite fields simultaneously * using the chinese remainder theorem. * * \implements CoefficientField diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h index 1e088552..af7be0bf 100644 --- a/src/common/doc/main_page.h +++ b/src/common/doc/main_page.h @@ -127,7 +127,7 @@ Computation of persistent cohomology using the algorithm of \cite DBLP:journals/dcg/SilvaMV11 and \cite DBLP:journals/corr/abs-1208-5018 and the Compressed Annotation Matrix implementation of \cite DBLP:conf/esa/BoissonnatDM13 .
- User manual: \ref persistent_cohomology
+ User manual: \ref persistent_cohomology - Reference manual: Gudhi::persistent_cohomology::Persistent_cohomology
- Author: Clément Maria
+ Author: Clément Maria
Introduced in: GUDHI 1.0.0
Copyright: GPL v3
@@ -216,12 +216,13 @@ * Persistent_cohomology/rips_persistence.cpp * * \subsection demos Demos and examples - * To build the demos and libraries, run the following commands in a terminal: + * To build the demos and examples, run the following commands in a terminal: \verbatim cd /path-to-gudhi/ mkdir build cd build/ cmake .. make \endverbatim + * A list of examples is available here. * * \subsection testsuites Test suites * To test your build, run the following command in a terminal: -- cgit v1.2.3 From 5aee0d98ee764381f9af75f52fc747cefd3848cb Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Thu, 31 Mar 2016 14:49:42 +0000 Subject: Remove Upcoming page. Now available on Gudhi web site git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/Doxygen_for_GUDHI_1.3.0@1088 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 09edbd70b95b6bd60f1324faebda1e24533be9a9 --- src/common/doc/main_page.h | 11 ----------- 1 file changed, 11 deletions(-) (limited to 'src/common') diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h index af7be0bf..2f52fd7e 100644 --- a/src/common/doc/main_page.h +++ b/src/common/doc/main_page.h @@ -236,17 +236,6 @@ make \endverbatim * */ -/*! \page Upcoming Upcoming - * - * The library is under active development. New packages to be released next include: - * \li Alpha complex. - * \li Bottleneck distance. - * \li Zig zag persistence. - * \li Witness complex. - * \li Tangential complex. - * \li Clustering. -*/ - /*! \page Citation Acknowledging the GUDHI library * We kindly ask users to cite the GUDHI library as appropriately as possible in their papers, and to mention the use * of the GUDHI library on the web pages of their projects using GUDHI and provide us with links to these web pages. -- cgit v1.2.3 From 178b86f53cb16964de4365948ada341b7012c1da Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Fri, 1 Apr 2016 07:56:34 +0000 Subject: Rollback of class #include. Done from Doxyfile with STRIP_FROM_INC_PATH. 
Modified Gudhi banner to fix "Geometric Uderstanding" into "Geometry Uderstanding" git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/Doxygen_for_GUDHI_1.3.0@1090 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: c5830fb6f583118a5078254fde5d4a97210c10b7 --- .../include/gudhi/Bitmap_cubical_complex.h | 3 --- .../include/gudhi/Bitmap_cubical_complex/counter.h | 5 ++--- .../include/gudhi/Bitmap_cubical_complex_base.h | 5 ----- ..._cubical_complex_periodic_boundary_conditions_base.h | 1 - src/Doxyfile | 2 +- .../include/gudhi/Persistent_cohomology.h | 3 +-- .../include/gudhi/Persistent_cohomology/Field_Zp.h | 3 +-- .../include/gudhi/Persistent_cohomology/Multi_field.h | 3 +-- src/common/doc/Gudhi_banner.jpg | Bin 34437 -> 0 bytes src/common/doc/Gudhi_banner.png | Bin 0 -> 34877 bytes src/common/doc/main_page.h | 2 +- 11 files changed, 7 insertions(+), 20 deletions(-) delete mode 100644 src/common/doc/Gudhi_banner.jpg create mode 100644 src/common/doc/Gudhi_banner.png (limited to 'src/common') diff --git a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h index c1a3af5c..56fa297c 100644 --- a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h +++ b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex.h @@ -46,7 +46,6 @@ const bool globalDbg = false; template class is_before_in_filtration; /** - * @class Bitmap_cubical_complex Bitmap_cubical_complex.h gudhi/Bitmap_cubical_complex.h * @brief Cubical complex represented as a bitmap. * @ingroup cubical_complex * @details This is a Bitmap_cubical_complex class. It joints a functionalities of Bitmap_cubical_complex_base and @@ -318,7 +317,6 @@ class Bitmap_cubical_complex : public T { }; /** - * @class Filtration_simplex_range Bitmap_cubical_complex.h gudhi/Bitmap_cubical_complex.h * @brief Filtration_simplex_range provides the ranges for Filtration_simplex_iterator. **/ class Filtration_simplex_range { @@ -490,7 +488,6 @@ class Bitmap_cubical_complex : public T { }; /** - * @class Skeleton_simplex_range Bitmap_cubical_complex.h gudhi/Bitmap_cubical_complex.h * @brief Class needed for compatibility with Gudhi. Not useful for other purposes. **/ class Skeleton_simplex_range { diff --git a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex/counter.h b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex/counter.h index bee19344..3a8c3988 100644 --- a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex/counter.h +++ b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex/counter.h @@ -31,9 +31,8 @@ namespace Gudhi { namespace Cubical_complex { /** - * @class counter counter.h gudhi/Bitmap_cubical_complex/counter.h - * This is an implementation of a counter being a vector of integers. - * The constructor of the class takes as an input two vectors W and V. + * @brief This is an implementation of a counter being a vector of integers. + * @details The constructor of the class takes as an input two vectors W and V. * It assumes that W < V coordinatewise. * If the initial counter W is not specified, it is assumed to be vector of zeros. * The class allows to iterate between W and V by using increment() function. 
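The counter documented in the hunk above is essentially an odometer running coordinate-wise from a begin vector W to an end vector V; a stand-alone sketch of that behaviour (written here for illustration only, not the GUDHI class itself):

#include <iostream>
#include <vector>

class SimpleCounter {
 public:
  SimpleCounter(std::vector<int> begin, std::vector<int> end)
      : begin_(begin), end_(end), current_(begin) {}

  // Advance to the next vector; return false once every coordinate has wrapped,
  // i.e. the end counter has been reached.
  bool increment() {
    for (std::size_t i = 0; i < current_.size(); ++i) {
      if (current_[i] < end_[i]) {
        ++current_[i];
        return true;
      }
      current_[i] = begin_[i];  // carry to the next coordinate
    }
    return false;
  }

  const std::vector<int>& current() const { return current_; }

 private:
  std::vector<int> begin_, end_, current_;
};

int main() {
  SimpleCounter c({0, 0}, {1, 2});  // visits {0,0},{1,0},{0,1},{1,1},{0,2},{1,2}
  do {
    std::cout << c.current()[0] << " " << c.current()[1] << "\n";
  } while (c.increment());
  return 0;
}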
diff --git a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h index 7904eb5b..2f74ba9e 100644 --- a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h +++ b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_base.h @@ -39,7 +39,6 @@ namespace Gudhi { namespace Cubical_complex { /** - * @class Bitmap_cubical_complex_base Bitmap_cubical_complex_base.h gudhi/Bitmap_cubical_complex_base.h * @brief Cubical complex represented as a bitmap, class with basic implementation. * @ingroup cubical_complex * @details This is a class implementing a basic bitmap data structure to store cubical complexes. @@ -190,7 +189,6 @@ class Bitmap_cubical_complex_base { // ITERATORS /** - * @class All_cells_iterator Bitmap_cubical_complex_base.h gudhi/Bitmap_cubical_complex_base.h * @brief Iterator through all cells in the complex (in order they appear in the structure -- i.e. * in lexicographical order). **/ @@ -259,7 +257,6 @@ class Bitmap_cubical_complex_base { } /** - * @class All_cells_range Bitmap_cubical_complex_base.h gudhi/Bitmap_cubical_complex_base.h * @brief All_cells_range class provides ranges for All_cells_iterator **/ class All_cells_range { @@ -311,7 +308,6 @@ class Bitmap_cubical_complex_base { } /** - * @class Top_dimensional_cells_iterator Bitmap_cubical_complex_base.h gudhi/Bitmap_cubical_complex_base.h * @brief Iterator through top dimensional cells of the complex. The cells appear in order they are stored * in the structure (i.e. in lexicographical order) **/ @@ -414,7 +410,6 @@ class Bitmap_cubical_complex_base { } /** - * @class Top_dimensional_cells_range Bitmap_cubical_complex_base.h gudhi/Bitmap_cubical_complex_base.h * @brief Top_dimensional_cells_iterator_range class provides ranges for Top_dimensional_cells_iterator_range **/ class Top_dimensional_cells_range { diff --git a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h index 5b7e4115..0d0f2f84 100644 --- a/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h +++ b/src/Bitmap_cubical_complex/include/gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h @@ -39,7 +39,6 @@ namespace Cubical_complex { // the cells on the right / top are not in the Bitmap_cubical_complex_periodic_boundary_conditions_base /** - * @class Bitmap_cubical_complex_periodic_boundary_conditions_base Bitmap_cubical_complex_periodic_boundary_conditions_base.h gudhi/Bitmap_cubical_complex_periodic_boundary_conditions_base.h * @brief Cubical complex with periodic boundary conditions represented as a bitmap. * @ingroup cubical_complex * @details This is a class implementing a bitmap data structure with periodic boundary conditions. Most of the functions are diff --git a/src/Doxyfile b/src/Doxyfile index 63e7145c..abafdf16 100644 --- a/src/Doxyfile +++ b/src/Doxyfile @@ -153,7 +153,7 @@ STRIP_FROM_PATH = # specify the list of include paths that are normally passed to the compiler # using the -I flag. -STRIP_FROM_INC_PATH = +STRIP_FROM_INC_PATH = include concept # If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but # less readable) file names. 
This can be useful is your file systems doesn't diff --git a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h index d77b993e..1b86f1f9 100644 --- a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h +++ b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h @@ -174,8 +174,7 @@ different representations. @{ */ -/** \class Persistent_cohomology Persistent_cohomology.h gudhi/Persistent_cohomology.h - * \brief Computes the persistent cohomology of a filtered complex. +/** \brief Computes the persistent cohomology of a filtered complex. * * The computation is implemented with a Compressed Annotation Matrix * (CAM)\cite DBLP:conf/esa/BoissonnatDM13, diff --git a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Field_Zp.h b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Field_Zp.h index 458dff98..6db16e69 100644 --- a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Field_Zp.h +++ b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Field_Zp.h @@ -30,8 +30,7 @@ namespace Gudhi { namespace persistent_cohomology { -/** \class Field_Zp Field_Zp.h gudhi/Persistent_cohomology/Field_Zp.h - * \brief Structure representing the coefficient field \f$\mathbb{Z}/p\mathbb{Z}\f$ +/** \brief Structure representing the coefficient field \f$\mathbb{Z}/p\mathbb{Z}\f$ * * \implements CoefficientField * \ingroup persistent_cohomology diff --git a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Multi_field.h b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Multi_field.h index 5f987545..38bc08d1 100644 --- a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Multi_field.h +++ b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology/Multi_field.h @@ -32,8 +32,7 @@ namespace Gudhi { namespace persistent_cohomology { -/** \class Multi_field Multi_field.h gudhi/Persistent_cohomology/Multi_field.h - * \brief Structure representing coefficients in a set of finite fields simultaneously +/** \brief Structure representing coefficients in a set of finite fields simultaneously * using the chinese remainder theorem. * * \implements CoefficientField diff --git a/src/common/doc/Gudhi_banner.jpg b/src/common/doc/Gudhi_banner.jpg deleted file mode 100644 index ebd3d8af..00000000 Binary files a/src/common/doc/Gudhi_banner.jpg and /dev/null differ diff --git a/src/common/doc/Gudhi_banner.png b/src/common/doc/Gudhi_banner.png new file mode 100644 index 00000000..18e8a672 Binary files /dev/null and b/src/common/doc/Gudhi_banner.png differ diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h index 2f52fd7e..1e0dbcda 100644 --- a/src/common/doc/main_page.h +++ b/src/common/doc/main_page.h @@ -1,6 +1,6 @@ /*! 
\mainpage * \tableofcontents - * \image html "Gudhi_banner.jpg" "" width=20cm + * \image html "Gudhi_banner.png" "" width=20cm * * \section Introduction Introduction * The Gudhi library (Geometry Understanding in Higher Dimensions) is a generic open source C++ library for -- cgit v1.2.3 From fde42a187d4a1a7abb4eaa3239cd884f8731b08a Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Fri, 1 Apr 2016 08:50:48 +0000 Subject: cpplint/cppcheck fixes git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/alphashapes@1091 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 1c274cd8c8a3c76fe35f96c7186bcbf3d8bee3e8 --- src/Simplex_tree/include/gudhi/Simplex_tree.h | 14 +++++++------- src/common/include/gudhi/reader_utils.h | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) (limited to 'src/common') diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree.h b/src/Simplex_tree/include/gudhi/Simplex_tree.h index f5bc0a11..afc7d613 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree.h @@ -955,7 +955,7 @@ class Simplex_tree { bool operator()(const Simplex_handle sh1, const Simplex_handle sh2) const { // Not using st_->filtration(sh1) because it uselessly tests for null_simplex. if (sh1->second.filtration() != sh2->second.filtration()) { - return sh1->second.filtration() < sh2->second.filtration(); + return sh1->second.filtration() < sh2->second.filtration(); } // is sh1 a proper subface of sh2 return st_->reverse_lexicographic_order(sh1, sh2); @@ -1194,16 +1194,16 @@ class Simplex_tree { bool prune_above_filtration(Filtration_value filt) { return rec_prune_above_filtration(root(), filt); } - + private: bool rec_prune_above_filtration(Siblings* sib, Filtration_value filt) { - auto&& list=sib->members(); + auto&& list = sib->members(); auto last = std::remove_if(list.begin(), list.end(), [=](Dit_value_t& simplex) { - if (simplex.second.filtration()<=filt) return false; + if (simplex.second.filtration() <= filt) return false; if (has_children(&simplex)) rec_delete(simplex.second.children()); return true; }); - + bool modified = (last != list.end()); if (last == list.begin() && sib != root()) { // Removing the whole siblings, parent becomes a leaf. @@ -1213,8 +1213,8 @@ class Simplex_tree { } else { // Keeping some elements of siblings. Remove the others, and recurse in the remaining ones. 
list.erase(last, list.end()); - for(auto&& simplex : list) - if(has_children(&simplex)) + for (auto&& simplex : list) + if (has_children(&simplex)) modified |= rec_prune_above_filtration(simplex.second.children(), filt); } return modified; diff --git a/src/common/include/gudhi/reader_utils.h b/src/common/include/gudhi/reader_utils.h index da2c2c36..899f9df6 100644 --- a/src/common/include/gudhi/reader_utils.h +++ b/src/common/include/gudhi/reader_utils.h @@ -161,7 +161,7 @@ bool read_simplex(std::istream & in_, std::vector< Vertex_handle > & simplex, Fi simplex.push_back(v); } in_ >> fil; - in_.ignore((std::numeric_limits::max)(), '\n'); // ignore until the carriage return + in_.ignore((std::numeric_limits::max)(), '\n'); // ignore until the carriage return return true; } -- cgit v1.2.3 From 3d592b82f837219ee9ecd8e33120563edb4e76ab Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Wed, 6 Apr 2016 06:19:05 +0000 Subject: Last modif git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/alphashapes@1097 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: d02c8b6bf27616ef515464a35e48b7871a69f9a8 --- .../include/gudhi/Delaunay_triangulation_off_io.h | 47 ++++++++++++++++++++-- 1 file changed, 43 insertions(+), 4 deletions(-) (limited to 'src/common') diff --git a/src/common/include/gudhi/Delaunay_triangulation_off_io.h b/src/common/include/gudhi/Delaunay_triangulation_off_io.h index 7bf5569e..50be9a59 100644 --- a/src/common/include/gudhi/Delaunay_triangulation_off_io.h +++ b/src/common/include/gudhi/Delaunay_triangulation_off_io.h @@ -27,6 +27,10 @@ #include #include +#include + +#include + #include "gudhi/Off_reader.h" namespace Gudhi { @@ -42,8 +46,9 @@ template class Delaunay_triangulation_off_visitor_reader { private: Complex* complex_; - typedef typename Complex::Point Point; - std::vector point_cloud; + typedef typename Complex::Point_d Point_d; + typedef typename Complex::size_type size_type; + std::vector point_cloud; public: // TODO(VR) : Pass a Complex as a parameter is required, even if not used. Otherwise, compilation is KO. @@ -98,7 +103,7 @@ class Delaunay_triangulation_off_visitor_reader { std::cout << std::endl; #endif // DEBUG_TRACES // Fill the point cloud - point_cloud.push_back(Point(point.size(), point.begin(), point.end())); + point_cloud.push_back(Point_d(point.size(), point.begin(), point.end())); } // Off_reader visitor maximal_face implementation - not used @@ -110,7 +115,11 @@ class Delaunay_triangulation_off_visitor_reader { void done() { // It is advised to insert all the points at a time in a Delaunay Triangulation because points are sorted at the // beginning of the insertion - complex_->insert(point_cloud.begin(), point_cloud.end()); + size_type inserted = complex_->insert(point_cloud.begin(), point_cloud.end()); + if (inserted != (point_cloud.end() -point_cloud.begin())) { + std::cerr << "Delaunay_triangulation_off_visitor_reader::done - insertion failed " << inserted << " != " << + (point_cloud.end() -point_cloud.begin()) << "\n"; + } } /** \brief Returns the constructed Delaunay triangulation. 
@@ -120,6 +129,36 @@ class Delaunay_triangulation_off_visitor_reader { Complex* get_complex() const { return complex_; } + + private: + template + size_type insert_with_index(const PointRangeIterator& first, const PointRangeIterator& last) { + size_type vertices_before_insertion = complex_->number_of_vertices(); + std::vector points(first, last); + + std::vector indices; + indices.reserve(points.size()); + + // Creates a vector {0, 1, ..., N-1} + std::copy(boost::counting_iterator(0), boost::counting_iterator(points.size()), + std::back_inserter(indices)); + + // Sort indices considering CGAL spatial sort + typedef CGAL::Spatial_sort_traits_adapter_d Search_traits_d; + spatial_sort(indices.begin(),indices.end(),Search_traits_d(&(points[0]))); + + typename Delaunay_triangulation::Full_cell_handle hint; + for (typename std::vector::const_iterator it = indices.begin(), end = indices.end(); + it != end; ++it) { + typename Delaunay_triangulation::Vertex_handle pos = complex_->insert(points[*it], hint); + // Save index value as data to retrieve it after insertion + pos->data() = *it; + hint = pos->full_cell(); + } + + return (complex_->number_of_vertices() - vertices_before_insertion); + } + }; /** -- cgit v1.2.3 From fb22bc9ca84f5b3c55a598bf0c903a73c117e783 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Wed, 6 Apr 2016 11:08:33 +0000 Subject: Replace Delaunay_triangulation_off_io.h and Delaunay_triangulation_off_rw.cpp with Points_off_io.h and CGAL_points_off_reader.cpp Adapt UT and examples for this Adapt Alpha complex for it Alpha complex is now inserting points in a faster way (after a spatial_sort). Remove Alpha complex construction from a pointer on Delaunay triangulation (no more needed). Adapt documentation to all these modifications Forbid copy/move constructor/assignment operator on Alpha complex git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/alphashapes@1098 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 08a673b66451b5cb03fbdf482d696d93b35d220f --- src/Alpha_complex/doc/Intro_alpha_complex.h | 6 +- src/Alpha_complex/doc/alpha_complex_doc.ipe | 136 ++++-- src/Alpha_complex/doc/alpha_complex_doc.png | Bin 25150 -> 25554 bytes src/Alpha_complex/doc/alpha_complex_doc_420.ipe | 514 +++++++++++++++++++++ src/Alpha_complex/doc/alpha_complex_doc_420.png | Bin 0 -> 80794 bytes src/Alpha_complex/doc/alpha_complex_doc_421.ipe | 514 --------------------- src/Alpha_complex/doc/alpha_complex_doc_421.png | Bin 100798 -> 0 bytes .../doc/alpha_complex_representation.ipe | 16 +- .../doc/alpha_complex_representation.png | Bin 14628 -> 14606 bytes .../example/Alpha_complex_from_off.cpp | 8 +- .../example/Alpha_complex_from_points.cpp | 15 +- .../example/alphaoffreader_for_doc_32.txt | 26 +- .../example/alphaoffreader_for_doc_60.txt | 36 +- src/Alpha_complex/include/gudhi/Alpha_complex.h | 109 +++-- src/Alpha_complex/test/Alpha_complex_unit_test.cpp | 2 - .../example/alpha_complex_persistence.cpp | 3 +- src/common/example/CGAL_points_off_reader.cpp | 43 ++ src/common/example/CMakeLists.txt | 12 +- .../example/Delaunay_triangulation_off_rw.cpp | 54 --- src/common/example/cgaloffreader_result.txt | 7 + .../example/dtoffrw_alphashapedoc_result.off | 15 - .../example/dtoffrw_alphashapedoc_result.txt | 2 - .../include/gudhi/Delaunay_triangulation_off_io.h | 348 -------------- src/common/include/gudhi/Points_off_io.h | 178 +++++++ src/common/test/CMakeLists.txt | 14 +- src/common/test/dtoffrw_alphashapedoc_result.off | 22 +- src/common/test/dtoffrw_unit_test.cpp | 90 ---- 
src/common/test/points_off_reader_unit_test.cpp | 78 ++++ 28 files changed, 1044 insertions(+), 1204 deletions(-) create mode 100644 src/Alpha_complex/doc/alpha_complex_doc_420.ipe create mode 100644 src/Alpha_complex/doc/alpha_complex_doc_420.png delete mode 100644 src/Alpha_complex/doc/alpha_complex_doc_421.ipe delete mode 100644 src/Alpha_complex/doc/alpha_complex_doc_421.png create mode 100644 src/common/example/CGAL_points_off_reader.cpp delete mode 100644 src/common/example/Delaunay_triangulation_off_rw.cpp create mode 100644 src/common/example/cgaloffreader_result.txt delete mode 100644 src/common/example/dtoffrw_alphashapedoc_result.off delete mode 100644 src/common/example/dtoffrw_alphashapedoc_result.txt delete mode 100644 src/common/include/gudhi/Delaunay_triangulation_off_io.h create mode 100644 src/common/include/gudhi/Points_off_io.h delete mode 100644 src/common/test/dtoffrw_unit_test.cpp create mode 100644 src/common/test/points_off_reader_unit_test.cpp (limited to 'src/common') diff --git a/src/Alpha_complex/doc/Intro_alpha_complex.h b/src/Alpha_complex/doc/Intro_alpha_complex.h index 0dea2b16..9d0dcefa 100644 --- a/src/Alpha_complex/doc/Intro_alpha_complex.h +++ b/src/Alpha_complex/doc/Intro_alpha_complex.h @@ -112,14 +112,14 @@ namespace alphacomplex { * * \subsubsection dimension2 Dimension 2 * - * From the example above, it means the algorithm looks into each triangle ([4,2,1], [2,4,6], [4,5,6], ...), + * From the example above, it means the algorithm looks into each triangle ([0,1,2], [0,2,4], [1,2,3], ...), * computes the filtration value of the triangle, and then propagates the filtration value as described * here : - * \image html "alpha_complex_doc_421.png" "Filtration value propagation example" + * \image html "alpha_complex_doc_420.png" "Filtration value propagation example" * * \subsubsection dimension1 Dimension 1 * - * Then, the algorithm looks into each edge ([1,2], [4,2], [4,1], ...), + * Then, the algorithm looks into each edge ([0,1], [0,2], [1,2], ...), * computes the filtration value of the edge (in this case, propagation will have no effect). 
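
For a concrete picture of this propagation in dimension 2, the following self-contained sketch (plain C++, no CGAL) hard-codes the seven points listed in cgaloffreader_result.txt (the alphacomplexdoc.off data, indexed 0..6 as above) together with the six Delaunay triangles of the documentation figures. Triangles get their squared circumradius, Gabriel edges keep the squared radius of their diametral ball, and non-Gabriel edges inherit the smallest value among their incident triangles. This is only a schematic of the rule described here, not the Alpha_complex implementation (which works on the CGAL triangulation in arbitrary dimension); for this data set the printed values agree with alphaoffreader_for_doc_60.txt.

#include <algorithm>
#include <array>
#include <cmath>
#include <iostream>
#include <map>
#include <utility>
#include <vector>

struct Pt { double x, y; };

double sq_dist(const Pt& u, const Pt& v) {
  return (u.x - v.x) * (u.x - v.x) + (u.y - v.y) * (u.y - v.y);
}

// Squared circumradius of a triangle: R^2 = a^2 b^2 c^2 / (16 K^2), K = triangle area.
double sq_circumradius(const Pt& p, const Pt& q, const Pt& r) {
  double a2 = sq_dist(q, r), b2 = sq_dist(p, r), c2 = sq_dist(p, q);
  double area = 0.5 * std::fabs(p.x * (q.y - r.y) + q.x * (r.y - p.y) + r.x * (p.y - q.y));
  return a2 * b2 * c2 / (16.0 * area * area);
}

int main() {
  // The 7 points of alphacomplexdoc.off, indexed 0..6 as in this patch.
  std::vector<Pt> pts = {{1, 1}, {7, 0}, {4, 6}, {9, 6}, {0, 14}, {2, 19}, {9, 17}};
  // The 6 Delaunay triangles of the documentation example (hard-coded for this sketch).
  std::vector<std::array<int, 3> > triangles = {
      {1, 2, 3}, {0, 1, 2}, {4, 5, 6}, {2, 3, 6}, {2, 4, 6}, {0, 2, 4}};

  // Dimension 2: alpha^2 of a triangle is its squared circumradius.
  // Also record, for every edge, the values of its incident triangles.
  std::map<std::pair<int, int>, std::vector<double> > incident;
  for (const auto& t : triangles) {
    double alpha2 = sq_circumradius(pts[t[0]], pts[t[1]], pts[t[2]]);
    std::cout << "triangle (" << t[0] << " " << t[1] << " " << t[2] << ") -> " << alpha2 << "\n";
    for (int k = 0; k < 3; ++k) {
      int u = std::min(t[k], t[(k + 1) % 3]);
      int v = std::max(t[k], t[(k + 1) % 3]);
      incident[std::make_pair(u, v)].push_back(alpha2);
    }
  }

  // Dimension 1: a Gabriel edge keeps the squared radius of its diametral ball,
  // a non-Gabriel edge inherits the smallest alpha^2 among its incident triangles.
  for (const auto& e : incident) {
    int u = e.first.first, v = e.first.second;
    Pt center = {(pts[u].x + pts[v].x) / 2.0, (pts[u].y + pts[v].y) / 2.0};
    double r2 = sq_dist(pts[u], pts[v]) / 4.0;
    bool gabriel = true;
    for (int w = 0; w < static_cast<int>(pts.size()); ++w)
      if (w != u && w != v && sq_dist(pts[w], center) < r2)
        gabriel = false;  // another input point lies strictly inside the diametral ball
    double alpha2 = gabriel ? r2 : *std::min_element(e.second.begin(), e.second.end());
    std::cout << "edge (" << u << " " << v << ") -> " << alpha2 << "\n";
  }
  // Dimension 0: every vertex enters the filtration with value 0.
  return 0;
}
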
* * \subsubsection dimension0 Dimension 0 diff --git a/src/Alpha_complex/doc/alpha_complex_doc.ipe b/src/Alpha_complex/doc/alpha_complex_doc.ipe index 99bd05af..baf0d26a 100644 --- a/src/Alpha_complex/doc/alpha_complex_doc.ipe +++ b/src/Alpha_complex/doc/alpha_complex_doc.ipe @@ -1,7 +1,7 @@ - + @@ -253,13 +253,13 @@ h 320 580 l Delaunay triangulation -2 -6 -4 -5 -1 -3 -0 +0 +1 +2 +3 +4 +5 +6 280 660 m 300 710 l @@ -314,7 +314,7 @@ h -3 +2 300 688 m 300 676 l @@ -322,15 +322,14 @@ h 312 688 l h - +2 + 300 688 m 300 676 l 312 676 l 312 688 l h -4 -3 300 688 m 300 676 l @@ -338,6 +337,8 @@ h 312 688 l h +4 +1 300 688 m 300 676 l @@ -345,39 +346,15 @@ h 312 688 l h -4 -1 - -300 688 m -300 676 l -312 676 l -312 688 l -h - - -300 688 m -300 676 l -312 676 l -312 688 l -h - -5 - + 300 688 m 300 676 l 312 676 l 312 688 l h -5 +4 3 - -300 688 m -300 676 l -312 676 l -312 688 l -h - 300 688 m 300 676 l @@ -385,7 +362,6 @@ h 312 688 l h -4 2 300 688 m @@ -401,7 +377,7 @@ h 312 688 l h -4 +3 6 300 688 m @@ -418,14 +394,14 @@ h 312 688 l h - + 300 688 m 300 676 l 312 676 l 312 688 l h -6 +6 6 300 688 m @@ -442,22 +418,22 @@ h 312 688 l h - + 300 688 m 300 676 l 312 676 l 312 688 l h -6 - +6 + 300 688 m 300 676 l 312 676 l 312 688 l h -6 +6 292 716 m 292 728 l @@ -514,11 +490,11 @@ h 4 5 6 - + 436 708 m 436 716 l - + 364 708 m 364 716 l @@ -535,11 +511,11 @@ h 308 716 l 308 716 l - + 264 688 m 268 696 l - + 292 688 m 292 696 l @@ -555,5 +531,65 @@ h 448 612 m 448 620 l +3 + +300 688 m +300 676 l +312 676 l +312 688 l +h + + +300 688 m +300 676 l +312 676 l +312 688 l +h + +6 + +364 688 m +364 696 l + + +300 688 m +300 676 l +312 676 l +312 688 l +h + +6 + +300 688 m +300 676 l +312 676 l +312 688 l +h + +6 + +436 708 m +436 716 l + + +300 688 m +300 676 l +312 676 l +312 688 l +h + +6 + +300 688 m +300 676 l +312 676 l +312 688 l +h + +6 + +436 708 m +436 716 l + diff --git a/src/Alpha_complex/doc/alpha_complex_doc.png b/src/Alpha_complex/doc/alpha_complex_doc.png index cfe3ede6..0b6201da 100644 Binary files a/src/Alpha_complex/doc/alpha_complex_doc.png and b/src/Alpha_complex/doc/alpha_complex_doc.png differ diff --git a/src/Alpha_complex/doc/alpha_complex_doc_420.ipe b/src/Alpha_complex/doc/alpha_complex_doc_420.ipe new file mode 100644 index 00000000..5d1d29d4 --- /dev/null +++ b/src/Alpha_complex/doc/alpha_complex_doc_420.ipe @@ -0,0 +1,514 @@ + + + + + + + +0 0 m +-1 0.333 l +-1 -0.333 l +h + + + + +0 0 m +-1 0.333 l +-1 -0.333 l +h + + + + +0.6 0 0 0.6 0 0 e +0.4 0 0 0.4 0 0 e + + + + +0.6 0 0 0.6 0 0 e + + + + + +0.5 0 0 0.5 0 0 e + + +0.6 0 0 0.6 0 0 e +0.4 0 0 0.4 0 0 e + + + + + +-0.6 -0.6 m +0.6 -0.6 l +0.6 0.6 l +-0.6 0.6 l +h +-0.4 -0.4 m +0.4 -0.4 l +0.4 0.4 l +-0.4 0.4 l +h + + + + +-0.6 -0.6 m +0.6 -0.6 l +0.6 0.6 l +-0.6 0.6 l +h + + + + + +-0.5 -0.5 m +0.5 -0.5 l +0.5 0.5 l +-0.5 0.5 l +h + + +-0.6 -0.6 m +0.6 -0.6 l +0.6 0.6 l +-0.6 0.6 l +h +-0.4 -0.4 m +0.4 -0.4 l +0.4 0.4 l +-0.4 0.4 l +h + + + + + + +-0.43 -0.57 m +0.57 0.43 l +0.43 0.57 l +-0.57 -0.43 l +h + + +-0.43 0.57 m +0.57 -0.43 l +0.43 -0.57 l +-0.57 0.43 l +h + + + + + +0 0 m +-1 0.333 l +-1 -0.333 l +h + + + + +0 0 m +-1 0.333 l +-0.8 0 l +-1 -0.333 l +h + + + + +0 0 m +-1 0.333 l +-0.8 0 l +-1 -0.333 l +h + + + + +-1 0.333 m +0 0 l +-1 -0.333 l + + + + +0 0 m +-1 0.333 l +-1 -0.333 l +h +-1 0 m +-2 0.333 l +-2 -0.333 l +h + + + + +0 0 m +-1 0.333 l +-1 -0.333 l +h +-1 0 m +-2 0.333 l +-2 -0.333 l +h + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + +320 580 m +350 520 l +290 530 l +320 580 l +320 580 l + + +320 580 m +280 660 l +290 530 l +320 580 l +320 580 l + + +320 580 m +370 580 l +350 520 l +320 580 l + +Cell [4,2,0] +0 +1 +2 +3 +4 +5 +6 + +280 660 m +300 710 l +370 690 l +280 660 l + + +320 580 m +370 690 l +370 580 l +320 580 l + + +280 660 m +370 690 l +320 580 l +280 660 l + + +77.2727 0 0 77.2727 243.636 591.818 e + + +243.428 591.569 m +186.061 643.28 l + +$\alpha_{420}$ + +320 580 m +350 520 l +290 530 l +320 580 l +320 580 l + + +320 580 m +280 660 l +290 530 l +320 580 l +320 580 l + + +320 580 m +370 580 l +350 520 l +320 580 l + +[2,0] is Gabriel $\rightarrow$ $\alpha_{20}$ is not$\\$ +modified (NaN) + +0 +2 +3 +4 +5 +6 + +280 660 m +300 710 l +370 690 l +280 660 l + + +320 580 m +370 690 l +370 580 l +320 580 l + + +280 660 m +370 690 l +320 580 l +280 660 l + +$\alpha_{20}$ + +290 530 m +320 580 l + + +29.1548 0 0 29.1548 305 555 e + + +304.883 555.015 m +334.509 555.015 l + + +320 580 m +350 520 l +290 530 l +320 580 l +320 580 l + + +320 580 m +280 660 l +290 530 l +320 580 l +320 580 l + + +320 580 m +370 580 l +350 520 l +320 580 l + +[0,4] is not Gabriel $\rightarrow$ $\alpha_{40} = \alpha_{420}$ +0 +3 +5 +6 + +280 660 m +300 710 l +370 690 l +280 660 l + + +320 580 m +370 690 l +370 580 l +320 580 l + + +280 660 m +370 690 l +320 580 l +280 660 l + +$\alpha_{40}$ + +290 530 m +280 660 l + + +320 580 m +350 520 l +290 530 l +320 580 l +320 580 l + + +320 580 m +280 660 l +290 530 l +320 580 l +320 580 l + + +320 580 m +370 580 l +350 520 l +320 580 l + +0 +1 +2 +3 +5 +6 + +280 660 m +300 710 l +370 690 l +280 660 l + + +320 580 m +370 690 l +370 580 l +320 580 l + + +280 660 m +370 690 l +320 580 l +280 660 l + +$\alpha_{42}$ +4 + +406.093 497.775 m +446.094 418.092 l + + +44.5799 0 0 44.5799 425.934 457.774 e + + +425.854 457.774 m +470.795 457.774 l + +[2,4] is Gabriel $\rightarrow$ $\alpha_{42}$ is not modified (NaN) + + +205.028 596.091 m +110.946 544.02 l + + +280.768 588.99 m +280.768 547.57 l + + +341.123 594.316 m +413.904 554.079 l + +For all faces of [4,2,0] +N.B. : is Gabriel on a single point has no sense. 
+Dimension =2 - $\sigma$ = [4,2,0] + +247.333 430.892 m +311.764 430.892 l + + + + + + + + + + + + + + +1 + + + + + +4 + + +1 + + +2 + +65.192 0 0 65.192 285 595 e + + + + + + + + + + + + + diff --git a/src/Alpha_complex/doc/alpha_complex_doc_420.png b/src/Alpha_complex/doc/alpha_complex_doc_420.png new file mode 100644 index 00000000..ef7187f7 Binary files /dev/null and b/src/Alpha_complex/doc/alpha_complex_doc_420.png differ diff --git a/src/Alpha_complex/doc/alpha_complex_doc_421.ipe b/src/Alpha_complex/doc/alpha_complex_doc_421.ipe deleted file mode 100644 index 727816c5..00000000 --- a/src/Alpha_complex/doc/alpha_complex_doc_421.ipe +++ /dev/null @@ -1,514 +0,0 @@ - - - - - - - -0 0 m --1 0.333 l --1 -0.333 l -h - - - - -0 0 m --1 0.333 l --1 -0.333 l -h - - - - -0.6 0 0 0.6 0 0 e -0.4 0 0 0.4 0 0 e - - - - -0.6 0 0 0.6 0 0 e - - - - - -0.5 0 0 0.5 0 0 e - - -0.6 0 0 0.6 0 0 e -0.4 0 0 0.4 0 0 e - - - - - --0.6 -0.6 m -0.6 -0.6 l -0.6 0.6 l --0.6 0.6 l -h --0.4 -0.4 m -0.4 -0.4 l -0.4 0.4 l --0.4 0.4 l -h - - - - --0.6 -0.6 m -0.6 -0.6 l -0.6 0.6 l --0.6 0.6 l -h - - - - - --0.5 -0.5 m -0.5 -0.5 l -0.5 0.5 l --0.5 0.5 l -h - - --0.6 -0.6 m -0.6 -0.6 l -0.6 0.6 l --0.6 0.6 l -h --0.4 -0.4 m -0.4 -0.4 l -0.4 0.4 l --0.4 0.4 l -h - - - - - - --0.43 -0.57 m -0.57 0.43 l -0.43 0.57 l --0.57 -0.43 l -h - - --0.43 0.57 m -0.57 -0.43 l -0.43 -0.57 l --0.57 0.43 l -h - - - - - -0 0 m --1 0.333 l --1 -0.333 l -h - - - - -0 0 m --1 0.333 l --0.8 0 l --1 -0.333 l -h - - - - -0 0 m --1 0.333 l --0.8 0 l --1 -0.333 l -h - - - - --1 0.333 m -0 0 l --1 -0.333 l - - - - -0 0 m --1 0.333 l --1 -0.333 l -h --1 0 m --2 0.333 l --2 -0.333 l -h - - - - -0 0 m --1 0.333 l --1 -0.333 l -h --1 0 m --2 0.333 l --2 -0.333 l -h - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -320 580 m -350 520 l -290 530 l -320 580 l -320 580 l - - -320 580 m -280 660 l -290 530 l -320 580 l -320 580 l - - -320 580 m -370 580 l -350 520 l -320 580 l - -Cell [4,2,1] -2 -6 -4 -5 -1 -3 -0 - -280 660 m -300 710 l -370 690 l -280 660 l - - -320 580 m -370 690 l -370 580 l -320 580 l - - -280 660 m -370 690 l -320 580 l -280 660 l - - -77.2727 0 0 77.2727 243.636 591.818 e - - -243.428 591.569 m -186.061 643.28 l - -$\alpha_{421}$ - -320 580 m -350 520 l -290 530 l -320 580 l -320 580 l - - -320 580 m -280 660 l -290 530 l -320 580 l -320 580 l - - -320 580 m -370 580 l -350 520 l -320 580 l - -[4,2] is Gabriel $\rightarrow$ $\alpha_{42}$ is not$\\$ -modified (NaN) - -2 -4 -5 -1 -3 -0 - -280 660 m -300 710 l -370 690 l -280 660 l - - -320 580 m -370 690 l -370 580 l -320 580 l - - -280 660 m -370 690 l -320 580 l -280 660 l - -$\alpha_{42}$ - -290 530 m -320 580 l - - -29.1548 0 0 29.1548 305 555 e - - -304.883 555.015 m -334.509 555.015 l - - -320 580 m -350 520 l -290 530 l -320 580 l -320 580 l - - -320 580 m -280 660 l -290 530 l -320 580 l -320 580 l - - -320 580 m -370 580 l -350 520 l -320 580 l - -[2,1] is not Gabriel $\rightarrow$ $\alpha_{21} = \alpha_{421}$ -2 -5 -3 -0 - -280 660 m -300 710 l -370 690 l -280 660 l - - -320 580 m -370 690 l -370 580 l -320 580 l - - -280 660 m -370 690 l -320 580 l -280 660 l - -$\alpha_{12}$ - -290 530 m -280 660 l - - -320 580 m -350 520 l -290 530 l -320 580 l -320 580 l - - -320 580 m -280 660 l -290 530 l -320 580 l -320 580 l - - -320 580 m -370 580 l -350 520 l -320 580 l - -2 -6 -4 -5 -3 -0 - -280 660 m -300 710 l -370 690 l -280 660 l - - -320 580 m -370 690 l -370 580 l -320 580 l - - 
-280 660 m -370 690 l -320 580 l -280 660 l - -$\alpha_{41}$ -1 - -406.093 497.775 m -446.094 418.092 l - - -44.5799 0 0 44.5799 425.934 457.774 e - - -425.854 457.774 m -470.795 457.774 l - -[4,1] is Gabriel $\rightarrow$ $\alpha_{41}$ is not modified (NaN) - - -205.028 596.091 m -110.946 544.02 l - - -280.768 588.99 m -280.768 547.57 l - - -341.123 594.316 m -413.904 554.079 l - -For all faces of [4,2,1] -N.B. : is Gabriel on a single point has no sense. -Dimension =2 - $\sigma$ = [4,2,1] - -247.333 430.892 m -311.764 430.892 l - - - - - - - - - - - - - - -6 - - - - - -1 - - -6 - - -4 - -65.192 0 0 65.192 285 595 e - - - - - - - - - - - - - diff --git a/src/Alpha_complex/doc/alpha_complex_doc_421.png b/src/Alpha_complex/doc/alpha_complex_doc_421.png deleted file mode 100644 index 1cce4402..00000000 Binary files a/src/Alpha_complex/doc/alpha_complex_doc_421.png and /dev/null differ diff --git a/src/Alpha_complex/doc/alpha_complex_representation.ipe b/src/Alpha_complex/doc/alpha_complex_representation.ipe index fead1661..e8096b93 100644 --- a/src/Alpha_complex/doc/alpha_complex_representation.ipe +++ b/src/Alpha_complex/doc/alpha_complex_representation.ipe @@ -1,7 +1,7 @@ - + @@ -251,13 +251,13 @@ h h Alpha complex -2 -6 -4 -5 -1 -3 -0 +0 +1 +2 +3 +4 +5 +6 58.1341 0 0 58.1341 218.925 692.601 e diff --git a/src/Alpha_complex/doc/alpha_complex_representation.png b/src/Alpha_complex/doc/alpha_complex_representation.png index 9833bff3..7b81cd69 100644 Binary files a/src/Alpha_complex/doc/alpha_complex_representation.png and b/src/Alpha_complex/doc/alpha_complex_representation.png differ diff --git a/src/Alpha_complex/example/Alpha_complex_from_off.cpp b/src/Alpha_complex/example/Alpha_complex_from_off.cpp index 18a1a20d..963ef5ca 100644 --- a/src/Alpha_complex/example/Alpha_complex_from_off.cpp +++ b/src/Alpha_complex/example/Alpha_complex_from_off.cpp @@ -4,17 +4,15 @@ #include #include -void usage(char * const progName) { +void usage(int nbArgs, char * const progName) { + std::cerr << "Error: Number of arguments (" << nbArgs << ") is not correct\n"; std::cerr << "Usage: " << progName << " filename.off alpha_square_max_value [ouput_file.txt]\n"; std::cerr << " i.e.: " << progName << " ../../data/points/alphacomplexdoc.off 60.0\n"; exit(-1); // ----- >> } int main(int argc, char **argv) { - if ((argc != 3) && (argc != 4)) { - std::cerr << "Error: Number of arguments (" << argc << ") is not correct\n"; - usage(argv[0]); - } + if ((argc != 3) && (argc != 4)) usage(argc, (argv[0] - 1)); std::string off_file_name(argv[1]); double alpha_square_max_value = atof(argv[2]); diff --git a/src/Alpha_complex/example/Alpha_complex_from_points.cpp b/src/Alpha_complex/example/Alpha_complex_from_points.cpp index 815e40d7..cd17af1e 100644 --- a/src/Alpha_complex/example/Alpha_complex_from_points.cpp +++ b/src/Alpha_complex/example/Alpha_complex_from_points.cpp @@ -4,13 +4,26 @@ #include #include #include +#include // for numeric limits typedef CGAL::Epick_d< CGAL::Dimension_tag<2> > Kernel; typedef Kernel::Point_d Point; typedef std::vector Vector_of_points; +void usage(int nbArgs, char * const progName) { + std::cerr << "Error: Number of arguments (" << nbArgs << ") is not correct\n"; + std::cerr << "Usage: " << progName << " [alpha_square_max_value]\n"; + std::cerr << " i.e.: " << progName << " 60.0\n"; + exit(-1); // ----- >> +} + int main(int argc, char **argv) { - double alpha_square_max_value = 60.0; + if ((argc != 1) && (argc != 2)) usage(argc, (argv[0] - 1)); + + // Delaunay complex if 
alpha_square_max_value is not given by the user. + double alpha_square_max_value = std::numeric_limits::infinity(); + if (argc == 2) + alpha_square_max_value = atof(argv[1]); // ---------------------------------------------------------------------------- // Init of a list of points diff --git a/src/Alpha_complex/example/alphaoffreader_for_doc_32.txt b/src/Alpha_complex/example/alphaoffreader_for_doc_32.txt index 5869fdff..13183e86 100644 --- a/src/Alpha_complex/example/alphaoffreader_for_doc_32.txt +++ b/src/Alpha_complex/example/alphaoffreader_for_doc_32.txt @@ -7,16 +7,16 @@ Iterator on alpha complex simplices in the filtration order, with [filtration va ( 4 ) -> [0] ( 5 ) -> [0] ( 6 ) -> [0] - ( 5 4 ) -> [6.25] - ( 3 1 ) -> [7.25] - ( 4 2 ) -> [8.5] - ( 6 2 ) -> [9.25] - ( 6 5 ) -> [10] - ( 6 4 ) -> [11.25] - ( 6 5 4 ) -> [12.5] - ( 6 4 2 ) -> [12.9959] - ( 3 0 ) -> [13.25] - ( 4 1 ) -> [20] - ( 1 0 ) -> [22.7367] - ( 3 1 0 ) -> [22.7367] - ( 5 0 ) -> [30.25] + ( 3 2 ) -> [6.25] + ( 5 4 ) -> [7.25] + ( 2 0 ) -> [8.5] + ( 1 0 ) -> [9.25] + ( 3 1 ) -> [10] + ( 2 1 ) -> [11.25] + ( 3 2 1 ) -> [12.5] + ( 2 1 0 ) -> [12.9959] + ( 6 5 ) -> [13.25] + ( 4 2 ) -> [20] + ( 6 4 ) -> [22.7367] + ( 6 5 4 ) -> [22.7367] + ( 6 3 ) -> [30.25] diff --git a/src/Alpha_complex/example/alphaoffreader_for_doc_60.txt b/src/Alpha_complex/example/alphaoffreader_for_doc_60.txt index 1d17a58a..71f29a00 100644 --- a/src/Alpha_complex/example/alphaoffreader_for_doc_60.txt +++ b/src/Alpha_complex/example/alphaoffreader_for_doc_60.txt @@ -7,21 +7,21 @@ Iterator on alpha complex simplices in the filtration order, with [filtration va ( 4 ) -> [0] ( 5 ) -> [0] ( 6 ) -> [0] - ( 5 4 ) -> [6.25] - ( 3 1 ) -> [7.25] - ( 4 2 ) -> [8.5] - ( 6 2 ) -> [9.25] - ( 6 5 ) -> [10] - ( 6 4 ) -> [11.25] - ( 6 5 4 ) -> [12.5] - ( 6 4 2 ) -> [12.9959] - ( 3 0 ) -> [13.25] - ( 4 1 ) -> [20] - ( 1 0 ) -> [22.7367] - ( 3 1 0 ) -> [22.7367] - ( 5 0 ) -> [30.25] - ( 4 0 ) -> [36.5] - ( 5 4 0 ) -> [36.5] - ( 4 1 0 ) -> [37.2449] - ( 2 1 ) -> [59.7107] - ( 4 2 1 ) -> [59.7107] + ( 3 2 ) -> [6.25] + ( 5 4 ) -> [7.25] + ( 2 0 ) -> [8.5] + ( 1 0 ) -> [9.25] + ( 3 1 ) -> [10] + ( 2 1 ) -> [11.25] + ( 3 2 1 ) -> [12.5] + ( 2 1 0 ) -> [12.9959] + ( 6 5 ) -> [13.25] + ( 4 2 ) -> [20] + ( 6 4 ) -> [22.7367] + ( 6 5 4 ) -> [22.7367] + ( 6 3 ) -> [30.25] + ( 6 2 ) -> [36.5] + ( 6 3 2 ) -> [36.5] + ( 6 4 2 ) -> [37.2449] + ( 4 0 ) -> [59.7107] + ( 4 2 0 ) -> [59.7107] diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex.h b/src/Alpha_complex/include/gudhi/Alpha_complex.h index 2b27a459..21eb5f48 100644 --- a/src/Alpha_complex/include/gudhi/Alpha_complex.h +++ b/src/Alpha_complex/include/gudhi/Alpha_complex.h @@ -27,14 +27,16 @@ #include #include #include -// to construct a Delaunay_triangulation from a OFF file -#include +// to construct Alpha_complex from a OFF file of points +#include #include #include // isnan, fmax +//#include #include #include +#include #include #include @@ -43,6 +45,7 @@ #include #include // std::pair #include +#include // for std::iota namespace Gudhi { @@ -57,7 +60,7 @@ namespace alphacomplex { * \details * The data structure can be constructed from a CGAL Delaunay triangulation (for more informations on CGAL Delaunay * triangulation, please refer to the corresponding chapter in page http://doc.cgal.org/latest/Triangulation/) or from - * an OFF file (cf. Delaunay_triangulation_off_reader). + * an OFF file (cf. Points_off_reader). * * Please refer to \ref alpha_complex for examples. 
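
As a concrete illustration of this usage, here is a minimal sketch in the spirit of the Alpha_complex_from_points example updated above; it assumes CGAL >= 4.7 with Eigen3, as required elsewhere in this patch series:

#include <gudhi/Alpha_complex.h>
#include <CGAL/Epick_d.h>

#include <iostream>
#include <limits>
#include <vector>

typedef CGAL::Epick_d< CGAL::Dimension_tag<2> > Kernel;
typedef Kernel::Point_d Point;

int main() {
  // The 7 points of the documentation example (cf. cgaloffreader_result.txt).
  std::vector<Point> points = { Point(1.0, 1.0), Point(7.0, 0.0), Point(4.0, 6.0), Point(9.0, 6.0),
                                Point(0.0, 14.0), Point(2.0, 19.0), Point(9.0, 17.0) };

  // With +infinity as threshold the whole Delaunay complex is kept.
  Gudhi::alphacomplex::Alpha_complex<Kernel> alpha_complex(points,
      std::numeric_limits<double>::infinity());

  // Alpha_complex derives from Simplex_tree<>, so the usual iterators are available.
  std::cout << "Simplices in filtration order, with [filtration value]:\n";
  for (auto f_simplex : alpha_complex.filtration_simplex_range()) {
    std::cout << "   ( ";
    for (auto vertex : alpha_complex.simplex_vertex_range(f_simplex))
      std::cout << vertex << " ";
    std::cout << ") -> [" << alpha_complex.filtration(f_simplex) << "]\n";
  }
  return 0;
}
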
* @@ -74,13 +77,19 @@ namespace alphacomplex { template> class Alpha_complex : public Simplex_tree<> { public: + // Add an int in TDS to save point index in the structure + typedef CGAL::Triangulation_data_structure, + CGAL::Triangulation_full_cell > TDS; /** \brief A Delaunay triangulation of a set of points in \f$ \mathbb{R}^D\f$.*/ - typedef typename CGAL::Delaunay_triangulation Delaunay_triangulation; + typedef CGAL::Delaunay_triangulation Delaunay_triangulation; + /** \brief A point in Euclidean space.*/ typedef typename Kernel::Point_d Point_d; /** \brief Geometric traits class that provides the geometric types and predicates needed by Delaunay * triangulations.*/ typedef Kernel Geom_traits; + private: // From Simplex_tree // Type required to insert into a simplex_tree (with or without subfaces). @@ -104,7 +113,7 @@ class Alpha_complex : public Simplex_tree<> { // Double map type to switch from CGAL vertex iterator to simplex tree vertex handle and vice versa. typedef typename std::map< CGAL_vertex_iterator, Vertex_handle > Map_vertex_iterator_to_handle; - typedef typename std::vector< CGAL_vertex_iterator > Vector_vertex_iterator; + typedef typename std::map< Vertex_handle, CGAL_vertex_iterator > Vector_vertex_iterator; private: /** \brief Map to switch from CGAL vertex iterator to simplex tree vertex handle.*/ @@ -128,28 +137,13 @@ class Alpha_complex : public Simplex_tree<> { Alpha_complex(const std::string& off_file_name, Filtration_value max_alpha_square = std::numeric_limits::infinity()) : triangulation_(nullptr) { - Gudhi::Delaunay_triangulation_off_reader off_reader(off_file_name); + Gudhi::Points_off_reader off_reader(off_file_name); if (!off_reader.is_valid()) { std::cerr << "Alpha_complex - Unable to read file " << off_file_name << "\n"; exit(-1); // ----- >> } - triangulation_ = off_reader.get_complex(); - init(max_alpha_square); - } - /** \brief Alpha_complex constructor from a Delaunay triangulation. - * - * @param[in] triangulation_ptr Pointer on a - * CGAL::Delaunay_triangulation \cite cgal:hdj-t-15b. - * Alpha_complex takes ownership of the Delaunay_triangulation object, which must have been allocated using operator - * new. - * @param[in] max_alpha_square maximum for alpha square value. Default value is +\f$\infty\f$. - */ - Alpha_complex(Delaunay_triangulation* triangulation_ptr, - Filtration_value max_alpha_square = std::numeric_limits::infinity()) - : triangulation_(triangulation_ptr) { - init(max_alpha_square); + init_from_range(off_reader.get_point_cloud(), max_alpha_square); } /** \brief Alpha_complex constructor from a list of points. @@ -164,23 +158,7 @@ class Alpha_complex : public Simplex_tree<> { Alpha_complex(const InputPointRange& points, Filtration_value max_alpha_square = std::numeric_limits::infinity()) : triangulation_(nullptr) { - auto first = std::begin(points); - auto last = std::end(points); - - if (first != last) { - // point_dimension function initialization - Point_Dimension point_dimension = kernel_.point_dimension_d_object(); - - // Delaunay triangulation is point dimension. - triangulation_ = new Delaunay_triangulation(point_dimension(*first)); - - size_type inserted = triangulation_->insert(first, last); - if (inserted != (last -first)) { - std::cerr << "Alpha_complex - insertion failed " << inserted << " != " << (last -first) << "\n"; - exit(-1); // ----- >> - } - init(max_alpha_square); - } + init_from_range(points, max_alpha_square); } /** \brief Alpha_complex destructor. 
@@ -191,6 +169,12 @@ class Alpha_complex : public Simplex_tree<> { delete triangulation_; } + // Forbid copy/move constructor/assignment operator + Alpha_complex(const Alpha_complex& other) = delete; + Alpha_complex& operator= (const Alpha_complex& other) = delete; + Alpha_complex (Alpha_complex&& other) = delete; + Alpha_complex& operator= (Alpha_complex&& other) = delete; + /** \brief get_point returns the point corresponding to the vertex given as parameter. * * @param[in] vertex Vertex handle of the point to retrieve. @@ -202,6 +186,44 @@ class Alpha_complex : public Simplex_tree<> { } private: + template + void init_from_range(const InputPointRange& points, Filtration_value max_alpha_square) { + auto first = std::begin(points); + auto last = std::end(points); + if (first != last) { + // point_dimension function initialization + Point_Dimension point_dimension = kernel_.point_dimension_d_object(); + + // Delaunay triangulation is point dimension. + triangulation_ = new Delaunay_triangulation(point_dimension(*first)); + + std::vector points(first, last); + + // Creates a vector {0, 1, ..., N-1} + std::vector indices(boost::counting_iterator(0), + boost::counting_iterator(points.size())); + + // Sort indices considering CGAL spatial sort + typedef CGAL::Spatial_sort_traits_adapter_d Search_traits_d; + spatial_sort(indices.begin(),indices.end(),Search_traits_d(&(points[0]))); + + typename Delaunay_triangulation::Full_cell_handle hint; + for (auto index : indices) { + typename Delaunay_triangulation::Vertex_handle pos = triangulation_->insert(points[index], hint); + // Save index value as data to retrieve it after insertion + pos->data() = index; + hint = pos->full_cell(); + } + + if (triangulation_->number_of_vertices() != (last -first)) { + std::cerr << "Alpha_complex - insertion failed " << triangulation_->number_of_vertices() << " != " << + (last -first) << "\n"; + exit(-1); // ----- >> + } + init(max_alpha_square); + } + } + /** \brief Initialize the Alpha_complex from the Delaunay triangulation. * * @param[in] max_alpha_square maximum for alpha square value. 
@@ -233,18 +255,15 @@ class Alpha_complex : public Simplex_tree<> { // -------------------------------------------------------------------------------------------- // double map to retrieve simplex tree vertex handles from CGAL vertex iterator and vice versa - // Start to insert at handle = 0 - default integer value - Vertex_handle vertex_handle = Vertex_handle(); // Loop on triangulation vertices list for (CGAL_vertex_iterator vit = triangulation_->vertices_begin(); vit != triangulation_->vertices_end(); ++vit) { if (!triangulation_->is_infinite(*vit)) { #ifdef DEBUG_TRACES - std::cout << "Vertex insertion - " << vertex_handle << " -> " << vit->point() << std::endl; + std::cout << "Vertex insertion - " << vit->data() << " -> " << vit->point() << std::endl; #endif // DEBUG_TRACES - vertex_iterator_to_handle_.emplace(vit, vertex_handle); - vertex_handle_to_iterator_.push_back(vit); - vertex_handle++; + vertex_iterator_to_handle_.emplace(vit, vit->data()); + vertex_handle_to_iterator_.emplace(vit->data(), vit); } } // -------------------------------------------------------------------------------------------- diff --git a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp index 315582d1..80b39924 100644 --- a/src/Alpha_complex/test/Alpha_complex_unit_test.cpp +++ b/src/Alpha_complex/test/Alpha_complex_unit_test.cpp @@ -32,8 +32,6 @@ #include #include -// to construct a Delaunay_triangulation from a OFF file -#include #include // Use dynamic_dimension_tag for the user to be able to set dimension diff --git a/src/Persistent_cohomology/example/alpha_complex_persistence.cpp b/src/Persistent_cohomology/example/alpha_complex_persistence.cpp index d2f9a4a2..8f9f077c 100644 --- a/src/Persistent_cohomology/example/alpha_complex_persistence.cpp +++ b/src/Persistent_cohomology/example/alpha_complex_persistence.cpp @@ -1,8 +1,7 @@ #include #include -// to construct a Delaunay_triangulation from a OFF file -#include + #include #include diff --git a/src/common/example/CGAL_points_off_reader.cpp b/src/common/example/CGAL_points_off_reader.cpp new file mode 100644 index 00000000..076afd5b --- /dev/null +++ b/src/common/example/CGAL_points_off_reader.cpp @@ -0,0 +1,43 @@ +#include + +// For CGAL points type in dimension d +// cf. 
http://doc.cgal.org/latest/Kernel_d/classCGAL_1_1Point__d.html +#include + +#include +#include + +typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Kernel; +typedef typename Kernel::Point_d Point_d; + +void usage(int argc, char * const progName) { + std::cerr << "Error: Number of arguments (" << argc << ") is not correct" << std::endl; + std::cerr << "Usage: " << progName << " inputFile.off" << std::endl; + exit(-1); +} + +int main(int argc, char **argv) { + if (argc != 2) usage(argc, (argv[0] - 1)); + + std::string offInputFile(argv[1]); + // Read the OFF file (input file name given as parameter) and triangulate points + Gudhi::Points_off_reader off_reader(offInputFile); + // Check the read operation was correct + if (!off_reader.is_valid()) { + std::cerr << "Unable to read file " << offInputFile << std::endl; + exit(-1); + } + + // Retrieve the triangulation + std::vector point_cloud = off_reader.get_point_cloud(); + + int n = 0; + for (auto point : point_cloud) { + std::cout << "Point[" << n << "] = "; + for (int i = 0; i < point.dimension(); i++) + std::cout << point[i] << " "; + std::cout << "\n"; + ++n; + } + return 0; +} diff --git a/src/common/example/CMakeLists.txt b/src/common/example/CMakeLists.txt index 91e78ea2..2914756e 100644 --- a/src/common/example/CMakeLists.txt +++ b/src/common/example/CMakeLists.txt @@ -9,15 +9,9 @@ if(CGAL_FOUND) message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") include( ${EIGEN3_USE_FILE} ) - add_executable ( dtoffrw Delaunay_triangulation_off_rw.cpp ) - target_link_libraries(dtoffrw ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) - add_test(dtoffrw ${CMAKE_CURRENT_BINARY_DIR}/dtoffrw ${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off ${CMAKE_CURRENT_BINARY_DIR}/result.off) - - if (DIFF_PATH) - # Do not forget to copy test results files in current binary dir - file(COPY "dtoffrw_alphashapedoc_result.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) - add_test(dtoffrw_result_off_diff_files ${DIFF_PATH} ${CMAKE_CURRENT_BINARY_DIR}/dtoffrw_alphashapedoc_result.off ${CMAKE_CURRENT_BINARY_DIR}/result.off) - endif() + add_executable ( cgaloffreader CGAL_points_off_reader.cpp ) + target_link_libraries(cgaloffreader ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) + add_test(cgaloffreader ${CMAKE_CURRENT_BINARY_DIR}/cgaloffreader ${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off) else() message(WARNING "Eigen3 not found. 
Version 3.1.0 is required for Alpha shapes feature.") diff --git a/src/common/example/Delaunay_triangulation_off_rw.cpp b/src/common/example/Delaunay_triangulation_off_rw.cpp deleted file mode 100644 index 4c7a9aaf..00000000 --- a/src/common/example/Delaunay_triangulation_off_rw.cpp +++ /dev/null @@ -1,54 +0,0 @@ -// to construct a Delaunay_triangulation from a OFF file -#include - -#include -#include - -#include -#include - -// Use dynamic_dimension_tag for the user to be able to set dimension -typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > K; -typedef CGAL::Delaunay_triangulation T; -// The triangulation uses the default instantiation of the -// TriangulationDataStructure template parameter - -void usage(char * const progName) { - std::cerr << "Usage: " << progName << " inputFile.off outputFile.off" << std::endl; - exit(-1); -} - -int main(int argc, char **argv) { - if (argc != 3) { - std::cerr << "Error: Number of arguments (" << argc << ") is not correct" << std::endl; - usage(argv[0]); - } - - std::string offInputFile(argv[1]); - // Read the OFF file (input file name given as parameter) and triangulates points - Gudhi::Delaunay_triangulation_off_reader off_reader(offInputFile); - // Check the read operation was correct - if (!off_reader.is_valid()) { - std::cerr << "Unable to read file " << offInputFile << std::endl; - exit(-1); - } - - // Retrieve the triangulation - T* triangulation = off_reader.get_complex(); - // Operations on triangulation - std::cout << "Number of vertices= " << triangulation->number_of_vertices() << std::endl; - std::cout << "Number of finite full cells= " << triangulation->number_of_finite_full_cells() << std::endl; - - std::string outFileName(argv[2]); - std::string offOutputFile(outFileName); - // Write the OFF file (output file name given as parameter) with the points and triangulated cells as faces - Gudhi::Delaunay_triangulation_off_writer off_writer(offOutputFile, triangulation); - - // Check the write operation was correct - if (!off_writer.is_valid()) { - std::cerr << "Unable to write file " << offOutputFile << std::endl; - exit(-1); - } - - return 0; -} diff --git a/src/common/example/cgaloffreader_result.txt b/src/common/example/cgaloffreader_result.txt new file mode 100644 index 00000000..1deb8dbd --- /dev/null +++ b/src/common/example/cgaloffreader_result.txt @@ -0,0 +1,7 @@ +Point[0] = 1 1 +Point[1] = 7 0 +Point[2] = 4 6 +Point[3] = 9 6 +Point[4] = 0 14 +Point[5] = 2 19 +Point[6] = 9 17 diff --git a/src/common/example/dtoffrw_alphashapedoc_result.off b/src/common/example/dtoffrw_alphashapedoc_result.off deleted file mode 100644 index d1839a43..00000000 --- a/src/common/example/dtoffrw_alphashapedoc_result.off +++ /dev/null @@ -1,15 +0,0 @@ -nOFF -2 7 6 0 -9 17 -0 14 -1 1 -2 19 -4 6 -9 6 -7 0 -3 5 0 4 -3 0 1 4 -3 3 1 0 -3 4 1 2 -3 5 4 6 -3 6 4 2 diff --git a/src/common/example/dtoffrw_alphashapedoc_result.txt b/src/common/example/dtoffrw_alphashapedoc_result.txt deleted file mode 100644 index 8e659740..00000000 --- a/src/common/example/dtoffrw_alphashapedoc_result.txt +++ /dev/null @@ -1,2 +0,0 @@ -Number of vertices= 7 -Number of finite full cells= 6 diff --git a/src/common/include/gudhi/Delaunay_triangulation_off_io.h b/src/common/include/gudhi/Delaunay_triangulation_off_io.h deleted file mode 100644 index 50be9a59..00000000 --- a/src/common/include/gudhi/Delaunay_triangulation_off_io.h +++ /dev/null @@ -1,348 +0,0 @@ -/* This file is part of the Gudhi Library. 
The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Vincent Rouvreau - * - * Copyright (C) 2015 INRIA Saclay (France) - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ -#ifndef DELAUNAY_TRIANGULATION_OFF_IO_H_ -#define DELAUNAY_TRIANGULATION_OFF_IO_H_ - -#include -#include -#include -#include - -#include - -#include - -#include "gudhi/Off_reader.h" - -namespace Gudhi { - -/** - * \class Delaunay_triangulation_off_visitor_reader Delaunay_triangulation_off_io.h gudhi/Delaunay_triangulation_off_io.h - * \brief OFF file visitor implementation according to Off_reader in order to construct a CGAL Delaunay triangulation. - * - * For more informations on CGAL Delaunay triangulation, please refer to the corresponding chapter in page - * http://doc.cgal.org/latest/Triangulation/ - */ -template -class Delaunay_triangulation_off_visitor_reader { - private: - Complex* complex_; - typedef typename Complex::Point_d Point_d; - typedef typename Complex::size_type size_type; - std::vector point_cloud; - - public: - // TODO(VR) : Pass a Complex as a parameter is required, even if not used. Otherwise, compilation is KO. - - /** \brief Delaunay_triangulation_off_visitor_reader constructor - * - * @param[in] complex_ptr_ pointer on a Delaunay triangulation. - */ - Delaunay_triangulation_off_visitor_reader(Complex* complex_ptr_) - : complex_(nullptr) { } - - /** \brief Off_reader visitor init implementation. - * - * The init parameters are set from OFF file header. - * Dimension value is required in order to construct Delaunay triangulation. - * - * @param[in] dim space dimension of vertices. - * @param[in] num_vertices number of vertices in the OFF file (not used). - * @param[in] num_faces number of faces in the OFF file (not used). - * @param[in] num_edges number of edges in the OFF file (not used). - */ - void init(int dim, int num_vertices, int num_faces, int num_edges) { -#ifdef DEBUG_TRACES - std::cout << "Delaunay_triangulation_off_visitor_reader::init - dim=" << dim << " - num_vertices=" << - num_vertices << " - num_faces=" << num_faces << " - num_edges=" << num_edges << std::endl; -#endif // DEBUG_TRACES - if (num_faces > 0) { - std::cerr << "Delaunay_triangulation_off_visitor_reader::init faces are not taken into account from OFF " << - "file for Delaunay triangulation - faces are computed.\n"; - } - if (num_edges > 0) { - std::cerr << "Delaunay_triangulation_off_visitor_reader::init edges are not taken into account from OFF " << - "file for Delaunay triangulation - edges are computed.\n"; - } - // Complex construction with dimension from file - complex_ = new Complex(dim); - } - - /** \brief Off_reader visitor point implementation. - * - * The point function is called on each vertex line from OFF file. - * This function inserts the vertex in the Delaunay triangulation. - * - * @param[in] point vector of vertex coordinates. 
- */ - void point(const std::vector& point) { -#ifdef DEBUG_TRACES - std::cout << "Delaunay_triangulation_off_visitor_reader::point "; - for (auto coordinate : point) { - std::cout << coordinate << " | "; - } - std::cout << std::endl; -#endif // DEBUG_TRACES - // Fill the point cloud - point_cloud.push_back(Point_d(point.size(), point.begin(), point.end())); - } - - // Off_reader visitor maximal_face implementation - not used - void maximal_face(const std::vector& face) { - // For Delaunay Triangulation, only points are read - } - - // Off_reader visitor done implementation - void done() { - // It is advised to insert all the points at a time in a Delaunay Triangulation because points are sorted at the - // beginning of the insertion - size_type inserted = complex_->insert(point_cloud.begin(), point_cloud.end()); - if (inserted != (point_cloud.end() -point_cloud.begin())) { - std::cerr << "Delaunay_triangulation_off_visitor_reader::done - insertion failed " << inserted << " != " << - (point_cloud.end() -point_cloud.begin()) << "\n"; - } - } - - /** \brief Returns the constructed Delaunay triangulation. - * - * @return A pointer on the Delaunay triangulation. Default value is nullptr. - */ - Complex* get_complex() const { - return complex_; - } - - private: - template - size_type insert_with_index(const PointRangeIterator& first, const PointRangeIterator& last) { - size_type vertices_before_insertion = complex_->number_of_vertices(); - std::vector points(first, last); - - std::vector indices; - indices.reserve(points.size()); - - // Creates a vector {0, 1, ..., N-1} - std::copy(boost::counting_iterator(0), boost::counting_iterator(points.size()), - std::back_inserter(indices)); - - // Sort indices considering CGAL spatial sort - typedef CGAL::Spatial_sort_traits_adapter_d Search_traits_d; - spatial_sort(indices.begin(),indices.end(),Search_traits_d(&(points[0]))); - - typename Delaunay_triangulation::Full_cell_handle hint; - for (typename std::vector::const_iterator it = indices.begin(), end = indices.end(); - it != end; ++it) { - typename Delaunay_triangulation::Vertex_handle pos = complex_->insert(points[*it], hint); - // Save index value as data to retrieve it after insertion - pos->data() = *it; - hint = pos->full_cell(); - } - - return (complex_->number_of_vertices() - vertices_before_insertion); - } - -}; - -/** - * \class Delaunay_triangulation_off_reader Delaunay_triangulation_off_io.h gudhi/Delaunay_triangulation_off_io.h - * \brief OFF file reader implementation in order to construct a Delaunay triangulation. - * - * This class is using the Delaunay_triangulation_off_visitor_reader to visit the OFF file according to Off_reader. - * - * For more informations on CGAL Delaunay triangulation, please refer to the corresponding chapter in page - * http://doc.cgal.org/latest/Triangulation/ - * - * \section Example - * - * This example loads points from an OFF file and builds the Delaunay triangulation. - * Then, it is asked to display the number of vertices and finites full cells from the Delaunay triangulation. - * - * \include Delaunay_triangulation_off_rw.cpp - * - * When launching: - * - * \code $> ./dtoffrw ../../data/points/alphacomplexdoc.off triangulated.off - * \endcode - * - * the program output is: - * - * \include dtoffrw_alphashapedoc_result.txt - */ -template -class Delaunay_triangulation_off_reader { - public: - /** \brief Reads the OFF file and constructs the Delaunay triangulation from the points - * that are in the OFF file. 
- * - * @param[in] name_file OFF file to read. - * - * \post Check with is_valid() function to see if read operation was successful. - */ - Delaunay_triangulation_off_reader(const std::string & name_file) - : valid_(false) { - std::ifstream stream(name_file); - if (stream.is_open()) { - Delaunay_triangulation_off_visitor_reader off_visitor(complex_); - Off_reader off_reader(stream); - valid_ = off_reader.read(off_visitor); - if (valid_) { - complex_ = off_visitor.get_complex(); - if (complex_ == nullptr) { - std::cerr << "Delaunay_triangulation_off_reader::Delaunay_triangulation_off_reader off_visitor returns " << - "an empty pointer\n"; - valid_ = false; - } - } - } else { - std::cerr << "Delaunay_triangulation_off_reader::Delaunay_triangulation_off_reader could not open file " << - name_file << "\n"; - } - } - - /** \brief Returns if the OFF file read operation was successful or not. - * - * @return OFF file read status. - */ - bool is_valid() const { - return valid_; - } - - /** \brief Returns the constructed Delaunay triangulation. - * - * @return A pointer on the Delaunay triangulation. Default value is nullptr. - */ - Complex* get_complex() const { - if (valid_) - return complex_; - return nullptr; - } - - private: - /** \brief OFF file read status.*/ - bool valid_; - /** \brief A pointer on the Delaunay triangulation.*/ - Complex* complex_; -}; - -/** - * \class Delaunay_triangulation_off_writer Delaunay_triangulation_off_io.h gudhi/Delaunay_triangulation_off_io.h - * \brief OFF file writer from a Delaunay triangulation. - * - * This class constructs the OFF file header according to http://www.geomview.org/docs/html/OFF.html - * - * The header is followed by the list of points coordinates (Delaunay triangulation vertices) - * - * And finally is followed by the list of faces (Delaunay triangulation finite full cells) - * - * For more informations on CGAL Delaunay triangulation, please refer to the corresponding chapter in page - * http://doc.cgal.org/latest/Triangulation/ - * - * \section Example - * - * This example loads points from an OFF file and builds the Delaunay triangulation. - * Then, the Delaunay triangulation is saved in a new file including the triangulation as a list of faces. - * - * \include Delaunay_triangulation_off_rw.cpp - * - * When launching: - * - * \code $> ./dtoffrw ../../data/points/alphashapedoc.off triangulated.off - * \endcode - * - * The result will be an OFF file of dimension 2 with the 7 points from alphashapedoc.off followed by the 6 - * triangulations of dimension 3 (the first value on each faces): - * \include dtoffrw_alphashapedoc_result.off - */ -template -class Delaunay_triangulation_off_writer { - public: - typedef typename Complex::Point Point; - - /** \brief Writes the OFF file from the Delaunay triangulation. - * - * @param[in] name_file OFF file to write. - * @param[in] complex_ptr pointer on a Delaunay triangulation. - * - * \post Check with is_valid() function to see if write operation was successful. - */ - Delaunay_triangulation_off_writer(const std::string & name_file, Complex* complex_ptr) - : valid_(false) { - std::ofstream stream(name_file); - if (stream.is_open()) { - if (complex_ptr->current_dimension() == 3) { - // OFF header - stream << "OFF" << std::endl; - // no endl on next line - don't know why... - stream << complex_ptr->number_of_vertices() << " " << complex_ptr->number_of_finite_full_cells() << " 0"; - } else { - // nOFF header - stream << "nOFF" << std::endl; - // no endl on next line - don't know why... 
- stream << complex_ptr->current_dimension() << " " << complex_ptr->number_of_vertices() << " " << - complex_ptr->number_of_finite_full_cells() << " 0"; - } - - // bimap to retrieve vertex handles from points and vice versa - std::map< Point, int > points_to_vh; - // Start to insert at default handle value - int vertex_handle = int(); - - // Points list - for (auto vit = complex_ptr->vertices_begin(); vit != complex_ptr->vertices_end(); ++vit) { - for (auto Coord = vit->point().cartesian_begin(); Coord != vit->point().cartesian_end(); ++Coord) { - stream << *Coord << " "; - } - stream << std::endl; - points_to_vh[vit->point()] = vertex_handle; - vertex_handle++; - } - - for (auto cit = complex_ptr->finite_full_cells_begin(); cit != complex_ptr->finite_full_cells_end(); ++cit) { - stream << std::distance(cit->vertices_begin(), cit->vertices_end()) << " "; - for (auto vit = cit->vertices_begin(); vit != cit->vertices_end(); ++vit) { - stream << points_to_vh[(*vit)->point()] - 1 << " "; - } - stream << std::endl; - } - stream.close(); - valid_ = true; - } else { - std::cerr << "Delaunay_triangulation_off_writer::Delaunay_triangulation_off_writer could not open file " << - name_file << "\n"; - } - } - - /** \brief Returns if the OFF write operation was successful or not. - * - * @return OFF file write status. - */ - bool is_valid() const { - return valid_; - } - - private: - /* \brief OFF file write status. */ - bool valid_; -}; - -} // namespace Gudhi - -#endif // DELAUNAY_TRIANGULATION_OFF_IO_H_ diff --git a/src/common/include/gudhi/Points_off_io.h b/src/common/include/gudhi/Points_off_io.h new file mode 100644 index 00000000..d9f9a74b --- /dev/null +++ b/src/common/include/gudhi/Points_off_io.h @@ -0,0 +1,178 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Vincent Rouvreau + * + * Copyright (C) 2015 INRIA Saclay (France) + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ +#ifndef POINTS_OFF_IO_H_ +#define POINTS_OFF_IO_H_ + +#include +#include +#include +#include + +#include + +namespace Gudhi { + +/** + * \brief OFF file visitor implementation according to Off_reader in order to read points from an OFF file. + */ +template +class Points_off_visitor_reader { + private: + std::vector point_cloud; + + public: + /** \brief Off_reader visitor init implementation. + * + * The init parameters are set from OFF file header. + * Dimension value is required in order to construct Alpha complex. + * + * @param[in] dim space dimension of vertices. + * @param[in] num_vertices number of vertices in the OFF file (not used). + * @param[in] num_faces number of faces in the OFF file (not used). + * @param[in] num_edges number of edges in the OFF file (not used). 
+ */ + void init(int dim, int num_vertices, int num_faces, int num_edges) { +#ifdef DEBUG_TRACES + std::cout << "Points_off_visitor_reader::init - dim=" << dim << " - num_vertices=" << + num_vertices << " - num_faces=" << num_faces << " - num_edges=" << num_edges << std::endl; +#endif // DEBUG_TRACES + if (num_faces > 0) { + std::cerr << "Points_off_visitor_reader::init faces are not taken into account from OFF file for Points.\n"; + } + if (num_edges > 0) { + std::cerr << "Points_off_visitor_reader::init edges are not taken into account from OFF file for Points.\n"; + } + } + + /** \brief Off_reader visitor point implementation. + * + * The point function is called on each vertex line from OFF file. + * This function inserts the vertex in the Alpha complex. + * + * @param[in] point vector of vertex coordinates. + */ + void point(const std::vector& point) { +#ifdef DEBUG_TRACES + std::cout << "Points_off_visitor_reader::point "; + for (auto coordinate : point) { + std::cout << coordinate << " | "; + } + std::cout << std::endl; +#endif // DEBUG_TRACES + // Fill the point cloud + point_cloud.push_back(Point_d(point.size(), point.begin(), point.end())); + } + + // Off_reader visitor maximal_face implementation - Only points are read + void maximal_face(const std::vector& face) { } + + // Off_reader visitor done implementation - Only points are read + void done() { } + + /** \brief Point cloud getter. + * + * @return point_cloud. + */ + const std::vector& get_point_cloud() { + return point_cloud; + } + +}; + +/** + * \brief OFF file reader implementation in order to read points from an OFF file. + * + * This class is using the Points_off_visitor_reader to visit the OFF file according to Off_reader. + * + * Point_d must have a constructor with the following form: + * + * \code template Point_d::Point_d(int d, InputIterator first, InputIterator last) \endcode + * + * where d is the point dimension. + * + * \section Example + * + * This example loads points from an OFF file and builds a vector of CGAL points in dimension d. + * Then, it is asked to display the points. + * + * \include CGAL_points_off_reader.cpp + * + * When launching: + * + * \code $> ./cgaloffreader ../../data/points/alphacomplexdoc.off + * \endcode + * + * the program output is: + * + * \include cgaloffreader_result.txt + */ +template +class Points_off_reader { + public: + /** \brief Reads the OFF file and constructs the Alpha complex from the points + * that are in the OFF file. + * + * @param[in] name_file OFF file to read. + * + * \post Check with is_valid() function to see if read operation was successful. + */ + Points_off_reader(const std::string& name_file) + : valid_(false) { + std::ifstream stream(name_file); + if (stream.is_open()) { + Off_reader off_reader(stream); + Points_off_visitor_reader off_visitor; + valid_ = off_reader.read(off_visitor); + if (valid_) { + point_cloud = off_visitor.get_point_cloud(); + } + } else { + std::cerr << "Points_off_reader::Points_off_reader could not open file " << name_file << "\n"; + } + } + + /** \brief Returns if the OFF file read operation was successful or not. + * + * @return OFF file read status. + */ + bool is_valid() const { + return valid_; + } + + /** \brief Point cloud getter. + * + * @return point_cloud. 
+ */ + const std::vector& get_point_cloud() { + return point_cloud; + } + + private: + /** \brief point_cloud.*/ + std::vector point_cloud; + /** \brief OFF file read status.*/ + bool valid_; +}; + +} // namespace Gudhi + +#endif // POINTS_OFF_IO_H_ diff --git a/src/common/test/CMakeLists.txt b/src/common/test/CMakeLists.txt index 12eecda8..6205f0e4 100644 --- a/src/common/test/CMakeLists.txt +++ b/src/common/test/CMakeLists.txt @@ -18,22 +18,16 @@ if(CGAL_FOUND) message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") include( ${EIGEN3_USE_FILE} ) - add_executable ( dtoffrw_UT dtoffrw_unit_test.cpp ) - target_link_libraries(dtoffrw_UT ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) + add_executable ( poffreader_UT points_off_reader_unit_test.cpp ) + target_link_libraries(poffreader_UT ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) # Do not forget to copy test files in current binary dir file(COPY "${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) # Unitary tests - add_test(dtoffrw_UT ${CMAKE_CURRENT_BINARY_DIR}/dtoffrw_UT + add_test(poffreader_UT ${CMAKE_CURRENT_BINARY_DIR}/poffreader_UT # XML format for Jenkins xUnit plugin - --log_format=XML --log_sink=${CMAKE_SOURCE_DIR}/dtoffrw_UT.xml --log_level=test_suite --report_level=no) - - if (DIFF_PATH) - # Do not forget to copy test result files in current binary dir - file(COPY "dtoffrw_alphashapedoc_result.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) - add_test(dtoffrw_diff_files_UT ${DIFF_PATH} ${CMAKE_CURRENT_BINARY_DIR}/UT.off ${CMAKE_CURRENT_BINARY_DIR}/dtoffrw_alphashapedoc_result.off) - endif() + --log_format=XML --log_sink=${CMAKE_SOURCE_DIR}/poffreader_UT.xml --log_level=test_suite --report_level=no) else() message(WARNING "Eigen3 not found. Version 3.1.0 is required for Alpha shapes feature.") diff --git a/src/common/test/dtoffrw_alphashapedoc_result.off b/src/common/test/dtoffrw_alphashapedoc_result.off index d1839a43..1deb8dbd 100644 --- a/src/common/test/dtoffrw_alphashapedoc_result.off +++ b/src/common/test/dtoffrw_alphashapedoc_result.off @@ -1,15 +1,7 @@ -nOFF -2 7 6 0 -9 17 -0 14 -1 1 -2 19 -4 6 -9 6 -7 0 -3 5 0 4 -3 0 1 4 -3 3 1 0 -3 4 1 2 -3 5 4 6 -3 6 4 2 +Point[0] = 1 1 +Point[1] = 7 0 +Point[2] = 4 6 +Point[3] = 9 6 +Point[4] = 0 14 +Point[5] = 2 19 +Point[6] = 9 17 diff --git a/src/common/test/dtoffrw_unit_test.cpp b/src/common/test/dtoffrw_unit_test.cpp deleted file mode 100644 index f682df1a..00000000 --- a/src/common/test/dtoffrw_unit_test.cpp +++ /dev/null @@ -1,90 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Vincent Rouvreau - * - * Copyright (C) 2015 INRIA Saclay (France) - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . 
- */ - -// to construct a Delaunay_triangulation from a OFF file -#include "gudhi/Delaunay_triangulation_off_io.h" - -#include -#include - -#include - -#include -#include - -#define BOOST_TEST_DYN_LINK -#define BOOST_TEST_MODULE "delaunay_triangulation_off_read_write" -#include - -// Use dynamic_dimension_tag for the user to be able to set dimension -typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > K; -typedef CGAL::Delaunay_triangulation T; - -BOOST_AUTO_TEST_CASE( Delaunay_triangulation_doc_test ) -{ - // Read the OFF file (input file name given as parameter) and triangulates points - Gudhi::Delaunay_triangulation_off_reader off_reader("alphacomplexdoc.off"); - // Check the read operation was correct - BOOST_CHECK(off_reader.is_valid()); - - // Retrieve the triangulation - T* triangulation = off_reader.get_complex(); - BOOST_CHECK(triangulation != nullptr); - // Operations on triangulation - BOOST_CHECK(triangulation->number_of_vertices() == 7); - BOOST_CHECK(triangulation->number_of_finite_full_cells() == 6); - - // Write the OFF file (output file name given as parameter) with the points and triangulated cells as faces - Gudhi::Delaunay_triangulation_off_writer off_writer("UT.off", triangulation); - - // Check the write operation was correct - BOOST_CHECK(off_writer.is_valid()); - - delete triangulation; -} - -BOOST_AUTO_TEST_CASE( Delaunay_triangulation_unexisting_file_read_test ) -{ - Gudhi::Delaunay_triangulation_off_reader off_reader("some_impossible_weird_file_name.off"); - // Check the read operation was correct - BOOST_CHECK(!off_reader.is_valid()); - T* triangulation = off_reader.get_complex(); - BOOST_CHECK(triangulation == nullptr); -} - -BOOST_AUTO_TEST_CASE( Delaunay_triangulation_unexisting_file_write_test ) -{ - // Read the OFF file (input file name given as parameter) and triangulates points - Gudhi::Delaunay_triangulation_off_reader off_reader("alphacomplexdoc.off"); - - // Retrieve the triangulation - T* triangulation = off_reader.get_complex(); - - // Write the OFF file (output file name given as parameter) with the points and triangulated cells as faces - Gudhi::Delaunay_triangulation_off_writer off_writer("/some_impossible_weird_directory_name/another_weird_directory_name/some_impossible_weird_file_name.off", triangulation); - - // Check the write operation was correct - BOOST_CHECK(!off_writer.is_valid()); - - delete triangulation; -} - diff --git a/src/common/test/points_off_reader_unit_test.cpp b/src/common/test/points_off_reader_unit_test.cpp new file mode 100644 index 00000000..73e19cbc --- /dev/null +++ b/src/common/test/points_off_reader_unit_test.cpp @@ -0,0 +1,78 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Vincent Rouvreau + * + * Copyright (C) 2015 INRIA Saclay (France) + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +#include + +// For CGAL points type in dimension d +// cf. http://doc.cgal.org/latest/Kernel_d/classCGAL_1_1Point__d.html +#include + +#include +#include +#include + +#define BOOST_TEST_DYN_LINK +#define BOOST_TEST_MODULE "points_off_read_write" +#include + +typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Kernel; +typedef typename Kernel::Point_d Point_d; + +BOOST_AUTO_TEST_CASE( points_doc_test ) +{ + // Read the OFF file (input file name given as parameter) and triangulates points + Gudhi::Points_off_reader off_reader("alphacomplexdoc.off"); + // Check the read operation was correct + BOOST_CHECK(off_reader.is_valid()); + + // Retrieve the triangulation + std::vector point_cloud = off_reader.get_point_cloud(); + BOOST_CHECK(point_cloud.size() == 7); + + std::vector expected_points; + std::vector point = {1.0, 1.0}; + expected_points.push_back(Point_d(2, point.begin(), point.end())); + point = {7.0, 0.0}; + expected_points.push_back(Point_d(2, point.begin(), point.end())); + point = {4.0, 6.0}; + expected_points.push_back(Point_d(2, point.begin(), point.end())); + point = {9.0, 6.0}; + expected_points.push_back(Point_d(2, point.begin(), point.end())); + point = {0.0, 14.0}; + expected_points.push_back(Point_d(2, point.begin(), point.end())); + point = {2.0, 19.0}; + expected_points.push_back(Point_d(2, point.begin(), point.end())); + point = {9.0, 17.0}; + expected_points.push_back(Point_d(2, point.begin(), point.end())); + + BOOST_CHECK(point_cloud == expected_points); +} + +BOOST_AUTO_TEST_CASE( Delaunay_triangulation_unexisting_file_read_test ) +{ + Gudhi::Points_off_reader off_reader("some_impossible_weird_file_name.off"); + // Check the read operation was correct + BOOST_CHECK(!off_reader.is_valid()); + + std::vector point_cloud = off_reader.get_point_cloud(); + BOOST_CHECK(point_cloud.size() == 0); +} -- cgit v1.2.3 From 01d50e3c77a6f8a2aa7e538f90512482af85d317 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Wed, 6 Apr 2016 11:18:19 +0000 Subject: CGAL version requires 4.7 to access CGAL Point_d dimension() function git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/alphashapes@1099 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: fe6bb4958f7d98427fb705b7c02b64bddb7e190d --- src/common/example/CMakeLists.txt | 4 ++-- src/common/test/CMakeLists.txt | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) (limited to 'src/common') diff --git a/src/common/example/CMakeLists.txt b/src/common/example/CMakeLists.txt index 2914756e..5aeaa8c6 100644 --- a/src/common/example/CMakeLists.txt +++ b/src/common/example/CMakeLists.txt @@ -1,9 +1,9 @@ cmake_minimum_required(VERSION 2.6) project(GUDHIDelaunayTriangulationOffFileReadWrite) -# need CGAL 4.6 +# need CGAL 4.7 if(CGAL_FOUND) - if (NOT CGAL_VERSION VERSION_LESS 4.6.0) + if (NOT CGAL_VERSION VERSION_LESS 4.7.0) find_package(Eigen3 3.1.0) if (EIGEN3_FOUND) message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") diff --git a/src/common/test/CMakeLists.txt b/src/common/test/CMakeLists.txt index 6205f0e4..789546ae 100644 --- a/src/common/test/CMakeLists.txt +++ b/src/common/test/CMakeLists.txt @@ -10,9 +10,9 @@ if (GPROF_PATH) set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -pg") endif() -# need CGAL 4.6 +# need CGAL 4.7 if(CGAL_FOUND) - if (NOT CGAL_VERSION VERSION_LESS 4.6.0) + if (NOT CGAL_VERSION VERSION_LESS 4.7.0) find_package(Eigen3 3.1.0) if (EIGEN3_FOUND) message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") -- cgit v1.2.3 From 8e76285d6d2ea35a9709117896390b718f1f15f1 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: 
Wed, 6 Apr 2016 21:11:38 +0000 Subject: Marc review fix cpplint/cppcheck fix git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/alphashapes@1102 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: f47277da05debb56dd9a4a96bf89ec7b3ffa2e27 --- src/Alpha_complex/include/gudhi/Alpha_complex.h | 22 ++++++++-------------- .../concept/FilteredComplex.h | 2 +- src/common/example/CGAL_points_off_reader.cpp | 1 + src/common/include/gudhi/Off_reader.h | 1 + src/common/include/gudhi/Points_off_io.h | 7 +++---- 5 files changed, 14 insertions(+), 19 deletions(-) (limited to 'src/common') diff --git a/src/Alpha_complex/include/gudhi/Alpha_complex.h b/src/Alpha_complex/include/gudhi/Alpha_complex.h index 21eb5f48..753fa58a 100644 --- a/src/Alpha_complex/include/gudhi/Alpha_complex.h +++ b/src/Alpha_complex/include/gudhi/Alpha_complex.h @@ -33,7 +33,6 @@ #include #include // isnan, fmax -//#include #include #include #include @@ -79,7 +78,7 @@ class Alpha_complex : public Simplex_tree<> { public: // Add an int in TDS to save point index in the structure typedef CGAL::Triangulation_data_structure, + CGAL::Triangulation_vertex, CGAL::Triangulation_full_cell > TDS; /** \brief A Delaunay triangulation of a set of points in \f$ \mathbb{R}^D\f$.*/ typedef CGAL::Delaunay_triangulation Delaunay_triangulation; @@ -111,13 +110,10 @@ class Alpha_complex : public Simplex_tree<> { // size_type type from CGAL. typedef typename Delaunay_triangulation::size_type size_type; - // Double map type to switch from CGAL vertex iterator to simplex tree vertex handle and vice versa. - typedef typename std::map< CGAL_vertex_iterator, Vertex_handle > Map_vertex_iterator_to_handle; + // Map type to switch from simplex tree vertex handle to CGAL vertex iterator. typedef typename std::map< Vertex_handle, CGAL_vertex_iterator > Vector_vertex_iterator; private: - /** \brief Map to switch from CGAL vertex iterator to simplex tree vertex handle.*/ - Map_vertex_iterator_to_handle vertex_iterator_to_handle_; /** \brief Vertex iterator vector to switch from simplex tree vertex handle to CGAL vertex iterator. 
* Vertex handles are inserted sequentially, starting at 0.*/ Vector_vertex_iterator vertex_handle_to_iterator_; @@ -198,15 +194,15 @@ class Alpha_complex : public Simplex_tree<> { triangulation_ = new Delaunay_triangulation(point_dimension(*first)); std::vector points(first, last); - + // Creates a vector {0, 1, ..., N-1} std::vector indices(boost::counting_iterator(0), boost::counting_iterator(points.size())); - + // Sort indices considering CGAL spatial sort typedef CGAL::Spatial_sort_traits_adapter_d Search_traits_d; - spatial_sort(indices.begin(),indices.end(),Search_traits_d(&(points[0]))); - + spatial_sort(indices.begin(), indices.end(), Search_traits_d(&(points[0]))); + typename Delaunay_triangulation::Full_cell_handle hint; for (auto index : indices) { typename Delaunay_triangulation::Vertex_handle pos = triangulation_->insert(points[index], hint); @@ -261,8 +257,6 @@ class Alpha_complex : public Simplex_tree<> { #ifdef DEBUG_TRACES std::cout << "Vertex insertion - " << vit->data() << " -> " << vit->point() << std::endl; #endif // DEBUG_TRACES - - vertex_iterator_to_handle_.emplace(vit, vit->data()); vertex_handle_to_iterator_.emplace(vit->data(), vit); } } @@ -278,10 +272,10 @@ class Alpha_complex : public Simplex_tree<> { for (auto vit = cit->vertices_begin(); vit != cit->vertices_end(); ++vit) { if (*vit != nullptr) { #ifdef DEBUG_TRACES - std::cout << " " << vertex_iterator_to_handle_[*vit]; + std::cout << " " << (*vit)->data(); #endif // DEBUG_TRACES // Vector of vertex construction for simplex_tree structure - vertexVector.push_back(vertex_iterator_to_handle_[*vit]); + vertexVector.push_back((*vit)->data()); } } #ifdef DEBUG_TRACES diff --git a/src/Persistent_cohomology/concept/FilteredComplex.h b/src/Persistent_cohomology/concept/FilteredComplex.h index e124d524..949aafc2 100644 --- a/src/Persistent_cohomology/concept/FilteredComplex.h +++ b/src/Persistent_cohomology/concept/FilteredComplex.h @@ -138,6 +138,6 @@ Filtration_simplex_range filtration_simplex_range(); * @todo use an enum? Just a bool? */ //int is_before_in_filtration(Simplex_handle s, Simplex_handle t); -/*************************************************/ +/*************************************************/ }; diff --git a/src/common/example/CGAL_points_off_reader.cpp b/src/common/example/CGAL_points_off_reader.cpp index 076afd5b..45e9f1e6 100644 --- a/src/common/example/CGAL_points_off_reader.cpp +++ b/src/common/example/CGAL_points_off_reader.cpp @@ -6,6 +6,7 @@ #include #include +#include typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Kernel; typedef typename Kernel::Point_d Point_d; diff --git a/src/common/include/gudhi/Off_reader.h b/src/common/include/gudhi/Off_reader.h index 2420ae72..4fcd2af2 100644 --- a/src/common/include/gudhi/Off_reader.h +++ b/src/common/include/gudhi/Off_reader.h @@ -31,6 +31,7 @@ #include #include #include +#include namespace Gudhi { diff --git a/src/common/include/gudhi/Points_off_io.h b/src/common/include/gudhi/Points_off_io.h index d9f9a74b..79287e3c 100644 --- a/src/common/include/gudhi/Points_off_io.h +++ b/src/common/include/gudhi/Points_off_io.h @@ -22,13 +22,13 @@ #ifndef POINTS_OFF_IO_H_ #define POINTS_OFF_IO_H_ +#include + #include #include #include #include -#include - namespace Gudhi { /** @@ -87,7 +87,7 @@ class Points_off_visitor_reader { // Off_reader visitor done implementation - Only points are read void done() { } - + /** \brief Point cloud getter. * * @return point_cloud. 
@@ -95,7 +95,6 @@ class Points_off_visitor_reader { const std::vector& get_point_cloud() { return point_cloud; } - }; /** -- cgit v1.2.3 From 3cabde73eaf1a29f36a6014bccc795349666346c Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Thu, 7 Apr 2016 16:07:03 +0000 Subject: get_point_cloud can be const git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/alphashapes@1105 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: c9b905e8f01c671964b4151883a6385e47adaa06 --- src/common/include/gudhi/Points_off_io.h | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'src/common') diff --git a/src/common/include/gudhi/Points_off_io.h b/src/common/include/gudhi/Points_off_io.h index 79287e3c..1bd5c1e6 100644 --- a/src/common/include/gudhi/Points_off_io.h +++ b/src/common/include/gudhi/Points_off_io.h @@ -92,7 +92,7 @@ class Points_off_visitor_reader { * * @return point_cloud. */ - const std::vector& get_point_cloud() { + const std::vector& get_point_cloud() const { return point_cloud; } }; @@ -161,7 +161,7 @@ class Points_off_reader { * * @return point_cloud. */ - const std::vector& get_point_cloud() { + const std::vector& get_point_cloud() const { return point_cloud; } -- cgit v1.2.3 From 437ccdf9616d91534af91cd8a0090b6f32d6e65b Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Thu, 14 Apr 2016 12:49:09 +0000 Subject: Add of src/common/include/gudhi/Points_3D_off_io.h to read specific 3D OFF Files for CGAL Point_3 Add an example to read 3D OFF Files for CGAL Point_3 Modify alpha_complex_3d_persistence.cpp to read OFF files Add periodic_alpha_complex_3d_persistence.cpp in Persistent_cohomology examples Add info about new examples in README git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/periodic_alpha_complex_3d@1116 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 6b423b598869a0f4f9c62d93c93b69efd93c0161 --- data/points/grid_10_10_10_in_0_1.off | 1002 ++++++++++++++++++++ data/points/tore3D_300.off | 302 ++++++ src/Persistent_cohomology/example/CMakeLists.txt | 8 +- src/Persistent_cohomology/example/README | 98 +- .../example/alpha_complex_3d_persistence.cpp | 24 +- .../example/alpha_complex_persistence.cpp | 2 +- .../periodic_alpha_complex_3d_persistence.cpp | 303 ++++++ src/common/example/CGAL_3D_points_off_reader.cpp | 41 + src/common/example/CGAL_points_off_reader.cpp | 18 +- src/common/example/CMakeLists.txt | 4 + src/common/include/gudhi/Points_3D_off_io.h | 206 ++++ src/common/include/gudhi/Points_off_io.h | 15 +- 12 files changed, 1990 insertions(+), 33 deletions(-) create mode 100644 data/points/grid_10_10_10_in_0_1.off create mode 100644 data/points/tore3D_300.off create mode 100644 src/Persistent_cohomology/example/periodic_alpha_complex_3d_persistence.cpp create mode 100644 src/common/example/CGAL_3D_points_off_reader.cpp create mode 100644 src/common/include/gudhi/Points_3D_off_io.h (limited to 'src/common') diff --git a/data/points/grid_10_10_10_in_0_1.off b/data/points/grid_10_10_10_in_0_1.off new file mode 100644 index 00000000..f90eb9df --- /dev/null +++ b/data/points/grid_10_10_10_in_0_1.off @@ -0,0 +1,1002 @@ +OFF +1000 0 0 +0 0 0 +0 0 0.1 +0 0 0.2 +0 0 0.3 +0 0 0.4 +0 0 0.5 +0 0 0.6 +0 0 0.7 +0 0 0.8 +0 0 0.9 +0 0.1 0 +0 0.1 0.1 +0 0.1 0.2 +0 0.1 0.3 +0 0.1 0.4 +0 0.1 0.5 +0 0.1 0.6 +0 0.1 0.7 +0 0.1 0.8 +0 0.1 0.9 +0 0.2 0 +0 0.2 0.1 +0 0.2 0.2 +0 0.2 0.3 +0 0.2 0.4 +0 0.2 0.5 +0 0.2 0.6 +0 0.2 0.7 +0 0.2 0.8 +0 0.2 0.9 +0 0.3 0 +0 0.3 0.1 +0 0.3 0.2 +0 0.3 0.3 +0 0.3 0.4 +0 0.3 0.5 +0 0.3 0.6 +0 0.3 0.7 +0 0.3 0.8 +0 0.3 0.9 +0 0.4 0 
+0 0.4 0.1 +0 0.4 0.2 +0 0.4 0.3 +0 0.4 0.4 +0 0.4 0.5 +0 0.4 0.6 +0 0.4 0.7 +0 0.4 0.8 +0 0.4 0.9 +0 0.5 0 +0 0.5 0.1 +0 0.5 0.2 +0 0.5 0.3 +0 0.5 0.4 +0 0.5 0.5 +0 0.5 0.6 +0 0.5 0.7 +0 0.5 0.8 +0 0.5 0.9 +0 0.6 0 +0 0.6 0.1 +0 0.6 0.2 +0 0.6 0.3 +0 0.6 0.4 +0 0.6 0.5 +0 0.6 0.6 +0 0.6 0.7 +0 0.6 0.8 +0 0.6 0.9 +0 0.7 0 +0 0.7 0.1 +0 0.7 0.2 +0 0.7 0.3 +0 0.7 0.4 +0 0.7 0.5 +0 0.7 0.6 +0 0.7 0.7 +0 0.7 0.8 +0 0.7 0.9 +0 0.8 0 +0 0.8 0.1 +0 0.8 0.2 +0 0.8 0.3 +0 0.8 0.4 +0 0.8 0.5 +0 0.8 0.6 +0 0.8 0.7 +0 0.8 0.8 +0 0.8 0.9 +0 0.9 0 +0 0.9 0.1 +0 0.9 0.2 +0 0.9 0.3 +0 0.9 0.4 +0 0.9 0.5 +0 0.9 0.6 +0 0.9 0.7 +0 0.9 0.8 +0 0.9 0.9 +0.1 0 0 +0.1 0 0.1 +0.1 0 0.2 +0.1 0 0.3 +0.1 0 0.4 +0.1 0 0.5 +0.1 0 0.6 +0.1 0 0.7 +0.1 0 0.8 +0.1 0 0.9 +0.1 0.1 0 +0.1 0.1 0.1 +0.1 0.1 0.2 +0.1 0.1 0.3 +0.1 0.1 0.4 +0.1 0.1 0.5 +0.1 0.1 0.6 +0.1 0.1 0.7 +0.1 0.1 0.8 +0.1 0.1 0.9 +0.1 0.2 0 +0.1 0.2 0.1 +0.1 0.2 0.2 +0.1 0.2 0.3 +0.1 0.2 0.4 +0.1 0.2 0.5 +0.1 0.2 0.6 +0.1 0.2 0.7 +0.1 0.2 0.8 +0.1 0.2 0.9 +0.1 0.3 0 +0.1 0.3 0.1 +0.1 0.3 0.2 +0.1 0.3 0.3 +0.1 0.3 0.4 +0.1 0.3 0.5 +0.1 0.3 0.6 +0.1 0.3 0.7 +0.1 0.3 0.8 +0.1 0.3 0.9 +0.1 0.4 0 +0.1 0.4 0.1 +0.1 0.4 0.2 +0.1 0.4 0.3 +0.1 0.4 0.4 +0.1 0.4 0.5 +0.1 0.4 0.6 +0.1 0.4 0.7 +0.1 0.4 0.8 +0.1 0.4 0.9 +0.1 0.5 0 +0.1 0.5 0.1 +0.1 0.5 0.2 +0.1 0.5 0.3 +0.1 0.5 0.4 +0.1 0.5 0.5 +0.1 0.5 0.6 +0.1 0.5 0.7 +0.1 0.5 0.8 +0.1 0.5 0.9 +0.1 0.6 0 +0.1 0.6 0.1 +0.1 0.6 0.2 +0.1 0.6 0.3 +0.1 0.6 0.4 +0.1 0.6 0.5 +0.1 0.6 0.6 +0.1 0.6 0.7 +0.1 0.6 0.8 +0.1 0.6 0.9 +0.1 0.7 0 +0.1 0.7 0.1 +0.1 0.7 0.2 +0.1 0.7 0.3 +0.1 0.7 0.4 +0.1 0.7 0.5 +0.1 0.7 0.6 +0.1 0.7 0.7 +0.1 0.7 0.8 +0.1 0.7 0.9 +0.1 0.8 0 +0.1 0.8 0.1 +0.1 0.8 0.2 +0.1 0.8 0.3 +0.1 0.8 0.4 +0.1 0.8 0.5 +0.1 0.8 0.6 +0.1 0.8 0.7 +0.1 0.8 0.8 +0.1 0.8 0.9 +0.1 0.9 0 +0.1 0.9 0.1 +0.1 0.9 0.2 +0.1 0.9 0.3 +0.1 0.9 0.4 +0.1 0.9 0.5 +0.1 0.9 0.6 +0.1 0.9 0.7 +0.1 0.9 0.8 +0.1 0.9 0.9 +0.2 0 0 +0.2 0 0.1 +0.2 0 0.2 +0.2 0 0.3 +0.2 0 0.4 +0.2 0 0.5 +0.2 0 0.6 +0.2 0 0.7 +0.2 0 0.8 +0.2 0 0.9 +0.2 0.1 0 +0.2 0.1 0.1 +0.2 0.1 0.2 +0.2 0.1 0.3 +0.2 0.1 0.4 +0.2 0.1 0.5 +0.2 0.1 0.6 +0.2 0.1 0.7 +0.2 0.1 0.8 +0.2 0.1 0.9 +0.2 0.2 0 +0.2 0.2 0.1 +0.2 0.2 0.2 +0.2 0.2 0.3 +0.2 0.2 0.4 +0.2 0.2 0.5 +0.2 0.2 0.6 +0.2 0.2 0.7 +0.2 0.2 0.8 +0.2 0.2 0.9 +0.2 0.3 0 +0.2 0.3 0.1 +0.2 0.3 0.2 +0.2 0.3 0.3 +0.2 0.3 0.4 +0.2 0.3 0.5 +0.2 0.3 0.6 +0.2 0.3 0.7 +0.2 0.3 0.8 +0.2 0.3 0.9 +0.2 0.4 0 +0.2 0.4 0.1 +0.2 0.4 0.2 +0.2 0.4 0.3 +0.2 0.4 0.4 +0.2 0.4 0.5 +0.2 0.4 0.6 +0.2 0.4 0.7 +0.2 0.4 0.8 +0.2 0.4 0.9 +0.2 0.5 0 +0.2 0.5 0.1 +0.2 0.5 0.2 +0.2 0.5 0.3 +0.2 0.5 0.4 +0.2 0.5 0.5 +0.2 0.5 0.6 +0.2 0.5 0.7 +0.2 0.5 0.8 +0.2 0.5 0.9 +0.2 0.6 0 +0.2 0.6 0.1 +0.2 0.6 0.2 +0.2 0.6 0.3 +0.2 0.6 0.4 +0.2 0.6 0.5 +0.2 0.6 0.6 +0.2 0.6 0.7 +0.2 0.6 0.8 +0.2 0.6 0.9 +0.2 0.7 0 +0.2 0.7 0.1 +0.2 0.7 0.2 +0.2 0.7 0.3 +0.2 0.7 0.4 +0.2 0.7 0.5 +0.2 0.7 0.6 +0.2 0.7 0.7 +0.2 0.7 0.8 +0.2 0.7 0.9 +0.2 0.8 0 +0.2 0.8 0.1 +0.2 0.8 0.2 +0.2 0.8 0.3 +0.2 0.8 0.4 +0.2 0.8 0.5 +0.2 0.8 0.6 +0.2 0.8 0.7 +0.2 0.8 0.8 +0.2 0.8 0.9 +0.2 0.9 0 +0.2 0.9 0.1 +0.2 0.9 0.2 +0.2 0.9 0.3 +0.2 0.9 0.4 +0.2 0.9 0.5 +0.2 0.9 0.6 +0.2 0.9 0.7 +0.2 0.9 0.8 +0.2 0.9 0.9 +0.3 0 0 +0.3 0 0.1 +0.3 0 0.2 +0.3 0 0.3 +0.3 0 0.4 +0.3 0 0.5 +0.3 0 0.6 +0.3 0 0.7 +0.3 0 0.8 +0.3 0 0.9 +0.3 0.1 0 +0.3 0.1 0.1 +0.3 0.1 0.2 +0.3 0.1 0.3 +0.3 0.1 0.4 +0.3 0.1 0.5 +0.3 0.1 0.6 +0.3 0.1 0.7 +0.3 0.1 0.8 +0.3 0.1 0.9 +0.3 0.2 0 +0.3 0.2 0.1 +0.3 0.2 0.2 +0.3 0.2 0.3 +0.3 0.2 0.4 +0.3 0.2 0.5 +0.3 0.2 0.6 +0.3 0.2 0.7 +0.3 0.2 0.8 +0.3 0.2 0.9 +0.3 0.3 0 +0.3 0.3 0.1 +0.3 
0.3 0.2 +0.3 0.3 0.3 +0.3 0.3 0.4 +0.3 0.3 0.5 +0.3 0.3 0.6 +0.3 0.3 0.7 +0.3 0.3 0.8 +0.3 0.3 0.9 +0.3 0.4 0 +0.3 0.4 0.1 +0.3 0.4 0.2 +0.3 0.4 0.3 +0.3 0.4 0.4 +0.3 0.4 0.5 +0.3 0.4 0.6 +0.3 0.4 0.7 +0.3 0.4 0.8 +0.3 0.4 0.9 +0.3 0.5 0 +0.3 0.5 0.1 +0.3 0.5 0.2 +0.3 0.5 0.3 +0.3 0.5 0.4 +0.3 0.5 0.5 +0.3 0.5 0.6 +0.3 0.5 0.7 +0.3 0.5 0.8 +0.3 0.5 0.9 +0.3 0.6 0 +0.3 0.6 0.1 +0.3 0.6 0.2 +0.3 0.6 0.3 +0.3 0.6 0.4 +0.3 0.6 0.5 +0.3 0.6 0.6 +0.3 0.6 0.7 +0.3 0.6 0.8 +0.3 0.6 0.9 +0.3 0.7 0 +0.3 0.7 0.1 +0.3 0.7 0.2 +0.3 0.7 0.3 +0.3 0.7 0.4 +0.3 0.7 0.5 +0.3 0.7 0.6 +0.3 0.7 0.7 +0.3 0.7 0.8 +0.3 0.7 0.9 +0.3 0.8 0 +0.3 0.8 0.1 +0.3 0.8 0.2 +0.3 0.8 0.3 +0.3 0.8 0.4 +0.3 0.8 0.5 +0.3 0.8 0.6 +0.3 0.8 0.7 +0.3 0.8 0.8 +0.3 0.8 0.9 +0.3 0.9 0 +0.3 0.9 0.1 +0.3 0.9 0.2 +0.3 0.9 0.3 +0.3 0.9 0.4 +0.3 0.9 0.5 +0.3 0.9 0.6 +0.3 0.9 0.7 +0.3 0.9 0.8 +0.3 0.9 0.9 +0.4 0 0 +0.4 0 0.1 +0.4 0 0.2 +0.4 0 0.3 +0.4 0 0.4 +0.4 0 0.5 +0.4 0 0.6 +0.4 0 0.7 +0.4 0 0.8 +0.4 0 0.9 +0.4 0.1 0 +0.4 0.1 0.1 +0.4 0.1 0.2 +0.4 0.1 0.3 +0.4 0.1 0.4 +0.4 0.1 0.5 +0.4 0.1 0.6 +0.4 0.1 0.7 +0.4 0.1 0.8 +0.4 0.1 0.9 +0.4 0.2 0 +0.4 0.2 0.1 +0.4 0.2 0.2 +0.4 0.2 0.3 +0.4 0.2 0.4 +0.4 0.2 0.5 +0.4 0.2 0.6 +0.4 0.2 0.7 +0.4 0.2 0.8 +0.4 0.2 0.9 +0.4 0.3 0 +0.4 0.3 0.1 +0.4 0.3 0.2 +0.4 0.3 0.3 +0.4 0.3 0.4 +0.4 0.3 0.5 +0.4 0.3 0.6 +0.4 0.3 0.7 +0.4 0.3 0.8 +0.4 0.3 0.9 +0.4 0.4 0 +0.4 0.4 0.1 +0.4 0.4 0.2 +0.4 0.4 0.3 +0.4 0.4 0.4 +0.4 0.4 0.5 +0.4 0.4 0.6 +0.4 0.4 0.7 +0.4 0.4 0.8 +0.4 0.4 0.9 +0.4 0.5 0 +0.4 0.5 0.1 +0.4 0.5 0.2 +0.4 0.5 0.3 +0.4 0.5 0.4 +0.4 0.5 0.5 +0.4 0.5 0.6 +0.4 0.5 0.7 +0.4 0.5 0.8 +0.4 0.5 0.9 +0.4 0.6 0 +0.4 0.6 0.1 +0.4 0.6 0.2 +0.4 0.6 0.3 +0.4 0.6 0.4 +0.4 0.6 0.5 +0.4 0.6 0.6 +0.4 0.6 0.7 +0.4 0.6 0.8 +0.4 0.6 0.9 +0.4 0.7 0 +0.4 0.7 0.1 +0.4 0.7 0.2 +0.4 0.7 0.3 +0.4 0.7 0.4 +0.4 0.7 0.5 +0.4 0.7 0.6 +0.4 0.7 0.7 +0.4 0.7 0.8 +0.4 0.7 0.9 +0.4 0.8 0 +0.4 0.8 0.1 +0.4 0.8 0.2 +0.4 0.8 0.3 +0.4 0.8 0.4 +0.4 0.8 0.5 +0.4 0.8 0.6 +0.4 0.8 0.7 +0.4 0.8 0.8 +0.4 0.8 0.9 +0.4 0.9 0 +0.4 0.9 0.1 +0.4 0.9 0.2 +0.4 0.9 0.3 +0.4 0.9 0.4 +0.4 0.9 0.5 +0.4 0.9 0.6 +0.4 0.9 0.7 +0.4 0.9 0.8 +0.4 0.9 0.9 +0.5 0 0 +0.5 0 0.1 +0.5 0 0.2 +0.5 0 0.3 +0.5 0 0.4 +0.5 0 0.5 +0.5 0 0.6 +0.5 0 0.7 +0.5 0 0.8 +0.5 0 0.9 +0.5 0.1 0 +0.5 0.1 0.1 +0.5 0.1 0.2 +0.5 0.1 0.3 +0.5 0.1 0.4 +0.5 0.1 0.5 +0.5 0.1 0.6 +0.5 0.1 0.7 +0.5 0.1 0.8 +0.5 0.1 0.9 +0.5 0.2 0 +0.5 0.2 0.1 +0.5 0.2 0.2 +0.5 0.2 0.3 +0.5 0.2 0.4 +0.5 0.2 0.5 +0.5 0.2 0.6 +0.5 0.2 0.7 +0.5 0.2 0.8 +0.5 0.2 0.9 +0.5 0.3 0 +0.5 0.3 0.1 +0.5 0.3 0.2 +0.5 0.3 0.3 +0.5 0.3 0.4 +0.5 0.3 0.5 +0.5 0.3 0.6 +0.5 0.3 0.7 +0.5 0.3 0.8 +0.5 0.3 0.9 +0.5 0.4 0 +0.5 0.4 0.1 +0.5 0.4 0.2 +0.5 0.4 0.3 +0.5 0.4 0.4 +0.5 0.4 0.5 +0.5 0.4 0.6 +0.5 0.4 0.7 +0.5 0.4 0.8 +0.5 0.4 0.9 +0.5 0.5 0 +0.5 0.5 0.1 +0.5 0.5 0.2 +0.5 0.5 0.3 +0.5 0.5 0.4 +0.5 0.5 0.5 +0.5 0.5 0.6 +0.5 0.5 0.7 +0.5 0.5 0.8 +0.5 0.5 0.9 +0.5 0.6 0 +0.5 0.6 0.1 +0.5 0.6 0.2 +0.5 0.6 0.3 +0.5 0.6 0.4 +0.5 0.6 0.5 +0.5 0.6 0.6 +0.5 0.6 0.7 +0.5 0.6 0.8 +0.5 0.6 0.9 +0.5 0.7 0 +0.5 0.7 0.1 +0.5 0.7 0.2 +0.5 0.7 0.3 +0.5 0.7 0.4 +0.5 0.7 0.5 +0.5 0.7 0.6 +0.5 0.7 0.7 +0.5 0.7 0.8 +0.5 0.7 0.9 +0.5 0.8 0 +0.5 0.8 0.1 +0.5 0.8 0.2 +0.5 0.8 0.3 +0.5 0.8 0.4 +0.5 0.8 0.5 +0.5 0.8 0.6 +0.5 0.8 0.7 +0.5 0.8 0.8 +0.5 0.8 0.9 +0.5 0.9 0 +0.5 0.9 0.1 +0.5 0.9 0.2 +0.5 0.9 0.3 +0.5 0.9 0.4 +0.5 0.9 0.5 +0.5 0.9 0.6 +0.5 0.9 0.7 +0.5 0.9 0.8 +0.5 0.9 0.9 +0.6 0 0 +0.6 0 0.1 +0.6 0 0.2 +0.6 0 0.3 +0.6 0 0.4 +0.6 0 0.5 +0.6 0 0.6 +0.6 0 0.7 +0.6 0 0.8 +0.6 0 0.9 +0.6 0.1 0 +0.6 0.1 0.1 +0.6 0.1 0.2 +0.6 0.1 0.3 +0.6 0.1 
0.4 +0.6 0.1 0.5 +0.6 0.1 0.6 +0.6 0.1 0.7 +0.6 0.1 0.8 +0.6 0.1 0.9 +0.6 0.2 0 +0.6 0.2 0.1 +0.6 0.2 0.2 +0.6 0.2 0.3 +0.6 0.2 0.4 +0.6 0.2 0.5 +0.6 0.2 0.6 +0.6 0.2 0.7 +0.6 0.2 0.8 +0.6 0.2 0.9 +0.6 0.3 0 +0.6 0.3 0.1 +0.6 0.3 0.2 +0.6 0.3 0.3 +0.6 0.3 0.4 +0.6 0.3 0.5 +0.6 0.3 0.6 +0.6 0.3 0.7 +0.6 0.3 0.8 +0.6 0.3 0.9 +0.6 0.4 0 +0.6 0.4 0.1 +0.6 0.4 0.2 +0.6 0.4 0.3 +0.6 0.4 0.4 +0.6 0.4 0.5 +0.6 0.4 0.6 +0.6 0.4 0.7 +0.6 0.4 0.8 +0.6 0.4 0.9 +0.6 0.5 0 +0.6 0.5 0.1 +0.6 0.5 0.2 +0.6 0.5 0.3 +0.6 0.5 0.4 +0.6 0.5 0.5 +0.6 0.5 0.6 +0.6 0.5 0.7 +0.6 0.5 0.8 +0.6 0.5 0.9 +0.6 0.6 0 +0.6 0.6 0.1 +0.6 0.6 0.2 +0.6 0.6 0.3 +0.6 0.6 0.4 +0.6 0.6 0.5 +0.6 0.6 0.6 +0.6 0.6 0.7 +0.6 0.6 0.8 +0.6 0.6 0.9 +0.6 0.7 0 +0.6 0.7 0.1 +0.6 0.7 0.2 +0.6 0.7 0.3 +0.6 0.7 0.4 +0.6 0.7 0.5 +0.6 0.7 0.6 +0.6 0.7 0.7 +0.6 0.7 0.8 +0.6 0.7 0.9 +0.6 0.8 0 +0.6 0.8 0.1 +0.6 0.8 0.2 +0.6 0.8 0.3 +0.6 0.8 0.4 +0.6 0.8 0.5 +0.6 0.8 0.6 +0.6 0.8 0.7 +0.6 0.8 0.8 +0.6 0.8 0.9 +0.6 0.9 0 +0.6 0.9 0.1 +0.6 0.9 0.2 +0.6 0.9 0.3 +0.6 0.9 0.4 +0.6 0.9 0.5 +0.6 0.9 0.6 +0.6 0.9 0.7 +0.6 0.9 0.8 +0.6 0.9 0.9 +0.7 0 0 +0.7 0 0.1 +0.7 0 0.2 +0.7 0 0.3 +0.7 0 0.4 +0.7 0 0.5 +0.7 0 0.6 +0.7 0 0.7 +0.7 0 0.8 +0.7 0 0.9 +0.7 0.1 0 +0.7 0.1 0.1 +0.7 0.1 0.2 +0.7 0.1 0.3 +0.7 0.1 0.4 +0.7 0.1 0.5 +0.7 0.1 0.6 +0.7 0.1 0.7 +0.7 0.1 0.8 +0.7 0.1 0.9 +0.7 0.2 0 +0.7 0.2 0.1 +0.7 0.2 0.2 +0.7 0.2 0.3 +0.7 0.2 0.4 +0.7 0.2 0.5 +0.7 0.2 0.6 +0.7 0.2 0.7 +0.7 0.2 0.8 +0.7 0.2 0.9 +0.7 0.3 0 +0.7 0.3 0.1 +0.7 0.3 0.2 +0.7 0.3 0.3 +0.7 0.3 0.4 +0.7 0.3 0.5 +0.7 0.3 0.6 +0.7 0.3 0.7 +0.7 0.3 0.8 +0.7 0.3 0.9 +0.7 0.4 0 +0.7 0.4 0.1 +0.7 0.4 0.2 +0.7 0.4 0.3 +0.7 0.4 0.4 +0.7 0.4 0.5 +0.7 0.4 0.6 +0.7 0.4 0.7 +0.7 0.4 0.8 +0.7 0.4 0.9 +0.7 0.5 0 +0.7 0.5 0.1 +0.7 0.5 0.2 +0.7 0.5 0.3 +0.7 0.5 0.4 +0.7 0.5 0.5 +0.7 0.5 0.6 +0.7 0.5 0.7 +0.7 0.5 0.8 +0.7 0.5 0.9 +0.7 0.6 0 +0.7 0.6 0.1 +0.7 0.6 0.2 +0.7 0.6 0.3 +0.7 0.6 0.4 +0.7 0.6 0.5 +0.7 0.6 0.6 +0.7 0.6 0.7 +0.7 0.6 0.8 +0.7 0.6 0.9 +0.7 0.7 0 +0.7 0.7 0.1 +0.7 0.7 0.2 +0.7 0.7 0.3 +0.7 0.7 0.4 +0.7 0.7 0.5 +0.7 0.7 0.6 +0.7 0.7 0.7 +0.7 0.7 0.8 +0.7 0.7 0.9 +0.7 0.8 0 +0.7 0.8 0.1 +0.7 0.8 0.2 +0.7 0.8 0.3 +0.7 0.8 0.4 +0.7 0.8 0.5 +0.7 0.8 0.6 +0.7 0.8 0.7 +0.7 0.8 0.8 +0.7 0.8 0.9 +0.7 0.9 0 +0.7 0.9 0.1 +0.7 0.9 0.2 +0.7 0.9 0.3 +0.7 0.9 0.4 +0.7 0.9 0.5 +0.7 0.9 0.6 +0.7 0.9 0.7 +0.7 0.9 0.8 +0.7 0.9 0.9 +0.8 0 0 +0.8 0 0.1 +0.8 0 0.2 +0.8 0 0.3 +0.8 0 0.4 +0.8 0 0.5 +0.8 0 0.6 +0.8 0 0.7 +0.8 0 0.8 +0.8 0 0.9 +0.8 0.1 0 +0.8 0.1 0.1 +0.8 0.1 0.2 +0.8 0.1 0.3 +0.8 0.1 0.4 +0.8 0.1 0.5 +0.8 0.1 0.6 +0.8 0.1 0.7 +0.8 0.1 0.8 +0.8 0.1 0.9 +0.8 0.2 0 +0.8 0.2 0.1 +0.8 0.2 0.2 +0.8 0.2 0.3 +0.8 0.2 0.4 +0.8 0.2 0.5 +0.8 0.2 0.6 +0.8 0.2 0.7 +0.8 0.2 0.8 +0.8 0.2 0.9 +0.8 0.3 0 +0.8 0.3 0.1 +0.8 0.3 0.2 +0.8 0.3 0.3 +0.8 0.3 0.4 +0.8 0.3 0.5 +0.8 0.3 0.6 +0.8 0.3 0.7 +0.8 0.3 0.8 +0.8 0.3 0.9 +0.8 0.4 0 +0.8 0.4 0.1 +0.8 0.4 0.2 +0.8 0.4 0.3 +0.8 0.4 0.4 +0.8 0.4 0.5 +0.8 0.4 0.6 +0.8 0.4 0.7 +0.8 0.4 0.8 +0.8 0.4 0.9 +0.8 0.5 0 +0.8 0.5 0.1 +0.8 0.5 0.2 +0.8 0.5 0.3 +0.8 0.5 0.4 +0.8 0.5 0.5 +0.8 0.5 0.6 +0.8 0.5 0.7 +0.8 0.5 0.8 +0.8 0.5 0.9 +0.8 0.6 0 +0.8 0.6 0.1 +0.8 0.6 0.2 +0.8 0.6 0.3 +0.8 0.6 0.4 +0.8 0.6 0.5 +0.8 0.6 0.6 +0.8 0.6 0.7 +0.8 0.6 0.8 +0.8 0.6 0.9 +0.8 0.7 0 +0.8 0.7 0.1 +0.8 0.7 0.2 +0.8 0.7 0.3 +0.8 0.7 0.4 +0.8 0.7 0.5 +0.8 0.7 0.6 +0.8 0.7 0.7 +0.8 0.7 0.8 +0.8 0.7 0.9 +0.8 0.8 0 +0.8 0.8 0.1 +0.8 0.8 0.2 +0.8 0.8 0.3 +0.8 0.8 0.4 +0.8 0.8 0.5 +0.8 0.8 0.6 +0.8 0.8 0.7 +0.8 0.8 0.8 +0.8 0.8 0.9 +0.8 0.9 0 +0.8 0.9 0.1 +0.8 0.9 0.2 +0.8 0.9 0.3 +0.8 0.9 0.4 +0.8 
0.9 0.5 +0.8 0.9 0.6 +0.8 0.9 0.7 +0.8 0.9 0.8 +0.8 0.9 0.9 +0.9 0 0 +0.9 0 0.1 +0.9 0 0.2 +0.9 0 0.3 +0.9 0 0.4 +0.9 0 0.5 +0.9 0 0.6 +0.9 0 0.7 +0.9 0 0.8 +0.9 0 0.9 +0.9 0.1 0 +0.9 0.1 0.1 +0.9 0.1 0.2 +0.9 0.1 0.3 +0.9 0.1 0.4 +0.9 0.1 0.5 +0.9 0.1 0.6 +0.9 0.1 0.7 +0.9 0.1 0.8 +0.9 0.1 0.9 +0.9 0.2 0 +0.9 0.2 0.1 +0.9 0.2 0.2 +0.9 0.2 0.3 +0.9 0.2 0.4 +0.9 0.2 0.5 +0.9 0.2 0.6 +0.9 0.2 0.7 +0.9 0.2 0.8 +0.9 0.2 0.9 +0.9 0.3 0 +0.9 0.3 0.1 +0.9 0.3 0.2 +0.9 0.3 0.3 +0.9 0.3 0.4 +0.9 0.3 0.5 +0.9 0.3 0.6 +0.9 0.3 0.7 +0.9 0.3 0.8 +0.9 0.3 0.9 +0.9 0.4 0 +0.9 0.4 0.1 +0.9 0.4 0.2 +0.9 0.4 0.3 +0.9 0.4 0.4 +0.9 0.4 0.5 +0.9 0.4 0.6 +0.9 0.4 0.7 +0.9 0.4 0.8 +0.9 0.4 0.9 +0.9 0.5 0 +0.9 0.5 0.1 +0.9 0.5 0.2 +0.9 0.5 0.3 +0.9 0.5 0.4 +0.9 0.5 0.5 +0.9 0.5 0.6 +0.9 0.5 0.7 +0.9 0.5 0.8 +0.9 0.5 0.9 +0.9 0.6 0 +0.9 0.6 0.1 +0.9 0.6 0.2 +0.9 0.6 0.3 +0.9 0.6 0.4 +0.9 0.6 0.5 +0.9 0.6 0.6 +0.9 0.6 0.7 +0.9 0.6 0.8 +0.9 0.6 0.9 +0.9 0.7 0 +0.9 0.7 0.1 +0.9 0.7 0.2 +0.9 0.7 0.3 +0.9 0.7 0.4 +0.9 0.7 0.5 +0.9 0.7 0.6 +0.9 0.7 0.7 +0.9 0.7 0.8 +0.9 0.7 0.9 +0.9 0.8 0 +0.9 0.8 0.1 +0.9 0.8 0.2 +0.9 0.8 0.3 +0.9 0.8 0.4 +0.9 0.8 0.5 +0.9 0.8 0.6 +0.9 0.8 0.7 +0.9 0.8 0.8 +0.9 0.8 0.9 +0.9 0.9 0 +0.9 0.9 0.1 +0.9 0.9 0.2 +0.9 0.9 0.3 +0.9 0.9 0.4 +0.9 0.9 0.5 +0.9 0.9 0.6 +0.9 0.9 0.7 +0.9 0.9 0.8 +0.9 0.9 0.9 diff --git a/data/points/tore3D_300.off b/data/points/tore3D_300.off new file mode 100644 index 00000000..00eecbc2 --- /dev/null +++ b/data/points/tore3D_300.off @@ -0,0 +1,302 @@ +OFF +300 0 0 +0.959534781242014 -0.418347167310406 0.302236706360499 +2.16795051781987 1.8534755312793 -0.523120176939808 +-2.38752605996323 -1.50911089699331 -0.565888742230855 +-2.70428287516913 -1.25687539753238 0.188394161758062 +-1.2293192351549 -1.6433666240448 -0.998632128417844 +-0.541060581221073 0.874317877271137 -0.235772670127843 +-0.680895633395962 -1.18447800621889 0.773528325120879 +0.307818039582861 1.45003941576708 0.855593436948771 +0.168334095447179 -1.04004287990976 0.322931047726699 +-0.693132044992659 2.91846761160023 -0.0265488214271136 +2.21829768934691 -1.80825620958286 0.507029717781912 +1.39353197432761 -1.00232018065434 -0.95898987509937 +1.25103367489055 0.670258962753535 -0.814097483693208 +0.256075322268679 0.980760885647654 -0.164603689388124 +-1.03049290335496 -0.548251215382241 -0.553663669101879 +-1.4770698664818 1.55244993730008 -0.989743336676147 +2.81127276382533 -0.973240065906209 -0.222333302099467 +1.92081177957661 0.371630788782385 0.999050476863813 +0.940122029559839 0.781599816466946 0.628994896033071 +0.306997277631142 -2.63033061729372 0.761482466789273 +0.612817640625854 2.28021124717378 -0.932517620554714 +-0.749699607389099 1.24801965739463 -0.839010966831176 +-0.154361066579755 1.04815680087852 0.339688867662217 +-0.262639970818703 1.12445753696261 0.534327877655949 +0.36717667006869 -1.01676645451003 0.394334855760279 +0.424035547682691 -0.917168809434391 -0.144175092700111 +2.08261296666578 -2.05785707858966 -0.373061099708758 +0.651279530942164 -0.91244335335117 0.476884993705875 +-2.93865473739998 0.176739480659163 0.330046256428844 +0.0752592625269588 -2.86104297523846 0.506852758144546 +2.42108789315239 -1.57246048079025 -0.46192589897907 +-1.61942559663475 -1.10542050497529 0.999228952936353 +0.692389459564839 0.770549419741852 0.265645889867695 +1.51841358234726 -1.00021735880109 -0.98334383059774 +2.85075282389761 0.814388843476368 -0.262995967489467 +-0.784392975900731 -0.623934865952297 -0.0675029801783761 +2.95477190325865 -0.01545374755798 
0.297209425434383 +-2.01178925877753 0.819250335239531 0.98506143304802 +-1.06049119415128 -0.640702917987696 0.64876172557636 +0.424777646365108 -0.912213211492002 0.111760882407564 +1.62717960754164 -2.51706084785163 0.0745558427305672 +0.991179499609619 -0.137272754245949 0.0357741533639773 +0.586676853840703 -0.981514568432931 0.516124042728287 +0.996296801968011 0.163993329632496 -0.138970552607153 +0.999735468785909 2.8102215879738 -0.18492224634737 +1.80656679908849 -0.264164139672896 0.984706448083062 +-1.41495369339859 2.29441949474757 -0.718394190427821 +-0.608066058828841 -1.94541607390611 -0.999268905344167 +2.15894083249888 1.6976186891867 -0.66545230252024 +-0.0920339025393292 -1.14740764785059 -0.52854186528706 +-0.0803524956678571 -2.82987037713862 0.556256089759466 +1.73755451382877 -1.38298334066552 -0.975330014733214 +1.339576888853 -1.69568142514835 0.986958907306434 +0.941158226372244 1.33453565427992 -0.930230027566694 +-1.64738843362436 2.11800058817634 -0.730187211545338 +-2.09522315192051 2.14264431725583 -0.0797808501517505 +-2.70163529798441 -0.512752030638191 -0.661592848269346 +0.0012406381564526 -2.43305836022187 -0.90136573192226 +1.26857330331965 -0.754387632740404 0.851677186577399 +0.555984630753723 -0.853190194061578 -0.190731240639216 +-0.376652419334438 0.953098196145437 -0.221429003703334 +0.575823956072062 -1.52858842228987 0.930397979228448 +-0.917665722737145 0.426001305594148 -0.152684444024179 +-2.2612659986582 1.45631970411773 0.724147482366059 +-2.4975593505263 0.336020143137794 -0.854128541403725 +-1.21733135240706 -0.635660112064281 -0.779262523564629 +-0.869857012314377 2.58076078125195 -0.690415729999257 +-1.40500337086406 -0.601791852462944 -0.881844312277268 +2.31904992184587 -1.05819185575653 -0.835775763770501 +1.19729717177012 -1.57894012102566 -0.999829933326901 +1.49128833410868 -0.646133202611446 0.927124838238863 +-0.646639588616155 1.64689880969244 0.973024656841064 +-1.07933947117704 0.185618755846084 -0.425802800488271 +-2.25890103703752 0.840567429045019 -0.911984072088236 +1.02056337401279 -0.444785727078535 -0.46229962045738 +-2.01362172913345 1.77850332507576 0.727048210112551 +-1.7152404634659 -1.95129617448897 0.801495726550204 +0.24904814258364 -1.73684228270244 0.969423708704471 +-0.97192898217864 0.340274559305984 -0.242197940224517 +0.335252717095645 1.05330582776341 0.446812017005886 +0.0757351764289982 -1.003454918955 0.112151625141631 +-2.12620574772059 -1.59311887240156 0.754033287759111 +-0.325964621805956 -0.967660697886875 -0.204281119631407 +0.847444429135404 -2.75134349865297 0.477009941828313 +-2.52939644202034 1.59075929357512 0.154220894882662 +-1.51910975922194 2.47536837366441 -0.426830229587188 +-1.23951192974026 -2.45646514226776 -0.659762364526391 +0.00537063209046765 -2.98895632800773 -0.148175024086866 +0.578604147500042 -1.40605218842367 -0.877514074075442 +1.32814619313677 0.993211756161157 0.939862017190046 +2.21921387799351 1.69625165174388 0.608911392663736 +2.11087050271442 -1.62983612347069 0.745183392373354 +1.20744870574619 0.759485725637224 -0.819168429715268 +-1.85454257923694 0.27227622276858 -0.992083907655554 +1.12410933384486 -0.192246452457068 0.511017950537958 +0.508634803443603 -0.874107029353032 -0.150053592184677 +1.8703001619597 0.692655600076433 0.999984548045959 +1.64361625376311 -1.21013144565734 0.999157018995579 +0.840118117818047 -2.50035600571423 0.770266412736703 +2.3287036316336 -1.17791884555773 0.79265889315273 +-2.54280253777906 0.888179845147398 -0.720498448371396 
+-2.06783509487145 -1.09828285186568 -0.939917075668418 +0.200760576094878 0.999051233659456 0.194124743559801 +1.57705178275634 -1.36916375652601 0.9960789528748 +1.27101041453082 -0.844959876278814 -0.880656839462615 +2.26878096107158 -1.9325250189773 0.197654241802863 +1.00297464475457 2.79196340984832 -0.256097547101918 +-1.40380898461312 2.64725539056151 0.0843284379977918 +-0.0517392801851798 1.58464174765503 0.910043016348823 +-0.301340591032907 1.33506850520998 -0.775501362836281 +-0.916802035602795 2.82057847024232 -0.259150467395372 +-2.3956686819712 1.05176302652714 -0.787450409390176 +0.260687064694077 -0.989291907955316 0.21352399518671 +1.12792686627226 2.77426524629341 0.101946125819402 +-0.0684132204807785 -2.16283457272975 0.986474250179203 +2.93446835470829 0.29913341943434 0.313235563230203 +0.032755222852554 1.65788790402753 0.939776880892029 +-0.909983475613723 2.25856131650061 -0.900435993208869 +-0.654785891138605 -2.77631341957818 -0.522754517843354 +-0.637731499509436 0.816845593255333 -0.267023168326767 +-2.09200542287803 -0.48535592606264 -0.98905160843396 +2.47346067782732 0.00404347414316116 -0.880813179367053 +-1.07226402798667 -1.22257426020764 0.92749836802861 +1.39068341588794 -0.68753111885771 -0.893709581266259 +2.49123495155807 -0.780792651284873 0.791842221382225 +1.52017311675094 -0.606521706417438 0.931673164101109 +-1.41022678618765 0.805159290147271 -0.926575312433509 +-2.43386589806161 -0.47159898411599 -0.877741390593103 +-0.48020605858504 1.36831644955489 -0.835252691435911 +0.87912613023492 0.619156516496662 0.380637258865281 +0.253085874295989 1.43445725355682 -0.839482019455938 +0.517703192423887 2.78341770290716 0.556042888577917 +-1.00656408919327 -2.07658747018639 0.951489907279078 +-1.40077608835454 0.925515316363222 0.947050044378871 +2.07634345953723 -1.10323384918967 0.936285655309915 +-1.57233200628071 -0.286150759083885 0.915709183775737 +-1.15178699195696 -0.454341172294152 0.647764766715205 +0.760883532100737 2.73972227857741 -0.537259031193132 +2.40478710552954 0.399947334915386 0.899063394403055 +-1.37636264245145 2.1239854039931 0.847405598008672 +0.620060205826966 -0.788865441879053 -0.0822217432568619 +-1.31255288285741 0.709433414132035 -0.861362373356423 +2.87811168494233 0.586136824835297 -0.348820144844317 +1.46215508160988 -2.61812761438328 0.050023918341349 +1.59775483991913 2.14438930309337 0.738569238086525 +2.16166999311135 0.234499849009414 -0.984683361538415 +-2.627348149981 -1.44784295277905 0.0162526714459874 +1.55466115277639 -2.06080606798315 -0.813580914672985 +1.4768627986643 0.291640310070862 -0.86911101944411 +0.36020381665219 -0.932877801044002 -0.00278947687261815 +1.02111844342504 -0.846230498172804 0.738907750969927 +0.0753375442562172 -2.00343644006518 0.999988226833092 +-0.13730950236629 0.991363204668133 -0.0406636790006198 +-2.36318789164257 -1.83848352727692 -0.108400279147113 +-0.80461379190523 -1.6474745725615 -0.986034710893435 +1.7756647596058 -2.38360544396859 -0.233741336935155 +1.69896254446626 -0.343351559842879 0.963782400468442 +0.0494913193213498 -2.46795356936345 -0.883490137791417 +1.35772824903814 0.589658412058176 -0.854314907317433 +-1.96435893659605 -2.22155414885828 -0.260516416434745 +1.35473240963385 0.346732354142229 0.798797675885133 +0.400966681880103 2.47568026636498 0.861391979701686 +-1.62104506824445 -0.572439707452304 0.959751471457789 +-0.867237113132388 0.754407264547685 0.525890831738318 +1.08774066807182 0.58266518858005 -0.642803142528264 +-1.62210891849555 
-2.48905492938415 0.239222440583498 +1.29180344928963 0.216732874635193 -0.723674450462894 +-2.23977014077204 1.04128789846265 0.8826717211881 +-2.15042174348158 -1.07255921184869 -0.915173594366247 +1.20646041838032 2.20670800057226 -0.857204278269845 +-1.59665966804813 2.12974640237861 -0.749686641314759 +-0.00507896505098647 1.00004934186706 0.0111568054264668 +1.08896774211599 -0.54893426659595 -0.625155433738454 +-2.08838518173774 1.31789005319132 0.882958674731457 +-2.67026050322772 0.777218712412565 0.624441286594906 +0.134410054836508 1.03453453869622 -0.290843938402197 +-1.15657597782331 0.21336107716996 0.566722675765672 +-1.12091291528735 -0.197701697854327 -0.507272467213624 +2.20360569490773 2.03438086720851 -0.0424844996967955 +-0.190560364964061 -1.95805452687492 0.999465390706348 +-1.69646814346356 -0.625828205037699 0.981438239882928 +-0.304352636706423 -2.83100688552149 -0.531082871783868 +0.14563390066437 -2.1691590077372 0.984738172749555 +-2.34365544864258 -0.143838445921824 0.937470314360672 +1.02710725533131 -0.411656637698526 0.449124549869131 +-1.61565579085794 -1.08738989914736 -0.998620980457037 +-0.717364891783532 1.50452842131031 0.942855738747352 +-0.578567558143299 0.853096156802936 0.246207825092783 +1.11793653693318 0.455945552854421 0.609663175028121 +2.39251443993049 0.176334486690512 0.916949275349228 +2.9155642712549 -0.62746034690765 0.18721929843073 +-1.14806683434513 2.59996359591672 0.539229179127317 +-0.177857300266215 1.90119125961188 0.995895769538786 +0.35914818712455 2.31920052987544 -0.937922693979931 +1.2743666522523 -0.690075814269372 0.834645084284416 +2.9302841310705 -0.642986734356231 0.000995094268308586 +-1.26975451435037 1.91229429559509 -0.955354478719998 +0.0738366615932967 1.2694977539948 -0.685198041292763 +-1.69716232712318 1.22850232281973 0.995464670669604 +0.84570997159476 2.13803629884409 -0.954183333932217 +0.352468648163491 -2.9344545563609 -0.294839028922669 +1.60389935177392 1.74998587824519 0.927507865916284 +-2.11217212504527 0.760269273860796 0.969564968832133 +1.74217621467257 2.29099479959881 0.47835741399359 +2.92346903414395 0.330804589449986 -0.33526021272958 +-2.35952494893432 0.250879618289894 -0.927901668132089 +-1.41032225684219 -0.170788763321419 0.815061677973598 +-1.01611744261575 -2.68326939535803 -0.494423710160221 +-1.59344928656495 2.26723050382079 -0.636622362537764 +-0.39580260135079 -2.90869384965014 0.35332697871175 +-1.2619469130673 -1.96838196535269 -0.941085144982828 +-1.30548566796483 1.57897857482689 0.998809944306817 +-1.35584805950535 -0.237100218600359 0.781761994033332 +2.52115787439036 1.0589742629827 0.678574394474946 +-1.95614957999606 1.84500176798258 0.724791088391566 +1.4979623349839 1.58854216289188 0.983033167483811 +-0.981146595813074 -0.312003286041912 0.241344264758116 +1.8959899672511 0.736814649744932 -0.999417494784511 +-0.757097460644456 -0.653318267664153 0.00461774302265985 +1.10316023266325 -2.54112393275506 -0.637744617153837 +-1.72022005250621 2.23190220939628 0.575364412789703 +2.94799384971966 0.358547670782175 -0.244227908472639 +-1.00994005619833 -0.0359408889344281 0.14507521649066 +1.00132300605616 0.624998791048906 0.572891974870137 +2.3085624824673 1.91312252759207 0.0591476364274699 +-1.6659611493105 -1.84465800051153 0.874183099883255 +1.04218128888347 -0.0688739697225331 0.294844086473301 +-0.601696551108497 -2.17864473742223 0.965553097225366 +1.08052048270212 2.3454565036691 0.812916093418994 +-1.79474335184654 -0.705336370592575 -0.997431127830519 
+1.12310671921089 0.346525230427012 0.565643836449263 +0.696749595851333 -1.32918567757458 -0.86644723350324 +2.13602598032304 -0.51308315076563 -0.980446803768366 +-1.93196921945397 1.69157833829893 -0.823120136150302 +-2.01941018956472 0.0465497013536062 0.999801046182706 +-2.59423705179264 1.10327839008222 -0.573659802885399 +-0.585219166556388 1.2580168195987 -0.790451471385005 +-0.0576402469775295 1.41565353591293 0.812347628291534 +2.3891312946018 1.26300791722059 0.711751109733218 +2.30156990743563 0.405909870828655 0.941472642528813 +0.275589260456702 -1.00168998250037 -0.276232537275217 +-1.29248016136238 -0.876528620996052 0.898813208471374 +2.35322045727023 1.36303382650264 -0.694525299956941 +1.54876331953525 -1.90130806390003 -0.891879829401408 +0.16593187575783 1.69907888603775 0.956162102627631 +-1.52945389781439 1.36994725318236 0.998579274948504 +0.947255866427042 0.321393560626499 -0.0242347842318913 +-0.849266801325161 -1.69619250008471 -0.994673549743623 +1.17007223184327 -1.45748256057479 0.991388144859652 +-1.02065656396392 -0.234068996172272 -0.303449207922113 +-1.79030334572357 -0.338548456021292 0.984036300930214 +-0.202580788439636 -2.9098032393728 -0.399239754145888 +0.556542406671504 0.835949987061395 0.0922790328770012 +-0.192048894344178 2.82750618177242 0.551732977447003 +-2.4383233660105 1.32432593198794 -0.632262737666956 +1.00457987324054 2.2017998675896 0.907456835650233 +0.258230866817704 1.01719292156891 0.310599733745205 +-0.667635905646098 -0.747459126240117 -0.0665059738190087 +-0.925217466789865 -0.458919419892981 -0.253939651272238 +0.593031990252279 0.895690762363157 -0.37806299536721 +2.41891123803036 1.29269511281484 -0.669666978131638 +2.86198580293885 0.898401929715067 0.0252390801762425 +0.577555242801644 2.9381443296706 -0.105947290476281 +-1.08511464203043 -1.17367759499812 -0.915830310368355 +-0.00823512252879893 -2.34071693036362 0.940160678630731 +0.723355997258736 0.693575864721822 -0.0654384491946278 +-2.45564096089468 -0.108134097337387 0.888941553150783 +1.0554937129765 2.48105527060468 0.717810492633049 +0.0900114873059034 -1.35462061558498 0.766376101065206 +1.07215905411075 0.711684540080838 -0.701026091958739 +1.7642570870584 -1.10328014310579 -0.996728357140097 +-2.42237083502851 -1.22877182282345 0.69789165141999 +-2.29573991343012 1.39431631359088 -0.727611414530717 +-0.80608542331762 -0.591802110438813 -0.00185677113225586 +0.584373976640234 -1.2455966078976 -0.781316331338531 +-0.117129848865082 1.30318973566841 -0.722321820498621 +-0.427012578549237 -2.23637426000762 0.960934425748682 +1.84229944864514 -1.55651807400289 -0.911270828095356 +0.203997692272774 1.21919989241584 -0.645392194686482 +0.446215983259144 -1.74060177047245 -0.979155323351272 +-2.26029729565112 -1.9493984229134 -0.173628460146742 +-1.25890926878714 2.70615042882704 0.174571595362577 +1.56639306015901 0.363207544125242 -0.919944405133645 +-1.34865311186347 0.197323883232669 -0.77087378354129 +2.34234493513434 -1.5350786183092 0.599274108730333 +-0.128003282153838 2.84277775092986 0.533724957778819 +2.67959573383432 0.576255466056955 -0.671661470652464 +-1.22133840956565 1.75573360172354 -0.9903269014343 +-2.65079844065683 0.801278641234516 -0.638940464730991 +1.77058917671795 -1.05700903893028 -0.998069966649505 +-0.0617876530073753 2.73595305745592 -0.676273466837526 +-1.02613271221592 -0.32948466837676 0.386553542254037 +0.289216189984019 2.86987925980331 0.46670029349679 +0.0221376688554938 -1.6367078071517 0.931733618120448 +0.9880285207657 
1.3702732104138 -0.95051910813202 +-2.35149763797368 -1.28585604226228 -0.733114009304653 +-0.984949657118942 -2.15300005263466 -0.929983557964422 +0.815196720196319 0.609995430318387 -0.189686296979061 +-1.36318181784209 0.682494702259896 -0.879709239261867 +-0.562439533917299 2.60180320419631 -0.749591065583496 diff --git a/src/Persistent_cohomology/example/CMakeLists.txt b/src/Persistent_cohomology/example/CMakeLists.txt index 2f94ed15..ee6ba541 100644 --- a/src/Persistent_cohomology/example/CMakeLists.txt +++ b/src/Persistent_cohomology/example/CMakeLists.txt @@ -53,8 +53,14 @@ if(GMPXX_FOUND AND GMP_FOUND) if(CGAL_FOUND) add_executable(alpha_complex_3d_persistence alpha_complex_3d_persistence.cpp) target_link_libraries(alpha_complex_3d_persistence ${Boost_SYSTEM_LIBRARY} ${GMPXX_LIBRARIES} ${GMP_LIBRARIES} ${CGAL_LIBRARY}) + + add_executable(periodic_alpha_complex_3d_persistence periodic_alpha_complex_3d_persistence.cpp) + target_link_libraries(periodic_alpha_complex_3d_persistence ${Boost_SYSTEM_LIBRARY} ${GMPXX_LIBRARIES} ${GMP_LIBRARIES} ${CGAL_LIBRARY}) + add_test(periodic_alpha_complex_3d_persistence_2_0 ${CMAKE_CURRENT_BINARY_DIR}/alpha_complex_3d_persistence ${CMAKE_SOURCE_DIR}/data/points/grid_10_10_10_in_0_1.off 2 0) + if (TBB_FOUND) target_link_libraries(alpha_complex_3d_persistence ${TBB_RELEASE_LIBRARY}) + target_link_libraries(periodic_alpha_complex_3d_persistence ${TBB_RELEASE_LIBRARY}) endif() add_test(alpha_complex_3d_persistence_2_0_5 ${CMAKE_CURRENT_BINARY_DIR}/alpha_complex_3d_persistence ${CMAKE_SOURCE_DIR}/data/points/bunny_5000 2 0.5) @@ -65,7 +71,7 @@ if(GMPXX_FOUND AND GMP_FOUND) if (EIGEN3_FOUND) message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") include( ${EIGEN3_USE_FILE} ) - + add_executable (alpha_complex_persistence alpha_complex_persistence.cpp) target_link_libraries(alpha_complex_persistence ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY}) if (TBB_FOUND) diff --git a/src/Persistent_cohomology/example/README b/src/Persistent_cohomology/example/README index 8c71ccf5..92b80c76 100644 --- a/src/Persistent_cohomology/example/README +++ b/src/Persistent_cohomology/example/README @@ -4,13 +4,13 @@ cd /path-to-example/ cmake . 
make - -Example of use : +*********************************************************************************************************************** +Example of use of RIPS: Computation of the persistent homology with Z/2Z coefficients of the Rips complex on points sampling a Klein bottle: -./rips_persistence ../../../data/points/Kl.txt -r 0.25 -d 3 -p 2 -m 100 +./rips_persistence ../../data/points/Kl.txt -r 0.25 -d 3 -p 2 -m 100 output: 210 0 0 inf @@ -29,7 +29,7 @@ where with Z/3Z coefficients: -./rips_persistence ../../../data/points/Kl.txt -r 0.25 -d 3 -p 3 -m 100 +./rips_persistence ../../data/points/Kl.txt -r 0.25 -d 3 -p 3 -m 100 output: 3 0 0 inf @@ -37,7 +37,7 @@ output: and the computation with Z/2Z and Z/3Z coefficients simultaneously: -./rips_multifield_persistence ../../../data/points/Kl.txt -r 0.25 -d 3 -p 2 -q 3 -m 100 +./rips_multifield_persistence ../../data/points/Kl.txt -r 0.25 -d 3 -p 2 -q 3 -m 100 output: 6 0 0 inf @@ -47,10 +47,96 @@ output: and finally the computation with all Z/pZ for 2 <= p <= 71 (20 first prime numbers): - ./rips_multifield_persistence ../../../data/points/Kl.txt -r 0.25 -d 3 -p 2 -q 71 -m 100 + ./rips_multifield_persistence ../../data/points/Kl.txt -r 0.25 -d 3 -p 2 -q 71 -m 100 output: 557940830126698960967415390 0 0 inf 557940830126698960967415390 1 0.0702103 inf 2 1 0.0702103 inf 2 2 0.159992 inf + +*********************************************************************************************************************** +Example of use of ALPHA: + +For a more verbose mode, please run cmake with option "DEBUG_TRACES=TRUE" and recompile the programs. + +1) 3D special case +------------------ +Computation of the persistent homology with Z/2Z coefficients of the alpha complex on points +sampling a torus 3D: + +./alpha_complex_3d_persistence ../../data/points/tore3D_300.off 2 0.45 + +output: +Simplex_tree dim: 3 +2 0 0 inf +2 1 0.0682162 1.0001 +2 1 0.0934117 1.00003 +2 2 0.56444 1.03938 + +Here we retrieve expected Betti numbers on a tore 3D: +Betti numbers[0] = 1 +Betti numbers[1] = 2 +Betti numbers[2] = 1 + +N.B.: alpha_complex_3d_persistence accepts only OFF files in 3D dimension. + +2) d-Dimension case +------------------- +Computation of the persistent homology with Z/2Z coefficients of the alpha complex on points +sampling a torus 3D: + +./alpha_complex_persistence -r 32 -p 2 -m 0.45 ../../data/points/tore3D_300.off + +output: +Alpha complex is of dimension 3 - 9273 simplices - 300 vertices. +Simplex_tree dim: 3 +2 0 0 inf +2 1 0.0682162 1.0001 +2 1 0.0934117 1.00003 +2 2 0.56444 1.03938 + +Here we retrieve expected Betti numbers on a tore 3D: +Betti numbers[0] = 1 +Betti numbers[1] = 2 +Betti numbers[2] = 1 + +N.B.: alpha_complex_persistence accepts OFF files in d-Dimension. + +3) 3D periodic special case +--------------------------- +./periodic_alpha_complex_3d_persistence ../../data/points/grid_10_10_10_in_0_1.off 2 0.0 + +output: +Periodic Delaunay computed. +Simplex_tree dim: 3 +2 0 0.0866025 inf +2 1 0.0866025 inf +2 1 0.0866025 inf +2 1 0.0866025 inf +2 2 0.0866025 inf +2 2 0.0866025 inf +2 2 0.0866025 inf + +N.B.: periodic_alpha_complex_3d_persistence accepts only OFF files in 3D dimension. 
In this example, the periodic cube +is hard coded to { x = [0,1]; y = [0,1]; z = [0,1] } + +*********************************************************************************************************************** +Example of use of PLAIN HOMOLOGY: + +This example computes the plain homology of the following simplicial complex without filtration values: + /* Complex to build. */ + /* 1 3 */ + /* o---o */ + /* /X\ / */ + /* o---o o */ + /* 2 0 4 */ + +./plain_homology + +output: +2 0 0 inf +2 0 0 inf +2 1 0 inf + +Here we retrieve the 2 entities {0,1,2,3} and {4} (Betti numbers[0] = 2) and the hole in {0,1,3} (Betti numbers[1] = 1) \ No newline at end of file diff --git a/src/Persistent_cohomology/example/alpha_complex_3d_persistence.cpp b/src/Persistent_cohomology/example/alpha_complex_3d_persistence.cpp index 01497c7c..48fbb91a 100644 --- a/src/Persistent_cohomology/example/alpha_complex_3d_persistence.cpp +++ b/src/Persistent_cohomology/example/alpha_complex_3d_persistence.cpp @@ -20,9 +20,9 @@ * along with this program. If not, see . */ -#include #include #include +#include #include #include @@ -139,20 +139,18 @@ int main(int argc, char * const argv[]) { } // Read points from file - std::string filegraph = argv[1]; - std::list lp; - std::ifstream is(filegraph.c_str()); - int n; - is >> n; -#ifdef DEBUG_TRACES - std::cout << "Reading " << n << " points " << std::endl; -#endif // DEBUG_TRACES - Point_3 p; - for (; n > 0; n--) { - is >> p; - lp.push_back(p); + std::string offInputFile(argv[1]); + // Read the OFF file (input file name given as parameter) and triangulate points + Gudhi::Points_3D_off_reader off_reader(offInputFile); + // Check the read operation was correct + if (!off_reader.is_valid()) { + std::cerr << "Unable to read file " << offInputFile << std::endl; + usage(argv[0]); } + // Retrieve the triangulation + std::vector lp = off_reader.get_point_cloud(); + // alpha shape construction from points. CGAL has a strange behavior in REGULARIZED mode. Alpha_shape_3 as(lp.begin(), lp.end(), 0, Alpha_shape_3::GENERAL); #ifdef DEBUG_TRACES diff --git a/src/Persistent_cohomology/example/alpha_complex_persistence.cpp b/src/Persistent_cohomology/example/alpha_complex_persistence.cpp index 8f9f077c..17fb84d2 100644 --- a/src/Persistent_cohomology/example/alpha_complex_persistence.cpp +++ b/src/Persistent_cohomology/example/alpha_complex_persistence.cpp @@ -29,7 +29,7 @@ int main(int argc, char **argv) { // ---------------------------------------------------------------------------- // Init of an alpha complex from an OFF file // ---------------------------------------------------------------------------- - typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Kernel; + using Kernel = CGAL::Epick_d< CGAL::Dynamic_dimension_tag >; Gudhi::alphacomplex::Alpha_complex alpha_complex_from_file(off_file_points, alpha_square_max_value); // ---------------------------------------------------------------------------- diff --git a/src/Persistent_cohomology/example/periodic_alpha_complex_3d_persistence.cpp b/src/Persistent_cohomology/example/periodic_alpha_complex_3d_persistence.cpp new file mode 100644 index 00000000..e9425066 --- /dev/null +++ b/src/Persistent_cohomology/example/periodic_alpha_complex_3d_persistence.cpp @@ -0,0 +1,303 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. 
+ * + * Author(s): Vincent Rouvreau + * + * Copyright (C) 2014 INRIA Saclay (France) + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +#include +#include +#include +#include + +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include + +// Traits +using K = CGAL::Exact_predicates_inexact_constructions_kernel; +using PK = CGAL::Periodic_3_Delaunay_triangulation_traits_3; +// Vertex type +using DsVb = CGAL::Periodic_3_triangulation_ds_vertex_base_3<>; +using Vb = CGAL::Triangulation_vertex_base_3; +using AsVb = CGAL::Alpha_shape_vertex_base_3; +// Cell type +using DsCb = CGAL::Periodic_3_triangulation_ds_cell_base_3<>; +using Cb = CGAL::Triangulation_cell_base_3; +using AsCb = CGAL::Alpha_shape_cell_base_3; +using Tds = CGAL::Triangulation_data_structure_3; +using P3DT3 = CGAL::Periodic_3_Delaunay_triangulation_3; +using Alpha_shape_3 = CGAL::Alpha_shape_3; +using Point_3 = PK::Point_3; + +// filtration with alpha values needed type definition +using Alpha_value_type = Alpha_shape_3::FT; +using Object = CGAL::Object; +using Dispatch = CGAL::Dispatch_output_iterator< + CGAL::cpp11::tuple, + CGAL::cpp11::tuple >, + std::back_insert_iterator< std::vector > > >; +using Cell_handle = Alpha_shape_3::Cell_handle; +using Facet = Alpha_shape_3::Facet; +using Edge_3 = Alpha_shape_3::Edge; +using Vertex_list = std::list; + +// gudhi type definition +using ST = Gudhi::Simplex_tree; +using Simplex_tree_vertex = ST::Vertex_handle; +using Alpha_shape_simplex_tree_map = std::map; +using Alpha_shape_simplex_tree_pair = std::pair; +using Simplex_tree_vector_vertex = std::vector< Simplex_tree_vertex >; +using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology< + ST, Gudhi::persistent_cohomology::Field_Zp >; + +Vertex_list from(const Cell_handle& ch) { + Vertex_list the_list; + for (auto i = 0; i < 4; i++) { +#ifdef DEBUG_TRACES + std::cout << "from cell[" << i << "]=" << ch->vertex(i)->point() << std::endl; +#endif // DEBUG_TRACES + the_list.push_back(ch->vertex(i)); + } + return the_list; +} + +Vertex_list from(const Facet& fct) { + Vertex_list the_list; + for (auto i = 0; i < 4; i++) { + if (fct.second != i) { +#ifdef DEBUG_TRACES + std::cout << "from facet=[" << i << "]" << fct.first->vertex(i)->point() << std::endl; +#endif // DEBUG_TRACES + the_list.push_back(fct.first->vertex(i)); + } + } + return the_list; +} + +Vertex_list from(const Edge_3& edg) { + Vertex_list the_list; + for (auto i = 0; i < 4; i++) { + if ((edg.second == i) || (edg.third == i)) { +#ifdef DEBUG_TRACES + std::cout << "from edge[" << i << "]=" << edg.first->vertex(i)->point() << std::endl; +#endif // DEBUG_TRACES + the_list.push_back(edg.first->vertex(i)); + } + } + return the_list; +} + +Vertex_list from(const Alpha_shape_3::Vertex_handle& vh) { + Vertex_list the_list; +#ifdef DEBUG_TRACES + std::cout << "from vertex=" << vh->point() << std::endl; +#endif // 
DEBUG_TRACES + the_list.push_back(vh); + return the_list; +} + +void usage(char * const progName) { + std::cerr << "Usage: " << progName << + " path_to_file_graph coeff_field_characteristic[integer > 0] min_persistence[float >= -1.0]\n"; + exit(-1); +} + +int main(int argc, char * const argv[]) { + // program args management + if (argc != 4) { + std::cerr << "Error: Number of arguments (" << argc << ") is not correct\n"; + usage(argv[0]); + } + + int coeff_field_characteristic = 0; + int returnedScanValue = sscanf(argv[2], "%d", &coeff_field_characteristic); + if ((returnedScanValue == EOF) || (coeff_field_characteristic <= 0)) { + std::cerr << "Error: " << argv[2] << " is not correct\n"; + usage(argv[0]); + } + + Filtration_value min_persistence = 0.0; + returnedScanValue = sscanf(argv[3], "%lf", &min_persistence); + if ((returnedScanValue == EOF) || (min_persistence < -1.0)) { + std::cerr << "Error: " << argv[3] << " is not correct\n"; + usage(argv[0]); + } + + // Read points from file + std::string offInputFile(argv[1]); + // Read the OFF file (input file name given as parameter) and triangulate points + Gudhi::Points_3D_off_reader off_reader(offInputFile); + // Check the read operation was correct + if (!off_reader.is_valid()) { + std::cerr << "Unable to read file " << offInputFile << std::endl; + usage(argv[0]); + } + + // Retrieve the triangulation + std::vector lp = off_reader.get_point_cloud(); + + // Define the periodic cube + P3DT3 pdt(PK::Iso_cuboid_3(0,0,0,1,1,1)); + // Heuristic for inserting large point sets (if pts is reasonably large) + pdt.insert(lp.begin(), lp.end(), true); + // As pdt won't be modified anymore switch to 1-sheeted cover if possible + if (pdt.is_triangulation_in_1_sheet()) pdt.convert_to_1_sheeted_covering(); + std::cout << "Periodic Delaunay computed." << std::endl; + + // alpha shape construction from points. CGAL has a strange behavior in REGULARIZED mode. 
This is the default mode + // Maybe need to set it to GENERAL mode + Alpha_shape_3 as(pdt, Alpha_shape_3::GENERAL); + + // filtration with alpha values from alpha shape + std::vector the_objects; + std::vector the_alpha_values; + + Dispatch disp = CGAL::dispatch_output(std::back_inserter(the_objects), + std::back_inserter(the_alpha_values)); + + as.filtration_with_alpha_values(disp); +#ifdef DEBUG_TRACES + std::cout << "filtration_with_alpha_values returns : " << the_objects.size() << " objects" << std::endl; +#endif // DEBUG_TRACES + + Alpha_shape_3::size_type count_vertices = 0; + Alpha_shape_3::size_type count_edges = 0; + Alpha_shape_3::size_type count_facets = 0; + Alpha_shape_3::size_type count_cells = 0; + + // Loop on objects vector + Vertex_list vertex_list; + ST simplex_tree; + Alpha_shape_simplex_tree_map map_cgal_simplex_tree; + std::vector::iterator the_alpha_value_iterator = the_alpha_values.begin(); + int dim_max = 0; + Filtration_value filtration_max = 0.0; + for (auto object_iterator : the_objects) { + // Retrieve Alpha shape vertex list from object + if (const Cell_handle * cell = CGAL::object_cast(&object_iterator)) { + vertex_list = from(*cell); + count_cells++; + if (dim_max < 3) { + // Cell is of dim 3 + dim_max = 3; + } + } else if (const Facet * facet = CGAL::object_cast(&object_iterator)) { + vertex_list = from(*facet); + count_facets++; + if (dim_max < 2) { + // Facet is of dim 2 + dim_max = 2; + } + } else if (const Edge_3 * edge = CGAL::object_cast(&object_iterator)) { + vertex_list = from(*edge); + count_edges++; + if (dim_max < 1) { + // Edge_3 is of dim 1 + dim_max = 1; + } + } else if (const Alpha_shape_3::Vertex_handle * vertex = + CGAL::object_cast(&object_iterator)) { + count_vertices++; + vertex_list = from(*vertex); + } + // Construction of the vector of simplex_tree vertex from list of alpha_shapes vertex + Simplex_tree_vector_vertex the_simplex_tree; + for (auto the_alpha_shape_vertex : vertex_list) { + Alpha_shape_simplex_tree_map::iterator the_map_iterator = map_cgal_simplex_tree.find(the_alpha_shape_vertex); + if (the_map_iterator == map_cgal_simplex_tree.end()) { + // alpha shape not found + Simplex_tree_vertex vertex = map_cgal_simplex_tree.size(); +#ifdef DEBUG_TRACES + std::cout << "vertex [" << the_alpha_shape_vertex->point() << "] not found - insert " << vertex << std::endl; +#endif // DEBUG_TRACES + the_simplex_tree.push_back(vertex); + map_cgal_simplex_tree.insert(Alpha_shape_simplex_tree_pair(the_alpha_shape_vertex, vertex)); + } else { + // alpha shape found + Simplex_tree_vertex vertex = the_map_iterator->second; +#ifdef DEBUG_TRACES + std::cout << "vertex [" << the_alpha_shape_vertex->point() << "] found in " << vertex << std::endl; +#endif // DEBUG_TRACES + the_simplex_tree.push_back(vertex); + } + } + // Construction of the simplex_tree + Filtration_value filtr = std::sqrt(*the_alpha_value_iterator); +#ifdef DEBUG_TRACES + std::cout << "filtration = " << filtr << std::endl; +#endif // DEBUG_TRACES + if (filtr > filtration_max) { + filtration_max = filtr; + } + simplex_tree.insert_simplex(the_simplex_tree, filtr); + if (the_alpha_value_iterator != the_alpha_values.end()) + ++the_alpha_value_iterator; + else + std::cout << "This shall not happen" << std::endl; + } + simplex_tree.set_filtration(filtration_max); + simplex_tree.set_dimension(dim_max); + +#ifdef DEBUG_TRACES + std::cout << "vertices \t\t" << count_vertices << std::endl; + std::cout << "edges \t\t" << count_edges << std::endl; + std::cout << "facets \t\t" << count_facets << 
std::endl; + std::cout << "cells \t\t" << count_cells << std::endl; + + + std::cout << "Information of the Simplex Tree: " << std::endl; + std::cout << " Number of vertices = " << simplex_tree.num_vertices() << " "; + std::cout << " Number of simplices = " << simplex_tree.num_simplices() << std::endl << std::endl; + std::cout << " Dimension = " << simplex_tree.dimension() << " "; + std::cout << " filtration = " << simplex_tree.filtration() << std::endl << std::endl; +#endif // DEBUG_TRACES + +#ifdef DEBUG_TRACES + std::cout << "Iterator on vertices: " << std::endl; + for (auto vertex : simplex_tree.complex_vertex_range()) { + std::cout << vertex << " "; + } +#endif // DEBUG_TRACES + + // Sort the simplices in the order of the filtration + simplex_tree.initialize_filtration(); + + std::cout << "Simplex_tree dim: " << simplex_tree.dimension() << std::endl; + // Compute the persistence diagram of the complex + Persistent_cohomology pcoh(simplex_tree); + // initializes the coefficient field for homology + pcoh.init_coefficients(coeff_field_characteristic); + + pcoh.compute_persistent_cohomology(min_persistence); + + pcoh.output_diagram(); + + return 0; +} diff --git a/src/common/example/CGAL_3D_points_off_reader.cpp b/src/common/example/CGAL_3D_points_off_reader.cpp new file mode 100644 index 00000000..d48bb17d --- /dev/null +++ b/src/common/example/CGAL_3D_points_off_reader.cpp @@ -0,0 +1,41 @@ +#include + +#include + +#include +#include +#include + +using Kernel = CGAL::Exact_predicates_inexact_constructions_kernel; +using Point_3 = Kernel::Point_3; + +void usage(char * const progName) { + std::cerr << "Usage: " << progName << " inputFile.off" << std::endl; + exit(-1); +} + +int main(int argc, char **argv) { + if (argc != 2) { + std::cerr << "Error: Number of arguments (" << argc << ") is not correct" << std::endl; + usage(argv[0]); + } + + std::string offInputFile(argv[1]); + // Read the OFF file (input file name given as parameter) and triangulate points + Gudhi::Points_3D_off_reader off_reader(offInputFile); + // Check the read operation was correct + if (!off_reader.is_valid()) { + std::cerr << "Unable to read file " << offInputFile << std::endl; + usage(argv[0]); + } + + // Retrieve the triangulation + std::vector point_cloud = off_reader.get_point_cloud(); + + int n {0}; + for (auto point : point_cloud) { + ++n; + std::cout << "Point[" << n << "] = (" << point[0] << ", " << point[1] << ", " << point[2] << ")\n"; + } + return 0; +} diff --git a/src/common/example/CGAL_points_off_reader.cpp b/src/common/example/CGAL_points_off_reader.cpp index 45e9f1e6..997b47c1 100644 --- a/src/common/example/CGAL_points_off_reader.cpp +++ b/src/common/example/CGAL_points_off_reader.cpp @@ -8,17 +8,19 @@ #include #include -typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Kernel; -typedef typename Kernel::Point_d Point_d; +using Kernel = CGAL::Epick_d< CGAL::Dynamic_dimension_tag >; +using Point_d = typename Kernel::Point_d; -void usage(int argc, char * const progName) { - std::cerr << "Error: Number of arguments (" << argc << ") is not correct" << std::endl; +void usage(char * const progName) { std::cerr << "Usage: " << progName << " inputFile.off" << std::endl; exit(-1); } int main(int argc, char **argv) { - if (argc != 2) usage(argc, (argv[0] - 1)); + if (argc != 2) { + std::cerr << "Error: Number of arguments (" << argc << ") is not correct" << std::endl; + usage(argv[0]); + } std::string offInputFile(argv[1]); // Read the OFF file (input file name given as parameter) and triangulate points @@ 
-26,16 +28,16 @@ int main(int argc, char **argv) { // Check the read operation was correct if (!off_reader.is_valid()) { std::cerr << "Unable to read file " << offInputFile << std::endl; - exit(-1); + usage(argv[0]); } // Retrieve the triangulation std::vector point_cloud = off_reader.get_point_cloud(); - int n = 0; + int n {0}; for (auto point : point_cloud) { std::cout << "Point[" << n << "] = "; - for (int i = 0; i < point.dimension(); i++) + for (int i {0}; i < point.dimension(); i++) std::cout << point[i] << " "; std::cout << "\n"; ++n; diff --git a/src/common/example/CMakeLists.txt b/src/common/example/CMakeLists.txt index 5aeaa8c6..ee6c9058 100644 --- a/src/common/example/CMakeLists.txt +++ b/src/common/example/CMakeLists.txt @@ -3,6 +3,10 @@ project(GUDHIDelaunayTriangulationOffFileReadWrite) # need CGAL 4.7 if(CGAL_FOUND) + add_executable ( cgal3Doffreader CGAL_3D_points_off_reader.cpp ) + target_link_libraries(cgal3Doffreader ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) + add_test(cgal3Doffreader ${CMAKE_CURRENT_BINARY_DIR}/cgaloffreader ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off) + if (NOT CGAL_VERSION VERSION_LESS 4.7.0) find_package(Eigen3 3.1.0) if (EIGEN3_FOUND) diff --git a/src/common/include/gudhi/Points_3D_off_io.h b/src/common/include/gudhi/Points_3D_off_io.h new file mode 100644 index 00000000..02e6f910 --- /dev/null +++ b/src/common/include/gudhi/Points_3D_off_io.h @@ -0,0 +1,206 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Vincent Rouvreau + * + * Copyright (C) 2015 INRIA Saclay (France) + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ +#ifndef POINTS_3D_OFF_IO_H_ +#define POINTS_3D_OFF_IO_H_ + +#include + +#include +#include +#include +#include + +namespace Gudhi { + +/** + * @brief OFF file visitor implementation according to Off_reader in order to read points from an OFF file. + */ +template +class Points_3D_off_visitor_reader { + private: + std::vector point_cloud_; + bool valid_; + + public: + + /** @brief Off_reader visitor init implementation. + * + * The init parameters are set from OFF file header. + * Dimension value is required and the value must be 3. + * + * @param[in] dim space dimension of vertices. + * @param[in] num_vertices number of vertices in the OFF file (not used). + * @param[in] num_faces number of faces in the OFF file (not used). + * @param[in] num_edges number of edges in the OFF file (not used). 
+ */ + void init(int dim, int num_vertices, int num_faces, int num_edges) { +#ifdef DEBUG_TRACES + std::cout << "Points_3D_off_visitor_reader::init - dim=" << dim << " - num_vertices=" << + num_vertices << " - num_faces=" << num_faces << " - num_edges=" << num_edges << std::endl; +#endif // DEBUG_TRACES + if (dim == 3) { + valid_ = true; + } else { + valid_ = false; + std::cerr << "Points_3D_off_reader::Points_3D_off_reader cannot read OFF files in dimension " << dim << "\n"; + } + + if (num_faces > 0) { + std::cerr << "Points_3D_off_visitor_reader::init faces are not taken into account from OFF file for Points.\n"; + } + if (num_edges > 0) { + std::cerr << "Points_3D_off_visitor_reader::init edges are not taken into account from OFF file for Points.\n"; + } + } + + /** @brief Off_reader visitor point implementation. + * + * The point function is called on each vertex line from OFF file. + * This function inserts the vertex in the vector of points. + * + * @param[in] point vector of vertex coordinates. + * + * @details + * Point_3 must have a constructor with the following form: + * + * @code template Point_3::Point_3(double x, double y, double z) @endcode + */ + void point(const std::vector& point) { + if (valid_) { +#ifdef DEBUG_TRACES + std::cout << "Points_3D_off_visitor_reader::point "; + for (auto coordinate : point) { + std::cout << coordinate << " | "; + } + std::cout << std::endl; +#endif // DEBUG_TRACES + // Fill the point cloud + point_cloud_.push_back(Point_3(point[0], point[1], point[2])); + } + } + + // Off_reader visitor maximal_face implementation - Only points are read + + void maximal_face(const std::vector& face) { } + + // Off_reader visitor done implementation - Only points are read + + void done() { } + + /** @brief Point cloud getter. + * + * @return The point cloud. + */ + const std::vector& get_point_cloud() const { + return point_cloud_; + } + + /** @brief Returns if the OFF file read operation was successful or not. + * + * @return OFF file read status. + */ + bool is_valid() const { + return valid_; + } +}; + +/** + * \@brief OFF file reader implementation in order to read dimension 3 points from an OFF file. + * + * @details + * This class is using the Points_3D_off_visitor_reader to visit the OFF file according to Off_reader. + * + * Point_3 must have a constructor with the following form: + * + * @code template Point_3::Point_3(double x, double y, double z) @endcode + * + * @section Example + * + * This example loads points from an OFF file and builds a vector of CGAL points in dimension 3. + * Then, it is asked to display the points. + * + * Asserts + * + * @include common/CGAL_Points_3D_off_reader.cpp + * + * When launching: + * + * @code $> ./cgal3Doffreader ../../data/points/alphacomplexdoc.off + * @endcode + * + * the program output is: + * + * @include common/cgal3Doffreader_result.txt + */ +template +class Points_3D_off_reader { + public: + + /** @brief Reads the OFF file and constructs a vector of points from the points + * that are in the OFF file. + * + * @param[in] name_file OFF file to read. + * + * @post Check with is_valid() function to see if read operation was successful. 
+ */ + Points_3D_off_reader(const std::string& name_file) + : valid_(false) { + std::ifstream stream(name_file); + if (stream.is_open()) { + Off_reader off_reader(stream); + Points_3D_off_visitor_reader off_visitor; + valid_ = off_reader.read(off_visitor); + valid_ = valid_ && off_visitor.is_valid(); + if (valid_) { + point_cloud = off_visitor.get_point_cloud(); + } + } else { + std::cerr << "Points_3D_off_reader::Points_3D_off_reader could not open file " << name_file << "\n"; + } + } + + /** @brief Returns if the OFF file read operation was successful or not. + * + * @return OFF file read status. + */ + bool is_valid() const { + return valid_; + } + + /** @brief Point cloud getter. + * + * @return point_cloud. + */ + const std::vector& get_point_cloud() const { + return point_cloud; + } + + private: + /** @brief point_cloud.*/ + std::vector point_cloud; + /** @brief OFF file read status.*/ + bool valid_; +}; + +} // namespace Gudhi + +#endif // POINTS_3D_OFF_IO_H_ diff --git a/src/common/include/gudhi/Points_off_io.h b/src/common/include/gudhi/Points_off_io.h index 77f36be2..74b49386 100644 --- a/src/common/include/gudhi/Points_off_io.h +++ b/src/common/include/gudhi/Points_off_io.h @@ -43,7 +43,7 @@ class Points_off_visitor_reader { /** \brief Off_reader visitor init implementation. * * The init parameters are set from OFF file header. - * Dimension value is required in order to construct Alpha complex. + * Dimension value is required in order to construct a vector of points. * * @param[in] dim space dimension of vertices. * @param[in] num_vertices number of vertices in the OFF file (not used). @@ -63,12 +63,19 @@ class Points_off_visitor_reader { } } - /** \brief Off_reader visitor point implementation. + /** @brief Off_reader visitor point implementation. * * The point function is called on each vertex line from OFF file. - * This function inserts the vertex in the Alpha complex. + * This function inserts the vertex in the vector of points. * * @param[in] point vector of vertex coordinates. + * + * @details + * Point_d must have a constructor with the following form: + * + * @code template Point_d::Point_d(int d, InputIterator first, InputIterator last) @endcode + * + * where d is the point dimension. */ void point(const std::vector& point) { #ifdef DEBUG_TRACES @@ -127,7 +134,7 @@ class Points_off_visitor_reader { template class Points_off_reader { public: - /** \brief Reads the OFF file and constructs the Alpha complex from the points + /** \brief Reads the OFF file and constructs a vector of points from the points * that are in the OFF file. * * @param[in] name_file OFF file to read. 
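For orientation, here is a minimal, self-contained usage sketch of the Points_3D_off_reader introduced in the commit above. It is not part of the patch: the CGAL kernel choice, the alias names and the OFF file path are placeholder assumptions mirroring the cgal3Doffreader example, while the reader calls (constructor taking a file name, is_valid(), get_point_cloud()) come from the header added above.

#include "gudhi/Points_3D_off_io.h"

#include <CGAL/Exact_predicates_inexact_constructions_kernel.h>

#include <iostream>
#include <string>
#include <vector>

using Kernel = CGAL::Exact_predicates_inexact_constructions_kernel;
using Point_3 = Kernel::Point_3;

int main() {
  // Placeholder path: any 3-dimensional OFF file, e.g. data/points/tore3D_300.off.
  std::string off_file_name("../../data/points/tore3D_300.off");

  // The template argument is the point type the vertex coordinates are converted to;
  // it only needs a Point_3(double x, double y, double z) constructor, as documented above.
  Gudhi::Points_3D_off_reader<Point_3> off_reader(off_file_name);
  if (!off_reader.is_valid()) {
    std::cerr << "Unable to read file " << off_file_name << std::endl;
    return 1;
  }

  // get_point_cloud() returns one Point_3 per vertex line of the OFF file.
  std::vector<Point_3> point_cloud = off_reader.get_point_cloud();
  std::cout << point_cloud.size() << " points read" << std::endl;
  return 0;
}

The d-dimensional Points_off_reader documented in the same commit is used the same way, with a Point_d type (constructible from an iterator range of coordinates) in place of Point_3.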
-- cgit v1.2.3 From 7e71749f8aa73236a244394eedd5f662ec3a1889 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Thu, 14 Apr 2016 14:30:17 +0000 Subject: fix example test run git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/periodic_alpha_complex_3d@1118 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: ee8dbd1e8e0dcc902076a80c7c42dc0327adb95e --- src/common/example/CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'src/common') diff --git a/src/common/example/CMakeLists.txt b/src/common/example/CMakeLists.txt index ee6c9058..83f874e1 100644 --- a/src/common/example/CMakeLists.txt +++ b/src/common/example/CMakeLists.txt @@ -5,7 +5,7 @@ project(GUDHIDelaunayTriangulationOffFileReadWrite) if(CGAL_FOUND) add_executable ( cgal3Doffreader CGAL_3D_points_off_reader.cpp ) target_link_libraries(cgal3Doffreader ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) - add_test(cgal3Doffreader ${CMAKE_CURRENT_BINARY_DIR}/cgaloffreader ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off) + add_test(cgal3Doffreader ${CMAKE_CURRENT_BINARY_DIR}/cgal3Doffreader ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off) if (NOT CGAL_VERSION VERSION_LESS 4.7.0) find_package(Eigen3 3.1.0) -- cgit v1.2.3 From 3e46bcd4ec49e35937108bc44a3c0434a9ca174a Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Fri, 15 Apr 2016 12:12:45 +0000 Subject: Doc bug fix git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/periodic_alpha_complex_3d@1121 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 48bb0bb605dfdb9c20bcf5d8de049aef6caf15a5 --- .../example/periodic_alpha_complex_3d_persistence.cpp | 16 ++++++++-------- src/common/doc/main_page.h | 14 ++++++++++---- src/common/include/gudhi/Points_3D_off_io.h | 8 +++----- 3 files changed, 21 insertions(+), 17 deletions(-) (limited to 'src/common') diff --git a/src/Persistent_cohomology/example/periodic_alpha_complex_3d_persistence.cpp b/src/Persistent_cohomology/example/periodic_alpha_complex_3d_persistence.cpp index e9425066..42e6f0c6 100644 --- a/src/Persistent_cohomology/example/periodic_alpha_complex_3d_persistence.cpp +++ b/src/Persistent_cohomology/example/periodic_alpha_complex_3d_persistence.cpp @@ -45,14 +45,14 @@ using K = CGAL::Exact_predicates_inexact_constructions_kernel; using PK = CGAL::Periodic_3_Delaunay_triangulation_traits_3; // Vertex type using DsVb = CGAL::Periodic_3_triangulation_ds_vertex_base_3<>; -using Vb = CGAL::Triangulation_vertex_base_3; -using AsVb = CGAL::Alpha_shape_vertex_base_3; +using Vb = CGAL::Triangulation_vertex_base_3; +using AsVb = CGAL::Alpha_shape_vertex_base_3; // Cell type using DsCb = CGAL::Periodic_3_triangulation_ds_cell_base_3<>; -using Cb = CGAL::Triangulation_cell_base_3; -using AsCb = CGAL::Alpha_shape_cell_base_3; -using Tds = CGAL::Triangulation_data_structure_3; -using P3DT3 = CGAL::Periodic_3_Delaunay_triangulation_3; +using Cb = CGAL::Triangulation_cell_base_3; +using AsCb = CGAL::Alpha_shape_cell_base_3; +using Tds = CGAL::Triangulation_data_structure_3; +using P3DT3 = CGAL::Periodic_3_Delaunay_triangulation_3; using Alpha_shape_3 = CGAL::Alpha_shape_3; using Point_3 = PK::Point_3; @@ -162,9 +162,9 @@ int main(int argc, char * const argv[]) { // Retrieve the triangulation std::vector lp = off_reader.get_point_cloud(); - + // Define the periodic cube - P3DT3 pdt(PK::Iso_cuboid_3(0,0,0,1,1,1)); + P3DT3 pdt(PK::Iso_cuboid_3(0, 0, 0, 1, 1, 1)); // Heuristic for inserting large point sets (if pts is reasonably large) pdt.insert(lp.begin(), lp.end(), true); // As pdt won't be modified anymore 
switch to 1-sheeted cover if possible diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h index 063e0a11..ecf3251f 100644 --- a/src/common/doc/main_page.h +++ b/src/common/doc/main_page.h @@ -189,8 +189,6 @@ * Library (CGAL \cite cgal:eb-15b) and will not be built if CGAL is not installed: * \li * Persistent_cohomology/alpha_complex_3d_persistence.cpp - * \li - * Persistent_cohomology/alpha_complex_persistence.cpp * \li * Simplex_tree/simplex_tree_from_alpha_shapes_3.cpp * \li @@ -207,8 +205,10 @@ * Alpha_complex/Alpha_complex_from_off.cpp * \li * Alpha_complex/Alpha_complex_from_points.cpp - * \li - * common/CGAL_points_off_reader.cpp + * \li + * Persistent_cohomology/alpha_complex_persistence.cpp + * \li + * Persistent_cohomology/periodic_alpha_complex_3d_persistence.cpp * * \subsection eigen3 Eigen3: * Eigen3 is a C++ template library for linear algebra: @@ -222,6 +222,8 @@ * Alpha_complex/Alpha_complex_from_points.cpp (requires also Eigen3) * \li * Persistent_cohomology/alpha_complex_persistence.cpp + * \li + * Persistent_cohomology/periodic_alpha_complex_3d_persistence.cpp * * \subsection tbb Threading Building Blocks: * Intel® TBB lets you easily write parallel @@ -267,6 +269,8 @@ * Persistent_cohomology/rips_multifield_persistence.cpp * \li * Persistent_cohomology/rips_persistence.cpp + * \li + * Persistent_cohomology/periodic_alpha_complex_3d_persistence.cpp * * \subsection demos Demos and examples * To build the demos and examples, run the following commands in a terminal: @@ -307,6 +311,7 @@ make \endverbatim * @example Bitmap_cubical_complex/Bitmap_cubical_complex.cpp * @example Bitmap_cubical_complex/Bitmap_cubical_complex_periodic_boundary_conditions.cpp * @example Bitmap_cubical_complex/Random_bitmap_cubical_complex.cpp + * @example common/CGAL_3D_points_off_reader.cpp * @example common/CGAL_points_off_reader.cpp * @example Contraction/Garland_heckbert.cpp * @example Contraction/Rips_contraction.cpp @@ -314,6 +319,7 @@ make \endverbatim * @example Persistent_cohomology/alpha_complex_persistence.cpp * @example Persistent_cohomology/parallel_rips_persistence.cpp * @example Persistent_cohomology/performance_rips_persistence.cpp + * @example Persistent_cohomology/periodic_alpha_complex_3d_persistence.cpp * @example Persistent_cohomology/persistence_from_file.cpp * @example Persistent_cohomology/persistence_from_simple_simplex_tree.cpp * @example Persistent_cohomology/plain_homology.cpp diff --git a/src/common/include/gudhi/Points_3D_off_io.h b/src/common/include/gudhi/Points_3D_off_io.h index 02e6f910..45c8455e 100644 --- a/src/common/include/gudhi/Points_3D_off_io.h +++ b/src/common/include/gudhi/Points_3D_off_io.h @@ -41,7 +41,6 @@ class Points_3D_off_visitor_reader { bool valid_; public: - /** @brief Off_reader visitor init implementation. * * The init parameters are set from OFF file header. @@ -140,11 +139,11 @@ class Points_3D_off_visitor_reader { * * Asserts * - * @include common/CGAL_Points_3D_off_reader.cpp + * @include common/CGAL_3D_points_off_reader.cpp * * When launching: * - * @code $> ./cgal3Doffreader ../../data/points/alphacomplexdoc.off + * @code $> ./cgal3Doffreader ../../data/points/tore3D_300.off * @endcode * * the program output is: @@ -154,7 +153,6 @@ class Points_3D_off_visitor_reader { template class Points_3D_off_reader { public: - /** @brief Reads the OFF file and constructs a vector of points from the points * that are in the OFF file. 
* @@ -201,6 +199,6 @@ class Points_3D_off_reader { bool valid_; }; -} // namespace Gudhi +} // namespace Gudhi #endif // POINTS_3D_OFF_IO_H_ -- cgit v1.2.3 From 8c9f21dee8b91fcb0b8073fe3e5c2fcbe07206e5 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Fri, 15 Apr 2016 12:28:35 +0000 Subject: example result forgotten git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/periodic_alpha_complex_3d@1123 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 0678c13104b203e2a329c44aa6db04e9283f4580 --- src/common/example/cgal3Doffreader_result.txt | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 src/common/example/cgal3Doffreader_result.txt (limited to 'src/common') diff --git a/src/common/example/cgal3Doffreader_result.txt b/src/common/example/cgal3Doffreader_result.txt new file mode 100644 index 00000000..f992c8e3 --- /dev/null +++ b/src/common/example/cgal3Doffreader_result.txt @@ -0,0 +1,8 @@ +Point[1] = (0.959535, -0.418347, 0.302237) +Point[2] = (2.16795, 1.85348, -0.52312) +Point[3] = (-2.38753, -1.50911, -0.565889) +Point[4] = (-2.70428, -1.25688, 0.188394) +Point[5] = (-1.22932, -1.64337, -0.998632) +... +Point[300] = (-0.56244, 2.6018, -0.749591) + -- cgit v1.2.3 From b636fbff81f232788e1d25515998bc44381589bf Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Fri, 15 Apr 2016 14:56:06 +0000 Subject: rename parallel_rips_persistence in rips_persistence_via_boundary_matrix git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/trunk@1129 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 5c8b71aeb701a181381bf8d628ed65f1d7e214df --- src/Persistent_cohomology/example/CMakeLists.txt | 8 +- .../example/parallel_rips_persistence.cpp | 180 --------------------- .../rips_persistence_via_boundary_matrix.cpp | 180 +++++++++++++++++++++ src/common/doc/main_page.h | 6 +- 4 files changed, 187 insertions(+), 187 deletions(-) delete mode 100644 src/Persistent_cohomology/example/parallel_rips_persistence.cpp create mode 100644 src/Persistent_cohomology/example/rips_persistence_via_boundary_matrix.cpp (limited to 'src/common') diff --git a/src/Persistent_cohomology/example/CMakeLists.txt b/src/Persistent_cohomology/example/CMakeLists.txt index ba4c518b..1f32da17 100644 --- a/src/Persistent_cohomology/example/CMakeLists.txt +++ b/src/Persistent_cohomology/example/CMakeLists.txt @@ -14,8 +14,8 @@ target_link_libraries(persistence_from_simple_simplex_tree ${Boost_SYSTEM_LIBRAR add_executable(rips_persistence rips_persistence.cpp) target_link_libraries(rips_persistence ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY}) -add_executable(parallel_rips_persistence parallel_rips_persistence.cpp) -target_link_libraries(parallel_rips_persistence ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY}) +add_executable(rips_persistence_via_boundary_matrix rips_persistence_via_boundary_matrix.cpp) +target_link_libraries(rips_persistence_via_boundary_matrix ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY}) add_executable(persistence_from_file persistence_from_file.cpp) target_link_libraries(persistence_from_file ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY}) @@ -24,14 +24,14 @@ if (TBB_FOUND) target_link_libraries(plain_homology ${TBB_RELEASE_LIBRARY}) target_link_libraries(persistence_from_simple_simplex_tree ${TBB_RELEASE_LIBRARY}) target_link_libraries(rips_persistence ${TBB_RELEASE_LIBRARY}) - target_link_libraries(parallel_rips_persistence ${TBB_RELEASE_LIBRARY}) + target_link_libraries(rips_persistence_via_boundary_matrix ${TBB_RELEASE_LIBRARY}) 
target_link_libraries(persistence_from_file ${TBB_RELEASE_LIBRARY}) endif() add_test(plain_homology ${CMAKE_CURRENT_BINARY_DIR}/plain_homology) add_test(persistence_from_simple_simplex_tree ${CMAKE_CURRENT_BINARY_DIR}/persistence_from_simple_simplex_tree 1 0) add_test(rips_persistence_3 ${CMAKE_CURRENT_BINARY_DIR}/rips_persistence ${CMAKE_SOURCE_DIR}/data/points/Kl.txt -r 0.2 -d 3 -p 3 -m 100) -add_test(parallel_rips_persistence_3 ${CMAKE_CURRENT_BINARY_DIR}/parallel_rips_persistence ${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.txt -r 0.3 -d 3 -p 3 -m 100) +add_test(rips_persistence_via_boundary_matrix_3 ${CMAKE_CURRENT_BINARY_DIR}/rips_persistence_via_boundary_matrix ${CMAKE_SOURCE_DIR}/data/points/tore3D_1307.txt -r 0.3 -d 3 -p 3 -m 100) add_test(persistence_from_file_3_2_0 ${CMAKE_CURRENT_BINARY_DIR}/persistence_from_file ${CMAKE_SOURCE_DIR}/data/points/bunny_5000.st -p 2 -m 0) add_test(persistence_from_file_3_3_100 ${CMAKE_CURRENT_BINARY_DIR}/persistence_from_file ${CMAKE_SOURCE_DIR}/data/points/bunny_5000.st -p 3 -m 100) diff --git a/src/Persistent_cohomology/example/parallel_rips_persistence.cpp b/src/Persistent_cohomology/example/parallel_rips_persistence.cpp deleted file mode 100644 index 4c6656f5..00000000 --- a/src/Persistent_cohomology/example/parallel_rips_persistence.cpp +++ /dev/null @@ -1,180 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Clément Maria, Marc Glisse - * - * Copyright (C) 2014 INRIA Sophia Antipolis-Méditerranée (France), - * 2015 INRIA Saclay ÃŽle de France) - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - -#include -#include -#include -#include -#include -#include - -#include - -#ifdef GUDHI_USE_TBB -#include -#endif - -#include -#include - -//////////////////////////////////////////////////////////////// -// // -// WARNING: persistence computation itself is not parallel, // -// and this uses more memory than rips_persistence. 
// -// // -//////////////////////////////////////////////////////////////// - -using namespace Gudhi; -using namespace Gudhi::persistent_cohomology; - -typedef int Vertex_handle; -typedef double Filtration_value; - -void program_options(int argc, char * argv[] - , std::string & filepoints - , std::string & filediag - , Filtration_value & threshold - , int & dim_max - , int & p - , Filtration_value & min_persistence); - -int main(int argc, char * argv[]) { - std::string filepoints; - std::string filediag; - Filtration_value threshold; - int dim_max; - int p; - Filtration_value min_persistence; - - program_options(argc, argv, filepoints, filediag, threshold, dim_max, p, min_persistence); - - // Extract the points from the file filepoints - typedef std::vector Point_t; - std::vector< Point_t > points; - read_points(filepoints, points); - - // Compute the proximity graph of the points - Graph_t prox_graph = compute_proximity_graph(points, threshold - , euclidean_distance); - - // Construct the Rips complex in a Simplex Tree - Simplex_tree<>& st = *new Simplex_tree<>; - // insert the proximity graph in the simplex tree - st.insert_graph(prox_graph); - // expand the graph until dimension dim_max - st.expansion(dim_max); - - std::cout << "The complex contains " << st.num_simplices() << " simplices \n"; - std::cout << " and has dimension " << st.dimension() << " \n"; - -#ifdef GUDHI_USE_TBB - // Unnecessary, but clarifies which operations are parallel. - tbb::task_scheduler_init ts; -#endif - - // Sort the simplices in the order of the filtration - st.initialize_filtration(); - int count = 0; - for (auto sh : st.filtration_simplex_range()) - st.assign_key(sh, count++); - - // Convert to a more convenient representation. - Hasse_complex<> hcpx(st); - -#ifdef GUDHI_USE_TBB - ts.terminate(); -#endif - - // Free some space. - delete &st; - - // Compute the persistence diagram of the complex - persistent_cohomology::Persistent_cohomology< Hasse_complex<>, Field_Zp > pcoh(hcpx); - // initializes the coefficient field for homology - pcoh.init_coefficients(p); - - pcoh.compute_persistent_cohomology(min_persistence); - - // Output the diagram in filediag - if (filediag.empty()) { - pcoh.output_diagram(); - } else { - std::ofstream out(filediag); - pcoh.output_diagram(out); - out.close(); - } -} - -void program_options(int argc, char * argv[] - , std::string & filepoints - , std::string & filediag - , Filtration_value & threshold - , int & dim_max - , int & p - , Filtration_value & min_persistence) { - namespace po = boost::program_options; - po::options_description hidden("Hidden options"); - hidden.add_options() - ("input-file", po::value(&filepoints), - "Name of file containing a point set. Format is one point per line: X1 ... Xd "); - - po::options_description visible("Allowed options", 100); - visible.add_options() - ("help,h", "produce help message") - ("output-file,o", po::value(&filediag)->default_value(std::string()), - "Name of file in which the persistence diagram is written. Default print in std::cout") - ("max-edge-length,r", po::value(&threshold)->default_value(0), - "Maximal length of an edge for the Rips complex construction.") - ("cpx-dimension,d", po::value(&dim_max)->default_value(1), - "Maximal dimension of the Rips complex we want to compute.") - ("field-charac,p", po::value(&p)->default_value(11), - "Characteristic p of the coefficient field Z/pZ for computing homology.") - ("min-persistence,m", po::value(&min_persistence), - "Minimal lifetime of homology feature to be recorded. 
Default is 0. Enter a negative value to see zero length intervals"); - - po::positional_options_description pos; - pos.add("input-file", 1); - - po::options_description all; - all.add(visible).add(hidden); - - po::variables_map vm; - po::store(po::command_line_parser(argc, argv). - options(all).positional(pos).run(), vm); - po::notify(vm); - - if (vm.count("help") || !vm.count("input-file")) { - std::cout << std::endl; - std::cout << "Compute the persistent homology with coefficient field Z/pZ \n"; - std::cout << "of a Rips complex defined on a set of input points.\n \n"; - std::cout << "The output diagram contains one bar per line, written with the convention: \n"; - std::cout << " p dim b d \n"; - std::cout << "where dim is the dimension of the homological feature,\n"; - std::cout << "b and d are respectively the birth and death of the feature and \n"; - std::cout << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl; - - std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl; - std::cout << visible << std::endl; - std::abort(); - } -} diff --git a/src/Persistent_cohomology/example/rips_persistence_via_boundary_matrix.cpp b/src/Persistent_cohomology/example/rips_persistence_via_boundary_matrix.cpp new file mode 100644 index 00000000..4c6656f5 --- /dev/null +++ b/src/Persistent_cohomology/example/rips_persistence_via_boundary_matrix.cpp @@ -0,0 +1,180 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Clément Maria, Marc Glisse + * + * Copyright (C) 2014 INRIA Sophia Antipolis-Méditerranée (France), + * 2015 INRIA Saclay ÃŽle de France) + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +#include +#include +#include +#include +#include +#include + +#include + +#ifdef GUDHI_USE_TBB +#include +#endif + +#include +#include + +//////////////////////////////////////////////////////////////// +// // +// WARNING: persistence computation itself is not parallel, // +// and this uses more memory than rips_persistence. 
// +// // +//////////////////////////////////////////////////////////////// + +using namespace Gudhi; +using namespace Gudhi::persistent_cohomology; + +typedef int Vertex_handle; +typedef double Filtration_value; + +void program_options(int argc, char * argv[] + , std::string & filepoints + , std::string & filediag + , Filtration_value & threshold + , int & dim_max + , int & p + , Filtration_value & min_persistence); + +int main(int argc, char * argv[]) { + std::string filepoints; + std::string filediag; + Filtration_value threshold; + int dim_max; + int p; + Filtration_value min_persistence; + + program_options(argc, argv, filepoints, filediag, threshold, dim_max, p, min_persistence); + + // Extract the points from the file filepoints + typedef std::vector Point_t; + std::vector< Point_t > points; + read_points(filepoints, points); + + // Compute the proximity graph of the points + Graph_t prox_graph = compute_proximity_graph(points, threshold + , euclidean_distance); + + // Construct the Rips complex in a Simplex Tree + Simplex_tree<>& st = *new Simplex_tree<>; + // insert the proximity graph in the simplex tree + st.insert_graph(prox_graph); + // expand the graph until dimension dim_max + st.expansion(dim_max); + + std::cout << "The complex contains " << st.num_simplices() << " simplices \n"; + std::cout << " and has dimension " << st.dimension() << " \n"; + +#ifdef GUDHI_USE_TBB + // Unnecessary, but clarifies which operations are parallel. + tbb::task_scheduler_init ts; +#endif + + // Sort the simplices in the order of the filtration + st.initialize_filtration(); + int count = 0; + for (auto sh : st.filtration_simplex_range()) + st.assign_key(sh, count++); + + // Convert to a more convenient representation. + Hasse_complex<> hcpx(st); + +#ifdef GUDHI_USE_TBB + ts.terminate(); +#endif + + // Free some space. + delete &st; + + // Compute the persistence diagram of the complex + persistent_cohomology::Persistent_cohomology< Hasse_complex<>, Field_Zp > pcoh(hcpx); + // initializes the coefficient field for homology + pcoh.init_coefficients(p); + + pcoh.compute_persistent_cohomology(min_persistence); + + // Output the diagram in filediag + if (filediag.empty()) { + pcoh.output_diagram(); + } else { + std::ofstream out(filediag); + pcoh.output_diagram(out); + out.close(); + } +} + +void program_options(int argc, char * argv[] + , std::string & filepoints + , std::string & filediag + , Filtration_value & threshold + , int & dim_max + , int & p + , Filtration_value & min_persistence) { + namespace po = boost::program_options; + po::options_description hidden("Hidden options"); + hidden.add_options() + ("input-file", po::value(&filepoints), + "Name of file containing a point set. Format is one point per line: X1 ... Xd "); + + po::options_description visible("Allowed options", 100); + visible.add_options() + ("help,h", "produce help message") + ("output-file,o", po::value(&filediag)->default_value(std::string()), + "Name of file in which the persistence diagram is written. Default print in std::cout") + ("max-edge-length,r", po::value(&threshold)->default_value(0), + "Maximal length of an edge for the Rips complex construction.") + ("cpx-dimension,d", po::value(&dim_max)->default_value(1), + "Maximal dimension of the Rips complex we want to compute.") + ("field-charac,p", po::value(&p)->default_value(11), + "Characteristic p of the coefficient field Z/pZ for computing homology.") + ("min-persistence,m", po::value(&min_persistence), + "Minimal lifetime of homology feature to be recorded. 
Default is 0. Enter a negative value to see zero length intervals"); + + po::positional_options_description pos; + pos.add("input-file", 1); + + po::options_description all; + all.add(visible).add(hidden); + + po::variables_map vm; + po::store(po::command_line_parser(argc, argv). + options(all).positional(pos).run(), vm); + po::notify(vm); + + if (vm.count("help") || !vm.count("input-file")) { + std::cout << std::endl; + std::cout << "Compute the persistent homology with coefficient field Z/pZ \n"; + std::cout << "of a Rips complex defined on a set of input points.\n \n"; + std::cout << "The output diagram contains one bar per line, written with the convention: \n"; + std::cout << " p dim b d \n"; + std::cout << "where dim is the dimension of the homological feature,\n"; + std::cout << "b and d are respectively the birth and death of the feature and \n"; + std::cout << "p is the characteristic of the field Z/pZ used for homology coefficients." << std::endl << std::endl; + + std::cout << "Usage: " << argv[0] << " [options] input-file" << std::endl << std::endl; + std::cout << visible << std::endl; + std::abort(); + } +} diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h index ecf3251f..19dece4b 100644 --- a/src/common/doc/main_page.h +++ b/src/common/doc/main_page.h @@ -255,8 +255,8 @@ * Simplex_tree/simplex_tree_from_cliques_of_graph.cpp * \li * Persistent_cohomology/alpha_shapes_persistence.cpp - * \li - * Persistent_cohomology/parallel_rips_persistence.cpp + * \li + * Persistent_cohomology/rips_persistence_via_boundary_matrix.cpp * \li * Persistent_cohomology/performance_rips_persistence.cpp * \li @@ -317,7 +317,7 @@ make \endverbatim * @example Contraction/Rips_contraction.cpp * @example Persistent_cohomology/alpha_complex_3d_persistence.cpp * @example Persistent_cohomology/alpha_complex_persistence.cpp - * @example Persistent_cohomology/parallel_rips_persistence.cpp + * @example Persistent_cohomology/rips_persistence_via_boundary_matrix.cpp * @example Persistent_cohomology/performance_rips_persistence.cpp * @example Persistent_cohomology/periodic_alpha_complex_3d_persistence.cpp * @example Persistent_cohomology/persistence_from_file.cpp -- cgit v1.2.3 From 24fddda9ca1d057c57b1ec9a3d24443c3e1abbf9 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Thu, 26 May 2016 21:03:43 +0000 Subject: Add doc, examples and unitary tests git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/get_persistence@1213 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: dc7980ef3f1baef17f2e42770da146d6c961e672 --- src/Persistent_cohomology/example/CMakeLists.txt | 5 + .../example/custom_persistence_sort.cpp | 115 ++++++++++ .../example/plain_homology.cpp | 44 +--- .../include/gudhi/Persistent_cohomology.h | 70 ++++-- src/Persistent_cohomology/test/CMakeLists.txt | 8 + .../test/betti_numbers_unit_test.cpp | 234 +++++++++++++++++++++ src/common/doc/main_page.h | 7 + 7 files changed, 422 insertions(+), 61 deletions(-) create mode 100644 src/Persistent_cohomology/example/custom_persistence_sort.cpp create mode 100644 src/Persistent_cohomology/test/betti_numbers_unit_test.cpp (limited to 'src/common') diff --git a/src/Persistent_cohomology/example/CMakeLists.txt b/src/Persistent_cohomology/example/CMakeLists.txt index 1f32da17..186a6c33 100644 --- a/src/Persistent_cohomology/example/CMakeLists.txt +++ b/src/Persistent_cohomology/example/CMakeLists.txt @@ -74,12 +74,17 @@ if(GMPXX_FOUND AND GMP_FOUND) add_executable(periodic_alpha_complex_3d_persistence 
periodic_alpha_complex_3d_persistence.cpp) target_link_libraries(periodic_alpha_complex_3d_persistence ${Boost_SYSTEM_LIBRARY} ${GMPXX_LIBRARIES} ${GMP_LIBRARIES} ${CGAL_LIBRARY}) + add_executable(custom_persistence_sort custom_persistence_sort.cpp) + target_link_libraries(custom_persistence_sort ${Boost_SYSTEM_LIBRARY} ${GMPXX_LIBRARIES} ${GMP_LIBRARIES} ${CGAL_LIBRARY}) + if (TBB_FOUND) target_link_libraries(alpha_complex_persistence ${TBB_RELEASE_LIBRARY}) target_link_libraries(periodic_alpha_complex_3d_persistence ${TBB_RELEASE_LIBRARY}) + target_link_libraries(custom_persistence_sort ${TBB_RELEASE_LIBRARY}) endif() add_test(alpha_complex_persistence_2_0_45 ${CMAKE_CURRENT_BINARY_DIR}/alpha_complex_persistence ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -m 0.45 -p 2) add_test(periodic_alpha_complex_3d_persistence_2_0 ${CMAKE_CURRENT_BINARY_DIR}/periodic_alpha_complex_3d_persistence ${CMAKE_SOURCE_DIR}/data/points/grid_10_10_10_in_0_1.off 2 0) + add_test(custom_persistence_sort ${CMAKE_CURRENT_BINARY_DIR}/custom_persistence_sort) else() message(WARNING "Eigen3 not found. Version 3.1.0 is required for Alpha shapes feature.") diff --git a/src/Persistent_cohomology/example/custom_persistence_sort.cpp b/src/Persistent_cohomology/example/custom_persistence_sort.cpp new file mode 100644 index 00000000..4dab4560 --- /dev/null +++ b/src/Persistent_cohomology/example/custom_persistence_sort.cpp @@ -0,0 +1,115 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Vincent Rouvreau + * + * Copyright (C) 2014 INRIA Saclay (France) + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +#include +#include +#include +#include + +#include +#include + +#include +#include +#include +#include // for std::ofstream + + +using Kernel = CGAL::Epick_d< CGAL::Dimension_tag<3> >; +using Point = Kernel::Point_d; +using Alpha_complex = Gudhi::alphacomplex::Alpha_complex; + +std::vector random_points() { + // Instanciate a random point generator + CGAL::Random rng(0); + + // Generate "points_number" random points in a vector + std::vector points; + + // Generates 1000 random 3D points on a sphere of radius 4.0 + CGAL::Random_points_on_sphere_d rand_outside(3, 4.0, rng); + CGAL::cpp11::copy_n(rand_outside, 1000, std::back_inserter(points)); + // Generates 2000 random 3D points in a sphere of radius 3.0 + CGAL::Random_points_in_ball_d rand_inside(3, 3.0, rng); + CGAL::cpp11::copy_n(rand_inside, 2000, std::back_inserter(points)); + + return points; +} + +/* + * Compare two intervals by dimension, then by length. 
+ */ +struct cmp_intervals_by_dim_then_length { + explicit cmp_intervals_by_dim_then_length(Alpha_complex * sc) + : sc_(sc) { } + + template + bool operator()(const Persistent_interval & p1, const Persistent_interval & p2) { + if (sc_->dimension(get < 0 > (p1)) == sc_->dimension(get < 0 > (p2))) + return (sc_->filtration(get < 1 > (p1)) - sc_->filtration(get < 0 > (p1)) + > sc_->filtration(get < 1 > (p2)) - sc_->filtration(get < 0 > (p2))); + else + return (sc_->dimension(get < 0 > (p1)) > sc_->dimension(get < 0 > (p2))); + } + Alpha_complex* sc_; +}; + +int main(int argc, char **argv) { + std::vector points = random_points(); + + // Alpha complex persistence computation from generated points + Alpha_complex alpha_complex_from_points(points, 0.6); + + using Persistent_cohomology = Gudhi::persistent_cohomology::Persistent_cohomology< Alpha_complex, + Gudhi::persistent_cohomology::Field_Zp >; + Persistent_cohomology pcoh(alpha_complex_from_points); + + // initializes the coefficient field for homology - Z/3Z + pcoh.init_coefficients(3); + pcoh.compute_persistent_cohomology(0.2); + + // Custom sort and output persistence + cmp_intervals_by_dim_then_length cmp(&alpha_complex_from_points); + auto persistent_pairs = pcoh.get_persistent_pairs(); + std::sort(std::begin(persistent_pairs), std::end(persistent_pairs), cmp); + for (auto pair : persistent_pairs) { + std::cout << alpha_complex_from_points.dimension(get<0>(pair)) << " " + << alpha_complex_from_points.filtration(get<0>(pair)) << " " + << alpha_complex_from_points.filtration(get<1>(pair)) << std::endl; + } + + // Persistent Betti numbers + std::cout << "The persistent Betti numbers in interval [0.40, 0.41] are : "; + for (int dim = 0; dim < alpha_complex_from_points.dimension(); dim++) + std::cout << "b" << dim << " = " << pcoh.persistent_betti_number(dim, 0.40, 0.41) << " ; "; + std::cout << std::endl; + + // Betti numbers + std::vector betti_numbers = pcoh.betti_numbers(); + std::cout << "The Betti numbers are : "; + for (std::size_t i = 0; i < betti_numbers.size(); i++) + std::cout << "b" << i << " = " << betti_numbers[i] << " ; "; + std::cout << std::endl; + + return 0; +} + diff --git a/src/Persistent_cohomology/example/plain_homology.cpp b/src/Persistent_cohomology/example/plain_homology.cpp index 9e5adb5d..5afce9e2 100644 --- a/src/Persistent_cohomology/example/plain_homology.cpp +++ b/src/Persistent_cohomology/example/plain_homology.cpp @@ -42,24 +42,6 @@ struct MyOptions : Simplex_tree_options_full_featured { }; typedef Simplex_tree ST; - /* - * Compare two intervals by dimension, then by length. - */ - struct cmp_intervals_by_dim_then_length { - explicit cmp_intervals_by_dim_then_length(ST * sc) - : sc_(sc) { - } - template - bool operator()(const Persistent_interval & p1, const Persistent_interval & p2) { - if (sc_->dimension(get < 0 > (p1)) == sc_->dimension(get < 0 > (p2))) - return (sc_->filtration(get < 1 > (p1)) - sc_->filtration(get < 0 > (p1)) - > sc_->filtration(get < 1 > (p2)) - sc_->filtration(get < 0 > (p2))); - else - return (sc_->dimension(get < 0 > (p1)) > sc_->dimension(get < 0 > (p2))); - } - ST* sc_; - }; - int main() { ST st; @@ -102,34 +84,10 @@ int main() { pcoh.output_diagram(); - // ******************************************************** - // get_persistence - // ******************************************************** - std::cout << std::endl; + // Print the Betti numbers are b0=2 and b1=1. 
std::cout << std::endl; - - // First version - std::vector betti_numbers = pcoh.betti_numbers(); - std::cout << "The Betti numbers are : "; - for (std::size_t i = 0; i < betti_numbers.size(); i++) - std::cout << "b" << i << " = " << betti_numbers[i] << " ; "; - std::cout << std::endl; - - // Second version std::cout << "The Betti numbers are : "; for (int i = 0; i < st.dimension(); i++) std::cout << "b" << i << " = " << pcoh.betti_number(i) << " ; "; std::cout << std::endl; - - // Get persistence - cmp_intervals_by_dim_then_length cmp(&st); - auto persistent_pairs = pcoh.get_persistent_pairs(); - std::sort(std::begin(persistent_pairs), std::end(persistent_pairs), cmp); - for (auto pair : persistent_pairs) { - std::cout << st.dimension(get<0>(pair)) << " " - << st.filtration(get<0>(pair)) << " " - << st.filtration(get<1>(pair)) << std::endl; - } - - } diff --git a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h index f0cb159e..5b4c215a 100644 --- a/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h +++ b/src/Persistent_cohomology/include/gudhi/Persistent_cohomology.h @@ -88,7 +88,7 @@ class Persistent_cohomology { /** \brief Initializes the Persistent_cohomology class. * * @param[in] cpx Complex for which the persistent homology is computed. - cpx is a model of FilteredComplex + * cpx is a model of FilteredComplex */ explicit Persistent_cohomology(Complex_ds& cpx) : cpx_(&cpx), @@ -117,7 +117,7 @@ class Persistent_cohomology { /** \brief Initializes the Persistent_cohomology class. * * @param[in] cpx Complex for which the persistent homology is compiuted. - cpx is a model of FilteredComplex + * cpx is a model of FilteredComplex * * @param[in] persistence_dim_max if true, the persistent homology for the maximal dimension in the * complex is computed. If false, it is ignored. Default is false. @@ -554,14 +554,6 @@ class Persistent_cohomology { Complex_ds * sc_; }; - /* - * Returns true when Filtration_value type accepts infinity values and the given value is equal to infinity. - */ - bool is_infinity(Filtration_value value) const { - bool has_infinity = std::numeric_limits::has_infinity; - return (has_infinity && value == std::numeric_limits::infinity()); - } - public: /** \brief Output the persistence diagram in ostream. * @@ -576,13 +568,14 @@ class Persistent_cohomology { void output_diagram(std::ostream& ostream = std::cout) { cmp_intervals_by_length cmp(cpx_); std::sort(std::begin(persistent_pairs_), std::end(persistent_pairs_), cmp); + bool has_infinity = std::numeric_limits::has_infinity; for (auto pair : persistent_pairs_) { // Special case on windows, inf is "1.#INF" (cf. unitary tests and R package TDA) - if (is_infinity(cpx_->filtration(get<1>(pair)))) { - ostream << /*get<2>(pair) <<*/ " " << cpx_->dimension(get<0>(pair)) << " " + if (has_infinity && cpx_->filtration(get<1>(pair)) == std::numeric_limits::infinity()) { + ostream << get<2>(pair) << " " << cpx_->dimension(get<0>(pair)) << " " << cpx_->filtration(get<0>(pair)) << " inf " << std::endl; } else { - ostream << /*get<2>(pair) <<*/ " " << cpx_->dimension(get<0>(pair)) << " " + ostream << get<2>(pair) << " " << cpx_->dimension(get<0>(pair)) << " " << cpx_->filtration(get<0>(pair)) << " " << cpx_->filtration(get<1>(pair)) << " " << std::endl; } @@ -601,14 +594,15 @@ class Persistent_cohomology { } /** @brief Returns Betti numbers. - * + * @return A vector of persistent Betti numbers. 
*/ std::vector betti_numbers() const { // Init Betti numbers vector with zeros until Simplicial complex dimension std::vector betti_numbers(cpx_->dimension(), 0); for (auto pair : persistent_pairs_) { - if (is_infinity(cpx_->filtration(get<1>(pair)))) { + // Count never ended persistence intervals + if (cpx_->null_simplex() == get<1>(pair)) { // Increment corresponding betti number betti_numbers[cpx_->dimension(get<0>(pair))] += 1; } @@ -616,16 +610,56 @@ class Persistent_cohomology { return betti_numbers; } - /** @brief Returns the Betti number passed by parameter. + /** @brief Returns the Betti number of the dimension passed by parameter. * @param[in] dimension The Betti number dimension to get. - * @return Betti number + * @return Betti number of the given dimension * */ int betti_number(int dimension) const { int betti_number = 0; for (auto pair : persistent_pairs_) { - if (is_infinity(cpx_->filtration(get<1>(pair)))) { + // Count never ended persistence intervals + if (cpx_->null_simplex() == get<1>(pair)) { + if (cpx_->dimension(get<0>(pair)) == dimension) { + // Increment betti number found + ++betti_number; + } + } + } + return betti_number; + } + + /** @brief Returns the persistent Betti numbers. + * @param[in] from The persistence birth limit to be added in the number \f$(persistent birth \leq from)\f$. + * @param[in] to The persistence death limit to be added in the number \f$(persistent death > from)\f$. + * @return A vector of persistent Betti numbers. + */ + std::vector persistent_betti_numbers(Filtration_value from, Filtration_value to) const { + // Init Betti numbers vector with zeros until Simplicial complex dimension + std::vector betti_numbers(cpx_->dimension(), 0); + + for (auto pair : persistent_pairs_) { + // Count persistence intervals that covers the given interval + if (cpx_->filtration(get<0>(pair)) <= from && cpx_->filtration(get<1>(pair)) > to) { + // Increment corresponding betti number + betti_numbers[cpx_->dimension(get<0>(pair))] += 1; + } + } + return betti_numbers; + } + + /** @brief Returns the persistentBetti number of the dimension passed by parameter. + * @param[in] dimension The Betti number dimension to get. 
+ * @return Betti number of the given dimension + * + */ + int persistent_betti_number(int dimension, Filtration_value from, Filtration_value to) const { + int betti_number = 0; + + for (auto pair : persistent_pairs_) { + // Count persistence intervals that covers the given interval + if (cpx_->filtration(get<0>(pair)) <= from && cpx_->filtration(get<1>(pair)) > to) { if (cpx_->dimension(get<0>(pair)) == dimension) { // Increment betti number found ++betti_number; diff --git a/src/Persistent_cohomology/test/CMakeLists.txt b/src/Persistent_cohomology/test/CMakeLists.txt index 459cc000..a034031a 100644 --- a/src/Persistent_cohomology/test/CMakeLists.txt +++ b/src/Persistent_cohomology/test/CMakeLists.txt @@ -12,8 +12,11 @@ endif() add_executable ( PersistentCohomologyUT persistent_cohomology_unit_test.cpp ) target_link_libraries(PersistentCohomologyUT ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) +add_executable ( BettiNumbersUT betti_numbers_unit_test.cpp ) +target_link_libraries(BettiNumbersUT ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) if (TBB_FOUND) target_link_libraries(PersistentCohomologyUT ${TBB_RELEASE_LIBRARY}) + target_link_libraries(BettiNumbersUT ${TBB_RELEASE_LIBRARY}) endif() # Unitary tests @@ -23,6 +26,11 @@ add_test(NAME PersistentCohomologyUT # XML format for Jenkins xUnit plugin --log_format=XML --log_sink=${CMAKE_SOURCE_DIR}/PersistentCohomologyUT.xml --log_level=test_suite --report_level=no) +add_test(NAME BettiNumbersUT + COMMAND ${CMAKE_CURRENT_BINARY_DIR}/BettiNumbersUT + # XML format for Jenkins xUnit plugin + --log_format=XML --log_sink=${CMAKE_SOURCE_DIR}/BettiNumbersUT.xml --log_level=test_suite --report_level=no) + if(GMPXX_FOUND AND GMP_FOUND) add_executable ( PersistentCohomologyMultiFieldUT persistent_cohomology_unit_test_multi_field.cpp ) target_link_libraries(PersistentCohomologyMultiFieldUT ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY} ${GMPXX_LIBRARIES} ${GMP_LIBRARIES}) diff --git a/src/Persistent_cohomology/test/betti_numbers_unit_test.cpp b/src/Persistent_cohomology/test/betti_numbers_unit_test.cpp new file mode 100644 index 00000000..a4e00b45 --- /dev/null +++ b/src/Persistent_cohomology/test/betti_numbers_unit_test.cpp @@ -0,0 +1,234 @@ +#include +#include +#include +#include // std::pair, std::make_pair +#include // float comparison +#include + +#define BOOST_TEST_DYN_LINK +#define BOOST_TEST_MODULE "betti_numbers" +#include + +#include "gudhi/Simplex_tree.h" +#include "gudhi/Persistent_cohomology.h" + +struct MyOptions : Gudhi::Simplex_tree_options_full_featured { + // Implicitly use 0 as filtration value for all simplices + static const bool store_filtration = false; + // The persistence algorithm needs this + static const bool store_key = true; + // I have few vertices + typedef short Vertex_handle; +}; + +using Mini_simplex_tree = Gudhi::Simplex_tree; +using Mini_st_persistence = + Gudhi::persistent_cohomology::Persistent_cohomology; + +/* + * Compare two intervals by dimension, then by length. 
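As a quick orientation for the betti_number(), betti_numbers(), persistent_betti_number() and persistent_betti_numbers() accessors added in the Persistent_cohomology.h hunk above, here is a small self-contained sketch. It is not part of the patch: the toy complex, the filtration values, the alias name Persistence and the 2.5 thresholds are arbitrary placeholders; the unit test that follows covers these accessors in full.

#include "gudhi/Simplex_tree.h"
#include "gudhi/Persistent_cohomology.h"

#include <iostream>
#include <vector>

using Simplex_tree = Gudhi::Simplex_tree<>;
using Persistence = Gudhi::persistent_cohomology::Persistent_cohomology<Simplex_tree,
    Gudhi::persistent_cohomology::Field_Zp>;

int main() {
  Simplex_tree st;
  // Triangle boundary {0,1,2} (one 1-cycle) plus an isolated vertex 3: b0 = 2, b1 = 1.
  const int edge01[] = {0, 1};
  const int edge12[] = {1, 2};
  const int edge02[] = {0, 2};
  const int vertex3[] = {3};
  st.insert_simplex_and_subfaces(edge01, 1.0);
  st.insert_simplex_and_subfaces(edge12, 1.0);
  st.insert_simplex_and_subfaces(edge02, 2.0);
  st.insert_simplex(vertex3, 3.0);
  st.set_dimension(2);
  st.initialize_filtration();

  Persistence pcoh(st);
  pcoh.init_coefficients(2);             // coefficients in Z/2Z
  pcoh.compute_persistent_cohomology();  // min_persistence defaults to 0

  // Betti numbers of the whole filtered complex: counts the never-ending intervals.
  for (int dim = 0; dim < st.dimension(); dim++)
    std::cout << "b" << dim << " = " << pcoh.betti_number(dim) << std::endl;

  // Persistent Betti numbers: intervals born at or before 2.5 and still alive after 2.5.
  std::vector<int> pbns = pcoh.persistent_betti_numbers(2.5, 2.5);
  for (std::size_t dim = 0; dim < pbns.size(); dim++)
    std::cout << "persistent b" << dim << " on [2.5, 2.5] = " << pbns[dim] << std::endl;
  return 0;
}

Per the implementation above, persistent_betti_numbers(from, to) counts the intervals whose birth filtration is at most from and whose death filtration is strictly greater than to, one counter per dimension of the complex.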
+ */ +template +struct cmp_intervals_by_dim_then_length { + explicit cmp_intervals_by_dim_then_length(Simplicial_complex * sc) + : sc_(sc) { } + + template + bool operator()(const Persistent_interval & p1, const Persistent_interval & p2) { + if (sc_->dimension(get < 0 > (p1)) == sc_->dimension(get < 0 > (p2))) + return (sc_->filtration(get < 1 > (p1)) - sc_->filtration(get < 0 > (p1)) + > sc_->filtration(get < 1 > (p2)) - sc_->filtration(get < 0 > (p2))); + else + return (sc_->dimension(get < 0 > (p1)) > sc_->dimension(get < 0 > (p2))); + } + Simplicial_complex* sc_; +}; + +BOOST_AUTO_TEST_CASE( plain_homology_betti_numbers ) +{ + Mini_simplex_tree st; + + /* Complex to build. */ + /* 1 4 */ + /* o---o */ + /* /3\ / */ + /* o---o o */ + /* 2 0 5 */ + const short tetra0123[] = {0, 1, 2, 3}; + const short edge04[] = {0, 4}; + const short edge14[] = {1, 4}; + const short vertex5[] = {5}; + st.insert_simplex_and_subfaces(tetra0123); + st.insert_simplex_and_subfaces(edge04); + st.insert_simplex(edge14); + st.insert_simplex(vertex5); + // FIXME: Remove this line + st.set_dimension(3); + + // Sort the simplices in the order of the filtration + st.initialize_filtration(); + + // Class for homology computation + Mini_st_persistence pcoh(st); + + // Initialize the coefficient field Z/3Z for homology + pcoh.init_coefficients(3); + + // Compute the persistence diagram of the complex + pcoh.compute_persistent_cohomology(); + + // Print the result. The format is, on each line: 2 dim 0 inf + // where 2 represents the field, dim the dimension of the feature. + // 2 0 0 inf + // 2 0 0 inf + // 2 1 0 inf + // means that in Z/2Z-homology, the Betti numbers are b0=2 and b1=1. + + BOOST_CHECK(pcoh.betti_number(0) == 2); + BOOST_CHECK(pcoh.betti_number(1) == 1); + BOOST_CHECK(pcoh.betti_number(2) == 0); + + std::vector bns = pcoh.betti_numbers(); + BOOST_CHECK(bns.size() == 3); + BOOST_CHECK(bns[0] == 2); + BOOST_CHECK(bns[1] == 1); + BOOST_CHECK(bns[2] == 0); + + // Custom sort and output persistence + cmp_intervals_by_dim_then_length cmp(&st); + auto persistent_pairs = pcoh.get_persistent_pairs(); + + std::sort(std::begin(persistent_pairs), std::end(persistent_pairs), cmp); + + BOOST_CHECK(persistent_pairs.size() == 3); + // persistent_pairs[0] = 2 1 0 inf + BOOST_CHECK(st.dimension(get<0>(persistent_pairs[0])) == 1); + BOOST_CHECK(st.filtration(get<0>(persistent_pairs[0])) == 0); + BOOST_CHECK(get<1>(persistent_pairs[0]) == st.null_simplex()); + + // persistent_pairs[1] = 2 0 0 inf + BOOST_CHECK(st.dimension(get<0>(persistent_pairs[1])) == 0); + BOOST_CHECK(st.filtration(get<0>(persistent_pairs[1])) == 0); + BOOST_CHECK(get<1>(persistent_pairs[1]) == st.null_simplex()); + + // persistent_pairs[2] = 2 0 0 inf + BOOST_CHECK(st.dimension(get<0>(persistent_pairs[2])) == 0); + BOOST_CHECK(st.filtration(get<0>(persistent_pairs[2])) == 0); + BOOST_CHECK(get<1>(persistent_pairs[2]) == st.null_simplex()); +} + +using Simplex_tree = Gudhi::Simplex_tree<>; +using St_persistence = + Gudhi::persistent_cohomology::Persistent_cohomology; + +BOOST_AUTO_TEST_CASE( betti_numbers ) +{ + Simplex_tree st; + + /* Complex to build. 
*/ + /* 1 4 */ + /* o---o */ + /* /3\ / */ + /* o---o o */ + /* 2 0 5 */ + const short tetra0123[] = {0, 1, 2, 3}; + const short edge04[] = {0, 4}; + const short edge14[] = {1, 4}; + const short vertex5[] = {5}; + st.insert_simplex_and_subfaces(tetra0123, 4.0); + st.insert_simplex_and_subfaces(edge04, 2.0); + st.insert_simplex(edge14, 2.0); + st.insert_simplex(vertex5, 1.0); + // FIXME: Remove this line + st.set_dimension(3); + + // Sort the simplices in the order of the filtration + st.initialize_filtration(); + + // Class for homology computation + St_persistence pcoh(st); + + // Initialize the coefficient field Z/3Z for homology + pcoh.init_coefficients(3); + + // Compute the persistence diagram of the complex + pcoh.compute_persistent_cohomology(); + + // Check the Betti numbers are b0=2, b1=1 and b2=0. + BOOST_CHECK(pcoh.betti_number(0) == 2); + BOOST_CHECK(pcoh.betti_number(1) == 1); + BOOST_CHECK(pcoh.betti_number(2) == 0); + + // Check the Betti numbers are b0=2, b1=1 and b2=0. + std::vector bns = pcoh.betti_numbers(); + BOOST_CHECK(bns.size() == 3); + BOOST_CHECK(bns[0] == 2); + BOOST_CHECK(bns[1] == 1); + BOOST_CHECK(bns[2] == 0); + + // Check the persistent Betti numbers in [4., 10.] are b0=2, b1=1 and b2=0. + BOOST_CHECK(pcoh.persistent_betti_number(0, 4., 10.) == 2); + BOOST_CHECK(pcoh.persistent_betti_number(1, 4., 10.) == 1); + BOOST_CHECK(pcoh.persistent_betti_number(2, 4., 10.) == 0); + + // Check the persistent Betti numbers in [2., 100.] are b0=2, b1=0 and b2=0. + BOOST_CHECK(pcoh.persistent_betti_number(0, 2., 100.) == 2); + BOOST_CHECK(pcoh.persistent_betti_number(1, 2., 100.) == 0); + BOOST_CHECK(pcoh.persistent_betti_number(2, 2., 100.) == 0); + + // Check the persistent Betti numbers in [1., 1000.] are b0=1, b1=0 and b2=0. + BOOST_CHECK(pcoh.persistent_betti_number(0, 1., 1000.) == 1); + BOOST_CHECK(pcoh.persistent_betti_number(1, 1., 1000.) == 0); + BOOST_CHECK(pcoh.persistent_betti_number(2, 1., 1000.) == 0); + + // Check the persistent Betti numbers in [.9, 1000.] are b0=0, b1=0 and b2=0. + BOOST_CHECK(pcoh.persistent_betti_number(0, .9, 1000.) == 0); + BOOST_CHECK(pcoh.persistent_betti_number(1, .9, 1000.) == 0); + BOOST_CHECK(pcoh.persistent_betti_number(2, .9, 1000.) == 0); + + // Check the persistent Betti numbers in [4.1, 10000.] are b0=2, b1=1 and b2=0. + bns = pcoh.persistent_betti_numbers(4.1, 10000.); + BOOST_CHECK(bns[0] == 2); + BOOST_CHECK(bns[1] == 1); + BOOST_CHECK(bns[2] == 0); + + // Check the persistent Betti numbers in [2.1, 100000.] are b0=2, b1=0 and b2=0. + bns = pcoh.persistent_betti_numbers(2.1, 100000.); + BOOST_CHECK(bns[0] == 2); + BOOST_CHECK(bns[1] == 0); + BOOST_CHECK(bns[2] == 0); + + // Check the persistent Betti numbers in [1.1, 1000000.] are b0=1, b1=0 and b2=0. + bns = pcoh.persistent_betti_numbers(1.1, 1000000.); + BOOST_CHECK(bns[0] == 1); + BOOST_CHECK(bns[1] == 0); + BOOST_CHECK(bns[2] == 0); + + // Check the persistent Betti numbers in [.1, 10000000.] are b0=0, b1=0 and b2=0. 
+ bns = pcoh.persistent_betti_numbers(.1, 10000000.); + BOOST_CHECK(bns[0] == 0); + BOOST_CHECK(bns[1] == 0); + BOOST_CHECK(bns[2] == 0); + + // Custom sort and output persistence + cmp_intervals_by_dim_then_length cmp(&st); + auto persistent_pairs = pcoh.get_persistent_pairs(); + + std::sort(std::begin(persistent_pairs), std::end(persistent_pairs), cmp); + + BOOST_CHECK(persistent_pairs.size() == 3); + // persistent_pairs[0] = 2 1 4 inf + BOOST_CHECK(st.dimension(get<0>(persistent_pairs[0])) == 1); + BOOST_CHECK(st.filtration(get<0>(persistent_pairs[0])) == 4); + BOOST_CHECK(get<1>(persistent_pairs[0]) == st.null_simplex()); + + // persistent_pairs[1] = 2 0 2 inf + BOOST_CHECK(st.dimension(get<0>(persistent_pairs[1])) == 0); + BOOST_CHECK(st.filtration(get<0>(persistent_pairs[1])) == 2); + BOOST_CHECK(get<1>(persistent_pairs[1]) == st.null_simplex()); + + // persistent_pairs[2] = 2 0 1 inf + BOOST_CHECK(st.dimension(get<0>(persistent_pairs[2])) == 0); + BOOST_CHECK(st.filtration(get<0>(persistent_pairs[2])) == 1); + BOOST_CHECK(get<1>(persistent_pairs[2]) == st.null_simplex()); +} diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h index 19dece4b..2b9f2f40 100644 --- a/src/common/doc/main_page.h +++ b/src/common/doc/main_page.h @@ -209,6 +209,8 @@ * Persistent_cohomology/alpha_complex_persistence.cpp * \li * Persistent_cohomology/periodic_alpha_complex_3d_persistence.cpp + * \li + * Persistent_cohomology/custom_persistence_sort.cpp * * \subsection eigen3 Eigen3: * Eigen3 is a C++ template library for linear algebra: @@ -224,6 +226,8 @@ * Persistent_cohomology/alpha_complex_persistence.cpp * \li * Persistent_cohomology/periodic_alpha_complex_3d_persistence.cpp + * \li + * Persistent_cohomology/custom_persistence_sort.cpp * * \subsection tbb Threading Building Blocks: * Intel® TBB lets you easily write parallel @@ -271,6 +275,8 @@ * Persistent_cohomology/rips_persistence.cpp * \li * Persistent_cohomology/periodic_alpha_complex_3d_persistence.cpp + * \li + * Persistent_cohomology/custom_persistence_sort.cpp * * \subsection demos Demos and examples * To build the demos and examples, run the following commands in a terminal: @@ -325,6 +331,7 @@ make \endverbatim * @example Persistent_cohomology/plain_homology.cpp * @example Persistent_cohomology/rips_multifield_persistence.cpp * @example Persistent_cohomology/rips_persistence.cpp + * @example Persistent_cohomology/custom_persistence_sort.cpp * @example Simplex_tree/mini_simplex_tree.cpp * @example Simplex_tree/simple_simplex_tree.cpp * @example Simplex_tree/simplex_tree_from_alpha_shapes_3.cpp -- cgit v1.2.3 From 75a8bf8132d2bade9ccc31147db00bf29eeb7589 Mon Sep 17 00:00:00 2001 From: cjamin Date: Tue, 7 Jun 2016 14:59:23 +0000 Subject: Rename projects in CMakeLists.txt git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/fix_naming@1257 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: a51e43d795b0c246d0949c7566aa57d3e15925c7 --- src/Alpha_complex/example/CMakeLists.txt | 2 +- src/Alpha_complex/test/CMakeLists.txt | 2 +- src/Bitmap_cubical_complex/example/CMakeLists.txt | 2 +- src/Bitmap_cubical_complex/test/CMakeLists.txt | 2 +- src/Bottleneck/example/CMakeLists.txt | 2 +- src/Bottleneck/test/CMakeLists.txt | 2 +- src/Contraction/example/CMakeLists.txt | 2 +- src/Persistent_cohomology/example/CMakeLists.txt | 2 +- src/Persistent_cohomology/test/CMakeLists.txt | 2 +- src/Simplex_tree/example/CMakeLists.txt | 2 +- src/Simplex_tree/test/CMakeLists.txt | 2 +- src/Skeleton_blocker/example/CMakeLists.txt | 2 +- 
src/Skeleton_blocker/test/CMakeLists.txt | 2 +- src/Witness_complex/example/CMakeLists.txt | 2 +- src/Witness_complex/test/CMakeLists.txt | 2 +- src/common/example/CMakeLists.txt | 2 +- src/common/test/CMakeLists.txt | 2 +- 17 files changed, 17 insertions(+), 17 deletions(-) (limited to 'src/common') diff --git a/src/Alpha_complex/example/CMakeLists.txt b/src/Alpha_complex/example/CMakeLists.txt index f1346867..36d3cb1f 100644 --- a/src/Alpha_complex/example/CMakeLists.txt +++ b/src/Alpha_complex/example/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required(VERSION 2.6) -project(GUDHIAlphaShapesExample) +project(Alpha_shapes_examples) # need CGAL 4.7 # cmake -DCGAL_DIR=~/workspace/CGAL-4.7-Ic-41 ../../.. diff --git a/src/Alpha_complex/test/CMakeLists.txt b/src/Alpha_complex/test/CMakeLists.txt index e24588d7..d4b34126 100644 --- a/src/Alpha_complex/test/CMakeLists.txt +++ b/src/Alpha_complex/test/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required(VERSION 2.6) -project(GUDHIAlphaComplexTest) +project(Alpha_shapes_tests) if (GCOVR_PATH) # for gcovr to make coverage reports - Corbera Jenkins plugin diff --git a/src/Bitmap_cubical_complex/example/CMakeLists.txt b/src/Bitmap_cubical_complex/example/CMakeLists.txt index ad86b763..b26999d0 100644 --- a/src/Bitmap_cubical_complex/example/CMakeLists.txt +++ b/src/Bitmap_cubical_complex/example/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required(VERSION 2.6) -project(GUDHIBitmap) +project(Bitmap_cubical_complex_examples) add_executable ( Bitmap_cubical_complex Bitmap_cubical_complex.cpp ) target_link_libraries(Bitmap_cubical_complex ${Boost_SYSTEM_LIBRARY}) diff --git a/src/Bitmap_cubical_complex/test/CMakeLists.txt b/src/Bitmap_cubical_complex/test/CMakeLists.txt index 0e5340c7..e98251c2 100644 --- a/src/Bitmap_cubical_complex/test/CMakeLists.txt +++ b/src/Bitmap_cubical_complex/test/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required(VERSION 2.6) -project(GUDHIBitmapCCUT) +project(Bitmap_cubical_complex_tests) if (GCOVR_PATH) # for gcovr to make coverage reports - Corbera Jenkins plugin diff --git a/src/Bottleneck/example/CMakeLists.txt b/src/Bottleneck/example/CMakeLists.txt index 2ff009c4..77797202 100644 --- a/src/Bottleneck/example/CMakeLists.txt +++ b/src/Bottleneck/example/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required(VERSION 2.6) -project(GUDHIBottleneckExample) +project(Bottleneck_examples) add_executable ( RandomDiagrams random_diagrams.cpp ) add_test(RandomDiagrams ${CMAKE_CURRENT_BINARY_DIR}/RandomDiagrams) diff --git a/src/Bottleneck/test/CMakeLists.txt b/src/Bottleneck/test/CMakeLists.txt index ad63c080..9d88ab25 100644 --- a/src/Bottleneck/test/CMakeLists.txt +++ b/src/Bottleneck/test/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required(VERSION 2.6) -project(GUDHIBottleneckUT) +project(Bottleneck_tests) if (GCOVR_PATH) # for gcovr to make coverage reports - Corbera Jenkins plugin diff --git a/src/Contraction/example/CMakeLists.txt b/src/Contraction/example/CMakeLists.txt index 4889b82f..b8384cb3 100644 --- a/src/Contraction/example/CMakeLists.txt +++ b/src/Contraction/example/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required(VERSION 2.6) -project(GUDHIskbl) +project(Contraction_examples) add_executable(RipsContraction Rips_contraction.cpp) diff --git a/src/Persistent_cohomology/example/CMakeLists.txt b/src/Persistent_cohomology/example/CMakeLists.txt index 186a6c33..f85c50f9 100644 --- a/src/Persistent_cohomology/example/CMakeLists.txt +++ b/src/Persistent_cohomology/example/CMakeLists.txt @@ -1,5 +1,5 @@ 
cmake_minimum_required(VERSION 2.6) -project(GUDHIExPersCohom) +project(Persistent_cohomology_examples) # problem with Visual Studio link on Boost program_options add_definitions( -DBOOST_ALL_NO_LIB ) diff --git a/src/Persistent_cohomology/test/CMakeLists.txt b/src/Persistent_cohomology/test/CMakeLists.txt index a034031a..ff6b38a6 100644 --- a/src/Persistent_cohomology/test/CMakeLists.txt +++ b/src/Persistent_cohomology/test/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required(VERSION 2.6) -project(GUDHIPersistentCohomologyUT) +project(Persistent_cohomology_tests) if (GCOVR_PATH) # for gcovr to make coverage reports - Corbera Jenkins plugin diff --git a/src/Simplex_tree/example/CMakeLists.txt b/src/Simplex_tree/example/CMakeLists.txt index 89a4e053..d3013c50 100644 --- a/src/Simplex_tree/example/CMakeLists.txt +++ b/src/Simplex_tree/example/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required(VERSION 2.6) -project(GUDHISimplexTreeFromFile) +project(Simplex_tree_examples) add_executable ( simplex_tree_from_cliques_of_graph simplex_tree_from_cliques_of_graph.cpp ) if (TBB_FOUND) diff --git a/src/Simplex_tree/test/CMakeLists.txt b/src/Simplex_tree/test/CMakeLists.txt index 609d8669..7ed0ce75 100644 --- a/src/Simplex_tree/test/CMakeLists.txt +++ b/src/Simplex_tree/test/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required(VERSION 2.6) -project(GUDHISimplexTreeUT) +project(Simplex_tree_tests) if (GCOVR_PATH) # for gcovr to make coverage reports - Corbera Jenkins plugin diff --git a/src/Skeleton_blocker/example/CMakeLists.txt b/src/Skeleton_blocker/example/CMakeLists.txt index de0c7bba..cc7f37f3 100644 --- a/src/Skeleton_blocker/example/CMakeLists.txt +++ b/src/Skeleton_blocker/example/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required(VERSION 2.6) -project(GUDHIskbl) +project(Skeleton_blocker_examples) add_executable(SkeletonBlockerFromSimplices Skeleton_blocker_from_simplices.cpp) add_executable(SkeletonBlockerIteration Skeleton_blocker_iteration.cpp) diff --git a/src/Skeleton_blocker/test/CMakeLists.txt b/src/Skeleton_blocker/test/CMakeLists.txt index 5e063845..a4576b36 100644 --- a/src/Skeleton_blocker/test/CMakeLists.txt +++ b/src/Skeleton_blocker/test/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required(VERSION 2.6) -project(GUDHIskbl) +project(Skeleton_blocker_tests) if (GCOVR_PATH) # for gcovr to make coverage reports - Corbera Jenkins plugin diff --git a/src/Witness_complex/example/CMakeLists.txt b/src/Witness_complex/example/CMakeLists.txt index b304479e..e6a916cd 100644 --- a/src/Witness_complex/example/CMakeLists.txt +++ b/src/Witness_complex/example/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required(VERSION 2.6) -project(GUDHIWitnessComplex) +project(Witness_complex_examples) # A simple example add_executable( witness_complex_from_file witness_complex_from_file.cpp ) diff --git a/src/Witness_complex/test/CMakeLists.txt b/src/Witness_complex/test/CMakeLists.txt index 37bef2d0..bb55b0f1 100644 --- a/src/Witness_complex/test/CMakeLists.txt +++ b/src/Witness_complex/test/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required(VERSION 2.6) -project(GUDHIWitnessComplexUT) +project(Witness_complex_tests) if (GCOVR_PATH) # for gcovr to make coverage reports - Corbera Jenkins plugin diff --git a/src/common/example/CMakeLists.txt b/src/common/example/CMakeLists.txt index 83f874e1..59ee12c4 100644 --- a/src/common/example/CMakeLists.txt +++ b/src/common/example/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required(VERSION 2.6) -project(GUDHIDelaunayTriangulationOffFileReadWrite) 
+project(Common_examples) # need CGAL 4.7 if(CGAL_FOUND) diff --git a/src/common/test/CMakeLists.txt b/src/common/test/CMakeLists.txt index 789546ae..5f9c5dde 100644 --- a/src/common/test/CMakeLists.txt +++ b/src/common/test/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required(VERSION 2.6) -project(GUDHIDelaunayTriangulationOffFileReadWriteUT) +project(Common_tests) if (GCOVR_PATH) # for gcovr to make coverage reports - Corbera Jenkins plugin -- cgit v1.2.3 From ee5997ccdea6c01fc3d0b1bae5dac60a35cffb11 Mon Sep 17 00:00:00 2001 From: cjamin Date: Fri, 10 Jun 2016 11:29:30 +0000 Subject: Useless typename git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/trunk@1271 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: dcc10421c0e2ce26abbfa93176c62d6c834bb5d3 --- src/common/example/CGAL_points_off_reader.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'src/common') diff --git a/src/common/example/CGAL_points_off_reader.cpp b/src/common/example/CGAL_points_off_reader.cpp index 997b47c1..d1ca166d 100644 --- a/src/common/example/CGAL_points_off_reader.cpp +++ b/src/common/example/CGAL_points_off_reader.cpp @@ -9,7 +9,7 @@ #include using Kernel = CGAL::Epick_d< CGAL::Dynamic_dimension_tag >; -using Point_d = typename Kernel::Point_d; +using Point_d = Kernel::Point_d; void usage(char * const progName) { std::cerr << "Usage: " << progName << " inputFile.off" << std::endl; -- cgit v1.2.3 From b92ac1d4eeb7ea185619c7d6f2e4b426456ea518 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Thu, 16 Jun 2016 09:59:19 +0000 Subject: Remove GMP and GMPXX required for all persistence examples. Only for Multi-field. Update doc. git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/tbb_msvc2015@1303 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 5ade88a9525f04ac45e2ddd65881da1771954ef6 --- src/Persistent_cohomology/example/CMakeLists.txt | 12 ++++++------ src/Persistent_cohomology/test/CMakeLists.txt | 16 ++++++++-------- src/common/doc/main_page.h | 4 ---- 3 files changed, 14 insertions(+), 18 deletions(-) (limited to 'src/common') diff --git a/src/Persistent_cohomology/example/CMakeLists.txt b/src/Persistent_cohomology/example/CMakeLists.txt index d081b662..b823d658 100644 --- a/src/Persistent_cohomology/example/CMakeLists.txt +++ b/src/Persistent_cohomology/example/CMakeLists.txt @@ -52,10 +52,13 @@ if(GMP_FOUND) add_test(rips_multifield_persistence_2_71 ${CMAKE_CURRENT_BINARY_DIR}/rips_multifield_persistence ${CMAKE_SOURCE_DIR}/data/points/Kl.txt -r 0.2 -d 3 -p 2 -q 71 -m 100) endif(GMPXX_FOUND) +else() + # message(WARNING "GMP not found.") +endif(GMP_FOUND) if(CGAL_FOUND) add_executable(alpha_complex_3d_persistence alpha_complex_3d_persistence.cpp) - target_link_libraries(alpha_complex_3d_persistence ${Boost_SYSTEM_LIBRARY} ${GMP_LIBRARIES} ${CGAL_LIBRARY}) + target_link_libraries(alpha_complex_3d_persistence ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) if (TBB_FOUND) target_link_libraries(alpha_complex_3d_persistence ${TBB_LIBRARIES}) @@ -75,10 +78,10 @@ if(GMP_FOUND) target_link_libraries(alpha_complex_persistence ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY}) add_executable(periodic_alpha_complex_3d_persistence periodic_alpha_complex_3d_persistence.cpp) - target_link_libraries(periodic_alpha_complex_3d_persistence ${Boost_SYSTEM_LIBRARY} ${GMP_LIBRARIES} ${CGAL_LIBRARY}) + target_link_libraries(periodic_alpha_complex_3d_persistence ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) add_executable(custom_persistence_sort custom_persistence_sort.cpp) - 
target_link_libraries(custom_persistence_sort ${Boost_SYSTEM_LIBRARY} ${GMP_LIBRARIES} ${CGAL_LIBRARY}) + target_link_libraries(custom_persistence_sort ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) if (TBB_FOUND) target_link_libraries(alpha_complex_persistence ${TBB_LIBRARIES}) @@ -99,6 +102,3 @@ if(GMP_FOUND) # message(WARNING "CGAL not found.") endif(CGAL_FOUND) -else() - # message(WARNING "GMP not found.") -endif(GMP_FOUND) diff --git a/src/Persistent_cohomology/test/CMakeLists.txt b/src/Persistent_cohomology/test/CMakeLists.txt index b50430f9..18d85eda 100644 --- a/src/Persistent_cohomology/test/CMakeLists.txt +++ b/src/Persistent_cohomology/test/CMakeLists.txt @@ -4,11 +4,11 @@ project(GUDHIPersistentCohomologyUT) if (GCOVR_PATH) # for gcovr to make coverage reports - Corbera Jenkins plugin set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fprofile-arcs -ftest-coverage") -endif() +endif(GCOVR_PATH) if (GPROF_PATH) # for gprof to make coverage reports - Jenkins set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -pg") -endif() +endif(GPROF_PATH) add_executable ( PersistentCohomologyUT persistent_cohomology_unit_test.cpp ) target_link_libraries(PersistentCohomologyUT ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) @@ -17,7 +17,7 @@ target_link_libraries(BettiNumbersUT ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_F if (TBB_FOUND) target_link_libraries(PersistentCohomologyUT ${TBB_LIBRARIES}) target_link_libraries(BettiNumbersUT ${TBB_LIBRARIES}) -endif() +endif(TBB_FOUND) # Unitary tests add_test(NAME PersistentCohomologyUT @@ -33,10 +33,10 @@ add_test(NAME BettiNumbersUT if(GMPXX_FOUND AND GMP_FOUND) add_executable ( PersistentCohomologyMultiFieldUT persistent_cohomology_unit_test_multi_field.cpp ) -target_link_libraries(PersistentCohomologyMultiFieldUT ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY} ${GMPXX_LIBRARIES} ${GMP_LIBRARIES}) -if (TBB_FOUND) - target_link_libraries(PersistentCohomologyMultiFieldUT ${TBB_LIBRARIES}) -endif() + target_link_libraries(PersistentCohomologyMultiFieldUT ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY} ${GMPXX_LIBRARIES} ${GMP_LIBRARIES}) + if (TBB_FOUND) + target_link_libraries(PersistentCohomologyMultiFieldUT ${TBB_LIBRARIES}) + endif(TBB_FOUND) # Unitary tests add_test(NAME PersistentCohomologyMultiFieldUT @@ -45,5 +45,5 @@ endif() # XML format for Jenkins xUnit plugin --log_format=XML --log_sink=${CMAKE_SOURCE_DIR}/PersistentCohomologyMultiFieldUT.xml --log_level=test_suite --report_level=no) -endif() +endif(GMPXX_FOUND AND GMP_FOUND) diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h index 2b9f2f40..abe7398b 100644 --- a/src/common/doc/main_page.h +++ b/src/common/doc/main_page.h @@ -168,14 +168,10 @@ * * The following example requires the GNU Multiple Precision Arithmetic * Library (GMP) and will not be built if GMP is not installed: - * \li - * Persistent_cohomology/alpha_shapes_persistence.cpp * \li * Persistent_cohomology/performance_rips_persistence.cpp * \li * Persistent_cohomology/rips_multifield_persistence.cpp - * \li - * Simplex_tree/simplex_tree_from_alpha_shapes_3.cpp * * Having GMP version 4.2 or higher installed is recommended. 
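By way of illustration (this snippet is not part of the patch): the examples listed just above need GMP because they use the Multi_field coefficient policy, which computes persistence over a product of prime fields Z/pZ and therefore relies on arbitrary-precision integers. A minimal sketch in the spirit of the unit tests earlier in this series follows; the header paths, class names and signatures are assumptions on my part and should be checked against the installed GUDHI headers.

#include <gudhi/Simplex_tree.h>
#include <gudhi/Persistent_cohomology.h>
#include <gudhi/Persistent_cohomology/Multi_field.h>  // assumed header for the GMP-backed coefficient policy

int main() {
  using Simplex_tree = Gudhi::Simplex_tree<>;
  using Multi_field_persistence = Gudhi::persistent_cohomology::Persistent_cohomology<
      Simplex_tree, Gudhi::persistent_cohomology::Multi_field>;

  // Same toy complex as the unit tests in this patch series: a filled tetrahedron,
  // a loop through an extra vertex 4, and an isolated vertex 5.
  Simplex_tree st;
  const int tetra0123[] = {0, 1, 2, 3};
  const int edge04[] = {0, 4};
  const int edge14[] = {1, 4};
  const int vertex5[] = {5};
  st.insert_simplex_and_subfaces(tetra0123, 1.0);
  st.insert_simplex_and_subfaces(edge04, 2.0);
  st.insert_simplex_and_subfaces(edge14, 2.0);
  st.insert_simplex_and_subfaces(vertex5, 3.0);
  st.set_dimension(3);          // the unit tests set the dimension explicitly as well
  st.initialize_filtration();   // sort the simplices in the order of the filtration

  Multi_field_persistence pcoh(st);
  pcoh.init_coefficients(2, 71);         // all coefficient fields Z/pZ for primes p in [2, 71]
  pcoh.compute_persistent_cohomology();
  pcoh.output_diagram();                 // one line per persistence interval
  return 0;
}

The single-field examples use the same call sequence with the Field_Zp policy and a single prime passed to init_coefficients, as the unit tests earlier in this series show.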
* -- cgit v1.2.3 From 335269117566c598b38b55345ad1ceef0b4f45e2 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Fri, 24 Jun 2016 14:32:18 +0000 Subject: Bug fix in Gudhi 1.3.0 documentation - Alpha_complex_from_off.cpp and Alpha_complex_from_points.cpp requires CGAL >= 4.7 git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/trunk@1346 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: e6f0966710d6b07f629490d4b6068d087777488f --- src/common/doc/main_page.h | 4 ---- 1 file changed, 4 deletions(-) (limited to 'src/common') diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h index abe7398b..8154cc8e 100644 --- a/src/common/doc/main_page.h +++ b/src/common/doc/main_page.h @@ -187,10 +187,6 @@ * Persistent_cohomology/alpha_complex_3d_persistence.cpp * \li * Simplex_tree/simplex_tree_from_alpha_shapes_3.cpp - * \li - * Alpha_complex/Alpha_complex_from_off.cpp - * \li - * Alpha_complex/Alpha_complex_from_points.cpp * * The following example requires CGAL version ≥ 4.6: * \li -- cgit v1.2.3 From 2ef73a660a093b8350edea9474e887cd6792f8e8 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Fri, 1 Jul 2016 09:26:36 +0000 Subject: Eigen3 find_package factorization CGAL find package in root CMakeLists.txt for Witness complex git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/eigen3_cmake_factorization@1377 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 32137f7687837c63ecfab3a2d11114b27873a1f2 --- CMakeLists.txt | 18 ++++-- data/points/generator/CMakeLists.txt | 6 -- src/Alpha_complex/example/CMakeLists.txt | 10 +--- src/Alpha_complex/test/CMakeLists.txt | 11 +--- src/CMakeLists.txt | 10 ++++ src/Persistent_cohomology/example/CMakeLists.txt | 71 ++++++++++-------------- src/Witness_complex/example/CMakeLists.txt | 24 -------- src/common/example/CMakeLists.txt | 9 +-- src/common/test/CMakeLists.txt | 8 +-- 9 files changed, 62 insertions(+), 105 deletions(-) (limited to 'src/common') diff --git a/CMakeLists.txt b/CMakeLists.txt index 9e85be8a..1d151455 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -20,9 +20,11 @@ else() find_package(GMP) if(GMP_FOUND) + message(STATUS "GMP_LIBRARIES = ${GMP_LIBRARIES}") INCLUDE_DIRECTORIES(${GMP_INCLUDE_DIR}) find_package(GMPXX) if(GMPXX_FOUND) + message(STATUS "GMPXX_LIBRARIES = ${GMPXX_LIBRARIES}") INCLUDE_DIRECTORIES(${GMPXX_INCLUDE_DIR}) endif() endif() @@ -40,6 +42,7 @@ else() unset(CGAL_FOUND) endif() if(CGAL_FOUND) + message(STATUS "CGAL version: ${CGAL_VERSION}.") include( ${CGAL_USE_FILE} ) endif() @@ -69,6 +72,13 @@ else() add_definitions(-DGUDHI_USE_TBB) endif() + find_package(Eigen3 3.1.0) + if (EIGEN3_FOUND) + message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") + include( ${EIGEN3_USE_FILE} ) + #include_directories (BEFORE "../../include") + endif (EIGEN3_FOUND) + # Required programs for unitary tests purpose FIND_PROGRAM( GCOVR_PATH gcovr ) if (GCOVR_PATH) @@ -79,10 +89,10 @@ else() if (GPROF_PATH) message("gprof found in ${GPROF_PATH}") endif() -FIND_PROGRAM( DIFF_PATH diff ) -if (DIFF_PATH) - message("diff found in ${DIFF_PATH}") -endif() + FIND_PROGRAM( DIFF_PATH diff ) + if (DIFF_PATH) + message("diff found in ${DIFF_PATH}") + endif() # BOOST ISSUE result_of vs C++11 add_definitions(-DBOOST_RESULT_OF_USE_DECLTYPE) diff --git a/data/points/generator/CMakeLists.txt b/data/points/generator/CMakeLists.txt index 13fb84fd..e29eb19c 100644 --- a/data/points/generator/CMakeLists.txt +++ b/data/points/generator/CMakeLists.txt @@ -3,13 +3,7 @@ project(Data_points_generator) if(CGAL_FOUND) if (NOT CGAL_VERSION VERSION_LESS 
4.6.0) - message(STATUS "CGAL version: ${CGAL_VERSION}.") - - find_package(Eigen3 3.1.0) if (EIGEN3_FOUND) - include( ${EIGEN3_USE_FILE} ) - include_directories (BEFORE "../../include") - add_executable ( hypergenerator hypergenerator.cpp ) target_link_libraries(hypergenerator ${Boost_SYSTEM_LIBRARY}) add_test(hypergenerator_on_sphere_3000_10_5.0 ${CMAKE_CURRENT_BINARY_DIR}/hypergenerator on sphere onSphere.off 3000 10 5.0) diff --git a/src/Alpha_complex/example/CMakeLists.txt b/src/Alpha_complex/example/CMakeLists.txt index c0ecf0d1..f1c7ae97 100644 --- a/src/Alpha_complex/example/CMakeLists.txt +++ b/src/Alpha_complex/example/CMakeLists.txt @@ -5,13 +5,7 @@ project(Alpha_complex_examples) # cmake -DCGAL_DIR=~/workspace/CGAL-4.7-Ic-41 ../../.. if(CGAL_FOUND) if (NOT CGAL_VERSION VERSION_LESS 4.7.0) - message(STATUS "CGAL version: ${CGAL_VERSION}.") - - find_package(Eigen3 3.1.0) if (EIGEN3_FOUND) - message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") - include( ${EIGEN3_USE_FILE} ) - add_executable ( alphapoints Alpha_complex_from_points.cpp ) target_link_libraries(alphapoints ${Boost_SYSTEM_LIBRARY} ${Boost_THREAD_LIBRARY} ${CGAL_LIBRARY}) add_executable ( alphaoffreader Alpha_complex_from_off.cpp ) @@ -35,9 +29,9 @@ if(CGAL_FOUND) add_test(alphaoffreader_doc_32_diff_files ${DIFF_PATH} ${CMAKE_CURRENT_BINARY_DIR}/alphaoffreader_result_32.txt ${CMAKE_CURRENT_BINARY_DIR}/alphaoffreader_for_doc_32.txt) endif() else() - message(WARNING "Eigen3 not found. Version 3.1.0 is required for Alpha shapes feature.") + message(WARNING "Eigen3 not found. Version 3.1.0 is required for Alpha complex examples.") endif() else() - message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile Alpha shapes feature. Version 4.6.0 is required.") + message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile Alpha complex examples. Version 4.7.0 is required.") endif () endif() diff --git a/src/Alpha_complex/test/CMakeLists.txt b/src/Alpha_complex/test/CMakeLists.txt index dae0d45f..b0723a41 100644 --- a/src/Alpha_complex/test/CMakeLists.txt +++ b/src/Alpha_complex/test/CMakeLists.txt @@ -14,14 +14,7 @@ endif() # cmake -DCGAL_DIR=~/workspace/CGAL-4.7-Ic-41 ../../.. if(CGAL_FOUND) if (NOT CGAL_VERSION VERSION_LESS 4.7.0) - message(STATUS "CGAL version: ${CGAL_VERSION}.") - - find_package(Eigen3 3.1.0) if (EIGEN3_FOUND) - message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") - include( ${EIGEN3_USE_FILE} ) - include_directories (BEFORE "../../include") - add_executable ( AlphaComplexUT Alpha_complex_unit_test.cpp ) target_link_libraries(AlphaComplexUT ${Boost_SYSTEM_LIBRARY} ${Boost_THREAD_LIBRARY} ${CGAL_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) if (TBB_FOUND) @@ -36,10 +29,10 @@ if(CGAL_FOUND) --log_format=XML --log_sink=${CMAKE_SOURCE_DIR}/AlphaComplexUT.xml --log_level=test_suite --report_level=no) else() - message(WARNING "Eigen3 not found. Version 3.1.0 is required for Alpha complex feature.") + message(WARNING "Eigen3 not found. Version 3.1.0 is required for Alpha complex unitary tests.") endif() else() - message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile Alpha complex feature. Version 4.6.0 is required.") + message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile Alpha complex unitary tests. 
Version 4.7.0 is required.") endif () endif() diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index e55e4395..80f6e1ff 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -17,9 +17,11 @@ else() find_package(GMP) if(GMP_FOUND) + message(STATUS "GMP_LIBRARIES = ${GMP_LIBRARIES}") INCLUDE_DIRECTORIES(${GMP_INCLUDE_DIR}) find_package(GMPXX) if(GMPXX_FOUND) + message(STATUS "GMPXX_LIBRARIES = ${GMPXX_LIBRARIES}") INCLUDE_DIRECTORIES(${GMPXX_INCLUDE_DIR}) endif() endif() @@ -37,6 +39,7 @@ else() unset(CGAL_FOUND) endif() if(CGAL_FOUND) + message(STATUS "CGAL version: ${CGAL_VERSION}.") include( ${CGAL_USE_FILE} ) endif() @@ -66,6 +69,13 @@ else() add_definitions(-DGUDHI_USE_TBB) endif() + find_package(Eigen3 3.1.0) + if (EIGEN3_FOUND) + message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") + include( ${EIGEN3_USE_FILE} ) + #include_directories (BEFORE "../../include") + endif (EIGEN3_FOUND) + # BOOST ISSUE result_of vs C++11 add_definitions(-DBOOST_RESULT_OF_USE_DECLTYPE) # BOOST ISSUE with Libraries name resolution under Windows diff --git a/src/Persistent_cohomology/example/CMakeLists.txt b/src/Persistent_cohomology/example/CMakeLists.txt index 5fb4ba12..94b9fbfa 100644 --- a/src/Persistent_cohomology/example/CMakeLists.txt +++ b/src/Persistent_cohomology/example/CMakeLists.txt @@ -36,11 +36,7 @@ add_test(persistence_from_file_3_2_0 ${CMAKE_CURRENT_BINARY_DIR}/persistence_fro add_test(persistence_from_file_3_3_100 ${CMAKE_CURRENT_BINARY_DIR}/persistence_from_file ${CMAKE_SOURCE_DIR}/data/points/bunny_5000.st -p 3 -m 100) if(GMP_FOUND) - message("GMP_LIBRARIES = ${GMP_LIBRARIES}") - if(GMPXX_FOUND) - message("GMPXX_LIBRARIES = ${GMPXX_LIBRARIES}") - add_executable(rips_multifield_persistence rips_multifield_persistence.cpp ) target_link_libraries(rips_multifield_persistence ${Boost_SYSTEM_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY} ${GMPXX_LIBRARIES} ${GMP_LIBRARIES}) add_executable ( performance_rips_persistence performance_rips_persistence.cpp ) @@ -56,49 +52,42 @@ else() # message(WARNING "GMP not found.") endif(GMP_FOUND) - if(CGAL_FOUND) - add_executable(alpha_complex_3d_persistence alpha_complex_3d_persistence.cpp) - target_link_libraries(alpha_complex_3d_persistence ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) - - if (TBB_FOUND) - target_link_libraries(alpha_complex_3d_persistence ${TBB_LIBRARIES}) - endif(TBB_FOUND) - add_test(alpha_complex_3d_persistence_2_0_5 ${CMAKE_CURRENT_BINARY_DIR}/alpha_complex_3d_persistence ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off 2 0.45) - +if(CGAL_FOUND) + add_executable(alpha_complex_3d_persistence alpha_complex_3d_persistence.cpp) + target_link_libraries(alpha_complex_3d_persistence ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) - if (NOT CGAL_VERSION VERSION_LESS 4.7.0) - message(STATUS "CGAL version: ${CGAL_VERSION}.") + if (TBB_FOUND) + target_link_libraries(alpha_complex_3d_persistence ${TBB_LIBRARIES}) + endif(TBB_FOUND) + add_test(alpha_complex_3d_persistence_2_0_5 ${CMAKE_CURRENT_BINARY_DIR}/alpha_complex_3d_persistence ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off 2 0.45) - find_package(Eigen3 3.1.0) - if (EIGEN3_FOUND) - message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") - include( ${EIGEN3_USE_FILE} ) - add_executable (alpha_complex_persistence alpha_complex_persistence.cpp) - target_link_libraries(alpha_complex_persistence ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY}) + if (NOT CGAL_VERSION VERSION_LESS 4.7.0) + if (EIGEN3_FOUND) + add_executable (alpha_complex_persistence alpha_complex_persistence.cpp) + 
target_link_libraries(alpha_complex_persistence ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY} ${Boost_PROGRAM_OPTIONS_LIBRARY}) - add_executable(periodic_alpha_complex_3d_persistence periodic_alpha_complex_3d_persistence.cpp) - target_link_libraries(periodic_alpha_complex_3d_persistence ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) + add_executable(periodic_alpha_complex_3d_persistence periodic_alpha_complex_3d_persistence.cpp) + target_link_libraries(periodic_alpha_complex_3d_persistence ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) - add_executable(custom_persistence_sort custom_persistence_sort.cpp) - target_link_libraries(custom_persistence_sort ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) + add_executable(custom_persistence_sort custom_persistence_sort.cpp) + target_link_libraries(custom_persistence_sort ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) - if (TBB_FOUND) - target_link_libraries(alpha_complex_persistence ${TBB_LIBRARIES}) - target_link_libraries(periodic_alpha_complex_3d_persistence ${TBB_LIBRARIES}) - target_link_libraries(custom_persistence_sort ${TBB_LIBRARIES}) - endif(TBB_FOUND) - add_test(alpha_complex_persistence_2_0_45 ${CMAKE_CURRENT_BINARY_DIR}/alpha_complex_persistence ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -m 0.45 -p 2) - add_test(periodic_alpha_complex_3d_persistence_2_0 ${CMAKE_CURRENT_BINARY_DIR}/periodic_alpha_complex_3d_persistence ${CMAKE_SOURCE_DIR}/data/points/grid_10_10_10_in_0_1.off ${CMAKE_SOURCE_DIR}/data/points/iso_cuboid_3_in_0_1.txt 2 0) - add_test(custom_persistence_sort ${CMAKE_CURRENT_BINARY_DIR}/custom_persistence_sort) + if (TBB_FOUND) + target_link_libraries(alpha_complex_persistence ${TBB_LIBRARIES}) + target_link_libraries(periodic_alpha_complex_3d_persistence ${TBB_LIBRARIES}) + target_link_libraries(custom_persistence_sort ${TBB_LIBRARIES}) + endif(TBB_FOUND) + add_test(alpha_complex_persistence_2_0_45 ${CMAKE_CURRENT_BINARY_DIR}/alpha_complex_persistence ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -m 0.45 -p 2) + add_test(periodic_alpha_complex_3d_persistence_2_0 ${CMAKE_CURRENT_BINARY_DIR}/periodic_alpha_complex_3d_persistence ${CMAKE_SOURCE_DIR}/data/points/grid_10_10_10_in_0_1.off ${CMAKE_SOURCE_DIR}/data/points/iso_cuboid_3_in_0_1.txt 2 0) + add_test(custom_persistence_sort ${CMAKE_CURRENT_BINARY_DIR}/custom_persistence_sort) - else() - message(WARNING "Eigen3 not found. Version 3.1.0 is required for Alpha shapes feature.") - endif(EIGEN3_FOUND) else() - message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile Alpha shapes feature. Version 4.6.0 is required.") - endif () + message(WARNING "Eigen3 not found. Version 3.1.0 is required for Alpha complex persistence examples.") + endif(EIGEN3_FOUND) else() - # message(WARNING "CGAL not found.") - endif(CGAL_FOUND) - + message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile Alpha complex persistence examples. Version 4.7.0 is required.") + endif () +else() + # message(WARNING "CGAL not found.") +endif(CGAL_FOUND) diff --git a/src/Witness_complex/example/CMakeLists.txt b/src/Witness_complex/example/CMakeLists.txt index e6a916cd..48ba9279 100644 --- a/src/Witness_complex/example/CMakeLists.txt +++ b/src/Witness_complex/example/CMakeLists.txt @@ -7,31 +7,7 @@ project(Witness_complex_examples) if(CGAL_FOUND) if (NOT CGAL_VERSION VERSION_LESS 4.6.0) - message(STATUS "CGAL version: ${CGAL_VERSION}.") - - include( ${CGAL_USE_FILE} ) - # In CMakeLists.txt, when include(${CGAL_USE_FILE}), CXX_FLAGS are overwritten. - # cf. 
http://doc.cgal.org/latest/Manual/installation.html#title40 - # A workaround is to add "-std=c++11" again. - # A fix would be to use https://cmake.org/cmake/help/v3.1/prop_gbl/CMAKE_CXX_KNOWN_FEATURES.html - # or even better https://cmake.org/cmake/help/v3.1/variable/CMAKE_CXX_STANDARD.html - # but it implies to use cmake version 3.1 at least. - if(NOT MSVC) - include(CheckCXXCompilerFlag) - CHECK_CXX_COMPILER_FLAG(-std=c++11 COMPILER_SUPPORTS_CXX11) - if(COMPILER_SUPPORTS_CXX11) - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11") - endif() - endif() - # - End of workaround - - find_package(Eigen3 3.1.0) if (EIGEN3_FOUND) - message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") - include( ${EIGEN3_USE_FILE} ) - message(STATUS "Eigen3 use file: ${EIGEN3_USE_FILE}.") - include_directories (BEFORE "../../include") - add_executable ( witness_complex_sphere witness_complex_sphere.cpp ) target_link_libraries(witness_complex_sphere ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) add_test( witness_complex_sphere_10 ${CMAKE_CURRENT_BINARY_DIR}/witness_complex_sphere 10) diff --git a/src/common/example/CMakeLists.txt b/src/common/example/CMakeLists.txt index 59ee12c4..4ea8c9cb 100644 --- a/src/common/example/CMakeLists.txt +++ b/src/common/example/CMakeLists.txt @@ -8,19 +8,14 @@ if(CGAL_FOUND) add_test(cgal3Doffreader ${CMAKE_CURRENT_BINARY_DIR}/cgal3Doffreader ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off) if (NOT CGAL_VERSION VERSION_LESS 4.7.0) - find_package(Eigen3 3.1.0) if (EIGEN3_FOUND) - message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") - include( ${EIGEN3_USE_FILE} ) - add_executable ( cgaloffreader CGAL_points_off_reader.cpp ) target_link_libraries(cgaloffreader ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) add_test(cgaloffreader ${CMAKE_CURRENT_BINARY_DIR}/cgaloffreader ${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off) - else() - message(WARNING "Eigen3 not found. Version 3.1.0 is required for Alpha shapes feature.") + message(WARNING "Eigen3 not found. Version 3.1.0 is required for cgaloffreader example.") endif() else() - message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile Alpha shapes feature. Version 4.6.0 is required.") + message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile cgaloffreader example. Version 4.7.0 is required.") endif () endif() diff --git a/src/common/test/CMakeLists.txt b/src/common/test/CMakeLists.txt index 5f9c5dde..0a88cf8e 100644 --- a/src/common/test/CMakeLists.txt +++ b/src/common/test/CMakeLists.txt @@ -13,11 +13,7 @@ endif() # need CGAL 4.7 if(CGAL_FOUND) if (NOT CGAL_VERSION VERSION_LESS 4.7.0) - find_package(Eigen3 3.1.0) if (EIGEN3_FOUND) - message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") - include( ${EIGEN3_USE_FILE} ) - add_executable ( poffreader_UT points_off_reader_unit_test.cpp ) target_link_libraries(poffreader_UT ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) @@ -30,10 +26,10 @@ if(CGAL_FOUND) --log_format=XML --log_sink=${CMAKE_SOURCE_DIR}/poffreader_UT.xml --log_level=test_suite --report_level=no) else() - message(WARNING "Eigen3 not found. Version 3.1.0 is required for Alpha shapes feature.") + message(WARNING "Eigen3 not found. Version 3.1.0 is required for points_off_reader unitary tests.") endif() else() - message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile Alpha shapes feature. Version 4.6.0 is required.") + message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile points_off_reader unitary tests. 
Version 4.7.0 is required.") endif () endif() -- cgit v1.2.3 From c4f7417b79dfe7610a9e7b5ffbf487abfe7fa3a0 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Thu, 4 Aug 2016 15:41:38 +0000 Subject: Make GUDHI Cmake less verbose on CGAL package finding git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/trunk@1415 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 2eebce43b0f75ab1cef3a2fe6f2c1bd0b38672a9 --- data/points/generator/CMakeLists.txt | 8 +++----- src/Alpha_complex/example/CMakeLists.txt | 10 +++------- src/CMakeLists.txt | 8 +++++--- src/Persistent_cohomology/example/CMakeLists.txt | 11 +---------- src/Witness_complex/example/CMakeLists.txt | 8 ++------ src/common/example/CMakeLists.txt | 8 ++------ 6 files changed, 16 insertions(+), 37 deletions(-) (limited to 'src/common') diff --git a/data/points/generator/CMakeLists.txt b/data/points/generator/CMakeLists.txt index e29eb19c..f559610c 100644 --- a/data/points/generator/CMakeLists.txt +++ b/data/points/generator/CMakeLists.txt @@ -13,8 +13,6 @@ if(CGAL_FOUND) # on cube is not available in CGAL add_test(hypergenerator_in_cube_7000_12_10.8 ${CMAKE_CURRENT_BINARY_DIR}/hypergenerator in cube inCube.off 7000 12 10.8) add_test(hypergenerator_in_cube_50000_2 ${CMAKE_CURRENT_BINARY_DIR}/hypergenerator in cube inCube.off 50000 3) - endif() - else() - message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile hypergenerator. Version 4.6.0 is required.") - endif () -endif() + endif(EIGEN3_FOUND) + endif(NOT CGAL_VERSION VERSION_LESS 4.6.0) +endif(CGAL_FOUND) diff --git a/src/Alpha_complex/example/CMakeLists.txt b/src/Alpha_complex/example/CMakeLists.txt index f1c7ae97..71a95d61 100644 --- a/src/Alpha_complex/example/CMakeLists.txt +++ b/src/Alpha_complex/example/CMakeLists.txt @@ -28,10 +28,6 @@ if(CGAL_FOUND) add_test(alphaoffreader_doc_60_diff_files ${DIFF_PATH} ${CMAKE_CURRENT_BINARY_DIR}/alphaoffreader_result_60.txt ${CMAKE_CURRENT_BINARY_DIR}/alphaoffreader_for_doc_60.txt) add_test(alphaoffreader_doc_32_diff_files ${DIFF_PATH} ${CMAKE_CURRENT_BINARY_DIR}/alphaoffreader_result_32.txt ${CMAKE_CURRENT_BINARY_DIR}/alphaoffreader_for_doc_32.txt) endif() - else() - message(WARNING "Eigen3 not found. Version 3.1.0 is required for Alpha complex examples.") - endif() - else() - message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile Alpha complex examples. Version 4.7.0 is required.") - endif () -endif() + endif(EIGEN3_FOUND) + endif(NOT CGAL_VERSION VERSION_LESS 4.7.0) +endif(CGAL_FOUND) diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 80f6e1ff..c02f816d 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -32,12 +32,14 @@ else() # A fix would be to use https://cmake.org/cmake/help/v3.1/prop_gbl/CMAKE_CXX_KNOWN_FEATURES.html # or even better https://cmake.org/cmake/help/v3.1/variable/CMAKE_CXX_STANDARD.html # but it implies to use cmake version 3.1 at least. - find_package(CGAL) + + # find CGAL in QUIET mode for cmake to be less verbose when CGAL is not found. 
+ find_package(CGAL QUIET) # Only CGAL versions > 4.4 supports what Gudhi uses from CGAL - if (CGAL_VERSION VERSION_LESS 4.4.0) + if (CGAL_VERSION VERSION_LESS 4.4.0 AND CGAL_FOUND) message("CGAL version ${CGAL_VERSION} is considered too old to be used by Gudhi.") unset(CGAL_FOUND) - endif() + endif(CGAL_VERSION VERSION_LESS 4.4.0 AND CGAL_FOUND) if(CGAL_FOUND) message(STATUS "CGAL version: ${CGAL_VERSION}.") include( ${CGAL_USE_FILE} ) diff --git a/src/Persistent_cohomology/example/CMakeLists.txt b/src/Persistent_cohomology/example/CMakeLists.txt index 94b9fbfa..d97d1b63 100644 --- a/src/Persistent_cohomology/example/CMakeLists.txt +++ b/src/Persistent_cohomology/example/CMakeLists.txt @@ -48,8 +48,6 @@ if(GMP_FOUND) add_test(rips_multifield_persistence_2_71 ${CMAKE_CURRENT_BINARY_DIR}/rips_multifield_persistence ${CMAKE_SOURCE_DIR}/data/points/Kl.txt -r 0.2 -d 3 -p 2 -q 71 -m 100) endif(GMPXX_FOUND) -else() - # message(WARNING "GMP not found.") endif(GMP_FOUND) if(CGAL_FOUND) @@ -81,13 +79,6 @@ if(CGAL_FOUND) add_test(alpha_complex_persistence_2_0_45 ${CMAKE_CURRENT_BINARY_DIR}/alpha_complex_persistence ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off -m 0.45 -p 2) add_test(periodic_alpha_complex_3d_persistence_2_0 ${CMAKE_CURRENT_BINARY_DIR}/periodic_alpha_complex_3d_persistence ${CMAKE_SOURCE_DIR}/data/points/grid_10_10_10_in_0_1.off ${CMAKE_SOURCE_DIR}/data/points/iso_cuboid_3_in_0_1.txt 2 0) add_test(custom_persistence_sort ${CMAKE_CURRENT_BINARY_DIR}/custom_persistence_sort) - - else() - message(WARNING "Eigen3 not found. Version 3.1.0 is required for Alpha complex persistence examples.") endif(EIGEN3_FOUND) - else() - message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile Alpha complex persistence examples. Version 4.7.0 is required.") - endif () -else() - # message(WARNING "CGAL not found.") + endif (NOT CGAL_VERSION VERSION_LESS 4.7.0) endif(CGAL_FOUND) diff --git a/src/Witness_complex/example/CMakeLists.txt b/src/Witness_complex/example/CMakeLists.txt index 48ba9279..4d67e0d0 100644 --- a/src/Witness_complex/example/CMakeLists.txt +++ b/src/Witness_complex/example/CMakeLists.txt @@ -11,10 +11,6 @@ if(CGAL_FOUND) add_executable ( witness_complex_sphere witness_complex_sphere.cpp ) target_link_libraries(witness_complex_sphere ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) add_test( witness_complex_sphere_10 ${CMAKE_CURRENT_BINARY_DIR}/witness_complex_sphere 10) - else() - message(WARNING "Eigen3 not found. Version 3.1.0 is required for witness_complex_sphere example.") - endif() - else() - message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile witness_complex_sphere example. Version 4.6.0 is required.") - endif () + endif(EIGEN3_FOUND) + endif (NOT CGAL_VERSION VERSION_LESS 4.6.0) endif() diff --git a/src/common/example/CMakeLists.txt b/src/common/example/CMakeLists.txt index 4ea8c9cb..0da3dcc0 100644 --- a/src/common/example/CMakeLists.txt +++ b/src/common/example/CMakeLists.txt @@ -12,10 +12,6 @@ if(CGAL_FOUND) add_executable ( cgaloffreader CGAL_points_off_reader.cpp ) target_link_libraries(cgaloffreader ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) add_test(cgaloffreader ${CMAKE_CURRENT_BINARY_DIR}/cgaloffreader ${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off) - else() - message(WARNING "Eigen3 not found. Version 3.1.0 is required for cgaloffreader example.") - endif() - else() - message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile cgaloffreader example. 
Version 4.7.0 is required.") - endif () + endif(EIGEN3_FOUND) + endif (NOT CGAL_VERSION VERSION_LESS 4.7.0) endif() -- cgit v1.2.3 From 918833c3917cfd62d85c42fa0d7bb36768a7de61 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Mon, 22 Aug 2016 13:24:27 +0000 Subject: Clarify installation documentation by separating in a section optional cots git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/trunk@1446 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 5c5a0544148248f16f03c888b2e8598bc2076a9e --- src/common/doc/main_page.h | 32 ++++++++++++++++++-------------- 1 file changed, 18 insertions(+), 14 deletions(-) (limited to 'src/common') diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h index 2391e147..9146bed1 100644 --- a/src/common/doc/main_page.h +++ b/src/common/doc/main_page.h @@ -154,6 +154,7 @@ */ /*! \page installation Gudhi installation + * \tableofcontents * As Gudhi is a header only library, there is no need to install the library. * * Examples of Gudhi headers inclusion can be found in \ref demos. @@ -162,6 +163,20 @@ * The library uses c++11 and requires Boost with version 1.48.0 or * more recent. It is a multi-platform library and compiles on Linux, Mac OSX and Visual Studio 2015. * + * \subsection demos Demos and examples + * To build the demos and examples, run the following commands in a terminal: +\verbatim cd /path-to-gudhi/ +mkdir build +cd build/ +cmake .. +make \endverbatim + * A list of examples is available here. + * + * \subsection testsuites Test suites + * To test your build, run the following command in a terminal: + * \verbatim make test \endverbatim + * + * \section optionallibrary Optional third-party library * \subsection gmp GMP: * The multi-field persistent homology algorithm requires GMP which is a free library for arbitrary-precision * arithmetic, operating on signed integers, rational numbers, and floating point numbers. @@ -176,7 +191,8 @@ * Having GMP version 4.2 or higher installed is recommended. * * \subsection cgal CGAL: - * CGAL is a C++ library which provides easy access to efficient and reliable geometric algorithms. + * The \ref alpha_complex data structure and few examples requires CGAL, which is a C++ library which provides easy + * access to efficient and reliable geometric algorithms. * * Having CGAL version 4.4 or higher installed is recommended. The procedure to install this library according to * your operating system is detailed here http://doc.cgal.org/latest/Manual/installation.html @@ -205,6 +221,7 @@ * Persistent_cohomology/custom_persistence_sort.cpp * * \subsection eigen3 Eigen3: + * The \ref alpha_complex data structure and few examples requires * Eigen3 is a C++ template library for linear algebra: * matrices, vectors, numerical solvers, and related algorithms. * @@ -270,19 +287,6 @@ * \li * Persistent_cohomology/custom_persistence_sort.cpp * - * \subsection demos Demos and examples - * To build the demos and examples, run the following commands in a terminal: -\verbatim cd /path-to-gudhi/ -mkdir build -cd build/ -cmake .. -make \endverbatim - * A list of examples is available here. - * - * \subsection testsuites Test suites - * To test your build, run the following command in a terminal: - * \verbatim make test \endverbatim - * * \section Contributions Bug reports and contributions * Please help us improving the quality of the GUDHI library. 
You may report bugs or suggestions to: * \verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim -- cgit v1.2.3 From ba76b65af98ad337e39b72fd4260baee17eb4f49 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Mon, 12 Sep 2016 12:47:01 +0000 Subject: Modify filtered complexes sections and examples. Modify persistence sections and examples git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/trunk@1487 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 6b0bdc7199f7229ac152175c4cbc6ebd79c9bc67 --- .../doc/Intro_persistent_cohomology.h | 62 ++++++++++++++++++---- src/Simplex_tree/example/README | 4 +- src/Simplex_tree/include/gudhi/Simplex_tree.h | 32 ----------- src/common/doc/main_page.h | 10 ++-- 4 files changed, 61 insertions(+), 47 deletions(-) (limited to 'src/common') diff --git a/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h b/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h index c8081cac..0cba6361 100644 --- a/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h +++ b/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h @@ -46,7 +46,7 @@ namespace persistent_cohomology { composed of three elements: topological spaces, their homology groups and an evolution scheme. -
Topological Spaces:
+ \section persistencetopolocalspaces Topological Spaces Topological spaces are represented by simplicial complexes. Let \f$V = \{1, \cdots ,|V|\}\f$ be a set of vertices. A simplex \f$\sigma\f$ is a subset of vertices @@ -84,7 +84,7 @@ namespace persistent_cohomology { Filtration_value filtration (Simplex_handle) that returns the value of the filtration on the simplex represented by the handle. -
Homology:
+ \section persistencehomology Homology For a ring \f$\mathcal{R}\f$, the group of n-chains, denoted \f$\mathbf{C}_n(\mathbf{K},\mathcal{R})\f$, of \f$\mathbf{K}\f$ is the group of formal sums of @@ -111,7 +111,7 @@ namespace persistent_cohomology { We refer to \cite Munkres-elementsalgtop1984 for an introduction to homology theory and to \cite DBLP:books/daglib/0025666 for an introduction to persistent homology. -
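As a concrete illustration (added here, not part of the patch): the toy complex used by the Persistent_cohomology unit tests in this same patch series, namely a filled tetrahedron on the vertices \f$\{0,1,2,3\}\f$ together with the extra edges \f$\{0,4\}\f$ and \f$\{1,4\}\f$ and an isolated vertex \f$5\f$, has Betti numbers \f$b_0 = 2\f$ (two connected components), \f$b_1 = 1\f$ (the cycle through vertices 0, 1 and 4 bounds no triangle of the complex) and \f$b_2 = 0\f$ (the tetrahedron is filled, so its boundary sphere is itself a boundary), which is exactly what those tests assert.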
Indexing Scheme:
+ \section persistenceindexingscheme Indexing Scheme "Changing" a simplicial complex consists in applying a simplicial map. An indexing scheme is a directed graph together with a traversal order, such that two @@ -140,18 +140,62 @@ namespace persistent_cohomology { its subsimplices of same filtration value) provides an indexing scheme. \section Examples - We provide several example files: run these examples with -h for details on their use, and read the README file. -\li rips_persistence.cpp computes the Rips complex of a point cloud and its persistence diagram. +We provide several example files: run these examples with -h for details on their use, and read the README file. -\li rips_multifield_persistence.cpp computes the Rips complex of a point cloud and its persistence diagram -with a family of field coefficients. +\li +Persistent_cohomology/rips_persistence.cpp computes the Rips complex of a point cloud and its persistence diagram. -\li performance_rips_persistence.cpp provides timings for the construction of the Rips complex on a set of -points sampling a Klein bottle in \f$\mathbb{R}^5\f$ with a simplex tree, its conversion to a +\li +Persistent_cohomology/rips_multifield_persistence.cpp computes the Rips complex of a point cloud and its +persistence diagram with a family of field coefficients. + +\li +Persistent_cohomology/performance_rips_persistence.cpp provides timings for the construction of the Rips complex +on a set of points sampling a Klein bottle in \f$\mathbb{R}^5\f$ with a simplex tree, its conversion to a Hasse diagram and the computation of persistent homology and multi-field persistent homology for the different representations. +\li +Persistent_cohomology/alpha_complex_3d_persistence.cpp computes the persistent homology with +\f$\mathbb{Z}/2\mathbb{Z}\f$ coefficients of the alpha complex on points sampling from an OFF file. +\code $> ./alpha_complex_3d_persistence ../../data/points/tore3D_300.off 2 0.45 \endcode +\code Simplex_tree dim: 3 +2 0 0 inf +2 1 0.0682162 1.0001 +2 1 0.0934117 1.00003 +2 2 0.56444 1.03938 \endcode + +\li +Persistent_cohomology/alpha_complex_persistence.cpp computes the persistent homology with +\f$\mathbb{Z}/p\mathbb{Z}\f$ coefficients of the alpha complex on points sampling from an OFF file. +\code $> ./alpha_complex_persistence -r 32 -p 2 -m 0.45 ../../data/points/tore3D_300.off \endcode +\code Alpha complex is of dimension 3 - 9273 simplices - 300 vertices. +Simplex_tree dim: 3 +2 0 0 inf +2 1 0.0682162 1.0001 +2 1 0.0934117 1.00003 +2 2 0.56444 1.03938 \endcode + +\li +Persistent_cohomology/periodic_alpha_complex_3d_persistence.cpp computes the persistent homology with +\f$\mathbb{Z}/2\mathbb{Z}\f$ coefficients of the periodic alpha complex on points sampling from an OFF file. +\code $> ./periodic_alpha_complex_3d_persistence ../../data/points/grid_10_10_10_in_0_1.off 3 1.0 \endcode +\code Periodic Delaunay computed. +Simplex_tree dim: 3 +3 0 0 inf +3 1 0.0025 inf +3 1 0.0025 inf +3 1 0.0025 inf +3 2 0.005 inf +3 2 0.005 inf +3 2 0.005 inf +3 3 0.0075 inf \endcode + +\li +Persistent_cohomology/plain_homology.cpp computes the plain homology of a simple simplicial complex without +filtration values. + \copyright GNU General Public License v3. 
*/ diff --git a/src/Simplex_tree/example/README b/src/Simplex_tree/example/README index 03c759cb..e37af790 100644 --- a/src/Simplex_tree/example/README +++ b/src/Simplex_tree/example/README @@ -52,7 +52,7 @@ EXAMPLE OF SIMPLE INSERTION *** Simplex tree construction with Z/2Z coefficients on weighted graph Klein bottle file: -./simplex_tree_from_file ../../../data/points/Klein_bottle_complex.txt 2 +./simplex_tree_from_cliques_of_graph ../../../data/points/Klein_bottle_complex.txt 2 Insert the 1-skeleton in the simplex tree in 0 s. Expand the simplex tree in 0 s. Information of the Simplex Tree: @@ -60,7 +60,7 @@ Information of the Simplex Tree: with Z/3Z coefficients: -./simplex_tree_from_file ../../../data/points/Klein_bottle_complex.txt 3 +./simplex_tree_from_cliques_of_graph ../../../data/points/Klein_bottle_complex.txt 3 Insert the 1-skeleton in the simplex tree in 0 s. Expand the simplex tree in 0 s. diff --git a/src/Simplex_tree/include/gudhi/Simplex_tree.h b/src/Simplex_tree/include/gudhi/Simplex_tree.h index fa9c0800..63e3f0e5 100644 --- a/src/Simplex_tree/include/gudhi/Simplex_tree.h +++ b/src/Simplex_tree/include/gudhi/Simplex_tree.h @@ -51,38 +51,6 @@ #include // for std::uint32_t namespace Gudhi { -/** \defgroup simplex_tree Filtered Complexes - * \author Clément Maria - * - * A simplicial complex \f$\mathbf{K}\f$ - * on a set of vertices \f$V = \{1, \cdots ,|V|\}\f$ is a collection of simplices - * \f$\{\sigma\}\f$, - * \f$\sigma \subseteq V\f$ such that \f$\tau \subseteq \sigma \in \mathbf{K} \rightarrow \tau \in - * \mathbf{K}\f$. The - * dimension \f$n=|\sigma|-1\f$ of \f$\sigma\f$ is its number of elements minus \f$1\f$. - * - * A filtration of a simplicial complex is - * a function \f$f:\mathbf{K} \rightarrow \mathbb{R}\f$ satisfying \f$f(\tau)\leq f(\sigma)\f$ whenever - * \f$\tau \subseteq \sigma\f$. Ordering the simplices by increasing filtration values - * (breaking ties so as a simplex appears after its subsimplices of same filtration value) - * provides an indexing scheme. - * - -
Implementations:
- There are two implementation of complexes. The first on is the Simplex_tree data structure. - The simplex tree is an efficient and flexible - data structure for representing general (filtered) simplicial complexes. The data structure - is described in \cite boissonnatmariasimplextreealgorithmica - \image html "Simplex_tree_representation.png" "Simplex tree representation" - - The second one is the Hasse_complex. The Hasse complex is a data structure representing - explicitly all co-dimension 1 incidence relations in a complex. It is consequently faster - when accessing the boundary of a simplex, but is less compact and harder to construct from - scratch. - - * \copyright GNU General Public License v3. - * @{ - */ struct Simplex_tree_options_full_featured; diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h index 9146bed1..0983051d 100644 --- a/src/common/doc/main_page.h +++ b/src/common/doc/main_page.h @@ -13,8 +13,8 @@ * The current release of the GUDHI library includes: * * \li Data structures to represent, construct and manipulate simplicial complexes. - * \li Algorithms to compute persistent homology and multi-field persistent homology. - * \li Simplication of simplicial complexes by edge contraction. + * \li Simplification of simplicial complexes by edge contraction. + * \li Algorithms to compute persistent homology persistent homology. * * All data-structures are generic and several of their aspects can be parameterized via template classes. * We refer to \cite gudhilibrary_ICMS14 for a detailed description of the design of the library. @@ -266,8 +266,10 @@ make \endverbatim * Simplex_tree/simplex_tree_from_alpha_shapes_3.cpp * \li * Simplex_tree/simplex_tree_from_cliques_of_graph.cpp - * \li - * Persistent_cohomology/alpha_shapes_persistence.cpp + * \li + * Persistent_cohomology/alpha_complex_3d_persistence.cpp + * \li + * Persistent_cohomology/alpha_complex_persistence.cpp * \li * Persistent_cohomology/rips_persistence_via_boundary_matrix.cpp * \li -- cgit v1.2.3 From efe9b0f91eb63640941fa04153d921061e099d99 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Fri, 16 Sep 2016 14:47:23 +0000 Subject: No more need of Test.h git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/warning_fix@1508 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: a0436136ab366116b61d0630a3da2b6407291db9 --- src/common/include/gudhi/Test.h | 105 ---------------------------------------- 1 file changed, 105 deletions(-) delete mode 100644 src/common/include/gudhi/Test.h (limited to 'src/common') diff --git a/src/common/include/gudhi/Test.h b/src/common/include/gudhi/Test.h deleted file mode 100644 index 6024c822..00000000 --- a/src/common/include/gudhi/Test.h +++ /dev/null @@ -1,105 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): David Salinas - * - * Copyright (C) 2014 INRIA Sophia Antipolis-Mediterranee (France) - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. 
- * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - * - */ - -#ifndef TEST_H_ -#define TEST_H_ - -#include -#include -#include -#include -#include - - -#define TEST(a) std::cout << "TEST: " << (a) << std::endl -#define TESTMSG(a, b) std::cout << "TEST: " << a << b << std::endl -#define TESTVALUE(a) std::cout << "TEST: " << #a << ": " << a << std::endl - -/** - * Class to perform test - */ - -class Test { - private: - std::string name; - bool (*test)(); - - std::string separation() const { - return "+++++++++++++++++++++++++++++++++++++++++++++++++\n"; - } - - std::string print_between_plus(std::string& s) const { - std::stringstream res; - res << "+++++++++++++++++" << s << "+++++++++++++++++\n"; - return res.str(); - } - - public: - Test(std::string name_, bool (*test_)()) { - name = name_; - test = test_; - } - - bool run() { - std::cout << print_between_plus(name); - return test(); - } - - std::string getName() { - return name; - } -}; - -class Tests { - private: - std::list tests; - - public: - void add(std::string name_, bool (*test_)()) { - Test test(name_, test_); - tests.push_back(test); - } - - bool run() { - bool tests_succesful(true); - std::vector res; - for (Test test : tests) { - res.push_back(test.run()); - } - std::cout << "\n\n results of tests : " << std::endl; - int i = 0; - for (Test t : tests) { - std::cout << "Test " << i << " \"" << t.getName() << "\" --> "; - if (res[i++]) { - std::cout << "OK" << std::endl; - } else { - std::cout << "Fail" << std::endl; - tests_succesful = false; - break; - } - } - return tests_succesful; - } -}; - -#endif // TEST_H_ -- cgit v1.2.3 From e54574c7290b28543b9c1e7d1b9a16f42825ae26 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Mon, 19 Sep 2016 17:27:24 +0000 Subject: Rename off file reader as stands in convention. Add an example with a vector of double for point type. Fix SO3 OFF files accordingly to OFF file standard. 
git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/trunk@1510 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: ff61bed1f1895bc5bf1af4ae946e2a84a692c390 --- data/points/SO3_10000.off | 4 +- data/points/SO3_50000.off | 4 +- src/common/example/CGAL_3D_points_off_reader.cpp | 41 ------------------- src/common/example/CGAL_points_off_reader.cpp | 46 ---------------------- src/common/example/CMakeLists.txt | 18 +++++---- .../example/example_CGAL_3D_points_off_reader.cpp | 41 +++++++++++++++++++ .../example/example_CGAL_points_off_reader.cpp | 46 ++++++++++++++++++++++ .../example_vector_double_points_off_reader.cpp | 41 +++++++++++++++++++ src/common/include/gudhi/Points_off_io.h | 11 +++--- 9 files changed, 148 insertions(+), 104 deletions(-) delete mode 100644 src/common/example/CGAL_3D_points_off_reader.cpp delete mode 100644 src/common/example/CGAL_points_off_reader.cpp create mode 100644 src/common/example/example_CGAL_3D_points_off_reader.cpp create mode 100644 src/common/example/example_CGAL_points_off_reader.cpp create mode 100644 src/common/example/example_vector_double_points_off_reader.cpp (limited to 'src/common') diff --git a/data/points/SO3_10000.off b/data/points/SO3_10000.off index fbf9b273..3f2a3cda 100644 --- a/data/points/SO3_10000.off +++ b/data/points/SO3_10000.off @@ -1,5 +1,5 @@ -OFF -10000 1 0 +nOFF +9 10000 1 0 2.08167e-17 0 -1 -0.500001 0.866026 0 0.866026 0.500001 -2.08167e-17 0 0 -1 0.500001 -0.866026 0 -0.866026 -0.500001 0 0 0 -1 -1 0 0 0 1 0 diff --git a/data/points/SO3_50000.off b/data/points/SO3_50000.off index 635e8988..9b23a78f 100644 --- a/data/points/SO3_50000.off +++ b/data/points/SO3_50000.off @@ -1,5 +1,5 @@ -OFF -50000 1 0 +nOFF +9 50000 1 0 2.08167e-17 0 -1 -0.500001 0.866026 0 0.866026 0.500001 -2.08167e-17 0 0 -1 0.500001 -0.866026 0 -0.866026 -0.500001 0 0 0 -1 -1 0 0 0 1 0 diff --git a/src/common/example/CGAL_3D_points_off_reader.cpp b/src/common/example/CGAL_3D_points_off_reader.cpp deleted file mode 100644 index d48bb17d..00000000 --- a/src/common/example/CGAL_3D_points_off_reader.cpp +++ /dev/null @@ -1,41 +0,0 @@ -#include - -#include - -#include -#include -#include - -using Kernel = CGAL::Exact_predicates_inexact_constructions_kernel; -using Point_3 = Kernel::Point_3; - -void usage(char * const progName) { - std::cerr << "Usage: " << progName << " inputFile.off" << std::endl; - exit(-1); -} - -int main(int argc, char **argv) { - if (argc != 2) { - std::cerr << "Error: Number of arguments (" << argc << ") is not correct" << std::endl; - usage(argv[0]); - } - - std::string offInputFile(argv[1]); - // Read the OFF file (input file name given as parameter) and triangulate points - Gudhi::Points_3D_off_reader off_reader(offInputFile); - // Check the read operation was correct - if (!off_reader.is_valid()) { - std::cerr << "Unable to read file " << offInputFile << std::endl; - usage(argv[0]); - } - - // Retrieve the triangulation - std::vector point_cloud = off_reader.get_point_cloud(); - - int n {0}; - for (auto point : point_cloud) { - ++n; - std::cout << "Point[" << n << "] = (" << point[0] << ", " << point[1] << ", " << point[2] << ")\n"; - } - return 0; -} diff --git a/src/common/example/CGAL_points_off_reader.cpp b/src/common/example/CGAL_points_off_reader.cpp deleted file mode 100644 index d1ca166d..00000000 --- a/src/common/example/CGAL_points_off_reader.cpp +++ /dev/null @@ -1,46 +0,0 @@ -#include - -// For CGAL points type in dimension d -// cf. 
http://doc.cgal.org/latest/Kernel_d/classCGAL_1_1Point__d.html -#include - -#include -#include -#include - -using Kernel = CGAL::Epick_d< CGAL::Dynamic_dimension_tag >; -using Point_d = Kernel::Point_d; - -void usage(char * const progName) { - std::cerr << "Usage: " << progName << " inputFile.off" << std::endl; - exit(-1); -} - -int main(int argc, char **argv) { - if (argc != 2) { - std::cerr << "Error: Number of arguments (" << argc << ") is not correct" << std::endl; - usage(argv[0]); - } - - std::string offInputFile(argv[1]); - // Read the OFF file (input file name given as parameter) and triangulate points - Gudhi::Points_off_reader off_reader(offInputFile); - // Check the read operation was correct - if (!off_reader.is_valid()) { - std::cerr << "Unable to read file " << offInputFile << std::endl; - usage(argv[0]); - } - - // Retrieve the triangulation - std::vector point_cloud = off_reader.get_point_cloud(); - - int n {0}; - for (auto point : point_cloud) { - std::cout << "Point[" << n << "] = "; - for (int i {0}; i < point.dimension(); i++) - std::cout << point[i] << " "; - std::cout << "\n"; - ++n; - } - return 0; -} diff --git a/src/common/example/CMakeLists.txt b/src/common/example/CMakeLists.txt index 0da3dcc0..b0c6d69a 100644 --- a/src/common/example/CMakeLists.txt +++ b/src/common/example/CMakeLists.txt @@ -1,17 +1,21 @@ cmake_minimum_required(VERSION 2.6) project(Common_examples) -# need CGAL 4.7 +add_executable ( vector_double_off_reader example_vector_double_points_off_reader.cpp ) +target_link_libraries(vector_double_off_reader ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) +add_test(vector_double_off_reader ${CMAKE_CURRENT_BINARY_DIR}/vector_double_off_reader ${CMAKE_SOURCE_DIR}/data/points/SO3_10000.off) + if(CGAL_FOUND) - add_executable ( cgal3Doffreader CGAL_3D_points_off_reader.cpp ) - target_link_libraries(cgal3Doffreader ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) - add_test(cgal3Doffreader ${CMAKE_CURRENT_BINARY_DIR}/cgal3Doffreader ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off) + add_executable ( cgal_3D_off_reader example_CGAL_3D_points_off_reader.cpp ) + target_link_libraries(cgal_3D_off_reader ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) + add_test(cgal_3D_off_reader ${CMAKE_CURRENT_BINARY_DIR}/cgal_3D_off_reader ${CMAKE_SOURCE_DIR}/data/points/tore3D_300.off) + # need CGAL 4.7 if (NOT CGAL_VERSION VERSION_LESS 4.7.0) if (EIGEN3_FOUND) - add_executable ( cgaloffreader CGAL_points_off_reader.cpp ) - target_link_libraries(cgaloffreader ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) - add_test(cgaloffreader ${CMAKE_CURRENT_BINARY_DIR}/cgaloffreader ${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off) + add_executable ( cgal_off_reader example_CGAL_points_off_reader.cpp ) + target_link_libraries(cgal_off_reader ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) + add_test(cgal_off_reader ${CMAKE_CURRENT_BINARY_DIR}/cgal_off_reader ${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off) endif(EIGEN3_FOUND) endif (NOT CGAL_VERSION VERSION_LESS 4.7.0) endif() diff --git a/src/common/example/example_CGAL_3D_points_off_reader.cpp b/src/common/example/example_CGAL_3D_points_off_reader.cpp new file mode 100644 index 00000000..d48bb17d --- /dev/null +++ b/src/common/example/example_CGAL_3D_points_off_reader.cpp @@ -0,0 +1,41 @@ +#include + +#include + +#include +#include +#include + +using Kernel = CGAL::Exact_predicates_inexact_constructions_kernel; +using Point_3 = Kernel::Point_3; + +void usage(char * const progName) { + std::cerr << "Usage: " << progName << " inputFile.off" << std::endl; + exit(-1); 
+} + +int main(int argc, char **argv) { + if (argc != 2) { + std::cerr << "Error: Number of arguments (" << argc << ") is not correct" << std::endl; + usage(argv[0]); + } + + std::string offInputFile(argv[1]); + // Read the OFF file (input file name given as parameter) and triangulate points + Gudhi::Points_3D_off_reader off_reader(offInputFile); + // Check the read operation was correct + if (!off_reader.is_valid()) { + std::cerr << "Unable to read file " << offInputFile << std::endl; + usage(argv[0]); + } + + // Retrieve the triangulation + std::vector point_cloud = off_reader.get_point_cloud(); + + int n {0}; + for (auto point : point_cloud) { + ++n; + std::cout << "Point[" << n << "] = (" << point[0] << ", " << point[1] << ", " << point[2] << ")\n"; + } + return 0; +} diff --git a/src/common/example/example_CGAL_points_off_reader.cpp b/src/common/example/example_CGAL_points_off_reader.cpp new file mode 100644 index 00000000..264231b2 --- /dev/null +++ b/src/common/example/example_CGAL_points_off_reader.cpp @@ -0,0 +1,46 @@ +#include + +// For CGAL points type in dimension d +// cf. http://doc.cgal.org/latest/Kernel_d/classCGAL_1_1Point__d.html +#include + +#include +#include +#include + +using Kernel = CGAL::Epick_d< CGAL::Dynamic_dimension_tag >; +using Point_d = Kernel::Point_d; + +void usage(char * const progName) { + std::cerr << "Usage: " << progName << " inputFile.off" << std::endl; + exit(-1); +} + +int main(int argc, char **argv) { + if (argc != 2) { + std::cerr << "Error: Number of arguments (" << argc << ") is not correct" << std::endl; + usage(argv[0]); + } + + std::string offInputFile(argv[1]); + // Read the OFF file (input file name given as parameter) and triangulate points + Gudhi::Points_off_reader off_reader(offInputFile); + // Check the read operation was correct + if (!off_reader.is_valid()) { + std::cerr << "Unable to read file " << offInputFile << std::endl; + usage(argv[0]); + } + + // Retrieve the triangulation + std::vector point_cloud = off_reader.get_point_cloud(); + + int n {0}; + for (auto point : point_cloud) { + std::cout << "Point[" << n << "] = "; + for (int i {0}; i < point.size(); i++) + std::cout << point[i] << " "; + std::cout << "\n"; + ++n; + } + return 0; +} diff --git a/src/common/example/example_vector_double_points_off_reader.cpp b/src/common/example/example_vector_double_points_off_reader.cpp new file mode 100644 index 00000000..f691db92 --- /dev/null +++ b/src/common/example/example_vector_double_points_off_reader.cpp @@ -0,0 +1,41 @@ +#include + +#include +#include +#include + +using Point_d = std::vector; + +void usage(char * const progName) { + std::cerr << "Usage: " << progName << " inputFile.off" << std::endl; + exit(-1); +} + +int main(int argc, char **argv) { + if (argc != 2) { + std::cerr << "Error: Number of arguments (" << argc << ") is not correct" << std::endl; + usage(argv[0]); + } + + std::string offInputFile(argv[1]); + // Read the OFF file (input file name given as parameter) and triangulate points + Gudhi::Points_off_reader off_reader(offInputFile); + // Check the read operation was correct + if (!off_reader.is_valid()) { + std::cerr << "Unable to read file " << offInputFile << std::endl; + usage(argv[0]); + } + + // Retrieve the triangulation + std::vector point_cloud = off_reader.get_point_cloud(); + + int n {0}; + for (auto point : point_cloud) { + std::cout << "Point[" << n << "] = "; + for (int i {0}; i < point.size(); i++) + std::cout << point[i] << " "; + std::cout << "\n"; + ++n; + } + return 0; +} diff --git 
a/src/common/include/gudhi/Points_off_io.h b/src/common/include/gudhi/Points_off_io.h index 74b49386..18b23e84 100644 --- a/src/common/include/gudhi/Points_off_io.h +++ b/src/common/include/gudhi/Points_off_io.h @@ -73,9 +73,8 @@ class Points_off_visitor_reader { * @details * Point_d must have a constructor with the following form: * - * @code template Point_d::Point_d(int d, InputIterator first, InputIterator last) @endcode + * @code template Point_d::Point_d(InputIterator first, InputIterator last) @endcode * - * where d is the point dimension. */ void point(const std::vector& point) { #ifdef DEBUG_TRACES @@ -86,7 +85,7 @@ class Points_off_visitor_reader { std::cout << std::endl; #endif // DEBUG_TRACES // Fill the point cloud - point_cloud.push_back(Point_d(point.size(), point.begin(), point.end())); + point_cloud.push_back(Point_d(point.begin(), point.end())); } // Off_reader visitor maximal_face implementation - Only points are read @@ -117,14 +116,14 @@ class Points_off_visitor_reader { * * \section Example * - * This example loads points from an OFF file and builds a vector of CGAL points in dimension d. + * This example loads points from an OFF file and builds a vector of points (vector of double). * Then, it is asked to display the points. * - * \include common/CGAL_points_off_reader.cpp + * \include common/example_vector_double_points_off_reader.cpp * * When launching: * - * \code $> ./cgaloffreader ../../data/points/alphacomplexdoc.off + * \code $> ./vector_double_off_reader ../../data/points/alphacomplexdoc.off * \endcode * * the program output is: -- cgit v1.2.3 From 5cdf5825e0e5a937c5bbc5cee49ed9aa34f0af0e Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Mon, 19 Sep 2016 19:40:44 +0000 Subject: use of std::vector for unitary test instead of CGAL points type rename unitary test as stands in convention git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/trunk@1511 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: b768b5a6bb0fe823abe6cbbea28d49c7ecb39a9d --- src/common/test/CMakeLists.txt | 27 +++------ src/common/test/points_off_reader_unit_test.cpp | 78 ------------------------- src/common/test/test_points_off_reader.cpp | 73 +++++++++++++++++++++++ 3 files changed, 81 insertions(+), 97 deletions(-) delete mode 100644 src/common/test/points_off_reader_unit_test.cpp create mode 100644 src/common/test/test_points_off_reader.cpp (limited to 'src/common') diff --git a/src/common/test/CMakeLists.txt b/src/common/test/CMakeLists.txt index 0a88cf8e..7ccdb752 100644 --- a/src/common/test/CMakeLists.txt +++ b/src/common/test/CMakeLists.txt @@ -10,26 +10,15 @@ if (GPROF_PATH) set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -pg") endif() -# need CGAL 4.7 -if(CGAL_FOUND) - if (NOT CGAL_VERSION VERSION_LESS 4.7.0) - if (EIGEN3_FOUND) - add_executable ( poffreader_UT points_off_reader_unit_test.cpp ) - target_link_libraries(poffreader_UT ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) +add_executable ( poffreader_UT test_points_off_reader.cpp ) +target_link_libraries(poffreader_UT ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) - # Do not forget to copy test files in current binary dir - file(COPY "${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) +# Do not forget to copy test files in current binary dir +file(COPY "${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) - # Unitary tests - add_test(poffreader_UT ${CMAKE_CURRENT_BINARY_DIR}/poffreader_UT - # XML format for Jenkins 
xUnit plugin - --log_format=XML --log_sink=${CMAKE_SOURCE_DIR}/poffreader_UT.xml --log_level=test_suite --report_level=no) +# Unitary tests +add_test(poffreader_UT ${CMAKE_CURRENT_BINARY_DIR}/poffreader_UT + # XML format for Jenkins xUnit plugin + --log_format=XML --log_sink=${CMAKE_SOURCE_DIR}/poffreader_UT.xml --log_level=test_suite --report_level=no) - else() - message(WARNING "Eigen3 not found. Version 3.1.0 is required for points_off_reader unitary tests.") - endif() - else() - message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile points_off_reader unitary tests. Version 4.7.0 is required.") - endif () -endif() diff --git a/src/common/test/points_off_reader_unit_test.cpp b/src/common/test/points_off_reader_unit_test.cpp deleted file mode 100644 index dbcc0434..00000000 --- a/src/common/test/points_off_reader_unit_test.cpp +++ /dev/null @@ -1,78 +0,0 @@ -/* This file is part of the Gudhi Library. The Gudhi library - * (Geometric Understanding in Higher Dimensions) is a generic C++ - * library for computational topology. - * - * Author(s): Vincent Rouvreau - * - * Copyright (C) 2015 INRIA Saclay (France) - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see . - */ - -#include - -// For CGAL points type in dimension d -// cf. 
http://doc.cgal.org/latest/Kernel_d/classCGAL_1_1Point__d.html -#include - -#include -#include -#include - -#define BOOST_TEST_DYN_LINK -#define BOOST_TEST_MODULE "points_off_read_write" -#include - -typedef CGAL::Epick_d< CGAL::Dynamic_dimension_tag > Kernel; -typedef Kernel::Point_d Point_d; - -BOOST_AUTO_TEST_CASE( points_doc_test ) -{ - // Read the OFF file (input file name given as parameter) and triangulates points - Gudhi::Points_off_reader off_reader("alphacomplexdoc.off"); - // Check the read operation was correct - BOOST_CHECK(off_reader.is_valid()); - - // Retrieve the triangulation - std::vector point_cloud = off_reader.get_point_cloud(); - BOOST_CHECK(point_cloud.size() == 7); - - std::vector expected_points; - std::vector point = {1.0, 1.0}; - expected_points.push_back(Point_d(2, point.begin(), point.end())); - point = {7.0, 0.0}; - expected_points.push_back(Point_d(2, point.begin(), point.end())); - point = {4.0, 6.0}; - expected_points.push_back(Point_d(2, point.begin(), point.end())); - point = {9.0, 6.0}; - expected_points.push_back(Point_d(2, point.begin(), point.end())); - point = {0.0, 14.0}; - expected_points.push_back(Point_d(2, point.begin(), point.end())); - point = {2.0, 19.0}; - expected_points.push_back(Point_d(2, point.begin(), point.end())); - point = {9.0, 17.0}; - expected_points.push_back(Point_d(2, point.begin(), point.end())); - - BOOST_CHECK(point_cloud == expected_points); -} - -BOOST_AUTO_TEST_CASE( Delaunay_triangulation_unexisting_file_read_test ) -{ - Gudhi::Points_off_reader off_reader("some_impossible_weird_file_name.off"); - // Check the read operation was correct - BOOST_CHECK(!off_reader.is_valid()); - - std::vector point_cloud = off_reader.get_point_cloud(); - BOOST_CHECK(point_cloud.size() == 0); -} diff --git a/src/common/test/test_points_off_reader.cpp b/src/common/test/test_points_off_reader.cpp new file mode 100644 index 00000000..b4f71182 --- /dev/null +++ b/src/common/test/test_points_off_reader.cpp @@ -0,0 +1,73 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Vincent Rouvreau + * + * Copyright (C) 2015 INRIA Saclay (France) + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +#include + +#include +#include +#include + +#define BOOST_TEST_DYN_LINK +#define BOOST_TEST_MODULE "points_off_read_write" +#include + +using Point_d = std::vector; + +BOOST_AUTO_TEST_CASE( points_doc_test ) +{ + // Read the OFF file (input file name given as parameter) and triangulates points + Gudhi::Points_off_reader off_reader("alphacomplexdoc.off"); + // Check the read operation was correct + BOOST_CHECK(off_reader.is_valid()); + + // Retrieve the triangulation + std::vector point_cloud = off_reader.get_point_cloud(); + BOOST_CHECK(point_cloud.size() == 7); + + std::vector expected_points; + std::vector point = {1.0, 1.0}; + expected_points.push_back(Point_d(point.begin(), point.end())); + point = {7.0, 0.0}; + expected_points.push_back(Point_d(point.begin(), point.end())); + point = {4.0, 6.0}; + expected_points.push_back(Point_d(point.begin(), point.end())); + point = {9.0, 6.0}; + expected_points.push_back(Point_d(point.begin(), point.end())); + point = {0.0, 14.0}; + expected_points.push_back(Point_d(point.begin(), point.end())); + point = {2.0, 19.0}; + expected_points.push_back(Point_d(point.begin(), point.end())); + point = {9.0, 17.0}; + expected_points.push_back(Point_d(point.begin(), point.end())); + + BOOST_CHECK(point_cloud == expected_points); +} + +BOOST_AUTO_TEST_CASE( Delaunay_triangulation_unexisting_file_read_test ) +{ + Gudhi::Points_off_reader off_reader("some_impossible_weird_file_name.off"); + // Check the read operation was correct + BOOST_CHECK(!off_reader.is_valid()); + + std::vector point_cloud = off_reader.get_point_cloud(); + BOOST_CHECK(point_cloud.size() == 0); +} -- cgit v1.2.3 From f4cae184ac11200a95c8b84efa8e029c870d7e2b Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Fri, 23 Sep 2016 11:07:56 +0000 Subject: Fix doxygen warnings git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/warning_fix@1550 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: 39ef2513b5d84cb392677ba10718f5e47734884c --- src/Bitmap_cubical_complex/doc/Gudhi_Cubical_Complex_doc.h | 2 +- src/Contraction/include/gudhi/Edge_contraction.h | 2 +- src/Persistent_cohomology/doc/Intro_persistent_cohomology.h | 2 +- src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h | 8 ++++---- src/common/include/gudhi/Points_3D_off_io.h | 4 ++-- src/common/include/gudhi/Points_off_io.h | 2 +- 6 files changed, 10 insertions(+), 10 deletions(-) (limited to 'src/common') diff --git a/src/Bitmap_cubical_complex/doc/Gudhi_Cubical_Complex_doc.h b/src/Bitmap_cubical_complex/doc/Gudhi_Cubical_Complex_doc.h index 4c9c04d9..5963caa3 100644 --- a/src/Bitmap_cubical_complex/doc/Gudhi_Cubical_Complex_doc.h +++ b/src/Bitmap_cubical_complex/doc/Gudhi_Cubical_Complex_doc.h @@ -63,7 +63,7 @@ namespace cubical_complex { * For further details and theory of cubical complexes, please consult \cite kaczynski2004computational as well as the * following paper \cite peikert2012topological . * - * \section datastructure Data structure. + * \section cubicalcomplexdatastructure Data structure. * * The implementation of Cubical complex provides a representation of complexes that occupy a rectangular region in * \f$\mathbb{R}^n\f$. 
This extra assumption allows for a memory efficient way of storing cubical complexes in a form diff --git a/src/Contraction/include/gudhi/Edge_contraction.h b/src/Contraction/include/gudhi/Edge_contraction.h index 5af13c3e..61f2d945 100644 --- a/src/Contraction/include/gudhi/Edge_contraction.h +++ b/src/Contraction/include/gudhi/Edge_contraction.h @@ -41,7 +41,7 @@ namespace contraction { \author David Salinas -\section Introduction +\section edgecontractionintroduction Introduction The purpose of this package is to offer a user-friendly interface for edge contraction simplification of huge simplicial complexes. It uses the \ref skbl data-structure whose size remains small during simplification diff --git a/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h b/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h index 0cba6361..433cfd3e 100644 --- a/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h +++ b/src/Persistent_cohomology/doc/Intro_persistent_cohomology.h @@ -139,7 +139,7 @@ namespace persistent_cohomology { by increasing filtration values (breaking ties so as a simplex appears after its subsimplices of same filtration value) provides an indexing scheme. -\section Examples +\section pcohexamples Examples We provide several example files: run these examples with -h for details on their use, and read the README file. diff --git a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h index bd907131..32fe411c 100644 --- a/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h +++ b/src/Skeleton_blocker/include/gudhi/Skeleton_blocker.h @@ -42,7 +42,7 @@ namespace skeleton_blocker { \author David Salinas -\section Introduction +\section skblintroduction Introduction The Skeleton-Blocker data-structure proposes a light encoding for simplicial complexes by storing only an *implicit* representation of its simplices \cite socg_blockers_2011,\cite blockers2012. @@ -53,7 +53,7 @@ This data-structure handles all simplicial complexes operations such as are operations that do not require simplex enumeration such as edge iteration, link computation or simplex contraction. -\section Definitions +\section skbldefinitions Definitions We recall briefly classical definitions of simplicial complexes \cite Munkres-elementsalgtop1984. @@ -108,7 +108,7 @@ and point access in addition. -\subsection Visitor +\subsection skblvisitor Visitor The class Skeleton_blocker_complex has a visitor that is called when usual operations such as adding an edge or remove a vertex are called. You may want to use this visitor to compute statistics or to update another data-structure (for instance this visitor is heavily used in the \ref contr package). @@ -116,7 +116,7 @@ You may want to use this visitor to compute statistics or to update another data -\section Example +\section skblexample Example \subsection Iterating Iterating through vertices, edges, blockers and simplices diff --git a/src/common/include/gudhi/Points_3D_off_io.h b/src/common/include/gudhi/Points_3D_off_io.h index 2647f11e..b0d24998 100644 --- a/src/common/include/gudhi/Points_3D_off_io.h +++ b/src/common/include/gudhi/Points_3D_off_io.h @@ -132,12 +132,12 @@ class Points_3D_off_visitor_reader { * * @code template Point_3::Point_3(double x, double y, double z) @endcode * - * @section Example + * @section point3doffioexample Example * * This example loads points from an OFF file and builds a vector of CGAL points in dimension 3. * Then, it is asked to display the points. 
* - * @include common/CGAL_3D_points_off_reader.cpp + * @include common/example_CGAL_3D_points_off_reader.cpp * * When launching: * diff --git a/src/common/include/gudhi/Points_off_io.h b/src/common/include/gudhi/Points_off_io.h index 18b23e84..29af8a8a 100644 --- a/src/common/include/gudhi/Points_off_io.h +++ b/src/common/include/gudhi/Points_off_io.h @@ -114,7 +114,7 @@ class Points_off_visitor_reader { * * where d is the point dimension. * - * \section Example + * \section pointoffioexample Example * * This example loads points from an OFF file and builds a vector of points (vector of double). * Then, it is asked to display the points. -- cgit v1.2.3 From 16aaf4cda5fd97da12a7f1da8b0a5168fac2e289 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Tue, 11 Oct 2016 13:57:03 +0000 Subject: Problem of merge with tangentialcomplex branch. Redo in an integration branch git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/tangential_integration@1701 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: fa029e8e90b3e203ea675f02098ec6fe95596f9f --- CMakeLists.txt | 10 +- biblio/bibliography.bib | 15 + biblio/how_to_cite_gudhi.bib | 9 + src/CMakeLists.txt | 7 +- src/Doxyfile | 3 +- src/GudhUI/model/Model.h | 6 +- src/Tangential_complex/benchmark/CMakeLists.txt | 40 + src/Tangential_complex/benchmark/RIB_exporter.h | 269 +++ src/Tangential_complex/benchmark/XML_exporter.h | 207 ++ .../benchmark/benchmark_script.txt | 221 ++ src/Tangential_complex/benchmark/benchmark_tc.cpp | 785 +++++++ .../doc/Intro_tangential_complex.h | 119 + src/Tangential_complex/doc/tc_example_01.png | Bin 0 -> 20323 bytes src/Tangential_complex/doc/tc_example_02.png | Bin 0 -> 36017 bytes src/Tangential_complex/doc/tc_example_03.png | Bin 0 -> 62990 bytes src/Tangential_complex/doc/tc_example_05.png | Bin 0 -> 36032 bytes src/Tangential_complex/doc/tc_example_06.png | Bin 0 -> 37195 bytes src/Tangential_complex/doc/tc_example_07.png | Bin 0 -> 49399 bytes src/Tangential_complex/doc/tc_example_07_after.png | Bin 0 -> 50132 bytes .../doc/tc_example_07_before.png | Bin 0 -> 48898 bytes src/Tangential_complex/doc/tc_example_08.png | Bin 0 -> 63636 bytes src/Tangential_complex/doc/tc_example_09.png | Bin 0 -> 35453 bytes src/Tangential_complex/doc/tc_examples.png | Bin 0 -> 150776 bytes src/Tangential_complex/example/CMakeLists.txt | 30 + src/Tangential_complex/example/example_basic.cpp | 46 + .../example/example_with_perturb.cpp | 53 + .../include/gudhi/Tangential_complex.h | 2277 ++++++++++++++++++++ .../gudhi/Tangential_complex/Simplicial_complex.h | 539 +++++ .../include/gudhi/Tangential_complex/config.h | 44 + .../include/gudhi/Tangential_complex/utilities.h | 195 ++ src/Tangential_complex/test/CMakeLists.txt | 31 + .../test/test_tangential_complex.cpp | 70 + src/cmake/modules/GUDHI_user_version_target.txt | 2 +- src/common/doc/main_page.h | 21 + src/common/include/gudhi/Clock.h | 48 +- src/common/include/gudhi/Debug_utils.h | 2 + src/common/include/gudhi/console_color.h | 97 + src/common/include/gudhi/random_point_generators.h | 477 ++++ .../include/gudhi_patches/CGAL/Convex_hull.h | 56 + .../gudhi_patches/CGAL/Delaunay_triangulation.h | 933 ++++++++ src/common/include/gudhi_patches/CGAL/Epeck_d.h | 53 + src/common/include/gudhi_patches/CGAL/Epick_d.h | 71 + .../CGAL/IO/Triangulation_off_ostream.h | 320 +++ .../CGAL/NewKernel_d/Cartesian_LA_base.h | 177 ++ .../CGAL/NewKernel_d/Cartesian_LA_functors.h | 344 +++ .../CGAL/NewKernel_d/Cartesian_base.h | 40 + .../CGAL/NewKernel_d/Cartesian_change_FT.h | 117 + 
.../CGAL/NewKernel_d/Cartesian_complete.h | 33 + .../CGAL/NewKernel_d/Cartesian_filter_K.h | 79 + .../CGAL/NewKernel_d/Cartesian_filter_NT.h | 93 + .../CGAL/NewKernel_d/Cartesian_per_dimension.h | 33 + .../CGAL/NewKernel_d/Cartesian_static_filters.h | 95 + .../gudhi_patches/CGAL/NewKernel_d/Coaffine.h | 330 +++ .../CGAL/NewKernel_d/Define_kernel_types.h | 50 + .../CGAL/NewKernel_d/Dimension_base.h | 49 + .../CGAL/NewKernel_d/Filtered_predicate2.h | 137 ++ .../CGAL/NewKernel_d/KernelD_converter.h | 199 ++ .../CGAL/NewKernel_d/Kernel_2_interface.h | 104 + .../CGAL/NewKernel_d/Kernel_3_interface.h | 102 + .../CGAL/NewKernel_d/Kernel_d_interface.h | 298 +++ .../CGAL/NewKernel_d/Kernel_object_converter.h | 134 ++ .../gudhi_patches/CGAL/NewKernel_d/LA_eigen/LA.h | 175 ++ .../CGAL/NewKernel_d/LA_eigen/constructors.h | 162 ++ .../CGAL/NewKernel_d/Lazy_cartesian.h | 188 ++ .../CGAL/NewKernel_d/Types/Aff_transformation.h | 59 + .../CGAL/NewKernel_d/Types/Hyperplane.h | 159 ++ .../gudhi_patches/CGAL/NewKernel_d/Types/Iso_box.h | 88 + .../gudhi_patches/CGAL/NewKernel_d/Types/Line.h | 66 + .../gudhi_patches/CGAL/NewKernel_d/Types/Ray.h | 66 + .../gudhi_patches/CGAL/NewKernel_d/Types/Segment.h | 121 ++ .../gudhi_patches/CGAL/NewKernel_d/Types/Sphere.h | 132 ++ .../CGAL/NewKernel_d/Types/Weighted_point.h | 205 ++ .../gudhi_patches/CGAL/NewKernel_d/Vector/array.h | 165 ++ .../gudhi_patches/CGAL/NewKernel_d/Vector/avx4.h | 213 ++ ...f_iterator_to_points_from_iterator_to_vectors.h | 76 + ...determinant_of_iterator_to_points_from_points.h | 211 ++ ...terminant_of_iterator_to_vectors_from_vectors.h | 201 ++ .../Vector/determinant_of_points_from_vectors.h | 164 ++ .../Vector/determinant_of_vectors_small_dim.h | 58 + .../determinant_of_vectors_small_dim_internal.h | 164 ++ .../gudhi_patches/CGAL/NewKernel_d/Vector/mix.h | 46 + .../gudhi_patches/CGAL/NewKernel_d/Vector/sse2.h | 145 ++ .../gudhi_patches/CGAL/NewKernel_d/Vector/v2int.h | 181 ++ .../gudhi_patches/CGAL/NewKernel_d/Vector/vector.h | 167 ++ .../CGAL/NewKernel_d/Wrapper/Cartesian_wrap.h | 305 +++ .../CGAL/NewKernel_d/Wrapper/Hyperplane_d.h | 131 ++ .../CGAL/NewKernel_d/Wrapper/Point_d.h | 284 +++ .../CGAL/NewKernel_d/Wrapper/Ref_count_obj.h | 120 ++ .../CGAL/NewKernel_d/Wrapper/Segment_d.h | 133 ++ .../CGAL/NewKernel_d/Wrapper/Sphere_d.h | 130 ++ .../CGAL/NewKernel_d/Wrapper/Vector_d.h | 266 +++ .../CGAL/NewKernel_d/Wrapper/Weighted_point_d.h | 129 ++ .../CGAL/NewKernel_d/function_objects_cartesian.h | 1355 ++++++++++++ .../CGAL/NewKernel_d/functor_properties.h | 40 + .../gudhi_patches/CGAL/NewKernel_d/functor_tags.h | 363 ++++ .../gudhi_patches/CGAL/NewKernel_d/static_int.h | 61 + .../gudhi_patches/CGAL/NewKernel_d/store_kernel.h | 104 + .../include/gudhi_patches/CGAL/NewKernel_d/utils.h | 306 +++ .../gudhi_patches/CGAL/Regular_triangulation.h | 1169 ++++++++++ .../CGAL/Regular_triangulation_traits_adapter.h | 288 +++ .../CGAL/TDS_full_cell_default_storage_policy.h | 99 + .../CGAL/TDS_full_cell_mirror_storage_policy.h | 71 + .../include/gudhi_patches/CGAL/Triangulation.h | 1424 ++++++++++++ .../CGAL/Triangulation_data_structure.h | 1603 ++++++++++++++ .../CGAL/Triangulation_ds_full_cell.h | 311 +++ .../gudhi_patches/CGAL/Triangulation_ds_vertex.h | 154 ++ .../gudhi_patches/CGAL/Triangulation_face.h | 111 + .../gudhi_patches/CGAL/Triangulation_full_cell.h | 148 ++ .../gudhi_patches/CGAL/Triangulation_vertex.h | 128 ++ .../include/gudhi_patches/CGAL/argument_swaps.h | 88 + .../gudhi_patches/CGAL/determinant_of_vectors.h | 117 + 
.../CGAL/internal/Combination_enumerator.h | 148 ++ .../CGAL/internal/Static_or_dynamic_array.h | 116 + .../CGAL/internal/Triangulation/Dummy_TDS.h | 49 + .../Triangulation/Triangulation_ds_iterators.h | 154 ++ .../CGAL/internal/Triangulation/utilities.h | 154 ++ .../gudhi_patches/CGAL/iterator_from_indices.h | 75 + .../gudhi_patches/CGAL/transforming_iterator.h | 123 ++ .../CGAL/transforming_pair_iterator.h | 127 ++ src/common/include/gudhi_patches/CGAL/typeset.h | 117 + 120 files changed, 23157 insertions(+), 26 deletions(-) create mode 100644 src/Tangential_complex/benchmark/CMakeLists.txt create mode 100644 src/Tangential_complex/benchmark/RIB_exporter.h create mode 100644 src/Tangential_complex/benchmark/XML_exporter.h create mode 100644 src/Tangential_complex/benchmark/benchmark_script.txt create mode 100644 src/Tangential_complex/benchmark/benchmark_tc.cpp create mode 100644 src/Tangential_complex/doc/Intro_tangential_complex.h create mode 100644 src/Tangential_complex/doc/tc_example_01.png create mode 100644 src/Tangential_complex/doc/tc_example_02.png create mode 100644 src/Tangential_complex/doc/tc_example_03.png create mode 100644 src/Tangential_complex/doc/tc_example_05.png create mode 100644 src/Tangential_complex/doc/tc_example_06.png create mode 100644 src/Tangential_complex/doc/tc_example_07.png create mode 100644 src/Tangential_complex/doc/tc_example_07_after.png create mode 100644 src/Tangential_complex/doc/tc_example_07_before.png create mode 100644 src/Tangential_complex/doc/tc_example_08.png create mode 100644 src/Tangential_complex/doc/tc_example_09.png create mode 100644 src/Tangential_complex/doc/tc_examples.png create mode 100644 src/Tangential_complex/example/CMakeLists.txt create mode 100644 src/Tangential_complex/example/example_basic.cpp create mode 100644 src/Tangential_complex/example/example_with_perturb.cpp create mode 100644 src/Tangential_complex/include/gudhi/Tangential_complex.h create mode 100644 src/Tangential_complex/include/gudhi/Tangential_complex/Simplicial_complex.h create mode 100644 src/Tangential_complex/include/gudhi/Tangential_complex/config.h create mode 100644 src/Tangential_complex/include/gudhi/Tangential_complex/utilities.h create mode 100644 src/Tangential_complex/test/CMakeLists.txt create mode 100644 src/Tangential_complex/test/test_tangential_complex.cpp create mode 100644 src/common/include/gudhi/console_color.h create mode 100644 src/common/include/gudhi/random_point_generators.h create mode 100644 src/common/include/gudhi_patches/CGAL/Convex_hull.h create mode 100644 src/common/include/gudhi_patches/CGAL/Delaunay_triangulation.h create mode 100644 src/common/include/gudhi_patches/CGAL/Epeck_d.h create mode 100644 src/common/include/gudhi_patches/CGAL/Epick_d.h create mode 100644 src/common/include/gudhi_patches/CGAL/IO/Triangulation_off_ostream.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_LA_base.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_LA_functors.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_base.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_change_FT.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_complete.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_filter_K.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_filter_NT.h create mode 100644 
src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_per_dimension.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_static_filters.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Coaffine.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Define_kernel_types.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Dimension_base.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Filtered_predicate2.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/KernelD_converter.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_2_interface.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_3_interface.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_d_interface.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_object_converter.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/LA_eigen/LA.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/LA_eigen/constructors.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Lazy_cartesian.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Aff_transformation.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Hyperplane.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Iso_box.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Line.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Ray.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Segment.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Sphere.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Weighted_point.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/array.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/avx4.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_iterator_to_points_from_iterator_to_vectors.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_iterator_to_points_from_points.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_iterator_to_vectors_from_vectors.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_points_from_vectors.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_vectors_small_dim.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_vectors_small_dim_internal.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/mix.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/sse2.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/v2int.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/vector.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Cartesian_wrap.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Hyperplane_d.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Point_d.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Ref_count_obj.h create mode 100644 
src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Segment_d.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Sphere_d.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Vector_d.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Weighted_point_d.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/function_objects_cartesian.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/functor_properties.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/functor_tags.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/static_int.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/store_kernel.h create mode 100644 src/common/include/gudhi_patches/CGAL/NewKernel_d/utils.h create mode 100644 src/common/include/gudhi_patches/CGAL/Regular_triangulation.h create mode 100644 src/common/include/gudhi_patches/CGAL/Regular_triangulation_traits_adapter.h create mode 100644 src/common/include/gudhi_patches/CGAL/TDS_full_cell_default_storage_policy.h create mode 100644 src/common/include/gudhi_patches/CGAL/TDS_full_cell_mirror_storage_policy.h create mode 100644 src/common/include/gudhi_patches/CGAL/Triangulation.h create mode 100644 src/common/include/gudhi_patches/CGAL/Triangulation_data_structure.h create mode 100644 src/common/include/gudhi_patches/CGAL/Triangulation_ds_full_cell.h create mode 100644 src/common/include/gudhi_patches/CGAL/Triangulation_ds_vertex.h create mode 100644 src/common/include/gudhi_patches/CGAL/Triangulation_face.h create mode 100644 src/common/include/gudhi_patches/CGAL/Triangulation_full_cell.h create mode 100644 src/common/include/gudhi_patches/CGAL/Triangulation_vertex.h create mode 100644 src/common/include/gudhi_patches/CGAL/argument_swaps.h create mode 100644 src/common/include/gudhi_patches/CGAL/determinant_of_vectors.h create mode 100644 src/common/include/gudhi_patches/CGAL/internal/Combination_enumerator.h create mode 100644 src/common/include/gudhi_patches/CGAL/internal/Static_or_dynamic_array.h create mode 100644 src/common/include/gudhi_patches/CGAL/internal/Triangulation/Dummy_TDS.h create mode 100644 src/common/include/gudhi_patches/CGAL/internal/Triangulation/Triangulation_ds_iterators.h create mode 100644 src/common/include/gudhi_patches/CGAL/internal/Triangulation/utilities.h create mode 100644 src/common/include/gudhi_patches/CGAL/iterator_from_indices.h create mode 100644 src/common/include/gudhi_patches/CGAL/transforming_iterator.h create mode 100644 src/common/include/gudhi_patches/CGAL/transforming_pair_iterator.h create mode 100644 src/common/include/gudhi_patches/CGAL/typeset.h (limited to 'src/common') diff --git a/CMakeLists.txt b/CMakeLists.txt index d49e0ce5..005df6d7 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -8,7 +8,7 @@ configure_file(GUDHIVersion.cmake.in "${CMAKE_SOURCE_DIR}/GUDHIVersion.cmake" @O find_package(Boost REQUIRED COMPONENTS system filesystem unit_test_framework chrono timer date_time program_options thread REQUIRED) if(NOT Boost_FOUND) - message(FATAL_ERROR "NOTICE: This demo requires Boost and will not be compiled.") + message(FATAL_ERROR "NOTICE: This program requires Boost and will not be compiled.") else() set(CMAKE_PREFIX_PATH "${CMAKE_SOURCE_DIR}/src/cmake/modules/") @@ -49,6 +49,10 @@ else() if(CGAL_FOUND) message(STATUS "CGAL version: ${CGAL_VERSION}.") include( ${CGAL_USE_FILE} ) + + if (NOT CGAL_VERSION VERSION_LESS 4.8.0) + 
include_directories(BEFORE "src/common/include/gudhi_patches") + endif() endif() if(MSVC) @@ -127,6 +131,7 @@ else() include_directories(src/Skeleton_blocker/include/) include_directories(src/Spatial_searching/include/) include_directories(src/Subsampling/include/) + include_directories(src/Tangential_complex/include/) include_directories(src/Witness_complex/include/) add_subdirectory(src/common/example) @@ -148,6 +153,9 @@ else() add_subdirectory(src/Spatial_searching/test) add_subdirectory(src/Subsampling/example) add_subdirectory(src/Subsampling/test) + add_subdirectory(src/Tangential_complex/example) + add_subdirectory(src/Tangential_complex/test) + add_subdirectory(src/Tangential_complex/benchmark) # data points generator add_subdirectory(data/points/generator) diff --git a/biblio/bibliography.bib b/biblio/bibliography.bib index 9fc01a5d..4940ec78 100644 --- a/biblio/bibliography.bib +++ b/biblio/bibliography.bib @@ -306,6 +306,21 @@ language={English}, bibsource = {DBLP, http://dblp.uni-trier.de} } +%------------------------------------------------------------------ +@article{tangentialcomplex2014, +author="Boissonnat, Jean-Daniel and Ghosh, Arijit", +title="Manifold Reconstruction Using Tangential Delaunay Complexes", +journal="Discrete {\&} Computational Geometry", +year="2014", +volume="51", +number="1", +pages="221--267", +abstract="We give a provably correct algorithm to reconstruct a k-dimensional smooth manifold embedded in d-dimensional Euclidean space. The input to our algorithm is a point sample coming from an unknown manifold. Our approach is based on two main ideas: the notion of tangential Delaunay complex defined in Boissonnat and Fl{\"o}totto (Comput. Aided Des. 36:161--174, 2004), Fl{\"o}totto (A coordinate system associated to a point cloud issued from a manifold: definition, properties and applications. Ph.D. thesis, 2003), Freedman (IEEE Trans. Pattern Anal. Mach. Intell. 24(10), 2002), and the technique of sliver removal by weighting the sample points (Cheng et al. in J. ACM 47:883--904, 2000). Differently from previous methods, we do not construct any subdivision of the d-dimensional ambient space. As a result, the running time of our algorithm depends only linearly on the extrinsic dimension d while it depends quadratically on the size of the input sample, and exponentially on the intrinsic dimension k. To the best of our knowledge, this is the first certified algorithm for manifold reconstruction whose complexity depends linearly on the ambient dimension. 
We also prove that for a dense enough sample the output of our algorithm is isotopic to the manifold and a close geometric approximation of the manifold.", +issn="1432-0444", +doi="10.1007/s00454-013-9557-2", +url="http://dx.doi.org/10.1007/s00454-013-9557-2" +} + %BOOKS %------------------------------------------------------------------ @book{DBLP:tibkat_237559129, diff --git a/biblio/how_to_cite_gudhi.bib b/biblio/how_to_cite_gudhi.bib index 9a143487..7e1eac4f 100644 --- a/biblio/how_to_cite_gudhi.bib +++ b/biblio/how_to_cite_gudhi.bib @@ -85,4 +85,13 @@ , booktitle = "{GUDHI} User and Reference Manual" , url = "http://gudhi.gforge.inria.fr/doc/latest/group__spatial__searching.html" , year = 2016 +} + +@incollection{gudhi:Tangential complex +, author = "Cl\'ement Jamin" +, title = "Tangential complex" +, publisher = "{GUDHI Editorial Board}" +, booktitle = "{GUDHI} User and Reference Manual" +, url = "http://gudhi.gforge.inria.fr/doc/latest/group__tangential__complex.html" +, year = 2016 } \ No newline at end of file diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 3a831814..e26b2d25 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -12,7 +12,7 @@ list(APPEND CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake/modules/") find_package(Boost REQUIRED COMPONENTS system filesystem program_options chrono timer date_time REQUIRED) if(NOT Boost_FOUND) - message(FATAL_ERROR "NOTICE: This demo requires Boost and will not be compiled.") + message(FATAL_ERROR "NOTICE: This program requires Boost and will not be compiled.") else() # For "make doxygen" @@ -47,6 +47,10 @@ else() if(CGAL_FOUND) message(STATUS "CGAL version: ${CGAL_VERSION}.") include( ${CGAL_USE_FILE} ) + + if (NOT CGAL_VERSION VERSION_LESS 4.8.0) + include_directories(BEFORE "include/gudhi_patches") + endif() endif() if(MSVC) @@ -110,6 +114,7 @@ else() add_subdirectory(example/Alpha_complex) add_subdirectory(example/Spatial_searching) add_subdirectory(example/Subsampling) + add_subdirectory(example/Tangential_complex) # data points generator add_subdirectory(data/points/generator) diff --git a/src/Doxyfile b/src/Doxyfile index 51950b3d..943869ad 100644 --- a/src/Doxyfile +++ b/src/Doxyfile @@ -847,7 +847,8 @@ IMAGE_PATH = doc/Skeleton_blocker/ \ doc/Witness_complex/ \ doc/Bitmap_cubical_complex/ \ doc/Subsampling/ \ - doc/Spatial_searching/ + doc/Spatial_searching/ \ + doc/Tangential_complex/ # The INPUT_FILTER tag can be used to specify a program that doxygen should # invoke to filter for each input file. 
Doxygen will invoke the filter program diff --git a/src/GudhUI/model/Model.h b/src/GudhUI/model/Model.h index 77e37b6c..fc284cc6 100644 --- a/src/GudhUI/model/Model.h +++ b/src/GudhUI/model/Model.h @@ -187,7 +187,7 @@ class Model { } void contract_edges(unsigned num_contractions) { - Clock c; + Gudhi::Clock c; Edge_contractor contractor(complex_, num_contractions); std::cout << "Time to simplify: " << c.num_seconds() << "s" << std::endl; } @@ -248,7 +248,7 @@ class Model { unsigned num_simplices = 0; int euler = 0; int dimension = 0; - Clock clock; + Gudhi::Clock clock; for (const auto &s : complex_.complex_simplex_range()) { num_simplices++; dimension = (std::max)(s.dimension(), dimension); @@ -271,7 +271,7 @@ class Model { #ifdef _WIN32 std::cout << "Works only on linux x64 for the moment\n"; #else - Clock clock; + Gudhi::Clock clock; run_chomp(); clock.end(); #endif diff --git a/src/Tangential_complex/benchmark/CMakeLists.txt b/src/Tangential_complex/benchmark/CMakeLists.txt new file mode 100644 index 00000000..12488201 --- /dev/null +++ b/src/Tangential_complex/benchmark/CMakeLists.txt @@ -0,0 +1,40 @@ +cmake_minimum_required(VERSION 2.6) +project(Tangential_complex_benchmark) + +if (GCOVR_PATH) + # for gcovr to make coverage reports - Corbera Jenkins plugin + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fprofile-arcs -ftest-coverage") +endif() +if (GPROF_PATH) + # for gprof to make coverage reports - Jenkins + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -pg") +endif() + +# need CGAL 4.8 +if(CGAL_FOUND) + if (NOT CGAL_VERSION VERSION_LESS 4.8.0) + message(STATUS "CGAL version: ${CGAL_VERSION}.") + + find_package(Eigen3 3.1.0) + if (EIGEN3_FOUND) + message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") + include( ${EIGEN3_USE_FILE} ) + + add_executable(Tangential_complex_benchmark benchmark_tc.cpp) + target_link_libraries(Tangential_complex_benchmark + ${Boost_DATE_TIME_LIBRARY} ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) + if (TBB_FOUND) + target_link_libraries(Tangential_complex_benchmark ${TBB_LIBRARIES}) + endif() + + # Do not forget to copy test files in current binary dir + #file(COPY "${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) + + else() + message(WARNING "Eigen3 not found. Version 3.1.0 is required for Tangential complex feature.") + endif() + else() + message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile Tangential complex feature. Version 4.8.0 is required.") + endif () +endif() + diff --git a/src/Tangential_complex/benchmark/RIB_exporter.h b/src/Tangential_complex/benchmark/RIB_exporter.h new file mode 100644 index 00000000..73c14041 --- /dev/null +++ b/src/Tangential_complex/benchmark/RIB_exporter.h @@ -0,0 +1,269 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Clement Jamin + * + * Copyright (C) 2016 INRIA + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. 
+ * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +#ifndef GUDHI_TC_RIB_EXPORTER_H +#define GUDHI_TC_RIB_EXPORTER_H + +#include + +#include +#include + +template +class RIB_exporter { + typedef typename PointRandomAccessRange::value_type Point; + typedef typename SimplexRange::value_type Simplex; + public: + + typedef std::tuple Color; // RGBA + typedef std::tuple Coords_choice; + + // Constructor + RIB_exporter( + PointRandomAccessRange const& points, + SimplexRange const& simplices, + std::ofstream &out, + std::string const& rendered_image_filename = "export.tif", + bool is_preview = false, // low-quality + Coords_choice coords_choice = std::make_tuple(0, 1, 2), + int image_width = 1920, + int image_height = 1080, + Color const& triangle_color = std::make_tuple(1., 1., 1., 1.), + bool ambient_light = true, + double ambient_intensity = 0.3, + bool shadow = true, + double shadow_intensity = 0.85, + double point_sphere_radius = 0.003) + : m_points(points), + m_simplices(simplices), + m_out(out), + m_rendered_image_filename(rendered_image_filename), + m_is_preview(is_preview), + m_coords_choice(coords_choice), + m_image_width(image_width), + m_image_height(image_height), + m_current_color(0., 0., 0., 0.), + m_current_alpha(1), + m_triangle_color(triangle_color), + m_ambient_light(ambient_light), + m_ambient_intensity(ambient_intensity), + m_shadow(shadow), + m_shadow_intensity(shadow_intensity), + m_point_sphere_radius(point_sphere_radius) { + m_out.precision(8); + } + + void write_file() { + write_header(); + write_lights(); + /*if (m_point_sphere_radius != 0.) + write_point_spheres();*/ + write_triangles(); + + m_out << "WorldEnd\n"; + } + + private: + + void write_header() { + m_out << "Option \"searchpath\" \"shader\" " + "\".:./shaders:%PIXIE_SHADERS%:%PIXIEHOME%/shaders\"\n"; + + if (m_is_preview) { + m_out << "Attribute \"visibility\" \"specular\" 1\n" + << "Attribute \"visibility\" \"transmission\" 1\n\n"; + } + + m_out << "Display \"" << m_rendered_image_filename << "\" \"file\" \"rgb\"\n"; + + if (!m_is_preview) { + m_out << "Format " << m_image_width << " " << m_image_height << " 1\n"; + } else { + double ratio = double(m_image_height) / double(m_image_width); + + int width = (ratio < 1.) ? 300 : int(300. / ratio); + int height = (ratio < 1.) ? int(ratio * 300.) 
: 300; + + m_out << "Format " << width << " " << height << " 1\n"; + } + + + if (m_image_width > m_image_height) { + double ratio = double(m_image_height) / double(m_image_width); + m_out << "ScreenWindow -1 1 " << -ratio << " " << ratio << "\n"; + } else if (m_image_height > m_image_width) { + double ratio = double(m_image_width) / double(m_image_height); + m_out << "ScreenWindow " << -ratio << " " << ratio << " -1 1\n"; + } + + m_out << "Projection \"perspective\" \"fov\" 45\n" + << "Translate 0 0 3\n" + << "PixelSamples 4 4\n" + << "PixelFilter \"catmull-rom\" 3 3\n" + << "ShadingInterpolation \"smooth\"\n" + << "Rotate -10 20 0 1\n" + << "WorldBegin\n"; + } + + void write_lights() { + if (!m_is_preview) { + // ShadowLight + m_out << "LightSource \"shadowdistant\" 1 \"from\" [0 0 0] \"to\" [0 0 1]" + << " \"shadowname\" \"raytrace\" \"intensity\" " + << m_shadow_intensity << "\n"; + + // Ambient light + m_out << "LightSource \"ambientlight\" 2 \"intensity\" " + << m_ambient_intensity << "\n"; + } else { + m_out << "LightSource \"distantLight\" 1 \"from\" [0 0 0] \"to\" [0 0 1]" + << " \"intensity\" " << m_shadow_intensity << "\n"; + + // Ambient light + m_out << "LightSource \"ambientlight\" 2 \"intensity\" " + << m_ambient_intensity << "\n"; + } + + // Background light + m_out << "LightSource \"ambientlight\" 99 \"intensity\" 1\n"; + + // Turn background light OFF + turn_background_light(false); + } + + void turn_background_light(bool turn_on) { + if (!turn_on) { + m_out << "Illuminate 1 1" << std::endl; + if (!m_is_preview) + m_out << "Illuminate 2 1" << std::endl; + m_out << "Illuminate 99 0" << std::endl; + } else { + m_out << "Illuminate 1 0" << std::endl; + if (!m_is_preview) + m_out << "Illuminate 2 0" << std::endl; + m_out << "Illuminate 99 1" << std::endl; + } + } + + void write_color(Color const& color, bool use_transparency) { + if (m_current_color == color) + return; + + m_current_color = color; + + // Write opacity data + if (use_transparency) + write_opacity(std::get<3>(color)); + + // Write color data + m_out << "Color [ " << std::get<0>(color) << " " << std::get<1>(color) + << " " << std::get<2>(color) << " ]\n"; + } + + void write_opacity(const double alpha) { + if (m_current_alpha == alpha) + return; + + m_current_alpha = alpha; + + // Write opacity data + m_out << "Opacity " << alpha << " " << alpha << " " << alpha << std::endl; + } + + void write_point(Point const& p) { + m_out << " " << p[std::get<0>(m_coords_choice)] + << " " << p[std::get<1>(m_coords_choice)] + << " " << p[std::get<2>(m_coords_choice)] << " "; + } + + void write_triangles() { + m_out << "Surface \"plastic\" \"Ka\" 0.65 \"Kd\" 0.85 \"Ks\" 0.25 \"roughness\" 0.1" << std::endl; + + for (auto const& simplex : m_simplices) { + std::vector triangles; + // Get the triangles composing the simplex + combinations(simplex, 3, std::back_inserter(triangles)); + for (auto const& t : triangles) + write_triangle(t); + } + } + + template + void write_triangle(PointIndexRange const& t) { + // Color + write_color(m_triangle_color, true); + + // Triangle + m_out << "Polygon \"P\" ["; + for (auto idx : t) + write_point(m_points[idx]); + m_out << "]" << std::endl; + + // Edges (will be drawn later on) + /*add_edge(p, q, edge_color); + add_edge(p, r, edge_color); + add_edge(q, r, edge_color); + + // Vertices (will be drawn later on) + add_vertex(p, edge_color); + add_vertex(q, edge_color); + add_vertex(r, edge_color);*/ + } + + void write_point_sphere(Point const& p) { + if (m_point_sphere_radius == 0.) 
+ return; + + m_out << "Translate " << p[0] << " " << p[1] << " " << p[2] << std::endl; + // Sphere radius zmin zmax thetamax + m_out << "Sphere " << m_point_sphere_radius << " " << -m_point_sphere_radius + << " " << m_point_sphere_radius << " 360" << std::endl; + m_out << "Identity" << std::endl; + } + + void write_point_spheres() { + write_color(std::make_tuple(0.7, 0.7, 0.7, 0.5), true); + for (auto const& p : m_points) + write_point_sphere(p); + } + + //=========================================================================== + + PointRandomAccessRange const& m_points; + SimplexRange const& m_simplices; + std::ofstream &m_out; + std::string m_rendered_image_filename; + bool m_is_preview; + Coords_choice m_coords_choice; + int m_image_width; + int m_image_height; + Color m_current_color; + Color m_triangle_color; + double m_current_alpha; + bool m_ambient_light; + double m_ambient_intensity; + bool m_shadow; + double m_shadow_intensity; + double m_point_sphere_radius; +}; + +#endif // GUDHI_TC_RIB_EXPORTER_H diff --git a/src/Tangential_complex/benchmark/XML_exporter.h b/src/Tangential_complex/benchmark/XML_exporter.h new file mode 100644 index 00000000..ed44f90a --- /dev/null +++ b/src/Tangential_complex/benchmark/XML_exporter.h @@ -0,0 +1,207 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Clement Jamin + * + * Copyright (C) 2016 INRIA + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +#include +#include +#include +#include +#include + +template +class Simple_XML_exporter { + public: + typedef value_type Value_type; + typedef std::vector Element; + typedef std::map Element_with_map; + typedef std::vector List_of_elements; + + Simple_XML_exporter( + const std::string &list_name, + const std::string &element_name, + const std::vector &subelement_names, + bool add_timestamp = true) + : m_list_name(list_name), + m_element_name(element_name), + m_subelement_names(subelement_names), + m_add_timestamp(add_timestamp) { } + + bool add_element(const Element &element) { + if (element.size() == m_subelement_names.size()) { + m_list_of_elements.push_back(element); + return true; + } else { + std::cerr << "ERROR: element.size() == m_subelement_names.size()" << std::endl; + return false; + } + } + + bool add_element(Element_with_map &element) { + Element elt; + + std::vector::const_iterator + it_subelement_name = m_subelement_names.begin(); + std::vector::const_iterator + it_subelement_name_end = m_subelement_names.end(); + for (; it_subelement_name != it_subelement_name_end; ++it_subelement_name) { + elt.push_back(element[*it_subelement_name]); + } + + return add_element(elt); + } + + bool export_to_xml(const std::string &filename) const { + std::ofstream xmlfile; + xmlfile.open(filename.c_str()); + xmlfile << "" << std::endl; + xmlfile << "<" << m_list_name << ">" << std::endl; + + typename List_of_elements::const_iterator it_element = m_list_of_elements.begin(); + typename List_of_elements::const_iterator it_element_end = m_list_of_elements.end(); + for (int id = 1; it_element != it_element_end; ++it_element, ++id) { + xmlfile << " <" << m_element_name << ">" << std::endl; + std::vector::const_iterator + it_subelement_name = m_subelement_names.begin(); + std::vector::const_iterator + it_subelement_name_end = m_subelement_names.end(); + + if (m_add_timestamp) + xmlfile << " " << time(NULL) << " " << std::endl; + + for (int i = 0; + it_subelement_name != it_subelement_name_end; + ++it_subelement_name, ++i) { + xmlfile + << " <" << *it_subelement_name << "> " + << (*it_element)[i] + << " " << std::endl; + } + xmlfile << " " << std::endl; + } + + xmlfile << "" << std::endl; + xmlfile.close(); + return 0; + + } + + protected: + std::string m_list_name; + std::string m_element_name; + std::vector m_subelement_names; + List_of_elements m_list_of_elements; + bool m_add_timestamp; +}; + +template +class Streaming_XML_exporter { + public: + typedef value_type Value_type; + typedef std::vector Element; + typedef std::map Element_with_map; + typedef std::vector List_of_elements; + + Streaming_XML_exporter( + const std::string &filename, + const std::string &list_name, + const std::string &element_name, + const std::vector &subelement_names, + bool add_timestamp = true) + : m_list_name(list_name), + m_element_name(element_name), + m_subelement_names(subelement_names), + m_add_timestamp(add_timestamp) { + m_xml_fstream.open(filename.c_str()); + if (m_xml_fstream.good()) { + m_xml_fstream << "" << std::endl; + m_xml_fstream << "<" << m_list_name << ">" << std::endl; + } else { + std::cerr << "Could not open file '" << filename << "'." 
<< std::endl; + } + } + + virtual ~Streaming_XML_exporter() { + close_file(); + } + + void close_file() { + m_xml_fstream.close(); + } + + bool add_element(const Element &element) { + if (element.size() == m_subelement_names.size()) { + m_xml_fstream << " <" << m_element_name << ">" << std::endl; + std::vector::const_iterator + it_subelement_name = m_subelement_names.begin(); + std::vector::const_iterator + it_subelement_name_end = m_subelement_names.end(); + + if (m_add_timestamp) { + m_xml_fstream << " " << time(NULL) << " " << std::endl; + } + + for (int i = 0; + it_subelement_name != it_subelement_name_end; + ++it_subelement_name, ++i) { + m_xml_fstream + << " <" << *it_subelement_name << "> " + << element[i] + << " " << std::endl; + } + m_xml_fstream << " " << std::endl; + + // Save current pointer position + std::ofstream::streampos pos = m_xml_fstream.tellp(); + // Close the XML file (temporarily) so that the XML file is always correct + m_xml_fstream << "" << std::endl; + // Restore the pointer position so that the next "add_element" will overwrite + // the end of the file + m_xml_fstream.seekp(pos); + + m_xml_fstream.flush(); + return true; + } else { + std::cerr << "ERROR: element.size() == m_subelement_names.size()" << std::endl; + return false; + } + } + + bool add_element(Element_with_map &element) { + Element elt; + + std::vector::const_iterator + it_subelement_name = m_subelement_names.begin(); + std::vector::const_iterator + it_subelement_name_end = m_subelement_names.end(); + for (; it_subelement_name != it_subelement_name_end; ++it_subelement_name) { + elt.push_back(element[*it_subelement_name]); + } + + return add_element(elt); + } + + protected: + std::ofstream m_xml_fstream; + std::string m_list_name; + std::string m_element_name; + std::vector m_subelement_names; + bool m_add_timestamp; +}; diff --git a/src/Tangential_complex/benchmark/benchmark_script.txt b/src/Tangential_complex/benchmark/benchmark_script.txt new file mode 100644 index 00000000..f4ddaac3 --- /dev/null +++ b/src/Tangential_complex/benchmark/benchmark_script.txt @@ -0,0 +1,221 @@ +#--------------------------------------------------------------------------------------------------------------------------------------------------------- +# Input PARAM1 PARAM2 PARAM3 NUM_P AMB INTR SPARSITY MAX_PERTURB PERTURB ADD_HDIM COLLAPSE FIX_TIME_LIMIT NUM_ITERATIONS +#--------------------------------------------------------------------------------------------------------------------------------------------------------- + +#---------------------------------------------------------------- Alpha TC tests ------------------------------------------------------------------------ +#generate_sphere_d 1 0 - 8 2 1 0.01 0.005 N Y N 3 1 #No noise => OK: 6 2d with a perturb sometimes +#generate_sphere_d 1 0 - 50 2 1 0.01 0.005 N Y N 3 1 #No noise => OK: 49 1d +#generate_sphere_d 1 1 - 50 2 1 0.01 0.005 N Y N 3 1 #Noise => OK: 45 2d + 3 3d +#generate_torus_d N - - 15 2 1 0.01 0.05 N Y N 10 1 +#generate_sphere_d 0.302 0 - 8 3 2 0.01 0.005 N Y N 60 1 #No noise => OK: 7 3d with a perturb sometimes +#generate_sphere_d 0.302 0 - 50 3 2 0.01 0.005 N Y N 60 1 #No noise => no inconsitencies +#generate_sphere_d 0.302 3 - 50 3 2 0.01 0.005 N Y N 60 1 #Noise => OK: 90 2d + 3 3d +#generate_sphere_d 1 1 - 500 4 3 0.01 0.005 N Y N 60 1 #Noise 1% => OK: 3113 3d + 35 4d +#generate_sphere_d 1 2 - 500 4 3 0.01 0.005 N Y N 60 1 #Noise 2% => OK: 2969 3d + 91 4d +#generate_sphere_d 1 2 - 5000 4 3 0.01 0.005 N Y N 60 1 #Noise 2% => OK: 27905 3d + 
2485 4d +#generate_sphere_d 0.302 2 - 300 2 1 0.01 0.005 N Y N 60 1 +#generate_torus_3D 2 1 N 200 3 2 0.01 0.05 N Y N 600 1 #OK: 1048 3d ~170s +#generate_torus_3D 2 1 N 2000 3 2 0.01 0.05 N Y N 600 1 #OK: 3545 2d + 27 3d ~35s +#generate_torus_d N 1 - 50 4 2 0.01 0.05 N Y N 3 1 #OK: 431 4d +#generate_torus_d N 1 - 500 4 2 0.01 0.05 N Y N 3 1 #OK: 881 2d + 37 3d +#generate_torus_d Y 1 - 250 4 2 0.01 0.05 N Y N 3 1 #OK: 80 d2 + 185 d3 +#generate_torus_d N - - 50 6 3 0.01 0.05 Y Y N 10 1 # +#generate_torus_d Y - - 700 6 3 0.01 0.05 Y Y N 100 1 #Grid +#generate_torus_d N - - 10000 6 3 0.01 0.05 Y Y N 30000 1 +#generate_moment_curve 0 1 - 10 3 1 0.01 0.005 N Y N 60 1 +#generate_two_spheres_d 3 4 - 500 3 2 0.01 0.05 N Y N 10 1 #OK: 320 2d + 1167 3d +#generate_klein_bottle_4D 40 15 - 500 4 2 0.01 0.2 N Y N 60 1 #OK: 901 d2 + 50 d3 + 1 d4 +#data/SO3_10000.xyz - - - 0 9 3 0.01 0.05 Y Y N 300 1 #Too long. Be careful with the memory consumption! +#data/buddha_100kv.xyz - - - 0 3 2 0.01 0.005 Y Y N 120 1 #Too long... +#data/fandisk.xyz - - - 0 3 2 0.01 0.005 Y Y N 5 1 #NOT OK: Tq & V do not intersect + +#---------------------------------------------------------- Spatial search benchmarking -------------------------------------------------------------- +#generate_torus_3D 2 1 Y 10000 3 2 0 0 Y N N 600 1 +#data/buddha_100kv.xyz - - - 0 3 2 0 0 N Y N 120 1 +#generate_torus_d N - - 10000 30 15 0 0 Y N N 3600 1 +#generate_torus_d N - - 100000 12 6 0 0 Y N N 3600 1 +#data/SO3_50000.xyz - - - 0 9 3 0 0 Y N N 60 1 +#data/Cy8.xyz - - - 0 24 2 0 0 N Y N 60 1 +#generate_sphere_d 0.5 - - 10000 2 1 0 0 N N Y 60 1 +#generate_sphere_d 0.5 - - 10000 3 2 0 0 N N Y 60 1 +#generate_sphere_d 0.5 - - 10000 4 3 0 0 N N Y 60 1 +#generate_sphere_d 0.5 - - 10000 5 4 0 0 N N Y 60 1 +#generate_sphere_d 0.5 - - 10000 6 5 0 0 N N Y 60 1 +#generate_sphere_d 0.5 - - 10000 7 6 0 0 N N Y 60 1 + +#---------------------------------------------------------- Very small cases for Debug mode -------------------------------------------------------------- +#generate_sphere_d 4 - - 20 3 2 0.05 0.025 Y N N 60 1 +generate_sphere_d 3 10 - 70 3 2 0.05 0.025 Y N N 60 1 +#generate_sphere_d 3 - - 1000 3 2 0.05 0.025 Y N N 60 1 +#generate_sphere_d 3 - - 10 4 3 0.05 0.025 Y N N 60 1 +#generate_sphere_d 3 - - 70 5 4 0.05 0.025 Y N N 60 1 +#generate_klein_bottle_4D 4 3 - 70 4 2 0.05 0.025 Y N N 3 1 +#generate_klein_bottle_variant_5D 4 3 - 70 5 2 0.05 0.025 Y N N 3 1 +#data/SO3_10000.xyz - - - 0 9 3 0.7 0.35 Y N N 60 1 +#generate_moment_curve 0 1 - 30 3 1 0.005 0.0025 Y N N 60 1 + +#------------------------------------------------------------------ From files -------------------------------------------------------------------------- +#data/SO3_50000.xyz - - - 0 9 3 0.05 0.05 Y N N 6000 1 +#data/SO3_10000.xyz - - - 0 9 3 0.1 0.1 Y N N 60000 1 +#data/cube3D_eps_0.1.xyz - - - 0 3 2 0.05 0.05 Y N N 3000 1 +#data/cube4D_eps_0.1.xyz - - - 0 4 3 0.05 0.05 N Y N 3000 1 +#data/cube5D_eps_0.1.xyz - - - 0 5 4 0.05 0.05 N Y N 3000 1 +#data/Cy8.xyz - - - 0 24 2 0.1 0.1 N Y N 60 1 +#data/Kl.xyz - - - 0 5 2 0.05 0.05 N Y N 60 1 +#data/S3.xyz - - - 0 4 3 0.05 0.05 N Y N 60 1 + +#data/Alvarez_variete_k2_D4_29700p.xyz - - - 0 4 2 0.01 0.01 Y N N 60 1 # points on a "grid" +#data/Alvarez_variete_k2_D4_10k_1x1_v2.xyz - - - 0 4 2 0.001 0.001 Y N N 200 1 +#data/Alvarez_variete_k2_D4_30k_1x1_v2.xyz - - - 0 4 2 0.001 0.001 Y N N 6000 1 +#data/Alvarez_variete_k2_D4_120k_2x2_denser_in_1x1.xyz - - - 0 4 2 0.002 0.002 Y N N 60000 1 +#data/Alvarez_variete_k2_D4_300k_2x2.xyz - - - 0 4 
2 0.005 0.005 Y N N 100000 1 +#data/Alvarez_variete_k2_D4_300k_2x2.xyz - - - 0 4 2 0.05 0.05 Y N N 50000 1 # heavy sparsification (e.g. 0.05 => 33k points) +#data/Alvarez_variete_k2_D4_90k_2x2.xyz - - - 0 4 2 0.003 0.003 Y N N 6000 1 +#data/Alvarez_variete_k2_D4_30k_10x10.xyz - - - 0 4 2 0.01 0.01 Y N N 60 1 +#data/Alvarez_variete_k2_D4_60k_10x10.xyz - - - 0 4 2 0.01 0.01 Y N N 1800 1 + +#data/Alvarez_variete_k2_D8_9003p.xyz - - - 0 8 2 0.001 0.001 Y N N 60 1 +#data/Alvarez_variete_k2_D8_90K.xyz - - - 0 8 2 0.001 0.001 Y N N 60 1 +#data/Alvarez_variete_k2_D8_300k_10x10.xyz - - - 0 8 2 0.01 0.01 Y N N 60 1 # heavy sparsification +#data/Alvarez_variete_k2_D8_900k_2x2.xyz - - - 0 8 2 0.01 0.01 Y N N 60 1 # heavy sparsification +#data/Alvarez_variete_k2_D8_900k_10x10.xyz - - - 0 8 2 0.02 0.02 Y N N 60 1 # heavy sparsification + +#data/Alvarez_courbeElliptique_k2_D8_200K_2x2.xyz - - - 0 8 2 0.006 0.006 Y N N 60 1 + +#data/Alvarez_surface_deg2_k2_D8_6000K_10x10.xyz - - - 0 8 2 0.01 0.01 Y N N 60 1 +#data/Alvarez_surface_deg3_k2_D8_902K.xyz - - - 0 8 2 0.003 0.003 Y N N 3600 1 +#data/Alvarez_surface_deg4_k2_D8_382K.xyz - - - 0 8 2 0.01 0.01 Y N N 60 1 +#data/Alvarez_surface_deg5_k2_D8_112K.xyz - - - 0 8 2 0.001 0.001 Y N N 240 1 +#data/Alvarez_surface_deg6_k2_D8_67K.xyz - - - 0 8 2 0.015 0.015 Y N N 60 1 +#data/Alvarez_surface_deg7_k2_D8_48K.xyz - - - 0 8 2 0.01 0.01 Y N N 60 1 +#data/Alvarez_surface_deg8_k2_D8_41K.xyz - - - 0 8 2 0.025 0.025 Y N N 60 1 +#data/Alvarez_surface_deg9_k2_D8_42K.xyz - - - 0 8 2 0.01 0.01 Y N N 60 1 +#data/Alvarez_surface_deg10_k2_D8_41K.xyz - - - 0 8 2 0.01 0.01 Y N N 60 1 + +#data/Alvarez_surface_deg8_k2_D8_41K.xyz - - - 0 8 2 0.02 0.02 Y N N 600 1 +#data/sparsified/Alvarez_deg8_k2_D8_32K_sparsified_from_41K_0.01.xyz - - - 0 8 2 0.05 0.05 Y N N 600 1 + +# PAS VRAIMENT DE DIFFERENCE +#data/Alvarez_surface_deg3_k2_D8_902K.xyz - - - 0 8 2 0.003 0.007 Y N N 3600 1 +#data/Alvarez_surface_deg3_k2_D8_902K.xyz - - - 0 8 2 0.014 0.007 Y N N 3600 1 + +# PAS VRAIMENT DE DIFFERENCE +#data/Alvarez_surface_deg7_k2_D8_48K.xyz - - - 0 8 2 0.01 0.005 Y N N 120 1 +#data/Alvarez_surface_deg7_k2_D8_48K.xyz - - - 0 8 2 0.02 0.005 Y N N 120 1 + +# PAS VRAIMENT DE DIFFERENCE +#data/Alvarez_surface_deg3_k2_D8_902K.xyz - - - 0 8 2 0.001 0.01 Y N N 3600 1 +#data/Alvarez_surface_deg3_k2_D8_902K.xyz - - - 0 8 2 0.02 0.01 Y N N 3600 1 +#data/sparsified/Alvarez_deg3_k2_D8_534k_sparsified_from_902K_0.001.xyz - - - 0 8 2 0.01 0.01 Y N N 3600 1 + +# PAS TRES CLAIR, MAIS DIFFERENCE EN NOMBRE D'ETAPES (>100 vs 15-20) : +#data/sparsified/Alvarez_deg8_k2_D8_38K_sparsified_from_41K_0.005.xyz - - - 0 8 2 0.02 0.02 Y N N 600 1 +#data/Alvarez_surface_deg8_k2_D8_41K.xyz - - - 0 8 2 0.001 0.02 Y N N 60 1 +#data/Alvarez_surface_deg8_k2_D8_41K.xyz - - - 0 8 2 0.025 0.02 Y N N 60 1 + +# With pre-computed tangent spaces +#data/test.pwt - - - 0 4 2 0.01 0.01 N N N 500000 1 +#data/Alvarez_variete_k2_D4_30000p.xyz - - - 0 4 2 0.01 0.01 Y N N 500000 1 +#data/Alvarez_variete_k2_D4_30000p_with_TSB.pwt - - - 0 4 2 0.01 0.01 Y N N 500000 1 + +#---------------------------------------------------------------------- 3D meshes ----------------------------------------------------------------------- +#data/buddha_100kv.xyz - - - 0 3 2 0.005 0.005 Y N N 3 1 +#data/fandisk.xyz - - - 0 3 2 0.01 0.01 Y N N 3 1 +#data/fertility.xyz - - - 0 3 2 0.4 0.4 Y N N 3 1 +#data/bunny.xyz - - - 0 3 2 0.0006 0.0003 Y N N 3000 1 +#data/blob.xyz - - - 0 3 2 0.01 0.01 Y N N 3 1 +#data/3holes.xyz - - - 0 3 2 0.01 0.01 Y N N 3 1 
+#data/785_hand_2500v.xyz - - - 0 3 2 0.01 0.01 Y N N 3 1 +#data/785_hand_50kv.xyz - - - 0 3 2 0.01 0.01 Y N N 3 1 +#data/bumpy_sphere.xyz - - - 0 3 2 0.01 0.01 Y N N 3 1 +#D:\INRIA\Data\_Models\Pointclouds\ajax_jotero.xyz - - - 0 3 2 0.01 0.01 Y N N 3 1 +#D:\INRIA\Data\_Models\Pointclouds\house.xyz - - - 0 3 2 0.01 0.01 Y N N 3 1 +#D:\INRIA\Data\_Models\Pointclouds\lucy_14M.xyz - - - 0 3 2 0.6 0.3 Y N N 3 1 + +#----------------------------------------------------------- Generated point sets ----------------------------------------------------------------------- +#generate_sphere_d 3 - - 4 3 2 0.05 0.05 Y N N 3000 1 +#generate_sphere_d 3 - - 30000 2 1 0.005 0.005 Y N N 3000 1 +#generate_sphere_d 1 - - 500000 3 2 0.005 0.005 Y N N 3000 1 +#generate_sphere_d 3 - - 30000 4 3 0.05 0.05 Y N N 3000 1 +#generate_sphere_d 3 0 - 300 3 2 0.005 0.005 Y N N 60 1 +#generate_sphere_d 3 4 - 3000 3 2 0.005 0.005 Y N N 60 1 +#generate_sphere_d 3 7 - 3000 3 2 0.005 0.005 Y N N 60 1 +#generate_torus_3D 2 1 N 300 3 2 0.05 0.05 Y N N 600 1 +#generate_torus_d N - - 200 4 2 0.05 0.05 Y N N 600 1 + +#generate_torus_d Y - - 100 6 3 0.1 0.19 Y N N 600 1 +#generate_torus_d Y - - 1000 6 3 0. 0.19 Y N N 600 1 +#generate_torus_d Y - - 10000 6 3 0. 0.19 Y N N 600 1 +#generate_torus_d Y - - 100000 6 3 0. 0.19 Y N N 600 1 +#generate_plane - - - 30000 3 2 0.005 0.005 Y N N 3000 1 +#generate_moment_curve 0 1 - 30000 6 1 0.005 0.005 Y N N 60 1 +#generate_klein_bottle_4D 4 3 - 700 4 2 0.05 0.05 Y N N 500 20 +#generate_klein_bottle_variant_5D 4 3 - 30000 5 2 0.05 0.05 Y N N 600 1 +#generate_klein_bottle_4D 8 5 - 5000 4 2 0.2 0.2 Y N N 60 1 #Takes forever +#data/sparsified/Flat_torus_195p_sparsified_0.05_from_200p.xyz N - - 0 4 2 -1 0.2 Y N N 600 1 + +#----------------------------------------------------------- Performance testing ------------------------------------------------------------------------ +# TC: 5.55 / 1st fix step : 0.2 +#data/fertility.xyz - - - 0 3 2 0.1 0.1 Y N N 10 1 + +#---------------------------------------------------------- 04/04/2016 - for stats ---------------------------------------------------------- + +#generate_torus_3D 2 1 N 5000 3 2 0.05 0.05 Y N N 120 1 +#generate_torus_d N - - 500 4 2 0.05 0.05 Y N N 120 1 +#data/Alvarez_variete_k2_D8_900k_2x2.xyz - - - 0 8 2 0.005 0.005 Y N N 120 1 +#data/Alvarez_surface_deg3_k2_D8_902K.xyz - - - 0 8 2 0.01 0.01 Y N N 120 1 +#data/Alvarez_surface_deg7_k2_D8_48K.xyz - - - 0 8 2 0.02 0.02 Y N N 600 10 +#data/Alvarez_surface_deg8_k2_D8_41K.xyz - - - 0 8 2 0.02 0.02 Y N N 120 1 +#data/Alvarez_surface_deg10_k2_D8_41K.xyz - - - 0 8 2 0.02 0.02 Y N N 120 1 +#generate_torus_d N - - 200000 6 3 0.05 0.05 Y N N 1200 1 + +#---------------------------------------------------------- 14/04/2016 - stats about noise ---------------------------------------------------------- + +#generate_torus_d Y 0 - 1000 4 2 0.05 0.19 Y N N 120 4 +#generate_torus_d Y 1 - 1000 4 2 0.05 0.19 Y N N 120 4 +#generate_torus_d Y 2 - 1000 4 2 0.05 0.19 Y N N 120 4 +#generate_torus_d Y 3 - 1000 4 2 0.05 0.19 Y N N 120 4 +#generate_torus_d Y 4 - 1000 4 2 0.05 0.19 Y N N 120 4 +#generate_torus_d Y 5 - 1000 4 2 0.05 0.19 Y N N 120 4 +#generate_torus_d Y 6 - 1000 4 2 0.05 0.19 Y N N 120 4 +#generate_torus_d Y 7 - 1000 4 2 0.05 0.19 Y N N 120 4 +#generate_torus_d Y 8 - 1000 4 2 0.05 0.19 Y N N 120 4 +#generate_torus_d Y 9 - 1000 4 2 0.05 0.19 Y N N 120 4 +#generate_torus_d Y 10 - 1000 4 2 0.05 0.19 Y N N 120 4 + +#generate_sphere_d 3 0 - 1000 4 3 0.05 0.05 Y N N 120 4 +#generate_sphere_d 3 1 - 1000 4 3 
0.05 0.05 Y N N 120 4 +#generate_sphere_d 3 2 - 1000 4 3 0.05 0.05 Y N N 120 4 +#generate_sphere_d 3 3 - 1000 4 3 0.05 0.05 Y N N 120 4 +#generate_sphere_d 3 4 - 1000 4 3 0.05 0.05 Y N N 120 4 +#generate_sphere_d 3 5 - 1000 4 3 0.05 0.05 Y N N 120 4 +#generate_sphere_d 3 6 - 1000 4 3 0.05 0.05 Y N N 120 4 +#generate_sphere_d 3 7 - 1000 4 3 0.05 0.05 Y N N 120 4 +#generate_sphere_d 3 8 - 1000 4 3 0.05 0.05 Y N N 120 4 +#generate_sphere_d 3 9 - 1000 4 3 0.05 0.05 Y N N 120 4 +#generate_sphere_d 3 10 - 1000 4 3 0.05 0.05 Y N N 120 4 + +#generate_klein_bottle_4D 4 3 0 5000 4 2 0.05 0.05 Y N N 120 4 +#generate_klein_bottle_4D 4 3 0.01 5000 4 2 0.05 0.05 Y N N 120 4 +#generate_klein_bottle_4D 4 3 0.02 5000 4 2 0.05 0.05 Y N N 120 4 +#generate_klein_bottle_4D 4 3 0.03 5000 4 2 0.05 0.05 Y N N 120 4 +#generate_klein_bottle_4D 4 3 0.04 5000 4 2 0.05 0.05 Y N N 120 4 +#generate_klein_bottle_4D 4 3 0.05 5000 4 2 0.05 0.05 Y N N 120 4 +#generate_klein_bottle_4D 4 3 0.06 5000 4 2 0.05 0.05 Y N N 120 4 +#generate_klein_bottle_4D 4 3 0.07 5000 4 2 0.05 0.05 Y N N 120 4 + +#---------------------------------------------------------- 04/2016 - stats with different perturb techniques ---------------------------------------------------------- + +# Tangential translation +#data/SO3_50000.xyz - - - 0 9 3 0 0.05 Y N N 500 10 +#data/Alvarez_surface_deg7_k2_D8_48K.xyz - - - 0 8 2 0.02 0.01 Y N N 120 10 +#generate_klein_bottle_4D 4 3 0 5000 4 2 0.05 0.05 Y N N 120 10 +#generate_torus_d Y 0 - 1000 4 2 0.05 0.19 Y N N 120 10 +#generate_sphere_d 3 1 - 1000 4 3 0.05 0.05 Y N N 120 10 + +# Weight +#data/SO3_50000.xyz - - - 0 9 3 0.1 0.05 Y N N 500 10 +#data/Alvarez_surface_deg7_k2_D8_48K.xyz - - - 0 8 2 0.02 0.01 Y N N 120 10 +#generate_klein_bottle_4D 4 3 0 5000 4 2 0.05 0.025 Y N N 20000 10 +#generate_torus_d Y 0 - 1000 4 2 0.05 0.025 Y N N 120 10 +#generate_sphere_d 3 1 - 1000 4 3 0.05 0.025 Y N N 12000 10 \ No newline at end of file diff --git a/src/Tangential_complex/benchmark/benchmark_tc.cpp b/src/Tangential_complex/benchmark/benchmark_tc.cpp new file mode 100644 index 00000000..943fcb54 --- /dev/null +++ b/src/Tangential_complex/benchmark/benchmark_tc.cpp @@ -0,0 +1,785 @@ +/****************************************************************************** +This benchmark allows to compute the Tangential Complex from input files or +generated point sets. + +It reads the benchmark_script.txt file (located in the same folder as this +file) and compute one or several complexes for each line. Unless TC_NO_EXPORT +is defined, each complex is exported as an OFF file and/or as a RIB file +(RenderMan). In addition an XML file is created at each run of the benchmark. +It contains statistics about the complexes that were created. This XML file +can be processed in Excel, for example. 
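+
+For orientation, here is one concrete entry taken from the benchmark_script.txt
+added by this patch (the uncommented debug-mode line), matched against the
+column header given at the top of that file; reading PARAM1/PARAM2 as radius
+and radius noise percentage follows the generate_sphere_d branch of main() below:
+
+  Input             PARAM1 PARAM2 PARAM3 NUM_P AMB INTR SPARSITY MAX_PERTURB PERTURB ADD_HDIM COLLAPSE FIX_TIME_LIMIT NUM_ITERATIONS
+  generate_sphere_d 3      10     -      70    3   2    0.05     0.025       Y       N        N        60             1
+
+i.e. generate 70 points on a sphere of radius 3 (10% radius noise) in R^3,
+build a 2-dimensional tangential complex with sparsity 0.05, and perturb with a
+maximum radius of 0.025 and a 60-second time limit, for a single iteration.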
+ ******************************************************************************/ + +// Without TBB_USE_THREADING_TOOL Intel Inspector XE will report false positives in Intel TBB +// (http://software.intel.com/en-us/articles/compiler-settings-for-threading-error-analysis-in-intel-inspector-xe/) +#ifdef _DEBUG +#define TBB_USE_THREADING_TOOL +#endif + +#include + +//#define GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM +//#define TC_INPUT_STRIDES 3 // only take one point every TC_INPUT_STRIDES points +#define TC_NO_EXPORT // do not output OFF files +//#define TC_EXPORT_TO_RIB // +//#define GUDHI_TC_EXPORT_SPARSIFIED_POINT_SET +//#define GUDHI_TC_EXPORT_ALL_COORDS_IN_OFF + +const std::size_t ONLY_LOAD_THE_FIRST_N_POINTS = 20000000; + +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +#include +#include +#include + +#include +#include +#include +#include // for std::sqrt + +#ifdef GUDHI_USE_TBB +#include +#endif +#include "XML_exporter.h" +#include "RIB_exporter.h" +#define GUDHI_TC_EXPORT_PERFORMANCE_DATA +#define GUDHI_TC_SET_PERFORMANCE_DATA(value_name, value) \ + XML_perf_data::set(value_name, value); + + +namespace subsampl = Gudhi::subsampling; +namespace tc = Gudhi::tangential_complex; + +const char * const BENCHMARK_SCRIPT_FILENAME = "benchmark_script.txt"; + +typedef CGAL::Epick_d Kernel; +typedef Kernel::FT FT; +typedef Kernel::Point_d Point; +typedef Kernel::Vector_d Vector; +typedef tc::Tangential_complex< +Kernel, CGAL::Dynamic_dimension_tag, +CGAL::Parallel_tag> TC; +typedef TC::Simplex Simplex; +typedef TC::Simplex_set Simplex_set; + +class XML_perf_data { + public: + typedef Streaming_XML_exporter XML_exporter; + + XML_perf_data(const std::string &filename) + : m_xml(filename, "ContainerPerformance", "Perf", + construct_subelements_names()) { } + + virtual ~XML_perf_data() { } + + static XML_perf_data &get() { + static XML_perf_data singleton(build_filename()); + return singleton; + } + + template + static void set(const std::string &name, Value_type value) { + get().set_data(name, value); + } + + static void commit() { + get().commit_current_element(); + } + + protected: + + static std::string build_filename() { + std::stringstream sstr; + sstr << "perf_logs/Performance_log_" << time(0) << ".xml"; + return sstr.str(); + } + + static std::vector construct_subelements_names() { + std::vector subelements; + subelements.push_back("Input"); + subelements.push_back("Param1"); + subelements.push_back("Param2"); + subelements.push_back("Param3"); + subelements.push_back("Intrinsic_dim"); + subelements.push_back("Ambient_dim"); + subelements.push_back("Num_threads"); + subelements.push_back("Sparsity"); + subelements.push_back("Max_perturb"); + subelements.push_back("Num_points_in_input"); + subelements.push_back("Num_points"); + subelements.push_back("Perturb_technique"); + subelements.push_back("Perturb_which_points"); + subelements.push_back("Initial_num_inconsistent_local_tr"); + subelements.push_back("Best_num_inconsistent_local_tr"); + subelements.push_back("Final_num_inconsistent_local_tr"); + subelements.push_back("Init_time"); + subelements.push_back("Comput_time"); + subelements.push_back("Perturb_successful"); + subelements.push_back("Perturb_time"); + subelements.push_back("Perturb_steps"); + subelements.push_back("Result_pure_pseudomanifold"); + subelements.push_back("Result_num_wrong_dim_simplices"); + subelements.push_back("Result_num_wrong_number_of_cofaces"); + subelements.push_back("Result_num_unconnected_stars"); 
+ subelements.push_back("Info"); + + return subelements; + } + + void set_data(const std::string &name, const std::string &value) { + m_current_element[name] = value; + } + + template + void set_data(const std::string &name, Value_type value) { + std::stringstream sstr; + sstr << value; + set_data(name, sstr.str()); + } + + void commit_current_element() { + m_xml.add_element(m_current_element); + m_current_element.clear(); + } + + XML_exporter m_xml; + XML_exporter::Element_with_map m_current_element; +}; + +template< +typename Kernel, typename OutputIteratorPoints> +bool load_points_from_file( + const std::string &filename, + OutputIteratorPoints points, + std::size_t only_first_n_points = std::numeric_limits::max()) { + typedef typename Kernel::Point_d Point; + + std::ifstream in(filename); + if (!in.is_open()) { + std::cerr << "Could not open '" << filename << "'" << std::endl; + return false; + } + + Kernel k; + Point p; + int num_ppints; + in >> num_ppints; + + std::size_t i = 0; + while (i < only_first_n_points && in >> p) { + *points++ = p; + ++i; + } + +#ifdef DEBUG_TRACES + std::cerr << "'" << filename << "' loaded." << std::endl; +#endif + + return true; +} + +template< +typename Kernel, typename Tangent_space_basis, +typename OutputIteratorPoints, typename OutputIteratorTS> +bool load_points_and_tangent_space_basis_from_file( + const std::string &filename, + OutputIteratorPoints points, + OutputIteratorTS tangent_spaces, + int intrinsic_dim, + std::size_t only_first_n_points = std::numeric_limits::max()) { + typedef typename Kernel::Point_d Point; + typedef typename Kernel::Vector_d Vector; + + std::ifstream in(filename); + if (!in.is_open()) { + std::cerr << "Could not open '" << filename << "'" << std::endl; + return false; + } + + Kernel k; + Point p; + int num_ppints; + in >> num_ppints; + + std::size_t i = 0; + while (i < only_first_n_points && in >> p) { + *points++ = p; + + Tangent_space_basis tsb(i); + for (int d = 0; d < intrinsic_dim; ++d) { + Vector v; + in >> v; + tsb.push_back(tc::internal::normalize_vector(v, k)); + } + *tangent_spaces++ = tsb; + ++i; + } + +#ifdef DEBUG_TRACES + std::cerr << "'" << filename << "' loaded." 
<< std::endl; +#endif + + return true; +} + +// color_inconsistencies: only works if p_complex = NULL +template +bool export_to_off( + TC const& tc, + std::string const& input_name_stripped, + std::string const& suffix, + bool color_inconsistencies = false, + typename TC::Simplicial_complex const* p_complex = NULL, + Simplex_set const *p_simpl_to_color_in_red = NULL, + Simplex_set const *p_simpl_to_color_in_green = NULL, + Simplex_set const *p_simpl_to_color_in_blue = NULL) { +#ifdef TC_NO_EXPORT + return true; +#endif + + CGAL::Identity proj_functor; + + if (tc.intrinsic_dimension() <= 3) { + std::stringstream output_filename; + output_filename << "output/" << input_name_stripped << "_" + << tc.intrinsic_dimension() << "_in_R" + << tc.ambient_dimension() << "_" + << tc.number_of_vertices() << "v" + << suffix << ".off"; + std::ofstream off_stream(output_filename.str().c_str()); + + if (p_complex) { +#ifndef TC_NO_EXPORT + tc.export_to_off( + *p_complex, off_stream, + p_simpl_to_color_in_red, + p_simpl_to_color_in_green, + p_simpl_to_color_in_blue, + proj_functor); +#endif + } else { + tc.export_to_off( + off_stream, color_inconsistencies, + p_simpl_to_color_in_red, + p_simpl_to_color_in_green, + p_simpl_to_color_in_blue, + NULL, + proj_functor); + } + return true; + } + return false; +} + +void make_tc(std::vector &points, + TC::TS_container const& tangent_spaces, // can be empty + int intrinsic_dim, + double sparsity = 0.01, + double max_perturb = 0.005, + bool perturb = true, + bool add_high_dim_simpl = false, + bool collapse = false, + double time_limit_for_perturb = 0., + const char *input_name = "tc") { + Kernel k; + + if (sparsity > 0. && !tangent_spaces.empty()) { + std::cerr << "Error: cannot sparsify point set with pre-computed normals.\n"; + return; + } + + //=========================================================================== + // Init + //=========================================================================== + Gudhi::Clock t; + + // Get input_name_stripped + std::string input_name_stripped(input_name); + size_t slash_index = input_name_stripped.find_last_of('/'); + if (slash_index == std::string::npos) + slash_index = input_name_stripped.find_last_of('\\'); + if (slash_index == std::string::npos) + slash_index = 0; + else + ++slash_index; + input_name_stripped = input_name_stripped.substr( + slash_index, input_name_stripped.find_last_of('.') - slash_index); + + GUDHI_TC_SET_PERFORMANCE_DATA("Num_points_in_input", points.size()); + +#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM + std::vector points_not_sparse = points; +#endif + + //=========================================================================== + // Sparsify point set if requested + //=========================================================================== + if (sparsity > 0.) 
{ + std::size_t num_points_before = points.size(); + std::vector sparsified_points; + subsampl::sparsify_point_set(k, points, sparsity*sparsity, + std::back_inserter(sparsified_points)); + sparsified_points.swap(points); + std::cerr << "Number of points before/after sparsification: " + << num_points_before << " / " << points.size() << "\n"; + +#ifdef GUDHI_TC_EXPORT_SPARSIFIED_POINT_SET + std::ofstream ps_stream("output/sparsified_point_set.txt"); + tc::internal::export_point_set(k, points, ps_stream); +#endif + } + + GUDHI_TC_SET_PERFORMANCE_DATA("Sparsity", sparsity); + GUDHI_TC_SET_PERFORMANCE_DATA("Max_perturb", max_perturb); + GUDHI_TC_SET_PERFORMANCE_DATA("Num_points", points.size()); + + //=========================================================================== + // Compute Tangential Complex + //=========================================================================== + + TC tc( + points, + intrinsic_dim, +#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM + points_not_sparse.begin(), points_not_sparse.end(), +#endif + k); + + if (!tangent_spaces.empty()) { + tc.set_tangent_planes(tangent_spaces); + } + + t.end(); + double init_time = t.num_seconds(); + + t.begin(); + tc.compute_tangential_complex(); + t.end(); + double computation_time = t.num_seconds(); + + //=========================================================================== + // Export to OFF + //=========================================================================== + + // Create complex + int max_dim = -1; + TC::Simplicial_complex complex; + Simplex_set inconsistent_simplices; + max_dim = tc.create_complex(complex, true, false, 2, &inconsistent_simplices); + + // TODO(CJ): TEST + Gudhi::Simplex_tree<> stree; + tc.create_complex(stree, true, false); + // std::cerr << stree; + + t.begin(); + bool ret = export_to_off( + tc, input_name_stripped, "_INITIAL_TC", true, + &complex, &inconsistent_simplices); + t.end(); + double export_before_time = (ret ? t.num_seconds() : -1); + + unsigned int num_perturb_steps = 0; + double perturb_time = -1; + double export_after_perturb_time = -1.; + bool perturb_success = false; + if (perturb) { + //========================================================================= + // Try to fix inconsistencies by perturbing points + //========================================================================= + t.begin(); + auto fix_result = + tc.fix_inconsistencies_using_perturbation(max_perturb, time_limit_for_perturb); + t.end(); + perturb_time = t.num_seconds(); + + perturb_success = fix_result.success; + GUDHI_TC_SET_PERFORMANCE_DATA("Initial_num_inconsistent_local_tr", + fix_result.initial_num_inconsistent_stars); + GUDHI_TC_SET_PERFORMANCE_DATA("Best_num_inconsistent_local_tr", + fix_result.best_num_inconsistent_stars); + GUDHI_TC_SET_PERFORMANCE_DATA("Final_num_inconsistent_local_tr", + fix_result.final_num_inconsistent_stars); + + //========================================================================= + // Export to OFF + //========================================================================= + + // Re-build the complex + Simplex_set inconsistent_simplices; + max_dim = tc.create_complex(complex, true, false, 2, &inconsistent_simplices); + + t.begin(); + bool exported = export_to_off( + tc, input_name_stripped, "_AFTER_FIX", true, &complex, + &inconsistent_simplices); + t.end(); + export_after_perturb_time = (exported ? 
t.num_seconds() : -1); + + //std::string fn = "output/inc_stars/"; + //fn += input_name_stripped; + //tc.export_inconsistent_stars_to_OFF_files(fn); + +#if !defined(TC_NO_EXPORT) && defined(TC_EXPORT_TO_RIB) + std::ofstream rib(std::string("output/") + input_name_stripped + ".rib"); + RIB_exporter rib_exporter( + tc.points(), + complex.simplex_range(), + rib, + input_name_stripped + ".tif", + false, // is_preview + std::make_tuple(2, 4, 6), + 1600, 503 // resolution + ); + rib_exporter.write_file(); + + std::ofstream rib_LQ(std::string("output/") + input_name_stripped + "_LQ.rib"); + RIB_exporter rib_exporter_LQ( + tc.points(), + complex.simplex_range(), + rib_LQ, + input_name_stripped + "_LQ.tif", + true, // is_preview + std::make_tuple(0, 4, 5) + ); + rib_exporter_LQ.write_file(); +#endif + } else { + GUDHI_TC_SET_PERFORMANCE_DATA("Initial_num_inconsistent_local_tr", "N/A"); + GUDHI_TC_SET_PERFORMANCE_DATA("Best_num_inconsistent_local_tr", "N/A"); + GUDHI_TC_SET_PERFORMANCE_DATA("Final_num_inconsistent_local_tr", "N/A"); + } + + max_dim = tc.create_complex(complex, true, false, 2); + + complex.display_stats(); + + if (intrinsic_dim == 2) + complex.euler_characteristic(true); + + //=========================================================================== + // Collapse + //=========================================================================== + if (collapse) { + complex.collapse(max_dim); + complex.display_stats(); + } + + //=========================================================================== + // Is the result a pure pseudomanifold? + //=========================================================================== + std::size_t num_wrong_dim_simplices, + num_wrong_number_of_cofaces, + num_unconnected_stars; + Simplex_set wrong_dim_simplices; + Simplex_set wrong_number_of_cofaces_simplices; + Simplex_set unconnected_stars_simplices; + bool is_pure_pseudomanifold = complex.is_pure_pseudomanifold( + intrinsic_dim, tc.number_of_vertices(), + false, // do NOT allow borders + false, 1, + &num_wrong_dim_simplices, &num_wrong_number_of_cofaces, + &num_unconnected_stars, + &wrong_dim_simplices, &wrong_number_of_cofaces_simplices, + &unconnected_stars_simplices); + + //=========================================================================== + // Export to OFF + //=========================================================================== + + double export_after_collapse_time = -1.; + if (collapse) { + t.begin(); + bool exported = export_to_off( + tc, input_name_stripped, "_AFTER_COLLAPSE", false, &complex, + &wrong_dim_simplices, &wrong_number_of_cofaces_simplices, + &unconnected_stars_simplices); + t.end(); + std::cerr + << " OFF colors:\n" + << " * Red: wrong dim simplices\n" + << " * Green: wrong number of cofaces simplices\n" + << " * Blue: not-connected stars\n"; + export_after_collapse_time = (exported ? 
t.num_seconds() : -1.); + } + + //=========================================================================== + // Display info + //=========================================================================== + + std::cerr + << "\n================================================\n" + << "Number of vertices: " << tc.number_of_vertices() << "\n" + << "Computation times (seconds): \n" + << " * Tangential complex: " << init_time + computation_time << "\n" + << " - Init + kd-tree = " << init_time << "\n" + << " - TC computation = " << computation_time << "\n" + << " * Export to OFF (before perturb): " << export_before_time << "\n" + << " * Fix inconsistencies 1: " << perturb_time + << " (" << num_perturb_steps << " steps) ==> " + << (perturb_success ? "FIXED" : "NOT fixed") << "\n" + << " * Export to OFF (after perturb): " << export_after_perturb_time << "\n" + << " * Export to OFF (after collapse): " + << export_after_collapse_time << "\n" + << "================================================\n"; + + //=========================================================================== + // Export info + //=========================================================================== + GUDHI_TC_SET_PERFORMANCE_DATA("Init_time", init_time); + GUDHI_TC_SET_PERFORMANCE_DATA("Comput_time", computation_time); + GUDHI_TC_SET_PERFORMANCE_DATA("Perturb_successful", + (perturb_success ? 1 : 0)); + GUDHI_TC_SET_PERFORMANCE_DATA("Perturb_time", perturb_time); + GUDHI_TC_SET_PERFORMANCE_DATA("Perturb_steps", num_perturb_steps); + GUDHI_TC_SET_PERFORMANCE_DATA("Result_pure_pseudomanifold", + (is_pure_pseudomanifold ? 1 : 0)); + GUDHI_TC_SET_PERFORMANCE_DATA("Result_num_wrong_dim_simplices", + num_wrong_dim_simplices); + GUDHI_TC_SET_PERFORMANCE_DATA("Result_num_wrong_number_of_cofaces", + num_wrong_number_of_cofaces); + GUDHI_TC_SET_PERFORMANCE_DATA("Result_num_unconnected_stars", + num_unconnected_stars); + GUDHI_TC_SET_PERFORMANCE_DATA("Info", ""); +} + +int main() { + CGAL::set_error_behaviour(CGAL::ABORT); + +#ifdef GUDHI_USE_TBB +#ifdef _DEBUG + int num_threads = 1; +#else + int num_threads = tbb::task_scheduler_init::default_num_threads() - 4; +#endif +#endif + + unsigned int seed = static_cast (time(NULL)); + CGAL::default_random = CGAL::Random(seed); // TODO(CJ): use set_default_random + std::cerr << "Random seed = " << seed << "\n"; + + std::ifstream script_file; + script_file.open(BENCHMARK_SCRIPT_FILENAME); + // Script? + // Script file format: each line gives + // - Filename (point set) or "generate_XXX" (point set generation) + // - Ambient dim + // - Intrinsic dim + // - Number of iterations with these parameters + if (script_file.is_open()) { + int i = 1; +#ifdef GUDHI_USE_TBB +#ifdef BENCHMARK_WITH_1_TO_MAX_THREADS + for (num_threads = 1; + num_threads <= tbb::task_scheduler_init::default_num_threads(); + ++num_threads) +#endif +#endif + /*for (Concurrent_mesher_config::get().num_work_items_per_batch = 5 ; + Concurrent_mesher_config::get().num_work_items_per_batch < 100 ; + Concurrent_mesher_config::get().num_work_items_per_batch += 5)*/ { +#ifdef GUDHI_USE_TBB + tbb::task_scheduler_init init( + num_threads > 0 ? 
num_threads : tbb::task_scheduler_init::automatic); +#endif + + std::cerr << "Script file '" << BENCHMARK_SCRIPT_FILENAME << "' found.\n"; + script_file.seekg(0); + while (script_file.good()) { + std::string line; + std::getline(script_file, line); + if (line.size() > 1 && line[0] != '#') { + boost::replace_all(line, "\t", " "); + boost::trim_all(line); + std::cerr << "\n\n"; + std::cerr << "*****************************************\n"; + std::cerr << "******* " << line << "\n"; + std::cerr << "*****************************************\n"; + std::stringstream sstr(line); + + std::string input; + std::string param1; + std::string param2; + std::string param3; + std::size_t num_points; + int ambient_dim; + int intrinsic_dim; + double sparsity; + double max_perturb; + char perturb, add_high_dim_simpl, collapse; + double time_limit_for_perturb; + int num_iteration; + sstr >> input; + sstr >> param1; + sstr >> param2; + sstr >> param3; + sstr >> num_points; + sstr >> ambient_dim; + sstr >> intrinsic_dim; + sstr >> sparsity; + sstr >> max_perturb; + sstr >> perturb; + sstr >> add_high_dim_simpl; + sstr >> collapse; + sstr >> time_limit_for_perturb; + sstr >> num_iteration; + + for (int j = 0; j < num_iteration; ++j) { + std::string input_stripped = input; + size_t slash_index = input_stripped.find_last_of('/'); + if (slash_index == std::string::npos) + slash_index = input_stripped.find_last_of('\\'); + if (slash_index == std::string::npos) + slash_index = 0; + else + ++slash_index; + input_stripped = input_stripped.substr( + slash_index, input_stripped.find_last_of('.') - slash_index); + + GUDHI_TC_SET_PERFORMANCE_DATA("Input", input_stripped); + GUDHI_TC_SET_PERFORMANCE_DATA("Param1", param1); + GUDHI_TC_SET_PERFORMANCE_DATA("Param2", param2); + GUDHI_TC_SET_PERFORMANCE_DATA("Param3", param3); + GUDHI_TC_SET_PERFORMANCE_DATA("Ambient_dim", ambient_dim); + GUDHI_TC_SET_PERFORMANCE_DATA("Intrinsic_dim", intrinsic_dim); + GUDHI_TC_SET_PERFORMANCE_DATA("Perturb_technique", "Tangential_translation"); + GUDHI_TC_SET_PERFORMANCE_DATA("Perturb_which_points", "Center_vertex"); + +#ifdef GUDHI_USE_TBB + GUDHI_TC_SET_PERFORMANCE_DATA( + "Num_threads", + (num_threads == -1 ? 
tbb::task_scheduler_init::default_num_threads() : num_threads)); +#else + GUDHI_TC_SET_PERFORMANCE_DATA("Num_threads", "N/A"); +#endif + + std::cerr << "\nTC #" << i << "...\n"; + +#ifdef GUDHI_TC_PROFILING + Gudhi::Clock t_gen; +#endif + + std::vector points; + TC::TS_container tangent_spaces; + + if (input == "generate_moment_curve") { + points = Gudhi::generate_points_on_moment_curve( + num_points, ambient_dim, + std::atof(param1.c_str()), std::atof(param2.c_str())); + } else if (input == "generate_plane") { + points = Gudhi::generate_points_on_plane( + num_points, intrinsic_dim, ambient_dim); + } else if (input == "generate_sphere_d") { + points = Gudhi::generate_points_on_sphere_d( + num_points, ambient_dim, + std::atof(param1.c_str()), // radius + std::atof(param2.c_str())); // radius_noise_percentage + } else if (input == "generate_two_spheres_d") { + points = Gudhi::generate_points_on_two_spheres_d( + num_points, ambient_dim, + std::atof(param1.c_str()), + std::atof(param2.c_str()), + std::atof(param3.c_str())); + } else if (input == "generate_3sphere_and_circle_d") { + GUDHI_CHECK(intrinsic_dim == 3, + std::logic_error("Intrinsic dim should be 3")); + GUDHI_CHECK(ambient_dim == 5, + std::logic_error("Ambient dim should be 5")); + points = Gudhi::generate_points_on_3sphere_and_circle( + num_points, + std::atof(param1.c_str())); + } else if (input == "generate_torus_3D") { + points = Gudhi::generate_points_on_torus_3D( + num_points, + std::atof(param1.c_str()), + std::atof(param2.c_str()), + param3 == "Y"); + } else if (input == "generate_torus_d") { + points = Gudhi::generate_points_on_torus_d( + num_points, + intrinsic_dim, + param1 == "Y", // uniform + std::atof(param2.c_str())); // radius_noise_percentage + } else if (input == "generate_klein_bottle_3D") { + points = Gudhi::generate_points_on_klein_bottle_3D( + num_points, + std::atof(param1.c_str()), std::atof(param2.c_str())); + } else if (input == "generate_klein_bottle_4D") { + points = Gudhi::generate_points_on_klein_bottle_4D( + num_points, + std::atof(param1.c_str()), std::atof(param2.c_str()), + std::atof(param3.c_str())); // noise + } else if (input == "generate_klein_bottle_variant_5D") { + points = Gudhi::generate_points_on_klein_bottle_variant_5D( + num_points, + std::atof(param1.c_str()), std::atof(param2.c_str())); + } else { + // Contains tangent space basis + if (input.substr(input.size() - 3) == "pwt") { + load_points_and_tangent_space_basis_from_file + ( + input, std::back_inserter(points), + std::back_inserter(tangent_spaces), + intrinsic_dim, + ONLY_LOAD_THE_FIRST_N_POINTS); + } else { + load_points_from_file( + input, std::back_inserter(points), + ONLY_LOAD_THE_FIRST_N_POINTS); + } + } + +#ifdef GUDHI_TC_PROFILING + t_gen.end(); + std::cerr << "Point set generated/loaded in " << t_gen.num_seconds() + << " seconds.\n"; +#endif + + if (!points.empty()) { +#if defined(TC_INPUT_STRIDES) && TC_INPUT_STRIDES > 1 + auto p = points | boost::adaptors::strided(TC_INPUT_STRIDES); + std::vector points(p.begin(), p.end()); + std::cerr << "****************************************\n" + << "WARNING: taking 1 point every " << TC_INPUT_STRIDES + << " points.\n" + << "****************************************\n"; +#endif + + make_tc(points, tangent_spaces, intrinsic_dim, + sparsity, max_perturb, + perturb == 'Y', add_high_dim_simpl == 'Y', collapse == 'Y', + time_limit_for_perturb, input.c_str()); + + std::cerr << "TC #" << i++ << " done.\n"; + std::cerr << "\n---------------------------------\n"; + } else { + std::cerr << "TC 
#" << i++ << ": no points loaded.\n"; + } + + XML_perf_data::commit(); + } + } + } + script_file.seekg(0); + script_file.clear(); + } + + script_file.close(); + } // Or not script? + else { + std::cerr << "Script file '" << BENCHMARK_SCRIPT_FILENAME << "' NOT found.\n"; + } + + // system("pause"); + return 0; +} diff --git a/src/Tangential_complex/doc/Intro_tangential_complex.h b/src/Tangential_complex/doc/Intro_tangential_complex.h new file mode 100644 index 00000000..3d687c1d --- /dev/null +++ b/src/Tangential_complex/doc/Intro_tangential_complex.h @@ -0,0 +1,119 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Clement Jamin + * + * Copyright (C) 2016 INRIA + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +#ifndef DOC_TANGENTIAL_COMPLEX_INTRO_TANGENTIAL_COMPLEX_H_ +#define DOC_TANGENTIAL_COMPLEX_INTRO_TANGENTIAL_COMPLEX_H_ + +// needs namespaces for Doxygen to link on classes +namespace Gudhi { +namespace tangential_complex { + +/** \defgroup tangential_complex Tangential complex + +\author Clément Jamin + +@{ + +\section tangentialdefinition Definition + +A Tangential Delaunay complex is a simplicial complex +designed to reconstruct a \f$k\f$-dimensional smooth manifold embedded in \f$d\f$-dimensional Euclidean space. +The input is a point sample coming from an unknown manifold, which means that the points lie close to a structure of "small" intrinsic dimension. +The running time depends only linearly on the extrinsic dimension \f$ d \f$ +and exponentially on the intrinsic dimension \f$ k \f$. + +An extensive description of the Tangential complex can be found in \cite tangentialcomplex2014. + +\subsection whatisthetc What is a Tangential Complex? + +Let us start with the description of the Tangential complex of a simple example, with \f$ k=1 \f$ and \f$ d=2 \f$. +The input data is 4 points \f$ P \f$ located on a curve embedded in 2D. +\image html "tc_example_01.png" "The input" +For each point \f$ p \f$, estimate its tangent subspace \f$ T_p \f$ (e.g. using PCA). +\image html "tc_example_02.png" "The estimated normals" +Let us add the Voronoi diagram of the points in orange. For each point \f$ p \f$, construct its star in the Delaunay triangulation of \f$ P \f$ restricted to \f$ T_p \f$. +\image html "tc_example_03.png" "The Voronoi diagram" +The Tangential Delaunay complex is the union of those stars. + +In practice, neither the ambient Voronoi diagram nor the ambient Delaunay triangulation is computed. +Instead, local \f$ k \f$-dimensional regular triangulations are computed with a limited number of points as we only need the star of each point. +More details can be found in \cite tangentialcomplex2014. + +\subsection inconsistencies Inconsistencies + +Inconsistencies between the stars can occur. 
+An inconsistency occurs when a simplex is not in the star of all its vertices.
+
+Let us take the same example.
+\image html "tc_example_07_before.png" "Before"
+Let us slightly move the tangent subspace \f$ T_q \f$.
+\image html "tc_example_07_after.png" "After"
+Now, the star of \f$ Q \f$ contains \f$ QP \f$, but the star of \f$ P \f$ does not contain \f$ QP \f$. We have an inconsistency.
+\image html "tc_example_08.png" "After"
+
+One way to solve inconsistencies is to randomly perturb the positions of the points involved in an inconsistency.
+In the current implementation, this perturbation is done in the tangent subspace of each point.
+The maximum perturbation radius is given as a parameter to the constructor.
+
+In most cases, we recommend providing a point set where the minimum distance between any two points
+is not too small. This can be achieved using the functions provided by the Subsampling module. Then, a good value to start with for
+the maximum perturbation radius would be around half the minimum distance between any two points.
+The \ref example_with_perturb below shows an example of such a process.
+
+In most cases, this process is able to dramatically reduce the number of inconsistencies, but is not guaranteed to succeed.
+
+\subsection output Output
+
+The result of the computation is exported as a `Simplex_tree`. It is the union of the stars of all the input points.
+A vertex in the Simplex Tree is the index of the point in the range provided by the user.
+The point corresponding to a vertex can also be obtained through the `Tangential_complex::get_point` function.
+Note that even if the positions of the points are perturbed, their original positions are kept (e.g. `Tangential_complex::get_point` returns the original position of the point).
+
+The result can be obtained after the computation of the Tangential complex itself and/or after the perturbation process.
+
+\section simple_example Simple example
+
+This example builds the Tangential complex of a point set.
+Note that the dimension of the kernel here is dynamic, which is slower, but more flexible:
+the intrinsic and ambient dimensions do not have to be known at compile-time.
+
+\include Tangential_complex/example_basic.cpp
+
+\section example_with_perturb Example with perturbation
+
+This example builds the Tangential complex of a point set, then tries to solve inconsistencies
+by perturbing the positions of points involved in inconsistent simplices.
+Note that the dimension of the kernel here is static, which is the best choice when the
+dimensions are known at compile-time.
+
+\include Tangential_complex/example_with_perturb.cpp
+
+\copyright GNU General Public License v3.
+\verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim + */ +/** @} */ // end defgroup tangential_complex + +} // namespace tangential_complex + +} // namespace Gudhi + +#endif // DOC_TANGENTIAL_COMPLEX_INTRO_TANGENTIAL_COMPLEX_H_ diff --git a/src/Tangential_complex/doc/tc_example_01.png b/src/Tangential_complex/doc/tc_example_01.png new file mode 100644 index 00000000..8afe6198 Binary files /dev/null and b/src/Tangential_complex/doc/tc_example_01.png differ diff --git a/src/Tangential_complex/doc/tc_example_02.png b/src/Tangential_complex/doc/tc_example_02.png new file mode 100644 index 00000000..01591c1d Binary files /dev/null and b/src/Tangential_complex/doc/tc_example_02.png differ diff --git a/src/Tangential_complex/doc/tc_example_03.png b/src/Tangential_complex/doc/tc_example_03.png new file mode 100644 index 00000000..5de04e01 Binary files /dev/null and b/src/Tangential_complex/doc/tc_example_03.png differ diff --git a/src/Tangential_complex/doc/tc_example_05.png b/src/Tangential_complex/doc/tc_example_05.png new file mode 100644 index 00000000..fdd5e5fa Binary files /dev/null and b/src/Tangential_complex/doc/tc_example_05.png differ diff --git a/src/Tangential_complex/doc/tc_example_06.png b/src/Tangential_complex/doc/tc_example_06.png new file mode 100644 index 00000000..31ad3c43 Binary files /dev/null and b/src/Tangential_complex/doc/tc_example_06.png differ diff --git a/src/Tangential_complex/doc/tc_example_07.png b/src/Tangential_complex/doc/tc_example_07.png new file mode 100644 index 00000000..47e34de7 Binary files /dev/null and b/src/Tangential_complex/doc/tc_example_07.png differ diff --git a/src/Tangential_complex/doc/tc_example_07_after.png b/src/Tangential_complex/doc/tc_example_07_after.png new file mode 100644 index 00000000..981350d2 Binary files /dev/null and b/src/Tangential_complex/doc/tc_example_07_after.png differ diff --git a/src/Tangential_complex/doc/tc_example_07_before.png b/src/Tangential_complex/doc/tc_example_07_before.png new file mode 100644 index 00000000..ddc6bc7b Binary files /dev/null and b/src/Tangential_complex/doc/tc_example_07_before.png differ diff --git a/src/Tangential_complex/doc/tc_example_08.png b/src/Tangential_complex/doc/tc_example_08.png new file mode 100644 index 00000000..119a87de Binary files /dev/null and b/src/Tangential_complex/doc/tc_example_08.png differ diff --git a/src/Tangential_complex/doc/tc_example_09.png b/src/Tangential_complex/doc/tc_example_09.png new file mode 100644 index 00000000..31bac1e0 Binary files /dev/null and b/src/Tangential_complex/doc/tc_example_09.png differ diff --git a/src/Tangential_complex/doc/tc_examples.png b/src/Tangential_complex/doc/tc_examples.png new file mode 100644 index 00000000..b6544afe Binary files /dev/null and b/src/Tangential_complex/doc/tc_examples.png differ diff --git a/src/Tangential_complex/example/CMakeLists.txt b/src/Tangential_complex/example/CMakeLists.txt new file mode 100644 index 00000000..7ba043f0 --- /dev/null +++ b/src/Tangential_complex/example/CMakeLists.txt @@ -0,0 +1,30 @@ +cmake_minimum_required(VERSION 2.6) +project(Tangential_complex_examples) + +if(CGAL_FOUND) + if (NOT CGAL_VERSION VERSION_LESS 4.8.0) + message(STATUS "CGAL version: ${CGAL_VERSION}.") + + find_package(Eigen3 3.1.0) + if (EIGEN3_FOUND) + message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") + include( ${EIGEN3_USE_FILE} ) + include_directories (BEFORE "../../include") + + add_executable( Tangential_complex_example_basic example_basic.cpp ) + 
target_link_libraries(Tangential_complex_example_basic ${CGAL_LIBRARY} ${Boost_DATE_TIME_LIBRARY}) + add_executable( Tangential_complex_example_with_perturb example_with_perturb.cpp ) + target_link_libraries(Tangential_complex_example_with_perturb ${CGAL_LIBRARY} ${Boost_DATE_TIME_LIBRARY}) + if (TBB_FOUND) + target_link_libraries(Tangential_complex_example_basic ${TBB_LIBRARIES}) + target_link_libraries(Tangential_complex_example_with_perturb ${TBB_LIBRARIES}) + endif() + else() + message(WARNING "Eigen3 not found. Version 3.1.0 is required for the Tangential_complex examples.") + endif() + else() + message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile Tangential_complex examples. Version 4.8.0 is required.") + endif () +else() + message(WARNING "CGAL not found. It is required for the Tangential_complex examples.") +endif() diff --git a/src/Tangential_complex/example/example_basic.cpp b/src/Tangential_complex/example/example_basic.cpp new file mode 100644 index 00000000..4f2b859e --- /dev/null +++ b/src/Tangential_complex/example/example_basic.cpp @@ -0,0 +1,46 @@ +#include +#include + +#include +#include + +#include +#include + +namespace tc = Gudhi::tangential_complex; + +typedef CGAL::Epick_d Kernel; +typedef Kernel::FT FT; +typedef Kernel::Point_d Point; +typedef Kernel::Vector_d Vector; +typedef tc::Tangential_complex< +Kernel, CGAL::Dynamic_dimension_tag, +CGAL::Parallel_tag> TC; + +int main(void) { + const int INTRINSIC_DIM = 2; + const int AMBIENT_DIM = 3; + const int NUM_POINTS = 1000; + + Kernel k; + + // Generate points on a 2-sphere + CGAL::Random_points_on_sphere_d generator(AMBIENT_DIM, 3.); + std::vector points; + points.reserve(NUM_POINTS); + for (int i = 0; i < NUM_POINTS; ++i) + points.push_back(*generator++); + + // Compute the TC + TC tc(points, INTRINSIC_DIM, k); + tc.compute_tangential_complex(); + + // Export the TC into a Simplex_tree + Gudhi::Simplex_tree<> stree; + tc.create_complex(stree); + + // Display stats about inconsistencies + tc.number_of_inconsistent_simplices(true); // verbose + + return 0; +} diff --git a/src/Tangential_complex/example/example_with_perturb.cpp b/src/Tangential_complex/example/example_with_perturb.cpp new file mode 100644 index 00000000..d0d877ea --- /dev/null +++ b/src/Tangential_complex/example/example_with_perturb.cpp @@ -0,0 +1,53 @@ +#include +#include + +#include +#include + +#include +#include + +namespace subsampl = Gudhi::subsampling; +namespace tc = Gudhi::tangential_complex; + +typedef CGAL::Epick_d> Kernel; +typedef Kernel::FT FT; +typedef Kernel::Point_d Point; +typedef Kernel::Vector_d Vector; +typedef tc::Tangential_complex< +Kernel, CGAL::Dimension_tag<2>, +CGAL::Parallel_tag> TC; + +int main(void) { + const int INTRINSIC_DIM = 2; + const int AMBIENT_DIM = 3; + const int NUM_POINTS = 50; + + Kernel k; + + // Generate points on a 2-sphere + CGAL::Random_points_on_sphere_d generator(AMBIENT_DIM, 3.); + std::vector points; + points.reserve(NUM_POINTS); + for (int i = 0; i < NUM_POINTS; ++i) + points.push_back(*generator++); + + // Sparsify the point set + std::vector sparsified_points; + subsampl::sparsify_point_set(k, points, 0.1 * 0.1, + std::back_inserter(sparsified_points)); + sparsified_points.swap(points); + + // Compute the TC + TC tc(points, INTRINSIC_DIM, k); + tc.compute_tangential_complex(); + + // Try to fix inconsistencies. 
Give it 10 seconds to succeed + tc.fix_inconsistencies_using_perturbation(0.05, 10); + + // Export the TC into a Simplex_tree + Gudhi::Simplex_tree<> stree; + tc.create_complex(stree); + + return 0; +} diff --git a/src/Tangential_complex/include/gudhi/Tangential_complex.h b/src/Tangential_complex/include/gudhi/Tangential_complex.h new file mode 100644 index 00000000..7cf5c498 --- /dev/null +++ b/src/Tangential_complex/include/gudhi/Tangential_complex.h @@ -0,0 +1,2277 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Clement Jamin + * + * Copyright (C) 2016 INRIA + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +#ifndef TANGENTIAL_COMPLEX_H_ +#define TANGENTIAL_COMPLEX_H_ + +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include // for CGAL::Identity +#include +#include +#include +#include +#include +#include + +#include +#include + +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include // for std::sqrt +#include + +#ifdef GUDHI_USE_TBB +#include +#include +#include +#endif + +// #define GUDHI_TC_EXPORT_NORMALS // Only for 3D surfaces (k=2, d=3) + +namespace sps = Gudhi::spatial_searching; + +namespace Gudhi { + +namespace tangential_complex { + +using namespace internal; + +class Vertex_data { + public: + Vertex_data(std::size_t data = std::numeric_limits::max()) + : m_data(data) { } + + operator std::size_t() { + return m_data; + } + + operator std::size_t() const { + return m_data; + } + + private: + std::size_t m_data; +}; + +/** + * \class Tangential_complex Tangential_complex.h gudhi/Tangential_complex.h + * \brief Tangential complex data structure. + * + * \ingroup tangential_complex + * + * \details + * The class Tangential_complex represents a tangential complex. + * After the computation of the complex, an optional post-processing called perturbation can + * be run to attempt to remove inconsistencies. + * + * \tparam Kernel_ requires a CGAL::Epick_d class, which + * can be static if you know the ambiant dimension at compile-time, or dynamic if you don't. + * \tparam DimensionTag can be either Dimension_tag + * if you know the intrinsic dimension at compile-time, + * or CGAL::Dynamic_dimension_tag + * if you don't. + * \tparam Concurrency_tag enables sequential versus parallel computation. Possible values are `CGAL::Parallel_tag` (the default) and `CGAL::Sequential_tag`. + * \tparam Triangulation_ is the type used for storing the local regular triangulations. We highly recommend to use the default value (`CGAL::Regular_triangulation`). 
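As a hedged illustration of how these template parameters combine (the typedef names below are invented for the example and do not appear in the GUDHI sources), the two usual instantiations look like this:

#include <gudhi/Tangential_complex.h>
#include <CGAL/Epick_d.h>

// Dimensions known at compile time (ambient 3, intrinsic 2), parallel computation:
typedef CGAL::Epick_d<CGAL::Dimension_tag<3> > Static_kernel;
typedef Gudhi::tangential_complex::Tangential_complex<
    Static_kernel, CGAL::Dimension_tag<2>, CGAL::Parallel_tag> Static_TC;

// Dimensions only known at run time, sequential computation:
typedef CGAL::Epick_d<CGAL::Dynamic_dimension_tag> Dynamic_kernel;
typedef Gudhi::tangential_complex::Tangential_complex<
    Dynamic_kernel, CGAL::Dynamic_dimension_tag, CGAL::Sequential_tag> Dynamic_TC;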
+ * + */ +template < +typename Kernel_, // ambiant kernel +typename DimensionTag, // intrinsic dimension +typename Concurrency_tag = CGAL::Parallel_tag, +typename Triangulation_ = CGAL::Default +> +class Tangential_complex { + typedef Kernel_ K; + typedef typename K::FT FT; + typedef typename K::Point_d Point; + typedef typename K::Weighted_point_d Weighted_point; + typedef typename K::Vector_d Vector; + + typedef typename CGAL::Default::Get + < + Triangulation_, + CGAL::Regular_triangulation + < + CGAL::Epick_d, + CGAL::Triangulation_data_structure + < + typename CGAL::Epick_d::Dimension, + CGAL::Triangulation_vertex >, Vertex_data >, + CGAL::Triangulation_full_cell > > + > + > + >::type Triangulation; + typedef typename Triangulation::Geom_traits Tr_traits; + typedef typename Triangulation::Weighted_point Tr_point; + typedef typename Triangulation::Bare_point Tr_bare_point; + typedef typename Triangulation::Vertex_handle Tr_vertex_handle; + typedef typename Triangulation::Full_cell_handle Tr_full_cell_handle; + typedef typename Tr_traits::Vector_d Tr_vector; + +#if defined(GUDHI_USE_TBB) + typedef tbb::mutex Mutex_for_perturb; + typedef Vector Translation_for_perturb; + typedef std::vector > Weights; +#else + typedef Vector Translation_for_perturb; + typedef std::vector Weights; +#endif + typedef std::vector Translations_for_perturb; + + // Store a local triangulation and a handle to its center vertex + + struct Tr_and_VH { + public: + Tr_and_VH() + : m_tr(NULL) { } + + Tr_and_VH(int dim) + : m_tr(new Triangulation(dim)) { } + + ~Tr_and_VH() { + destroy_triangulation(); + } + + Triangulation & construct_triangulation(int dim) { + delete m_tr; + m_tr = new Triangulation(dim); + return tr(); + } + + void destroy_triangulation() { + delete m_tr; + m_tr = NULL; + } + + Triangulation & tr() { + return *m_tr; + } + + Triangulation const& tr() const { + return *m_tr; + } + + Tr_vertex_handle const& center_vertex() const { + return m_center_vertex; + } + + Tr_vertex_handle & center_vertex() { + return m_center_vertex; + } + + private: + Triangulation* m_tr; + Tr_vertex_handle m_center_vertex; + }; + + public: + typedef Basis Tangent_space_basis; + typedef Basis Orthogonal_space_basis; + typedef std::vector TS_container; + typedef std::vector OS_container; + + typedef std::vector Points; + + typedef boost::container::flat_set Simplex; + typedef std::set Simplex_set; + + private: + typedef sps::Kd_tree_search Points_ds; + typedef typename Points_ds::KNS_range KNS_range; + typedef typename Points_ds::INS_range INS_range; + + typedef std::vector Tr_container; + typedef std::vector Vectors; + + // An Incident_simplex is the list of the vertex indices + // except the center vertex + typedef boost::container::flat_set Incident_simplex; + typedef std::vector Star; + typedef std::vector Stars_container; + + // For transform_iterator + + static const Tr_point &vertex_handle_to_point(Tr_vertex_handle vh) { + return vh->point(); + } + + template + static const P &vertex_handle_to_point(VH vh) { + return vh->point(); + } + + public: + typedef internal::Simplicial_complex Simplicial_complex; + + /** \brief Constructor from a range of points. + * Points are copied into the instance, and a search data structure is initialized. + * Note the complex is not computed: `compute_tangential_complex` must be called after the creation + * of the object. + * + * @param[in] points Range of points (`Point_range::value_type` must be the same as `Kernel_::Point_d`). 
+ * @param[in] intrinsic_dimension Intrinsic dimension of the manifold. + * @param[in] k Kernel instance. + */ + template + Tangential_complex(Point_range points, + int intrinsic_dimension, +#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM + InputIterator first_for_tse, InputIterator last_for_tse, +#endif + const K &k = K() + ) + : m_k(k), + m_intrinsic_dim(intrinsic_dimension), + m_ambient_dim(points.empty() ? 0 : k.point_dimension_d_object()(*points.begin())), + m_points(points.begin(), points.end()), + m_weights(m_points.size(), FT(0)) +#if defined(GUDHI_USE_TBB) && defined(GUDHI_TC_PERTURB_POSITION) + , m_p_perturb_mutexes(NULL) +#endif + , m_points_ds(m_points) + , m_last_max_perturb(0.) + , m_are_tangent_spaces_computed(m_points.size(), false) + , m_tangent_spaces(m_points.size(), Tangent_space_basis()) +#ifdef GUDHI_TC_EXPORT_NORMALS + , m_orth_spaces(m_points.size(), Orthogonal_space_basis()) +#endif +#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM + , m_points_for_tse(first_for_tse, last_for_tse) + , m_points_ds_for_tse(m_points_for_tse) +#endif + { } + + /// Destructor + ~Tangential_complex() { +#if defined(GUDHI_USE_TBB) && defined(GUDHI_TC_PERTURB_POSITION) + delete [] m_p_perturb_mutexes; +#endif + } + + /// Returns the intrinsic dimension of the manifold. + int intrinsic_dimension() const { + return m_intrinsic_dim; + } + + /// Returns the ambient dimension. + int ambient_dimension() const { + return m_ambient_dim; + } + + Points const& points() const { + return m_points; + } + + /** \brief Returns the point corresponding to the vertex given as parameter. + * + * @param[in] vertex Vertex handle of the point to retrieve. + * @return The point found. + */ + Point get_point(std::size_t vertex) const { + return m_points[vertex]; + } + + /** \brief Returns the perturbed position of the point corresponding to the vertex given as parameter. + * + * @param[in] vertex Vertex handle of the point to retrieve. + * @return The perturbed position of the point found. + */ + Point get_perturbed_point(std::size_t vertex) const { + return compute_perturbed_point(vertex); + } + + /// Returns the number of vertices. + + std::size_t number_of_vertices() const { + return m_points.size(); + } + + void set_weights(const Weights& weights) { + m_weights = weights; + } + + void set_tangent_planes(const TS_container& tangent_spaces +#ifdef GUDHI_TC_EXPORT_NORMALS + , const OS_container& orthogonal_spaces +#endif + ) { +#ifdef GUDHI_TC_EXPORT_NORMALS + GUDHI_CHECK( + m_points.size() == tangent_spaces.size() + && m_points.size() == orthogonal_spaces.size(), + std::logic_error("Wrong sizes")); +#else + GUDHI_CHECK( + m_points.size() == tangent_spaces.size(), + std::logic_error("Wrong sizes")); +#endif + m_tangent_spaces = tangent_spaces; +#ifdef GUDHI_TC_EXPORT_NORMALS + m_orth_spaces = orthogonal_spaces; +#endif + for (std::size_t i = 0; i < m_points.size(); ++i) + m_are_tangent_spaces_computed[i] = true; + } + + /// Computes the tangential complex. + void compute_tangential_complex() { +#ifdef GUDHI_TC_PERFORM_EXTRA_CHECKS + std::cerr << red << "WARNING: GUDHI_TC_PERFORM_EXTRA_CHECKS is defined. 
" + << "Computation might be slower than usual.\n" << white; +#endif + +#if defined(GUDHI_TC_PROFILING) && defined(GUDHI_USE_TBB) + Gudhi::Clock t; +#endif + + // We need to do that because we don't want the container to copy the + // already-computed triangulations (while resizing) since it would + // invalidate the vertex handles stored beside the triangulations + m_triangulations.resize(m_points.size()); + m_stars.resize(m_points.size()); + m_squared_star_spheres_radii_incl_margin.resize(m_points.size(), FT(-1)); +#ifdef GUDHI_TC_PERTURB_POSITION + if (m_points.empty()) + m_translations.clear(); + else + m_translations.resize(m_points.size(), + m_k.construct_vector_d_object()(m_ambient_dim)); +#if defined(GUDHI_USE_TBB) + delete [] m_p_perturb_mutexes; + m_p_perturb_mutexes = new Mutex_for_perturb[m_points.size()]; +#endif +#endif + +#ifdef GUDHI_USE_TBB + // Parallel + if (boost::is_convertible::value) { + tbb::parallel_for(tbb::blocked_range(0, m_points.size()), + Compute_tangent_triangulation(*this)); + } else { +#endif // GUDHI_USE_TBB + // Sequential + for (std::size_t i = 0; i < m_points.size(); ++i) + compute_tangent_triangulation(i); +#ifdef GUDHI_USE_TBB + } +#endif // GUDHI_USE_TBB + +#if defined(GUDHI_TC_PROFILING) && defined(GUDHI_USE_TBB) + t.end(); + std::cerr << "Tangential complex computed in " << t.num_seconds() + << " seconds.\n"; +#endif + } + + /// \brief Type returned by `Tangential_complex::fix_inconsistencies_using_perturbation`. + struct Fix_inconsistencies_info { + /// `true` if all inconsistencies could be removed, `false` if the time limit has been reached before + bool success = false; + /// number of steps performed + unsigned int num_steps = 0; + /// initial number of inconsistent stars + std::size_t initial_num_inconsistent_stars = 0; + /// best number of inconsistent stars during the process + std::size_t best_num_inconsistent_stars = 0; + /// final number of inconsistent stars + std::size_t final_num_inconsistent_stars = 0; + }; + + /** \brief Attempts to fix inconsistencies by perturbing the point positions. + * + * @param[in] max_perturb Maximum length of the translations used by the perturbation. + * @param[in] time_limit Time limit in seconds. If -1, no time limit is set. + */ + Fix_inconsistencies_info fix_inconsistencies_using_perturbation(double max_perturb, double time_limit = -1.) { + Fix_inconsistencies_info info; + + if (time_limit == 0.) + return info; + + Gudhi::Clock t; + +#ifdef GUDHI_TC_SHOW_DETAILED_STATS_FOR_INCONSISTENCIES + std::tuple stats_before = + number_of_inconsistent_simplices(false); + + if (std::get<1>(stats_before) == 0) { +#ifdef DEBUG_TRACES + std::cerr << "Nothing to fix.\n"; +#endif + info.success = false; + return info; + } +#endif // GUDHI_TC_SHOW_DETAILED_STATS_FOR_INCONSISTENCIES + + m_last_max_perturb = max_perturb; + + bool done = false; + info.best_num_inconsistent_stars = m_triangulations.size(); + info.num_steps = 0; + while (!done) { +#ifdef GUDHI_TC_SHOW_DETAILED_STATS_FOR_INCONSISTENCIES + std::cerr + << "\nBefore fix step:\n" + << " * Total number of simplices in stars (incl. duplicates): " + << std::get<0>(stats_before) << "\n" + << " * Num inconsistent simplices in stars (incl. duplicates): " + << red << std::get<1>(stats_before) << white << " (" + << 100. * std::get<1>(stats_before) / std::get<0>(stats_before) << "%)\n" + << " * Number of stars containing inconsistent simplices: " + << red << std::get<2>(stats_before) << white << " (" + << 100. 
* std::get<2>(stats_before) / m_points.size() << "%)\n"; +#endif + +#if defined(DEBUG_TRACES) || defined(GUDHI_TC_PROFILING) + std::cerr << yellow + << "\nAttempt to fix inconsistencies using perturbations - step #" + << info.num_steps + 1 << "... " << white; +#endif + + std::size_t num_inconsistent_stars = 0; + std::vector updated_points; + +#ifdef GUDHI_TC_PROFILING + Gudhi::Clock t_fix_step; +#endif + + // Parallel +#if defined(GUDHI_USE_TBB) + if (boost::is_convertible::value) { + tbb::combinable num_inconsistencies; + tbb::combinable > tls_updated_points; + tbb::parallel_for( + tbb::blocked_range(0, m_triangulations.size()), + Try_to_solve_inconsistencies_in_a_local_triangulation(*this, max_perturb, + num_inconsistencies, + tls_updated_points)); + num_inconsistent_stars = + num_inconsistencies.combine(std::plus()); + updated_points = tls_updated_points.combine( + [](std::vector const& x, + std::vector const& y) { + std::vector res; + res.reserve(x.size() + y.size()); + res.insert(res.end(), x.begin(), x.end()); + res.insert(res.end(), y.begin(), y.end()); + return res; + }); + } else { +#endif // GUDHI_USE_TBB + // Sequential + for (std::size_t i = 0; i < m_triangulations.size(); ++i) { + num_inconsistent_stars += + try_to_solve_inconsistencies_in_a_local_triangulation(i, max_perturb, + std::back_inserter(updated_points)); + } +#if defined(GUDHI_USE_TBB) + } +#endif // GUDHI_USE_TBB + +#ifdef GUDHI_TC_PROFILING + t_fix_step.end(); +#endif + +#if defined(GUDHI_TC_SHOW_DETAILED_STATS_FOR_INCONSISTENCIES) || defined(DEBUG_TRACES) + std::cerr + << "\nEncountered during fix:\n" + << " * Num stars containing inconsistent simplices: " + << red << num_inconsistent_stars << white + << " (" << 100. * num_inconsistent_stars / m_points.size() << "%)\n"; +#endif + +#ifdef GUDHI_TC_PROFILING + std::cerr << yellow << "done in " << t_fix_step.num_seconds() + << " seconds.\n" << white; +#elif defined(DEBUG_TRACES) + std::cerr << yellow << "done.\n" << white; +#endif + + if (num_inconsistent_stars > 0) + refresh_tangential_complex(updated_points); + +#ifdef GUDHI_TC_PERFORM_EXTRA_CHECKS + // Confirm that all stars were actually refreshed + std::size_t num_inc_1 = + std::get<1>(number_of_inconsistent_simplices(false)); + refresh_tangential_complex(); + std::size_t num_inc_2 = + std::get<1>(number_of_inconsistent_simplices(false)); + if (num_inc_1 != num_inc_2) + std::cerr << red << "REFRESHMENT CHECK: FAILED. (" + << num_inc_1 << " vs " << num_inc_2 << ")\n" << white; + else + std::cerr << green << "REFRESHMENT CHECK: PASSED.\n" << white; +#endif + +#ifdef GUDHI_TC_SHOW_DETAILED_STATS_FOR_INCONSISTENCIES + std::tuple stats_after = + number_of_inconsistent_simplices(false); + + std::cerr + << "\nAfter fix:\n" + << " * Total number of simplices in stars (incl. duplicates): " + << std::get<0>(stats_after) << "\n" + << " * Num inconsistent simplices in stars (incl. duplicates): " + << red << std::get<1>(stats_after) << white << " (" + << 100. * std::get<1>(stats_after) / std::get<0>(stats_after) << "%)\n" + << " * Number of stars containing inconsistent simplices: " + << red << std::get<2>(stats_after) << white << " (" + << 100. 
* std::get<2>(stats_after) / m_points.size() << "%)\n"; + + stats_before = stats_after; +#endif + + if (info.num_steps == 0) + info.initial_num_inconsistent_stars = num_inconsistent_stars; + + if (num_inconsistent_stars < info.best_num_inconsistent_stars) + info.best_num_inconsistent_stars = num_inconsistent_stars; + + info.final_num_inconsistent_stars = num_inconsistent_stars; + + done = (num_inconsistent_stars == 0); + if (!done) { + ++info.num_steps; + if (time_limit > 0. && t.num_seconds() > time_limit) { +#ifdef DEBUG_TRACES + std::cerr << red << "Time limit reached.\n" << white; +#endif + info.success = false; + return info; + } + } + } + +#ifdef DEBUG_TRACES + std::cerr << green << "Fixed!\n" << white; +#endif + info.success = true; + return info; + } + + /// \brief Type returned by `Tangential_complex::number_of_inconsistent_simplices`. + struct Num_inconsistencies { + /// Total number of simplices in stars (including duplicates that appear in several stars) + std::size_t num_simplices = 0; + /// Number of inconsistent simplices + std::size_t num_inconsistent_simplices = 0; + /// Number of stars containing at least one inconsistent simplex + std::size_t num_inconsistent_stars = 0; + }; + + /// Returns the number of inconsistencies + /// @param[in] verbose If true, outputs a message into `std::cerr`. + + Num_inconsistencies + number_of_inconsistent_simplices( +#ifdef DEBUG_TRACES + bool verbose = true +#else + bool verbose = false +#endif + ) const { + Num_inconsistencies stats; + + // For each triangulation + for (std::size_t idx = 0; idx < m_points.size(); ++idx) { + bool is_star_inconsistent = false; + + // For each cell + Star::const_iterator it_inc_simplex = m_stars[idx].begin(); + Star::const_iterator it_inc_simplex_end = m_stars[idx].end(); + for (; it_inc_simplex != it_inc_simplex_end; ++it_inc_simplex) { + // Don't check infinite cells + if (is_infinite(*it_inc_simplex)) + continue; + + Simplex c = *it_inc_simplex; + c.insert(idx); // Add the missing index + + if (!is_simplex_consistent(c)) { + ++stats.num_inconsistent_simplices; + is_star_inconsistent = true; + } + + ++stats.num_simplices; + } + stats.num_inconsistent_stars += is_star_inconsistent; + } + + if (verbose) { + std::cerr + << "\n==========================================================\n" + << "Inconsistencies:\n" + << " * Total number of simplices in stars (incl. duplicates): " + << stats.num_simplices << "\n" + << " * Number of inconsistent simplices in stars (incl. duplicates): " + << stats.num_inconsistent_simplices << " (" + << 100. * stats.num_inconsistent_simplices / stats.num_simplices << "%)\n" + << " * Number of stars containing inconsistent simplices: " + << stats.num_inconsistent_stars << " (" + << 100. * stats.num_inconsistent_stars / m_points.size() << "%)\n" + << "==========================================================\n"; + } + + return stats; + } + + /** \brief Exports the complex into a Simplex_tree. + * + * \tparam Simplex_tree_ must be a `Simplex_tree`. + * + * @param[out] tree The result, where each `Vertex_handle` is the index of the + * corresponding point in the range provided to the constructor (it can also be + * retrieved through the `Tangential_complex::get_point` function. + * @param[in] export_inconsistent_simplices Also export inconsistent simplices or not? + * @return The maximal dimension of the simplices. 
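A minimal sketch of this export, assuming `tc` is any Tangential_complex instantiation on which `compute_tangential_complex()` has already been called (the helper name `export_and_print_stats` is invented for the illustration):

#include <gudhi/Simplex_tree.h>
#include <iostream>

template <typename TC>
void export_and_print_stats(const TC& tc) {
  Gudhi::Simplex_tree<> stree;
  int max_dim = tc.create_complex(stree);
  // Each Simplex_tree vertex is the index of the corresponding input point,
  // so its coordinates can be recovered with tc.get_point(vertex).
  std::cout << stree.num_vertices() << " vertices, " << stree.num_simplices()
            << " simplices, maximal dimension " << max_dim << "\n";
}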
+ */ + template + int create_complex(Simplex_tree_ &tree + , bool export_inconsistent_simplices = true + /// \cond ADVANCED_PARAMETERS + , bool export_infinite_simplices = false + , Simplex_set *p_inconsistent_simplices = NULL + /// \endcond + ) const { +#if defined(DEBUG_TRACES) || defined(GUDHI_TC_PROFILING) + std::cerr << yellow + << "\nExporting the TC as a Simplex_tree... " << white; +#endif +#ifdef GUDHI_TC_PROFILING + Gudhi::Clock t; +#endif + + int max_dim = -1; + + // For each triangulation + for (std::size_t idx = 0; idx < m_points.size(); ++idx) { + // For each cell of the star + Star::const_iterator it_inc_simplex = m_stars[idx].begin(); + Star::const_iterator it_inc_simplex_end = m_stars[idx].end(); + for (; it_inc_simplex != it_inc_simplex_end; ++it_inc_simplex) { + Simplex c = *it_inc_simplex; + + // Don't export infinite cells + if (!export_infinite_simplices && is_infinite(c)) + continue; + + if (!export_inconsistent_simplices && !is_simplex_consistent(c)) + continue; + + if (static_cast (c.size()) > max_dim) + max_dim = static_cast (c.size()); + // Add the missing center vertex + c.insert(idx); + + // Try to insert the simplex + bool inserted = tree.insert_simplex_and_subfaces(c).second; + + // Inconsistent? + if (p_inconsistent_simplices && inserted && !is_simplex_consistent(c)) { + p_inconsistent_simplices->insert(c); + } + } + } + +#ifdef GUDHI_TC_PROFILING + t.end(); + std::cerr << yellow << "done in " << t.num_seconds() + << " seconds.\n" << white; +#elif defined(DEBUG_TRACES) + std::cerr << yellow << "done.\n" << white; +#endif + + return max_dim; + } + + // First clears the complex then exports the TC into it + // Returns the max dimension of the simplices + // check_lower_and_higher_dim_simplices : 0 (false), 1 (true), 2 (auto) + // If the check is enabled, the function: + // - won't insert the simplex if it is already in a higher dim simplex + // - will erase any lower-dim simplices that are faces of the new simplex + // "auto" (= 2) will enable the check as a soon as it encounters a + // simplex whose dimension is different from the previous ones. + // N.B.: The check is quite expensive. + + int create_complex(Simplicial_complex &complex, + bool export_inconsistent_simplices = true, + bool export_infinite_simplices = false, + int check_lower_and_higher_dim_simplices = 2, + Simplex_set *p_inconsistent_simplices = NULL) const { +#if defined(DEBUG_TRACES) || defined(GUDHI_TC_PROFILING) + std::cerr << yellow + << "\nExporting the TC as a Simplicial_complex... " << white; +#endif +#ifdef GUDHI_TC_PROFILING + Gudhi::Clock t; +#endif + + int max_dim = -1; + complex.clear(); + + // For each triangulation + for (std::size_t idx = 0; idx < m_points.size(); ++idx) { + // For each cell of the star + Star::const_iterator it_inc_simplex = m_stars[idx].begin(); + Star::const_iterator it_inc_simplex_end = m_stars[idx].end(); + for (; it_inc_simplex != it_inc_simplex_end; ++it_inc_simplex) { + Simplex c = *it_inc_simplex; + + // Don't export infinite cells + if (!export_infinite_simplices && is_infinite(c)) + continue; + + if (!export_inconsistent_simplices && !is_simplex_consistent(c)) + continue; + + // Unusual simplex dim? + if (check_lower_and_higher_dim_simplices == 2 + && max_dim != -1 + && static_cast (c.size()) != max_dim) { + // Let's activate the check + std::cerr << red << + "Info: check_lower_and_higher_dim_simplices ACTIVATED. 
" + "Export might be take some time...\n" << white; + check_lower_and_higher_dim_simplices = 1; + } + + if (static_cast (c.size()) > max_dim) + max_dim = static_cast (c.size()); + // Add the missing center vertex + c.insert(idx); + + // Try to insert the simplex + bool added = + complex.add_simplex(c, check_lower_and_higher_dim_simplices == 1); + + // Inconsistent? + if (p_inconsistent_simplices && added && !is_simplex_consistent(c)) { + p_inconsistent_simplices->insert(c); + } + } + } + +#ifdef GUDHI_TC_PROFILING + t.end(); + std::cerr << yellow << "done in " << t.num_seconds() + << " seconds.\n" << white; +#elif defined(DEBUG_TRACES) + std::cerr << yellow << "done.\n" << white; +#endif + + return max_dim; + } + + template > + std::ostream &export_to_off( + const Simplicial_complex &complex, std::ostream & os, + Simplex_set const *p_simpl_to_color_in_red = NULL, + Simplex_set const *p_simpl_to_color_in_green = NULL, + Simplex_set const *p_simpl_to_color_in_blue = NULL, + ProjectionFunctor const& point_projection = ProjectionFunctor()) + const { + return export_to_off( + os, false, p_simpl_to_color_in_red, p_simpl_to_color_in_green, + p_simpl_to_color_in_blue, &complex, point_projection); + } + + template > + std::ostream &export_to_off( + std::ostream & os, bool color_inconsistencies = false, + Simplex_set const *p_simpl_to_color_in_red = NULL, + Simplex_set const *p_simpl_to_color_in_green = NULL, + Simplex_set const *p_simpl_to_color_in_blue = NULL, + const Simplicial_complex *p_complex = NULL, + ProjectionFunctor const& point_projection = ProjectionFunctor()) const { + if (m_points.empty()) + return os; + + if (m_ambient_dim < 2) { + std::cerr << "Error: export_to_off => ambient dimension should be >= 2.\n"; + os << "Error: export_to_off => ambient dimension should be >= 2.\n"; + return os; + } + if (m_ambient_dim > 3) { + std::cerr << "Warning: export_to_off => ambient dimension should be " + "<= 3. Only the first 3 coordinates will be exported.\n"; + } + + if (m_intrinsic_dim < 1 || m_intrinsic_dim > 3) { + std::cerr << "Error: export_to_off => intrinsic dimension should be " + "between 1 and 3.\n"; + os << "Error: export_to_off => intrinsic dimension should be " + "between 1 and 3.\n"; + return os; + } + + std::stringstream output; + std::size_t num_simplices, num_vertices; + export_vertices_to_off(output, num_vertices, false, point_projection); + if (p_complex) { + export_simplices_to_off( + *p_complex, output, num_simplices, p_simpl_to_color_in_red, + p_simpl_to_color_in_green, p_simpl_to_color_in_blue); + } else { + export_simplices_to_off( + output, num_simplices, color_inconsistencies, p_simpl_to_color_in_red, + p_simpl_to_color_in_green, p_simpl_to_color_in_blue); + } + +#ifdef GUDHI_TC_EXPORT_NORMALS + os << "N"; +#endif + + os << "OFF \n" + << num_vertices << " " + << num_simplices << " " + << "0 \n" + << output.str(); + + return os; + } + + private: + void refresh_tangential_complex() { +#if defined(DEBUG_TRACES) || defined(GUDHI_TC_PROFILING) + std::cerr << yellow << "\nRefreshing TC... 
" << white; +#endif + +#ifdef GUDHI_TC_PROFILING + Gudhi::Clock t; +#endif +#ifdef GUDHI_USE_TBB + // Parallel + if (boost::is_convertible::value) { + tbb::parallel_for(tbb::blocked_range(0, m_points.size()), + Compute_tangent_triangulation(*this)); + } else { +#endif // GUDHI_USE_TBB + // Sequential + for (std::size_t i = 0; i < m_points.size(); ++i) + compute_tangent_triangulation(i); +#ifdef GUDHI_USE_TBB + } +#endif // GUDHI_USE_TBB + +#ifdef GUDHI_TC_PROFILING + t.end(); + std::cerr << yellow << "done in " << t.num_seconds() + << " seconds.\n" << white; +#elif defined(DEBUG_TRACES) + std::cerr << yellow << "done.\n" << white; +#endif + } + + // If the list of perturbed points is provided, it is much faster + template + void refresh_tangential_complex( + Point_indices_range const& perturbed_points_indices) { +#if defined(DEBUG_TRACES) || defined(GUDHI_TC_PROFILING) + std::cerr << yellow << "\nRefreshing TC... " << white; +#endif + +#ifdef GUDHI_TC_PROFILING + Gudhi::Clock t; +#endif + + // ANN tree containing only the perturbed points + Points_ds updated_pts_ds(m_points, perturbed_points_indices); + +#ifdef GUDHI_USE_TBB + // Parallel + if (boost::is_convertible::value) { + tbb::parallel_for(tbb::blocked_range(0, m_points.size()), + Refresh_tangent_triangulation(*this, updated_pts_ds)); + } else { +#endif // GUDHI_USE_TBB + // Sequential + for (std::size_t i = 0; i < m_points.size(); ++i) + refresh_tangent_triangulation(i, updated_pts_ds); +#ifdef GUDHI_USE_TBB + } +#endif // GUDHI_USE_TBB + +#ifdef GUDHI_TC_PROFILING + t.end(); + std::cerr << yellow << "done in " << t.num_seconds() + << " seconds.\n" << white; +#elif defined(DEBUG_TRACES) + std::cerr << yellow << "done.\n" << white; +#endif + } + + void export_inconsistent_stars_to_OFF_files(std::string const& filename_base) const { + // For each triangulation + for (std::size_t idx = 0; idx < m_points.size(); ++idx) { + // We build a SC along the way in case it's inconsistent + Simplicial_complex sc; + // For each cell + bool is_inconsistent = false; + Star::const_iterator it_inc_simplex = m_stars[idx].begin(); + Star::const_iterator it_inc_simplex_end = m_stars[idx].end(); + for (; it_inc_simplex != it_inc_simplex_end; + ++it_inc_simplex) { + // Skip infinite cells + if (is_infinite(*it_inc_simplex)) + continue; + + Simplex c = *it_inc_simplex; + c.insert(idx); // Add the missing index + + sc.add_simplex(c); + + // If we do not already know this star is inconsistent, test it + if (!is_inconsistent && !is_simplex_consistent(c)) + is_inconsistent = true; + } + + if (is_inconsistent) { + // Export star to OFF file + std::stringstream output_filename; + output_filename << filename_base << "_" << idx << ".off"; + std::ofstream off_stream(output_filename.str().c_str()); + export_to_off(sc, off_stream); + } + } + } + + class Compare_distance_to_ref_point { + public: + Compare_distance_to_ref_point(Point const& ref, K const& k) + : m_ref(ref), m_k(k) { } + + bool operator()(Point const& p1, Point const& p2) { + typename K::Squared_distance_d sqdist = + m_k.squared_distance_d_object(); + return sqdist(p1, m_ref) < sqdist(p2, m_ref); + } + + private: + Point const& m_ref; + K const& m_k; + }; + +#ifdef GUDHI_USE_TBB + // Functor for compute_tangential_complex function + class Compute_tangent_triangulation { + Tangential_complex & m_tc; + + public: + // Constructor + Compute_tangent_triangulation(Tangential_complex &tc) + : m_tc(tc) { } + + // Constructor + Compute_tangent_triangulation(const Compute_tangent_triangulation &ctt) + : 
m_tc(ctt.m_tc) { } + + // operator() + void operator()(const tbb::blocked_range& r) const { + for (size_t i = r.begin(); i != r.end(); ++i) + m_tc.compute_tangent_triangulation(i); + } + }; + + // Functor for refresh_tangential_complex function + class Refresh_tangent_triangulation { + Tangential_complex & m_tc; + Points_ds const& m_updated_pts_ds; + + public: + // Constructor + Refresh_tangent_triangulation(Tangential_complex &tc, Points_ds const& updated_pts_ds) + : m_tc(tc), m_updated_pts_ds(updated_pts_ds) { } + + // Constructor + Refresh_tangent_triangulation(const Refresh_tangent_triangulation &ctt) + : m_tc(ctt.m_tc), m_updated_pts_ds(ctt.m_updated_pts_ds) { } + + // operator() + void operator()(const tbb::blocked_range& r) const { + for (size_t i = r.begin(); i != r.end(); ++i) + m_tc.refresh_tangent_triangulation(i, m_updated_pts_ds); + } + }; +#endif // GUDHI_USE_TBB + + bool is_infinite(Simplex const& s) const { + return *s.rbegin() == std::numeric_limits::max(); + } + + // Output: "triangulation" is a Regular Triangulation containing at least the + // star of "center_pt" + // Returns the handle of the center vertex + Tr_vertex_handle compute_star(std::size_t i, const Point ¢er_pt, const Tangent_space_basis &tsb, + Triangulation &triangulation, bool verbose = false) { + int tangent_space_dim = tsb.dimension(); + const Tr_traits &local_tr_traits = triangulation.geom_traits(); + Tr_vertex_handle center_vertex; + + // Kernel functor & objects + typename K::Squared_distance_d k_sqdist = m_k.squared_distance_d_object(); + + // Triangulation's traits functor & objects + typename Tr_traits::Compute_weight_d point_weight = local_tr_traits.compute_weight_d_object(); + typename Tr_traits::Power_center_d power_center = local_tr_traits.power_center_d_object(); + + //*************************************************** + // Build a minimal triangulation in the tangent space + // (we only need the star of p) + //*************************************************** + + // Insert p + Tr_point proj_wp; + if (i == tsb.origin()) { + // Insert {(0, 0, 0...), m_weights[i]} + proj_wp = local_tr_traits.construct_weighted_point_d_object()(local_tr_traits.construct_point_d_object()(tangent_space_dim, CGAL::ORIGIN), + m_weights[i]); + } else { + const Weighted_point& wp = compute_perturbed_weighted_point(i); + proj_wp = project_point_and_compute_weight(wp, tsb, local_tr_traits); + } + + center_vertex = triangulation.insert(proj_wp); + center_vertex->data() = i; + if (verbose) + std::cerr << "* Inserted point #" << i << "\n"; + +#ifdef GUDHI_TC_VERY_VERBOSE + std::size_t num_attempts_to_insert_points = 1; + std::size_t num_inserted_points = 1; +#endif + // const int NUM_NEIGHBORS = 150; + // KNS_range ins_range = m_points_ds.query_k_nearest_neighbors(center_pt, NUM_NEIGHBORS); + INS_range ins_range = m_points_ds.query_incremental_nearest_neighbors(center_pt); + + // While building the local triangulation, we keep the radius + // of the sphere "star sphere" centered at "center_vertex" + // and which contains all the + // circumspheres of the star of "center_vertex" + boost::optional squared_star_sphere_radius_plus_margin; + + // Insert points until we find a point which is outside "star sphere" + for (auto nn_it = ins_range.begin(); + nn_it != ins_range.end(); + ++nn_it) { + std::size_t neighbor_point_idx = nn_it->first; + + // ith point = p, which is already inserted + if (neighbor_point_idx != i) { + // No need to lock the Mutex_for_perturb here since this will not be + // called while other threads are 
perturbing the positions + Point neighbor_pt; + FT neighbor_weight; + compute_perturbed_weighted_point(neighbor_point_idx, neighbor_pt, neighbor_weight); + + if (squared_star_sphere_radius_plus_margin && + k_sqdist(center_pt, neighbor_pt) > *squared_star_sphere_radius_plus_margin) + break; + + Tr_point proj_pt = project_point_and_compute_weight(neighbor_pt, neighbor_weight, tsb, + local_tr_traits); + +#ifdef GUDHI_TC_VERY_VERBOSE + ++num_attempts_to_insert_points; +#endif + + + Tr_vertex_handle vh = triangulation.insert_if_in_star(proj_pt, center_vertex); + // Tr_vertex_handle vh = triangulation.insert(proj_pt); + if (vh != Tr_vertex_handle()) { +#ifdef GUDHI_TC_VERY_VERBOSE + ++num_inserted_points; +#endif + if (verbose) + std::cerr << "* Inserted point #" << neighbor_point_idx << "\n"; + + vh->data() = neighbor_point_idx; + + // Let's recompute squared_star_sphere_radius_plus_margin + if (triangulation.current_dimension() >= tangent_space_dim) { + squared_star_sphere_radius_plus_margin = boost::none; + // Get the incident cells and look for the biggest circumsphere + std::vector incident_cells; + triangulation.incident_full_cells( + center_vertex, + std::back_inserter(incident_cells)); + for (typename std::vector::iterator cit = + incident_cells.begin(); cit != incident_cells.end(); ++cit) { + Tr_full_cell_handle cell = *cit; + if (triangulation.is_infinite(cell)) { + squared_star_sphere_radius_plus_margin = boost::none; + break; + } else { + // Note that this uses the perturbed point since it uses + // the points of the local triangulation + Tr_point c = power_center(boost::make_transform_iterator(cell->vertices_begin(), + vertex_handle_to_point), + boost::make_transform_iterator(cell->vertices_end(), + vertex_handle_to_point)); + + FT sq_power_sphere_diam = 4 * point_weight(c); + + if (!squared_star_sphere_radius_plus_margin || + sq_power_sphere_diam > *squared_star_sphere_radius_plus_margin) { + squared_star_sphere_radius_plus_margin = sq_power_sphere_diam; + } + } + } + + // Let's add the margin, now + // The value depends on whether we perturb weight or position + if (squared_star_sphere_radius_plus_margin) { + // "2*m_last_max_perturb" because both points can be perturbed + squared_star_sphere_radius_plus_margin = CGAL::square(std::sqrt(*squared_star_sphere_radius_plus_margin) + + 2 * m_last_max_perturb); + + // Save it in `m_squared_star_spheres_radii_incl_margin` + m_squared_star_spheres_radii_incl_margin[i] = + *squared_star_sphere_radius_plus_margin; + } else { + m_squared_star_spheres_radii_incl_margin[i] = FT(-1); + } + } + } + } + } + + return center_vertex; + } + + void refresh_tangent_triangulation(std::size_t i, Points_ds const& updated_pts_ds, bool verbose = false) { + if (verbose) + std::cerr << "** Refreshing tangent tri #" << i << " **\n"; + + if (m_squared_star_spheres_radii_incl_margin[i] == FT(-1)) + return compute_tangent_triangulation(i, verbose); + + Point center_point = compute_perturbed_point(i); + // Among updated point, what is the closer from our center point? 
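// Note on the shortcut implemented below: the star of point #i only has to be
// rebuilt if one of the perturbed points can enter its star sphere. We therefore
// look up the perturbed point closest to the center and compare it against the
// star sphere stored (with its safety margin) in
// m_squared_star_spheres_radii_incl_margin: a non-positive power distance means
// the sphere contains that point, so the local triangulation is recomputed;
// otherwise the cached star is still valid.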
+ std::size_t closest_pt_index = + updated_pts_ds.query_k_nearest_neighbors(center_point, 1, false).begin()->first; + + typename K::Construct_weighted_point_d k_constr_wp = + m_k.construct_weighted_point_d_object(); + typename K::Power_distance_d k_power_dist = m_k.power_distance_d_object(); + + // Construct a weighted point equivalent to the star sphere + Weighted_point star_sphere = k_constr_wp(compute_perturbed_point(i), + m_squared_star_spheres_radii_incl_margin[i]); + Weighted_point closest_updated_point = + compute_perturbed_weighted_point(closest_pt_index); + + // Is the "closest point" inside our star sphere? + if (k_power_dist(star_sphere, closest_updated_point) <= FT(0)) + compute_tangent_triangulation(i, verbose); + } + + void compute_tangent_triangulation(std::size_t i, bool verbose = false) { + if (verbose) + std::cerr << "** Computing tangent tri #" << i << " **\n"; + // std::cerr << "***********************************************\n"; + + // No need to lock the mutex here since this will not be called while + // other threads are perturbing the positions + const Point center_pt = compute_perturbed_point(i); + Tangent_space_basis &tsb = m_tangent_spaces[i]; + + // Estimate the tangent space + if (!m_are_tangent_spaces_computed[i]) { +#ifdef GUDHI_TC_EXPORT_NORMALS + tsb = compute_tangent_space(center_pt, i, true /*normalize*/, &m_orth_spaces[i]); +#else + tsb = compute_tangent_space(center_pt, i); +#endif + } + +#if defined(GUDHI_TC_PROFILING) && defined(GUDHI_TC_VERY_VERBOSE) + Gudhi::Clock t; +#endif + int tangent_space_dim = tangent_basis_dim(i); + Triangulation &local_tr = + m_triangulations[i].construct_triangulation(tangent_space_dim); + + m_triangulations[i].center_vertex() = + compute_star(i, center_pt, tsb, local_tr, verbose); + +#if defined(GUDHI_TC_PROFILING) && defined(GUDHI_TC_VERY_VERBOSE) + t.end(); + std::cerr << " - triangulation construction: " << t.num_seconds() << " s.\n"; + t.reset(); +#endif + +#ifdef GUDHI_TC_VERY_VERBOSE + std::cerr << "Inserted " << num_inserted_points << " points / " + << num_attempts_to_insert_points << " attemps to compute the star\n"; +#endif + + update_star(i); + +#if defined(GUDHI_TC_PROFILING) && defined(GUDHI_TC_VERY_VERBOSE) + t.end(); + std::cerr << " - update_star: " << t.num_seconds() << " s.\n"; +#endif + } + + // Updates m_stars[i] directly from m_triangulations[i] + + void update_star(std::size_t i) { + Star &star = m_stars[i]; + star.clear(); + Triangulation &local_tr = m_triangulations[i].tr(); + Tr_vertex_handle center_vertex = m_triangulations[i].center_vertex(); + int cur_dim_plus_1 = local_tr.current_dimension() + 1; + + std::vector incident_cells; + local_tr.incident_full_cells( + center_vertex, std::back_inserter(incident_cells)); + + typename std::vector::const_iterator it_c = incident_cells.begin(); + typename std::vector::const_iterator it_c_end = incident_cells.end(); + // For each cell + for (; it_c != it_c_end; ++it_c) { + // Will contain all indices except center_vertex + Incident_simplex incident_simplex; + for (int j = 0; j < cur_dim_plus_1; ++j) { + std::size_t index = (*it_c)->vertex(j)->data(); + if (index != i) + incident_simplex.insert(index); + } + star.push_back(incident_simplex); + } + } + + // Estimates tangent subspaces using PCA + + Tangent_space_basis compute_tangent_space(const Point &p + , const std::size_t i + , bool normalize_basis = true + , Orthogonal_space_basis *p_orth_space_basis = NULL + ) { + unsigned int num_pts_for_pca = static_cast (std::pow(GUDHI_TC_BASE_VALUE_FOR_PCA, 
m_intrinsic_dim)); + + // Kernel functors + typename K::Construct_vector_d constr_vec = + m_k.construct_vector_d_object(); + typename K::Compute_coordinate_d coord = + m_k.compute_coordinate_d_object(); + typename K::Squared_length_d sqlen = + m_k.squared_length_d_object(); + typename K::Scaled_vector_d scaled_vec = + m_k.scaled_vector_d_object(); + typename K::Scalar_product_d scalar_pdct = + m_k.scalar_product_d_object(); + typename K::Difference_of_vectors_d diff_vec = + m_k.difference_of_vectors_d_object(); + +#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM + KNS_range kns_range = m_points_ds_for_tse.query_k_nearest_neighbors( + p, num_pts_for_pca, false); + const Points &points_for_pca = m_points_for_tse; +#else + KNS_range kns_range = m_points_ds.query_k_nearest_neighbors(p, num_pts_for_pca, false); + const Points &points_for_pca = m_points; +#endif + + // One row = one point + Eigen::MatrixXd mat_points(num_pts_for_pca, m_ambient_dim); + auto nn_it = kns_range.begin(); + for (unsigned int j = 0; + j < num_pts_for_pca && nn_it != kns_range.end(); + ++j, ++nn_it) { + for (int i = 0; i < m_ambient_dim; ++i) { + mat_points(j, i) = CGAL::to_double(coord(points_for_pca[nn_it->first], i)); + } + } + Eigen::MatrixXd centered = mat_points.rowwise() - mat_points.colwise().mean(); + Eigen::MatrixXd cov = centered.adjoint() * centered; + Eigen::SelfAdjointEigenSolver eig(cov); + + Tangent_space_basis tsb(i); // p = compute_perturbed_point(i) here + + // The eigenvectors are sorted in increasing order of their corresponding + // eigenvalues + for (int j = m_ambient_dim - 1; + j >= m_ambient_dim - m_intrinsic_dim; + --j) { + if (normalize_basis) { + Vector v = constr_vec(m_ambient_dim, + eig.eigenvectors().col(j).data(), + eig.eigenvectors().col(j).data() + m_ambient_dim); + tsb.push_back(normalize_vector(v, m_k)); + } else { + tsb.push_back(constr_vec( + m_ambient_dim, + eig.eigenvectors().col(j).data(), + eig.eigenvectors().col(j).data() + m_ambient_dim)); + } + } + + if (p_orth_space_basis) { + p_orth_space_basis->set_origin(i); + for (int j = m_ambient_dim - m_intrinsic_dim - 1; + j >= 0; + --j) { + if (normalize_basis) { + Vector v = constr_vec(m_ambient_dim, + eig.eigenvectors().col(j).data(), + eig.eigenvectors().col(j).data() + m_ambient_dim); + p_orth_space_basis->push_back(normalize_vector(v, m_k)); + } else { + p_orth_space_basis->push_back(constr_vec( + m_ambient_dim, + eig.eigenvectors().col(j).data(), + eig.eigenvectors().col(j).data() + m_ambient_dim)); + } + } + } + + m_are_tangent_spaces_computed[i] = true; + + return tsb; + } + + // Compute the space tangent to a simplex (p1, p2, ... pn) + // TODO(CJ): Improve this? + // Basically, it takes all the neighbor points to p1, p2... pn and runs PCA + // on it. Note that most points are duplicated. 
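Stripped of the kernel plumbing, the tangent-space estimation in this function and in the per-point overload above reduces to a PCA of the neighbours' coordinates. A self-contained sketch using Eigen only (the function name and its arguments are invented for the illustration):

#include <Eigen/Dense>

// `neighbour_coords` holds one neighbour point per row (num_points x ambient_dim);
// the returned matrix has `intrinsic_dim` columns spanning the estimated tangent space.
Eigen::MatrixXd pca_tangent_basis(const Eigen::MatrixXd& neighbour_coords,
                                  int intrinsic_dim) {
  // Center the points, then diagonalize the covariance matrix.
  Eigen::MatrixXd centered =
      neighbour_coords.rowwise() - neighbour_coords.colwise().mean();
  Eigen::MatrixXd cov = centered.adjoint() * centered;
  Eigen::SelfAdjointEigenSolver<Eigen::MatrixXd> eig(cov);
  // Eigen sorts the eigenvalues in increasing order, so the tangent directions
  // are the eigenvectors associated with the largest ones: the last columns.
  return eig.eigenvectors().rightCols(intrinsic_dim);
}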
+ + Tangent_space_basis compute_tangent_space(const Simplex &s, bool normalize_basis = true) { + unsigned int num_pts_for_pca = static_cast (std::pow(GUDHI_TC_BASE_VALUE_FOR_PCA, m_intrinsic_dim)); + + // Kernel functors + typename K::Construct_vector_d constr_vec = + m_k.construct_vector_d_object(); + typename K::Compute_coordinate_d coord = + m_k.compute_coordinate_d_object(); + typename K::Squared_length_d sqlen = + m_k.squared_length_d_object(); + typename K::Scaled_vector_d scaled_vec = + m_k.scaled_vector_d_object(); + typename K::Scalar_product_d scalar_pdct = + m_k.scalar_product_d_object(); + typename K::Difference_of_vectors_d diff_vec = + m_k.difference_of_vectors_d_object(); + + // One row = one point + Eigen::MatrixXd mat_points(s.size() * num_pts_for_pca, m_ambient_dim); + unsigned int current_row = 0; + + for (Simplex::const_iterator it_index = s.begin(); + it_index != s.end(); ++it_index) { + const Point &p = m_points[*it_index]; + +#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM + KNS_range kns_range = m_points_ds_for_tse.query_k_nearest_neighbors( + p, num_pts_for_pca, false); + const Points &points_for_pca = m_points_for_tse; +#else + KNS_range kns_range = m_points_ds.query_k_nearest_neighbors(p, num_pts_for_pca, false); + const Points &points_for_pca = m_points; +#endif + + auto nn_it = kns_range.begin(); + for (; + current_row < num_pts_for_pca && nn_it != kns_range.end(); + ++current_row, ++nn_it) { + for (int i = 0; i < m_ambient_dim; ++i) { + mat_points(current_row, i) = + CGAL::to_double(coord(points_for_pca[nn_it->first], i)); + } + } + } + Eigen::MatrixXd centered = mat_points.rowwise() - mat_points.colwise().mean(); + Eigen::MatrixXd cov = centered.adjoint() * centered; + Eigen::SelfAdjointEigenSolver eig(cov); + + Tangent_space_basis tsb; + + // The eigenvectors are sorted in increasing order of their corresponding + // eigenvalues + for (int j = m_ambient_dim - 1; + j >= m_ambient_dim - m_intrinsic_dim; + --j) { + if (normalize_basis) { + Vector v = constr_vec(m_ambient_dim, + eig.eigenvectors().col(j).data(), + eig.eigenvectors().col(j).data() + m_ambient_dim); + tsb.push_back(normalize_vector(v, m_k)); + } else { + tsb.push_back(constr_vec( + m_ambient_dim, + eig.eigenvectors().col(j).data(), + eig.eigenvectors().col(j).data() + m_ambient_dim)); + } + } + + return tsb; + } + + // Returns the dimension of the ith local triangulation + + int tangent_basis_dim(std::size_t i) const { + return m_tangent_spaces[i].dimension(); + } + + Point compute_perturbed_point(std::size_t pt_idx) const { +#ifdef GUDHI_TC_PERTURB_POSITION + return m_k.translated_point_d_object()( + m_points[pt_idx], m_translations[pt_idx]); +#else + return m_points[pt_idx]; +#endif + } + + void compute_perturbed_weighted_point(std::size_t pt_idx, Point &p, FT &w) const { +#ifdef GUDHI_TC_PERTURB_POSITION + p = m_k.translated_point_d_object()( + m_points[pt_idx], m_translations[pt_idx]); +#else + p = m_points[pt_idx]; +#endif + w = m_weights[pt_idx]; + } + + Weighted_point compute_perturbed_weighted_point(std::size_t pt_idx) const { + typename K::Construct_weighted_point_d k_constr_wp = + m_k.construct_weighted_point_d_object(); + + Weighted_point wp = k_constr_wp( +#ifdef GUDHI_TC_PERTURB_POSITION + m_k.translated_point_d_object()(m_points[pt_idx], m_translations[pt_idx]), +#else + m_points[pt_idx], +#endif + m_weights[pt_idx]); + + return wp; + } + + Point unproject_point(const Tr_point &p, + const Tangent_space_basis &tsb, + const Tr_traits &tr_traits) const { + typename 
K::Translated_point_d k_transl = + m_k.translated_point_d_object(); + typename K::Scaled_vector_d k_scaled_vec = + m_k.scaled_vector_d_object(); + typename Tr_traits::Compute_coordinate_d coord = + tr_traits.compute_coordinate_d_object(); + + Point global_point = compute_perturbed_point(tsb.origin()); + for (int i = 0; i < m_intrinsic_dim; ++i) + global_point = k_transl(global_point, + k_scaled_vec(tsb[i], coord(p, i))); + + return global_point; + } + + // Project the point in the tangent space + // Resulting point coords are expressed in tsb's space + Tr_bare_point project_point(const Point &p, + const Tangent_space_basis &tsb, + const Tr_traits &tr_traits) const { + typename K::Scalar_product_d scalar_pdct = + m_k.scalar_product_d_object(); + typename K::Difference_of_points_d diff_points = + m_k.difference_of_points_d_object(); + + Vector v = diff_points(p, compute_perturbed_point(tsb.origin())); + + std::vector coords; + // Ambiant-space coords of the projected point + coords.reserve(tsb.dimension()); + for (std::size_t i = 0; i < m_intrinsic_dim; ++i) { + // Local coords are given by the scalar product with the vectors of tsb + FT coord = scalar_pdct(v, tsb[i]); + coords.push_back(coord); + } + + return tr_traits.construct_point_d_object()( + static_cast (coords.size()), coords.begin(), coords.end()); + } + + // Project the point in the tangent space + // The weight will be the squared distance between p and the projection of p + // Resulting point coords are expressed in tsb's space + + Tr_point project_point_and_compute_weight(const Weighted_point &wp, + const Tangent_space_basis &tsb, + const Tr_traits &tr_traits) const { + typename K::Point_drop_weight_d k_drop_w = + m_k.point_drop_weight_d_object(); + typename K::Compute_weight_d k_point_weight = + m_k.compute_weight_d_object(); + return project_point_and_compute_weight( + k_drop_w(wp), k_point_weight(wp), tsb, tr_traits); + } + + // Same as above, with slightly different parameters + Tr_point project_point_and_compute_weight(const Point &p, const FT w, + const Tangent_space_basis &tsb, + const Tr_traits &tr_traits) const { + const int point_dim = m_k.point_dimension_d_object()(p); + + typename K::Construct_point_d constr_pt = + m_k.construct_point_d_object(); + typename K::Scalar_product_d scalar_pdct = + m_k.scalar_product_d_object(); + typename K::Difference_of_points_d diff_points = + m_k.difference_of_points_d_object(); + typename K::Compute_coordinate_d coord = + m_k.compute_coordinate_d_object(); + typename K::Construct_cartesian_const_iterator_d ccci = + m_k.construct_cartesian_const_iterator_d_object(); + + Point origin = compute_perturbed_point(tsb.origin()); + Vector v = diff_points(p, origin); + + // Same dimension? Then the weight is 0 + bool same_dim = (point_dim == tsb.dimension()); + + std::vector coords; + // Ambiant-space coords of the projected point + std::vector p_proj(ccci(origin), ccci(origin, 0)); + coords.reserve(tsb.dimension()); + for (int i = 0; i < tsb.dimension(); ++i) { + // Local coords are given by the scalar product with the vectors of tsb + FT c = scalar_pdct(v, tsb[i]); + coords.push_back(c); + + // p_proj += c * tsb[i] + if (!same_dim) { + for (int j = 0; j < point_dim; ++j) + p_proj[j] += c * coord(tsb[i], j); + } + } + + // Same dimension? 
Then the weight is 0 + FT sq_dist_to_proj_pt = 0; + if (!same_dim) { + Point projected_pt = constr_pt(point_dim, p_proj.begin(), p_proj.end()); + sq_dist_to_proj_pt = m_k.squared_distance_d_object()(p, projected_pt); + } + + return tr_traits.construct_weighted_point_d_object() + (tr_traits.construct_point_d_object()(static_cast (coords.size()), coords.begin(), coords.end()), + w - sq_dist_to_proj_pt); + } + + // Project all the points in the tangent space + + template + std::vector project_points_and_compute_weights( + const Indexed_point_range &point_indices, + const Tangent_space_basis &tsb, + const Tr_traits &tr_traits) const { + std::vector ret; + for (typename Indexed_point_range::const_iterator + it = point_indices.begin(), it_end = point_indices.end(); + it != it_end; ++it) { + ret.push_back(project_point_and_compute_weight( + compute_perturbed_weighted_point(*it), tsb, tr_traits)); + } + return ret; + } + + // A simplex here is a local tri's full cell handle + + bool is_simplex_consistent(Tr_full_cell_handle fch, int cur_dim) const { + Simplex c; + for (int i = 0; i < cur_dim + 1; ++i) { + std::size_t data = fch->vertex(i)->data(); + c.insert(data); + } + return is_simplex_consistent(c); + } + + // A simplex here is a list of point indices + // TODO(CJ): improve it like the other "is_simplex_consistent" below + + bool is_simplex_consistent(Simplex const& simplex) const { + // Check if the simplex is in the stars of all its vertices + Simplex::const_iterator it_point_idx = simplex.begin(); + // For each point p of the simplex, we parse the incidents cells of p + // and we check if "simplex" is among them + for (; it_point_idx != simplex.end(); ++it_point_idx) { + std::size_t point_idx = *it_point_idx; + // Don't check infinite simplices + if (point_idx == std::numeric_limits::max()) + continue; + + Star const& star = m_stars[point_idx]; + + // What we're looking for is "simplex" \ point_idx + Incident_simplex is_to_find = simplex; + is_to_find.erase(point_idx); + + // For each cell + if (std::find(star.begin(), star.end(), is_to_find) == star.end()) + return false; + } + + return true; + } + + // A simplex here is a list of point indices + // "s" contains all the points of the simplex except "center_point" + // This function returns the points whose star doesn't contain the simplex + // N.B.: the function assumes that the simplex is contained in + // star(center_point) + + template // value_type = std::size_t + bool is_simplex_consistent( + std::size_t center_point, + Incident_simplex const& s, // without "center_point" + OutputIterator points_whose_star_does_not_contain_s, + bool check_also_in_non_maximal_faces = false) const { + Simplex full_simplex = s; + full_simplex.insert(center_point); + + // Check if the simplex is in the stars of all its vertices + Incident_simplex::const_iterator it_point_idx = s.begin(); + // For each point p of the simplex, we parse the incidents cells of p + // and we check if "simplex" is among them + for (; it_point_idx != s.end(); ++it_point_idx) { + std::size_t point_idx = *it_point_idx; + // Don't check infinite simplices + if (point_idx == std::numeric_limits::max()) + continue; + + Star const& star = m_stars[point_idx]; + + // What we're looking for is full_simplex \ point_idx + Incident_simplex is_to_find = full_simplex; + is_to_find.erase(point_idx); + + if (check_also_in_non_maximal_faces) { + // For each simplex "is" of the star, check if ic_to_simplex is + // included in "is" + bool found = false; + for (Star::const_iterator is = 
star.begin(), is_end = star.end(); + !found && is != is_end; ++is) { + if (std::includes(is->begin(), is->end(), + is_to_find.begin(), is_to_find.end())) + found = true; + } + + if (!found) + *points_whose_star_does_not_contain_s++ = point_idx; + } else { + // Does the star contain is_to_find? + if (std::find(star.begin(), star.end(), is_to_find) == star.end()) + *points_whose_star_does_not_contain_s++ = point_idx; + } + } + + return true; + } + + // A simplex here is a list of point indices + // It looks for s in star(p). + // "s" contains all the points of the simplex except p. + bool is_simplex_in_star(std::size_t p, + Incident_simplex const& s, + bool check_also_in_non_maximal_faces = true) const { + Star const& star = m_stars[p]; + + if (check_also_in_non_maximal_faces) { + // For each simplex "is" of the star, check if ic_to_simplex is + // included in "is" + bool found = false; + for (Star::const_iterator is = star.begin(), is_end = star.end(); + !found && is != is_end; ++is) { + if (std::includes(is->begin(), is->end(), s.begin(), s.end())) + found = true; + } + + return found; + } else { + return !(std::find(star.begin(), star.end(), s) == star.end()); + } + } + +#ifdef GUDHI_USE_TBB + // Functor for try_to_solve_inconsistencies_in_a_local_triangulation function + class Try_to_solve_inconsistencies_in_a_local_triangulation { + Tangential_complex & m_tc; + double m_max_perturb; + tbb::combinable &m_num_inconsistencies; + tbb::combinable > &m_updated_points; + + public: + // Constructor + Try_to_solve_inconsistencies_in_a_local_triangulation(Tangential_complex &tc, + double max_perturb, + tbb::combinable &num_inconsistencies, + tbb::combinable > &updated_points) + : m_tc(tc), + m_max_perturb(max_perturb), + m_num_inconsistencies(num_inconsistencies), + m_updated_points(updated_points) { } + + // Constructor + Try_to_solve_inconsistencies_in_a_local_triangulation(const Try_to_solve_inconsistencies_in_a_local_triangulation& + tsilt) + : m_tc(tsilt.m_tc), + m_max_perturb(tsilt.m_max_perturb), + m_num_inconsistencies(tsilt.m_num_inconsistencies), + m_updated_points(tsilt.m_updated_points) { } + + // operator() + void operator()(const tbb::blocked_range& r) const { + for (size_t i = r.begin(); i != r.end(); ++i) { + m_num_inconsistencies.local() += + m_tc.try_to_solve_inconsistencies_in_a_local_triangulation(i, m_max_perturb, + std::back_inserter(m_updated_points.local())); + } + } + }; +#endif // GUDHI_USE_TBB + + void perturb(std::size_t point_idx, double max_perturb) { + const Tr_traits &local_tr_traits = + m_triangulations[point_idx].tr().geom_traits(); + typename Tr_traits::Compute_coordinate_d coord = + local_tr_traits.compute_coordinate_d_object(); + typename K::Translated_point_d k_transl = + m_k.translated_point_d_object(); + typename K::Construct_vector_d k_constr_vec = + m_k.construct_vector_d_object(); + typename K::Scaled_vector_d k_scaled_vec = + m_k.scaled_vector_d_object(); + + CGAL::Random_points_in_ball_d + tr_point_in_ball_generator(m_intrinsic_dim, + m_random_generator.get_double(0., max_perturb)); + + Tr_point local_random_transl = + local_tr_traits.construct_weighted_point_d_object()(*tr_point_in_ball_generator++, 0); + Translation_for_perturb global_transl = k_constr_vec(m_ambient_dim); + const Tangent_space_basis &tsb = m_tangent_spaces[point_idx]; + for (int i = 0; i < m_intrinsic_dim; ++i) { + global_transl = k_transl(global_transl, + k_scaled_vec(tsb[i], coord(local_random_transl, i))); + } + // Parallel +#if defined(GUDHI_USE_TBB) + 
m_p_perturb_mutexes[point_idx].lock(); + m_translations[point_idx] = global_transl; + m_p_perturb_mutexes[point_idx].unlock(); + // Sequential +#else + m_translations[point_idx] = global_transl; +#endif + } + + // Return true if inconsistencies were found + template + bool try_to_solve_inconsistencies_in_a_local_triangulation(std::size_t tr_index, + double max_perturb, + OutputIt perturbed_pts_indices = CGAL::Emptyset_iterator()) { + bool is_inconsistent = false; + + Star const& star = m_stars[tr_index]; + Tr_vertex_handle center_vh = m_triangulations[tr_index].center_vertex(); + + // For each incident simplex + Star::const_iterator it_inc_simplex = star.begin(); + Star::const_iterator it_inc_simplex_end = star.end(); + for (; it_inc_simplex != it_inc_simplex_end; ++it_inc_simplex) { + const Incident_simplex &incident_simplex = *it_inc_simplex; + + // Don't check infinite cells + if (is_infinite(incident_simplex)) + continue; + + Simplex c = incident_simplex; + c.insert(tr_index); // Add the missing index + + // Perturb the center point + if (!is_simplex_consistent(c)) { + is_inconsistent = true; + + std::size_t idx = tr_index; + + perturb(tr_index, max_perturb); + *perturbed_pts_indices++ = idx; + + // We will try the other cells next time + break; + } + } + + return is_inconsistent; + } + + + // 1st line: number of points + // Then one point per line + std::ostream &export_point_set(std::ostream & os, + bool use_perturbed_points = false, + const char *coord_separator = " ") const { + if (use_perturbed_points) { + std::vector perturbed_points; + perturbed_points.reserve(m_points.size()); + for (std::size_t i = 0; i < m_points.size(); ++i) + perturbed_points.push_back(compute_perturbed_point(i)); + + return export_point_set( + m_k, perturbed_points, os, coord_separator); + } else { + return export_point_set( + m_k, m_points, os, coord_separator); + } + } + + template > + std::ostream &export_vertices_to_off( + std::ostream & os, std::size_t &num_vertices, + bool use_perturbed_points = false, + ProjectionFunctor const& point_projection = ProjectionFunctor()) const { + if (m_points.empty()) { + num_vertices = 0; + return os; + } + + // If m_intrinsic_dim = 1, we output each point two times + // to be able to export each segment as a flat triangle with 3 different + // indices (otherwise, Meshlab detects degenerated simplices) + const int N = (m_intrinsic_dim == 1 ? 2 : 1); + + // Kernel functors + typename K::Compute_coordinate_d coord = + m_k.compute_coordinate_d_object(); + +#ifdef GUDHI_TC_EXPORT_ALL_COORDS_IN_OFF + int num_coords = m_ambient_dim; +#else + int num_coords = std::min(m_ambient_dim, 3); +#endif + +#ifdef GUDHI_TC_EXPORT_NORMALS + OS_container::const_iterator it_os = m_orth_spaces.begin(); +#endif + typename Points::const_iterator it_p = m_points.begin(); + typename Points::const_iterator it_p_end = m_points.end(); + // For each point p + for (std::size_t i = 0; it_p != it_p_end; ++it_p, ++i) { + Point p = point_projection( + use_perturbed_points ? 
compute_perturbed_point(i) : *it_p); + for (int ii = 0; ii < N; ++ii) { + int j = 0; + for (; j < num_coords; ++j) + os << CGAL::to_double(coord(p, j)) << " "; + if (j == 2) + os << "0"; + +#ifdef GUDHI_TC_EXPORT_NORMALS + for (j = 0; j < num_coords; ++j) + os << " " << CGAL::to_double(coord(*it_os->begin(), j)); +#endif + os << "\n"; + } +#ifdef GUDHI_TC_EXPORT_NORMALS + ++it_os; +#endif + } + + num_vertices = N * m_points.size(); + return os; + } + + std::ostream &export_simplices_to_off(std::ostream & os, std::size_t &num_OFF_simplices, + bool color_inconsistencies = false, + Simplex_set const *p_simpl_to_color_in_red = NULL, + Simplex_set const *p_simpl_to_color_in_green = NULL, + Simplex_set const *p_simpl_to_color_in_blue = NULL) + const { + // If m_intrinsic_dim = 1, each point is output two times + // (see export_vertices_to_off) + num_OFF_simplices = 0; + std::size_t num_maximal_simplices = 0; + std::size_t num_inconsistent_maximal_simplices = 0; + std::size_t num_inconsistent_stars = 0; + typename Tr_container::const_iterator it_tr = m_triangulations.begin(); + typename Tr_container::const_iterator it_tr_end = m_triangulations.end(); + // For each triangulation + for (std::size_t idx = 0; it_tr != it_tr_end; ++it_tr, ++idx) { + bool is_star_inconsistent = false; + + Triangulation const& tr = it_tr->tr(); + Tr_vertex_handle center_vh = it_tr->center_vertex(); + + if (&tr == NULL || tr.current_dimension() < m_intrinsic_dim) + continue; + + // Color for this star + std::stringstream color; + // color << rand()%256 << " " << 100+rand()%156 << " " << 100+rand()%156; + color << 128 << " " << 128 << " " << 128; + + // Gather the triangles here, with an int telling its color + typedef std::vector > Star_using_triangles; + Star_using_triangles star_using_triangles; + + // For each cell of the star + Star::const_iterator it_inc_simplex = m_stars[idx].begin(); + Star::const_iterator it_inc_simplex_end = m_stars[idx].end(); + for (; it_inc_simplex != it_inc_simplex_end; ++it_inc_simplex) { + Simplex c = *it_inc_simplex; + c.insert(idx); + std::size_t num_vertices = c.size(); + ++num_maximal_simplices; + + int color_simplex = -1; // -1=no color, 0=yellow, 1=red, 2=green, 3=blue + if (color_inconsistencies && !is_simplex_consistent(c)) { + ++num_inconsistent_maximal_simplices; + color_simplex = 0; + is_star_inconsistent = true; + } else { + if (p_simpl_to_color_in_red && + std::find( + p_simpl_to_color_in_red->begin(), + p_simpl_to_color_in_red->end(), + c) != p_simpl_to_color_in_red->end()) { + color_simplex = 1; + } else if (p_simpl_to_color_in_green && + std::find( + p_simpl_to_color_in_green->begin(), + p_simpl_to_color_in_green->end(), + c) != p_simpl_to_color_in_green->end()) { + color_simplex = 2; + } else if (p_simpl_to_color_in_blue && + std::find( + p_simpl_to_color_in_blue->begin(), + p_simpl_to_color_in_blue->end(), + c) != p_simpl_to_color_in_blue->end()) { + color_simplex = 3; + } + } + + // If m_intrinsic_dim = 1, each point is output two times, + // so we need to multiply each index by 2 + // And if only 2 vertices, add a third one (each vertex is duplicated in + // the file when m_intrinsic dim = 2) + if (m_intrinsic_dim == 1) { + Simplex tmp_c; + Simplex::iterator it = c.begin(); + for (; it != c.end(); ++it) + tmp_c.insert(*it * 2); + if (num_vertices == 2) + tmp_c.insert(*tmp_c.rbegin() + 1); + + c = tmp_c; + } + + if (num_vertices <= 3) { + star_using_triangles.push_back(std::make_pair(c, color_simplex)); + } else { + // num_vertices >= 4: decompose the simplex in 
triangles + std::vector booleans(num_vertices, false); + std::fill(booleans.begin() + num_vertices - 3, booleans.end(), true); + do { + Simplex triangle; + Simplex::iterator it = c.begin(); + for (int i = 0; it != c.end(); ++i, ++it) { + if (booleans[i]) + triangle.insert(*it); + } + star_using_triangles.push_back( + std::make_pair(triangle, color_simplex)); + } while (std::next_permutation(booleans.begin(), booleans.end())); + } + } + + // For each cell + Star_using_triangles::const_iterator it_simplex = + star_using_triangles.begin(); + Star_using_triangles::const_iterator it_simplex_end = + star_using_triangles.end(); + for (; it_simplex != it_simplex_end; ++it_simplex) { + const Simplex &c = it_simplex->first; + + // Don't export infinite cells + if (is_infinite(c)) + continue; + + int color_simplex = it_simplex->second; + + std::stringstream sstr_c; + + Simplex::const_iterator it_point_idx = c.begin(); + for (; it_point_idx != c.end(); ++it_point_idx) { + sstr_c << *it_point_idx << " "; + } + + os << 3 << " " << sstr_c.str(); + if (color_inconsistencies || p_simpl_to_color_in_red + || p_simpl_to_color_in_green || p_simpl_to_color_in_blue) { + switch (color_simplex) { + case 0: os << " 255 255 0"; + break; + case 1: os << " 255 0 0"; + break; + case 2: os << " 0 255 0"; + break; + case 3: os << " 0 0 255"; + break; + default: os << " " << color.str(); + break; + } + } + ++num_OFF_simplices; + os << "\n"; + } + if (is_star_inconsistent) + ++num_inconsistent_stars; + } + +#ifdef DEBUG_TRACES + std::cerr + << "\n==========================================================\n" + << "Export from list of stars to OFF:\n" + << " * Number of vertices: " << m_points.size() << "\n" + << " * Total number of maximal simplices: " << num_maximal_simplices + << "\n"; + if (color_inconsistencies) { + std::cerr + << " * Number of inconsistent stars: " + << num_inconsistent_stars << " (" + << (m_points.size() > 0 ? + 100. * num_inconsistent_stars / m_points.size() : 0.) << "%)\n" + << " * Number of inconsistent maximal simplices: " + << num_inconsistent_maximal_simplices << " (" + << (num_maximal_simplices > 0 ? + 100. * num_inconsistent_maximal_simplices / num_maximal_simplices + : 0.) 
<< "%)\n"; + } + std::cerr << "==========================================================\n"; +#endif + + return os; + } + + public: + std::ostream &export_simplices_to_off( + const Simplicial_complex &complex, + std::ostream & os, std::size_t &num_OFF_simplices, + Simplex_set const *p_simpl_to_color_in_red = NULL, + Simplex_set const *p_simpl_to_color_in_green = NULL, + Simplex_set const *p_simpl_to_color_in_blue = NULL) + const { + typedef Simplicial_complex::Simplex Simplex; + typedef Simplicial_complex::Simplex_set Simplex_set; + + // If m_intrinsic_dim = 1, each point is output two times + // (see export_vertices_to_off) + num_OFF_simplices = 0; + std::size_t num_maximal_simplices = 0; + + typename Simplex_set::const_iterator it_s = + complex.simplex_range().begin(); + typename Simplex_set::const_iterator it_s_end = + complex.simplex_range().end(); + // For each simplex + for (; it_s != it_s_end; ++it_s) { + Simplex c = *it_s; + ++num_maximal_simplices; + + int color_simplex = -1; // -1=no color, 0=yellow, 1=red, 2=green, 3=blue + if (p_simpl_to_color_in_red && + std::find( + p_simpl_to_color_in_red->begin(), + p_simpl_to_color_in_red->end(), + c) != p_simpl_to_color_in_red->end()) { + color_simplex = 1; + } else if (p_simpl_to_color_in_green && + std::find(p_simpl_to_color_in_green->begin(), + p_simpl_to_color_in_green->end(), + c) != p_simpl_to_color_in_green->end()) { + color_simplex = 2; + } else if (p_simpl_to_color_in_blue && + std::find(p_simpl_to_color_in_blue->begin(), + p_simpl_to_color_in_blue->end(), + c) != p_simpl_to_color_in_blue->end()) { + color_simplex = 3; + } + + // Gather the triangles here + typedef std::vector Triangles; + Triangles triangles; + + std::size_t num_vertices = c.size(); + // Do not export smaller dimension simplices + if (num_vertices < m_intrinsic_dim + 1) + continue; + + // If m_intrinsic_dim = 1, each point is output two times, + // so we need to multiply each index by 2 + // And if only 2 vertices, add a third one (each vertex is duplicated in + // the file when m_intrinsic dim = 2) + if (m_intrinsic_dim == 1) { + Simplex tmp_c; + Simplex::iterator it = c.begin(); + for (; it != c.end(); ++it) + tmp_c.insert(*it * 2); + if (num_vertices == 2) + tmp_c.insert(*tmp_c.rbegin() + 1); + + c = tmp_c; + } + + if (num_vertices <= 3) { + triangles.push_back(c); + } else { + // num_vertices >= 4: decompose the simplex in triangles + std::vector booleans(num_vertices, false); + std::fill(booleans.begin() + num_vertices - 3, booleans.end(), true); + do { + Simplex triangle; + Simplex::iterator it = c.begin(); + for (int i = 0; it != c.end(); ++i, ++it) { + if (booleans[i]) + triangle.insert(*it); + } + triangles.push_back(triangle); + } while (std::next_permutation(booleans.begin(), booleans.end())); + } + + // For each cell + Triangles::const_iterator it_tri = triangles.begin(); + Triangles::const_iterator it_tri_end = triangles.end(); + for (; it_tri != it_tri_end; ++it_tri) { + // Don't export infinite cells + if (is_infinite(*it_tri)) + continue; + + os << 3 << " "; + Simplex::const_iterator it_point_idx = it_tri->begin(); + for (; it_point_idx != it_tri->end(); ++it_point_idx) { + os << *it_point_idx << " "; + } + + if (p_simpl_to_color_in_red || p_simpl_to_color_in_green + || p_simpl_to_color_in_blue) { + switch (color_simplex) { + case 0: os << " 255 255 0"; + break; + case 1: os << " 255 0 0"; + break; + case 2: os << " 0 255 0"; + break; + case 3: os << " 0 0 255"; + break; + default: os << " 128 128 128"; + break; + } + } + + ++num_OFF_simplices; 
+ os << "\n"; + } + } + +#ifdef DEBUG_TRACES + std::cerr + << "\n==========================================================\n" + << "Export from complex to OFF:\n" + << " * Number of vertices: " << m_points.size() << "\n" + << " * Total number of maximal simplices: " << num_maximal_simplices + << "\n" + << "==========================================================\n"; +#endif + + return os; + } + + private: + const K m_k; + const int m_intrinsic_dim; + const int m_ambient_dim; + + Points m_points; + Weights m_weights; +#ifdef GUDHI_TC_PERTURB_POSITION + Translations_for_perturb m_translations; +#if defined(GUDHI_USE_TBB) + Mutex_for_perturb *m_p_perturb_mutexes; +#endif +#endif + + Points_ds m_points_ds; + double m_last_max_perturb; + std::vector m_are_tangent_spaces_computed; + TS_container m_tangent_spaces; +#ifdef GUDHI_TC_EXPORT_NORMALS + OS_container m_orth_spaces; +#endif + Tr_container m_triangulations; // Contains the triangulations + // and their center vertex + Stars_container m_stars; + std::vector m_squared_star_spheres_radii_incl_margin; + +#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM + Points m_points_for_tse; + Points_ds m_points_ds_for_tse; +#endif + + mutable CGAL::Random m_random_generator; +}; // /class Tangential_complex + +} // end namespace tangential_complex +} // end namespace Gudhi + +#endif // TANGENTIAL_COMPLEX_H_ diff --git a/src/Tangential_complex/include/gudhi/Tangential_complex/Simplicial_complex.h b/src/Tangential_complex/include/gudhi/Tangential_complex/Simplicial_complex.h new file mode 100644 index 00000000..65c74ca5 --- /dev/null +++ b/src/Tangential_complex/include/gudhi/Tangential_complex/Simplicial_complex.h @@ -0,0 +1,539 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Clement Jamin + * + * Copyright (C) 2016 INRIA + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +#ifndef TANGENTIAL_COMPLEX_SIMPLICIAL_COMPLEX_H_ +#define TANGENTIAL_COMPLEX_SIMPLICIAL_COMPLEX_H_ + +#include +#include +#include +#include + +#include + +// For is_pure_pseudomanifold +#include +#include +#include +#include + +#include +#include +#include +#include // for map<> +#include // for vector<> +#include // for set<> + +namespace Gudhi { +namespace tangential_complex { +namespace internal { + +class Simplicial_complex { + public: + typedef boost::container::flat_set Simplex; + typedef std::set Simplex_set; + + // If perform_checks = true, the function: + // - won't insert the simplex if it is already in a higher dim simplex + // - will erase any lower-dim simplices that are faces of the new simplex + // Returns true if the simplex was added + bool add_simplex( + const Simplex &s, bool perform_checks = true) { + if (perform_checks) { + unsigned int num_pts = static_cast (s.size()); + std::vector to_erase; + bool check_higher_dim_simpl = true; + for (Complex::iterator it_simplex = m_complex.begin(), + it_simplex_end = m_complex.end(); + it_simplex != it_simplex_end; + ++it_simplex) { + // Check if the simplex is not already in a higher dim simplex + if (check_higher_dim_simpl + && it_simplex->size() > num_pts + && std::includes(it_simplex->begin(), it_simplex->end(), + s.begin(), s.end())) { + // No need to insert it, then + return false; + } + // Check if the simplex includes some lower-dim simplices + if (it_simplex->size() < num_pts + && std::includes(s.begin(), s.end(), + it_simplex->begin(), it_simplex->end())) { + to_erase.push_back(it_simplex); + // We don't need to check higher-sim simplices any more + check_higher_dim_simpl = false; + } + } + for (std::vector::const_iterator it = to_erase.begin(); + it != to_erase.end(); ++it) { + m_complex.erase(*it); + } + } + return m_complex.insert(s).second; + } + + const Simplex_set &simplex_range() const { + return m_complex; + } + + bool empty() { + return m_complex.empty(); + } + + void clear() { + m_complex.clear(); + } + + template + void get_simplices_matching_test(Test test, Output_it out) { + for (Complex::const_iterator it_simplex = m_complex.begin(), + it_simplex_end = m_complex.end(); + it_simplex != it_simplex_end; + ++it_simplex) { + if (test(*it_simplex)) + *out++ = *it_simplex; + } + } + + // When a simplex S has only one co-face C, we can remove S and C + // without changing the topology + + void collapse(int max_simplex_dim, bool quiet = false) { +#ifdef DEBUG_TRACES + if (!quiet) + std::cerr << "Collapsing... "; +#endif + // We note k = max_simplex_dim - 1 + int k = max_simplex_dim - 1; + + typedef Complex::iterator Simplex_iterator; + typedef std::vector Simplex_iterator_list; + typedef std::map Cofaces_map; + + std::size_t num_collapsed_maximal_simplices = 0; + do { + num_collapsed_maximal_simplices = 0; + // Create a map associating each non-maximal k-faces to the list of its + // maximal cofaces + Cofaces_map cofaces_map; + for (Complex::const_iterator it_simplex = m_complex.begin(), + it_simplex_end = m_complex.end(); + it_simplex != it_simplex_end; + ++it_simplex) { + if (static_cast (it_simplex->size()) > k + 1) { + std::vector k_faces; + // Get the k-faces composing the simplex + combinations(*it_simplex, k + 1, std::back_inserter(k_faces)); + for (const auto &comb : k_faces) + cofaces_map[comb].push_back(it_simplex); + } + } + + // For each non-maximal k-face F, if F has only one maximal coface Cf: + // - Look for the other k-faces F2, F3... 
of Cf in the map and: + // * if the list contains only Cf, clear the list (we don't remove the + // list since it creates troubles with the iterators) and add the F2, + // F3... to the complex + // * otherwise, remove Cf from the associated list + // - Remove Cf from the complex + for (Cofaces_map::const_iterator it_map_elt = cofaces_map.begin(), + it_map_end = cofaces_map.end(); + it_map_elt != it_map_end; + ++it_map_elt) { + if (it_map_elt->second.size() == 1) { + std::vector k_faces; + const Simplex_iterator_list::value_type &it_Cf = + *it_map_elt->second.begin(); + GUDHI_CHECK(it_Cf->size() == max_simplex_dim + 1, + std::logic_error("Wrong dimension")); + // Get the k-faces composing the simplex + combinations(*it_Cf, k + 1, std::back_inserter(k_faces)); + for (const auto &f2 : k_faces) { + // Skip F + if (f2 != it_map_elt->first) { + Cofaces_map::iterator it_comb_in_map = cofaces_map.find(f2); + if (it_comb_in_map->second.size() == 1) { + it_comb_in_map->second.clear(); + m_complex.insert(f2); + } else { // it_comb_in_map->second.size() > 1 + Simplex_iterator_list::iterator it = std::find(it_comb_in_map->second.begin(), + it_comb_in_map->second.end(), + it_Cf); + GUDHI_CHECK(it != it_comb_in_map->second.end(), + std::logic_error("Error: it == it_comb_in_map->second.end()")); + it_comb_in_map->second.erase(it); + } + } + } + m_complex.erase(it_Cf); + ++num_collapsed_maximal_simplices; + } + } + // Repeat until no maximal simplex got removed + } while (num_collapsed_maximal_simplices > 0); + + // Collapse the lower dimension simplices + if (k > 0) + collapse(max_simplex_dim - 1, true); + +#ifdef DEBUG_TRACES + if (!quiet) + std::cerr << "done.\n"; +#endif + } + + void display_stats() const { + std::cerr << yellow << "Complex stats:\n" << white; + + if (m_complex.empty()) { + std::cerr << " * No simplices.\n"; + } else { + // Number of simplex for each dimension + std::map simplex_stats; + + for (Complex::const_iterator it_simplex = m_complex.begin(), + it_simplex_end = m_complex.end(); + it_simplex != it_simplex_end; + ++it_simplex) { + ++simplex_stats[static_cast (it_simplex->size()) - 1]; + } + + for (std::map::const_iterator it_map = simplex_stats.begin(); + it_map != simplex_stats.end(); ++it_map) { + std::cerr << " * " << it_map->first << "-simplices: " + << it_map->second << "\n"; + } + } + } + + // verbose_level = 0, 1 or 2 + bool is_pure_pseudomanifold__do_not_check_if_stars_are_connected(int simplex_dim, + bool allow_borders = false, + bool exit_at_the_first_problem = false, + int verbose_level = 0, + std::size_t *p_num_wrong_dim_simplices = NULL, + std::size_t *p_num_wrong_number_of_cofaces = NULL) const { + typedef Simplex K_1_face; + typedef std::map Cofaces_map; + + std::size_t num_wrong_dim_simplices = 0; + std::size_t num_wrong_number_of_cofaces = 0; + + // Counts the number of cofaces of each K_1_face + + // Create a map associating each non-maximal k-faces to the list of its + // maximal cofaces + Cofaces_map cofaces_map; + for (Complex::const_iterator it_simplex = m_complex.begin(), + it_simplex_end = m_complex.end(); + it_simplex != it_simplex_end; + ++it_simplex) { + if (static_cast (it_simplex->size()) != simplex_dim + 1) { + if (verbose_level >= 2) + std::cerr << "Found a simplex with dim = " + << it_simplex->size() - 1 << "\n"; + ++num_wrong_dim_simplices; + } else { + std::vector k_1_faces; + // Get the facets composing the simplex + combinations( + *it_simplex, simplex_dim, std::back_inserter(k_1_faces)); + for (const auto &k_1_face : k_1_faces) { + 
++cofaces_map[k_1_face]; + } + } + } + + for (Cofaces_map::const_iterator it_map_elt = cofaces_map.begin(), + it_map_end = cofaces_map.end(); + it_map_elt != it_map_end; + ++it_map_elt) { + if (it_map_elt->second != 2 + && (!allow_borders || it_map_elt->second != 1)) { + if (verbose_level >= 2) + std::cerr << "Found a k-1-face with " + << it_map_elt->second << " cofaces\n"; + + if (exit_at_the_first_problem) + return false; + else + ++num_wrong_number_of_cofaces; + } + } + + bool ret = num_wrong_dim_simplices == 0 && num_wrong_number_of_cofaces == 0; + + if (verbose_level >= 1) { + std::cerr << "Pure pseudo-manifold: "; + if (ret) { + std::cerr << green << "YES" << white << "\n"; + } else { + std::cerr << red << "NO" << white << "\n" + << " * Number of wrong dimension simplices: " + << num_wrong_dim_simplices << "\n" + << " * Number of wrong number of cofaces: " + << num_wrong_number_of_cofaces << "\n"; + } + } + + if (p_num_wrong_dim_simplices) + *p_num_wrong_dim_simplices = num_wrong_dim_simplices; + if (p_num_wrong_number_of_cofaces) + *p_num_wrong_number_of_cofaces = num_wrong_number_of_cofaces; + + return ret; + } + + template + std::size_t num_K_simplices() const { + Simplex_set k_simplices; + + for (Complex::const_iterator it_simplex = m_complex.begin(), + it_simplex_end = m_complex.end(); + it_simplex != it_simplex_end; + ++it_simplex) { + if (it_simplex->size() == K + 1) { + k_simplices.insert(*it_simplex); + } else if (it_simplex->size() > K + 1) { + // Get the k-faces composing the simplex + combinations( + *it_simplex, K + 1, std::inserter(k_simplices, k_simplices.begin())); + } + } + + return k_simplices.size(); + } + + std::ptrdiff_t euler_characteristic(bool verbose = false) const { + if (verbose) + std::cerr << "\nComputing Euler characteristic of the complex...\n"; + + std::size_t num_vertices = num_K_simplices<0>(); + std::size_t num_edges = num_K_simplices<1>(); + std::size_t num_triangles = num_K_simplices<2>(); + + std::ptrdiff_t ec = + (std::ptrdiff_t) num_vertices + - (std::ptrdiff_t) num_edges + + (std::ptrdiff_t) num_triangles; + + if (verbose) + std::cerr << "Euler characteristic: V - E + F = " + << num_vertices << " - " << num_edges << " + " << num_triangles << " = " + << blue + << ec + << white << "\n"; + + return ec; + } + + // TODO(CJ): ADD COMMENTS + + bool is_pure_pseudomanifold( + int simplex_dim, + std::size_t num_vertices, + bool allow_borders = false, + bool exit_at_the_first_problem = false, + int verbose_level = 0, + std::size_t *p_num_wrong_dim_simplices = NULL, + std::size_t *p_num_wrong_number_of_cofaces = NULL, + std::size_t *p_num_unconnected_stars = NULL, + Simplex_set *p_wrong_dim_simplices = NULL, + Simplex_set *p_wrong_number_of_cofaces_simplices = NULL, + Simplex_set *p_unconnected_stars_simplices = NULL) const { + // If simplex_dim == 1, we do not need to check if stars are connected + if (simplex_dim == 1) { + if (p_num_unconnected_stars) + *p_num_unconnected_stars = 0; + return is_pure_pseudomanifold__do_not_check_if_stars_are_connected(simplex_dim, + allow_borders, + exit_at_the_first_problem, + verbose_level, + p_num_wrong_dim_simplices, + p_num_wrong_number_of_cofaces); + } + // Associates each vertex (= the index in the vector) + // to its star (list of simplices) + typedef std::vector > Stars; + std::size_t num_wrong_dim_simplices = 0; + std::size_t num_wrong_number_of_cofaces = 0; + std::size_t num_unconnected_stars = 0; + + // Fills a Stars data structure + Stars stars; + stars.resize(num_vertices); + for (Complex::const_iterator 
it_simplex = m_complex.begin(), + it_simplex_end = m_complex.end(); + it_simplex != it_simplex_end; + ++it_simplex) { + if (static_cast (it_simplex->size()) != simplex_dim + 1) { + if (verbose_level >= 2) + std::cerr << "Found a simplex with dim = " + << it_simplex->size() - 1 << "\n"; + ++num_wrong_dim_simplices; + if (p_wrong_dim_simplices) + p_wrong_dim_simplices->insert(*it_simplex); + } else { + for (Simplex::const_iterator it_point_idx = it_simplex->begin(); + it_point_idx != it_simplex->end(); + ++it_point_idx) { + stars[*it_point_idx].push_back(it_simplex); + } + } + } + + // Now, for each star, we have a vector of its d-simplices + // i.e. one index for each d-simplex + // Boost Graph only deals with indexes, so we also need indexes for the + // (d-1)-simplices + std::size_t center_vertex_index = 0; + for (Stars::const_iterator it_star = stars.begin(); + it_star != stars.end(); + ++it_star, ++center_vertex_index) { + typedef std::map > + Dm1_faces_to_adj_D_faces; + Dm1_faces_to_adj_D_faces dm1_faces_to_adj_d_faces; + + for (std::size_t i_dsimpl = 0; i_dsimpl < it_star->size(); ++i_dsimpl) { + Simplex dm1_simpl_of_link = *((*it_star)[i_dsimpl]); + dm1_simpl_of_link.erase(center_vertex_index); + // Copy it to a vector so that we can use operator[] on it + std::vector dm1_simpl_of_link_vec( + dm1_simpl_of_link.begin(), dm1_simpl_of_link.end()); + + CGAL::Combination_enumerator dm2_simplices( + simplex_dim - 1, 0, simplex_dim); + for (; !dm2_simplices.finished(); ++dm2_simplices) { + Simplex dm2_simpl; + for (int j = 0; j < simplex_dim - 1; ++j) + dm2_simpl.insert(dm1_simpl_of_link_vec[dm2_simplices[j]]); + dm1_faces_to_adj_d_faces[dm2_simpl].push_back(i_dsimpl); + } + } + + Adj_graph adj_graph; + std::vector d_faces_descriptors; + d_faces_descriptors.resize(it_star->size()); + for (std::size_t j = 0; j < it_star->size(); ++j) + d_faces_descriptors[j] = boost::add_vertex(adj_graph); + + Dm1_faces_to_adj_D_faces::const_iterator dm1_to_d_it = + dm1_faces_to_adj_d_faces.begin(); + Dm1_faces_to_adj_D_faces::const_iterator dm1_to_d_it_end = + dm1_faces_to_adj_d_faces.end(); + for (std::size_t i_km1_face = 0; + dm1_to_d_it != dm1_to_d_it_end; + ++dm1_to_d_it, ++i_km1_face) { + Graph_vertex km1_gv = boost::add_vertex(adj_graph); + + for (std::vector::const_iterator kface_it = + dm1_to_d_it->second.begin(); + kface_it != dm1_to_d_it->second.end(); + ++kface_it) { + boost::add_edge(km1_gv, *kface_it, adj_graph); + } + + if (dm1_to_d_it->second.size() != 2 + && (!allow_borders || dm1_to_d_it->second.size() != 1)) { + ++num_wrong_number_of_cofaces; + if (p_wrong_number_of_cofaces_simplices) { + for (auto idx : dm1_to_d_it->second) + p_wrong_number_of_cofaces_simplices->insert(*((*it_star)[idx])); + } + } + } + + // What is left is to check the connexity + bool is_connected = true; + if (boost::num_vertices(adj_graph) > 0) { + std::vector components(boost::num_vertices(adj_graph)); + is_connected = + (boost::connected_components(adj_graph, &components[0]) == 1); + } + + if (!is_connected) { + if (verbose_level >= 2) + std::cerr << "Error: star #" << center_vertex_index + << " is not connected\n"; + ++num_unconnected_stars; + if (p_unconnected_stars_simplices) { + for (std::vector::const_iterator + it_simpl = it_star->begin(), + it_simpl_end = it_star->end(); + it_simpl != it_simpl_end; + ++it_simpl) { + p_unconnected_stars_simplices->insert(**it_simpl); + } + } + } + } + + // Each one has been counted several times ("simplex_dim" times) + num_wrong_number_of_cofaces /= simplex_dim; + + bool ret = 
+ num_wrong_dim_simplices == 0 + && num_wrong_number_of_cofaces == 0 + && num_unconnected_stars == 0; + + if (verbose_level >= 1) { + std::cerr << "Pure pseudo-manifold: "; + if (ret) { + std::cerr << green << "YES" << white << "\n"; + } else { + std::cerr << red << "NO" << white << "\n" + << " * Number of wrong dimension simplices: " + << num_wrong_dim_simplices << "\n" + << " * Number of wrong number of cofaces: " + << num_wrong_number_of_cofaces << "\n" + << " * Number of not-connected stars: " + << num_unconnected_stars << "\n"; + } + } + + if (p_num_wrong_dim_simplices) + *p_num_wrong_dim_simplices = num_wrong_dim_simplices; + if (p_num_wrong_number_of_cofaces) + *p_num_wrong_number_of_cofaces = num_wrong_number_of_cofaces; + if (p_num_unconnected_stars) + *p_num_unconnected_stars = num_unconnected_stars; + + return ret; + } + + private: + typedef Simplex_set Complex; + + // graph is an adjacency list + typedef boost::adjacency_list Adj_graph; + // map that gives to a certain simplex its node in graph and its dimension + typedef boost::graph_traits::vertex_descriptor Graph_vertex; + typedef boost::graph_traits::edge_descriptor Graph_edge; + + Complex m_complex; +}; // class Simplicial_complex + +} // namespace internal +} // namespace tangential_complex +} // namespace Gudhi + +#endif // TANGENTIAL_COMPLEX_SIMPLICIAL_COMPLEX_H_ diff --git a/src/Tangential_complex/include/gudhi/Tangential_complex/config.h b/src/Tangential_complex/include/gudhi/Tangential_complex/config.h new file mode 100644 index 00000000..98a1b14f --- /dev/null +++ b/src/Tangential_complex/include/gudhi/Tangential_complex/config.h @@ -0,0 +1,44 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Clement Jamin + * + * Copyright (C) 2016 INRIA + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +#ifndef TANGENTIAL_COMPLEX_CONFIG_H_ +#define TANGENTIAL_COMPLEX_CONFIG_H_ + +#include + +// ========================= Debugging & profiling ============================= +#define GUDHI_TC_PROFILING +#define DEBUG_TRACES +// #define GUDHI_TC_VERY_VERBOSE +// #define GUDHI_TC_PERFORM_EXTRA_CHECKS +// #define GUDHI_TC_SHOW_DETAILED_STATS_FOR_INCONSISTENCIES + +// ========================= Strategy ========================================== +#define GUDHI_TC_PERTURB_POSITION +// #define GUDHI_TC_PERTURB_WEIGHT + +// ========================= Parameters ======================================== + +// PCA will use GUDHI_TC_BASE_VALUE_FOR_PCA^intrinsic_dim points +const std::size_t GUDHI_TC_BASE_VALUE_FOR_PCA = 5; + +#endif // TANGENTIAL_COMPLEX_CONFIG_H_ diff --git a/src/Tangential_complex/include/gudhi/Tangential_complex/utilities.h b/src/Tangential_complex/include/gudhi/Tangential_complex/utilities.h new file mode 100644 index 00000000..b2d6d674 --- /dev/null +++ b/src/Tangential_complex/include/gudhi/Tangential_complex/utilities.h @@ -0,0 +1,195 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Clement Jamin + * + * Copyright (C) 2016 INRIA + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +#ifndef TANGENTIAL_COMPLEX_UTILITIES_H_ +#define TANGENTIAL_COMPLEX_UTILITIES_H_ + +#include +#include +#include + +#include + +#include +#include + +#include +#include +#include +#include +#include +#include // for std::sqrt + +namespace Gudhi { +namespace tangential_complex { +namespace internal { + +// Provides copy constructors to std::atomic so that +// it can be used in a vector +template +struct Atomic_wrapper +: public std::atomic { + typedef std::atomic Base; + + Atomic_wrapper() { } + + Atomic_wrapper(const T &t) : Base(t) { } + + Atomic_wrapper(const std::atomic &a) : Base(a.load()) { } + + Atomic_wrapper(const Atomic_wrapper &other) : Base(other.load()) { } + + Atomic_wrapper &operator=(const T &other) { + Base::store(other); + return *this; + } + + Atomic_wrapper &operator=(const std::atomic &other) { + Base::store(other.load()); + return *this; + } + + Atomic_wrapper &operator=(const Atomic_wrapper &other) { + Base::store(other.load()); + return *this; + } +}; + +// Modifies v in-place +template +typename K::Vector_d& normalize_vector(typename K::Vector_d& v, + K const& k) { + v = k.scaled_vector_d_object()( + v, typename K::FT(1) / std::sqrt(k.squared_length_d_object()(v))); + return v; +} + +template +struct Basis { + typedef typename Kernel::FT FT; + typedef typename Kernel::Point_d Point; + typedef typename Kernel::Vector_d Vector; + typedef typename std::vector::const_iterator const_iterator; + + std::size_t m_origin; + std::vector m_vectors; + + std::size_t origin() const { + return m_origin; + } + + void set_origin(std::size_t o) { + m_origin = o; + } + + const_iterator begin() const { + return m_vectors.begin(); + } + + const_iterator end() const { + return m_vectors.end(); + } + + std::size_t size() const { + return m_vectors.size(); + } + + Vector& operator[](const std::size_t i) { + return m_vectors[i]; + } + + const Vector& operator[](const std::size_t i) const { + return m_vectors[i]; + } + + void push_back(const Vector& v) { + m_vectors.push_back(v); + } + + void reserve(const std::size_t s) { + m_vectors.reserve(s); + } + + Basis() { } + + Basis(std::size_t origin) : m_origin(origin) { } + + Basis(std::size_t origin, const std::vector& vectors) + : m_origin(origin), m_vectors(vectors) { } + + int dimension() const { + return static_cast (m_vectors.size()); + } +}; + +// 1st line: number of points +// Then one point per line +template +std::ostream &export_point_set( + Kernel const& k, + Point_range const& points, + std::ostream & os, + const char *coord_separator = " ") { + // Kernel functors + typename Kernel::Construct_cartesian_const_iterator_d ccci = + k.construct_cartesian_const_iterator_d_object(); + + os << points.size() << "\n"; + + typename Point_range::const_iterator it_p = points.begin(); + typename Point_range::const_iterator it_p_end = points.end(); + // For each point p + for (; it_p != it_p_end; ++it_p) { + for (auto it = ccci(*it_p); it != ccci(*it_p, 0); ++it) + os << CGAL::to_double(*it) << coord_separator; + + os << "\n"; + } + + return os; +} + +// Compute all the k-combinations of elements +// Output_iterator::value_type must be +// boost::container::flat_set +template +void combinations(const Elements_container elements, int k, + Output_iterator combinations) { + std::size_t n = elements.size(); + std::vector booleans(n, false); + std::fill(booleans.begin() + n - k, booleans.end(), true); + do { + boost::container::flat_set combination; + typename Elements_container::const_iterator it_elt = elements.begin(); + for (std::size_t i = 
0; i < n; ++i, ++it_elt) { + if (booleans[i]) + combination.insert(*it_elt); + } + *combinations++ = combination; + } while (std::next_permutation(booleans.begin(), booleans.end())); +} + +} // namespace internal +} // namespace tangential_complex +} // namespace Gudhi + +#endif // TANGENTIAL_COMPLEX_UTILITIES_H_ diff --git a/src/Tangential_complex/test/CMakeLists.txt b/src/Tangential_complex/test/CMakeLists.txt new file mode 100644 index 00000000..075028c8 --- /dev/null +++ b/src/Tangential_complex/test/CMakeLists.txt @@ -0,0 +1,31 @@ +cmake_minimum_required(VERSION 2.6) +project(Tangential_complex_tests) + +if (GCOVR_PATH) + # for gcovr to make coverage reports - Corbera Jenkins plugin + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fprofile-arcs -ftest-coverage") +endif() +if (GPROF_PATH) + # for gprof to make coverage reports - Jenkins + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -pg") +endif() + +if(CGAL_FOUND) + if (NOT CGAL_VERSION VERSION_LESS 4.8.0) + if (EIGEN3_FOUND) + message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") + include( ${EIGEN3_USE_FILE} ) + include_directories (BEFORE "../../include") + + add_executable( Tangential_complex_test_TC test_tangential_complex.cpp ) + target_link_libraries(Tangential_complex_test_TC ${CGAL_LIBRARY} ${Boost_DATE_TIME_LIBRARY} ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) + if (TBB_FOUND) + target_link_libraries(Tangential_complex_test_TC ${TBB_LIBRARIES}) + endif() + add_test(Tangential_complex_test_TC ${CMAKE_CURRENT_BINARY_DIR}/Tangential_complex_test_TC + # XML format for Jenkins xUnit plugin + --log_format=XML --log_sink=${CMAKE_SOURCE_DIR}/Tangential_complex_UT.xml --log_level=test_suite --report_level=no) + + endif() + endif () +endif() \ No newline at end of file diff --git a/src/Tangential_complex/test/test_tangential_complex.cpp b/src/Tangential_complex/test/test_tangential_complex.cpp new file mode 100644 index 00000000..f8b0d2fb --- /dev/null +++ b/src/Tangential_complex/test/test_tangential_complex.cpp @@ -0,0 +1,70 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Clement Jamin + * + * Copyright (C) 2016 INRIA + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +#define BOOST_TEST_DYN_LINK +#define BOOST_TEST_MODULE Tangential_complex - test tangential complex +#include + +#include +#include + +#include +#include + +#include +#include + +namespace tc = Gudhi::tangential_complex; + +BOOST_AUTO_TEST_CASE(test_Spatial_tree_data_structure) { + typedef CGAL::Epick_d Kernel; + typedef Kernel::Point_d Point; + typedef tc::Tangential_complex< + Kernel, CGAL::Dynamic_dimension_tag, + CGAL::Parallel_tag> TC; + + const int INTRINSIC_DIM = 2; + const int AMBIENT_DIM = 3; + const int NUM_POINTS = 50; + + Kernel k; + + // Generate points on a 2-sphere + CGAL::Random_points_on_sphere_d generator(AMBIENT_DIM, 3.); + std::vector points; + points.reserve(NUM_POINTS); + for (int i = 0; i < NUM_POINTS; ++i) + points.push_back(*generator++); + + // Compute the TC + TC tc(points, INTRINSIC_DIM, k); + tc.compute_tangential_complex(); + + // Try to fix inconsistencies. Give it 60 seconds to succeed + auto perturb_ret = tc.fix_inconsistencies_using_perturbation(0.01, 60); + + BOOST_CHECK(perturb_ret.success); + + // Export the TC into a Simplex_tree + Gudhi::Simplex_tree<> stree; + tc.create_complex(stree); +} diff --git a/src/cmake/modules/GUDHI_user_version_target.txt b/src/cmake/modules/GUDHI_user_version_target.txt index 0ab36cfc..51553e7e 100644 --- a/src/cmake/modules/GUDHI_user_version_target.txt +++ b/src/cmake/modules/GUDHI_user_version_target.txt @@ -48,7 +48,7 @@ if (NOT CMAKE_VERSION VERSION_LESS 2.8.11) add_custom_command(TARGET user_version PRE_BUILD COMMAND ${CMAKE_COMMAND} -E copy_directory ${CMAKE_SOURCE_DIR}/src/GudhUI ${GUDHI_USER_VERSION_DIR}/GudhUI) - set(GUDHI_MODULES "common;Alpha_complex;Bitmap_cubical_complex;Contraction;Hasse_complex;Persistent_cohomology;Simplex_tree;Skeleton_blocker;Spatial_searching;Subsampling;Witness_complex") + set(GUDHI_MODULES "common;Alpha_complex;Bitmap_cubical_complex;Contraction;Hasse_complex;Persistent_cohomology;Simplex_tree;Skeleton_blocker;Spatial_searching;Subsampling;Tangential_complex;Witness_complex") foreach(GUDHI_MODULE ${GUDHI_MODULES}) # doc files diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h index 21cf6925..fe23c4e7 100644 --- a/src/common/doc/main_page.h +++ b/src/common/doc/main_page.h @@ -92,6 +92,25 @@ User manual: \ref skbl - Reference manual: Gudhi::skeleton_blocker::Skeleton_blocker_complex
+ \subsection TangentialComplexDataStructure Tangential complex + \image html "tc_examples.png" "Tangential complex representation" +
+ Author: Clément Jamin
+ Introduced in: GUDHI 1.4.0
+ Copyright: GPL v3
+
+ A Tangential Delaunay complex is a simplicial complex + designed to reconstruct a \f$ k \f$-dimensional manifold embedded in \f$ d \f$-dimensional Euclidean space. + The input is a point sample coming from an unknown manifold. + The running time depends only linearly on the extrinsic dimension \f$ d \f$ + and exponentially on the intrinsic dimension \f$ k \f$.
+ User manual: \ref tangential_complex - Reference manual: Gudhi::tangential_complex::Tangential_complex +
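The workflow summarized in this documentation entry is the one exercised by the unit test added earlier in this commit (test_tangential_complex.cpp). Because the extraction of this patch dropped the angle-bracketed template arguments, the condensed sketch below restores them on a best-effort basis (CGAL::Epick_d in dynamic dimension and CGAL::Parallel_tag, as in that test); it illustrates the intended API rather than reproducing the test verbatim.

// NOTE: the template arguments and includes are reconstructed; they were
// stripped from the extracted patch and follow the conventions of the unit
// test added in this commit (test_tangential_complex.cpp).
#include <gudhi/Tangential_complex.h>
#include <gudhi/Simplex_tree.h>

#include <CGAL/Epick_d.h>
#include <CGAL/point_generators_d.h>

#include <vector>

namespace tc = Gudhi::tangential_complex;

int main() {
  typedef CGAL::Epick_d<CGAL::Dynamic_dimension_tag> Kernel;
  typedef Kernel::Point_d Point;
  typedef tc::Tangential_complex<Kernel, CGAL::Dynamic_dimension_tag,
                                 CGAL::Parallel_tag> TC;

  const int intrinsic_dim = 2;        // dimension of the unknown manifold
  const int ambient_dim = 3;          // dimension of the embedding space
  const std::size_t num_points = 50;

  // Sample points on a 2-sphere embedded in R^3
  Kernel k;
  CGAL::Random_points_on_sphere_d<Point> generator(ambient_dim, 3.);
  std::vector<Point> points;
  points.reserve(num_points);
  for (std::size_t i = 0; i < num_points; ++i)
    points.push_back(*generator++);

  // Build the tangential complex, then try to resolve inconsistencies by
  // perturbing the points (max perturbation 0.01, 60 second time limit)
  TC tangential_complex(points, intrinsic_dim, k);
  tangential_complex.compute_tangential_complex();
  tangential_complex.fix_inconsistencies_using_perturbation(0.01, 60);

  // Export the result into a Simplex_tree
  Gudhi::Simplex_tree<> stree;
  tangential_complex.create_complex(stree);
  return 0;
}

As in the unit test, the value returned by fix_inconsistencies_using_perturbation carries a success flag that can be checked before exporting the complex.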
\subsection WitnessComplexDataStructure Witness complex \image html "Witness_complex_representation.png" "Witness complex representation" @@ -337,6 +356,8 @@ make \endverbatim * @example Skeleton_blocker/Skeleton_blocker_from_simplices.cpp * @example Skeleton_blocker/Skeleton_blocker_iteration.cpp * @example Skeleton_blocker/Skeleton_blocker_link.cpp + * @example Tangential_complex/example_basic.cpp + * @example Tangential_complex/example_with_perturb.cpp * @example Witness_complex/witness_complex_from_file.cpp * @example Witness_complex/witness_complex_sphere.cpp */ diff --git a/src/common/include/gudhi/Clock.h b/src/common/include/gudhi/Clock.h index 04c6ffb9..77f196ca 100644 --- a/src/common/include/gudhi/Clock.h +++ b/src/common/include/gudhi/Clock.h @@ -27,47 +27,55 @@ #include +namespace Gudhi { + class Clock { public: - Clock() : end_called(false) { - startTime = boost::posix_time::microsec_clock::local_time(); - } - - Clock(const std::string& msg_) { - end_called = false; - begin(); - msg = msg_; - } + // Construct and start the timer + Clock(const std::string& msg_ = std::string()) + : startTime(boost::posix_time::microsec_clock::local_time()), + end_called(false), + msg(msg_) { } + // Restart the timer void begin() const { end_called = false; startTime = boost::posix_time::microsec_clock::local_time(); } + // Stop the timer void end() const { end_called = true; endTime = boost::posix_time::microsec_clock::local_time(); } + std::string message() const { + return msg; + } + + // Print current value to std::cout void print() const { std::cout << *this << std::endl; } friend std::ostream& operator<<(std::ostream& stream, const Clock& clock) { - if (!clock.end_called) - clock.end(); + if (!clock.msg.empty()) + stream << clock.msg << ": "; - if (!clock.end_called) { - stream << "end not called"; - } else { - stream << clock.msg << ":" << clock.num_seconds() << "s"; - } + stream << clock.num_seconds() << "s"; return stream; } + // Get the number of seconds between the timer start and: + // - the last call of end() if it was called + // - or now otherwise. In this case, the timer is not stopped. double num_seconds() const { - if (!end_called) return -1; - return (endTime - startTime).total_milliseconds() / 1000.; + if (!end_called) { + auto end = boost::posix_time::microsec_clock::local_time(); + return (end - startTime).total_milliseconds() / 1000.; + } else { + return (endTime - startTime).total_milliseconds() / 1000.; + } } private: @@ -76,4 +84,6 @@ class Clock { std::string msg; }; -#endif // CLOCK_H_ +} // namespace Gudhi + +#endif // CLOCK_H_ diff --git a/src/common/include/gudhi/Debug_utils.h b/src/common/include/gudhi/Debug_utils.h index 7573a9db..8ed3b7b3 100644 --- a/src/common/include/gudhi/Debug_utils.h +++ b/src/common/include/gudhi/Debug_utils.h @@ -33,8 +33,10 @@ // Could assert in release mode, but cmake sets NDEBUG (for "NO DEBUG") in this mode, means assert does nothing. #ifdef GUDHI_DEBUG #define GUDHI_CHECK(expression, excpt) if ((expression) == 0) throw excpt + #define GUDHI_CHECK_code(CODE) CODE #else #define GUDHI_CHECK(expression, excpt) (void) 0 + #define GUDHI_CHECK_code(CODE) #endif #define PRINT(a) std::cerr << #a << ": " << (a) << " (DISP)" << std::endl diff --git a/src/common/include/gudhi/console_color.h b/src/common/include/gudhi/console_color.h new file mode 100644 index 00000000..c4671da3 --- /dev/null +++ b/src/common/include/gudhi/console_color.h @@ -0,0 +1,97 @@ +/* This file is part of the Gudhi Library. 
The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Clement Jamin + * + * Copyright (C) 2016 INRIA Sophia-Antipolis (France) + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +#ifndef CONSOLE_COLOR_H_ +#define CONSOLE_COLOR_H_ + +#include + +#if defined(WIN32) +#include +#endif + +inline std::ostream& blue(std::ostream &s) { +#if defined(WIN32) + HANDLE hStdout = GetStdHandle(STD_OUTPUT_HANDLE); + SetConsoleTextAttribute(hStdout, + FOREGROUND_BLUE | FOREGROUND_GREEN | FOREGROUND_INTENSITY); +#else + s << "\x1b[0;34m"; +#endif + return s; +} + +inline std::ostream& red(std::ostream &s) { +#if defined(WIN32) + HANDLE hStdout = GetStdHandle(STD_OUTPUT_HANDLE); + SetConsoleTextAttribute(hStdout, FOREGROUND_RED | FOREGROUND_INTENSITY); +#else + s << "\x1b[0;31m"; +#endif + return s; +} + +inline std::ostream& green(std::ostream &s) { +#if defined(WIN32) + HANDLE hStdout = GetStdHandle(STD_OUTPUT_HANDLE); + SetConsoleTextAttribute(hStdout, FOREGROUND_GREEN | FOREGROUND_INTENSITY); +#else + s << "\x1b[0;32m"; +#endif + return s; +} + +inline std::ostream& yellow(std::ostream &s) { +#if defined(WIN32) + HANDLE hStdout = GetStdHandle(STD_OUTPUT_HANDLE); + SetConsoleTextAttribute(hStdout, + FOREGROUND_GREEN | FOREGROUND_RED | FOREGROUND_INTENSITY); +#else + s << "\x1b[0;33m"; +#endif + return s; +} + +inline std::ostream& white(std::ostream &s) { +#if defined(WIN32) + HANDLE hStdout = GetStdHandle(STD_OUTPUT_HANDLE); + SetConsoleTextAttribute(hStdout, + FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE); +#else + s << "\x1b[0;37m"; +#endif + return s; +} + +inline std::ostream& black_on_white(std::ostream &s) { +#if defined(WIN32) + HANDLE hStdout = GetStdHandle(STD_OUTPUT_HANDLE); + SetConsoleTextAttribute(hStdout, + BACKGROUND_RED | BACKGROUND_GREEN | BACKGROUND_BLUE); +#else + s << "\x1b[0;33m"; +#endif + return s; +} + + +#endif // CONSOLE_COLOR_H_ diff --git a/src/common/include/gudhi/random_point_generators.h b/src/common/include/gudhi/random_point_generators.h new file mode 100644 index 00000000..3050b7ea --- /dev/null +++ b/src/common/include/gudhi/random_point_generators.h @@ -0,0 +1,477 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Clement Jamin + * + * Copyright (C) 2016 INRIA + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +#ifndef RANDOM_POINT_GENERATORS_H_ +#define RANDOM_POINT_GENERATORS_H_ + +#include +#include +#include + +#include // for vector<> + +namespace Gudhi { + +/////////////////////////////////////////////////////////////////////////////// +// Note: All these functions have been tested with the CGAL::Epick_d kernel +/////////////////////////////////////////////////////////////////////////////// + +// construct_point: dim 2 + +template +typename Kernel::Point_d construct_point(const Kernel &k, + typename Kernel::FT x1, typename Kernel::FT x2) { + typename Kernel::FT tab[2]; + tab[0] = x1; + tab[1] = x2; + return k.construct_point_d_object()(2, &tab[0], &tab[2]); +} + +// construct_point: dim 3 + +template +typename Kernel::Point_d construct_point(const Kernel &k, + typename Kernel::FT x1, typename Kernel::FT x2, typename Kernel::FT x3) { + typename Kernel::FT tab[3]; + tab[0] = x1; + tab[1] = x2; + tab[2] = x3; + return k.construct_point_d_object()(3, &tab[0], &tab[3]); +} + +// construct_point: dim 4 + +template +typename Kernel::Point_d construct_point(const Kernel &k, + typename Kernel::FT x1, typename Kernel::FT x2, typename Kernel::FT x3, + typename Kernel::FT x4) { + typename Kernel::FT tab[4]; + tab[0] = x1; + tab[1] = x2; + tab[2] = x3; + tab[3] = x4; + return k.construct_point_d_object()(4, &tab[0], &tab[4]); +} + +// construct_point: dim 5 + +template +typename Kernel::Point_d construct_point(const Kernel &k, + typename Kernel::FT x1, typename Kernel::FT x2, typename Kernel::FT x3, + typename Kernel::FT x4, typename Kernel::FT x5) { + typename Kernel::FT tab[5]; + tab[0] = x1; + tab[1] = x2; + tab[2] = x3; + tab[3] = x4; + tab[4] = x5; + return k.construct_point_d_object()(5, &tab[0], &tab[5]); +} + +// construct_point: dim 6 + +template +typename Kernel::Point_d construct_point(const Kernel &k, + typename Kernel::FT x1, typename Kernel::FT x2, typename Kernel::FT x3, + typename Kernel::FT x4, typename Kernel::FT x5, typename Kernel::FT x6) { + typename Kernel::FT tab[6]; + tab[0] = x1; + tab[1] = x2; + tab[2] = x3; + tab[3] = x4; + tab[4] = x5; + tab[5] = x6; + return k.construct_point_d_object()(6, &tab[0], &tab[6]); +} + +template +std::vector generate_points_on_plane(std::size_t num_points, int intrinsic_dim, + int ambient_dim, + double coord_min = -5., double coord_max = 5.) 
{ + typedef typename Kernel::Point_d Point; + typedef typename Kernel::FT FT; + Kernel k; + CGAL::Random rng; + std::vector points; + points.reserve(num_points); + for (std::size_t i = 0; i < num_points;) { + std::vector pt(ambient_dim, FT(0)); + for (int j = 0; j < intrinsic_dim; ++j) + pt[j] = rng.get_double(coord_min, coord_max); + + Point p = k.construct_point_d_object()(ambient_dim, pt.begin(), pt.end()); + points.push_back(p); + ++i; + } + return points; +} + +template +std::vector generate_points_on_moment_curve(std::size_t num_points, int dim, + typename Kernel::FT min_x, + typename Kernel::FT max_x) { + typedef typename Kernel::Point_d Point; + typedef typename Kernel::FT FT; + Kernel k; + CGAL::Random rng; + std::vector points; + points.reserve(num_points); + for (std::size_t i = 0; i < num_points;) { + FT x = rng.get_double(min_x, max_x); + std::vector coords; + coords.reserve(dim); + for (int p = 1; p <= dim; ++p) + coords.push_back(std::pow(CGAL::to_double(x), p)); + Point p = k.construct_point_d_object()( + dim, coords.begin(), coords.end()); + points.push_back(p); + ++i; + } + return points; +} + + +// R = big radius, r = small radius +template +std::vector generate_points_on_torus_3D(std::size_t num_points, double R, double r, + bool uniform = false) { + typedef typename Kernel::Point_d Point; + typedef typename Kernel::FT FT; + Kernel k; + CGAL::Random rng; + + // if uniform + std::size_t num_lines = (std::size_t)sqrt(num_points); + + std::vector points; + points.reserve(num_points); + for (std::size_t i = 0; i < num_points;) { + FT u, v; + if (uniform) { + std::size_t k1 = i / num_lines; + std::size_t k2 = i % num_lines; + u = 6.2832 * k1 / num_lines; + v = 6.2832 * k2 / num_lines; + } else { + u = rng.get_double(0, 6.2832); + v = rng.get_double(0, 6.2832); + } + Point p = construct_point(k, + (R + r * std::cos(u)) * std::cos(v), + (R + r * std::cos(u)) * std::sin(v), + r * std::sin(u)); + points.push_back(p); + ++i; + } + return points; +} + +// "Private" function used by generate_points_on_torus_d +template +static void generate_uniform_points_on_torus_d(const Kernel &k, int dim, std::size_t num_slices, + OutputIterator out, + double radius_noise_percentage = 0., + std::vector current_point = std::vector()) { + CGAL::Random rng; + if (current_point.size() == 2 * dim) { + *out++ = k.construct_point_d_object()( + static_cast (current_point.size()), + current_point.begin(), current_point.end()); + } else { + for (std::size_t slice_idx = 0; slice_idx < num_slices; ++slice_idx) { + double radius_noise_ratio = 1.; + if (radius_noise_percentage > 0.) { + radius_noise_ratio = rng.get_double( + (100. - radius_noise_percentage) / 100., + (100. + radius_noise_percentage) / 100.); + } + std::vector cp2 = current_point; + double alpha = 6.2832 * slice_idx / num_slices; + cp2.push_back(radius_noise_ratio * std::cos(alpha)); + cp2.push_back(radius_noise_ratio * std::sin(alpha)); + generate_uniform_points_on_torus_d( + k, dim, num_slices, out, radius_noise_percentage, cp2); + } + } +} + +template +std::vector generate_points_on_torus_d(std::size_t num_points, int dim, bool uniform = false, + double radius_noise_percentage = 0.) { + typedef typename Kernel::Point_d Point; + typedef typename Kernel::FT FT; + Kernel k; + CGAL::Random rng; + + std::vector points; + points.reserve(num_points); + if (uniform) { + std::size_t num_slices = (std::size_t)std::pow(num_points, 1. 
/ dim); + generate_uniform_points_on_torus_d( + k, dim, num_slices, std::back_inserter(points), radius_noise_percentage); + } else { + for (std::size_t i = 0; i < num_points;) { + double radius_noise_ratio = 1.; + if (radius_noise_percentage > 0.) { + radius_noise_ratio = rng.get_double( + (100. - radius_noise_percentage) / 100., + (100. + radius_noise_percentage) / 100.); + } + std::vector pt; + pt.reserve(dim * 2); + for (int curdim = 0; curdim < dim; ++curdim) { + FT alpha = rng.get_double(0, 6.2832); + pt.push_back(radius_noise_ratio * std::cos(alpha)); + pt.push_back(radius_noise_ratio * std::sin(alpha)); + } + + Point p = k.construct_point_d_object()(pt.begin(), pt.end()); + points.push_back(p); + ++i; + } + } + return points; +} + +template +std::vector generate_points_on_sphere_d(std::size_t num_points, int dim, double radius, + double radius_noise_percentage = 0.) { + typedef typename Kernel::Point_d Point; + Kernel k; + CGAL::Random rng; + CGAL::Random_points_on_sphere_d generator(dim, radius); + std::vector points; + points.reserve(num_points); + for (std::size_t i = 0; i < num_points;) { + Point p = *generator++; + if (radius_noise_percentage > 0.) { + double radius_noise_ratio = rng.get_double( + (100. - radius_noise_percentage) / 100., + (100. + radius_noise_percentage) / 100.); + + typename Kernel::Point_to_vector_d k_pt_to_vec = + k.point_to_vector_d_object(); + typename Kernel::Vector_to_point_d k_vec_to_pt = + k.vector_to_point_d_object(); + typename Kernel::Scaled_vector_d k_scaled_vec = + k.scaled_vector_d_object(); + p = k_vec_to_pt(k_scaled_vec(k_pt_to_vec(p), radius_noise_ratio)); + } + points.push_back(p); + ++i; + } + return points; +} + +template +std::vector generate_points_on_two_spheres_d(std::size_t num_points, int dim, double radius, + double distance_between_centers, + double radius_noise_percentage = 0.) { + typedef typename Kernel::FT FT; + typedef typename Kernel::Point_d Point; + typedef typename Kernel::Vector_d Vector; + Kernel k; + CGAL::Random rng; + CGAL::Random_points_on_sphere_d generator(dim, radius); + std::vector points; + points.reserve(num_points); + + std::vector t(dim, FT(0)); + t[0] = distance_between_centers; + Vector c1_to_c2(t.begin(), t.end()); + + for (std::size_t i = 0; i < num_points;) { + Point p = *generator++; + if (radius_noise_percentage > 0.) { + double radius_noise_ratio = rng.get_double( + (100. - radius_noise_percentage) / 100., + (100. 
+ radius_noise_percentage) / 100.); + + typename Kernel::Point_to_vector_d k_pt_to_vec = + k.point_to_vector_d_object(); + typename Kernel::Vector_to_point_d k_vec_to_pt = + k.vector_to_point_d_object(); + typename Kernel::Scaled_vector_d k_scaled_vec = + k.scaled_vector_d_object(); + p = k_vec_to_pt(k_scaled_vec(k_pt_to_vec(p), radius_noise_ratio)); + } + + typename Kernel::Translated_point_d k_transl = + k.translated_point_d_object(); + Point p2 = k_transl(p, c1_to_c2); + points.push_back(p); + points.push_back(p2); + i += 2; + } + return points; +} + +// Product of a 3-sphere and a circle => d = 3 / D = 5 + +template +std::vector generate_points_on_3sphere_and_circle(std::size_t num_points, + double sphere_radius) { + typedef typename Kernel::FT FT; + typedef typename Kernel::Point_d Point; + Kernel k; + CGAL::Random rng; + CGAL::Random_points_on_sphere_d generator(3, sphere_radius); + std::vector points; + points.reserve(num_points); + + typename Kernel::Translated_point_d k_transl = + k.translated_point_d_object(); + typename Kernel::Compute_coordinate_d k_coord = + k.compute_coordinate_d_object(); + for (std::size_t i = 0; i < num_points;) { + Point p_sphere = *generator++; // First 3 coords + + FT alpha = rng.get_double(0, 6.2832); + std::vector pt(5); + pt[0] = k_coord(p_sphere, 0); + pt[1] = k_coord(p_sphere, 1); + pt[2] = k_coord(p_sphere, 2); + pt[3] = std::cos(alpha); + pt[4] = std::sin(alpha); + Point p(pt.begin(), pt.end()); + points.push_back(p); + ++i; + } + return points; +} + +// a = big radius, b = small radius +template +std::vector generate_points_on_klein_bottle_3D(std::size_t num_points, double a, double b, + bool uniform = false) { + typedef typename Kernel::Point_d Point; + typedef typename Kernel::FT FT; + Kernel k; + CGAL::Random rng; + + // if uniform + std::size_t num_lines = (std::size_t)sqrt(num_points); + + std::vector points; + points.reserve(num_points); + for (std::size_t i = 0; i < num_points;) { + FT u, v; + if (uniform) { + std::size_t k1 = i / num_lines; + std::size_t k2 = i % num_lines; + u = 6.2832 * k1 / num_lines; + v = 6.2832 * k2 / num_lines; + } else { + u = rng.get_double(0, 6.2832); + v = rng.get_double(0, 6.2832); + } + double tmp = cos(u / 2) * sin(v) - sin(u / 2) * sin(2. * v); + Point p = construct_point(k, + (a + b * tmp) * cos(u), + (a + b * tmp) * sin(u), + b * (sin(u / 2) * sin(v) + cos(u / 2) * sin(2. * v))); + points.push_back(p); + ++i; + } + return points; +} + +// a = big radius, b = small radius +template +std::vector generate_points_on_klein_bottle_4D(std::size_t num_points, double a, double b, + double noise = 0., bool uniform = false) { + typedef typename Kernel::Point_d Point; + typedef typename Kernel::FT FT; + Kernel k; + CGAL::Random rng; + + // if uniform + std::size_t num_lines = (std::size_t)sqrt(num_points); + + std::vector points; + points.reserve(num_points); + for (std::size_t i = 0; i < num_points;) { + FT u, v; + if (uniform) { + std::size_t k1 = i / num_lines; + std::size_t k2 = i % num_lines; + u = 6.2832 * k1 / num_lines; + v = 6.2832 * k2 / num_lines; + } else { + u = rng.get_double(0, 6.2832); + v = rng.get_double(0, 6.2832); + } + Point p = construct_point(k, + (a + b * cos(v)) * cos(u) + (noise == 0. ? 0. : rng.get_double(0, noise)), + (a + b * cos(v)) * sin(u) + (noise == 0. ? 0. : rng.get_double(0, noise)), + b * sin(v) * cos(u / 2) + (noise == 0. ? 0. : rng.get_double(0, noise)), + b * sin(v) * sin(u / 2) + (noise == 0. ? 0. 
: rng.get_double(0, noise))); + points.push_back(p); + ++i; + } + return points; +} + + +// a = big radius, b = small radius + +template +std::vector +generate_points_on_klein_bottle_variant_5D( + std::size_t num_points, double a, double b, bool uniform = false) { + typedef typename Kernel::Point_d Point; + typedef typename Kernel::FT FT; + Kernel k; + CGAL::Random rng; + + // if uniform + std::size_t num_lines = (std::size_t)sqrt(num_points); + + std::vector points; + points.reserve(num_points); + for (std::size_t i = 0; i < num_points;) { + FT u, v; + if (uniform) { + std::size_t k1 = i / num_lines; + std::size_t k2 = i % num_lines; + u = 6.2832 * k1 / num_lines; + v = 6.2832 * k2 / num_lines; + } else { + u = rng.get_double(0, 6.2832); + v = rng.get_double(0, 6.2832); + } + FT x1 = (a + b * cos(v)) * cos(u); + FT x2 = (a + b * cos(v)) * sin(u); + FT x3 = b * sin(v) * cos(u / 2); + FT x4 = b * sin(v) * sin(u / 2); + FT x5 = x1 + x2 + x3 + x4; + + Point p = construct_point(k, x1, x2, x3, x4, x5); + points.push_back(p); + ++i; + } + return points; +} + +} // namespace Gudhi + +#endif // RANDOM_POINT_GENERATORS_H_ diff --git a/src/common/include/gudhi_patches/CGAL/Convex_hull.h b/src/common/include/gudhi_patches/CGAL/Convex_hull.h new file mode 100644 index 00000000..a8f91bf8 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/Convex_hull.h @@ -0,0 +1,56 @@ +// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France). +// All rights reserved. +// +// This file is part of CGAL (www.cgal.org). +// You can redistribute it and/or modify it under the terms of the GNU +// General Public License as published by the Free Software Foundation, +// either version 3 of the License, or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Samuel Hornus + +/* RANDOM DESIGN IDEAS: +- Use a policy tag to choose for incremental with inserts only or + incremental with removals and inserts. + In the first case: use Triangulation for storage. + In the second case: use Delaunay ! + In this second case, we must keeps the points that are inserted in the hull, + as they may become part of the boundary later on, when some points are removed. +- Constructor with range argument uses quickhull. +*/ + +#ifndef CGAL_CONVEX_HULL_H +#define CGAL_CONVEX_HULL_H + +namespace CGAL { + +template < class CHTraits, class TDS_ = Default > +class Convex_hull +{ + typedef typename Maximal_dimension::type + Maximal_dimension_; + typedef typename Default::Get, + Triangulation_full_cell > + >::type TDS; + typedef Convex_hull Self; + + typedef typename CHTraits::Coaffine_orientation_d + Coaffine_orientation_d; + typedef typename CHTraits::Orientation_d Orientation_d; + +public: +}; + +} //namespace CGAL + +#endif // CGAL_CONVEX_HULL_H diff --git a/src/common/include/gudhi_patches/CGAL/Delaunay_triangulation.h b/src/common/include/gudhi_patches/CGAL/Delaunay_triangulation.h new file mode 100644 index 00000000..071cd184 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/Delaunay_triangulation.h @@ -0,0 +1,933 @@ +// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France). +// All rights reserved. +// +// This file is part of CGAL (www.cgal.org). 
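The generators above share one calling convention: the kernel is the only template parameter, the number of points comes first, and the optional uniform flag replaces random angles by a regular grid of slices. A minimal usage sketch, assuming the header is installed as <gudhi/random_point_generators.h> and using the CGAL::Epick_d kernel that appears further down in this patch:

#include <gudhi/random_point_generators.h>
#include <CGAL/Epick_d.h>
#include <iostream>
#include <vector>

int main() {
  typedef CGAL::Epick_d< CGAL::Dimension_tag<3> > K;
  typedef K::Point_d Point;

  // 100 points on a torus with big radius R = 2 and small radius r = 1,
  // sampled on a regular 10 x 10 (u, v) grid because uniform = true.
  std::vector<Point> torus = Gudhi::generate_points_on_torus_3D<K>(100, 2.0, 1.0, true);

  // 100 points on a sphere of radius 3 embedded in R^3 (no radial noise).
  std::vector<Point> sphere = Gudhi::generate_points_on_sphere_d<K>(100, 3, 3.0);

  std::cout << torus.size() << " torus points, " << sphere.size() << " sphere points\n";
  return 0;
}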
+// You can redistribute it and/or modify it under the terms of the GNU +// General Public License as published by the Free Software Foundation, +// either version 3 of the License, or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Samuel Hornus + +#ifndef CGAL_DELAUNAY_COMPLEX_H +#define CGAL_DELAUNAY_COMPLEX_H + +#include +#include +#include +#include + +#include + +#include + +namespace CGAL { + +template< typename DCTraits, typename _TDS = Default > +class Delaunay_triangulation +: public Triangulation, + Triangulation_full_cell > + >::type > +{ + typedef typename DCTraits::Dimension Maximal_dimension_; + typedef typename Default::Get<_TDS, Triangulation_data_structure< + Maximal_dimension_, + Triangulation_vertex, + Triangulation_full_cell > + >::type TDS; + typedef Triangulation Base; + typedef Delaunay_triangulation Self; + + typedef typename DCTraits::Side_of_oriented_sphere_d + Side_of_oriented_sphere_d; + typedef typename DCTraits::Orientation_d Orientation_d; + +public: // PUBLIC NESTED TYPES + + typedef DCTraits Geom_traits; + typedef typename Base::Triangulation_ds Triangulation_ds; + + typedef typename Base::Vertex Vertex; + typedef typename Base::Full_cell Full_cell; + typedef typename Base::Facet Facet; + typedef typename Base::Face Face; + + typedef typename Base::Maximal_dimension Maximal_dimension; + typedef typename DCTraits::Point_d Point; + typedef typename DCTraits::Point_d Point_d; + + typedef typename Base::Vertex_handle Vertex_handle; + typedef typename Base::Vertex_iterator Vertex_iterator; + typedef typename Base::Vertex_const_handle Vertex_const_handle; + typedef typename Base::Vertex_const_iterator Vertex_const_iterator; + + typedef typename Base::Full_cell_handle Full_cell_handle; + typedef typename Base::Full_cell_iterator Full_cell_iterator; + typedef typename Base::Full_cell_const_handle Full_cell_const_handle; + typedef typename Base::Full_cell_const_iterator Full_cell_const_iterator; + typedef typename Base::Finite_full_cell_const_iterator + Finite_full_cell_const_iterator; + + typedef typename Base::size_type size_type; + typedef typename Base::difference_type difference_type; + + typedef typename Base::Locate_type Locate_type; + + //Tag to distinguish triangulations with weighted_points + typedef Tag_false Weighted_tag; + +protected: // DATA MEMBERS + + +public: + + using typename Base::Rotor; + using Base::maximal_dimension; + using Base::are_incident_full_cells_valid; + using Base::coaffine_orientation_predicate; + using Base::reset_flat_orientation; + using Base::current_dimension; + //using Base::star; + //using Base::incident_full_cells; + using Base::geom_traits; + using Base::index_of_covertex; + //using Base::index_of_second_covertex; + using Base::infinite_vertex; + using Base::rotate_rotor; + using Base::insert_in_hole; + using Base::insert_outside_convex_hull_1; + using Base::is_infinite; + using Base::locate; + using Base::points_begin; + using Base::set_neighbors; + using Base::new_full_cell; + using Base::number_of_vertices; + using Base::orientation; + using Base::tds; + using Base::reorient_full_cells; + using Base::full_cell; + using Base::full_cells_begin; + using Base::full_cells_end; + using 
Base::finite_full_cells_begin; + using Base::finite_full_cells_end; + using Base::vertices_begin; + using Base::vertices_end; + // using Base:: + +private: + //*** Side_of_oriented_subsphere_d *** + typedef typename Base::Flat_orientation_d Flat_orientation_d; + typedef typename Base::Construct_flat_orientation_d Construct_flat_orientation_d; + typedef typename DCTraits::In_flat_side_of_oriented_sphere_d In_flat_side_of_oriented_sphere_d; + // Wrapper + struct Side_of_oriented_subsphere_d + { + boost::optional* fop; + Construct_flat_orientation_d cfo; + In_flat_side_of_oriented_sphere_d ifsoos; + + Side_of_oriented_subsphere_d( + boost::optional& x, + Construct_flat_orientation_d const&y, + In_flat_side_of_oriented_sphere_d const&z) + : fop(&x), cfo(y), ifsoos(z) {} + + template + CGAL::Orientation operator()(Iter a, Iter b, const Point & p)const + { + if(!*fop) + *fop=cfo(a,b); + return ifsoos(fop->get(),a,b,p); + } + }; +public: + +// - - - - - - - - - - - - - - - - - - - - - - - - - - CREATION / CONSTRUCTORS + + Delaunay_triangulation(int dim, const Geom_traits &k = Geom_traits()) + : Base(dim, k) + { + } + + // With this constructor, + // the user can specify a Flat_orientation_d object to be used for + // orienting simplices of a specific dimension + // (= preset_flat_orientation_.first) + // It it used by the dark triangulations created by DT::remove + Delaunay_triangulation( + int dim, + const std::pair &preset_flat_orientation, + const Geom_traits &k = Geom_traits()) + : Base(dim, preset_flat_orientation, k) + { + } + + ~Delaunay_triangulation() {} + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ACCESS + + // Not Documented + Side_of_oriented_subsphere_d side_of_oriented_subsphere_predicate() const + { + return Side_of_oriented_subsphere_d ( + flat_orientation_, + geom_traits().construct_flat_orientation_d_object(), + geom_traits().in_flat_side_of_oriented_sphere_d_object() + ); + } + + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - REMOVALS + + Full_cell_handle remove(Vertex_handle); + Full_cell_handle remove(const Point & p, Full_cell_handle hint = Full_cell_handle()) + { + Locate_type lt; + Face f(maximal_dimension()); + Facet ft; + Full_cell_handle s = locate(p, lt, f, ft, hint); + if( Base::ON_VERTEX == lt ) + { + return remove(s->vertex(f.index(0))); + } + return Full_cell_handle(); + } + + template< typename ForwardIterator > + void remove(ForwardIterator start, ForwardIterator end) + { + while( start != end ) + remove(*start++); + } + + // Not documented + void remove_decrease_dimension(Vertex_handle); + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - INSERTIONS + + template< typename ForwardIterator > + size_type insert(ForwardIterator start, ForwardIterator end) + { + size_type n = number_of_vertices(); + std::vector points(start, end); + spatial_sort(points.begin(), points.end(), geom_traits()); + Full_cell_handle hint; + for( typename std::vector::const_iterator p = points.begin(); p != points.end(); ++p ) + { + hint = insert(*p, hint)->full_cell(); + } + return number_of_vertices() - n; + } + Vertex_handle insert(const Point &, Locate_type, const Face &, const Facet &, Full_cell_handle); + Vertex_handle insert(const Point & p, Full_cell_handle start = Full_cell_handle()) + { + Locate_type lt; + Face f(maximal_dimension()); + Facet ft; + Full_cell_handle s = locate(p, lt, f, ft, start); + return insert(p, lt, f, ft, s); + } + Vertex_handle insert(const Point & p, Vertex_handle hint) + { + 
CGAL_assertion( Vertex_handle() != hint ); + return insert(p, hint->full_cell()); + } + Vertex_handle insert_outside_affine_hull(const Point &); + Vertex_handle insert_in_conflicting_cell(const Point &, Full_cell_handle); + +// - - - - - - - - - - - - - - - - - - - - - - - - - GATHERING CONFLICTING SIMPLICES + + bool is_in_conflict(const Point &, Full_cell_const_handle) const; + template< class OrientationPredicate > + Oriented_side perturbed_side_of_positive_sphere(const Point &, + Full_cell_const_handle, const OrientationPredicate &) const; + + template< typename OutputIterator > + Facet compute_conflict_zone(const Point &, Full_cell_handle, OutputIterator) const; + + template < typename OrientationPredicate, typename SideOfOrientedSpherePredicate > + class Conflict_predicate + { + const Self & dc_; + const Point & p_; + OrientationPredicate ori_; + SideOfOrientedSpherePredicate side_of_s_; + int cur_dim_; + public: + Conflict_predicate( + const Self & dc, + const Point & p, + const OrientationPredicate & ori, + const SideOfOrientedSpherePredicate & side) + : dc_(dc), p_(p), ori_(ori), side_of_s_(side), cur_dim_(dc.current_dimension()) {} + + inline + bool operator()(Full_cell_const_handle s) const + { + bool ok; + if( ! dc_.is_infinite(s) ) + { + Oriented_side side = side_of_s_(dc_.points_begin(s), dc_.points_begin(s) + cur_dim_ + 1, p_); + if( ON_POSITIVE_SIDE == side ) + ok = true; + else if( ON_NEGATIVE_SIDE == side ) + ok = false; + else + ok = ON_POSITIVE_SIDE == dc_.perturbed_side_of_positive_sphere(p_, s, ori_); + } + else + { + typedef typename Full_cell::Vertex_handle_const_iterator VHCI; + typedef Substitute_point_in_vertex_iterator F; + F spivi(dc_.infinite_vertex(), &p_); + + Orientation o = ori_( + boost::make_transform_iterator(s->vertices_begin(), spivi), + boost::make_transform_iterator(s->vertices_begin() + cur_dim_ + 1, + spivi)); + + if( POSITIVE == o ) + ok = true; + else if( o == NEGATIVE ) + ok = false; + else + ok = (*this)(s->neighbor( s->index( dc_.infinite_vertex() ) )); + } + return ok; + } + }; + + template < typename ConflictPredicate > + class Conflict_traversal_predicate + { + const Self & dc_; + const ConflictPredicate & pred_; + public: + Conflict_traversal_predicate(const Self & dc, const ConflictPredicate & pred) + : dc_(dc), pred_(pred) + {} + inline + bool operator()(const Facet & f) const + { + return pred_(dc_.full_cell(f)->neighbor(dc_.index_of_covertex(f))); + } + }; + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - VALIDITY + + bool is_valid(bool verbose = false, int level = 0) const; + +private: + // Some internal types to shorten notation + typedef typename Base::Coaffine_orientation_d Coaffine_orientation_d; + using Base::flat_orientation_; + typedef Conflict_predicate + Conflict_pred_in_subspace; + typedef Conflict_predicate + Conflict_pred_in_fullspace; + typedef Conflict_traversal_predicate + Conflict_traversal_pred_in_subspace; + typedef Conflict_traversal_predicate + Conflict_traversal_pred_in_fullspace; +}; + +// = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = +// FUNCTIONS THAT ARE MEMBER METHODS: + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - REMOVALS + +template< typename DCTraits, typename TDS > +typename Delaunay_triangulation::Full_cell_handle +Delaunay_triangulation +::remove( Vertex_handle v ) +{ + CGAL_precondition( ! 
is_infinite(v) ); + CGAL_expensive_precondition( is_vertex(v) ); + + // THE CASE cur_dim == 0 + if( 0 == current_dimension() ) + { + remove_decrease_dimension(v); + return Full_cell_handle(); + } + else if( 1 == current_dimension() ) + { // THE CASE cur_dim == 1 + if( 2 == number_of_vertices() ) + { + remove_decrease_dimension(v); + return Full_cell_handle(); + } + Full_cell_handle left = v->full_cell(); + if( 0 == left->index(v) ) + left = left->neighbor(1); + CGAL_assertion( 1 == left->index(v) ); + Full_cell_handle right = left->neighbor(0); + + tds().associate_vertex_with_full_cell(left, 1, right->vertex(1)); + set_neighbors(left, 0, right->neighbor(0), right->mirror_index(0)); + + tds().delete_vertex(v); + tds().delete_full_cell(right); + return left; + } + + // THE CASE cur_dim >= 2 + // Gather the finite vertices sharing an edge with |v| + typedef typename Base::template Full_cell_set Simplices; + Simplices simps; + std::back_insert_iterator out(simps); + tds().incident_full_cells(v, out); + typedef std::set Vertex_set; + Vertex_set verts; + Vertex_handle vh; + for( typename Simplices::iterator it = simps.begin(); it != simps.end(); ++it ) + for( int i = 0; i <= current_dimension(); ++i ) + { + vh = (*it)->vertex(i); + if( is_infinite(vh) ) + continue; + if( vh == v ) + continue; + verts.insert(vh); + } + + // After gathering finite neighboring vertices, create their Dark Delaunay triangulation + typedef Triangulation_vertex Dark_vertex_base; + typedef Triangulation_full_cell > Dark_full_cell_base; + typedef Triangulation_data_structure Dark_tds; + typedef Delaunay_triangulation Dark_triangulation; + typedef typename Dark_triangulation::Face Dark_face; + typedef typename Dark_triangulation::Facet Dark_facet; + typedef typename Dark_triangulation::Vertex_handle Dark_v_handle; + typedef typename Dark_triangulation::Full_cell_handle Dark_s_handle; + + // If flat_orientation_ is defined, we give it the Dark triangulation + // so that the orientation it uses for "current_dimension()"-simplices is + // coherent with the global triangulation + Dark_triangulation dark_side( + maximal_dimension(), + flat_orientation_ ? + std::pair(current_dimension(), flat_orientation_.get_ptr()) + : std::pair((std::numeric_limits::max)(), (Flat_orientation_d*) NULL) ); + + Dark_s_handle dark_s; + Dark_v_handle dark_v; + typedef std::map Vertex_map; + Vertex_map light_to_dark; + typename Vertex_set::iterator vit = verts.begin(); + while( vit != verts.end() ) + { + dark_v = dark_side.insert((*vit)->point(), dark_s); + dark_s = dark_v->full_cell(); + dark_v->data() = *vit; + light_to_dark[*vit] = dark_v; + ++vit; + } + + if( dark_side.current_dimension() != current_dimension() ) + { + CGAL_assertion( dark_side.current_dimension() + 1 == current_dimension() ); + // Here, the finite neighbors of |v| span a affine subspace of + // dimension one less than the current dimension. Two cases are possible: + if( (size_type)(verts.size() + 1) == number_of_vertices() ) + { + remove_decrease_dimension(v); + return Full_cell_handle(); + } + else + { // |v| is strictly outside the convex hull of the rest of the points. This is an + // easy case: first, modify the finite full_cells, then, delete the infinite ones. + // We don't even need the Dark triangulation. 
+ Simplices infinite_simps; + { + Simplices finite_simps; + for( typename Simplices::iterator it = simps.begin(); it != simps.end(); ++it ) + if( is_infinite(*it) ) + infinite_simps.push_back(*it); + else + finite_simps.push_back(*it); + simps.swap(finite_simps); + } // now, simps only contains finite simplices + // First, modify the finite full_cells: + for( typename Simplices::iterator it = simps.begin(); it != simps.end(); ++it ) + { + int v_idx = (*it)->index(v); + tds().associate_vertex_with_full_cell(*it, v_idx, infinite_vertex()); + } + // Make the handles to infinite full cells searchable + infinite_simps.make_searchable(); + // Then, modify the neighboring relation + for( typename Simplices::iterator it = simps.begin(); it != simps.end(); ++it ) + { + for( int i = 0; i <= current_dimension(); ++i ) + { + if (is_infinite((*it)->vertex(i))) + continue; + (*it)->vertex(i)->set_full_cell(*it); + Full_cell_handle n = (*it)->neighbor(i); + // Was |n| a finite full cell prior to removing |v| ? + if( ! infinite_simps.contains(n) ) + continue; + int n_idx = n->index(v); + set_neighbors(*it, i, n->neighbor(n_idx), n->neighbor(n_idx)->index(n)); + } + } + Full_cell_handle ret_s; + // Then, we delete the infinite full_cells + for( typename Simplices::iterator it = infinite_simps.begin(); it != infinite_simps.end(); ++it ) + tds().delete_full_cell(*it); + tds().delete_vertex(v); + return simps.front(); + } + } + else // From here on, dark_side.current_dimension() == current_dimension() + { + dark_side.infinite_vertex()->data() = infinite_vertex(); + light_to_dark[infinite_vertex()] = dark_side.infinite_vertex(); + } + + // Now, compute the conflict zone of v->point() in + // the dark side. This is precisely the set of full_cells + // that we have to glue back into the light side. + Dark_face dark_f(dark_side.maximal_dimension()); + Dark_facet dark_ft; + typename Dark_triangulation::Locate_type lt; + dark_s = dark_side.locate(v->point(), lt, dark_f, dark_ft); + CGAL_assertion( lt != Dark_triangulation::ON_VERTEX + && lt != Dark_triangulation::OUTSIDE_AFFINE_HULL ); + + // |ret_s| is the full_cell that we return + Dark_s_handle dark_ret_s = dark_s; + Full_cell_handle ret_s; + + typedef typename Base::template Full_cell_set Dark_full_cells; + Dark_full_cells conflict_zone; + std::back_insert_iterator dark_out(conflict_zone); + + dark_ft = dark_side.compute_conflict_zone(v->point(), dark_s, dark_out); + // Make the dark simplices in the conflict zone searchable + conflict_zone.make_searchable(); + + // THE FOLLOWING SHOULD MAYBE GO IN TDS. + // Here is the plan: + // 1. Pick any Facet from boundary of the light zone + // 2. Find corresponding Facet on boundary of dark zone + // 3. stitch. + + // 1. Build a facet on the boudary of the light zone: + Full_cell_handle light_s = *simps.begin(); + Facet light_ft(light_s, light_s->index(v)); + + // 2. 
Find corresponding Dark_facet on boundary of the dark zone + Dark_full_cells dark_incident_s; + for( int i = 0; i <= current_dimension(); ++i ) + { + if( index_of_covertex(light_ft) == i ) + continue; + Dark_v_handle dark_v = light_to_dark[full_cell(light_ft)->vertex(i)]; + dark_incident_s.clear(); + dark_out = std::back_inserter(dark_incident_s); + dark_side.tds().incident_full_cells(dark_v, dark_out); + for( typename Dark_full_cells::iterator it = dark_incident_s.begin(); it != dark_incident_s.end(); ++it ) + { + (*it)->data().count_ += 1; + } + } + + for( typename Dark_full_cells::iterator it = dark_incident_s.begin(); it != dark_incident_s.end(); ++it ) + { + if( current_dimension() != (*it)->data().count_ ) + continue; + if( ! conflict_zone.contains(*it) ) + continue; + // We found a full_cell incident to the dark facet corresponding to the light facet |light_ft| + int ft_idx = 0; + while( light_s->has_vertex( (*it)->vertex(ft_idx)->data() ) ) + ++ft_idx; + dark_ft = Dark_facet(*it, ft_idx); + break; + } + // Pre-3. Now, we are ready to traverse both boundary and do the stiching. + + // But first, we create the new full_cells in the light triangulation, + // with as much adjacency information as possible. + + // Create new full_cells with vertices + for( typename Dark_full_cells::iterator it = conflict_zone.begin(); it != conflict_zone.end(); ++it ) + { + Full_cell_handle new_s = new_full_cell(); + (*it)->data().light_copy_ = new_s; + for( int i = 0; i <= current_dimension(); ++i ) + tds().associate_vertex_with_full_cell(new_s, i, (*it)->vertex(i)->data()); + if( dark_ret_s == *it ) + ret_s = new_s; + } + + // Setup adjacencies inside the hole + for( typename Dark_full_cells::iterator it = conflict_zone.begin(); it != conflict_zone.end(); ++it ) + { + Full_cell_handle new_s = (*it)->data().light_copy_; + for( int i = 0; i <= current_dimension(); ++i ) + if( conflict_zone.contains((*it)->neighbor(i)) ) + tds().set_neighbors(new_s, i, (*it)->neighbor(i)->data().light_copy_, (*it)->mirror_index(i)); + } + + // 3. Stitch + simps.make_searchable(); + typedef std::queue > Queue; + Queue q; + q.push(std::make_pair(light_ft, dark_ft)); + dark_s = dark_side.full_cell(dark_ft); + int dark_i = dark_side.index_of_covertex(dark_ft); + // mark dark_ft as visited: + // TODO try by marking with Dark_v_handle (vertex) + dark_s->neighbor(dark_i)->set_neighbor(dark_s->mirror_index(dark_i), Dark_s_handle()); + while( ! 
q.empty() ) + { + std::pair p = q.front(); + q.pop(); + light_ft = p.first; + dark_ft = p.second; + light_s = full_cell(light_ft); + int light_i = index_of_covertex(light_ft); + dark_s = dark_side.full_cell(dark_ft); + int dark_i = dark_side.index_of_covertex(dark_ft); + Full_cell_handle light_n = light_s->neighbor(light_i); + set_neighbors(dark_s->data().light_copy_, dark_i, light_n, light_s->mirror_index(light_i)); + for( int di = 0; di <= current_dimension(); ++di ) + { + if( di == dark_i ) + continue; + int li = light_s->index(dark_s->vertex(di)->data()); + Rotor light_r(light_s, li, light_i); + typename Dark_triangulation::Rotor dark_r(dark_s, di, dark_i); + + while (simps.contains(cpp11::get<0>(light_r)->neighbor(cpp11::get<1>(light_r)))) + light_r = rotate_rotor(light_r); + + while (conflict_zone.contains(cpp11::get<0>(dark_r)->neighbor(cpp11::get<1>(dark_r)))) + dark_r = dark_side.rotate_rotor(dark_r); + + Dark_s_handle dark_ns = cpp11::get<0>(dark_r); + int dark_ni = cpp11::get<1>(dark_r); + Full_cell_handle light_ns = cpp11::get<0>(light_r); + int light_ni = cpp11::get<1>(light_r); + // mark dark_r as visited: + // TODO try by marking with Dark_v_handle (vertex) + Dark_s_handle outside = dark_ns->neighbor(dark_ni); + Dark_v_handle mirror = dark_ns->mirror_vertex(dark_ni, current_dimension()); + int dn = outside->index(mirror); + if( Dark_s_handle() == outside->neighbor(dn) ) + continue; + outside->set_neighbor(dn, Dark_s_handle()); + q.push(std::make_pair(Facet(light_ns, light_ni), Dark_facet(dark_ns, dark_ni))); + } + } + tds().delete_full_cells(simps.begin(), simps.end()); + tds().delete_vertex(v); + return ret_s; +} + +template< typename DCTraits, typename TDS > +void +Delaunay_triangulation +::remove_decrease_dimension(Vertex_handle v) +{ + CGAL_precondition( current_dimension() >= 0 ); + tds().remove_decrease_dimension(v, infinite_vertex()); + // reset the predicates: + reset_flat_orientation(); + if( 1 <= current_dimension() ) + { + Full_cell_handle inf_v_cell = infinite_vertex()->full_cell(); + int inf_v_index = inf_v_cell->index(infinite_vertex()); + Full_cell_handle s = inf_v_cell->neighbor(inf_v_index); + Orientation o = orientation(s); + CGAL_assertion( ZERO != o ); + if( NEGATIVE == o ) + reorient_full_cells(); + } +} + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - INSERTIONS + +template< typename DCTraits, typename TDS > +typename Delaunay_triangulation::Vertex_handle +Delaunay_triangulation +::insert(const Point & p, Locate_type lt, const Face & f, const Facet &, Full_cell_handle s) +{ + switch( lt ) + { + case Base::OUTSIDE_AFFINE_HULL: + return insert_outside_affine_hull(p); + break; + case Base::ON_VERTEX: + { + Vertex_handle v = s->vertex(f.index(0)); + v->set_point(p); + return v; + break; + } + default: + if( 1 == current_dimension() ) + { + if( Base::OUTSIDE_CONVEX_HULL == lt ) + { + return insert_outside_convex_hull_1(p, s); + } + Vertex_handle v = tds().insert_in_full_cell(s); + v->set_point(p); + return v; + } + else + return insert_in_conflicting_cell(p, s); + break; + } +} + +/* +[Undocumented function] + +Inserts the point `p` in the Delaunay triangulation. Returns a handle to the +(possibly newly created) vertex at that position. +\pre The point `p` +must lie outside the affine hull of the Delaunay triangulation. This implies that +`dt`.`current_dimension()` must be less than `dt`.`maximal_dimension()`. 
+*/ +template< typename DCTraits, typename TDS > +typename Delaunay_triangulation::Vertex_handle +Delaunay_triangulation +::insert_outside_affine_hull(const Point & p) +{ + // we don't use Base::insert_outside_affine_hull(...) because here, we + // also need to reset the side_of_oriented_subsphere functor. + CGAL_precondition( current_dimension() < maximal_dimension() ); + Vertex_handle v = tds().insert_increase_dimension(infinite_vertex()); + // reset the predicates: + reset_flat_orientation(); + v->set_point(p); + if( current_dimension() >= 1 ) + { + Full_cell_handle inf_v_cell = infinite_vertex()->full_cell(); + int inf_v_index = inf_v_cell->index(infinite_vertex()); + Full_cell_handle s = inf_v_cell->neighbor(inf_v_index); + Orientation o = orientation(s); + CGAL_assertion( ZERO != o ); + if( NEGATIVE == o ) + reorient_full_cells(); + + // We just inserted the second finite point and the right infinite + // cell is like : (inf_v, v), but we want it to be (v, inf_v) to be + // consistent with the rest of the cells + if (current_dimension() == 1) + { + // Is "inf_v_cell" the right infinite cell? + // Then inf_v_index should be 1 + if (inf_v_cell->neighbor(inf_v_index)->index(inf_v_cell) == 0 + && inf_v_index == 0) + { + inf_v_cell->swap_vertices( + current_dimension() - 1, current_dimension()); + } + // Otherwise, let's find the right infinite cell + else + { + inf_v_cell = inf_v_cell->neighbor((inf_v_index + 1) % 2); + inf_v_index = inf_v_cell->index(infinite_vertex()); + // Is "inf_v_cell" the right infinite cell? + // Then inf_v_index should be 1 + if (inf_v_cell->neighbor(inf_v_index)->index(inf_v_cell) == 0 + && inf_v_index == 0) + { + inf_v_cell->swap_vertices( + current_dimension() - 1, current_dimension()); + } + } + } + } + return v; +} + +/*! +[Undocumented function] + +Inserts the point `p` in the Delaunay triangulation. Returns a handle to the +(possibly newly created) vertex at that position. +\pre The point `p` must be in conflict with the full cell `c`. +*/ +template< typename DCTraits, typename TDS > +typename Delaunay_triangulation::Vertex_handle +Delaunay_triangulation +::insert_in_conflicting_cell(const Point & p, Full_cell_handle s) +{ + CGAL_precondition(is_in_conflict(p, s)); + + // for storing conflicting full_cells. + typedef std::vector Full_cell_h_vector; + CGAL_STATIC_THREAD_LOCAL_VARIABLE(Full_cell_h_vector,cs,0); + cs.clear(); + + std::back_insert_iterator out(cs); + Facet ft = compute_conflict_zone(p, s, out); + return insert_in_hole(p, cs.begin(), cs.end(), ft); +} + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - GATHERING CONFLICTING SIMPLICES + +// NOT DOCUMENTED +template< typename DCTraits, typename TDS > +template< typename OrientationPred > +Oriented_side +Delaunay_triangulation +::perturbed_side_of_positive_sphere(const Point & p, Full_cell_const_handle s, + const OrientationPred & ori) const +{ + CGAL_precondition_msg( ! 
is_infinite(s), "full cell must be finite"); + CGAL_expensive_precondition( POSITIVE == orientation(s) ); + typedef std::vector Points; + Points points(current_dimension() + 2); + int i(0); + for( ; i <= current_dimension(); ++i ) + points[i] = &(s->vertex(i)->point()); + points[i] = &p; + std::sort(points.begin(), points.end(), + internal::Triangulation::Compare_points_for_perturbation(*this)); + typename Points::const_reverse_iterator cut_pt = points.rbegin(); + Points test_points; + while( cut_pt != points.rend() ) + { + if( &p == *cut_pt ) + // because the full_cell "s" is assumed to be positively oriented + return ON_NEGATIVE_SIDE; // we consider |p| to lie outside the sphere + test_points.clear(); + typename Base::Point_const_iterator spit = points_begin(s); + int adjust_sign = -1; + for( i = 0; i < current_dimension(); ++i ) + { + if( &(*spit) == *cut_pt ) + { + ++spit; + adjust_sign = (((current_dimension() + i) % 2) == 0) ? -1 : +1; + } + test_points.push_back(&(*spit)); + ++spit; + } + test_points.push_back(&p); + + typedef typename CGAL::Iterator_project, + const Point &, const Point *> Point_pointer_iterator; + + Orientation ori_value = ori( + Point_pointer_iterator(test_points.begin()), + Point_pointer_iterator(test_points.end())); + + if( ZERO != ori_value ) + return Oriented_side( - adjust_sign * ori_value ); + + ++cut_pt; + } + CGAL_assertion(false); // we should never reach here + return ON_NEGATIVE_SIDE; +} + +template< typename DCTraits, typename TDS > +bool +Delaunay_triangulation +::is_in_conflict(const Point & p, Full_cell_const_handle s) const +{ + CGAL_precondition( 2 <= current_dimension() ); + if( current_dimension() < maximal_dimension() ) + { + Conflict_pred_in_subspace c(*this, p, coaffine_orientation_predicate(), side_of_oriented_subsphere_predicate()); + return c(s); + } + else + { + Orientation_d ori = geom_traits().orientation_d_object(); + Side_of_oriented_sphere_d side = geom_traits().side_of_oriented_sphere_d_object(); + Conflict_pred_in_fullspace c(*this, p, ori, side); + return c(s); + } +} + +template< typename DCTraits, typename TDS > +template< typename OutputIterator > +typename Delaunay_triangulation::Facet +Delaunay_triangulation +::compute_conflict_zone(const Point & p, Full_cell_handle s, OutputIterator out) const +{ + CGAL_precondition( 2 <= current_dimension() ); + if( current_dimension() < maximal_dimension() ) + { + Conflict_pred_in_subspace c(*this, p, coaffine_orientation_predicate(), side_of_oriented_subsphere_predicate()); + Conflict_traversal_pred_in_subspace tp(*this, c); + return tds().gather_full_cells(s, tp, out); + } + else + { + Orientation_d ori = geom_traits().orientation_d_object(); + Side_of_oriented_sphere_d side = geom_traits().side_of_oriented_sphere_d_object(); + Conflict_pred_in_fullspace c(*this, p, ori, side); + Conflict_traversal_pred_in_fullspace tp(*this, c); + return tds().gather_full_cells(s, tp, out); + } +} + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - VALIDITY + +template< typename DCTraits, typename TDS > +bool +Delaunay_triangulation +::is_valid(bool verbose, int level) const +{ + if (!Base::is_valid(verbose, level)) + return false; + + int dim = current_dimension(); + if (dim == maximal_dimension()) + { + for (Finite_full_cell_const_iterator cit = this->finite_full_cells_begin() ; + cit != this->finite_full_cells_end() ; ++cit ) + { + Full_cell_const_handle ch = cit.base(); + for(int i = 0; i < dim+1 ; ++i ) + { + // If the i-th neighbor is not an infinite cell + Vertex_handle 
opposite_vh = + ch->neighbor(i)->vertex(ch->neighbor(i)->index(ch)); + if (!is_infinite(opposite_vh)) + { + Side_of_oriented_sphere_d side = + geom_traits().side_of_oriented_sphere_d_object(); + if (side(Point_const_iterator(ch->vertices_begin()), + Point_const_iterator(ch->vertices_end()), + opposite_vh->point()) == ON_BOUNDED_SIDE) + { + if (verbose) + CGAL_warning_msg(false, "Non-empty sphere"); + return false; + } + } + } + } + } + return true; +} + + +} //namespace CGAL + +#endif // CGAL_DELAUNAY_COMPLEX_H diff --git a/src/common/include/gudhi_patches/CGAL/Epeck_d.h b/src/common/include/gudhi_patches/CGAL/Epeck_d.h new file mode 100644 index 00000000..52bce84c --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/Epeck_d.h @@ -0,0 +1,53 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_EPECK_D_H +#define CGAL_EPECK_D_H +#include +#include +#include +#include + + +namespace CGAL { +#define CGAL_BASE \ + Cartesian_base_d::Type, Dim> +template +struct Epeck_d_help1 +: CGAL_BASE +{ + CGAL_CONSTEXPR Epeck_d_help1(){} + CGAL_CONSTEXPR Epeck_d_help1(int d):CGAL_BASE(d){} +}; +#undef CGAL_BASE +#define CGAL_BASE \ + Kernel_d_interface< \ + Cartesian_wrap< \ + Epeck_d_help1, \ + Epeck_d > > +template +struct Epeck_d +: CGAL_BASE +{ + CGAL_CONSTEXPR Epeck_d(){} + CGAL_CONSTEXPR Epeck_d(int d):CGAL_BASE(d){} +}; +#undef CGAL_BASE +} +#endif diff --git a/src/common/include/gudhi_patches/CGAL/Epick_d.h b/src/common/include/gudhi_patches/CGAL/Epick_d.h new file mode 100644 index 00000000..64438539 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/Epick_d.h @@ -0,0 +1,71 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
+// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_EPICK_D_H +#define CGAL_EPICK_D_H +#include +#include +#include +#include +#include +#include +#include +#include + + +namespace CGAL { +#define CGAL_BASE \ + Cartesian_filter_K< \ + Cartesian_base_d, \ + Cartesian_base_d, \ + Cartesian_base_d::Type, Dim> \ + > +template +struct Epick_d_help1 +: CGAL_BASE +{ + CGAL_CONSTEXPR Epick_d_help1(){} + CGAL_CONSTEXPR Epick_d_help1(int d):CGAL_BASE(d){} +}; +#undef CGAL_BASE +#define CGAL_BASE \ + Cartesian_static_filters,Epick_d_help2 > +template +struct Epick_d_help2 +: CGAL_BASE +{ + CGAL_CONSTEXPR Epick_d_help2(){} + CGAL_CONSTEXPR Epick_d_help2(int d):CGAL_BASE(d){} +}; +#undef CGAL_BASE +#define CGAL_BASE \ + Kernel_d_interface< \ + Cartesian_wrap< \ + Epick_d_help2, \ + Epick_d > > +template +struct Epick_d +: CGAL_BASE +{ + CGAL_CONSTEXPR Epick_d(){} + CGAL_CONSTEXPR Epick_d(int d):CGAL_BASE(d){} +}; +#undef CGAL_BASE +} +#endif diff --git a/src/common/include/gudhi_patches/CGAL/IO/Triangulation_off_ostream.h b/src/common/include/gudhi_patches/CGAL/IO/Triangulation_off_ostream.h new file mode 100644 index 00000000..701f0820 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/IO/Triangulation_off_ostream.h @@ -0,0 +1,320 @@ +// Copyright (c) 2014 INRIA Sophia-Antipolis (France). +// All rights reserved. +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL: $ +// $Id: $ +// +// Author(s) : Clement Jamin + + +#ifndef CGAL_TRIANGULATION_IO_H +#define CGAL_TRIANGULATION_IO_H + +#include +#include +#include +#include + +namespace CGAL { + +namespace Triangulation_IO +{ +// TODO: test if the stream is binary or text? +template +int +output_point(std::ostream & os, const Traits &traits, const P & p) +{ + typedef typename Traits::Compute_coordinate_d Ccd; + const Ccd ccd = traits.compute_coordinate_d_object(); + const int dim = traits.point_dimension_d_object()(p); + if (dim > 0) + { + os << ccd(p, 0); + for (int i = 1 ; i < dim ; ++i) + os << " " << CGAL::to_double(ccd(p, i)); + } + return dim; +} + +// TODO: test if the stream is binary or text? +template +int +output_weighted_point(std::ostream & os, const Traits &traits, const P & p, + bool output_weight = true) +{ + typedef typename Traits::Compute_coordinate_d Ccd; + typename Traits::Construct_point_d cp = + traits.construct_point_d_object(); + typename Traits::Compute_weight_d pt_weight = traits.compute_weight_d_object(); + const Ccd ccd = traits.compute_coordinate_d_object(); + const int dim = traits.point_dimension_d_object()(p); + if (dim > 0) + { + output_point(os, traits, p); + if (output_weight) + os << " " << pt_weight(p); + } + return dim; +} + +// TODO: test if the stream is binary or text? 
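The two helpers above write a point's Cartesian coordinates separated by single spaces, which is what the OFF exporter further below relies on. A small sketch of calling output_point directly, assuming the gudhi_patches directory is on the include path (so the header is reachable as <CGAL/IO/Triangulation_off_ostream.h>) and using the CGAL::Epick_d kernel introduced earlier in this patch:

#include <CGAL/Epick_d.h>
#include <CGAL/IO/Triangulation_off_ostream.h>
#include <iostream>

int main() {
  typedef CGAL::Epick_d< CGAL::Dimension_tag<2> > K;
  K k;
  double coords[] = {1.0, 7.0};
  // Same construction pattern as in the generators: (dimension, begin, end).
  K::Point_d p = k.construct_point_d_object()(2, coords, coords + 2);
  // Prints "1 7" (coordinates separated by a space, no trailing newline).
  CGAL::Triangulation_IO::output_point(std::cout, k, p);
  std::cout << std::endl;
  return 0;
}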
+template +void +output_full_cell(std::ostream & os, const Traits &traits, const FCH & fch, + bool output_weights = false) +{ + typename FCH::value_type::Vertex_handle_iterator vit = fch->vertices_begin(); + for( ; vit != fch->vertices_end(); ++vit ) + { + int dim; + if (output_weights) + dim = output_weighted_point(os, traits, (*vit)->point()); + else + dim = output_point(os, traits, (*vit)->point()); + if (dim > 0) + os << std::endl; + } +} + +// TODO: test if the stream is binary or text? +/*template +void +input_point(std::istream & is, const Traits &traits, P & p) +{ + typedef typename Traits::FT FT; + std::vector coords; + + std::string line; + for(;;) + { + if (!std::getline(is, line)) + return is; + if (line != "") + break; + } + std::stringstream line_sstr(line); + FT temp; + while (line_sstr >> temp) + coords.push_back(temp); + + p = traits.construct_point_d_object()(coords.begin(), coords.end()); +}*/ + +} // namespace Triangulation_IO + +/////////////////////////////////////////////////////////////// +// TODO: replace these operator>> by an "input_point" function +/////////////////////////////////////////////////////////////// + +// TODO: test if the stream is binary or text? +template +std::istream & +operator>>(std::istream &is, typename Wrap::Point_d & p) +{ + typedef typename Wrap::Point_d P; + typedef typename K::FT FT; + std::vector coords; + + std::string line; + for(;;) + { + if (!std::getline(is, line)) + return is; + if (line != "") + break; + } + std::stringstream line_sstr(line); + FT temp; + while (line_sstr >> temp) + coords.push_back(temp); + + p = P(coords.begin(), coords.end()); + return is; +} + +// TODO: test if the stream is binary or text? +template +std::istream & +operator>>(std::istream &is, typename Wrap::Weighted_point_d & wp) +{ + typedef typename Wrap::Point_d P; + typedef typename Wrap::Weighted_point_d WP; + typedef typename K::FT FT; + + std::string line; + for(;;) + { + if (!std::getline(is, line)) + return is; + if (line != "") + break; + } + std::stringstream line_sstr(line); + FT temp; + std::vector coords; + while (line_sstr >> temp) + coords.push_back(temp); + + typename std::vector::iterator last = coords.end() - 1; + P p = P(coords.begin(), last); + wp = WP(p, *last); + + return is; +} + +// TODO: test if the stream is binary or text? 
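Conversely, the extractors above read one non-empty line per point and parse it as whitespace-separated coordinates; the weighted variant treats the last value on the line as the weight. A sketch of what reading might look like, under the assumptions that the extractor is found for the Epick_d point type and that this point type is default-constructible (neither is spelled out by the patch itself):

#include <CGAL/Epick_d.h>
#include <CGAL/IO/Triangulation_off_ostream.h>
#include <iostream>
#include <sstream>
#include <vector>

int main() {
  typedef CGAL::Epick_d< CGAL::Dimension_tag<2> > K;
  // Three 2D points, one per line, as the extractor above expects.
  std::istringstream input("1.0 1.0\n7.0 0.0\n4.0 6.0\n");
  std::vector<K::Point_d> points;
  K::Point_d p;
  while (input >> p)   // uses the operator>> defined above
    points.push_back(p);
  std::cout << points.size() << " points read" << std::endl;  // expected: 3
  return 0;
}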
+template +std::istream & +operator>>(std::istream &is, typename Wrap::Vector_d & v) +{ + typedef typename Wrap::Vector_d V; + typedef typename K::FT FT; + std::vector coords; + + std::string line; + for (;;) + { + if (!std::getline(is, line)) + return is; + if (line != "") + break; + } + std::stringstream line_sstr(line); + FT temp; + while (line_sstr >> temp) + coords.push_back(temp); + + v = V(coords.begin(), coords.end()); + return is; +} + +template < class GT, class TDS > +std::ostream & +export_triangulation_to_off(std::ostream & os, + const Triangulation & tr, + bool in_3D_export_surface_only = false) +{ + typedef Triangulation Tr; + typedef typename Tr::Vertex_const_handle Vertex_handle; + typedef typename Tr::Finite_vertex_const_iterator Finite_vertex_iterator; + typedef typename Tr::Finite_full_cell_const_iterator Finite_full_cell_iterator; + typedef typename Tr::Full_cell_const_iterator Full_cell_iterator; + typedef typename Tr::Full_cell Full_cell; + typedef typename Full_cell::Vertex_handle_const_iterator Full_cell_vertex_iterator; + + if (tr.maximal_dimension() < 2 || tr.maximal_dimension() > 3) + { + std::cerr << "Warning: export_tds_to_off => dimension should be 2 or 3."; + os << "Warning: export_tds_to_off => dimension should be 2 or 3."; + return os; + } + + size_t n = tr.number_of_vertices(); + + std::stringstream output; + + // write the vertices + std::map index_of_vertex; + int i = 0; + for(Finite_vertex_iterator it = tr.finite_vertices_begin(); + it != tr.finite_vertices_end(); ++it, ++i) + { + Triangulation_IO::output_point(output, tr.geom_traits(), it->point()); + if (tr.maximal_dimension() == 2) + output << " 0"; + output << std::endl; + index_of_vertex[it.base()] = i; + } + CGAL_assertion( i == n ); + + size_t number_of_triangles = 0; + if (tr.maximal_dimension() == 2) + { + for (Finite_full_cell_iterator fch = tr.finite_full_cells_begin() ; + fch != tr.finite_full_cells_end() ; ++fch) + { + output << "3 "; + for (Full_cell_vertex_iterator vit = fch->vertices_begin() ; + vit != fch->vertices_end() ; ++vit) + { + output << index_of_vertex[*vit] << " "; + } + output << std::endl; + ++number_of_triangles; + } + } + else if (tr.maximal_dimension() == 3) + { + if (in_3D_export_surface_only) + { + // Parse boundary facets + for (Full_cell_iterator fch = tr.full_cells_begin() ; + fch != tr.full_cells_end() ; ++fch) + { + if (tr.is_infinite(fch)) + { + output << "3 "; + for (Full_cell_vertex_iterator vit = fch->vertices_begin() ; + vit != fch->vertices_end() ; ++vit) + { + if (!tr.is_infinite(*vit)) + output << index_of_vertex[*vit] << " "; + } + output << std::endl; + ++number_of_triangles; + } + } + } + else + { + // Parse finite cells + for (Finite_full_cell_iterator fch = tr.finite_full_cells_begin() ; + fch != tr.finite_full_cells_end() ; ++fch) + { + output << "3 " + << index_of_vertex[fch->vertex(0)] << " " + << index_of_vertex[fch->vertex(1)] << " " + << index_of_vertex[fch->vertex(2)] + << std::endl; + output << "3 " + << index_of_vertex[fch->vertex(0)] << " " + << index_of_vertex[fch->vertex(2)] << " " + << index_of_vertex[fch->vertex(3)] + << std::endl; + output << "3 " + << index_of_vertex[fch->vertex(1)] << " " + << index_of_vertex[fch->vertex(2)] << " " + << index_of_vertex[fch->vertex(3)] + << std::endl; + output << "3 " + << index_of_vertex[fch->vertex(0)] << " " + << index_of_vertex[fch->vertex(1)] << " " + << index_of_vertex[fch->vertex(3)] + << std::endl; + number_of_triangles += 4; + } + } + } + + os << "OFF \n" + << n << " " + << number_of_triangles 
<< " 0\n" + << output.str(); + + return os; +} + +} //namespace CGAL + +#endif // CGAL_TRIANGULATION_IO_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_LA_base.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_LA_base.h new file mode 100644 index 00000000..c13a9801 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_LA_base.h @@ -0,0 +1,177 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_KERNEL_D_CARTESIAN_LA_BASE_H +#define CGAL_KERNEL_D_CARTESIAN_LA_BASE_H + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#ifdef CGAL_EIGEN3_ENABLED +#include +#else +#error Eigen3 is required +#endif + +namespace CGAL { + +template < typename FT_, typename Dim_, +#if 1 + typename Vec_=Mix_vector, + Vector_vector, + FT_, Dim_>, +#elif 0 + typename Vec_=Array_vector, +#elif 0 + typename Vec_=Vector_vector, +#else + // Dangerous because of alignment. Ok on x86_64 without AVX. + typename Vec_=LA_eigen, +#endif + typename LA_=LA_eigen > + /* Default LA to Vec or to LA_eigen? */ +struct Cartesian_LA_base_d : public Dimension_base +{ + typedef Cartesian_LA_base_d Self; + typedef Cartesian_tag Rep_tag; + typedef Cartesian_tag Kernel_tag; + typedef Dim_ Default_ambient_dimension; + typedef Dim_ Max_ambient_dimension; + typedef Dim_ Dimension; + typedef LA_ LA; + template struct Ambient_dimension { typedef Dim_ type; }; + + typedef Vec_ LA_vector; + typedef typename LA_vector::Vector Point; + typedef typename LA_vector::Vector Vector; + typedef typename LA_vector::Vector Vector_; + typedef typename LA_vector::Construct_vector Constructor; + typedef typename LA_vector::Vector_const_iterator Point_cartesian_const_iterator; + typedef typename LA_vector::Vector_const_iterator Vector_cartesian_const_iterator; + + template struct Type {}; + template struct Type< Point_tag, D> { typedef Vector_ type; }; + template struct Type { typedef Vector_ type; }; + template struct Type< FT_tag, D> { typedef FT_ type; }; + template struct Type< RT_tag, D> { typedef FT_ type; }; + + typedef typeset + ::add::type + // FIXME: These have nothing to do here. 
+ ::add::type + ::add::type + ::add::type + ::add::type + Object_list; + + typedef typeset< Point_cartesian_const_iterator_tag>::type + ::add::type + Iterator_list; + + template > struct Functor { + typedef Null_functor type; + }; + template struct Functor,D> { + typedef CartesianDVectorBase::Construct_LA_vector type; + }; + template struct Functor,D> { + typedef CartesianDVectorBase::Construct_LA_vector type; + }; + template struct Functor,D> { + typedef CartesianDVectorBase::Construct_cartesian_const_iterator type; + }; + template struct Functor,D> { + typedef CartesianDVectorBase::Construct_cartesian_const_iterator type; + }; + template struct Functor::value> > { + typedef CartesianDVectorBase::Sum_of_vectors type; + }; + template struct Functor::value> > { + typedef CartesianDVectorBase::Difference_of_vectors type; + }; + template struct Functor::value> > { + typedef CartesianDVectorBase::Opposite_vector type; + }; + template struct Functor::value + || !LA_vector::template Property::value> > { + typedef CartesianDVectorBase::Midpoint type; + }; + template struct Functor { + typedef CartesianDVectorBase::Compute_cartesian_coordinate type; + }; + template struct Functor { + typedef CartesianDVectorBase::Compute_cartesian_coordinate type; + }; + template struct Functor { + typedef CartesianDVectorBase::PV_dimension type; + }; + template struct Functor { + typedef CartesianDVectorBase::PV_dimension type; + }; + template struct Functor::value> > { + typedef CartesianDVectorBase::Orientation_of_vectors type; + }; + template struct Functor::value> > { + typedef CartesianDVectorBase::Orientation_of_points type; + }; + template struct Functor::value> > { + typedef CartesianDVectorBase::Scalar_product type; + }; + template struct Functor::value> > { + typedef CartesianDVectorBase::Squared_distance_to_origin_stored type; + }; + // Use integral_constant in case of failure, to distinguish from the previous one. + template struct Functor::value + || !LA_vector::template Property::value)*2> > { + typedef CartesianDVectorBase::Squared_distance_to_origin_via_dotprod type; + }; + template struct Functor { + typedef CartesianDVectorBase::Identity_functor type; + }; + template struct Functor { + typedef CartesianDVectorBase::Identity_functor type; + }; + + CGAL_CONSTEXPR Cartesian_LA_base_d(){} + CGAL_CONSTEXPR Cartesian_LA_base_d(int d):Dimension_base(d){} +}; + +} //namespace CGAL + +#endif // CGAL_KERNEL_D_CARTESIAN_LA_BASE_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_LA_functors.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_LA_functors.h new file mode 100644 index 00000000..871c463a --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_LA_functors.h @@ -0,0 +1,344 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
+// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_CARTESIAN_LA_FUNCTORS_H +#define CGAL_CARTESIAN_LA_FUNCTORS_H + +#include +#include +#include +#include +#include +#include +#include + +namespace CGAL { +namespace CartesianDVectorBase { +#ifndef CGAL_CXX11 +namespace internal { +template struct Construct_LA_vector_ { + struct Never_use {}; + void operator()(Never_use)const; +}; +#define CGAL_CODE(Z,N,_) template struct Construct_LA_vector_ > { \ + typedef typename R::Constructor Constructor; \ + typedef typename Get_type::type RT; \ + typedef typename R::Vector_ result_type; \ + result_type operator() \ + (BOOST_PP_ENUM_PARAMS(N,RT const& t)) const { \ + return typename Constructor::Values()(BOOST_PP_ENUM_PARAMS(N,t)); \ + } \ + result_type operator() \ + (BOOST_PP_ENUM_PARAMS(BOOST_PP_INC(N),RT const& t)) const { \ + return typename Constructor::Values_divide()(t##N,BOOST_PP_ENUM_PARAMS(N,t)); \ + } \ + }; +BOOST_PP_REPEAT_FROM_TO(2, 11, CGAL_CODE, _ ) +#undef CGAL_CODE +} +#endif + +template struct Construct_LA_vector +: private Store_kernel +#ifndef CGAL_CXX11 +, public internal::Construct_LA_vector_ +#endif +{ + //CGAL_FUNCTOR_INIT_IGNORE(Construct_LA_vector) + CGAL_FUNCTOR_INIT_STORE(Construct_LA_vector) + typedef R_ R; + typedef typename R::Constructor Constructor; + typedef typename Get_type::type RT; + typedef typename Get_type::type FT; + typedef typename R::Vector_ result_type; + typedef typename R_::Default_ambient_dimension Dimension; + result_type operator()(int d)const{ + CGAL_assertion(check_dimension_eq(d,this->kernel().dimension())); + return typename Constructor::Dimension()(d); + } + result_type operator()()const{ + return typename Constructor::Dimension()((std::max)(0,this->kernel().dimension())); + } + result_type operator()(int d, Zero_ const&)const{ + CGAL_assertion(check_dimension_eq(d,this->kernel().dimension())); + return typename Constructor::Dimension()(d); + } + result_type operator()(Zero_ const&)const{ + // Makes no sense for an unknown dimension. 
+ return typename Constructor::Dimension()(this->kernel().dimension()); + } + result_type operator()(result_type const& v)const{ + return v; + } +#ifdef CGAL_CXX11 + result_type operator()(result_type&& v)const{ + return std::move(v); + } +#endif +#ifdef CGAL_CXX11 + template + typename std::enable_if::value && + boost::is_same, Dimension>::value, + result_type>::type + operator()(U&&...u)const{ + return typename Constructor::Values()(std::forward(u)...); + } + //template::value>::type,class=typename std::enable_if<(sizeof...(U)==static_dim+1)>::type,class=void> + template + typename std::enable_if::value && + boost::is_same, Dimension>::value, + result_type>::type + operator()(U&&...u)const{ + return Apply_to_last_then_rest()(typename Constructor::Values_divide(),std::forward(u)...); + } +#else + using internal::Construct_LA_vector_::operator(); +#endif + template inline + typename boost::enable_if,result_type>::type operator() + (Iter f,Iter g,Cartesian_tag t)const + { + return this->operator()((int)std::distance(f,g),f,g,t); + } + template inline + typename boost::enable_if,result_type>::type operator() + (int d,Iter f,Iter g,Cartesian_tag)const + { + CGAL_assertion(d==std::distance(f,g)); + CGAL_assertion(check_dimension_eq(d,this->kernel().dimension())); + return typename Constructor::Iterator()(d,f,g); + } + template inline + typename boost::enable_if,result_type>::type operator() + (Iter f,Iter g,Homogeneous_tag)const + { + --g; + return this->operator()((int)std::distance(f,g),f,g,*g); + } + template inline + typename boost::enable_if,result_type>::type operator() + (int d,Iter f,Iter g,Homogeneous_tag)const + { + --g; + return this->operator()(d,f,g,*g); + } + template inline + typename boost::enable_if,result_type>::type operator() + (Iter f,Iter g)const + { + // Shouldn't it try comparing dist(f,g) to the dimension if it is known? + return this->operator()(f,g,typename R::Rep_tag()); + } + template inline + typename boost::enable_if,result_type>::type operator() + (int d,Iter f,Iter g)const + { + return this->operator()(d,f,g,typename R::Rep_tag()); + } + + // Last homogeneous coordinate given separately + template inline + typename boost::enable_if,result_type>::type operator() + (int d,Iter f,Iter g,NT const&l)const + { + CGAL_assertion(d==std::distance(f,g)); + CGAL_assertion(check_dimension_eq(d,this->kernel().dimension())); + // RT? better be safe for now + return typename Constructor::Iterator()(d,CGAL::make_transforming_iterator(f,Divide(l)),CGAL::make_transforming_iterator(g,Divide(l))); + } + template inline + typename boost::enable_if,result_type>::type operator() + (Iter f,Iter g,NT const&l)const + { + return this->operator()((int)std::distance(f,g),f,g,l); + } +}; + +template struct Compute_cartesian_coordinate { + CGAL_FUNCTOR_INIT_IGNORE(Compute_cartesian_coordinate) + typedef R_ R; + typedef typename Get_type::type RT; + typedef typename R::Vector_ first_argument_type; + typedef int second_argument_type; + typedef Tag_true Is_exact; +#ifdef CGAL_CXX11 + typedef decltype(std::declval()[0]) result_type; +#else + typedef RT const& result_type; + // RT const& doesn't work with some LA (Eigen2 for instance) so we + // should use plain RT or find a way to detect this. 
+#endif + + result_type operator()(first_argument_type const& v,int i)const{ + return v[i]; + } +}; + +template struct Construct_cartesian_const_iterator { + CGAL_FUNCTOR_INIT_IGNORE(Construct_cartesian_const_iterator) + typedef R_ R; + typedef typename R::Vector_ argument_type; + typedef typename R::LA_vector S_; + typedef typename R::Point_cartesian_const_iterator result_type; + // same as Vector + typedef Tag_true Is_exact; + + result_type operator()(argument_type const& v,Begin_tag)const{ + return S_::vector_begin(v); + } + result_type operator()(argument_type const& v,End_tag)const{ + return S_::vector_end(v); + } +}; + +template struct Midpoint { + CGAL_FUNCTOR_INIT_IGNORE(Midpoint) + typedef R_ R; + typedef typename Get_type::type first_argument_type; + typedef typename Get_type::type second_argument_type; + typedef typename Get_type::type result_type; + + result_type operator()(result_type const& a, result_type const& b)const{ + return (a+b)/2; + } +}; + +template struct Sum_of_vectors { + CGAL_FUNCTOR_INIT_IGNORE(Sum_of_vectors) + typedef R_ R; + typedef typename Get_type::type first_argument_type; + typedef typename Get_type::type second_argument_type; + typedef typename Get_type::type result_type; + + result_type operator()(result_type const& a, result_type const& b)const{ + return a+b; + } +}; + +template struct Difference_of_vectors { + CGAL_FUNCTOR_INIT_IGNORE(Difference_of_vectors) + typedef R_ R; + typedef typename Get_type::type first_argument_type; + typedef typename Get_type::type second_argument_type; + typedef typename Get_type::type result_type; + + result_type operator()(result_type const& a, result_type const& b)const{ + return a-b; + } +}; + +template struct Opposite_vector { + CGAL_FUNCTOR_INIT_IGNORE(Opposite_vector) + typedef R_ R; + typedef typename Get_type::type result_type; + typedef typename Get_type::type argument_type; + + result_type operator()(result_type const& v)const{ + return -v; + } +}; + +template struct Scalar_product { + CGAL_FUNCTOR_INIT_IGNORE(Scalar_product) + typedef R_ R; + typedef typename R::LA_vector LA; + typedef typename Get_type::type result_type; + typedef typename Get_type::type first_argument_type; + typedef typename Get_type::type second_argument_type; + + result_type operator()(first_argument_type const& a, second_argument_type const& b)const{ + return LA::dot_product(a,b); + } +}; + +template struct Squared_distance_to_origin_stored { + CGAL_FUNCTOR_INIT_IGNORE(Squared_distance_to_origin_stored) + typedef R_ R; + typedef typename R::LA_vector LA; + typedef typename Get_type::type result_type; + typedef typename Get_type::type argument_type; + + result_type operator()(argument_type const& a)const{ + return LA::squared_norm(a); + } +}; + +template struct Squared_distance_to_origin_via_dotprod { + CGAL_FUNCTOR_INIT_IGNORE(Squared_distance_to_origin_via_dotprod) + typedef R_ R; + typedef typename R::LA_vector LA; + typedef typename Get_type::type result_type; + typedef typename Get_type::type argument_type; + + result_type operator()(argument_type const& a)const{ + return LA::dot_product(a,a); + } +}; + +template struct Orientation_of_vectors { + CGAL_FUNCTOR_INIT_IGNORE(Orientation_of_vectors) + typedef R_ R; + typedef typename R::Vector_cartesian_const_iterator first_argument_type; + typedef typename R::Vector_cartesian_const_iterator second_argument_type; + typedef typename Get_type::type result_type; + typedef typename R::LA_vector LA; + + template + result_type operator()(Iter const& f, Iter const& e) const { + return 
LA::determinant_of_iterators_to_vectors(f,e); + } +}; + +template struct Orientation_of_points { + CGAL_FUNCTOR_INIT_IGNORE(Orientation_of_points) + typedef R_ R; + typedef typename R::Point_cartesian_const_iterator first_argument_type; + typedef typename R::Point_cartesian_const_iterator second_argument_type; + typedef typename Get_type::type result_type; + typedef typename R::LA_vector LA; + + template + result_type operator()(Iter const& f, Iter const& e) const { + return LA::determinant_of_iterators_to_points(f,e); + } +}; + +template struct PV_dimension { + CGAL_FUNCTOR_INIT_IGNORE(PV_dimension) + typedef R_ R; + typedef typename R::Vector_ argument_type; + typedef int result_type; + typedef typename R::LA_vector LA; + typedef Tag_true Is_exact; + + template + result_type operator()(T const& v) const { + return LA::size_of_vector(v); + } +}; + +template struct Identity_functor { + CGAL_FUNCTOR_INIT_IGNORE(Identity_functor) + template + T const& operator()(T const&t) const { return t; } +}; + +} +} // namespace CGAL +#endif // CGAL_CARTESIAN_LA_FUNCTORS_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_base.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_base.h new file mode 100644 index 00000000..641bf8ae --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_base.h @@ -0,0 +1,40 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_KERNEL_D_CARTESIAN_BASE_H +#define CGAL_KERNEL_D_CARTESIAN_BASE_H + +#include +#include +#include + +namespace CGAL { +#define CGAL_BASE \ + Cartesian_LA_base_d< FT_, Dim_ > +template < typename FT_, typename Dim_, typename Derived_=Default> +struct Cartesian_base_d : public CGAL_BASE +{ + CGAL_CONSTEXPR Cartesian_base_d(){} + CGAL_CONSTEXPR Cartesian_base_d(int d):CGAL_BASE(d){} +}; +#undef CGAL_BASE + +} //namespace CGAL + +#endif // CGAL_KERNEL_D_CARTESIAN_BASE_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_change_FT.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_change_FT.h new file mode 100644 index 00000000..e09c72d0 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_change_FT.h @@ -0,0 +1,117 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
+// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_KERNEL_D_CARTESIAN_CHANGE_FT_H +#define CGAL_KERNEL_D_CARTESIAN_CHANGE_FT_H + +#include +#include +#include +#include + +namespace CGAL { + +template < typename Base_, typename FT_, typename LA_=CGAL::LA_eigen > +struct Cartesian_change_FT_base : public + Base_ +{ + CGAL_CONSTEXPR Cartesian_change_FT_base(){} + CGAL_CONSTEXPR Cartesian_change_FT_base(int d):Base_(d){} + + typedef Cartesian_change_FT_base Self; + typedef Base_ Kernel_base; + typedef LA_ LA; + + template struct Type : Inherit_type {}; + template struct Type { typedef FT_ type; }; + template struct Type { typedef FT_ type; }; + + typedef NT_converter::type,FT_> FT_converter; + typedef transforming_iterator Point_cartesian_const_iterator; + typedef transforming_iterator Vector_cartesian_const_iterator; + //FIXME: use Iterator_list! + /* + template::value_tag,FT_tag>::value> + struct Iterator : Get_type {}; + template struct Iterator { + typedef transforming_iterator::type> type; + }; + */ + + template + struct Construct_cartesian_const_iterator_ { + typedef typename Get_functor::type Functor_base; + Construct_cartesian_const_iterator_(){} + Construct_cartesian_const_iterator_(Self const&r):f(r){} + Functor_base f; + typedef Type_ result_type; + template + result_type operator()(T const& v, Begin_tag)const{ + return make_transforming_iterator(f(v,Begin_tag()),FT_converter()); + } + template + result_type operator()(T const& v, End_tag)const{ + return make_transforming_iterator(f(v,End_tag()),FT_converter()); + } + }; + typedef Construct_cartesian_const_iterator_,Point_cartesian_const_iterator> Construct_point_cartesian_const_iterator; + typedef Construct_cartesian_const_iterator_,Vector_cartesian_const_iterator> Construct_vector_cartesian_const_iterator; + + template + struct Compute_cartesian_coordinate { + typedef typename Get_functor::type Functor_base; + Compute_cartesian_coordinate(){} + Compute_cartesian_coordinate(Self const&r):f(r){} + Functor_base f; + typedef FT_ result_type; + template + result_type operator()(Obj_ const& v,int i)const{ + return FT_converter()(f(v,i)); + } + }; + + template::type> struct Functor : + Inherit_functor { }; + template struct Functor { }; + template struct Functor { }; + template struct Functor { + typedef Compute_cartesian_coordinate type; + }; + template struct Functor { + typedef Compute_cartesian_coordinate type; + }; + template struct Functor,D,Construct_iterator_tag> { + typedef Construct_point_cartesian_const_iterator type; + }; + template struct Functor,D,Construct_iterator_tag> { + typedef Construct_vector_cartesian_const_iterator type; + }; +}; + +template < typename Base_, typename FT_> +struct Cartesian_change_FT : public + Cartesian_change_FT_base +{ + CGAL_CONSTEXPR Cartesian_change_FT(){} + CGAL_CONSTEXPR Cartesian_change_FT(int d):Cartesian_change_FT_base(d){} +}; + +} //namespace CGAL + +#endif // CGAL_KERNEL_D_CARTESIAN_CHANGE_FT_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_complete.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_complete.h new file mode 100644 index 00000000..ef8921db --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_complete.h @@ -0,0 +1,33 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; 
either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_KERNEL_D_CARTESIAN_COMPLETE_H +#define CGAL_KERNEL_D_CARTESIAN_COMPLETE_H + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#endif // CGAL_KERNEL_D_CARTESIAN_COMPLETE_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_filter_K.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_filter_K.h new file mode 100644 index 00000000..179e97bf --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_filter_K.h @@ -0,0 +1,79 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_KERNEL_D_CARTESIAN_FILTER_K_H +#define CGAL_KERNEL_D_CARTESIAN_FILTER_K_H + +#include +#include +#include +#include +#include + +namespace CGAL { + +template < typename Base_, typename AK_, typename EK_ > +struct Cartesian_filter_K : public Base_, + private Store_kernel, private Store_kernel2 +{ + CGAL_CONSTEXPR Cartesian_filter_K(){} + CGAL_CONSTEXPR Cartesian_filter_K(int d):Base_(d){} + //FIXME: or do we want an instance of AK and EK belonging to this kernel, + //instead of a reference to external ones? + CGAL_CONSTEXPR Cartesian_filter_K(AK_ const&a,EK_ const&b):Base_(),Store_kernel(a),Store_kernel2(b){} + CGAL_CONSTEXPR Cartesian_filter_K(int d,AK_ const&a,EK_ const&b):Base_(d),Store_kernel(a),Store_kernel2(b){} + typedef Base_ Kernel_base; + typedef AK_ AK; + typedef EK_ EK; + typedef typename Store_kernel::reference_type AK_rt; + AK_rt approximate_kernel()const{return this->kernel();} + typedef typename Store_kernel2::reference2_type EK_rt; + EK_rt exact_kernel()const{return this->kernel2();} + + // MSVC is too dumb to perform the empty base optimization. + typedef boost::mpl::and_< + internal::Do_not_store_kernel, + internal::Do_not_store_kernel, + internal::Do_not_store_kernel > Do_not_store_kernel; + + //TODO: C2A/C2E could be able to convert *this into this->kernel() or this->kernel2(). + typedef KernelD_converter C2A; + typedef KernelD_converter C2E; + + // fix the types + // TODO: only fix some types, based on some criterion? 
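// (Illustrative sketch, not part of the patched header.)  The Functor
// specialisation below wraps each predicate in Filtered_predicate2: evaluate
// the approximate kernel's predicate first and redo the computation with the
// exact kernel only when the fast answer is uncertain.  The real code signals
// uncertainty with Uncertain/Protect_FPU_rounding and an exception; the sketch
// below models it with std::optional (C++17) to stay short.  All names here
// are hypothetical.

#include <optional>

template <class Approx, class Exact, class Result = int>
struct filtered_call {
  Approx approx;   // fast evaluation; an empty optional means "could not decide"
  Exact  exact;    // exact evaluation; always yields an answer
  template <class... Args>
  Result operator()(Args const&... a) const {
    if (std::optional<Result> r = approx(a...))  // certain fast answer?
      return *r;
    return exact(a...);                          // fall back to the exact computation
  }
};

// Typical use: Approx is an orientation test over interval arithmetic, Exact is
// the same test over exact rationals; most calls never reach the exact branch.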
+ template struct Type : Get_type {}; + + template::type> struct Functor : + Inherit_functor {}; + template struct Functor { + typedef typename Get_functor::type AP; + typedef typename Get_functor::type EP; + typedef Filtered_predicate2 type; + }; +// TODO: +// template struct Functor : +// Kernel_base::template Functor {}; +// TODO: +// detect when Less_cartesian_coordinate doesn't need filtering +}; + +} //namespace CGAL + +#endif // CGAL_KERNEL_D_CARTESIAN_FILTER_K_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_filter_NT.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_filter_NT.h new file mode 100644 index 00000000..c390a55c --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_filter_NT.h @@ -0,0 +1,93 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_KERNEL_D_CARTESIAN_FILTER_NT_H +#define CGAL_KERNEL_D_CARTESIAN_FILTER_NT_H + +#include +#include +#include + +namespace CGAL { + +template < typename Base_ > +struct Cartesian_filter_NT : public Base_ +{ + CGAL_CONSTEXPR Cartesian_filter_NT(){} + CGAL_CONSTEXPR Cartesian_filter_NT(int d):Base_(d){} + typedef Base_ Kernel_base; + typedef Cartesian_change_FT K1; + typedef typename internal::Exact_field_selector::type>::Type Exact_nt; + typedef Cartesian_change_FT K2; + + template::type> struct Functor : + Inherit_functor {}; + template struct Functor { + struct type { + //TODO: use compression (derive from a compressed_pair?) + typedef typename Get_functor::type P1; P1 p1; + typedef typename Get_functor::type P2; P2 p2; + typedef typename P2::result_type result_type; + type(){} + type(Cartesian_filter_NT const&k):p1(reinterpret_cast(k)),p2(reinterpret_cast(k)){} + //FIXME: if predicate's constructor takes a kernel as argument, how do we translate that? reinterpret_cast is really ugly and possibly unsafe. + +#ifdef CGAL_CXX11 + template result_type operator()(U&&...u)const{ + { + Protect_FPU_rounding p; + try { + typename P1::result_type res=p1(u...); // don't forward as u may be reused + if(is_certain(res)) return get_certain(res); + } catch (Uncertain_conversion_exception) {} + } + return p2(std::forward(u)...); + } +#else + result_type operator()()const{ // does it make sense to have 0 argument? 
+ { + Protect_FPU_rounding p; + try { + typename P1::result_type res=p1(); + if(is_certain(res)) return get_certain(res); + } catch (Uncertain_conversion_exception) {} + } + return p2(); + } +#define CGAL_CODE(Z,N,_) template result_type operator()(BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t))const{ \ + { \ + Protect_FPU_rounding p; \ + try { \ + typename P1::result_type res=p1(BOOST_PP_ENUM_PARAMS(N,t)); \ + if(is_certain(res)) return get_certain(res); \ + } catch (Uncertain_conversion_exception) {} \ + } \ + return p2(BOOST_PP_ENUM_PARAMS(N,t)); \ + } + BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_) +#undef CGAL_CODE + +#endif + }; + }; +}; + +} //namespace CGAL + +#endif // CGAL_KERNEL_D_CARTESIAN_FILTER_NT_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_per_dimension.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_per_dimension.h new file mode 100644 index 00000000..179f7319 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_per_dimension.h @@ -0,0 +1,33 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_KD_CARTESIAN_PER_DIM_H +#define CGAL_KD_CARTESIAN_PER_DIM_H +#include +#include +#include + +// Should probably disappear. + +namespace CGAL { +template +struct Cartesian_per_dimension : public R_ {}; +} + +#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_static_filters.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_static_filters.h new file mode 100644 index 00000000..693e962a --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Cartesian_static_filters.h @@ -0,0 +1,95 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
+// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_KD_CARTESIAN_STATIC_FILTERS_H +#define CGAL_KD_CARTESIAN_STATIC_FILTERS_H +#include +#include +#include // bug, should be included by the next one +#include +#include + +namespace CGAL { +namespace SFA { // static filter adapter +// Note that this would be quite a bit simpler without stateful kernels +template struct Orientation_of_points_2 : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Orientation_of_points_2) + typedef typename Get_type::type Point; + typedef typename Get_type::type result_type; + typedef typename Get_type::type FT; + typedef typename Get_functor::type CC; + typedef typename Get_functor::type Orientation_base; + // TODO: Move this out for easy reuse + struct Adapter { + struct Point_2 { + R_ const&r; CC const&c; Point const& p; + Point_2(R_ const&r_, CC const&c_, Point const&p_):r(r_),c(c_),p(p_){} + // use result_of instead? + typename CC::result_type x()const{return c(p,0);} + typename CC::result_type y()const{return c(p,1);} + }; + struct Vector_2 {}; + struct Circle_2 {}; + struct Orientation_2 { + typedef typename Orientation_of_points_2::result_type result_type; + result_type operator()(Point_2 const&A, Point_2 const&B, Point_2 const&C)const{ + Point const* t[3]={&A.p,&B.p,&C.p}; + return Orientation_base(A.r)(make_transforming_iterator(t+0),make_transforming_iterator(t+3)); + } + }; + }; + template result_type operator()(Iter f, Iter CGAL_assertion_code(e))const{ + CC c(this->kernel()); + Point const& A=*f; + Point const& B=*++f; + Point const& C=*++f; + CGAL_assertion(++f==e); + typedef typename Adapter::Point_2 P; + return typename internal::Static_filters_predicates::Orientation_2()(P(this->kernel(),c,A),P(this->kernel(),c,B),P(this->kernel(),c,C)); + } +}; +} + +template +struct Cartesian_static_filters : public R_ { + CGAL_CONSTEXPR Cartesian_static_filters(){} + CGAL_CONSTEXPR Cartesian_static_filters(int d):R_(d){} +}; + +template +struct Cartesian_static_filters, R_, Derived_> : public R_ { + CGAL_CONSTEXPR Cartesian_static_filters(){} + CGAL_CONSTEXPR Cartesian_static_filters(int d):R_(d){} + typedef Cartesian_static_filters, R_, Derived_> Self; + typedef typename Default::Get::type Derived; + template struct Functor : Inherit_functor {}; + template struct Functor { + typedef + //typename boost::mpl::if_ < + //boost::is_same, + //typename Get_functor::type, + SFA::Orientation_of_points_2 + // >::type + type; + }; +}; + +} + +#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Coaffine.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Coaffine.h new file mode 100644 index 00000000..43015d24 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Coaffine.h @@ -0,0 +1,330 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
+// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_KD_COAFFINE_H +#define CGAL_KD_COAFFINE_H +#include +#include +#include +#include +#include + +namespace CGAL { +namespace CartesianDKernelFunctors { +struct Flat_orientation { + std::vector proj; + std::vector rest; + bool reverse; +}; + +// For debugging purposes +inline std::ostream& operator<< (std::ostream& o, Flat_orientation const& f) { + o << "Proj: "; + for(std::vector::const_iterator i=f.proj.begin(); + i!=f.proj.end(); ++i) + o << *i << ' '; + o << "\nRest: "; + for(std::vector::const_iterator i=f.rest.begin(); + i!=f.rest.end(); ++i) + o << *i << ' '; + o << "\nInv: " << f.reverse; + return o << '\n'; +} + +namespace internal { +namespace coaffine { +template +inline void debug_matrix(std::ostream& o, Mat const&mat) { + for(int i=0;i struct Construct_flat_orientation : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Construct_flat_orientation) + typedef R_ R; + typedef typename Get_type::type FT; + typedef typename Get_type::type Point; + typedef typename Increment_dimension::type Dplusone; + typedef typename R::LA::template Rebind_dimension::Other LA; + typedef typename LA::Square_matrix Matrix; + typedef typename Get_functor::type CCC; + typedef typename Get_functor::type PD; + typedef Flat_orientation result_type; + + // This implementation is going to suck. Maybe we should push the + // functionality into LA. And we should check (in debug mode) that + // the points are affinely independent. + template + result_type operator()(Iter f, Iter e)const{ + Iter f_save = f; + PD pd (this->kernel()); + CCC ccc (this->kernel()); + int dim = pd(*f); + Matrix coord (dim+1, dim+1); // use distance(f,e)? This matrix doesn't need to be square. + int col = 0; + Flat_orientation o; + std::vector& proj=o.proj; + std::vector& rest=o.rest; rest.reserve(dim+1); + for(int i=0; i p; + try + { + // No forward here, the arguments may still be needed + Ares res = ap(c2a(args)...); + if (is_certain(res)) + return get_certain(res); + } + catch (Uncertain_conversion_exception) {} + } + CGAL_BRANCH_PROFILER_BRANCH(tmp); + Protect_FPU_rounding p(CGAL_FE_TONEAREST); + return ep(c2e(std::forward(args))...); + } +#else + +#define CGAL_VAR(Z,N,C) C(a##N) +#define CGAL_CODE(Z,N,_) \ + template \ + result_type \ + operator()(BOOST_PP_ENUM_BINARY_PARAMS(N, A, const& a)) const \ + { \ + CGAL_BRANCH_PROFILER(std::string(" failures/calls to : ") + std::string(CGAL_PRETTY_FUNCTION), tmp); \ + { \ + Protect_FPU_rounding p; \ + try \ + { \ + Ares res = ap(BOOST_PP_ENUM(N,CGAL_VAR,c2a)); \ + if (is_certain(res)) \ + return get_certain(res); \ + } \ + catch (Uncertain_conversion_exception) {} \ + } \ + CGAL_BRANCH_PROFILER_BRANCH(tmp); \ + Protect_FPU_rounding p(CGAL_FE_TONEAREST); \ + return ep(BOOST_PP_ENUM(N,CGAL_VAR,c2e)); \ + } + BOOST_PP_REPEAT_FROM_TO(1, 10, CGAL_CODE, _ ) +#undef CGAL_CODE +#undef CGAL_VAR + +#endif +}; + +} //namespace CGAL + +#endif // CGAL_FILTERED_PREDICATE2_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/KernelD_converter.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/KernelD_converter.h new file mode 100644 index 00000000..a8896976 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/KernelD_converter.h @@ -0,0 +1,199 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software 
Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_KERNEL_D_CARTESIAN_CONVERTER_H +#define CGAL_KERNEL_D_CARTESIAN_CONVERTER_H + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace CGAL { +namespace internal { +// Reverses order, but that shouldn't matter. +template struct Map_taglist_to_typelist : + Map_taglist_to_typelist::type + ::template add::type> +{}; +template struct Map_taglist_to_typelist > : typeset<> {}; +} + +template > +struct Object_converter { + typedef Object result_type; + template + result_type operator()(Object const& o, F const& f) const { + typedef typename List::head H; + if (H const* ptr = object_cast(&o)) + return make_object(f(*ptr)); + else + return Object_converter()(o,f); + } +}; +template<> +struct Object_converter > { + typedef Object result_type; + template + result_type operator()(Object const&,F const&)const { + CGAL_error_msg("Cartesiand_converter is unable to determine what is wrapped in the Object"); + return Object(); + } +}; + + + //TODO: special case when K1==K2 (or they are very close?) +template +class KernelD_converter_ +: public KernelD_converter_ +{ + typedef typename List::head Tag_; + typedef typename List::tail Rest; + typedef KernelD_converter_ Base; + typedef typename Get_type::type K1_Obj; + typedef typename Get_type::type K2_Obj; + typedef typename Get_functor >::type K1_Conv; + typedef KO_converter KOC; + typedef CGAL_BOOSTD is_same no_converter; + typedef typename internal::Map_taglist_to_typelist::type::template contains duplicate; + + // Disable the conversion in some cases: + struct Do_not_use{}; + + // Explicit calls to boost::mpl functions to avoid parenthesis + // warning on some versions of GCC + typedef typename boost::mpl::if_ < + // If Point==Vector, keep only one conversion + boost::mpl::or_, + // For iterator objects, the default is make_transforming_iterator + boost::mpl::bool_<(iterator_tag_traits::is_iterator && no_converter::value)> >, + Do_not_use,K1_Obj>::type argument_type; + //typedef typename KOC::argument_type K1_Obj; + //typedef typename KOC::result_type K2_Obj; + public: + using Base::operator(); // don't use directly, just make it accessible to the next level + K2_Obj helper(K1_Obj const& o,CGAL_BOOSTD true_type)const{ + return KOC()(this->myself().kernel(),this->myself().kernel2(),this->myself(),o); + } + K2_Obj helper(K1_Obj const& o,CGAL_BOOSTD false_type)const{ + return K1_Conv(this->myself().kernel())(this->myself().kernel2(),this->myself(),o); + } + K2_Obj operator()(argument_type const& o)const{ + return helper(o,no_converter()); + } + template struct result:Base::template result{}; + template struct result {typedef K2_Obj type;}; +}; + +template +class KernelD_converter_ > { + public: + struct Do_not_use2{}; + void operator()(Do_not_use2)const{} + template struct result; + Final_& myself(){return *static_cast(this);} + Final_ const& myself()const{return *static_cast(this);} +}; + + +// TODO: use the intersection of Kn::Object_list. 
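// (Illustrative sketch, not part of the patched header.)  KernelD_converter_
// above builds one operator() per geometric object type by recursing over a
// tag list, each level inheriting and re-exposing the overloads of the rest of
// the list.  The skeleton of that technique, independent of any kernel and
// with hypothetical names, looks like this:

struct nil {};                                          // empty type list

template <class Head, class Tail>
struct cons { typedef Head head; typedef Tail tail; };  // list node

template <class List>
struct converter_ : converter_<typename List::tail> {
  using converter_<typename List::tail>::operator();    // expose the other overloads
  // one conversion overload for List::head would be implemented here
  void operator()(typename List::head const&) const {}
};

template <>
struct converter_<nil> {
  struct unused {};                                      // give the base *some* operator()
  void operator()(unused) const {}
};

// converter_<cons<int, cons<double, nil> > > then offers one overload per listed
// type, which is how the kernel converter dispatches on Point, Vector, Segment, ...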
+template::type +//typeset::add::type/*::add::type*/ +> class KernelD_converter + : public Store_kernel, public Store_kernel2, + public KernelD_converter_,K1,K2,List_> +{ + typedef KernelD_converter Self; + typedef Self Final_; + typedef KernelD_converter_ Base; + typedef typename Get_type::type FT1; + typedef typename Get_type::type FT2; + typedef NT_converter NTc; + NTc c; // TODO: compressed storage as this is likely empty and the converter gets passed around (and stored in iterators) + + public: + KernelD_converter(){} + KernelD_converter(K1 const&a,K2 const&b):Store_kernel(a),Store_kernel2(b){} + + // For boost::result_of, used in transforming_iterator + template::value?42:0> struct result:Base::template result{}; + template struct result { + typedef transforming_iterator type; + }; + template struct result{typedef K2 type;}; + template struct result{typedef int type;}; + // Ideally the next 2 would come with Point_tag and Vector_tag, but that's hard... + template struct result{typedef Origin type;}; + template struct result{typedef Null_vector type;}; + template struct result{typedef Object type;}; + template struct result{typedef FT2 type;}; + + using Base::operator(); + typename Store_kernel2::reference2_type operator()(K1 const&)const{return this->kernel2();} + int operator()(int i)const{return i;} + Origin operator()(Origin const&o)const{return o;} + Null_vector operator()(Null_vector const&v)const{return v;} + FT2 operator()(FT1 const&x)const{return c(x);} + //RT2 operator()(typename First_if_different::Type const&x)const{return cr(x);} + + typename Get_type::type const& + operator()(typename Get_type::type const&o)const + { return o; } // Both kernels should have the same, returning a reference should warn if not. + + template + transforming_iterator,It>::type> + operator()(It const& it) const { + return make_transforming_iterator(it,*this); + } + + template + //TODO: use decltype in C++11 instead of result + std::vector::type> + operator()(const std::vector& v) const { + return std::vector::type>(operator()(v.begin()),operator()(v.begin())); + } + + //TODO: convert std::list and other containers? + + Object + operator()(const Object &obj) const + { + typedef typename internal::Map_taglist_to_typelist::type Possibilities; + //TODO: add Empty, vector, etc to the list. + return Object_converter()(obj,*this); + } + + //TODO: convert boost::variant + +}; + +} //namespace CGAL + +#endif // CGAL_KERNEL_D_CARTESIAN_CONVERTER_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_2_interface.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_2_interface.h new file mode 100644 index 00000000..fa30dff0 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_2_interface.h @@ -0,0 +1,104 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
+// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_KD_KERNEL_2_INTERFACE_H +#define CGAL_KD_KERNEL_2_INTERFACE_H + +#include +#include +#include +#include + + +namespace CGAL { +template struct Kernel_2_interface : public Base_ { + typedef Base_ Base; + typedef Kernel_2_interface Kernel; + typedef typename Get_type::type RT; + typedef typename Get_type::type FT; + typedef typename Get_type::type Boolean; + typedef typename Get_type::type Sign; + typedef typename Get_type::type Comparison_result; + typedef typename Get_type::type Orientation; + typedef typename Get_type::type Oriented_side; + typedef typename Get_type::type Bounded_side; + typedef typename Get_type::type Angle; + typedef typename Get_type::type Point_2; + typedef typename Get_type::type Vector_2; + typedef typename Get_type::type Segment_2; + typedef cpp0x::tuple Triangle_2; // triangulation insists... + template struct Help_2p_i { + typedef typename Get_functor::type LT; + typedef typename LT::result_type result_type; + LT lt; + Help_2p_i(Kernel const&k):lt(k){} + result_type operator()(Point_2 const&a, Point_2 const&b) { + return lt(a,b,i); + } + }; + typedef Help_2p_i Less_x_2; + typedef Help_2p_i Less_y_2; + typedef Help_2p_i Compare_x_2; + typedef Help_2p_i Compare_y_2; + struct Compare_distance_2 { + typedef typename Get_functor::type CD; + typedef typename CD::result_type result_type; + CD cd; + Compare_distance_2(Kernel const&k):cd(k){} + result_type operator()(Point_2 const&a, Point_2 const&b, Point_2 const&c) { + return cd(a,b,c); + } + result_type operator()(Point_2 const&a, Point_2 const&b, Point_2 const&c, Point_2 const&d) { + return cd(a,b,c,d); + } + }; + struct Orientation_2 { + typedef typename Get_functor::type O; + typedef typename O::result_type result_type; + O o; + Orientation_2(Kernel const&k):o(k){} + result_type operator()(Point_2 const&a, Point_2 const&b, Point_2 const&c) { + //return o(a,b,c); + Point_2 const* t[3]={&a,&b,&c}; + return o(make_transforming_iterator(t+0),make_transforming_iterator(t+3)); + + } + }; + struct Side_of_oriented_circle_2 { + typedef typename Get_functor::type SOS; + typedef typename SOS::result_type result_type; + SOS sos; + Side_of_oriented_circle_2(Kernel const&k):sos(k){} + result_type operator()(Point_2 const&a, Point_2 const&b, Point_2 const&c, Point_2 const&d) { + //return sos(a,b,c,d); + Point_2 const* t[4]={&a,&b,&c,&d}; + return sos(make_transforming_iterator(t+0),make_transforming_iterator(t+4)); + } + }; + Less_x_2 less_x_2_object()const{ return Less_x_2(*this); } + Less_y_2 less_y_2_object()const{ return Less_y_2(*this); } + Compare_x_2 compare_x_2_object()const{ return Compare_x_2(*this); } + Compare_y_2 compare_y_2_object()const{ return Compare_y_2(*this); } + Compare_distance_2 compare_distance_2_object()const{ return Compare_distance_2(*this); } + Orientation_2 orientation_2_object()const{ return Orientation_2(*this); } + Side_of_oriented_circle_2 side_of_oriented_circle_2_object()const{ return Side_of_oriented_circle_2(*this); } +}; +} + +#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_3_interface.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_3_interface.h new file mode 100644 index 00000000..96076aa8 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_3_interface.h @@ -0,0 +1,102 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU 
Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_KD_KERNEL_3_INTERFACE_H +#define CGAL_KD_KERNEL_3_INTERFACE_H + +#include +#include +#include +#include + + +namespace CGAL { +template struct Kernel_3_interface : public Base_ { + typedef Base_ Base; + typedef Kernel_3_interface Kernel; + typedef typename Get_type::type RT; + typedef typename Get_type::type FT; + typedef typename Get_type::type Boolean; + typedef typename Get_type::type Sign; + typedef typename Get_type::type Comparison_result; + typedef typename Get_type::type Orientation; + typedef typename Get_type::type Oriented_side; + typedef typename Get_type::type Bounded_side; + typedef typename Get_type::type Angle; + typedef typename Get_type::type Point_3; + typedef typename Get_type::type Vector_3; + typedef typename Get_type::type Segment_3; + typedef cpp0x::tuple Triangle_3; // placeholder + typedef cpp0x::tuple Tetrahedron_3; // placeholder + struct Compare_xyz_3 { + typedef typename Get_functor::type CL; + typedef typename CL::result_type result_type; + CL cl; + Compare_xyz_3(Kernel const&k):cl(k){} + result_type operator()(Point_3 const&a, Point_3 const&b) { + return cl(a,b); + } + }; + struct Compare_distance_3 { + typedef typename Get_functor::type CD; + typedef typename CD::result_type result_type; + CD cd; + Compare_distance_3(Kernel const&k):cd(k){} + result_type operator()(Point_3 const&a, Point_3 const&b, Point_3 const&c) { + return cd(a,b,c); + } + result_type operator()(Point_3 const&a, Point_3 const&b, Point_3 const&c, Point_3 const&d) { + return cd(a,b,c,d); + } + }; + struct Orientation_3 { + typedef typename Get_functor::type O; + typedef typename O::result_type result_type; + O o; + Orientation_3(Kernel const&k):o(k){} + result_type operator()(Point_3 const&a, Point_3 const&b, Point_3 const&c, Point_3 const&d) { + //return o(a,b,c,d); + Point_3 const* t[4]={&a,&b,&c,&d}; + return o(make_transforming_iterator(t+0),make_transforming_iterator(t+4)); + + } + }; + struct Side_of_oriented_sphere_3 { + typedef typename Get_functor::type SOS; + typedef typename SOS::result_type result_type; + SOS sos; + Side_of_oriented_sphere_3(Kernel const&k):sos(k){} + result_type operator()(Point_3 const&a, Point_3 const&b, Point_3 const&c, Point_3 const&d, Point_3 const&e) { + //return sos(a,b,c,d); + Point_3 const* t[5]={&a,&b,&c,&d,&e}; + return sos(make_transforming_iterator(t+0),make_transforming_iterator(t+5)); + } + }; + + // I don't have the Coplanar predicates (yet) + + + Compare_xyz_3 compare_xyz_3_object()const{ return Compare_xyz_3(*this); } + Compare_distance_3 compare_distance_3_object()const{ return Compare_distance_3(*this); } + Orientation_3 orientation_3_object()const{ return Orientation_3(*this); } + Side_of_oriented_sphere_3 side_of_oriented_sphere_3_object()const{ return Side_of_oriented_sphere_3(*this); } +}; +} + +#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_d_interface.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_d_interface.h new file mode 100644 index 00000000..dd888005 --- 
/dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_d_interface.h @@ -0,0 +1,298 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_KD_KERNEL_D_INTERFACE_H +#define CGAL_KD_KERNEL_D_INTERFACE_H + +#include +#include +#include +#include + + +namespace CGAL { +template struct Kernel_d_interface : public Base_ { + CGAL_CONSTEXPR Kernel_d_interface(){} + CGAL_CONSTEXPR Kernel_d_interface(int d):Base_(d){} + + typedef Base_ Base; + typedef Kernel_d_interface Kernel; + typedef Base_ R_; // for the macros + typedef typename Get_type::type RT; + typedef typename Get_type::type FT; + typedef typename Get_type::type Boolean; + typedef typename Get_type::type Sign; + typedef typename Get_type::type Comparison_result; + typedef typename Get_type::type Orientation; + typedef typename Get_type::type Oriented_side; + typedef typename Get_type::type Bounded_side; + typedef typename Get_type::type Angle; + typedef typename Get_type::type Flat_orientation_d; + typedef typename Get_type::type Point_d; + typedef typename Get_type::type Vector_d; + typedef typename Get_type::type Segment_d; + typedef typename Get_type::type Sphere_d; + typedef typename Get_type::type Hyperplane_d; + typedef Vector_d Direction_d; + typedef typename Get_type::type Line_d; + typedef typename Get_type::type Ray_d; + typedef typename Get_type::type Iso_box_d; + typedef typename Get_type::type Aff_transformation_d; + typedef typename Get_type::type Weighted_point_d; + typedef typename Get_functor::type Compute_coordinate_d; + typedef typename Get_functor::type Compare_lexicographically_d; + typedef typename Get_functor::type Equal_d; + typedef typename Get_functor::type Less_lexicographically_d; + typedef typename Get_functor::type Less_or_equal_lexicographically_d; + // FIXME: and vectors? 
+ typedef typename Get_functor::type Orientation_d; + typedef typename Get_functor::type Less_coordinate_d; + typedef typename Get_functor::type Point_dimension_d; + typedef typename Get_functor::type Side_of_oriented_sphere_d; + typedef typename Get_functor::type Power_side_of_power_sphere_d; + typedef typename Get_functor::type Power_center_d; + typedef typename Get_functor::type Power_distance_d; + typedef typename Get_functor::type Contained_in_affine_hull_d; + typedef typename Get_functor::type Construct_flat_orientation_d; + typedef typename Get_functor::type In_flat_orientation_d; + typedef typename Get_functor::type In_flat_side_of_oriented_sphere_d; + typedef typename Get_functor::type In_flat_power_side_of_power_sphere_d; + typedef typename Get_functor::type Point_to_vector_d; + typedef typename Get_functor::type Vector_to_point_d; + typedef typename Get_functor::type Translated_point_d; + typedef typename Get_functor::type Scaled_vector_d; + typedef typename Get_functor::type Difference_of_vectors_d; + typedef typename Get_functor::type Difference_of_points_d; + //typedef typename Get_functor >::type Construct_point_d; + struct Construct_point_d : private Store_kernel { + typedef Kernel R_; // for the macro + CGAL_FUNCTOR_INIT_STORE(Construct_point_d) + typedef typename Get_functor >::type CP; + typedef Point_d result_type; + Point_d operator()(Weighted_point_d const&wp)const{ + return typename Get_functor::type(this->kernel())(wp); + } +#ifdef CGAL_CXX11 + Point_d operator()(Weighted_point_d &wp)const{ + return typename Get_functor::type(this->kernel())(wp); + } + Point_d operator()(Weighted_point_d &&wp)const{ + return typename Get_functor::type(this->kernel())(std::move(wp)); + } + Point_d operator()(Weighted_point_d const&&wp)const{ + return typename Get_functor::type(this->kernel())(std::move(wp)); + } + template +# if __cplusplus >= 201402L + decltype(auto) +# else + Point_d +# endif + operator()(T&&...t)const{ + return CP(this->kernel())(std::forward(t)...); + //return CP(this->kernel())(t...); + } +#else +# define CGAL_CODE(Z,N,_) template \ + Point_d operator()(BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t))const{ \ + return CP(this->kernel())(BOOST_PP_ENUM_PARAMS(N,t)); \ + } + BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_) +# undef CGAL_CODE + Point_d operator()()const{ \ + return CP(this->kernel())(); \ + } +#endif + }; + typedef typename Get_functor >::type Construct_vector_d; + typedef typename Get_functor >::type Construct_segment_d; + typedef typename Get_functor >::type Construct_sphere_d; + typedef typename Get_functor >::type Construct_hyperplane_d; + typedef Construct_vector_d Construct_direction_d; + typedef typename Get_functor >::type Construct_line_d; + typedef typename Get_functor >::type Construct_ray_d; + typedef typename Get_functor >::type Construct_iso_box_d; + typedef typename Get_functor >::type Construct_aff_transformation_d; + typedef typename Get_functor >::type Construct_weighted_point_d; + typedef typename Get_functor::type Midpoint_d; + struct Component_accessor_d : private Store_kernel { + typedef Kernel R_; // for the macro + CGAL_FUNCTOR_INIT_STORE(Component_accessor_d) + int dimension(Point_d const&p){ + return this->kernel().point_dimension_d_object()(p); + } + FT cartesian(Point_d const&p, int i){ + return this->kernel().compute_coordinate_d_object()(p,i); + } + RT homogeneous(Point_d const&p, int i){ + if (i == dimension(p)) + return 1; + return cartesian(p, i); + } + }; + struct Construct_cartesian_const_iterator_d : private Store_kernel { + 
CGAL_FUNCTOR_INIT_STORE(Construct_cartesian_const_iterator_d) + typedef typename Get_functor >::type CPI; + typedef typename Get_functor >::type CVI; + // FIXME: The following sometimes breaks compilation. The typedef below forces instantiation of this, which forces Point_d, which itself (in the wrapper) needs the derived kernel to tell it what the base kernel is, and that's a cycle. The exact circumstances are not clear, g++ and clang++ are ok in both C++03 and C++11, it is only clang in C++11 without CGAL_CXX11 that breaks. For now, rely on result_type. + //typedef typename CGAL::decay::type>::type result_type; + typedef typename CGAL::decay::type result_type; + // Kernel_d requires a common iterator type for points and vectors + // TODO: provide this mixed functor in preKernel? + //CGAL_static_assertion((boost::is_same::type>::type, result_type>::value)); + CGAL_static_assertion((boost::is_same::type, result_type>::value)); + template + result_type operator()(Point_d const&p, Tag_ t)const{ + return CPI(this->kernel())(p,t); + } + template + result_type operator()(typename First_if_different::Type const&v, Tag_ t)const{ + return CVI(this->kernel())(v,t); + } + + template + result_type operator()(Obj const&o)const{ + return operator()(o, Begin_tag()); + } + result_type operator()(Point_d const&p, int)const{ + return operator()(p, End_tag()); + } + result_type operator()(typename First_if_different::Type const&v, int)const{ + return operator()(v, End_tag()); + } + }; + struct Compute_squared_radius_d : private Store_kernel { + typedef Kernel R_; // for the macro + CGAL_FUNCTOR_INIT_STORE(Compute_squared_radius_d) + typedef FT result_type; + template FT operator()(CGAL_FORWARDABLE(S) s)const{ + return typename Get_functor::type(this->kernel())(CGAL_FORWARD(S,s)); + } + template FT operator()(I b, I e)const{ + return typename Get_functor::type(this->kernel())(b,e); + } + }; + typedef typename Construct_cartesian_const_iterator_d::result_type Cartesian_const_iterator_d; + typedef typename Get_functor::type Squared_distance_d; + typedef typename Get_functor::type Squared_length_d; + typedef typename Get_functor::type Scalar_product_d; + typedef typename Get_functor::type Affine_rank_d; + typedef typename Get_functor::type Affinely_independent_d; + typedef typename Get_functor::type Contained_in_linear_hull_d; + typedef typename Get_functor::type Contained_in_simplex_d; + typedef typename Get_functor::type Has_on_positive_side_d; + typedef typename Get_functor::type Linear_rank_d; + typedef typename Get_functor::type Linearly_independent_d; + typedef typename Get_functor::type Oriented_side_d; + typedef typename Get_functor::type Side_of_bounded_sphere_d; + + typedef typename Get_functor::type Center_of_sphere_d; + typedef Center_of_sphere_d Construct_center_d; // RangeSearchTraits + typedef typename Get_functor::type Construct_circumcenter_d; + typedef typename Get_functor::type Value_at_d; + typedef typename Get_functor::type Point_of_sphere_d; + typedef typename Get_functor::type Orthogonal_vector_d; + typedef typename Get_functor::type Linear_base_d; + typedef typename Get_functor::type Construct_min_vertex_d; + typedef typename Get_functor::type Construct_max_vertex_d; + + typedef typename Get_functor::type Compute_weight_d; + typedef typename Get_functor::type Point_drop_weight_d; + + //TODO: + //typedef ??? 
Intersect_d; + + + Compute_coordinate_d compute_coordinate_d_object()const{ return Compute_coordinate_d(*this); } + Has_on_positive_side_d has_on_positive_side_d_object()const{ return Has_on_positive_side_d(*this); } + Compare_lexicographically_d compare_lexicographically_d_object()const{ return Compare_lexicographically_d(*this); } + Equal_d equal_d_object()const{ return Equal_d(*this); } + Less_lexicographically_d less_lexicographically_d_object()const{ return Less_lexicographically_d(*this); } + Less_or_equal_lexicographically_d less_or_equal_lexicographically_d_object()const{ return Less_or_equal_lexicographically_d(*this); } + Less_coordinate_d less_coordinate_d_object()const{ return Less_coordinate_d(*this); } + Orientation_d orientation_d_object()const{ return Orientation_d(*this); } + Oriented_side_d oriented_side_d_object()const{ return Oriented_side_d(*this); } + Point_dimension_d point_dimension_d_object()const{ return Point_dimension_d(*this); } + Point_of_sphere_d point_of_sphere_d_object()const{ return Point_of_sphere_d(*this); } + Side_of_oriented_sphere_d side_of_oriented_sphere_d_object()const{ return Side_of_oriented_sphere_d(*this); } + Power_side_of_power_sphere_d power_side_of_power_sphere_d_object()const{ return Power_side_of_power_sphere_d(*this); } + Power_center_d power_center_d_object()const{ return Power_center_d(*this); } + Power_distance_d power_distance_d_object()const{ return Power_distance_d(*this); } + Side_of_bounded_sphere_d side_of_bounded_sphere_d_object()const{ return Side_of_bounded_sphere_d(*this); } + Contained_in_affine_hull_d contained_in_affine_hull_d_object()const{ return Contained_in_affine_hull_d(*this); } + Contained_in_linear_hull_d contained_in_linear_hull_d_object()const{ return Contained_in_linear_hull_d(*this); } + Contained_in_simplex_d contained_in_simplex_d_object()const{ return Contained_in_simplex_d(*this); } + Construct_flat_orientation_d construct_flat_orientation_d_object()const{ return Construct_flat_orientation_d(*this); } + In_flat_orientation_d in_flat_orientation_d_object()const{ return In_flat_orientation_d(*this); } + In_flat_side_of_oriented_sphere_d in_flat_side_of_oriented_sphere_d_object()const{ return In_flat_side_of_oriented_sphere_d(*this); } + In_flat_power_side_of_power_sphere_d in_flat_power_side_of_power_sphere_d_object()const{ return In_flat_power_side_of_power_sphere_d(*this); } + Point_to_vector_d point_to_vector_d_object()const{ return Point_to_vector_d(*this); } + Vector_to_point_d vector_to_point_d_object()const{ return Vector_to_point_d(*this); } + Translated_point_d translated_point_d_object()const{ return Translated_point_d(*this); } + Scaled_vector_d scaled_vector_d_object()const{ return Scaled_vector_d(*this); } + Difference_of_vectors_d difference_of_vectors_d_object()const{ return Difference_of_vectors_d(*this); } + Difference_of_points_d difference_of_points_d_object()const{ return Difference_of_points_d(*this); } + Affine_rank_d affine_rank_d_object()const{ return Affine_rank_d(*this); } + Affinely_independent_d affinely_independent_d_object()const{ return Affinely_independent_d(*this); } + Linear_base_d linear_base_d_object()const{ return Linear_base_d(*this); } + Linear_rank_d linear_rank_d_object()const{ return Linear_rank_d(*this); } + Linearly_independent_d linearly_independent_d_object()const{ return Linearly_independent_d(*this); } + Midpoint_d midpoint_d_object()const{ return Midpoint_d(*this); } + Value_at_d value_at_d_object()const{ return Value_at_d(*this); } + /// Intersect_d 
intersect_d_object()const{ return Intersect_d(*this); } + Component_accessor_d component_accessor_d_object()const{ return Component_accessor_d(*this); } + Orthogonal_vector_d orthogonal_vector_d_object()const{ return Orthogonal_vector_d(*this); } + Construct_cartesian_const_iterator_d construct_cartesian_const_iterator_d_object()const{ return Construct_cartesian_const_iterator_d(*this); } + Construct_point_d construct_point_d_object()const{ return Construct_point_d(*this); } + Construct_vector_d construct_vector_d_object()const{ return Construct_vector_d(*this); } + Construct_segment_d construct_segment_d_object()const{ return Construct_segment_d(*this); } + Construct_sphere_d construct_sphere_d_object()const{ return Construct_sphere_d(*this); } + Construct_hyperplane_d construct_hyperplane_d_object()const{ return Construct_hyperplane_d(*this); } + Compute_squared_radius_d compute_squared_radius_d_object()const{ return Compute_squared_radius_d(*this); } + Squared_distance_d squared_distance_d_object()const{ return Squared_distance_d(*this); } + Squared_length_d squared_length_d_object()const{ return Squared_length_d(*this); } + Scalar_product_d scalar_product_d_object()const{ return Scalar_product_d(*this); } + Center_of_sphere_d center_of_sphere_d_object()const{ return Center_of_sphere_d(*this); } + Construct_circumcenter_d construct_circumcenter_d_object()const{ return Construct_circumcenter_d(*this); } + Construct_direction_d construct_direction_d_object()const{ return Construct_direction_d(*this); } + Construct_line_d construct_line_d_object()const{ return Construct_line_d(*this); } + Construct_ray_d construct_ray_d_object()const{ return Construct_ray_d(*this); } + Construct_iso_box_d construct_iso_box_d_object()const{ return Construct_iso_box_d(*this); } + Construct_aff_transformation_d construct_aff_transformation_d_object()const{ return Construct_aff_transformation_d(*this); } + Construct_min_vertex_d construct_min_vertex_d_object()const{ return Construct_min_vertex_d(*this); } + Construct_max_vertex_d construct_max_vertex_d_object()const{ return Construct_max_vertex_d(*this); } + Construct_weighted_point_d construct_weighted_point_d_object()const{ return Construct_weighted_point_d(*this); } + + Compute_weight_d compute_weight_d_object()const{ return Compute_weight_d(*this); } + Point_drop_weight_d point_drop_weight_d_object()const{ return Point_drop_weight_d(*this); } + + // Dummies for those required functors missing a concept. 
+ typedef Null_functor Position_on_line_d; + Position_on_line_d position_on_line_d_object()const{return Null_functor();} + typedef Null_functor Barycentric_coordinates_d; + Barycentric_coordinates_d barycentric_coordinates_d_object()const{return Null_functor();} + + /* Not provided because they don't make sense here: + Lift_to_paraboloid_d + Project_along_d_axis_d + */ +}; +} + +#endif // CGAL_KD_KERNEL_D_INTERFACE_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_object_converter.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_object_converter.h new file mode 100644 index 00000000..99918ed2 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Kernel_object_converter.h @@ -0,0 +1,134 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_KD_KO_CONVERTER_H +#define CGAL_KD_KO_CONVERTER_H +#include +#include +#include // First_if_different +#include +namespace CGAL { +template struct KO_converter; +//TODO: It would probably be better if this was a Misc Functor in K1. +// This way K1 could chose how it wants to present its points (sparse +// iterator?) and derived classes would inherit it. + +namespace internal { +template +struct Point_converter_help { + typedef typename Get_type::type argument_type; + typedef typename Get_type::type result_type; + template + result_type operator()(K1 const& k1, K2 const& k2, C const& conv, argument_type const& p) const { + typename Get_functor >::type i(k1); + typename Get_functor >::type cp(k2); + return cp(conv(i(p,Begin_tag())),conv(i(p,End_tag()))); + } +}; +#ifdef CGAL_CXX11 +// This doesn't seem so useful, the compiler should be able to handle +// the iterators just as efficiently. 
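// (Illustrative sketch, not part of the patched header.)  The specialisation
// below converts a point of statically known dimension coordinate by
// coordinate, expanding an index pack instead of going through the begin/end
// iterator pair of the generic case.  Reduced to standard C++ (std::array
// standing in for the kernel point types, C++14's std::index_sequence standing
// in for the header's N_increasing_indices, NTConv for the number-type
// converter), the idea is:

#include <array>
#include <cstddef>
#include <utility>

template <class To, class From, std::size_t N, class NTConv, std::size_t... I>
std::array<To, N>
convert_point(std::array<From, N> const& p, NTConv conv, std::index_sequence<I...>) {
  return std::array<To, N>{ { conv(p[I])... } };   // one converted coordinate per index
}

// e.g. convert_point<double>(std::array<float, 3>{{1.f, 2.f, 3.f}},
//                            [](float x) { return double(x); },
//                            std::make_index_sequence<3>());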
+template +struct Point_converter_help,K1,K2> { + typedef typename Get_type::type argument_type; + typedef typename Get_type::type result_type; + template + result_type help(Indices, K1 const& k1, K2 const& k2, C const& conv, argument_type const& p) const { + typename Get_functor::type cc(k1); + typename Get_functor >::type cp(k2); + return cp(conv(cc(p,I))...); + } + template + result_type operator()(K1 const& k1, K2 const& k2, C const& conv, argument_type const& p) const { + return help(typename N_increasing_indices::type(),k1,k2,conv,p); + } +}; +#endif +} +template struct KO_converter +: internal::Point_converter_help +{}; + +template struct KO_converter{ + typedef typename Get_type::type K1_Vector; + + // Disabling is now done in KernelD_converter + // // can't use vector without at least a placeholder point because of this + // typedef typename K1:: Point K1_Point; + // typedef typename First_if_different::Type argument_type; + + typedef K1_Vector argument_type; + typedef typename Get_type::type result_type; + template + result_type operator()(K1 const& k1, K2 const& k2, C const& conv, argument_type const& v) const { + typename Get_functor >::type i(k1); + typename Get_functor >::type cp(k2); + return cp(conv(i(v,Begin_tag())),conv(i(v,End_tag()))); + } +}; + +template struct KO_converter{ + typedef typename Get_type::type argument_type; + typedef typename Get_type::type result_type; + template + result_type operator()(K1 const& k1, K2 const& k2, C const& conv, argument_type const& s) const { + typename Get_functor::type f(k1); + typename Get_functor >::type cs(k2); + return cs(conv(f(s,0)),conv(f(s,1))); + } +}; + +template struct KO_converter{ + typedef typename Get_type::type argument_type; + typedef typename Get_type::type result_type; + template + result_type operator()(K1 const& k1, K2 const& k2, C const& conv, argument_type const& h) const { + typename Get_functor::type ov(k1); + typename Get_functor::type ht(k1); + typename Get_functor >::type ch(k2); + return ch(conv(ov(h)),conv(ht(h))); + } +}; + +template struct KO_converter{ + typedef typename Get_type::type argument_type; + typedef typename Get_type::type result_type; + template + result_type operator()(K1 const& k1, K2 const& k2, C const& conv, argument_type const& s) const { + typename Get_functor::type cos(k1); + typename Get_functor::type sr(k1); + typename Get_functor >::type cs(k2); + return cs(conv(cos(s)),conv(sr(s))); + } +}; + +template struct KO_converter{ + typedef typename Get_type::type argument_type; + typedef typename Get_type::type result_type; + template + result_type operator()(K1 const& k1, K2 const& k2, C const& conv, argument_type const& s) const { + typename Get_functor::type pdw(k1); + typename Get_functor::type pw(k1); + typename Get_functor >::type cwp(k2); + return cwp(conv(pdw(s)),conv(pw(s))); + } +}; + +} +#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/LA_eigen/LA.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/LA_eigen/LA.h new file mode 100644 index 00000000..ddbdc37b --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/LA_eigen/LA.h @@ -0,0 +1,175 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. 
+// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_LA_EIGEN_H +#define CGAL_LA_EIGEN_H +#include +#ifndef CGAL_EIGEN3_ENABLED +#error Requires Eigen +#endif +#include +#include +#include +#include +#include +#include + +namespace CGAL { + +//FIXME: where could we use Matrix_base instead of Matrix? +// Dim_ real dimension +// Max_dim_ upper bound on the dimension +template struct LA_eigen { + typedef NT_ NT; + typedef Dim_ Dimension; + typedef Max_dim_ Max_dimension; + enum { dimension = Eigen_dimension::value }; + enum { max_dimension = Eigen_dimension::value }; + template< class D2, class D3=D2 > + struct Rebind_dimension { + typedef LA_eigen< NT, D2, D3 > Other; + }; + template struct Property : boost::false_type {}; + template struct Property : boost::true_type {}; + template struct Property : boost::true_type {}; + template struct Property : boost::true_type {}; + + typedef Eigen::Matrix::value,1,Eigen::ColMajor|Eigen::AutoAlign,Eigen_dimension::value,1> Vector; + typedef Eigen::Matrix Dynamic_vector; + typedef Construct_eigen Construct_vector; + +#if (EIGEN_WORLD_VERSION>=3) + typedef NT const* Vector_const_iterator; +#else + typedef Iterator_from_indices Vector_const_iterator; +#endif + + templatestatic Vector_const_iterator vector_begin(Vec_ const&a){ +#if (EIGEN_WORLD_VERSION>=3) + return &a[0]; +#else + return Vector_const_iterator(a,0); +#endif + } + + templatestatic Vector_const_iterator vector_end(Vec_ const&a){ +#if (EIGEN_WORLD_VERSION>=3) + // FIXME: Isn't that dangerous if a is an expression and not a concrete vector? + return &a[0]+a.size(); +#else + return Vector_const_iterator(a,a.size()); +#endif + } + + typedef Eigen::Matrix Square_matrix; + typedef Eigen::Matrix Dynamic_matrix; + //TODO: don't pass on the values of Max_* for an expensive NT + // typedef ... Constructor + // typedef ... Accessor +#if 0 + private: + template class Canonicalize_vector { + typedef typename Dimension_eigen::type S1; + typedef typename Dimension_eigen::type S2; + public: + typedef typename Vector::type type; + }; + public: +#endif + + templatestatic int size_of_vector(Vec_ const&v){ + return (int)v.size(); + } + + templatestatic NT dot_product(Vec_ const&a,Vec_ const&b){ + return a.dot(b); + } + + template static int rows(Vec_ const&v) { + return (int)v.rows(); + } + template static int columns(Vec_ const&v) { + return (int)v.cols(); + } + + template static NT determinant(Mat_ const&m,bool=false){ + return m.determinant(); + } + + template static typename + Same_uncertainty_nt::type + sign_of_determinant(Mat_ const&m,bool=false) + { + return CGAL::sign(m.determinant()); + } + + template static int rank(Mat_ const&m){ + // return m.rank(); + // This one uses sqrt so cannot be used with Gmpq + // TODO: use different algo for different NT? 
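// FullPivLU is used here because it needs only field operations (no sqrt), so
// it also works with exact rational number types such as Gmpq, unlike the
// commented-out ColPivHouseholderQR.  A standalone sketch with a concrete
// matrix, assuming only that Eigen 3 is available (the names below are
// illustrative, not GUDHI API):
#include <Eigen/Dense>
#include <iostream>

inline void fullpivlu_rank_and_solve_demo() {
  Eigen::MatrixXd m(3, 3);
  m << 1, 2, 3,
       4, 5, 6,
       7, 8, 9;                                  // rank-deficient: rank is 2
  Eigen::FullPivLU<Eigen::MatrixXd> lu(m);
  std::cout << "rank = " << lu.rank() << "\n";   // prints 2

  Eigen::VectorXd x0(3);
  x0 << 1, 0, 1;
  Eigen::VectorXd b = m * x0;                    // guaranteed to be solvable
  Eigen::VectorXd a = lu.solve(b);               // same pattern as solve() below
  std::cout << "check: " << b.isApprox(m * a) << "\n";   // solve_and_check() idea
}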
+ // Eigen::ColPivHouseholderQR decomp(m); + Eigen::FullPivLU decomp(m); + // decomp.setThreshold(0); + return static_cast(decomp.rank()); + } + + // m*a==b + template + static void solve(DV&a, DM const&m, V const& b){ + //a = m.colPivHouseholderQr().solve(b); + a = m.fullPivLu().solve(b); + } + template + static bool solve_and_check(DV&a, DM const&m, V const& b){ + //a = m.colPivHouseholderQr().solve(b); + a = m.fullPivLu().solve(b); + return b.isApprox(m*a); + } + + static Dynamic_matrix basis(Dynamic_matrix const&m){ + return m.fullPivLu().image(m); + } + + template static Vector homogeneous_add(Vec1 const&a,Vec2 const&b){ + //TODO: use compile-time size when available + int d=a.size(); + Vector v(d); + v << b[d-1]*a.topRows(d-1)+a[d-1]*b.topRows(d-1), a[d-1]*b[d-1]; + return v; + } + + template static Vector homogeneous_sub(Vec1 const&a,Vec2 const&b){ + int d=a.size(); + Vector v(d); + v << b[d-1]*a.topRows(d-1)-a[d-1]*b.topRows(d-1), a[d-1]*b[d-1]; + return v; + } + + template static std::pair homogeneous_dot_product(Vec1 const&a,Vec2 const&b){ + int d=a.size(); + return make_pair(a.topRows(d-1).dot(b.topRows(d-1)), a[d-1]*b[d-1]); + } + +}; +} +#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/LA_eigen/constructors.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/LA_eigen/constructors.h new file mode 100644 index 00000000..3636996f --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/LA_eigen/constructors.h @@ -0,0 +1,162 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_LA_EIGEN_CONSTRUCTORS_H +#define CGAL_LA_EIGEN_CONSTRUCTORS_H +#include + +#if defined(BOOST_MSVC) +# pragma warning(push) +# pragma warning(disable:4003) // not enough actual parameters for macro 'BOOST_PP_EXPAND_I' + // http://lists.boost.org/boost-users/2014/11/83291.php +#endif + +#ifndef CGAL_EIGEN3_ENABLED +#error Requires Eigen +#endif +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace CGAL { + template struct Construct_eigen { + typedef Vector_ result_type; + typedef typename Vector_::Scalar NT; + + private: + static void check_dim(int CGAL_assertion_code(d)){ + CGAL_assertion_code(int m = result_type::MaxSizeAtCompileTime;) + CGAL_assertion((m == Eigen::Dynamic) || (d <= m)); + } + public: + + struct Dimension { + // Initialize with NaN if possible? 
+ result_type operator()(int d) const { + check_dim(d); + return result_type(d); + } + }; + + struct Iterator { + template + result_type operator()(int d,Iter const& f,Iter const& e) const { + check_dim(d); + CGAL_assertion(d==std::distance(f,e)); + result_type a(d); + // TODO: check the right way to do this + std::copy(f,e,&a[0]); + return a; + } + }; + +#if 0 + struct Iterator_add_one { + template + result_type operator()(int d,Iter const& f,Iter const& e) const { + check_dim(d); + CGAL_assertion(d==std::distance(f,e)+1); + result_type a(d); + std::copy(f,e,&a[0]); + a[d-1]=1; + return a; + } + }; +#endif + + struct Iterator_and_last { + template + result_type operator()(int d,Iter const& f,Iter const& e,CGAL_FORWARDABLE(T) t) const { + check_dim(d); + CGAL_assertion(d==std::distance(f,e)+1); + result_type a(d); + std::copy(f,e,&a[0]); + a[d-1]=CGAL_FORWARD(T,t); + return a; + } + }; + +#ifdef CGAL_CXX11 + struct Initializer_list { + // Fix T==NT? + template + result_type operator()(std::initializer_list l) const { + return Iterator()(l.size(),l.begin(),l.end()); + } + }; +#endif + + struct Values { +#ifdef CGAL_CXX11 + // TODO avoid going through Initializer_list which may cause extra copies. Possibly use forward_as_tuple. + template + result_type operator()(U&&...u) const { + check_dim(sizeof...(U)); // TODO: use static_assert + return Initializer_list()({forward_safe(u)...}); + } +#else + +#define CGAL_CODE(Z,N,_) result_type operator()(BOOST_PP_ENUM_PARAMS(N,NT const& t)) const { \ + check_dim(N); \ + result_type a(N); \ + a << BOOST_PP_ENUM_PARAMS(N,t); \ + return a; \ +} +BOOST_PP_REPEAT_FROM_TO(1, 11, CGAL_CODE, _ ) +#undef CGAL_CODE + +#endif + }; + + struct Values_divide { +#ifdef CGAL_CXX11 + template + result_type operator()(H const&h,U&&...u) const { + check_dim(sizeof...(U)); // TODO: use static_assert + return Initializer_list()({Rational_traits().make_rational(std::forward(u),h)...}); + } +#else + +#define CGAL_VAR(Z,N,_) ( Rational_traits().make_rational( t##N ,h) ) +#define CGAL_CODE(Z,N,_) template result_type \ + operator()(H const&h, BOOST_PP_ENUM_PARAMS(N,NT const& t)) const { \ + check_dim(N); \ + result_type a(N); \ + a << BOOST_PP_ENUM(N,CGAL_VAR,); \ + return a; \ + } + BOOST_PP_REPEAT_FROM_TO(1, 11, CGAL_CODE, _ ) +#undef CGAL_CODE +#undef CGAL_VAR + +#endif + }; + }; +} +#if defined(BOOST_MSVC) +# pragma warning(pop) +#endif + +#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Lazy_cartesian.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Lazy_cartesian.h new file mode 100644 index 00000000..9ecc2b63 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Lazy_cartesian.h @@ -0,0 +1,188 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
+// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_KERNEL_D_LAZY_CARTESIAN_H +#define CGAL_KERNEL_D_LAZY_CARTESIAN_H + +#include +#include +#include +#include +#include +#include +#include + +namespace CGAL { + +template +struct Nth_iterator_element : private Store_kernel { + Nth_iterator_element(){} + Nth_iterator_element(K const&k):Store_kernel(k){} + typedef typename Get_type::value_tag>::type result_type; + template result_type operator()(CGAL_FORWARDABLE(U) u, int i) const { + typename Get_functor >::type ci(this->kernel()); + return *cpp0x::next(ci(CGAL_FORWARD(U,u),Begin_tag()),i); + } +}; + //typedef typename Functor::nth_element>::type nth_elem; +template::has_nth_element> +struct Select_nth_element_functor { + typedef Nth_iterator_element type; +}; +template +struct Select_nth_element_functor : + Get_functor::nth_element> {}; + +namespace internal { + template + struct Lazy_construction_maybe_nt { + typedef Lazy_construction type; + }; + template + struct Lazy_construction_maybe_nt { + typedef Lazy_construction_nt type; + }; +} + +template +struct Lazy_cartesian_types +{ + typedef typename typeset_intersection< + typename AK_::Object_list, + typename EK_::Object_list + >::type Object_list; + + typedef typename typeset_intersection< + typename AK_::Iterator_list, + typename EK_::Iterator_list + >::type Iterator_list; + + template ::type> struct Type {}; + template struct Type { + typedef Lazy< + typename Get_type::type, + typename Get_type::type, + typename Get_type::type, + E2A_> type; + }; + template struct Type { + typedef CGAL::Lazy_exact_nt::type> type; + }; + + template struct Iterator { + typedef typename iterator_tag_traits::value_tag Vt; + typedef typename Type::type V; + typedef typename Select_nth_element_functor::type AF; + typedef typename Select_nth_element_functor::type EF; + + typedef typename internal::Lazy_construction_maybe_nt< + Kernel_, AF, EF, is_NT_tag::value + >::type nth_elem; + + typedef Iterator_from_indices< + const typename Type::container>::type, + const V, V, nth_elem + > type; + }; +}; + +template +struct Lazy_cartesian : Dimension_base, + Lazy_cartesian_types > +{ + //CGAL_CONSTEXPR Lazy_cartesian(){} + //CGAL_CONSTEXPR Lazy_cartesian(int d):Base_(d){} + + //TODO: Do we want to store an AK and an EK? Or just references? + //FIXME: references would be better I guess. + //TODO: In any case, make sure that we don't end up storing this kernel for + //nothing (it is not empty but references empty kernels or something) + AK_ ak; EK_ ek; + AK_ const& approximate_kernel()const{return ak;} + EK_ const& exact_kernel()const{return ek;} + + typedef Lazy_cartesian Self; + typedef Lazy_cartesian_types Base; + //typedef typename Default::Get::type Kernel; + typedef Self Kernel; + typedef AK_ Approximate_kernel; + typedef EK_ Exact_kernel; + typedef E2A_ E2A; + typedef Approx_converter C2A; + typedef Exact_converter C2E; + + typedef typename Exact_kernel::Rep_tag Rep_tag; + typedef typename Exact_kernel::Kernel_tag Kernel_tag; + typedef typename Exact_kernel::Default_ambient_dimension Default_ambient_dimension; + typedef typename Exact_kernel::Max_ambient_dimension Max_ambient_dimension; + //typedef typename Exact_kernel::Flat_orientation Flat_orientation; + // Check that Approximate_kernel agrees with all that... + + template::type> struct Functor { + typedef Null_functor type; + }; + //FIXME: what do we do with D here? 
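// The specialisations that follow dispatch on the functor category: predicates
// get wrapped in a filtered predicate (approximate first, exact as fallback)
// and constructions become lazy.  A minimal standalone sketch of the filtering
// idea only, the two-stage evaluation; none of these names are CGAL's:
template <class ApproxPred, class ExactPred>
struct Filtered_sign_sketch {
  ApproxPred approx;   // fast, e.g. interval arithmetic; may be inconclusive
  ExactPred  exact;    // slow, always correct
  template <class Arg>
  int operator()(const Arg& a) const {
    bool certain = false;
    int s = approx(a, certain);   // assumption: reports certainty via a flag
    if (certain) return s;
    return exact(a);              // fall back to the exact computation
  }
};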
+ template struct Functor { + typedef typename Get_functor::type FA; + typedef typename Get_functor::type FE; + typedef Filtered_predicate2 type; + }; + template struct Functor { + typedef typename Get_functor::type FA; + typedef typename Get_functor::type FE; + typedef Lazy_construction_nt type; + }; + template struct Functor { + typedef typename Get_functor::type FA; + typedef typename Get_functor::type FE; + typedef Lazy_construction type; + }; + + //typedef typename Iterator::type Point_cartesian_const_iterator; + //typedef typename Iterator::type Vector_cartesian_const_iterator; + + template + struct Construct_iter : private Store_kernel { + Construct_iter(){} + Construct_iter(Kernel const&k):Store_kernel(k){} + //FIXME: pass the kernel to the functor in the iterator + typedef U result_type; + template + result_type operator()(T const& t,Begin_tag)const{ + return result_type(t,0,this->kernel()); + } + template + result_type operator()(T const& t,End_tag)const{ + return result_type(t,Self().dimension(),this->kernel()); + } + }; + template struct Functor { + typedef Construct_iter::type>::type> type; + }; + + + //TODO: what about other functors of the Misc category? + // for Point_dimension, we should apply it to the approximate point + // for printing, we should??? just not do printing this way? +}; + + +} //namespace CGAL + +#endif // CGAL_KERNEL_D_LAZY_CARTESIAN_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Aff_transformation.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Aff_transformation.h new file mode 100644 index 00000000..6d9f070f --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Aff_transformation.h @@ -0,0 +1,59 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_KD_TYPE_AFF_TRANSFORMATION_H +#define CGAL_KD_TYPE_AFF_TRANSFORMATION_H +#include +#include +#include + +// Dummy, that's all the Kernel_d concept requires, so a useful class will wait. 
+ +namespace CGAL { +template +struct Aff_transformation { + typedef R_ R; +}; +namespace CartesianDKernelFunctors { +template struct Construct_aff_transformation { + CGAL_FUNCTOR_INIT_IGNORE(Construct_aff_transformation) + typedef R_ R; + typedef typename Get_type::type result_type; +#ifdef CGAL_CXX11 + template + result_type operator()(T&&...)const{return result_type();} +#else + result_type operator()()const{ + return result_type(); + } +#define CGAL_CODE(Z,N,_) template \ + result_type operator()(BOOST_PP_ENUM_BINARY_PARAMS(N,U,const& BOOST_PP_INTERCEPT))const{ \ + return result_type(); \ + } + BOOST_PP_REPEAT_FROM_TO(1, 9, CGAL_CODE, _ ) +#undef CGAL_CODE + +#endif +}; +} +CGAL_KD_DEFAULT_TYPE(Aff_transformation_tag,(CGAL::Aff_transformation),(),()); +CGAL_KD_DEFAULT_FUNCTOR(Construct_ttag,(CartesianDKernelFunctors::Construct_aff_transformation),(Aff_transformation_tag),()); + +} +#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Hyperplane.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Hyperplane.h new file mode 100644 index 00000000..14e35b01 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Hyperplane.h @@ -0,0 +1,159 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_KD_TYPE_HYPERPLANE_H +#define CGAL_KD_TYPE_HYPERPLANE_H +#include +#include +#include +#include +#include +namespace CGAL { +template class Hyperplane { + typedef typename Get_type::type FT_; + typedef typename Get_type::type Vector_; + Vector_ v_; + FT_ s_; + + public: + Hyperplane(Vector_ const&v, FT_ const&s): v_(v), s_(s) {} + // TODO: Add a piecewise constructor? 
+ + Vector_ const& orthogonal_vector()const{return v_;} + FT_ translation()const{return s_;} +}; +namespace CartesianDKernelFunctors { +template struct Construct_hyperplane : Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Construct_hyperplane) + typedef typename Get_type::type result_type; + typedef typename Get_type::type Point; + typedef typename Get_type::type Vector; + typedef typename Get_type::type FT; + private: + struct One { + typedef int result_type; + templateint const& operator()(T const&)const{ + static const int one = 1; + return one; + } + }; + public: + + result_type operator()(Vector const&a, FT const&b)const{ + return result_type(a,b); + } + // Not really needed + result_type operator()()const{ + typename Get_functor >::type cv(this->kernel()); + return result_type(cv(),0); + } + + template + result_type through(Iter f, Iter e)const{ + typedef typename R_::LA LA; + typedef typename R_::Default_ambient_dimension D1; + typedef typename R_::Max_ambient_dimension D2; + typedef typename Increment_dimension::type D1i; + typedef typename Increment_dimension::type D2i; + + typedef Eigen::Matrix::value, Eigen_dimension::value, + Eigen::ColMajor|Eigen::AutoAlign, Eigen_dimension::value, Eigen_dimension::value> Matrix; + typedef Eigen::Matrix::value, 1, + Eigen::ColMajor|Eigen::AutoAlign, Eigen_dimension::value, 1> Vec; + typename Get_functor::type c(this->kernel()); + typename Get_functor >::type cv(this->kernel()); + typename Get_functor::type pd(this->kernel()); + + Point const& p0=*f; + int d = pd(p0); + Matrix m(d,d+1); + for(int j=0;j lu(m); + Vec res = lu.kernel().col(0); + return this->operator()(cv(d,LA::vector_begin(res),LA::vector_end(res)-1),res(d)); + } + template + result_type operator()(Iter f, Iter e, Point const&p, CGAL::Oriented_side s=ON_ORIENTED_BOUNDARY)const{ + result_type ret = through(f, e); + // I don't really like using ON_ORIENTED_BOUNDARY to mean that we don't care, we might as well not pass 'p' at all. + if (s == ON_ORIENTED_BOUNDARY) + return ret; + typename Get_functor::type os(this->kernel()); + CGAL::Oriented_side o = os(ret, p); + if (o == ON_ORIENTED_BOUNDARY || o == s) + return ret; + typename Get_functor::type ov(this->kernel()); + typename Get_functor >::type cv(this->kernel()); + return this->operator()(ov(ret.orthogonal_vector()), -ret.translation()); + } +}; +template struct Orthogonal_vector { + CGAL_FUNCTOR_INIT_IGNORE(Orthogonal_vector) + typedef typename Get_type::type Hyperplane; + typedef typename Get_type::type const& result_type; + result_type operator()(Hyperplane const&s)const{ + return s.orthogonal_vector(); + } +}; +template struct Hyperplane_translation { + CGAL_FUNCTOR_INIT_IGNORE(Hyperplane_translation) + typedef typename Get_type::type Hyperplane; + typedef typename Get_type::type result_type; + // TODO: Is_exact? + result_type operator()(Hyperplane const&s)const{ + return s.translation(); + } +}; +template struct Value_at : Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Value_at) + typedef typename Get_type::type Hyperplane; + typedef typename Get_type::type Vector; + typedef typename Get_type::type Point; + typedef typename Get_type::type FT; + typedef FT result_type; + typedef typename Get_functor::type Dot; + typedef typename Get_functor::type P2V; + result_type operator()(Hyperplane const&h, Point const&p)const{ + Dot dot(this->kernel()); + P2V p2v(this->kernel()); + return dot(h.orthogonal_vector(),p2v(p)); + // Use Orthogonal_vector to make it generic? + // Copy the code from Scalar_product to avoid p2v? 
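// through() above fills a d x (d+1) matrix from the d input points (one extra
// homogeneous column) and takes a null-space vector: its first d entries give
// the orthogonal vector and the last entry the translation.  A 2-D
// illustration of that null-space computation, assuming only Eigen 3:
#include <Eigen/Dense>

inline Eigen::VectorXd line_through_two_points_demo() {
  Eigen::MatrixXd m(2, 3);
  m << 0.0, 0.0, 1.0,    // point (0, 0), homogeneous coordinate 1
       2.0, 1.0, 1.0;    // point (2, 1)
  Eigen::FullPivLU<Eigen::MatrixXd> lu(m);
  // One column spanning the null space, proportional to (1, -2, 0),
  // i.e. the line x - 2y = 0 through both points.
  return lu.kernel().col(0);
}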
+ } +}; +} +//TODO: Add a condition that the hyperplane type is the one from this file. +CGAL_KD_DEFAULT_TYPE(Hyperplane_tag,(CGAL::Hyperplane),(Vector_tag),()); +CGAL_KD_DEFAULT_FUNCTOR(Construct_ttag,(CartesianDKernelFunctors::Construct_hyperplane),(Vector_tag,Hyperplane_tag),(Opposite_vector_tag,Oriented_side_tag)); +CGAL_KD_DEFAULT_FUNCTOR(Orthogonal_vector_tag,(CartesianDKernelFunctors::Orthogonal_vector),(Vector_tag,Hyperplane_tag),()); +CGAL_KD_DEFAULT_FUNCTOR(Hyperplane_translation_tag,(CartesianDKernelFunctors::Hyperplane_translation),(Hyperplane_tag),()); +CGAL_KD_DEFAULT_FUNCTOR(Value_at_tag,(CartesianDKernelFunctors::Value_at),(Point_tag,Vector_tag,Hyperplane_tag),(Scalar_product_tag,Point_to_vector_tag)); +} // namespace CGAL +#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Iso_box.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Iso_box.h new file mode 100644 index 00000000..d053f351 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Iso_box.h @@ -0,0 +1,88 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_KERNELD_TYPES_ISO_BOX_H +#define CGAL_KERNELD_TYPES_ISO_BOX_H +#include +#include +#include +#include +#include +namespace CGAL { +template class Iso_box { + typedef typename Get_type::type FT_; + typedef typename Get_type::type Point_; + typedef std::pair Data_; + Data_ data; + public: + Iso_box(){} + Iso_box(Point_ const&a, Point_ const&b): data(a,b) {} + Point_ min BOOST_PREVENT_MACRO_SUBSTITUTION ()const{ + return data.first; + } + Point_ max BOOST_PREVENT_MACRO_SUBSTITUTION ()const{ + return data.second; + } +}; +namespace CartesianDKernelFunctors { + template struct Construct_iso_box : Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Construct_iso_box) + typedef typename Get_type::type result_type; + typedef typename Get_type::type RT; + typedef typename Get_type::type Point; + typedef typename Get_functor >::type Cp_; + typedef typename Get_functor >::type Ci_; + result_type operator()(Point const&a, Point const&b)const{ + Cp_ cp(this->kernel()); + Ci_ ci(this->kernel()); + return result_type(cp( + make_transforming_pair_iterator(ci(a,Begin_tag()), ci(b,Begin_tag()), Min()), + make_transforming_pair_iterator(ci(a,End_tag()), ci(b,End_tag()), Min())), + cp( + make_transforming_pair_iterator(ci(a,Begin_tag()), ci(b,Begin_tag()), Max()), + make_transforming_pair_iterator(ci(a,End_tag()), ci(b,End_tag()), Max()))); + } + }; + + template struct Construct_min_vertex { + CGAL_FUNCTOR_INIT_IGNORE(Construct_min_vertex) + typedef typename Get_type::type argument_type; + //TODO: make result_type a reference + typedef typename Get_type::type result_type; + result_type operator()(argument_type const&b)const{ + return b.min BOOST_PREVENT_MACRO_SUBSTITUTION (); + } + }; + template struct Construct_max_vertex { + CGAL_FUNCTOR_INIT_IGNORE(Construct_max_vertex) + 
typedef typename Get_type::type argument_type; + typedef typename Get_type::type result_type; + result_type operator()(argument_type const&b)const{ + return b.max BOOST_PREVENT_MACRO_SUBSTITUTION (); + } + }; +} +//TODO (other types as well) only enable these functors if the Iso_box type is the one defined in this file... +CGAL_KD_DEFAULT_TYPE(Iso_box_tag,(CGAL::Iso_box),(Point_tag),()); +CGAL_KD_DEFAULT_FUNCTOR(Construct_ttag,(CartesianDKernelFunctors::Construct_iso_box),(Iso_box_tag,Point_tag),(Construct_ttag,Construct_ttag)); +CGAL_KD_DEFAULT_FUNCTOR(Construct_min_vertex_tag,(CartesianDKernelFunctors::Construct_min_vertex),(Iso_box_tag),()); +CGAL_KD_DEFAULT_FUNCTOR(Construct_max_vertex_tag,(CartesianDKernelFunctors::Construct_max_vertex),(Iso_box_tag),()); +} // namespace CGAL + +#endif // CGAL_KERNELD_TYPES_ISO_BOX_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Line.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Line.h new file mode 100644 index 00000000..6a09571c --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Line.h @@ -0,0 +1,66 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_KERNELD_TYPES_LINE_H +#define CGAL_KERNELD_TYPES_LINE_H +#include +#include +#include +namespace CGAL { +template class Line { + typedef typename Get_type::type FT_; + typedef typename Get_type::type Point_; + typedef std::pair Data_; + Data_ data; + public: + Line(){} + Line(Point_ const&a, Point_ const&b): data(a,b) {} + Point_ point(int i)const{ + if(i==0) return data.first; + if(i==1) return data.second; + throw "not implemented"; + } + Line opposite()const{ + return Line(data.second,data.first); + } +}; +namespace CartesianDKernelFunctors { + template struct Construct_line : Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Construct_line) + typedef typename Get_type::type result_type; + typedef typename Get_type::type Point; + typedef typename Get_type::type Vector; + typedef typename Get_functor::type Tp_; + //typedef typename Get_functor::type Dp_; + //typedef typename Get_functor::type Sv_; + result_type operator()(Point const&a, Point const&b)const{ + return result_type(a,b); + } + result_type operator()(Point const&a, typename First_if_different::Type const&b)const{ + Tp_ tp(this->kernel()); + return result_type(a,tp(a,b)); + } + }; +} +CGAL_KD_DEFAULT_TYPE(Line_tag,(CGAL::Line),(Point_tag),()); +CGAL_KD_DEFAULT_FUNCTOR(Construct_ttag,(CartesianDKernelFunctors::Construct_line),(Line_tag,Point_tag,Vector_tag),(Translated_point_tag)); + +} // namespace CGAL + +#endif // CGAL_KERNELD_TYPES_LINE_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Ray.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Ray.h new file mode 100644 index 00000000..be845e76 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Ray.h @@ -0,0 +1,66 @@ +// 
Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_KERNELD_TYPES_RAY_H +#define CGAL_KERNELD_TYPES_RAY_H +#include +#include +#include +namespace CGAL { +template class Ray { + typedef typename Get_type::type FT_; + typedef typename Get_type::type Point_; + typedef typename Get_type::type Vector_; + typedef std::pair Data_; + Data_ data; + public: + Ray(){} + Ray(Point_ const&a, Vector_ const&b): data(a,b) {} + Point_ source()const{ + return data.first; + } + // FIXME: return a R_::Direction? + Vector_ direction()const{ + return data.second; + } +}; +namespace CartesianDKernelFunctors { + template struct Construct_ray : Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Construct_ray) + typedef typename Get_type::type result_type; + typedef typename Get_type::type Point; + typedef typename Get_type::type Vector; + typedef typename Get_functor::type Dp_; + //typedef typename Get_functor::type Tp_; + //typedef typename Get_functor::type Sv_; + result_type operator()(Point const&a, Vector const&b)const{ + return result_type(a,b); + } + result_type operator()(Point const&a, typename First_if_different::Type const&b)const{ + Dp_ dp(this->kernel()); + return result_type(a,dp(b,a)); + } + }; +} +CGAL_KD_DEFAULT_TYPE(Ray_tag,(CGAL::Ray),(Point_tag,Vector_tag),()); +CGAL_KD_DEFAULT_FUNCTOR(Construct_ttag,(CartesianDKernelFunctors::Construct_ray),(Point_tag,Ray_tag,Vector_tag),(Difference_of_points_tag)); + +} // namespace CGAL + +#endif // CGAL_KERNELD_TYPES_RAY_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Segment.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Segment.h new file mode 100644 index 00000000..38361c2b --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Segment.h @@ -0,0 +1,121 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
+// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_KERNELD_SEGMENTD_H +#define CGAL_KERNELD_SEGMENTD_H +#include +#include +#include +namespace CGAL { +template class Segment { + typedef typename Get_type::type FT_; + typedef typename Get_type::type Point_; + //typedef typename R_::Vector Vector_; + //typedef typename Get_functor >::type Cv_; +// typedef typename R_::Squared_distance Csd_; + typedef std::pair Data_; + Data_ data; + public: + //typedef Segmentd Segment; +#ifdef CGAL_CXX11 + //FIXME: don't forward directly, piecewise_constuct should call the point construction functor (I guess? or is it unnecessary?) + template::type...>,std::tuple>::value>::type> + Segment(U&&...u):data(std::forward(u)...){} +#else + Segment(){} + Segment(Point_ const&a, Point_ const&b): data(a,b) {} + //template + //Segment(A const&,T1 const&t1,T2 const&t2) +#endif + Point_ source()const{return data.first;} + Point_ target()const{return data.second;} + Point_ operator[](int i)const{ + if((i%2)==0) + return source(); + else + return target(); + } + Segment opposite()const{ + return Segment(target(),source()); + } + //Vector_ vector()const{ + // return Cv_()(data.first,data.second); + //} +// FT_ squared_length()const{ +// return Csd_()(data.first,data.second); +// } +}; + +namespace CartesianDKernelFunctors { + +template struct Construct_segment : Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Construct_segment) + typedef R_ R; + typedef typename Get_type::type Point; + typedef typename Get_type::type Segment; + typedef typename Get_functor >::type CP; + typedef Segment result_type; + result_type operator()(Point const&a, Point const&b)const{ + return result_type(a,b); + } + // Not really needed, especially since it forces us to store the kernel + result_type operator()()const{ + Point p = typename Get_functor >::type (this->kernel()) (); + return result_type (p, p); + } + // T should only be std::piecewise_construct_t, but we shouldn't fail if it doesn't exist. 
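// The overload that follows implements piecewise construction: the segment's
// two endpoints are built in place, each from its own tuple of constructor
// arguments.  The same idea with std::pair and a toy point type (assumes
// C++11; Pt2 is a stand-in, not a GUDHI/CGAL type):
#include <utility>
#include <tuple>

struct Pt2 {
  double x, y;
  Pt2(double x_, double y_) : x(x_), y(y_) {}
};

inline std::pair<Pt2, Pt2> make_segment_piecewise_demo() {
  // Each forward_as_tuple holds the constructor arguments of one endpoint.
  return std::pair<Pt2, Pt2>(std::piecewise_construct,
                             std::forward_as_tuple(0.0, 0.0),
                             std::forward_as_tuple(1.0, 2.0));
}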
+ template + result_type operator()(CGAL_FORWARDABLE(T),CGAL_FORWARDABLE(U) u,CGAL_FORWARDABLE(V) v)const{ + CP cp(this->kernel()); + result_type r = {{ + call_on_tuple_elements(cp, CGAL_FORWARD(U,u)), + call_on_tuple_elements(cp, CGAL_FORWARD(V,v)) }}; + return r; + } +}; + +// This should be part of Construct_point, according to Kernel_23 conventions +template struct Segment_extremity { + CGAL_FUNCTOR_INIT_IGNORE(Segment_extremity) + typedef R_ R; + typedef typename Get_type::type Point; + typedef typename Get_type::type Segment; + typedef Point result_type; + result_type operator()(Segment const&s, int i)const{ + if(i==0) return s.source(); + CGAL_assertion(i==1); + return s.target(); + } +#ifdef CGAL_CXX11 + result_type operator()(Segment &&s, int i)const{ + if(i==0) return std::move(s.source()); + CGAL_assertion(i==1); + return std::move(s.target()); + } +#endif +}; +} // CartesianDKernelFunctors + +CGAL_KD_DEFAULT_TYPE(Segment_tag,(CGAL::Segment),(Point_tag),()); +CGAL_KD_DEFAULT_FUNCTOR(Construct_ttag,(CartesianDKernelFunctors::Construct_segment),(Segment_tag,Point_tag),(Construct_ttag)); +CGAL_KD_DEFAULT_FUNCTOR(Segment_extremity_tag,(CartesianDKernelFunctors::Segment_extremity),(Segment_tag,Point_tag),()); + +} // namespace CGAL + +#endif // CGAL_KERNELD_SEGMENTD_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Sphere.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Sphere.h new file mode 100644 index 00000000..114410b4 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Sphere.h @@ -0,0 +1,132 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_KD_TYPE_SPHERE_H +#define CGAL_KD_TYPE_SPHERE_H +#include +#include +namespace CGAL { +template class Sphere { + typedef typename Get_type::type FT_; + typedef typename Get_type::type Point_; + Point_ c_; + FT_ r2_; + + public: + Sphere(Point_ const&p, FT_ const&r2): c_(p), r2_(r2) {} + // TODO: Add a piecewise constructor? + + Point_ const& center()const{return c_;} + FT_ const& squared_radius()const{return r2_;} +}; + +namespace CartesianDKernelFunctors { +template struct Construct_sphere : Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Construct_sphere) + typedef typename Get_type::type result_type; + typedef typename Get_type::type Point; + typedef typename Get_type::type FT; + result_type operator()(Point const&a, FT const&b)const{ + return result_type(a,b); + } + // Not really needed + result_type operator()()const{ + typename Get_functor >::type cp(this->kernel()); + return result_type(cp(),0); + } + template + result_type operator()(Iter f, Iter e)const{ + typename Get_functor::type cc(this->kernel()); + typename Get_functor::type sd(this->kernel()); + + // It should be possible to avoid copying the center by moving this code to a constructor. 
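// The range overload being defined here builds the sphere through the input
// points as (circumcenter of the points, squared distance from that center to
// one of them).  A 2-D illustration of the same two steps, assuming only
// Eigen 3:
#include <Eigen/Dense>

inline double circumcircle_demo() {
  Eigen::Vector2d p0(0, 0), p1(2, 0), p2(0, 2);
  // The circumcenter c satisfies 2*(p_i - p_0).c = |p_i|^2 - |p_0|^2, i = 1, 2.
  Eigen::Matrix2d A;
  A.row(0) = 2.0 * (p1 - p0).transpose();
  A.row(1) = 2.0 * (p2 - p0).transpose();
  Eigen::Vector2d b(p1.squaredNorm() - p0.squaredNorm(),
                    p2.squaredNorm() - p0.squaredNorm());
  Eigen::Vector2d c = A.fullPivLu().solve(b);   // circumcenter (1, 1)
  return (c - p0).squaredNorm();                // squared radius, here 2
}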
+ Point center = cc(f, e); + FT const& r2 = sd(center, *f); + return this->operator()(CGAL_MOVE(center), r2); + } +}; + +template struct Center_of_sphere : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Center_of_sphere) + typedef typename Get_type::type Sphere; + // No reference because of the second overload + typedef typename Get_type::type result_type; + + result_type const& operator()(Sphere const&s)const{ + return s.center(); + } + + template + result_type operator()(Iter b, Iter e)const{ + typename Get_functor >::type cs(this->kernel()); + return operator()(cs(b,e)); // computes the radius needlessly + } +}; + +template struct Squared_radius { + CGAL_FUNCTOR_INIT_IGNORE(Squared_radius) + typedef typename Get_type::type Sphere; + typedef typename Get_type::type const& result_type; + // TODO: Is_exact? + result_type operator()(Sphere const&s)const{ + return s.squared_radius(); + } +}; + +// FIXME: Move it to the generic functors, using the two above and conditional to the existence of sqrt(FT) +template struct Point_of_sphere : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Point_of_sphere) + typedef R_ R; + typedef typename Get_type::type FT; + typedef typename Get_type::type RT; + typedef typename Get_type::type Point; + typedef typename Get_type::type Sphere; + typedef typename Get_functor >::type CP; + typedef typename Get_functor >::type CI; + typedef typename Get_functor::type PD; + typedef Point result_type; + typedef Sphere first_argument_type; + typedef int second_argument_type; + struct Trans : std::binary_function { + FT const& r_; int idx; bool sgn; + Trans (int n, FT const& r, bool b) : r_(r), idx(n), sgn(b) {} + FT operator()(FT const&x, int i)const{ + return (i == idx) ? sgn ? x + r_ : x - r_ : x; + } + }; + result_type operator()(Sphere const&s, int i)const{ + CI ci(this->kernel()); + PD pd(this->kernel()); + typedef boost::counting_iterator Count; + Point const&c = s.center(); + int d=pd(c); + bool last = (i == d); + FT r = sqrt(s.squared_radius()); + Trans t(last ? 
0 : i, r, !last); + return CP(this->kernel())(make_transforming_pair_iterator(ci(c,Begin_tag()),Count(0),t),make_transforming_pair_iterator(ci(c,End_tag()),Count(d),t)); + } +}; +} +CGAL_KD_DEFAULT_TYPE(Sphere_tag,(CGAL::Sphere),(Point_tag),()); +CGAL_KD_DEFAULT_FUNCTOR(Construct_ttag,(CartesianDKernelFunctors::Construct_sphere),(Sphere_tag,Point_tag),(Construct_ttag,Compute_point_cartesian_coordinate_tag,Squared_distance_tag,Squared_distance_to_origin_tag,Point_dimension_tag)); +CGAL_KD_DEFAULT_FUNCTOR(Center_of_sphere_tag,(CartesianDKernelFunctors::Center_of_sphere),(Sphere_tag,Point_tag),(Construct_ttag)); +CGAL_KD_DEFAULT_FUNCTOR(Squared_radius_tag,(CartesianDKernelFunctors::Squared_radius),(Sphere_tag),()); +CGAL_KD_DEFAULT_FUNCTOR(Point_of_sphere_tag,(CartesianDKernelFunctors::Point_of_sphere),(Sphere_tag,Point_tag),(Construct_ttag, Construct_ttag)); +} // namespace CGAL +#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Weighted_point.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Weighted_point.h new file mode 100644 index 00000000..1caf8701 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Types/Weighted_point.h @@ -0,0 +1,205 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_KD_TYPE_WP_H +#define CGAL_KD_TYPE_WP_H +#include +#include +namespace CGAL { +namespace KerD { +template class Weighted_point { + typedef typename Get_type::type FT_; + typedef typename Get_type::type Point_; + Point_ c_; + FT_ w_; + + public: + Weighted_point(Point_ const&p, FT_ const&w): c_(p), w_(w) {} + // TODO: Add a piecewise constructor? 
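// For the weighted points defined here, the power distance computed by
// Power_distance / Power_distance_to_point further down is |p - q|^2 - w_p - w_q
// (and |p - q|^2 - w_p against a bare point).  A tiny standalone illustration
// with a toy 2-D type (WPt2 is a stand-in, not the class from this header):
struct WPt2 { double x, y, w; };

inline double power_distance_demo(const WPt2& a, const WPt2& b) {
  const double dx = a.x - b.x;
  const double dy = a.y - b.y;
  return dx * dx + dy * dy - a.w - b.w;   // squared distance minus both weights
}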
+ + Point_ const& point()const{return c_;} + FT_ const& weight()const{return w_;} +}; +} + +namespace CartesianDKernelFunctors { +template struct Construct_weighted_point : Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Construct_weighted_point) + typedef typename Get_type::type result_type; + typedef typename Get_type::type Point; + typedef typename Get_type::type FT; + result_type operator()(Point const&a, FT const&b)const{ + return result_type(a,b); + } + // Not really needed + result_type operator()()const{ + typename Get_functor >::type cp(this->kernel()); + return result_type(cp(),0); + } +}; + +template struct Point_drop_weight { + CGAL_FUNCTOR_INIT_IGNORE(Point_drop_weight) + typedef typename Get_type::type argument_type; + typedef typename Get_type::type const& result_type; + // Returning a reference is fragile + + result_type operator()(argument_type const&s)const{ + return s.point(); + } +}; + +template struct Point_weight { + CGAL_FUNCTOR_INIT_IGNORE(Point_weight) + typedef typename Get_type::type argument_type; + typedef typename Get_type::type result_type; + + result_type operator()(argument_type const&s)const{ + return s.weight(); + } +}; + +template struct Power_distance : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Power_distance) + typedef typename Get_type::type first_argument_type; + typedef first_argument_type second_argument_type; + typedef typename Get_type::type result_type; + + result_type operator()(first_argument_type const&a, second_argument_type const&b)const{ + typename Get_functor::type pdw(this->kernel()); + typename Get_functor::type pw(this->kernel()); + typename Get_functor::type sd(this->kernel()); + return sd(pdw(a),pdw(b))-pw(a)-pw(b); + } +}; +template struct Power_distance_to_point : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Power_distance_to_point) + typedef typename Get_type::type first_argument_type; + typedef typename Get_type::type second_argument_type; + typedef typename Get_type::type result_type; + + result_type operator()(first_argument_type const&a, second_argument_type const&b)const{ + typename Get_functor::type pdw(this->kernel()); + typename Get_functor::type pw(this->kernel()); + typename Get_functor::type sd(this->kernel()); + return sd(pdw(a),b)-pw(a); + } +}; + +template struct Power_side_of_power_sphere : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Power_side_of_power_sphere) + typedef R_ R; + typedef typename Get_type::type result_type; + + template + result_type operator()(Iter const& f, Iter const& e, Pt const& p0) const { + typename Get_functor::type ptr(this->kernel()); + typename Get_functor::type pdw(this->kernel()); + typename Get_functor::type pw(this->kernel()); + return ptr ( + make_transforming_iterator (f, pdw), + make_transforming_iterator (e, pdw), + make_transforming_iterator (f, pw), + pdw (p0), + pw (p0)); + } +}; + +template struct In_flat_power_side_of_power_sphere : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(In_flat_power_side_of_power_sphere) + typedef R_ R; + typedef typename Get_type::type result_type; + + template + result_type operator()(Fo const& fo, Iter const& f, Iter const& e, Pt const& p0) const { + typename Get_functor::type ptr(this->kernel()); + typename Get_functor::type pdw(this->kernel()); + typename Get_functor::type pw(this->kernel()); + return ptr ( + fo, + make_transforming_iterator (f, pdw), + make_transforming_iterator (e, pdw), + make_transforming_iterator (f, pw), + pdw (p0), + pw (p0)); + } +}; + +// Construct a point at (weighted) distance 0 from all the input +template 
struct Power_center : Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Power_center) + typedef typename Get_type::type WPoint; + typedef WPoint result_type; + typedef typename Get_type::type Point; + typedef typename Get_type::type FT; + template + result_type operator()(Iter f, Iter e)const{ + // 2*(x-y).c == (x^2-wx^2)-(y^2-wy^2) + typedef typename R_::LA LA; + typedef typename LA::Square_matrix Matrix; + typedef typename LA::Vector Vec; + typedef typename LA::Construct_vector CVec; + typename Get_functor::type c(this->kernel()); + typename Get_functor >::type cp(this->kernel()); + typename Get_functor::type pd(this->kernel()); + typename Get_functor::type sdo(this->kernel()); + typename Get_functor::type pdp(this->kernel()); + typename Get_functor::type pdw(this->kernel()); + typename Get_functor::type pw(this->kernel()); + typename Get_functor >::type cwp(this->kernel()); + + WPoint const& wp0 = *f; + Point const& p0 = pdw(wp0); + int d = pd(p0); + FT const& n0 = sdo(p0) - pw(wp0); + Matrix m(d,d); + Vec b = typename CVec::Dimension()(d); + // Write the point coordinates in lines. + int i; + for(i=0; ++f!=e; ++i) { + WPoint const& wp=*f; + Point const& p=pdw(wp); + FT const& np = sdo(p) - pw(wp); + for(int j=0;j),(Point_tag),()); +CGAL_KD_DEFAULT_FUNCTOR(Construct_ttag,(CartesianDKernelFunctors::Construct_weighted_point),(Weighted_point_tag,Point_tag),()); +CGAL_KD_DEFAULT_FUNCTOR(Point_drop_weight_tag,(CartesianDKernelFunctors::Point_drop_weight),(Weighted_point_tag,Point_tag),()); +CGAL_KD_DEFAULT_FUNCTOR(Point_weight_tag,(CartesianDKernelFunctors::Point_weight),(Weighted_point_tag,Point_tag),()); +CGAL_KD_DEFAULT_FUNCTOR(Power_side_of_power_sphere_tag,(CartesianDKernelFunctors::Power_side_of_power_sphere),(Weighted_point_tag),(Power_side_of_power_sphere_raw_tag,Point_drop_weight_tag,Point_weight_tag)); +CGAL_KD_DEFAULT_FUNCTOR(In_flat_power_side_of_power_sphere_tag,(CartesianDKernelFunctors::In_flat_power_side_of_power_sphere),(Weighted_point_tag),(In_flat_power_side_of_power_sphere_raw_tag,Point_drop_weight_tag,Point_weight_tag)); +CGAL_KD_DEFAULT_FUNCTOR(Power_distance_tag,(CartesianDKernelFunctors::Power_distance),(Weighted_point_tag,Point_tag),(Squared_distance_tag,Point_drop_weight_tag,Point_weight_tag)); +CGAL_KD_DEFAULT_FUNCTOR(Power_distance_to_point_tag,(CartesianDKernelFunctors::Power_distance_to_point),(Weighted_point_tag,Point_tag),(Squared_distance_tag,Point_drop_weight_tag,Point_weight_tag)); +CGAL_KD_DEFAULT_FUNCTOR(Power_center_tag,(CartesianDKernelFunctors::Power_center),(Weighted_point_tag,Point_tag),(Compute_point_cartesian_coordinate_tag,Construct_ttag,Construct_ttag,Point_dimension_tag,Squared_distance_to_origin_tag,Point_drop_weight_tag,Point_weight_tag,Power_distance_to_point_tag)); +} // namespace CGAL +#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/array.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/array.h new file mode 100644 index 00000000..0ad9bb36 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/array.h @@ -0,0 +1,165 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. 
+// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_VECTOR_ARRAY_H +#define CGAL_VECTOR_ARRAY_H +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include + + + +namespace CGAL { + +// May not be safe to use with dim!=max_dim. +// In that case, we should store the real dim next to the array. +template struct Array_vector { + typedef NT_ NT; + typedef Dim_ Dimension; + typedef Max_dim_ Max_dimension; + template< class D2, class D3=D2 > + struct Rebind_dimension { + typedef Array_vector< NT, D2, D3 > Other; + }; + template struct Property : boost::false_type {}; + + static const unsigned d_=Max_dim_::value; + CGAL_static_assertion(d_ != (unsigned)UNKNOWN_DIMENSION); + + typedef cpp0x::array Vector; + struct Construct_vector { + struct Dimension { + // Initialize with NaN if possible? + Vector operator()(unsigned CGAL_assertion_code(d)) const { + CGAL_assertion(d<=d_); + return Vector(); + } + }; + + struct Iterator { + template + Vector operator()(unsigned CGAL_assertion_code(d),Iter const& f,Iter const& e) const { + CGAL_assertion(d==(unsigned) std::distance(f,e)); + CGAL_assertion(d<=d_); + //TODO: optimize for forward iterators + Vector a; + std::copy(f,e,a.begin()); + return a; + } + }; + +#if 0 + struct Iterator_add_one { + template + Vector operator()(unsigned d,Iter const& f,Iter const& e) const { + CGAL_assertion(d==std::distance(f,e)+1); + CGAL_assertion(d<=d_); + //TODO: optimize + Vector a; + std::copy(f,e,a.begin()); + a.back()=1; + return a; + } + }; +#endif + + struct Iterator_and_last { + template + Vector operator()(unsigned CGAL_assertion_code(d),Iter const& f,Iter const& e,CGAL_FORWARDABLE(T) t) const { + CGAL_assertion(d==std::distance(f,e)+1); + CGAL_assertion(d<=d_); + //TODO: optimize for forward iterators + Vector a; + std::copy(f,e,a.begin()); + a.back()=CGAL_FORWARD(T,t); + return a; + } + }; + + struct Values { +#ifdef CGAL_CXX11 + template + Vector operator()(U&&...u) const { + static_assert(sizeof...(U)<=d_,"too many arguments"); + Vector a={{forward_safe(u)...}}; + return a; + } +#else + +#define CGAL_CODE(Z,N,_) Vector operator()(BOOST_PP_ENUM_PARAMS(N,NT const& t)) const { \ + CGAL_assertion(N<=d_); \ + Vector a={{BOOST_PP_ENUM_PARAMS(N,t)}}; \ + return a; \ +} +BOOST_PP_REPEAT_FROM_TO(1, 11, CGAL_CODE, _ ) +#undef CGAL_CODE + +#endif + }; + + struct Values_divide { +#ifdef CGAL_CXX11 + template + Vector operator()(H const& h,U&&...u) const { + static_assert(sizeof...(U)<=d_,"too many arguments"); + Vector a={{Rational_traits().make_rational(std::forward(u),h)...}}; + return a; + } +#else + +#define CGAL_VAR(Z,N,_) Rational_traits().make_rational( t##N , h) +#define CGAL_CODE(Z,N,_) template Vector \ + operator()(H const&h, BOOST_PP_ENUM_PARAMS(N,NT const& t)) const { \ + CGAL_assertion(N<=d_); \ + Vector a={{BOOST_PP_ENUM(N,CGAL_VAR,_)}}; \ + return a; \ + } + BOOST_PP_REPEAT_FROM_TO(1, 11, CGAL_CODE, _ ) +#undef CGAL_CODE +#undef CGAL_VAR + +#endif + }; + }; + + typedef NT const* Vector_const_iterator; + static Vector_const_iterator vector_begin(Vector const&a){ + return &a[0]; + } + static Vector_const_iterator vector_end(Vector const&a){ + return &a[0]+d_; // Don't know 
the real size + } + static unsigned size_of_vector(Vector const&){ + return d_; // Don't know the real size + } + +}; + +} +#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/avx4.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/avx4.h new file mode 100644 index 00000000..954a3c1b --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/avx4.h @@ -0,0 +1,213 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_VECTOR_AVX4_H +#define CGAL_VECTOR_AVX4_H + +#if !defined __AVX__ || (__GNUC__ * 100 + __GNUC_MINOR__ < 408) +#error Requires AVX and gcc 4.8+ +#endif +#include + +#include +#include +#include // CGAL::Sign +#include // CGAL::sign + + + +namespace CGAL { + + struct Avx_vector_4 { + typedef double NT; + typedef Dimension_tag<4> Dimension; + typedef Dimension_tag<4> Max_dimension; + // No Rebind_dimension, this is a building block + template struct Property : boost::false_type {}; + template struct Property + : boost::true_type {}; + /* MAYBE? + template struct Property + : boost::true_type {}; + */ + template struct Property + : boost::true_type {}; + template struct Property + : boost::true_type {}; + template struct Property + : boost::true_type {}; + + typedef __m256d Vector; + struct Construct_vector { + struct Dimension { + // Initialize with NaN? 
+ Vector operator()(unsigned d) const { + CGAL_assertion(d==4); + return Vector(); + } + }; + + struct Iterator { + template + Vector operator()(unsigned d,Iter const& f,Iter const& e) const { + CGAL_assertion(d==4); + double x0 = *f; + double x1 = *++f; + double x2 = *++f; + double x3 = *++f; + CGAL_assertion(++f==e); + Vector a = { x0, x1, x2, x3 }; + return a; + } + }; + + struct Iterator_and_last { + template + Vector operator()(unsigned d,Iter const& f,Iter const& e,double t) const { + CGAL_assertion(d==4); + double x0 = *f; + double x1 = *++f; + double x2 = *++f; + CGAL_assertion(++f==e); + Vector a = { x0, x1, x2, t }; + return a; + } + }; + + struct Values { + Vector operator()(double a,double b,double c,double d) const { + Vector r = { a, b, c, d }; + return r; + } + }; + + struct Values_divide { + Vector operator()(double h,double a,double b,double c,double d) const { + // {a,b,c,d}/{h,h,h,h} should be roughly the same + Vector r = { a/h, b/h, c/h, d/h }; + return r; + } + }; + }; + + public: + typedef double const* Vector_const_iterator; + static inline Vector_const_iterator vector_begin(Vector const&a){ + return (Vector_const_iterator)(&a); + } + static inline Vector_const_iterator vector_end(Vector const&a){ + return (Vector_const_iterator)(&a)+4; + } + static inline unsigned size_of_vector(Vector){ + return 4; + } + static inline double dot_product(__m256d x, __m256d y){ + __m256d p=x*y; + __m256d z=_mm256_hadd_pd(p,p); + return z[0]+z[2]; + } + private: + static inline __m256d avx_sym(__m256d x){ +#if 0 + return __builtin_shuffle(x,(__m256i){2,3,0,1}); +#else + return _mm256_permute2f128_pd(x,x,1); +#endif + } + static inline __m256d avx_left(__m256d x){ +#if 0 + return __builtin_shuffle(x,(__m256i){1,2,3,0}); +#else +#ifdef __AVX2__ + return _mm256_permute4x64_pd(x,1+2*4+3*16+0*64); +#else + __m256d s = _mm256_permute2f128_pd(x,x,1); + return _mm256_shuffle_pd(x,s,5); +#endif +#endif + } + static inline __m256d avx_right(__m256d x){ +#if 0 + return __builtin_shuffle(x,(__m256i){3,0,1,2}); +#else +#ifdef __AVX2__ + return _mm256_permute4x64_pd(x,3+0*4+1*16+2*64); +#else + __m256d s = _mm256_permute2f128_pd(x,x,1); + return _mm256_shuffle_pd(s,x,5); +#endif +#endif + } + static inline double avx_altprod(__m256d x, __m256d y){ + __m256d p=x*y; + __m256d z=_mm256_hsub_pd(p,p); + return z[0]+z[2]; + } + public: + static double + determinant_of_vectors(Vector a, Vector b, Vector c, Vector d) { + __m256d x=a*avx_left(b)-avx_left(a)*b; + __m256d yy=a*avx_sym(b); + __m256d y=yy-avx_sym(yy); + __m256d z0=x*avx_sym(c); + __m256d z1=avx_left(x)*c; + __m256d z2=y*avx_left(c); + __m256d z=z0+z1-z2; + return avx_altprod(z,avx_right(d)); + } + static CGAL::Sign + sign_of_determinant_of_vectors(Vector a, Vector b, Vector c, Vector d) { + return CGAL::sign(determinant_of_vectors(a,b,c,d)); + } + + private: + static inline __m256d avx3_right(__m256d x){ +#if 0 + return __builtin_shuffle(x,(__m256i){2,0,1,3}); // can replace 3 with anything +#else +#ifdef __AVX2__ + return _mm256_permute4x64_pd(x,2+0*4+1*16+3*64); +#else + __m256d s = _mm256_permute2f128_pd(x,x,1); + return _mm256_shuffle_pd(s,x,12); +#endif +#endif + } + public: + static inline double dot_product_omit_last(__m256d x, __m256d y){ + __m256d p=x*y; + __m128d q=_mm256_extractf128_pd(p,0); + double z=_mm_hadd_pd(q,q)[0]; + return z+p[2]; + } + // Note: without AVX2, is it faster than the scalar computation? 
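  // A scalar equivalent of determinant_of_vectors_omit_last below, for
  // comparison with the note above (an illustrative addition; the name and
  // placement are not from the original header). The vectorized routine
  // evaluates the 3x3 determinant of the first three coordinates of c, b, a
  // taken as rows, i.e.
  //   c0*(a2*b1 - a1*b2) + c1*(a0*b2 - a2*b0) + c2*(a1*b0 - a0*b1).
  static double determinant_of_vectors_omit_last_scalar(double const* a,
                                                         double const* b,
                                                         double const* c) {
    return c[0] * (a[2] * b[1] - a[1] * b[2])
         + c[1] * (a[0] * b[2] - a[2] * b[0])
         + c[2] * (a[1] * b[0] - a[0] * b[1]);
  }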
+ static double + determinant_of_vectors_omit_last(Vector a, Vector b, Vector c) { + __m256d x=a*avx3_right(b)-avx3_right(a)*b; + return dot_product_omit_last(c,avx3_right(x)); + } + static CGAL::Sign + sign_of_determinant_of_vectors_omit_last(Vector a, Vector b, Vector c) { + return CGAL::sign(determinant_of_vectors_omit_last(a,b,c)); + } + + }; + +} +#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_iterator_to_points_from_iterator_to_vectors.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_iterator_to_points_from_iterator_to_vectors.h new file mode 100644 index 00000000..b8efbe28 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_iterator_to_points_from_iterator_to_vectors.h @@ -0,0 +1,76 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_VECTOR_DET_ITER_PTS_ITER_VEC_H +#define CGAL_VECTOR_DET_ITER_PTS_ITER_VEC_H +#include +#include +#include +#include + +namespace CGAL { + +template ::value, + bool = LA::template Property::value> +struct Add_determinant_of_iterator_to_points_from_iterator_to_vectors : LA { + template< class D2, class D3=D2 > + struct Rebind_dimension { + typedef typename LA::template Rebind_dimension LA2; + typedef Add_determinant_of_iterator_to_points_from_iterator_to_vectors Other; + }; +}; + +template +struct Add_determinant_of_iterator_to_points_from_iterator_to_vectors + : LA { + typedef typename LA::NT NT; + typedef typename LA::Vector Vector; + template< class D2, class D3=D2 > + struct Rebind_dimension { + typedef typename LA::template Rebind_dimension LA2; + typedef Add_determinant_of_iterator_to_points_from_iterator_to_vectors Other; + }; + template struct Property : LA::template Property
{}; + template struct Property : + boost::true_type {}; + + // TODO: use std::minus, boost::bind, etc + template struct Minus_fixed { + T const& a; + Minus_fixed(T const&a_):a(a_){} + T operator()(T const&b)const{return b-a;} + }; + template + static NT determinant_of_iterator_to_points(Iter const&first, Iter const&end){ + Vector const&a=*first; ++first; + Minus_fixed f(a); + return LA::determinant_of_iterator_to_vectors(make_transforming_iterator(first,f),make_transforming_iterator(end,f)); + } + template + static Sign sign_of_determinant_of_iterator_to_points(Iter const&first, Iter const&end){ + Vector const&a=*first; ++first; + Minus_fixed f(a); + return LA::sign_of_determinant_of_iterator_to_vectors(make_transforming_iterator(first,f),make_transforming_iterator(end,f)); + } +}; + +} +#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_iterator_to_points_from_points.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_iterator_to_points_from_points.h new file mode 100644 index 00000000..71a31d81 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_iterator_to_points_from_points.h @@ -0,0 +1,211 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_VECTOR_DET_ITER_PTS_PTS_H +#define CGAL_VECTOR_DET_ITER_PTS_PTS_H +#include +#include + +namespace CGAL { + +template ::value, + bool = LA::template Property::value> +struct Add_determinant_of_iterator_to_points_from_points : LA { + template< class D2, class D3=D2 > + struct Rebind_dimension { + typedef typename LA::template Rebind_dimension LA2; + typedef Add_determinant_of_iterator_to_points_from_points Other; + }; +}; + +//FIXME: Use variadics and boost so it works in any dimension. +template +struct Add_determinant_of_iterator_to_points_from_points +, Max_dim_, false, true> : LA { + typedef typename LA::NT NT; + typedef typename LA::Vector Vector; + template< class D2, class D3=D2 > + struct Rebind_dimension { + typedef typename LA::template Rebind_dimension LA2; + typedef Add_determinant_of_iterator_to_points_from_points Other; + }; + template struct Property : LA::template Property
{}; + template struct Property : + boost::true_type {}; + + template + static NT determinant_of_iterator_to_points(Iter const&first, Iter const&end){ + Vector const&a=*first; ++first; + Vector const&b=*first; ++first; + Vector const&c=*first; CGAL_assertion(++first==end); + return LA::determinant_of_points(a,b,c); + } + template + static Sign sign_of_determinant_of_iterator_to_points(Iter const&first, Iter const&end){ + Vector const&a=*first; ++first; + Vector const&b=*first; ++first; + Vector const&c=*first; CGAL_assertion(++first==end); + return LA::sign_of_determinant_of_points(a,b,c); + } +}; + +template +struct Add_determinant_of_iterator_to_points_from_points +, Max_dim_, false, true> : LA { + typedef typename LA::NT NT; + typedef typename LA::Vector Vector; + template< class D2, class D3=D2 > + struct Rebind_dimension { + typedef typename LA::template Rebind_dimension LA2; + typedef Add_determinant_of_iterator_to_points_from_points Other; + }; + template struct Property : LA::template Property
{}; + template struct Property : + boost::true_type {}; + + template + static NT determinant_of_iterator_to_points(Iter const&first, Iter const&end){ + Vector const&a=*first; ++first; + Vector const&b=*first; ++first; + Vector const&c=*first; ++first; + Vector const&d=*first; CGAL_assertion(++first==end); + return LA::determinant_of_points(a,b,c,d); + } + template + static Sign sign_of_determinant_of_iterator_to_points(Iter const&first, Iter const&end){ + Vector const&a=*first; ++first; + Vector const&b=*first; ++first; + Vector const&c=*first; ++first; + Vector const&d=*first; CGAL_assertion(++first==end); + return LA::sign_of_determinant_of_points(a,b,c,d); + } +}; + +template +struct Add_determinant_of_iterator_to_points_from_points +, Max_dim_, false, true> : LA { + typedef typename LA::NT NT; + typedef typename LA::Vector Vector; + template< class D2, class D3=D2 > + struct Rebind_dimension { + typedef typename LA::template Rebind_dimension LA2; + typedef Add_determinant_of_iterator_to_points_from_points Other; + }; + template struct Property : LA::template Property
{}; + template struct Property : + boost::true_type {}; + + template + static NT determinant_of_iterator_to_points(Iter const&first, Iter const&end){ + Vector const&a=*first; ++first; + Vector const&b=*first; ++first; + Vector const&c=*first; ++first; + Vector const&d=*first; ++first; + Vector const&e=*first; CGAL_assertion(++first==end); + return LA::determinant_of_points(a,b,c,d,e); + } + template + static Sign sign_of_determinant_of_iterator_to_points(Iter const&first, Iter const&end){ + Vector const&a=*first; ++first; + Vector const&b=*first; ++first; + Vector const&c=*first; ++first; + Vector const&d=*first; ++first; + Vector const&e=*first; CGAL_assertion(++first==end); + return LA::sign_of_determinant_of_points(a,b,c,d,e); + } +}; + +template +struct Add_determinant_of_iterator_to_points_from_points +, Max_dim_, false, true> : LA { + typedef typename LA::NT NT; + typedef typename LA::Vector Vector; + template< class D2, class D3=D2 > + struct Rebind_dimension { + typedef typename LA::template Rebind_dimension LA2; + typedef Add_determinant_of_iterator_to_points_from_points Other; + }; + template struct Property : LA::template Property
{}; + template struct Property : + boost::true_type {}; + + template + static NT determinant_of_iterator_to_points(Iter const&first, Iter const&end){ + Vector const&a=*first; ++first; + Vector const&b=*first; ++first; + Vector const&c=*first; ++first; + Vector const&d=*first; ++first; + Vector const&e=*first; ++first; + Vector const&f=*first; CGAL_assertion(++first==end); + return LA::determinant_of_points(a,b,c,d,e,f); + } + template + static Sign sign_of_determinant_of_iterator_to_points(Iter const&first, Iter const&end){ + Vector const&a=*first; ++first; + Vector const&b=*first; ++first; + Vector const&c=*first; ++first; + Vector const&d=*first; ++first; + Vector const&e=*first; ++first; + Vector const&f=*first; CGAL_assertion(++first==end); + return LA::sign_of_determinant_of_points(a,b,c,d,e,f); + } +}; + +template +struct Add_determinant_of_iterator_to_points_from_points +, Max_dim_, false, true> : LA { + typedef typename LA::NT NT; + typedef typename LA::Vector Vector; + template< class D2, class D3=D2 > + struct Rebind_dimension { + typedef typename LA::template Rebind_dimension LA2; + typedef Add_determinant_of_iterator_to_points_from_points Other; + }; + template struct Property : LA::template Property
{}; + template struct Property : + boost::true_type {}; + + template + static NT determinant_of_iterator_to_points(Iter const&first, Iter const&end){ + Vector const&a=*first; ++first; + Vector const&b=*first; ++first; + Vector const&c=*first; ++first; + Vector const&d=*first; ++first; + Vector const&e=*first; ++first; + Vector const&f=*first; ++first; + Vector const&g=*first; CGAL_assertion(++first==end); + return LA::determinant_of_points(a,b,c,d,e,f,g); + } + template + static Sign sign_of_determinant_of_iterator_to_points(Iter const&first, Iter const&end){ + Vector const&a=*first; ++first; + Vector const&b=*first; ++first; + Vector const&c=*first; ++first; + Vector const&d=*first; ++first; + Vector const&e=*first; ++first; + Vector const&f=*first; ++first; + Vector const&g=*first; CGAL_assertion(++first==end); + return LA::sign_of_determinant_of_points(a,b,c,d,e,f,g); + } +}; + +} +#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_iterator_to_vectors_from_vectors.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_iterator_to_vectors_from_vectors.h new file mode 100644 index 00000000..f096d6c7 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_iterator_to_vectors_from_vectors.h @@ -0,0 +1,201 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_VECTOR_DET_ITER_VEC_VEC_H +#define CGAL_VECTOR_DET_ITER_VEC_VEC_H +#include +#include + +namespace CGAL { + +template ::value, + bool = LA::template Property::value> +struct Add_determinant_of_iterator_to_vectors_from_vectors : LA { + template< class D2, class D3=D2 > + struct Rebind_dimension { + typedef typename LA::template Rebind_dimension LA2; + typedef Add_determinant_of_iterator_to_vectors_from_vectors Other; + }; +}; + +//FIXME: Use variadics and boost so it works in any dimension. +template +struct Add_determinant_of_iterator_to_vectors_from_vectors +, Max_dim_, false, true> : LA { + typedef typename LA::NT NT; + typedef typename LA::Vector Vector; + template< class D2, class D3=D2 > + struct Rebind_dimension { + typedef typename LA::template Rebind_dimension LA2; + typedef Add_determinant_of_iterator_to_vectors_from_vectors Other; + }; + template struct Property : LA::template Property
{}; + template struct Property : + boost::true_type {}; + + template + static NT determinant_of_iterator_to_vectors(Iter const&first, Iter const&end){ + Vector const&a=*first; ++first; + Vector const&b=*first; CGAL_assertion(++first==end); + return LA::determinant_of_vectors(a,b); + } + template + static Sign sign_of_determinant_of_iterator_to_vectors(Iter const&first, Iter const&end){ + Vector const&a=*first; ++first; + Vector const&b=*first; CGAL_assertion(++first==end); + return LA::sign_of_determinant_of_vectors(a,b); + } +}; + +template +struct Add_determinant_of_iterator_to_vectors_from_vectors +, Max_dim_, false, true> : LA { + typedef typename LA::NT NT; + typedef typename LA::Vector Vector; + template< class D2, class D3=D2 > + struct Rebind_dimension { + typedef typename LA::template Rebind_dimension LA2; + typedef Add_determinant_of_iterator_to_vectors_from_vectors Other; + }; + template struct Property : LA::template Property
{}; + template struct Property : + boost::true_type {}; + + template + static NT determinant_of_iterator_to_vectors(Iter const&first, Iter const&end){ + Vector const&a=*first; ++first; + Vector const&b=*first; ++first; + Vector const&c=*first; CGAL_assertion(++first==end); + return LA::determinant_of_vectors(a,b,c); + } + template + static Sign sign_of_determinant_of_iterator_to_vectors(Iter const&first, Iter const&end){ + Vector const&a=*first; ++first; + Vector const&b=*first; ++first; + Vector const&c=*first; CGAL_assertion(++first==end); + return LA::sign_of_determinant_of_vectors(a,b,c); + } +}; + +template +struct Add_determinant_of_iterator_to_vectors_from_vectors +, Max_dim_, false, true> : LA { + typedef typename LA::NT NT; + typedef typename LA::Vector Vector; + template< class D2, class D3=D2 > + struct Rebind_dimension { + typedef typename LA::template Rebind_dimension LA2; + typedef Add_determinant_of_iterator_to_vectors_from_vectors Other; + }; + template struct Property : LA::template Property
{}; + template struct Property : + boost::true_type {}; + + template + static NT determinant_of_iterator_to_vectors(Iter const&first, Iter const&end){ + Vector const&a=*first; ++first; + Vector const&b=*first; ++first; + Vector const&c=*first; ++first; + Vector const&d=*first; CGAL_assertion(++first==end); + return LA::determinant_of_vectors(a,b,c,d); + } + template + static Sign sign_of_determinant_of_iterator_to_vectors(Iter const&first, Iter const&end){ + Vector const&a=*first; ++first; + Vector const&b=*first; ++first; + Vector const&c=*first; ++first; + Vector const&d=*first; CGAL_assertion(++first==end); + return LA::sign_of_determinant_of_vectors(a,b,c,d); + } +}; + +template +struct Add_determinant_of_iterator_to_vectors_from_vectors +, Max_dim_, false, true> : LA { + typedef typename LA::NT NT; + typedef typename LA::Vector Vector; + template< class D2, class D3=D2 > + struct Rebind_dimension { + typedef typename LA::template Rebind_dimension LA2; + typedef Add_determinant_of_iterator_to_vectors_from_vectors Other; + }; + template struct Property : LA::template Property
{}; + template struct Property : + boost::true_type {}; + + template + static NT determinant_of_iterator_to_vectors(Iter const&first, Iter const&end){ + Vector const&a=*first; ++first; + Vector const&b=*first; ++first; + Vector const&c=*first; ++first; + Vector const&d=*first; ++first; + Vector const&e=*first; CGAL_assertion(++first==end); + return LA::determinant_of_vectors(a,b,c,d,e); + } + template + static Sign sign_of_determinant_of_iterator_to_vectors(Iter const&first, Iter const&end){ + Vector const&a=*first; ++first; + Vector const&b=*first; ++first; + Vector const&c=*first; ++first; + Vector const&d=*first; ++first; + Vector const&e=*first; CGAL_assertion(++first==end); + return LA::sign_of_determinant_of_vectors(a,b,c,d,e); + } +}; + +template +struct Add_determinant_of_iterator_to_vectors_from_vectors +, Max_dim_, false, true> : LA { + typedef typename LA::NT NT; + typedef typename LA::Vector Vector; + template< class D2, class D3=D2 > + struct Rebind_dimension { + typedef typename LA::template Rebind_dimension LA2; + typedef Add_determinant_of_iterator_to_vectors_from_vectors Other; + }; + template struct Property : LA::template Property
{}; + template struct Property : + boost::true_type {}; + + template + static NT determinant_of_iterator_to_vectors(Iter const&first, Iter const&end){ + Vector const&a=*first; ++first; + Vector const&b=*first; ++first; + Vector const&c=*first; ++first; + Vector const&d=*first; ++first; + Vector const&e=*first; ++first; + Vector const&f=*first; CGAL_assertion(++first==end); + return LA::determinant_of_vectors(a,b,c,d,e,f); + } + template + static Sign sign_of_determinant_of_iterator_to_vectors(Iter const&first, Iter const&end){ + Vector const&a=*first; ++first; + Vector const&b=*first; ++first; + Vector const&c=*first; ++first; + Vector const&d=*first; ++first; + Vector const&e=*first; ++first; + Vector const&f=*first; CGAL_assertion(++first==end); + return LA::sign_of_determinant_of_vectors(a,b,c,d,e,f); + } +}; + +} +#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_points_from_vectors.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_points_from_vectors.h new file mode 100644 index 00000000..7ddb73c3 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_points_from_vectors.h @@ -0,0 +1,164 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_VECTOR_DETPTS_H +#define CGAL_VECTOR_DETPTS_H +#include +#include + +namespace CGAL { + +template ::value, + bool = LA::template Property::value + && LA::template Property::value> +struct Add_determinant_of_points_from_vectors_and_minus : LA { + template< class D2, class D3=D2 > + struct Rebind_dimension { + typedef typename LA::template Rebind_dimension LA2; + typedef Add_determinant_of_points_from_vectors_and_minus Other; + }; +}; + +//FIXME: Use variadics and boost so it works in any dimension. +template +struct Add_determinant_of_points_from_vectors_and_minus +, Max_dim_, false, true> : LA { + typedef typename LA::NT NT; + typedef typename LA::Vector Vector; + template< class D2, class D3=D2 > + struct Rebind_dimension { + typedef typename LA::template Rebind_dimension LA2; + typedef Add_determinant_of_points_from_vectors_and_minus Other; + }; + template struct Property : LA::template Property
{}; + template struct Property : + boost::true_type {}; + + static NT determinant_of_points(Vector const&a, Vector const&b, + Vector const&c){ + return LA::determinant_of_vectors(b-a,c-a); + } + static Sign sign_of_determinant_of_points(Vector const&a, Vector const&b, + Vector const&c){ + return LA::sign_of_determinant_of_vectors(b-a,c-a); + } +}; + +template +struct Add_determinant_of_points_from_vectors_and_minus +, Max_dim_, false, true> : LA { + typedef typename LA::NT NT; + typedef typename LA::Vector Vector; + template< class D2, class D3=D2 > + struct Rebind_dimension { + typedef typename LA::template Rebind_dimension LA2; + typedef Add_determinant_of_points_from_vectors_and_minus Other; + }; + template struct Property : LA::template Property
{}; + template struct Property : + boost::true_type {}; + + static NT determinant_of_points(Vector const&a, Vector const&b, + Vector const&c, Vector const&d){ + return LA::determinant_of_vectors(b-a,c-a,d-a); + } + static Sign sign_of_determinant_of_points(Vector const&a, Vector const&b, + Vector const&c, Vector const&d){ + return LA::sign_of_determinant_of_vectors(b-a,c-a,d-a); + } +}; + +template +struct Add_determinant_of_points_from_vectors_and_minus +, Max_dim_, false, true> : LA { + typedef typename LA::NT NT; + typedef typename LA::Vector Vector; + template< class D2, class D3=D2 > + struct Rebind_dimension { + typedef typename LA::template Rebind_dimension LA2; + typedef Add_determinant_of_points_from_vectors_and_minus Other; + }; + template struct Property : LA::template Property
{}; + template struct Property : + boost::true_type {}; + + static NT determinant_of_points(Vector const&a, Vector const&b, + Vector const&c, Vector const&d, Vector const&e){ + return LA::determinant_of_vectors(b-a,c-a,d-a,e-a); + } + static Sign sign_of_determinant_of_points(Vector const&a, Vector const&b, + Vector const&c, Vector const&d, Vector const&e){ + return LA::sign_of_determinant_of_vectors(b-a,c-a,d-a,e-a); + } +}; + +template +struct Add_determinant_of_points_from_vectors_and_minus +, Max_dim_, false, true> : LA { + typedef typename LA::NT NT; + typedef typename LA::Vector Vector; + template< class D2, class D3=D2 > + struct Rebind_dimension { + typedef typename LA::template Rebind_dimension LA2; + typedef Add_determinant_of_points_from_vectors_and_minus Other; + }; + template struct Property : LA::template Property
{}; + template struct Property : + boost::true_type {}; + + static NT determinant_of_points(Vector const&a, Vector const&b, + Vector const&c, Vector const&d, Vector const&e, Vector const&f){ + return LA::determinant_of_vectors(b-a,c-a,d-a,e-a,f-a); + } + static Sign sign_of_determinant_of_points(Vector const&a, Vector const&b, + Vector const&c, Vector const&d, Vector const&e, Vector const&f){ + return LA::sign_of_determinant_of_vectors(b-a,c-a,d-a,e-a,f-a); + } +}; + +template +struct Add_determinant_of_points_from_vectors_and_minus +, Max_dim_, false, true> : LA { + typedef typename LA::NT NT; + typedef typename LA::Vector Vector; + template< class D2, class D3=D2 > + struct Rebind_dimension { + typedef typename LA::template Rebind_dimension LA2; + typedef Add_determinant_of_points_from_vectors_and_minus Other; + }; + template struct Property : LA::template Property
{}; + template struct Property : + boost::true_type {}; + + static NT determinant_of_points(Vector const&a, Vector const&b, + Vector const&c, Vector const&d, Vector const&e, Vector const&f, + Vector const&g){ + return LA::determinant_of_vectors(b-a,c-a,d-a,e-a,f-a,g-a); + } + static Sign sign_of_determinant_of_points(Vector const&a, Vector const&b, + Vector const&c, Vector const&d, Vector const&e, Vector const&f, + Vector const&g){ + return LA::sign_of_determinant_of_vectors(b-a,c-a,d-a,e-a,f-a,g-a); + } +}; + +} +#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_vectors_small_dim.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_vectors_small_dim.h new file mode 100644 index 00000000..64eafe69 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_vectors_small_dim.h @@ -0,0 +1,58 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_VECTOR_DETVEC_SMALL_H +#define CGAL_VECTOR_DETVEC_SMALL_H +#include +#include +#include + +#define CGAL_ALLOWED_INCLUSION 1 + +#define CGAL_CLASS Add_determinant_of_vectors_small_dim +#define CGAL_TAG Has_determinant_of_vectors_tag +#define CGAL_FUNC determinant_of_vectors +#define CGAL_SIGN_FUNC sign_of_determinant_of_vectors +#define CGAL_SHIFT 0 + +#include + +#undef CGAL_CLASS +#undef CGAL_TAG +#undef CGAL_FUNC +#undef CGAL_SIGN_FUNC +#undef CGAL_SHIFT + +#define CGAL_CLASS Add_determinant_of_vectors_omit_last_small_dim +#define CGAL_TAG Has_determinant_of_vectors_omit_last_tag +#define CGAL_FUNC determinant_of_vectors_omit_last +#define CGAL_SIGN_FUNC sign_of_determinant_of_vectors_omit_last +#define CGAL_SHIFT 1 + +#include + +#undef CGAL_CLASS +#undef CGAL_TAG +#undef CGAL_FUNC +#undef CGAL_SIGN_FUNC +#undef CGAL_SHIFT + +#undef CGAL_ALLOWED_INCLUSION + +#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_vectors_small_dim_internal.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_vectors_small_dim_internal.h new file mode 100644 index 00000000..b4856742 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/determinant_of_vectors_small_dim_internal.h @@ -0,0 +1,164 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. 
+// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_ALLOWED_INCLUSION +#error Must not include this header directly +#endif +#if !defined(CGAL_TAG) \ + || ! defined(CGAL_CLASS) \ + || ! defined(CGAL_FUNC) \ + || ! defined(CGAL_SIGN_FUNC) \ + || ! defined(CGAL_SHIFT) + +#error Forgot one macro +#endif + +namespace CGAL { + +template ::value> +struct CGAL_CLASS : LA { + template< class D2, class D3=D2 > + struct Rebind_dimension { + typedef typename LA::template Rebind_dimension LA2; + typedef CGAL_CLASS Other; + }; +}; + +template +struct CGAL_CLASS +, Max_dim_, false> : LA { + typedef typename LA::NT NT; + typedef typename LA::Vector Vector; + template< class D2, class D3=D2 > + struct Rebind_dimension { + typedef typename LA::template Rebind_dimension LA2; + typedef CGAL_CLASS Other; + }; + template struct Property : LA::template Property
{}; + template struct Property : + boost::true_type {}; + + static NT CGAL_FUNC(Vector const&a, Vector const&b){ + return CGAL::determinant_of_vectors(a,b); + } + template + static Sign CGAL_SIGN_FUNC(V1 const&a, V2 const&b){ + return CGAL::sign_of_determinant_of_vectors(a,b); + } +}; + +template +struct CGAL_CLASS +, Max_dim_, false> : LA { + typedef typename LA::NT NT; + typedef typename LA::Vector Vector; + template< class D2, class D3=D2 > + struct Rebind_dimension { + typedef typename LA::template Rebind_dimension LA2; + typedef CGAL_CLASS Other; + }; + template struct Property : LA::template Property
{}; + template struct Property : + boost::true_type {}; + + static NT CGAL_FUNC(Vector const&a, Vector const&b, + Vector const&c){ + return CGAL::determinant_of_vectors(a,b,c); + } + static Sign CGAL_SIGN_FUNC(Vector const&a, Vector const&b, + Vector const&c){ + return CGAL::sign_of_determinant_of_vectors(a,b,c); + } +}; + +template +struct CGAL_CLASS +, Max_dim_, false> : LA { + typedef typename LA::NT NT; + typedef typename LA::Vector Vector; + template< class D2, class D3=D2 > + struct Rebind_dimension { + typedef typename LA::template Rebind_dimension LA2; + typedef CGAL_CLASS Other; + }; + template struct Property : LA::template Property
{}; + template struct Property : + boost::true_type {}; + + static NT CGAL_FUNC(Vector const&a, Vector const&b, + Vector const&c, Vector const&d){ + return CGAL::determinant_of_vectors(a,b,c,d); + } + static Sign CGAL_SIGN_FUNC(Vector const&a, Vector const&b, + Vector const&c, Vector const&d){ + return CGAL::sign_of_determinant_of_vectors(a,b,c,d); + } +}; + +template +struct CGAL_CLASS +, Max_dim_, false> : LA { + typedef typename LA::NT NT; + typedef typename LA::Vector Vector; + template< class D2, class D3=D2 > + struct Rebind_dimension { + typedef typename LA::template Rebind_dimension LA2; + typedef CGAL_CLASS Other; + }; + template struct Property : LA::template Property
{}; + template struct Property : + boost::true_type {}; + + static NT CGAL_FUNC(Vector const&a, Vector const&b, + Vector const&c, Vector const&d, Vector const&e){ + return CGAL::determinant_of_vectors(a,b,c,d,e); + } + static Sign CGAL_SIGN_FUNC(Vector const&a, Vector const&b, + Vector const&c, Vector const&d, Vector const&e){ + return CGAL::sign_of_determinant_of_vectors(a,b,c,d,e); + } +}; + +template +struct CGAL_CLASS +, Max_dim_, false> : LA { + typedef typename LA::NT NT; + typedef typename LA::Vector Vector; + template< class D2, class D3=D2 > + struct Rebind_dimension { + typedef typename LA::template Rebind_dimension LA2; + typedef CGAL_CLASS Other; + }; + template struct Property : LA::template Property
{}; + template struct Property : + boost::true_type {}; + + static NT CGAL_FUNC(Vector const&a, Vector const&b, + Vector const&c, Vector const&d, Vector const&e, Vector const&f){ + return CGAL::determinant_of_vectors(a,b,c,d,e,f); + } + static Sign CGAL_SIGN_FUNC(Vector const&a, Vector const&b, + Vector const&c, Vector const&d, Vector const&e, Vector const&f){ + return CGAL::sign_of_determinant_of_vectors(a,b,c,d,e,f); + } +}; + +} diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/mix.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/mix.h new file mode 100644 index 00000000..d4cfeeb1 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/mix.h @@ -0,0 +1,46 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_KD_MIX_VECTOR_H +#define CGAL_KD_MIX_VECTOR_H +#include +namespace CGAL { + +template +struct Mix_vector +: Dynamic_::template Rebind_dimension::Other +{ + template + struct Rebind_dimension { + typedef Mix_vector Other; + }; +}; + +template +struct Mix_vector, Max_dim_> +: Static_::template Rebind_dimension, Max_dim_>::Other +{ + template + struct Rebind_dimension { + typedef Mix_vector Other; + }; +}; +} +#endif + diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/sse2.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/sse2.h new file mode 100644 index 00000000..2a75385c --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/sse2.h @@ -0,0 +1,145 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_VECTOR_SSE2_H +#define CGAL_VECTOR_SSE2_H + +// Check what needs adapting for clang, intel and microsoft +#if !defined __SSE2__ || (__GNUC__ * 100 + __GNUC_MINOR__ < 408) +#error Requires SSE2 and gcc 4.8+ +#endif +#include // FIXME: other platforms call it differently + +#include +#include +#include // CGAL::Sign +#include // CGAL::sign + + + +namespace CGAL { + + struct Sse_vector_2 { + typedef double NT; + typedef Dimension_tag<2> Dimension; + typedef Dimension_tag<2> Max_dimension; + // No Rebind_dimension, this is a building block + template struct Property : boost::false_type {}; + template struct Property + : boost::true_type {}; + /* MAYBE? 
+ template struct Property + : boost::true_type {}; + */ + template struct Property + : boost::true_type {}; + template struct Property + : boost::true_type {}; + + typedef __m128d Vector; + struct Construct_vector { + struct Dimension { + // Initialize with NaN? + Vector operator()(unsigned d) const { + CGAL_assertion(d==2); + return Vector(); + } + }; + + struct Iterator { + template + Vector operator()(unsigned d,Iter const& f,Iter const& e) const { + CGAL_assertion(d==2); + double x0 = *f; + double x1 = *++f; + CGAL_assertion(++f==e); + Vector a = { x0, x1 }; + return a; + } + }; + + struct Iterator_and_last { + template + Vector operator()(unsigned d,Iter const& f,Iter const& e,double t) const { + CGAL_assertion(d==2); + Vector a = { *f, t }; + CGAL_assertion(++f==e); + return a; + } + }; + + struct Values { + Vector operator()(double a,double b) const { + Vector r = { a, b }; + return r; + } + }; + + struct Values_divide { + Vector operator()(double h,double a,double b) const { + // {a,b}/{h,h} is probably slower + Vector r = { a/h, b/h }; + return r; + } + }; + }; + + typedef double const* Vector_const_iterator; + static inline Vector_const_iterator vector_begin(Vector const&a){ + return (Vector_const_iterator)(&a); + } + static inline Vector_const_iterator vector_end(Vector const&a){ + return (Vector_const_iterator)(&a)+2; + } + static inline unsigned size_of_vector(Vector){ + return 2; + } + public: + + static double determinant_of_vectors(Vector a, Vector b) { + __m128d c = _mm_shuffle_pd (b, b, 1); // b1, b0 + __m128d d = a * c; // a0*b1, a1*b0 +#ifdef __SSE3__ + __m128d e = _mm_hsub_pd (d, d); + return e[0]; +#else + return d[0]-d[1]; +#endif + } + static CGAL::Sign sign_of_determinant_of_vectors(Vector a, Vector b) { + return CGAL::sign(determinant_of_vectors(a,b)); + } + + static double dot_product(Vector a,Vector b){ +#ifdef __SSE4_1__ + return _mm_dp_pd (a, b, 1+16+32)[0]; +#else + __m128d p = a * b; +#if defined __SSE3__ + __m128d s = _mm_hadd_pd (p, p); + return s[0]; +#else + return p[0]+p[1]; +#endif +#endif + }; + }; + +} +#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/v2int.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/v2int.h new file mode 100644 index 00000000..b85a3734 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/v2int.h @@ -0,0 +1,181 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_VECTOR_2INT_H +#define CGAL_VECTOR_2INT_H + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + + +// What are the pros and cons of having NT be int vs double? 
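// A standalone sketch of the idea behind the Vector_2_int classes below
// (the helper name is illustrative, not CGAL API): with 32-bit coordinates
// bounded by 2^30, as check_limits() enforces for Vector_2_int_prop1,
// coordinate differences stay below 2^31 and their pairwise products below
// 2^62, so the 2x2 orientation determinant fits in int64_t and its sign is
// computed exactly. Assumes <cstdint>, which this header already relies on.
inline int sign_of_orientation_2d_sketch(int32_t ax, int32_t ay,
                                         int32_t bx, int32_t by,
                                         int32_t cx, int32_t cy) {
  int64_t x0 = int64_t(bx) - ax, y0 = int64_t(by) - ay;  // b - a
  int64_t x1 = int64_t(cx) - ax, y1 = int64_t(cy) - ay;  // c - a
  int64_t det = x0 * y1 - y0 * x1;                       // exact, no overflow
  return (det > 0) - (det < 0);                          // -1, 0 or +1
}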
+ +namespace CGAL { + struct Vector_2_int_prop1 { + typedef double NT; // try lying a bit + typedef int32_t NT1; // what is really stored + typedef int32_t NT1b; // slightly longer + typedef int_fast64_t NT2; // longer type for computations + typedef int_fast64_t NT2b; // slightly longer + bool check_limits(int32_t x){return std::abs(x)<(1<<30);} + // TODO: find nice bounds + }; +#ifdef __SIZEOF_INT128__ + struct Vector_2_int_prop2 { + typedef double NT; + typedef int32_t NT1; + typedef int_fast64_t NT1b; + typedef int_fast64_t NT2; + typedef __int128 NT2b; + bool check_limits(int32_t){return true;} + // take a template/int64_t input and still check the limits? + }; + struct Vector_2_int_prop3 { + typedef long double NT; + typedef int64_t NT1; + typedef int64_t NT1b; + typedef __int128 NT2; + typedef __int128 NT2b; + enum { has_limit=true }; + bool check_limits(int32_t x){return std::abs(x)<(1L<<62);} + // TODO: find nice bounds + }; +#endif + + template + struct Vector_2_int : Prop { + using typename Prop::NT; + using typename Prop::NT1; + using typename Prop::NT1b; + using typename Prop::NT2; + using typename Prop::NT2b; + using Prop::check_limits; + + typedef Dimension_tag<2> Dimension; + typedef Dimension_tag<2> Max_dimension; + // No Rebind_dimension, this is a building block + template struct Property : boost::false_type {}; + //template struct Property + // : boost::true_type {}; + template struct Property + : boost::true_type {}; + //template struct Property + // : boost::true_type {}; + // Advertise somehow that the sign_of_determinant* are exact? + + typedef cpp0x::array Vector; + struct Construct_vector { + struct Dimension { + Vector operator()(unsigned d) const { + CGAL_assertion(d==2); + return Vector(); + } + }; + + // TODO (for all constructors): check that input fits in NT1... + struct Iterator { + template + Vector operator()(unsigned d,Iter const& f,Iter const& e) const { + CGAL_assertion(d==2); + NT1 x0 = *f; + NT1 x1 = *++f; + CGAL_assertion (++f == e); + CGAL_assertion (check_limits(x0) && check_limits(x1)); + Vector a = { x0, x1 }; + return a; + } + }; + + struct Iterator_and_last { + template + Vector operator()(unsigned d,Iter const& f,Iter const& e,double t) const { + CGAL_assertion(d==2); + NT1 x = *f; + CGAL_assertion (++f == e); + CGAL_assertion (check_limits(x) && check_limits(t)); + Vector a = { x, t }; + return a; + } + }; + + struct Values { + Vector operator()(NT1 a,NT1 b) const { + CGAL_assertion (check_limits(a) && check_limits(b)); + Vector r = { a, b }; + return r; + } + }; + + /* + // Maybe safer not to provide it + struct Values_divide { + Vector operator()(double h,double a,double b) const { + Vector r = { a/h, b/h }; + return r; + } + }; + */ + }; + + // Since we lie about NT, be consistent about it + typedef transforming_iterator,NT1 const*> Vector_const_iterator; + static inline Vector_const_iterator vector_begin(Vector const&a){ + return Vector_const_iterator(a.begin()); + } + static inline Vector_const_iterator vector_end(Vector const&a){ + return Vector_const_iterator(a.end()); + } + static inline unsigned size_of_vector(Vector){ + return 2; + } + + // for unsigned NT1, check what changes to do. + // return NT or NT2? 
+ static NT determinant_of_vectors(Vector a, Vector b) { + return CGAL::determinant_of_vectors(a,b); + } + static CGAL::Sign sign_of_determinant_of_vectors(Vector a, Vector b) { + return CGAL::sign_of_determinant_of_vectors(a,b); + } + + static NT determinant_of_points(Vector a, Vector b, Vector c) { + // could be faster to convert to NT directly + NT1b a0=a[0]; NT1b a1=a[1]; + NT1b x0=b[0]-a0; NT1b x1=b[1]-a1; + NT1b y0=c[0]-a0; NT1b y1=c[1]-a1; + return CGAL::determinant(x0,x1,y0,y1); + } + static CGAL::Sign sign_of_determinant_of_points(Vector a, Vector b, Vector c) { + NT1b a0=a[0]; NT1b a1=a[1]; + NT1b x0=b[0]-a0; NT1b x1=b[1]-a1; + NT2b y0=c[0]-a0; NT2b y1=c[1]-a1; + return CGAL::compare(x0*y1,x1*y0); + } + }; + +} +#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/vector.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/vector.h new file mode 100644 index 00000000..f9cc4e3c --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Vector/vector.h @@ -0,0 +1,167 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_VECTOR_VECTOR_H +#define CGAL_VECTOR_VECTOR_H +#include +#include +#include +#include +#include +#include +#include +namespace CGAL { + +//Derive from a class that doesn't depend on Dim, or still use Dim for checking? +template struct Vector_vector { + typedef NT_ NT; + typedef Dim_ Dimension; + typedef Max_dim_ Max_dimension; + typedef std::vector Vector; + template< class D2, class D3=D2 > + struct Rebind_dimension { + typedef Vector_vector< NT, D2, D3 > Other; + }; + template struct Property : boost::false_type {}; + + struct Construct_vector { + struct Dimension { + Vector operator()(int d) const { + return Vector(d); + } + }; + + struct Iterator { + template + Vector operator()(int CGAL_assertion_code(d),Iter const& f,Iter const& e) const { + CGAL_assertion(d==std::distance(f,e)); + return Vector(f,e); + } + }; + + // unneeded thanks to Iterator_and_last? +#if 0 + struct Iterator_add_one { + template + Vector operator()(int CGAL_assertion_code(d),Iter const& f,Iter const& e) const { + CGAL_assertion(d==std::distance(f,e)+1); + Vector a; + a.reserve(d+1); + a.insert(a.end(),f,e); + a.push_back(1); + return a; + } + }; +#endif + + struct Iterator_and_last { + template + Vector operator()(int d,Iter const& f,Iter const& e,CGAL_FORWARDABLE(T) t) const { + CGAL_assertion(d==std::distance(f,e)+1); + Vector a; + a.reserve(d+1); + a.insert(a.end(),f,e); + a.push_back(CGAL_FORWARD(T,t)); + return a; + } + }; + + // useless, use a transform_iterator? 
+#if 0 + struct Iterator_and_last_divide { + template + Vector operator()(int d,Iter f,Iter const& e,T const&t) const { + CGAL_assertion(d==std::distance(f,e)+1); + Vector a; + a.reserve(d+1); + for(;f!=e;++f){ + a.push_back(*f/t); + } + return a; + } + }; +#endif + + struct Values { +#ifdef CGAL_CXX11 + template + Vector operator()(U&&...u) const { + //TODO: check the right number of {}, g++ accepts one and two + Vector a={forward_safe(u)...}; + return a; + } +#else + +#define CGAL_VAR(Z,N,_) a.push_back(t##N); +#define CGAL_CODE(Z,N,_) Vector operator()(BOOST_PP_ENUM_PARAMS(N,NT const& t)) const { \ + Vector a; \ + a.reserve(N); \ + BOOST_PP_REPEAT(N,CGAL_VAR,) \ + return a; \ +} +BOOST_PP_REPEAT_FROM_TO(1, 11, CGAL_CODE, _ ) +#undef CGAL_CODE +#undef CGAL_VAR + +#endif + }; + + struct Values_divide { +#ifdef CGAL_CXX11 + template + Vector operator()(H const&h,U&&...u) const { + //TODO: do we want to cast at some point? + //e.g. to avoid 1/2 in integers + // ==> use Rational_traits().make_rational(x,y) ? + Vector a={Rational_traits().make_rational(std::forward(u),h)...}; + return a; + } +#else + +#define CGAL_VAR(Z,N,_) a.push_back(Rational_traits().make_rational( t##N ,h)); +#define CGAL_CODE(Z,N,_) template Vector \ + operator()(H const&h, BOOST_PP_ENUM_PARAMS(N,NT const& t)) const { \ + Vector a; \ + a.reserve(N); \ + BOOST_PP_REPEAT(N,CGAL_VAR,) \ + return a; \ + } + BOOST_PP_REPEAT_FROM_TO(1, 11, CGAL_CODE, _ ) +#undef CGAL_CODE +#undef CGAL_VAR + +#endif + }; + }; + typedef typename Vector::const_iterator Vector_const_iterator; + static Vector_const_iterator vector_begin(Vector const&a){ + return a.begin(); + } + static Vector_const_iterator vector_end(Vector const&a){ + return a.end(); + } + static int size_of_vector(Vector const&a){ + return (int)a.size(); + } +}; + + +} +#endif + diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Cartesian_wrap.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Cartesian_wrap.h new file mode 100644 index 00000000..44e9aa96 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Cartesian_wrap.h @@ -0,0 +1,305 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_KERNEL_D_CARTESIAN_WRAP_H +#define CGAL_KERNEL_D_CARTESIAN_WRAP_H + +#include +#include + +#if defined(BOOST_MSVC) +# pragma warning(push) +# pragma warning(disable:4003) // not enough actual parameters for macro 'BOOST_PP_EXPAND_I' + // http://lists.boost.org/boost-users/2014/11/83291.php +#endif +#include +#include +#include +#include +#include +#include + +#include + +#include +#include +#include + +//TODO: do we want to store the kernel ref in the Object wrappers? It would allow for additions and operator[] and things like that to work, but objects would still need to be created by functors. 
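// A minimal sketch of the wrapping pattern that Cartesian_wrap implements
// below (all names here are illustrative, not CGAL API): user-level objects
// hold the base kernel's representation, and a wrapping functor unwraps its
// arguments, forwards them to the base kernel functor, then re-wraps the
// result. C++11 variadics are used for brevity; the real class also provides
// Boost.PP fallbacks for pre-C++11 compilers.
template <class Rep_> struct Wrapped_object {
  Rep_ rep_;
  Rep_ const& rep() const { return rep_; }
};

template <class Base_functor, class Wrapped_result>
struct Wrapping_functor {
  Base_functor base;
  template <class... U>
  Wrapped_result operator()(Wrapped_object<U> const&... u) const {
    // unwrap each argument, call the base functor, re-wrap its result
    return Wrapped_result{ base(u.rep()...) };
  }
};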
+ +namespace CGAL { +namespace internal { +BOOST_MPL_HAS_XXX_TRAIT_DEF(Is_wrapper) +template::value> struct Is_wrapper { + enum { value=false }; + typedef Tag_false type; +}; +template struct Is_wrapper { + typedef typename T::Is_wrapper type; + enum { value=type::value }; +}; + +template::value> struct Is_wrapper_iterator { + enum { value=false }; + typedef Tag_false type; +}; +template struct Is_wrapper_iterator : + Is_wrapper::type>::value_type> +{ }; + +struct Forward_rep { +//TODO: make a good C++0X version with perfect forwarding +//#ifdef CGAL_CXX11 +//template ::type>::value&&!Is_wrapper_iterator::type>::value>::type> +//T&& operator()(typename std::remove_reference::type&& t) const {return static_cast(t);}; +//template ::type>::value&&!Is_wrapper_iterator::type>::value>::type> +//T&& operator()(typename std::remove_reference::type& t) const {return static_cast(t);}; +// +//template ::type>::value>::type> +//typename Type_copy_cvref::type::Rep>::type&& +//operator()(T&& t) const { +// return static_cast::type::Rep>::type&&>(t.rep()); +//}; +// +//template ::type>::value>::type> +//transforming_iterator::type> +//operator()(T&& t) const { +// return make_transforming_iterator(std::forward(t),Forward_rep()); +//}; +//#else +template ::value,bool=Is_wrapper_iterator::value> struct result_; +template struct result_{typedef T const& type;}; +template struct result_{typedef typename decay::type::Rep const& type;}; +template struct result_{typedef transforming_iterator::type> type;}; +template struct result; +template struct result : result_ {}; + +template typename boost::disable_if,Is_wrapper_iterator >,T>::type const& operator()(T const& t) const {return t;} +template typename boost::disable_if,Is_wrapper_iterator >,T>::type& operator()(T& t) const {return t;} + +template typename T::Rep const& operator()(T const& t, typename boost::enable_if >::type* = 0) const {return t.rep();} + +template transforming_iterator,T>::type> operator()(T const& t) const {return make_transforming_iterator(t,Forward_rep());} +//#endif +}; +} + +template ::value> +struct Map_wrapping_type : Get_type {}; +#define CGAL_REGISTER_OBJECT_WRAPPER(X) \ + template \ + struct Map_wrapping_type { \ + typedef Wrap::X##_d type; \ + } +CGAL_REGISTER_OBJECT_WRAPPER(Point); +CGAL_REGISTER_OBJECT_WRAPPER(Vector); +CGAL_REGISTER_OBJECT_WRAPPER(Segment); +CGAL_REGISTER_OBJECT_WRAPPER(Sphere); +CGAL_REGISTER_OBJECT_WRAPPER(Hyperplane); +CGAL_REGISTER_OBJECT_WRAPPER(Weighted_point); +#undef CGAL_REGISTER_OBJECT_WRAPPER + +// Note: this tends to be an all or nothing thing currently, wrapping +// only some types breaks, probably because we don't check whether the +// return type is indeed wrapped. +template < typename Base_ , typename Derived_ = Default > +struct Cartesian_wrap : public Base_ +{ + CGAL_CONSTEXPR Cartesian_wrap(){} + CGAL_CONSTEXPR Cartesian_wrap(int d):Base_(d){} + typedef Base_ Kernel_base; + typedef Cartesian_wrap Self; + // TODO: pass the 2 types Self and Derived to the wrappers, they can use Self for most purposes and Derived only for Kernel_traits' typedef R. + typedef typename Default::Get::type Derived; + // FIXME: The list doesn't belong here. 
+ typedef boost::mpl::vector Wrapped_list; + + template + struct Type : Map_wrapping_type {}; + + //Translate the arguments + template ::type, + bool=Provides_functor::value, + bool=boost::mpl::contains::type>::type::value> + struct Functor { + typedef typename Get_functor::type B; + struct type { + B b; + type(){} + type(Self const&k):b(k){} + typedef typename B::result_type result_type; +#ifdef CGAL_CXX11 + template result_type operator()(U&&...u)const{ + return b(internal::Forward_rep()(u)...); + } +#else +#define CGAL_VAR(Z,N,_) internal::Forward_rep()(u##N) +#define CGAL_CODE(Z,N,_) template result_type \ + operator()(BOOST_PP_ENUM_BINARY_PARAMS(N,U,const&u))const{ \ + return b(BOOST_PP_ENUM(N,CGAL_VAR,)); \ + } + BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_) +#undef CGAL_CODE +#undef CGAL_VAR +// In case the last argument needs to be non-const. Fragile... +#define CGAL_VAR(Z,N,_) internal::Forward_rep()(u##N) +#define CGAL_CODE(Z,N,_) template result_type \ + operator()(BOOST_PP_ENUM_BINARY_PARAMS(N,U,const&u),V&v)const{ \ + return b(BOOST_PP_ENUM(N,CGAL_VAR,),internal::Forward_rep()(v)); \ + } + BOOST_PP_REPEAT_FROM_TO(1,8,CGAL_CODE,_) +#undef CGAL_CODE +#undef CGAL_VAR +#endif + }; + }; + + // Preserve the difference between Null_functor and nothing. + template + struct Functor + : Get_functor {}; + + //Translate both the arguments and the result + //TODO: Check Is_wrapper instead of relying on map_result_tag? + template struct Functor { + typedef typename Get_functor::type B; + struct type { + B b; + type(){} + type(Self const&k):b(k){} + typedef typename map_result_tag::type result_tag; + // FIXME: Self or Derived? + typedef typename Get_type::type result_type; +#ifdef CGAL_CXX11 + template result_type operator()(U&&...u)const{ + return result_type(Eval_functor(),b,internal::Forward_rep()(u)...); + } +#else +#define CGAL_VAR(Z,N,_) internal::Forward_rep()(u##N) +#define CGAL_CODE(Z,N,_) template result_type \ + operator()(BOOST_PP_ENUM_BINARY_PARAMS(N,U,const&u))const{ \ + return result_type(Eval_functor(),b,BOOST_PP_ENUM(N,CGAL_VAR,)); \ + } + BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_) +#undef CGAL_CODE +#undef CGAL_VAR +#endif + }; + }; + +}; + +template < typename Base_ > +struct Cartesian_refcount : public Base_ +{ + CGAL_CONSTEXPR Cartesian_refcount(){} + CGAL_CONSTEXPR Cartesian_refcount(int d):Base_(d){} + typedef Base_ Kernel_base; + typedef Cartesian_refcount Self; + + // FIXME: Use object_list, or a list passed as argument, or anything + // automatic. 
+ template struct Type : Get_type {}; +#define CGAL_Kernel_obj(X,Y) \ + template struct Type { typedef Ref_count_obj type; }; + + CGAL_Kernel_obj(Point,point) + CGAL_Kernel_obj(Vector,vector) +#undef CGAL_Kernel_obj + + template struct Dispatch { + //typedef typename map_functor_type::type f_t; + typedef typename map_result_tag::type r_t; + enum { + is_nul = boost::is_same::type,Null_functor>::value, + ret_rcobj = boost::is_same::value || boost::is_same::value + }; + }; + + //Translate the arguments + template::is_nul,bool=Dispatch::ret_rcobj> struct Functor { + typedef typename Get_functor::type B; + struct type { + B b; + type(){} + type(Self const&k):b(k){} + typedef typename B::result_type result_type; +#ifdef CGAL_CXX11 + template result_type operator()(U&&...u)const{ + return b(internal::Forward_rep()(u)...); + } +#else + result_type operator()()const{ + return b(); + } +#define CGAL_VAR(Z,N,_) internal::Forward_rep()(u##N) +#define CGAL_CODE(Z,N,_) template result_type \ + operator()(BOOST_PP_ENUM_BINARY_PARAMS(N,U,const&u))const{ \ + return b(BOOST_PP_ENUM(N,CGAL_VAR,)); \ + } + BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_) +#undef CGAL_CODE +#undef CGAL_VAR +#endif + }; + }; + + //Translate both the arguments and the result + template struct Functor { + typedef Null_functor type; + }; + + template struct Functor { + typedef typename Get_functor::type B; + struct type { + B b; + type(){} + type(Self const&k):b(k){} + typedef typename map_result_tag::type result_tag; + typedef typename Get_type::type result_type; +#ifdef CGAL_CXX11 + template result_type operator()(U&&...u)const{ + return result_type(Eval_functor(),b,internal::Forward_rep()(u)...); + } +#else + result_type operator()()const{ + return result_type(Eval_functor(),b); + } +#define CGAL_VAR(Z,N,_) internal::Forward_rep()(u##N) +#define CGAL_CODE(Z,N,_) template result_type \ + operator()(BOOST_PP_ENUM_BINARY_PARAMS(N,U,const&u))const{ \ + return result_type(Eval_functor(),b,BOOST_PP_ENUM(N,CGAL_VAR,)); \ + } + BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_) +#undef CGAL_CODE +#undef CGAL_VAR +#endif + }; + }; + +}; + +} //namespace CGAL + +#if defined(BOOST_MSVC) +# pragma warning(pop) +#endif + +#endif // CGAL_KERNEL_D_CARTESIAN_WRAP_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Hyperplane_d.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Hyperplane_d.h new file mode 100644 index 00000000..54fd50bd --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Hyperplane_d.h @@ -0,0 +1,131 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
+// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_WRAPPER_HYPERPLANE_D_H +#define CGAL_WRAPPER_HYPERPLANE_D_H + +#include +#include +#include +#include +#include +#ifndef CGAL_CXX11 +#include +#endif +#include + +namespace CGAL { +namespace Wrap { + +template +class Hyperplane_d : public Get_type::type +{ + typedef typename Get_type::type FT_; + typedef typename R_::Kernel_base Kbase; + typedef typename Get_type::type Vector_; + typedef typename Get_functor >::type CHBase; + typedef typename Get_functor::type OVBase; + typedef typename Get_functor::type HTBase; + + typedef Hyperplane_d Self; + CGAL_static_assertion((boost::is_same::type>::value)); + +public: + + typedef Tag_true Is_wrapper; + typedef typename R_::Default_ambient_dimension Ambient_dimension; + typedef typename Increment_dimension::type Feature_dimension; + + typedef typename Get_type::type Rep; + + const Rep& rep() const + { + return *this; + } + + Rep& rep() + { + return *this; + } + + typedef R_ R; + +#ifdef CGAL_CXX11 + template::type...>,std::tuple >::value>::type> explicit Hyperplane_d(U&&...u) + : Rep(CHBase()(std::forward(u)...)){} + +// // called from Construct_point_d +// template explicit Point_d(Eval_functor&&,U&&...u) +// : Rep(Eval_functor(), std::forward(u)...){} + template explicit Hyperplane_d(Eval_functor&&,F&&f,U&&...u) + : Rep(std::forward(f)(std::forward(u)...)){} + +#if 0 + // the new standard may make this necessary + Point_d(Point_d const&)=default; + Point_d(Point_d &);//=default; + Point_d(Point_d &&)=default; +#endif + + // try not to use these + Hyperplane_d(Rep const& v) : Rep(v) {} + Hyperplane_d(Rep& v) : Rep(static_cast(v)) {} + Hyperplane_d(Rep&& v) : Rep(std::move(v)) {} + +#else + + Hyperplane_d() : Rep(CHBase()()) {} + + Hyperplane_d(Rep const& v) : Rep(v) {} // try not to use it + +#define CGAL_CODE(Z,N,_) template \ + explicit Hyperplane_d(BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ + : Rep(CHBase()( \ + BOOST_PP_ENUM_PARAMS(N,t))) {} \ + \ + template \ + Hyperplane_d(Eval_functor,F const& f,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ + : Rep(f(BOOST_PP_ENUM_PARAMS(N,t))) {} + /* + template \ + Point_d(Eval_functor,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ + : Rep(Eval_functor(), BOOST_PP_ENUM_PARAMS(N,t)) {} + */ + + BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_) +#undef CGAL_CODE + +#endif + + //TODO: if OVBase returns a reference to a base vector, cast it to a + //reference to a wrapper vector. Ugly but should be safe. + Vector_ orthogonal_vector()const{ + return Vector_(Eval_functor(),OVBase(),rep()); + } + FT_ translation()const{ + return HTBase()(rep()); + } + + +}; + +} //namespace Wrap +} //namespace CGAL + +#endif // CGAL_WRAPPER_SPHERE_D_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Point_d.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Point_d.h new file mode 100644 index 00000000..0718c947 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Point_d.h @@ -0,0 +1,284 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. 
+// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_WRAPPER_POINT_D_H +#define CGAL_WRAPPER_POINT_D_H + +#include +#include +#include +#include +#include +#include +#include +#include +#ifndef CGAL_CXX11 +#include +#endif +#include + +namespace CGAL { +namespace Wrap { + +template +class Point_d : public Get_type::type + // Deriving won't work if the point is just a __m256d. + // Test boost/std::is_class for instance +{ + typedef typename Get_type::type RT_; + typedef typename Get_type::type FT_; + typedef typename R_::Kernel_base Kbase; + typedef typename Get_type::type Vector_; + typedef typename Get_functor >::type CPBase; + typedef typename Get_functor::type CCBase; + typedef typename Get_functor >::type CPI; + + + typedef Point_d Self; + CGAL_static_assertion((boost::is_same::type>::value)); + +public: + + typedef Tag_true Is_wrapper; + typedef typename R_::Default_ambient_dimension Ambient_dimension; + typedef Dimension_tag<0> Feature_dimension; + + typedef typename Get_type::type Rep; + //typedef typename CGAL::decay::type>::type Cartesian_const_iterator; + + const Rep& rep() const + { + return *this; + } + + Rep& rep() + { + return *this; + } + + typedef R_ R; + +#ifdef CGAL_CXX11 + template::type...>,std::tuple >::value>::type> explicit Point_d(U&&...u) + : Rep(CPBase()(std::forward(u)...)){} + +// // called from Construct_point_d +// template explicit Point_d(Eval_functor&&,U&&...u) +// : Rep(Eval_functor(), std::forward(u)...){} + template explicit Point_d(Eval_functor&&,F&&f,U&&...u) + : Rep(std::forward(f)(std::forward(u)...)){} + +#if 0 + // the new standard may make this necessary + Point_d(Point_d const&)=default; + Point_d(Point_d &);//=default; + Point_d(Point_d &&)=default; +#endif + + // try not to use these + Point_d(Rep const& v) : Rep(v) {} + Point_d(Rep& v) : Rep(static_cast(v)) {} + Point_d(Rep&& v) : Rep(std::move(v)) {} + + // this one should be implicit + Point_d(Origin const& v) + : Rep(CPBase()(v)) {} + Point_d(Origin& v) + : Rep(CPBase()(v)) {} + Point_d(Origin&& v) + : Rep(CPBase()(std::move(v))) {} + +#else + + Point_d() : Rep(CPBase()()) {} + + Point_d(Rep const& v) : Rep(v) {} // try not to use it + +#define CGAL_CODE(Z,N,_) template \ + explicit Point_d(BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ + : Rep(CPBase()( \ + BOOST_PP_ENUM_PARAMS(N,t))) {} \ + \ + template \ + Point_d(Eval_functor,F const& f,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ + : Rep(f(BOOST_PP_ENUM_PARAMS(N,t))) {} + /* + template \ + Point_d(Eval_functor,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ + : Rep(Eval_functor(), BOOST_PP_ENUM_PARAMS(N,t)) {} + */ + + BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_) +#undef CGAL_CODE + + // this one should be implicit + Point_d(Origin const& o) + : Rep(CPBase()(o)) {} + +#endif + + typename boost::result_of::type cartesian(int i)const{ + return CCBase()(rep(),i); + } + typename boost::result_of::type operator[](int i)const{ + return CCBase()(rep(),i); + } + + typename boost::result_of::type cartesian_begin()const{ + return CPI()(rep(),Begin_tag()); + } + + typename boost::result_of::type cartesian_end()const{ + return CPI()(rep(),End_tag()); + } + + int dimension() const { + typedef typename Get_functor::type PDBase; + return PDBase()(rep()); + } + + /* + Direction_d direction() const + { + return R().construct_direction_d_object()(*this); + } + + Vector_d transform(const 
Aff_transformation_d &t) const + { + return t.transform(*this); + } + + Vector_d operator/(const RT& c) const + { + return R().construct_divided_vector_d_object()(*this,c); + } + + Vector_d operator/(const typename First_if_different::Type & c) const + { + return R().construct_divided_vector_d_object()(*this,c); + } + + typename Qualified_result_of::type + x() const + { + return R().compute_x_3_object()(*this); + } + + typename Qualified_result_of::type + y() const + { + return R().compute_y_3_object()(*this); + } + + typename Qualified_result_of::type + z() const + { + return R().compute_z_3_object()(*this); + } + + typename Qualified_result_of::type + hx() const + { + return R().compute_hx_3_object()(*this); + } + + typename Qualified_result_of::type + hy() const + { + return R().compute_hy_3_object()(*this); + } + + typename Qualified_result_of::type + hz() const + { + return R().compute_hz_3_object()(*this); + } + + typename Qualified_result_of::type + hw() const + { + return R().compute_hw_3_object()(*this); + } + + typename Qualified_result_of::type + cartesian(int i) const + { + CGAL_kernel_precondition( (i == 0) || (i == 1) || (i == 2) ); + if (i==0) return x(); + if (i==1) return y(); + return z(); + } + + typename Qualified_result_of::type + homogeneous(int i) const + { + CGAL_kernel_precondition( (i >= 0) || (i <= 3) ); + if (i==0) return hx(); + if (i==1) return hy(); + if (i==2) return hz(); + return hw(); + } + + typename Qualified_result_of::type + squared_length() const + { + return R().compute_squared_length_3_object()(*this); + } +*/ +}; +#if 0 +template Point_d::Point_d(Point_d &)=default; +#endif + +//TODO: IO + +template +std::ostream& operator <<(std::ostream& os, const Point_d& p) +{ + typedef typename R_::Kernel_base Kbase; + typedef typename Get_functor >::type CPI; + // Should just be "auto"... + typename CGAL::decay::Rep,Begin_tag) + >::type>::type + b = p.cartesian_begin(), + e = p.cartesian_end(); + os << p.dimension(); + for(; b != e; ++b){ + os << " " << *b; + } + return os; +} + +//template +//Vector_d operator+(const Vector_d& v,const Vector_d& w) const +//{ +// return typename R::template Construct::type()(v,w); +//} +// +//template +//Vector_d operator-(const Vector_d& v,const Vector_d& w) const +//{ +// return typename R::template Construct::type()(v,w); +//} + +} //namespace Wrap +} //namespace CGAL + +#endif // CGAL_WRAPPER_POINT_D_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Ref_count_obj.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Ref_count_obj.h new file mode 100644 index 00000000..f33e14c0 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Ref_count_obj.h @@ -0,0 +1,120 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
+// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_WRAPPER_REF_COUNT_OBJ_H +#define CGAL_WRAPPER_REF_COUNT_OBJ_H + +#include +#include +#include +#include +#include +#include +#include +#include +#ifndef CGAL_CXX11 +#include +#endif +#include + +// no need for a fancy interface here, people can use the Point_d wrapper on +// top. + +namespace CGAL { + +template +class Ref_count_obj +{ + typedef typename R_::Kernel_base Kbase; + typedef typename Get_functor >::type CBase; + + typedef Ref_count_obj Self; + CGAL_static_assertion((boost::is_same::type>::value)); + +public: + typedef R_ R; + + typedef Tag_true Is_wrapper; + typedef typename R_::Default_ambient_dimension Ambient_dimension; + //typedef Dimension_tag<0> Feature_dimension; + + typedef typename Get_type::type Rep; + typedef Handle_for Data; + +private: + Data data; +public: + + const Rep& rep() const + { + return CGAL::get_pointee_or_identity(data); + } + +#ifdef CGAL_CXX11 + template::type...>,std::tuple >::value>::type> explicit Ref_count_obj(U&&...u) + : data(Eval_functor(),CBase(),std::forward(u)...){} + + template explicit Ref_count_obj(Eval_functor&&,F&&f,U&&...u) + : data(Eval_functor(),std::forward(f),std::forward(u)...){} + + // try not to use these + Ref_count_obj(Rep const& v) : data(v) {} + Ref_count_obj(Rep& v) : data(static_cast(v)) {} + Ref_count_obj(Rep&& v) : data(std::move(v)) {} + + // Do we really need this for point? +// // this one should be implicit +// Ref_count_obj(Origin const& v) +// : data(Eval_functor(),CBase(),v) {} +// Ref_count_obj(Origin& v) +// : data(Eval_functor(),CBase(),v) {} +// Ref_count_obj(Origin&& v) +// : data(Eval_functor(),CBase(),std::move(v)) {} + +#else + + Ref_count_obj() : data(Eval_functor(),CBase()) {} + + Ref_count_obj(Rep const& v) : data(v) {} // try not to use it + +#define CGAL_CODE(Z,N,_) template \ + explicit Ref_count_obj(BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ + : data(Eval_functor(),CBase(),BOOST_PP_ENUM_PARAMS(N,t)) {} \ + \ + template \ + Ref_count_obj(Eval_functor,F const& f,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ + : data(Eval_functor(),f,BOOST_PP_ENUM_PARAMS(N,t)) {} + + BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_) +#undef CGAL_CODE + template + Ref_count_obj(Eval_functor,F const& f) + : data(Eval_functor(),f) {} + +// // this one should be implicit +// Ref_count_obj(Origin const& o) +// : data(Eval_functor(),CBase(),o) {} + +#endif + +}; + +} //namespace CGAL + +#endif diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Segment_d.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Segment_d.h new file mode 100644 index 00000000..bfb20a77 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Segment_d.h @@ -0,0 +1,133 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
+// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_WRAPPER_SEGMENT_D_H +#define CGAL_WRAPPER_SEGMENT_D_H + +#include +#include +#include +#include +#include +#include +#include +#ifndef CGAL_CXX11 +#include +#endif +#include + +namespace CGAL { +namespace Wrap { + +template +class Segment_d : public Get_type::type +{ + typedef typename Get_type::type RT_; + typedef typename Get_type::type FT_; + typedef typename R_::Kernel_base Kbase; + typedef typename Get_type::type Point_; + typedef typename Get_functor >::type CPBase; + typedef typename Get_functor >::type CSBase; + typedef typename Get_functor::type CSEBase; + + typedef Segment_d Self; + CGAL_static_assertion((boost::is_same::type>::value)); + +public: + + typedef Tag_true Is_wrapper; + typedef typename R_::Default_ambient_dimension Ambient_dimension; + typedef Dimension_tag<1> Feature_dimension; + + typedef typename Get_type::type Rep; + + const Rep& rep() const + { + return *this; + } + + Rep& rep() + { + return *this; + } + + typedef R_ R; + +#ifdef CGAL_CXX11 + template::type...>,std::tuple >::value>::type> explicit Segment_d(U&&...u) + : Rep(CSBase()(std::forward(u)...)){} + +// // called from Construct_point_d +// template explicit Point_d(Eval_functor&&,U&&...u) +// : Rep(Eval_functor(), std::forward(u)...){} + template explicit Segment_d(Eval_functor&&,F&&f,U&&...u) + : Rep(std::forward(f)(std::forward(u)...)){} + +#if 0 + // the new standard may make this necessary + Point_d(Point_d const&)=default; + Point_d(Point_d &);//=default; + Point_d(Point_d &&)=default; +#endif + + // try not to use these + Segment_d(Rep const& v) : Rep(v) {} + Segment_d(Rep& v) : Rep(static_cast(v)) {} + Segment_d(Rep&& v) : Rep(std::move(v)) {} + +#else + + Segment_d() : Rep(CSBase()()) {} + + Segment_d(Rep const& v) : Rep(v) {} // try not to use it + +#define CGAL_CODE(Z,N,_) template \ + explicit Segment_d(BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ + : Rep(CSBase()( \ + BOOST_PP_ENUM_PARAMS(N,t))) {} \ + \ + template \ + Segment_d(Eval_functor,F const& f,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ + : Rep(f(BOOST_PP_ENUM_PARAMS(N,t))) {} + /* + template \ + Point_d(Eval_functor,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ + : Rep(Eval_functor(), BOOST_PP_ENUM_PARAMS(N,t)) {} + */ + + BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_) +#undef CGAL_CODE + +#endif + + //TODO: if CSEBase returns a reference to a base point, cast it to a + //reference to a wrapper point. Ugly but should be safe. + Point_ source()const{ + return Point_(Eval_functor(),CSEBase(),rep(),0); + } + Point_ target()const{ + return Point_(Eval_functor(),CSEBase(),rep(),1); + } + +}; + +} //namespace Wrap +} //namespace CGAL + +#endif // CGAL_WRAPPER_SEGMENT_D_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Sphere_d.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Sphere_d.h new file mode 100644 index 00000000..87f0c66e --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Sphere_d.h @@ -0,0 +1,130 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. 
+// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_WRAPPER_SPHERE_D_H +#define CGAL_WRAPPER_SPHERE_D_H + +#include +#include +#include +#include +#include +#ifndef CGAL_CXX11 +#include +#endif +#include + +namespace CGAL { +namespace Wrap { + +template +class Sphere_d : public Get_type::type +{ + typedef typename Get_type::type FT_; + typedef typename R_::Kernel_base Kbase; + typedef typename Get_type::type Point_; + typedef typename Get_functor >::type CSBase; + typedef typename Get_functor::type COSBase; + typedef typename Get_functor::type SRBase; + + typedef Sphere_d Self; + CGAL_static_assertion((boost::is_same::type>::value)); + +public: + + typedef Tag_true Is_wrapper; + typedef typename R_::Default_ambient_dimension Ambient_dimension; + typedef typename Increment_dimension::type Feature_dimension; + + typedef typename Get_type::type Rep; + + const Rep& rep() const + { + return *this; + } + + Rep& rep() + { + return *this; + } + + typedef R_ R; + +#ifdef CGAL_CXX11 + template::type...>,std::tuple >::value>::type> explicit Sphere_d(U&&...u) + : Rep(CSBase()(std::forward(u)...)){} + +// // called from Construct_point_d +// template explicit Point_d(Eval_functor&&,U&&...u) +// : Rep(Eval_functor(), std::forward(u)...){} + template explicit Sphere_d(Eval_functor&&,F&&f,U&&...u) + : Rep(std::forward(f)(std::forward(u)...)){} + +#if 0 + // the new standard may make this necessary + Point_d(Point_d const&)=default; + Point_d(Point_d &);//=default; + Point_d(Point_d &&)=default; +#endif + + // try not to use these + Sphere_d(Rep const& v) : Rep(v) {} + Sphere_d(Rep& v) : Rep(static_cast(v)) {} + Sphere_d(Rep&& v) : Rep(std::move(v)) {} + +#else + + Sphere_d() : Rep(CSBase()()) {} + + Sphere_d(Rep const& v) : Rep(v) {} // try not to use it + +#define CGAL_CODE(Z,N,_) template \ + explicit Sphere_d(BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ + : Rep(CSBase()( \ + BOOST_PP_ENUM_PARAMS(N,t))) {} \ + \ + template \ + Sphere_d(Eval_functor,F const& f,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ + : Rep(f(BOOST_PP_ENUM_PARAMS(N,t))) {} + /* + template \ + Point_d(Eval_functor,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ + : Rep(Eval_functor(), BOOST_PP_ENUM_PARAMS(N,t)) {} + */ + + BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_) +#undef CGAL_CODE + +#endif + + //TODO: if COSBase returns a reference to a base point, cast it to a + //reference to a wrapper point. Ugly but should be safe. + Point_ center()const{ + return Point_(Eval_functor(),COSBase(),rep()); + } + FT_ squared_radius()const{ + return SRBase()(rep()); + } + +}; + +} //namespace Wrap +} //namespace CGAL + +#endif // CGAL_WRAPPER_SPHERE_D_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Vector_d.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Vector_d.h new file mode 100644 index 00000000..b7d1f0d0 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Vector_d.h @@ -0,0 +1,266 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. 
+// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_WRAPPER_VECTOR_D_H +#define CGAL_WRAPPER_VECTOR_D_H + +#include +#include +#include +#include +#include +#include +#include +#ifndef CGAL_CXX11 +#include +#endif +#include + +namespace CGAL { +namespace Wrap { + +template +class Vector_d : public Get_type::type +{ + typedef typename Get_type::type RT_; + typedef typename Get_type::type FT_; + typedef typename R_::Kernel_base Kbase; + typedef typename Get_type::type Point_; + typedef typename Get_functor >::type CVBase; + typedef typename Get_functor::type CCBase; + typedef typename Get_functor >::type CVI; + typedef typename Get_functor::type SLBase; + + typedef Vector_d Self; + CGAL_static_assertion((boost::is_same::type>::value)); + +public: + + typedef Tag_true Is_wrapper; + typedef typename R_::Default_ambient_dimension Ambient_dimension; + typedef Dimension_tag<0> Feature_dimension; + + //typedef typename R_::Vector_cartesian_const_iterator Cartesian_const_iterator; + typedef typename Get_type::type Rep; + + const Rep& rep() const + { + return *this; + } + + Rep& rep() + { + return *this; + } + + typedef R_ R; + +#ifdef CGAL_CXX11 + template::type...>,std::tuple >::value>::type> explicit Vector_d(U&&...u) + : Rep(CVBase()(std::forward(u)...)){} + +// // called from Construct_vector_d +// template explicit Vector_d(Eval_functor&&,U&&...u) +// : Rep(Eval_functor(), std::forward(u)...){} + template explicit Vector_d(Eval_functor&&,F&&f,U&&...u) + : Rep(std::forward(f)(std::forward(u)...)){} + +#if 0 + // the new standard may make this necessary + Vector_d(Vector_d const&)=default; + Vector_d(Vector_d &);//=default; + Vector_d(Vector_d &&)=default; +#endif + + // try not to use these + Vector_d(Rep const& v) : Rep(v) {} + Vector_d(Rep& v) : Rep(static_cast(v)) {} + Vector_d(Rep&& v) : Rep(std::move(v)) {} + + // this one should be implicit + Vector_d(Null_vector const& v) + : Rep(CVBase()(v)) {} + Vector_d(Null_vector& v) + : Rep(CVBase()(v)) {} + Vector_d(Null_vector&& v) + : Rep(CVBase()(std::move(v))) {} + +#else + + Vector_d() : Rep(CVBase()()) {} + + Vector_d(Rep const& v) : Rep(v) {} // try not to use it + +#define CGAL_CODE(Z,N,_) template \ + explicit Vector_d(BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ + : Rep(CVBase()( \ + BOOST_PP_ENUM_PARAMS(N,t))) {} \ + \ + template \ + Vector_d(Eval_functor,F const& f,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ + : Rep(f(BOOST_PP_ENUM_PARAMS(N,t))) {} + /* + template \ + Vector_d(Eval_functor,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ + : Rep(Eval_functor(), BOOST_PP_ENUM_PARAMS(N,t)) {} + */ + + BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_) +#undef CGAL_CODE + + // this one should be implicit + Vector_d(Null_vector const& v) + : Rep(CVBase()(v)) {} + +#endif + + typename boost::result_of::type cartesian(int i)const{ + return CCBase()(rep(),i); + } + + typename boost::result_of::type operator[](int i)const{ + return CCBase()(rep(),i); + } + + typename boost::result_of::type cartesian_begin()const{ + return CVI()(rep(),Begin_tag()); + } + + typename boost::result_of::type cartesian_end()const{ + return CVI()(rep(),End_tag()); + } + + Vector_d operator-() const + { + return typename Get_functor::type()(*this); + } + + 
/* + Direction_d direction() const + { + return R().construct_direction_d_object()(*this); + } + + Vector_d transform(const Aff_transformation_d &t) const + { + return t.transform(*this); + } + + Vector_d operator/(const RT& c) const + { + return R().construct_divided_vector_d_object()(*this,c); + } + + Vector_d operator/(const typename First_if_different::Type & c) const + { + return R().construct_divided_vector_d_object()(*this,c); + } + + typename Qualified_result_of::type + x() const + { + return R().compute_x_3_object()(*this); + } + + typename Qualified_result_of::type + y() const + { + return R().compute_y_3_object()(*this); + } + + typename Qualified_result_of::type + z() const + { + return R().compute_z_3_object()(*this); + } + + typename Qualified_result_of::type + hx() const + { + return R().compute_hx_3_object()(*this); + } + + typename Qualified_result_of::type + hy() const + { + return R().compute_hy_3_object()(*this); + } + + typename Qualified_result_of::type + hz() const + { + return R().compute_hz_3_object()(*this); + } + + typename Qualified_result_of::type + hw() const + { + return R().compute_hw_3_object()(*this); + } + + typename Qualified_result_of::type + cartesian(int i) const + { + CGAL_kernel_precondition( (i == 0) || (i == 1) || (i == 2) ); + if (i==0) return x(); + if (i==1) return y(); + return z(); + } + + typename Qualified_result_of::type + homogeneous(int i) const + { + CGAL_kernel_precondition( (i >= 0) || (i <= 3) ); + if (i==0) return hx(); + if (i==1) return hy(); + if (i==2) return hz(); + return hw(); + } + + int dimension() const // bad idea? + { + return rep.dimension(); + } +*/ + typename boost::result_of::type squared_length()const{ + return SLBase()(rep()); + } +}; +#if 0 +template Vector_d::Vector_d(Vector_d &)=default; +#endif + +//TODO: IO + +template +Vector_d operator+(const Vector_d& v,const Vector_d& w) +{ + return typename Get_functor::type()(v,w); +} + +template +Vector_d operator-(const Vector_d& v,const Vector_d& w) +{ + return typename Get_functor::type()(v,w); +} + +} //namespace Wrap +} //namespace CGAL + +#endif // CGAL_WRAPPER_VECTOR_D_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Weighted_point_d.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Weighted_point_d.h new file mode 100644 index 00000000..877eea21 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/Wrapper/Weighted_point_d.h @@ -0,0 +1,129 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
+// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_WRAPPER_WEIGHTED_POINT_D_H +#define CGAL_WRAPPER_WEIGHTED_POINT_D_H + +#include +#include +#include +#include +#include +#ifndef CGAL_CXX11 +#include +#endif +#include + +namespace CGAL { +namespace Wrap { + +template +class Weighted_point_d : public Get_type::type +{ + typedef typename Get_type::type FT_; + typedef typename R_::Kernel_base Kbase; + typedef typename Get_type::type Point_; + typedef typename Get_functor >::type CWPBase; + typedef typename Get_functor::type PDWBase; + typedef typename Get_functor::type PWBase; + + typedef Weighted_point_d Self; + BOOST_STATIC_ASSERT((boost::is_same::type>::value)); + +public: + + typedef Tag_true Is_wrapper; + typedef typename R_::Default_ambient_dimension Ambient_dimension; + typedef Dimension_tag<0> Feature_dimension; + + typedef typename Get_type::type Rep; + + const Rep& rep() const + { + return *this; + } + + Rep& rep() + { + return *this; + } + + typedef R_ R; + +#ifdef CGAL_CXX11 + template::type...>,std::tuple >::value>::type> explicit Weighted_point_d(U&&...u) + : Rep(CWPBase()(std::forward(u)...)){} + +// // called from Construct_point_d +// template explicit Point_d(Eval_functor&&,U&&...u) +// : Rep(Eval_functor(), std::forward(u)...){} + template explicit Weighted_point_d(Eval_functor&&,F&&f,U&&...u) + : Rep(std::forward(f)(std::forward(u)...)){} + +#if 0 + // the new standard may make this necessary + Point_d(Point_d const&)=default; + Point_d(Point_d &);//=default; + Point_d(Point_d &&)=default; +#endif + + // try not to use these + Weighted_point_d(Rep const& v) : Rep(v) {} + Weighted_point_d(Rep& v) : Rep(static_cast(v)) {} + Weighted_point_d(Rep&& v) : Rep(std::move(v)) {} + +#else + + Weighted_point_d() : Rep(CWPBase()()) {} + + Weighted_point_d(Rep const& v) : Rep(v) {} // try not to use it + +#define CGAL_CODE(Z,N,_) template \ + explicit Weighted_point_d(BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ + : Rep(CWPBase()( \ + BOOST_PP_ENUM_PARAMS(N,t))) {} \ + \ + template \ + Weighted_point_d(Eval_functor,F const& f,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ + : Rep(f(BOOST_PP_ENUM_PARAMS(N,t))) {} + /* + template \ + Point_d(Eval_functor,BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t)) \ + : Rep(Eval_functor(), BOOST_PP_ENUM_PARAMS(N,t)) {} + */ + + BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_) +#undef CGAL_CODE + +#endif + + //TODO: use references? + Point_ point()const{ + return Point_(Eval_functor(),PDWBase(),rep()); + } + FT_ weight()const{ + return PWBase()(rep()); + } + +}; + +} //namespace Wrap +} //namespace CGAL + +#endif // CGAL_WRAPPER_SPHERE_D_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/function_objects_cartesian.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/function_objects_cartesian.h new file mode 100644 index 00000000..5a132ad2 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/function_objects_cartesian.h @@ -0,0 +1,1355 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. 
+// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_KERNEL_D_FUNCTION_OBJECTS_CARTESIAN_H +#define CGAL_KERNEL_D_FUNCTION_OBJECTS_CARTESIAN_H + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#ifdef CGAL_CXX11 +#include +#endif + +namespace CGAL { +namespace CartesianDKernelFunctors { +namespace internal { +template struct Dimension_at_most { enum { value = false }; }; +template struct Dimension_at_most,b> { + enum { value = (a <= b) }; +}; +} + +template::value> struct Orientation_of_points : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Orientation_of_points) + typedef R_ R; + typedef typename Get_type::type Point; + typedef typename Get_type::type result_type; + typedef typename R::LA::Square_matrix Matrix; + + template + result_type operator()(Iter f, Iter e)const{ + typename Get_functor::type c(this->kernel()); + typename Get_functor::type pd(this->kernel()); + Point const& p0=*f++; + int d=pd(p0); + Matrix m(d,d); + // FIXME: this writes the vector coordinates in lines ? check all the other uses in this file, this may be wrong for some. + for(int i=0;f!=e;++f,++i) { + Point const& p=*f; + for(int j=0;j,typename R::Default_ambient_dimension>::value>::type> + template =3)>::type> + result_type operator()(U&&...u) const { + return operator()({std::forward(u)...}); + } + + template + result_type operator()(std::initializer_list
l) const { + return operator()(l.begin(),l.end()); + } +#else + //should we make it template to avoid instantiation for wrong dim? + //or iterate outside the class? +#define CGAL_VAR(Z,J,I) m(I,J)=c(p##I,J)-c(x,J); +#define CGAL_VAR2(Z,I,N) BOOST_PP_REPEAT(N,CGAL_VAR,I) +#define CGAL_CODE(Z,N,_) \ + result_type operator()(Point const&x, BOOST_PP_ENUM_PARAMS(N,Point const&p)) const { \ + typename Get_functor::type c(this->kernel()); \ + Matrix m(N,N); \ + BOOST_PP_REPEAT(N,CGAL_VAR2,N) \ + return R::LA::sign_of_determinant(CGAL_MOVE(m)); \ + } + +BOOST_PP_REPEAT_FROM_TO(7, 10, CGAL_CODE, _ ) + // No need to do it for <=6, since that uses a different code path +#undef CGAL_CODE +#undef CGAL_VAR2 +#undef CGAL_VAR +#endif +}; + +#ifdef CGAL_CXX11 +template struct Orientation_of_points,true> : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Orientation_of_points) + typedef R_ R; + typedef typename Get_type::type RT; + typedef typename Get_type::type Point; + typedef typename Get_type::type result_type; + templatestruct Help; + templatestruct Help > { + template result_type operator()(C const&c,P const&x,T&&t)const{ + return sign_of_determinant(c(std::get(t),I%d)-c(x,I%d)...); + } + }; + template result_type operator()(P0 const&x,P&&...p)const{ + static_assert(d==sizeof...(P),"Wrong number of arguments"); + typename Get_functor::type c(this->kernel()); + return Help::type>()(c,x,std::forward_as_tuple(std::forward
(p)...)); + } + + + template result_type help2(Dimension_tag, Iter f, Iter const&e, U&&...u)const{ + auto const&p=*f; + return help2(Dimension_tag(),++f,e,std::forward(u)...,p); + } + template result_type help2(Dimension_tag<0>, Iter CGAL_assertion_code(f), Iter const& CGAL_assertion_code(e), U&&...u)const{ + CGAL_assertion(f==e); + return operator()(std::forward(u)...); + } + template + result_type operator()(Iter f, Iter e)const{ + return help2(Dimension_tag(),f,e); + } +}; +#else +#define CGAL_VAR(Z,J,I) c(p##I,J)-x##J +#define CGAL_VAR2(Z,I,N) BOOST_PP_ENUM(N,CGAL_VAR,I) +#define CGAL_VAR3(Z,N,_) Point const&p##N=*++f; +#define CGAL_VAR4(Z,N,_) RT const&x##N=c(x,N); +#define CGAL_CODE(Z,N,_) \ +template struct Orientation_of_points,true> : private Store_kernel { \ + CGAL_FUNCTOR_INIT_STORE(Orientation_of_points) \ + typedef R_ R; \ + typedef typename Get_type::type RT; \ + typedef typename Get_type::type Point; \ + typedef typename Get_type::type result_type; \ + result_type operator()(Point const&x, BOOST_PP_ENUM_PARAMS(N,Point const&p)) const { \ + typename Get_functor::type c(this->kernel()); \ + BOOST_PP_REPEAT(N,CGAL_VAR4,) \ + return sign_of_determinant(BOOST_PP_ENUM(N,CGAL_VAR2,N)); \ + } \ + template \ + result_type operator()(Iter f, Iter CGAL_assertion_code(e))const{ \ + Point const&x=*f; \ + BOOST_PP_REPEAT(N,CGAL_VAR3,) \ + CGAL_assertion(++f==e); \ + return operator()(x,BOOST_PP_ENUM_PARAMS(N,p)); \ + } \ +}; + + BOOST_PP_REPEAT_FROM_TO(2, 7, CGAL_CODE, _ ) +#undef CGAL_CODE +#undef CGAL_VAR4 +#undef CGAL_VAR3 +#undef CGAL_VAR2 +#undef CGAL_VAR + +#endif + +template struct Orientation_of_points,true> : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Orientation_of_points) + typedef R_ R; + typedef typename Get_type::type RT; + typedef typename Get_type::type Point; + typedef typename Get_type::type result_type; + result_type operator()(Point const&x, Point const&y) const { + typename Get_functor::type c(this->kernel()); + // No sign_of_determinant(RT) :-( + return CGAL::compare(c(y,0),c(x,0)); + } + template + result_type operator()(Iter f, Iter CGAL_assertion_code(e))const{ + Point const&x=*f; + Point const&y=*++f; + CGAL_assertion(++f==e); + return operator()(x,y); + } +}; +} + +CGAL_KD_DEFAULT_FUNCTOR(Orientation_of_points_tag,(CartesianDKernelFunctors::Orientation_of_points),(Point_tag),(Point_dimension_tag,Compute_point_cartesian_coordinate_tag)); + +namespace CartesianDKernelFunctors { +template struct Orientation_of_vectors : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Orientation_of_vectors) + typedef R_ R; + typedef typename Get_type::type Vector; + typedef typename Get_type::type result_type; + typedef typename R::LA::Square_matrix Matrix; + + template + result_type operator()(Iter f, Iter e)const{ + typename Get_functor::type c(this->kernel()); + typename Get_functor::type vd(this->kernel()); + // FIXME: Uh? Using it on a vector ?! 
+ Vector const& v0=*f; + int d=vd(v0); + Matrix m(d,d); + for(int j=0;j=3)>::type> + result_type operator()(U&&...u) const { + return operator()({std::forward(u)...}); + } + + template + result_type operator()(std::initializer_list l) const { + return operator()(l.begin(),l.end()); + } +#else + //TODO +#endif +}; +} + +CGAL_KD_DEFAULT_FUNCTOR(Orientation_of_vectors_tag,(CartesianDKernelFunctors::Orientation_of_vectors),(Vector_tag),(Point_dimension_tag,Compute_vector_cartesian_coordinate_tag)); + +namespace CartesianDKernelFunctors { +template struct Linear_rank : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Linear_rank) + typedef R_ R; + typedef typename Get_type::type Vector; + // Computing a sensible Uncertain is not worth it + typedef int result_type; + typedef typename R::LA::Dynamic_matrix Matrix; + + template + result_type operator()(Iter f, Iter e)const{ + typename Get_functor::type c(this->kernel()); + typename Get_functor::type vd(this->kernel()); + std::ptrdiff_t n=std::distance(f,e); + if (n==0) return 0; + Vector const& v0 = *f; + // FIXME: Uh? Using it on a vector ?! + int d=vd(v0); + Matrix m(d,n); + for(int j=0;j),(Vector_tag),(Point_dimension_tag,Compute_vector_cartesian_coordinate_tag)); + +namespace CartesianDKernelFunctors { +template struct Linearly_independent : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Linearly_independent) + typedef R_ R; + typedef typename Get_type::type result_type; + + template + result_type operator()(Iter f, Iter e)const{ + typename Get_functor::type vd(this->kernel()); + std::ptrdiff_t n=std::distance(f,e); + // FIXME: Uh? Using it on a vector ?! + int d=vd(*f); + if (n>d) return false; + typename Get_functor::type lr(this->kernel()); + return lr(f,e) == n; + } +}; +} + +CGAL_KD_DEFAULT_FUNCTOR(Linearly_independent_tag,(CartesianDKernelFunctors::Linearly_independent),(Vector_tag),(Point_dimension_tag,Linear_rank_tag)); + +namespace CartesianDKernelFunctors { +template struct Contained_in_linear_hull : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Contained_in_linear_hull) + typedef R_ R; + typedef typename Get_type::type Vector; + // Computing a sensible Uncertain is not worth it + typedef bool result_type; + typedef typename R::LA::Dynamic_matrix Matrix; + + template + result_type operator()(Iter f, Iter e,V const&w)const{ + typename Get_functor::type c(this->kernel()); + typename Get_functor::type vd(this->kernel()); + std::ptrdiff_t n=std::distance(f,e); + if (n==0) return false; + // FIXME: Uh? Using it on a vector ?! 
+ int d=vd(w); + Matrix m(d,n+1); + for(int i=0; f!=e; ++f,++i){ + Vector const& v = *f; + for(int j=0;j),(Vector_tag),(Point_dimension_tag,Compute_vector_cartesian_coordinate_tag)); + +namespace CartesianDKernelFunctors { +template struct Affine_rank : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Affine_rank) + typedef R_ R; + typedef typename Get_type::type Point; + // Computing a sensible Uncertain is not worth it + typedef int result_type; + typedef typename R::LA::Dynamic_matrix Matrix; + + template + result_type operator()(Iter f, Iter e)const{ + typename Get_functor::type c(this->kernel()); + typename Get_functor::type pd(this->kernel()); + int n=(int)std::distance(f,e); + if (--n<=0) return n; + Point const& p0 = *f; + int d=pd(p0); + Matrix m(d,n); + for(int i=0; ++f!=e; ++i){ + Point const& p = *f; + for(int j=0;j),(Point_tag),(Point_dimension_tag,Compute_point_cartesian_coordinate_tag)); + +namespace CartesianDKernelFunctors { +template struct Affinely_independent : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Affinely_independent) + typedef R_ R; + typedef typename Get_type::type result_type; + + template + result_type operator()(Iter f, Iter e)const{ + typename Get_functor::type pd(this->kernel()); + std::ptrdiff_t n=std::distance(f,e); + int d=pd(*f); + if (--n>d) return false; + typename Get_functor::type ar(this->kernel()); + return ar(f,e) == n; + } +}; +} + +CGAL_KD_DEFAULT_FUNCTOR(Affinely_independent_tag,(CartesianDKernelFunctors::Affinely_independent),(Point_tag),(Point_dimension_tag,Affine_rank_tag)); + +namespace CartesianDKernelFunctors { +template struct Contained_in_simplex : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Contained_in_simplex) + typedef R_ R; + typedef typename Get_type::type Point; + // Computing a sensible Uncertain<*> is not worth it + // typedef typename Get_type::type result_type; + typedef bool result_type; + typedef typename Increment_dimension::type D1; + typedef typename Increment_dimension::type D2; + typedef typename R::LA::template Rebind_dimension::Other LA; + typedef typename LA::Dynamic_matrix Matrix; + typedef typename LA::Dynamic_vector DynVec; + typedef typename LA::Vector Vec; + + template + result_type operator()(Iter f, Iter e, P const&q)const{ + typename Get_functor::type c(this->kernel()); + typename Get_functor::type pd(this->kernel()); + std::ptrdiff_t n=std::distance(f,e); + if (n==0) return false; + int d=pd(q); + Matrix m(d+1,n); + DynVec a(n); + // FIXME: Should use the proper vector constructor (Iterator_and_last) + Vec b(d+1); + for(int j=0;j),(Point_tag),(Point_dimension_tag,Compute_point_cartesian_coordinate_tag)); + +namespace CartesianDKernelFunctors { + namespace internal { + template + struct Matrix_col_access { + typedef Ref_ result_type; + int col; + Matrix_col_access(int r):col(r){} + template Ref_ operator()(Mat const& m, std::ptrdiff_t row)const{ + return m(row,col); + } + }; + } +template struct Linear_base : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Linear_base) + typedef R_ R; + typedef typename Get_type::type Vector; + typedef typename Get_type::type FT; + typedef void result_type; + typedef typename R::LA::Dynamic_matrix Matrix; + + template + result_type operator()(Iter f, Iter e, Oter&o)const{ + typename Get_functor::type c(this->kernel()); + typename Get_functor::type vd(this->kernel()); + typename Get_functor >::type cv(this->kernel()); + std::ptrdiff_t n=std::distance(f,e); + if (n==0) return; + Vector const& v0 = *f; + // FIXME: Uh? Using it on a vector ?! 
+ int d=vd(v0); + Matrix m(d,n); + for(int j=0;j()(0,0)) +#else + FT +#endif + Ref; + typedef Iterator_from_indices > IFI; + *o++ = cv(IFI(b,0,i),IFI(b,d,i)); + } + } +}; +} + +CGAL_KD_DEFAULT_FUNCTOR(Linear_base_tag,(CartesianDKernelFunctors::Linear_base),(Vector_tag),(Point_dimension_tag,Compute_vector_cartesian_coordinate_tag)); + +#if 0 +namespace CartesianDKernelFunctors { +template::value> struct Orientation : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Orientation) + typedef R_ R; + typedef typename Get_type::type Vector; + typedef typename Get_type::type Point; + typedef typename Get_type::type result_type; + typedef typename Get_functor::type OP; + typedef typename Get_functor::type OV; + + //FIXME!!! + //when Point and Vector are distinct types, the dispatch should be made + //in a way that doesn't instantiate a conversion from Point to Vector + template + result_type operator()(Iter const&f, Iter const& e)const{ + typename Get_functor::type pd(this->kernel()); + typename std::iterator_traits::difference_type d=std::distance(f,e); + int dim=pd(*f); // BAD + if(d==dim) return OV(this->kernel())(f,e); + CGAL_assertion(d==dim+1); + return OP(this->kernel())(f,e); + } + //TODO: version that takes objects directly instead of iterators +}; + +template struct Orientation : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Orientation) + typedef R_ R; + typedef typename Get_type::type Vector; + typedef typename Get_type::type Point; + typedef typename Get_type::type result_type; + typedef typename Get_functor::type OP; + typedef typename Get_functor::type OV; + typedef typename R::LA::Square_matrix Matrix; + + //FIXME!!! + //when Point and Vector are distinct types, the dispatch should be made + //in a way that doesn't instantiate a conversion from Point to Vector + template + typename boost::enable_if,result_type>::type + operator()(Iter const&f, Iter const& e)const{ + return OP(this->kernel())(f,e); + } + template + typename boost::enable_if,result_type>::type + operator()(Iter const&f, Iter const& e)const{ + return OV(this->kernel())(f,e); + } + //TODO: version that takes objects directly instead of iterators +}; +} +#endif + +namespace CartesianDKernelFunctors { +template struct Power_side_of_power_sphere_raw : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Power_side_of_power_sphere_raw) + typedef R_ R; + typedef typename Get_type::type RT; + typedef typename Get_type::type FT; + typedef typename Get_type::type Point; + typedef typename Get_type::type result_type; + typedef typename Increment_dimension::type D1; + typedef typename Increment_dimension::type D2; + typedef typename R::LA::template Rebind_dimension::Other LA; + typedef typename LA::Square_matrix Matrix; + + template + result_type operator()(IterP f, IterP const& e, IterW fw, Pt const& p0, Wt const& w0) const { + typedef typename Get_functor::type Sqdo; + typename Get_functor::type c(this->kernel()); + typename Get_functor::type pd(this->kernel()); + + int d=pd(p0); + Matrix m(d+1,d+1); + if(CGAL::Is_stored::value) { + Sqdo sqdo(this->kernel()); + FT const& h0 = sqdo(p0) - w0; + for(int i=0;f!=e;++f,++fw,++i) { + Point const& p=*f; + for(int j=0;j),(Point_tag),(Point_dimension_tag,Squared_distance_to_origin_tag,Compute_point_cartesian_coordinate_tag)); + +// TODO: make Side_of_oriented_sphere call Power_side_of_power_sphere_raw +namespace CartesianDKernelFunctors { +template struct Side_of_oriented_sphere : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Side_of_oriented_sphere) + typedef R_ R; + typedef typename 
Get_type::type RT; + typedef typename Get_type::type Point; + typedef typename Get_type::type result_type; + typedef typename Increment_dimension::type D1; + typedef typename Increment_dimension::type D2; + typedef typename R::LA::template Rebind_dimension::Other LA; + typedef typename LA::Square_matrix Matrix; + + template + result_type operator()(Iter f, Iter const& e)const{ + Point const& p0=*f++; // *--e ? + return this->operator()(f,e,p0); + } + + template + result_type operator()(Iter f, Iter const& e, Point const& p0) const { + typedef typename Get_functor::type Sqdo; + typename Get_functor::type c(this->kernel()); + typename Get_functor::type pd(this->kernel()); + + int d=pd(p0); + Matrix m(d+1,d+1); + if(CGAL::Is_stored::value) { + Sqdo sqdo(this->kernel()); + for(int i=0;f!=e;++f,++i) { + Point const& p=*f; + for(int j=0;j=4)>::type> + result_type operator()(U&&...u) const { + return operator()({std::forward(u)...}); + } + + template + result_type operator()(std::initializer_list
l) const { + return operator()(l.begin(),l.end()); + } +#else + //TODO +#endif +}; +} + +CGAL_KD_DEFAULT_FUNCTOR(Side_of_oriented_sphere_tag,(CartesianDKernelFunctors::Side_of_oriented_sphere),(Point_tag),(Point_dimension_tag,Squared_distance_to_origin_tag,Compute_point_cartesian_coordinate_tag)); + +namespace CartesianDKernelFunctors { +template struct Construct_circumcenter : Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Construct_circumcenter) + typedef typename Get_type::type Point; + typedef Point result_type; + typedef typename Get_type::type FT; + template + result_type operator()(Iter f, Iter e)const{ + typedef typename Get_type::type Point; + typedef typename R_::LA LA; + typename Get_functor::type c(this->kernel()); + typename Get_functor >::type cp(this->kernel()); + typename Get_functor::type pd(this->kernel()); + typename Get_functor::type sdo(this->kernel()); + + Point const& p0=*f; + int d = pd(p0); + if (d+1 == std::distance(f,e)) + { + // 2*(x-y).c == x^2-y^2 + typedef typename LA::Square_matrix Matrix; + typedef typename LA::Vector Vec; + typedef typename LA::Construct_vector CVec; + FT const& n0 = sdo(p0); + Matrix m(d,d); + Vec b = typename CVec::Dimension()(d); + // Write the point coordinates in lines. + int i; + for(i=0; ++f!=e; ++i) { + Point const& p=*f; + for(int j=0;j::Other LAd; + typedef typename LAd::Square_matrix Matrix; + typedef typename LAd::Vector Vec; + typename Get_functor::type sp(this->kernel()); + int k=static_cast(std::distance(f,e)); + Matrix m(k,k); + Vec b(k); + Vec l(k); + int j,i=0; + for(Iter f2=f;f2!=e;++f2,++i){ + b(i)=m(i,i)=sdo(*f2); + j=0; + for(Iter f3=f;f3!=e;++f3,++j){ + m(j,i)=m(i,j)=sp(*f2,*f3); + } + } + for(i=1;i),(Point_tag),(Construct_ttag,Compute_point_cartesian_coordinate_tag,Scalar_product_tag,Squared_distance_to_origin_tag,Point_dimension_tag)); + +namespace CartesianDKernelFunctors { +template struct Squared_circumradius : Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Squared_circumradius) + typedef typename Get_type::type result_type; + template + result_type operator()(Iter f, Iter e)const{ + typename Get_functor::type cc(this->kernel()); + typename Get_functor::type sd(this->kernel()); + return sd(cc(f, e), *f); + } +}; +} + +CGAL_KD_DEFAULT_FUNCTOR(Squared_circumradius_tag,(CartesianDKernelFunctors::Squared_circumradius),(Point_tag),(Construct_circumcenter_tag,Squared_distance_tag)); + +namespace CartesianDKernelFunctors { +// TODO: implement it directly, it should be at least as fast as Side_of_oriented_sphere. +template struct Side_of_bounded_sphere : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Side_of_bounded_sphere) + typedef R_ R; + typedef typename Get_type::type Point; + typedef typename Get_type::type result_type; + + template + result_type operator()(Iter f, Iter const& e) const { + Point const& p0 = *f++; // *--e ? + typename Get_functor::type pd(this->kernel()); + //FIXME: Doesn't work for non-full dimension. + CGAL_assertion (std::distance(f,e) == pd(p0)+1); + return operator() (f, e, p0); + } + + template + result_type operator()(Iter const& f, Iter const& e, Point const& p0) const { + typename Get_functor::type sos (this->kernel()); + typename Get_functor::type op (this->kernel()); + // enum_cast is not very generic, but since this function isn't supposed to remain like this... 
+ return enum_cast (sos (f, e, p0) * op (f, e)); + } + +#ifdef CGAL_CXX11 + template =4)>::type> + result_type operator()(U&&...u) const { + return operator()({std::forward(u)...}); + } + + template + result_type operator()(std::initializer_list
l) const { + return operator()(l.begin(),l.end()); + } +#else + //TODO +#endif +}; +} + +CGAL_KD_DEFAULT_FUNCTOR(Side_of_bounded_sphere_tag,(CartesianDKernelFunctors::Side_of_bounded_sphere),(Point_tag),(Side_of_oriented_sphere_tag,Orientation_of_points_tag)); + +namespace CartesianDKernelFunctors { +template struct Side_of_bounded_circumsphere : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Side_of_bounded_circumsphere) + typedef typename Get_type::type result_type; + + template + result_type operator()(Iter f, Iter const& e, P const& p0) const { + // TODO: Special case when the dimension is full. + typename Get_functor::type cc(this->kernel()); + typename Get_functor::type cd(this->kernel()); + + return enum_cast(cd(cc(f, e), *f, p0)); + } +}; +} + +CGAL_KD_DEFAULT_FUNCTOR(Side_of_bounded_circumsphere_tag,(CartesianDKernelFunctors::Side_of_bounded_circumsphere),(Point_tag),(Squared_distance_tag,Construct_circumcenter_tag)); + +namespace CartesianDKernelFunctors { +template struct Point_to_vector : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Point_to_vector) + typedef R_ R; + typedef typename Get_type::type RT; + typedef typename Get_type::type Vector; + typedef typename Get_type::type Point; + typedef typename Get_functor >::type CV; + typedef typename Get_functor >::type CI; + typedef Vector result_type; + typedef Point argument_type; + result_type operator()(argument_type const&v)const{ + CI ci(this->kernel()); + return CV(this->kernel())(ci(v,Begin_tag()),ci(v,End_tag())); + } +}; +} + +CGAL_KD_DEFAULT_FUNCTOR(Point_to_vector_tag,(CartesianDKernelFunctors::Point_to_vector),(Point_tag,Vector_tag),(Construct_ttag, Construct_ttag)); + +namespace CartesianDKernelFunctors { +template struct Vector_to_point : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Vector_to_point) + typedef R_ R; + typedef typename Get_type::type RT; + typedef typename Get_type::type Vector; + typedef typename Get_type::type Point; + typedef typename Get_functor >::type CP; + typedef typename Get_functor >::type CI; + typedef Point result_type; + typedef Vector argument_type; + result_type operator()(argument_type const&v)const{ + CI ci(this->kernel()); + return CP(this->kernel())(ci(v,Begin_tag()),ci(v,End_tag())); + } +}; +} + +CGAL_KD_DEFAULT_FUNCTOR(Vector_to_point_tag,(CartesianDKernelFunctors::Vector_to_point),(Point_tag,Vector_tag),(Construct_ttag, Construct_ttag)); + +namespace CartesianDKernelFunctors { +template struct Opposite_vector : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Opposite_vector) + typedef R_ R; + typedef typename Get_type::type RT; + typedef typename Get_type::type Vector; + typedef typename Get_functor >::type CV; + typedef typename Get_functor >::type CI; + typedef Vector result_type; + typedef Vector argument_type; + result_type operator()(Vector const&v)const{ + CI ci(this->kernel()); + return CV(this->kernel())(make_transforming_iterator(ci(v,Begin_tag()),std::negate()),make_transforming_iterator(ci(v,End_tag()),std::negate())); + } +}; +} + +CGAL_KD_DEFAULT_FUNCTOR(Opposite_vector_tag,(CartesianDKernelFunctors::Opposite_vector),(Vector_tag),(Construct_ttag, Construct_ttag)); + +namespace CartesianDKernelFunctors { +template struct Scaled_vector : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Scaled_vector) + typedef R_ R; + typedef typename Get_type::type FT; + typedef typename Get_type::type Vector; + typedef typename Get_functor >::type CV; + typedef typename Get_functor >::type CI; + typedef Vector result_type; + typedef Vector first_argument_type; + typedef FT 
second_argument_type; + result_type operator()(Vector const&v,FT const& s)const{ + CI ci(this->kernel()); + return CV(this->kernel())(make_transforming_iterator(ci(v,Begin_tag()),Scale(s)),make_transforming_iterator(ci(v,End_tag()),Scale(s))); + } +}; +} + +CGAL_KD_DEFAULT_FUNCTOR(Scaled_vector_tag,(CartesianDKernelFunctors::Scaled_vector),(Vector_tag),(Construct_ttag, Construct_ttag)); + +namespace CartesianDKernelFunctors { +template struct Sum_of_vectors : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Sum_of_vectors) + typedef R_ R; + typedef typename Get_type::type RT; + typedef typename Get_type::type Vector; + typedef typename Get_functor >::type CV; + typedef typename Get_functor >::type CI; + typedef Vector result_type; + typedef Vector first_argument_type; + typedef Vector second_argument_type; + result_type operator()(Vector const&a, Vector const&b)const{ + CI ci(this->kernel()); + return CV(this->kernel())(make_transforming_pair_iterator(ci(a,Begin_tag()),ci(b,Begin_tag()),std::plus()),make_transforming_pair_iterator(ci(a,End_tag()),ci(b,End_tag()),std::plus())); + } +}; +} + +CGAL_KD_DEFAULT_FUNCTOR(Sum_of_vectors_tag,(CartesianDKernelFunctors::Sum_of_vectors),(Vector_tag),(Construct_ttag, Construct_ttag)); + +namespace CartesianDKernelFunctors { +template struct Difference_of_vectors : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Difference_of_vectors) + typedef R_ R; + typedef typename Get_type::type RT; + typedef typename Get_type::type Vector; + typedef typename Get_functor >::type CV; + typedef typename Get_functor >::type CI; + typedef Vector result_type; + typedef Vector first_argument_type; + typedef Vector second_argument_type; + result_type operator()(Vector const&a, Vector const&b)const{ + CI ci(this->kernel()); + return CV(this->kernel())(make_transforming_pair_iterator(ci(a,Begin_tag()),ci(b,Begin_tag()),std::minus()),make_transforming_pair_iterator(ci(a,End_tag()),ci(b,End_tag()),std::minus())); + } +}; +} + +CGAL_KD_DEFAULT_FUNCTOR(Difference_of_vectors_tag,(CartesianDKernelFunctors::Difference_of_vectors),(Vector_tag),(Construct_ttag, Construct_ttag)); + +namespace CartesianDKernelFunctors { +template struct Translated_point : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Translated_point) + typedef R_ R; + typedef typename Get_type::type RT; + typedef typename Get_type::type Vector; + typedef typename Get_type::type Point; + typedef typename Get_functor >::type CP; + typedef typename Get_functor >::type CVI; + typedef typename Get_functor >::type CPI; + typedef Point result_type; + typedef Point first_argument_type; + typedef Vector second_argument_type; + result_type operator()(Point const&a, Vector const&b)const{ + CVI cvi(this->kernel()); + CPI cpi(this->kernel()); + return CP(this->kernel())(make_transforming_pair_iterator(cpi(a,Begin_tag()),cvi(b,Begin_tag()),std::plus()),make_transforming_pair_iterator(cpi(a,End_tag()),cvi(b,End_tag()),std::plus())); + } +}; +} + +CGAL_KD_DEFAULT_FUNCTOR(Translated_point_tag,(CartesianDKernelFunctors::Translated_point),(Point_tag, Vector_tag),(Construct_ttag, Construct_ttag, Construct_ttag)); + +namespace CartesianDKernelFunctors { +template struct Difference_of_points : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Difference_of_points) + typedef R_ R; + typedef typename Get_type::type RT; + typedef typename Get_type::type Point; + typedef typename Get_type::type Vector; + typedef typename Get_functor >::type CV; + typedef typename Get_functor >::type CI; + typedef Vector result_type; + typedef Point 
first_argument_type; + typedef Point second_argument_type; + result_type operator()(Point const&a, Point const&b)const{ + CI ci(this->kernel()); + return CV(this->kernel())(make_transforming_pair_iterator(ci(a,Begin_tag()),ci(b,Begin_tag()),std::minus()),make_transforming_pair_iterator(ci(a,End_tag()),ci(b,End_tag()),std::minus())); + } +}; +} + +CGAL_KD_DEFAULT_FUNCTOR(Difference_of_points_tag,(CartesianDKernelFunctors::Difference_of_points),(Point_tag, Vector_tag),(Construct_ttag, Construct_ttag)); + +namespace CartesianDKernelFunctors { +template struct Midpoint : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Midpoint) + typedef R_ R; + typedef typename Get_type::type FT; + typedef typename Get_type::type RT; + typedef typename Get_type::type Point; + typedef typename Get_functor >::type CP; + typedef typename Get_functor >::type CI; + typedef Point result_type; + typedef Point first_argument_type; + typedef Point second_argument_type; + // There is a division, but it will be cast to RT afterwards anyway, so maybe we could use RT. + struct Average : std::binary_function { + FT operator()(FT const&a, RT const&b)const{ + return (a+b)/2; + } + }; + result_type operator()(Point const&a, Point const&b)const{ + CI ci(this->kernel()); + //Divide half(2); + //return CP(this->kernel())(make_transforming_iterator(make_transforming_pair_iterator(ci.begin(a),ci.begin(b),std::plus()),half),make_transforming_iterator(make_transforming_pair_iterator(ci.end(a),ci.end(b),std::plus()),half)); + return CP(this->kernel())(make_transforming_pair_iterator(ci(a,Begin_tag()),ci(b,Begin_tag()),Average()),make_transforming_pair_iterator(ci(a,End_tag()),ci(b,End_tag()),Average())); + } +}; +} + +CGAL_KD_DEFAULT_FUNCTOR(Midpoint_tag,(CartesianDKernelFunctors::Midpoint),(Point_tag),(Construct_ttag, Construct_ttag)); + +namespace CartesianDKernelFunctors { +template struct Squared_length : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Squared_length) + typedef R_ R; + typedef typename Get_type::type RT; + typedef typename Get_type::type Vector; + typedef typename Get_functor >::type CI; + typedef RT result_type; + typedef Vector argument_type; + result_type operator()(Vector const&a)const{ + CI ci(this->kernel()); + typename Algebraic_structure_traits::Square f; + // TODO: avoid this RT(0)+... + return std::accumulate(make_transforming_iterator(ci(a,Begin_tag()),f),make_transforming_iterator(ci(a,End_tag()),f),RT(0)); + } +}; +} + +CGAL_KD_DEFAULT_FUNCTOR(Squared_length_tag,(CartesianDKernelFunctors::Squared_length),(Vector_tag),(Construct_ttag)); + +namespace CartesianDKernelFunctors { +template struct Squared_distance_to_origin : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Squared_distance_to_origin) + typedef R_ R; + typedef typename Get_type::type RT; + typedef typename Get_type::type Point; + typedef typename Get_functor >::type CI; + typedef RT result_type; + typedef Point argument_type; + result_type operator()(Point const&a)const{ + CI ci(this->kernel()); + typename Algebraic_structure_traits::Square f; + // TODO: avoid this RT(0)+... 
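// --- Standalone sketch (not part of this patch) ---
// Sum_of_vectors, Difference_of_points and Midpoint above all follow the same
// pattern: the result point/vector is built from two coordinate ranges combined
// element-wise through a transforming pair iterator. A minimal analogue of that
// pattern, with std::array and std::transform standing in for the kernel's
// point type and coordinate iterators:
#include <array>
#include <algorithm>
#include <cstddef>

template <std::size_t D>
std::array<double, D> midpoint_sketch(const std::array<double, D>& a,
                                      const std::array<double, D>& b) {
  std::array<double, D> m;
  // The lambda plays the role of the Average binary functor used by Midpoint above.
  std::transform(a.begin(), a.end(), b.begin(), m.begin(),
                 [](double x, double y) { return (x + y) / 2; });
  return m;
}
// Usage: midpoint_sketch<2>({0., 0.}, {2., 4.}) yields {1., 2.}.
// --- end of sketch ---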
+ return std::accumulate(make_transforming_iterator(ci(a,Begin_tag()),f),make_transforming_iterator(ci(a,End_tag()),f),RT(0)); + } +}; +} + +CGAL_KD_DEFAULT_FUNCTOR(Squared_distance_to_origin_tag,(CartesianDKernelFunctors::Squared_distance_to_origin),(Point_tag),(Construct_ttag)); + +namespace CartesianDKernelFunctors { +template struct Squared_distance : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Squared_distance) + typedef R_ R; + typedef typename Get_type::type RT; + typedef typename Get_type::type Point; + typedef typename Get_functor >::type CI; + typedef RT result_type; + typedef Point first_argument_type; + typedef Point second_argument_type; + struct Sq_diff : std::binary_function { + RT operator()(RT const&a, RT const&b)const{ + return CGAL::square(a-b); + } + }; + result_type operator()(Point const&a, Point const&b)const{ + CI ci(this->kernel()); + Sq_diff f; + // TODO: avoid this RT(0)+... + return std::accumulate(make_transforming_pair_iterator(ci(a,Begin_tag()),ci(b,Begin_tag()),f),make_transforming_pair_iterator(ci(a,End_tag()),ci(b,End_tag()),f),RT(0)); + } +}; +} + +CGAL_KD_DEFAULT_FUNCTOR(Squared_distance_tag,(CartesianDKernelFunctors::Squared_distance),(Point_tag),(Construct_ttag)); + +namespace CartesianDKernelFunctors { +template struct Scalar_product : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Scalar_product) + typedef R_ R; + typedef typename Get_type::type RT; + typedef typename Get_type::type Vector; + typedef typename Get_functor >::type CI; + typedef RT result_type; + typedef Vector first_argument_type; + typedef Vector second_argument_type; + result_type operator()(Vector const&a, Vector const&b)const{ + CI ci(this->kernel()); + std::multiplies f; + // TODO: avoid this RT(0)+... + return std::accumulate( + make_transforming_pair_iterator(ci(a,Begin_tag()),ci(b,Begin_tag()),f), + make_transforming_pair_iterator(ci(a, End_tag()),ci(b, End_tag()),f), + RT(0)); + } +}; +} + +CGAL_KD_DEFAULT_FUNCTOR(Scalar_product_tag,(CartesianDKernelFunctors::Scalar_product),(Vector_tag),(Construct_ttag)); + +namespace CartesianDKernelFunctors { +template struct Compare_distance : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Compare_distance) + typedef R_ R; + typedef typename Get_type::type Point; + typedef typename Get_functor::type CSD; + typedef typename Get_type::type result_type; + typedef Point first_argument_type; + typedef Point second_argument_type; + typedef Point third_argument_type; // why am I doing this already? + typedef Point fourth_argument_type; + result_type operator()(Point const&a, Point const&b, Point const&c)const{ + CSD csd(this->kernel()); + return CGAL_NTS compare(csd(a,b),csd(a,c)); + } + result_type operator()(Point const&a, Point const&b, Point const&c, Point const&d)const{ + CSD csd(this->kernel()); + return CGAL_NTS compare(csd(a,b),csd(c,d)); + } +}; +} + +CGAL_KD_DEFAULT_FUNCTOR(Compare_distance_tag,(CartesianDKernelFunctors::Compare_distance),(Point_tag),(Squared_distance_tag)); + +namespace CartesianDKernelFunctors { +template struct Less_point_cartesian_coordinate : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Less_point_cartesian_coordinate) + typedef R_ R; + typedef typename Get_type::type result_type; + typedef typename Get_functor::type Cc; + // TODO: This is_exact thing should be reengineered. 
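// --- Standalone sketch (not part of this patch) ---
// Squared_distance, Squared_distance_to_origin and Scalar_product above all
// reduce to the same pattern: accumulate a per-coordinate term over one or two
// coordinate ranges, starting from zero. std::array and std::inner_product
// stand in for the kernel's coordinate iterators and transforming iterators.
#include <array>
#include <numeric>
#include <functional>
#include <cstddef>

template <std::size_t D>
double squared_distance_sketch(const std::array<double, D>& a,
                               const std::array<double, D>& b) {
  // The lambda plays the role of Sq_diff above: (a_i - b_i)^2.
  return std::inner_product(a.begin(), a.end(), b.begin(), 0.0,
                            std::plus<double>(),
                            [](double x, double y) { return (x - y) * (x - y); });
}

template <std::size_t D>
double scalar_product_sketch(const std::array<double, D>& a,
                             const std::array<double, D>& b) {
  return std::inner_product(a.begin(), a.end(), b.begin(), 0.0);
}
// --- end of sketch ---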
+ // the goal is to have a way to tell: don't filter this + typedef typename CGAL::Is_exact Is_exact; + + template + result_type operator()(V const&a, W const&b, I i)const{ + Cc c(this->kernel()); + return c(a,i)),(),(Compute_point_cartesian_coordinate_tag)); + +namespace CartesianDKernelFunctors { +template struct Compare_point_cartesian_coordinate : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Compare_point_cartesian_coordinate) + typedef R_ R; + typedef typename Get_type::type result_type; + typedef typename Get_functor::type Cc; + // TODO: This is_exact thing should be reengineered. + // the goal is to have a way to tell: don't filter this + typedef typename CGAL::Is_exact Is_exact; + + template + result_type operator()(V const&a, W const&b, I i)const{ + Cc c(this->kernel()); + return CGAL_NTS compare(c(a,i),c(b,i)); + } +}; +} + +CGAL_KD_DEFAULT_FUNCTOR(Compare_point_cartesian_coordinate_tag,(CartesianDKernelFunctors::Compare_point_cartesian_coordinate),(),(Compute_point_cartesian_coordinate_tag)); + +namespace CartesianDKernelFunctors { +template struct Compare_lexicographically : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Compare_lexicographically) + typedef R_ R; + typedef typename Get_type::type result_type; + typedef typename Get_functor >::type CI; + // TODO: This is_exact thing should be reengineered. + // the goal is to have a way to tell: don't filter this + typedef typename CGAL::Is_exact Is_exact; + + template + result_type operator()(V const&a, W const&b)const{ + CI c(this->kernel()); +#ifdef CGAL_CXX11 + auto +#else + typename CI::result_type +#endif + a_begin=c(a,Begin_tag()), + b_begin=c(b,Begin_tag()), + a_end=c(a,End_tag()); + result_type res; + // can't we do slightly better for Uncertain<*> ? + // after res=...; if(is_uncertain(res))return indeterminate(); + do res=CGAL_NTS compare(*a_begin++,*b_begin++); + while(a_begin!=a_end && res==EQUAL); + return res; + } +}; +} + +CGAL_KD_DEFAULT_FUNCTOR(Compare_lexicographically_tag,(CartesianDKernelFunctors::Compare_lexicographically),(),(Construct_ttag)); + +namespace CartesianDKernelFunctors { +template struct Less_lexicographically : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Less_lexicographically) + typedef R_ R; + typedef typename Get_type::type result_type; + typedef typename Get_functor::type CL; + typedef typename CGAL::Is_exact Is_exact; + + template + result_type operator() (V const&a, W const&b) const { + CL c (this->kernel()); + return c(a,b) < 0; + } +}; +} + +CGAL_KD_DEFAULT_FUNCTOR(Less_lexicographically_tag,(CartesianDKernelFunctors::Less_lexicographically),(),(Compare_lexicographically_tag)); + +namespace CartesianDKernelFunctors { +template struct Less_or_equal_lexicographically : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Less_or_equal_lexicographically) + typedef R_ R; + typedef typename Get_type::type result_type; + typedef typename Get_functor::type CL; + typedef typename CGAL::Is_exact Is_exact; + + template + result_type operator() (V const&a, W const&b) const { + CL c (this->kernel()); + return c(a,b) <= 0; + } +}; +} + +CGAL_KD_DEFAULT_FUNCTOR(Less_or_equal_lexicographically_tag,(CartesianDKernelFunctors::Less_or_equal_lexicographically),(),(Compare_lexicographically_tag)); + +namespace CartesianDKernelFunctors { +template struct Equal_points : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Equal_points) + typedef R_ R; + typedef typename Get_type::type result_type; + typedef typename Get_functor >::type CI; + // TODO: This is_exact thing should be reengineered. 
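// --- Standalone sketch (not part of this patch) ---
// The loop in Compare_lexicographically above walks both coordinate ranges in
// parallel and stops at the first coordinate that differs. A simplified
// version, with a plain int standing in for the kernel's Comparison_result:
#include <array>
#include <cstddef>

template <std::size_t D>
int compare_lexicographically_sketch(const std::array<double, D>& a,
                                     const std::array<double, D>& b) {
  for (std::size_t i = 0; i < D; ++i) {
    if (a[i] < b[i]) return -1;  // SMALLER
    if (a[i] > b[i]) return  1;  // LARGER
  }
  return 0;                      // EQUAL
}
// Less_lexicographically and Less_or_equal_lexicographically above then reduce
// to compare(a, b) < 0 and compare(a, b) <= 0 respectively.
// --- end of sketch ---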
+ // the goal is to have a way to tell: don't filter this + typedef typename CGAL::Is_exact Is_exact; + + template + result_type operator()(V const&a, W const&b)const{ + CI c(this->kernel()); +#ifdef CGAL_CXX11 + auto +#else + typename CI::result_type +#endif + a_begin=c(a,Begin_tag()), + b_begin=c(b,Begin_tag()), + a_end=c(a,End_tag()); + result_type res = true; + // Is using CGAL::possibly for Uncertain really an optimization? + do res = res & (*a_begin++ == *b_begin++); + while(a_begin!=a_end && possibly(res)); + return res; + } +}; +} + +CGAL_KD_DEFAULT_FUNCTOR(Equal_points_tag,(CartesianDKernelFunctors::Equal_points),(),(Construct_ttag)); + +namespace CartesianDKernelFunctors { +template struct Oriented_side : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Oriented_side) + typedef R_ R; + typedef typename Get_type::type result_type; + typedef typename Get_type::type Point; + typedef typename Get_type::type Hyperplane; + typedef typename Get_type::type Sphere; + typedef typename Get_functor::type VA; + typedef typename Get_functor::type HT; + typedef typename Get_functor::type SD; + typedef typename Get_functor::type SR; + typedef typename Get_functor::type CS; + + result_type operator()(Hyperplane const&h, Point const&p)const{ + HT ht(this->kernel()); + VA va(this->kernel()); + return CGAL::compare(va(h,p),ht(h)); + } + result_type operator()(Sphere const&s, Point const&p)const{ + SD sd(this->kernel()); + SR sr(this->kernel()); + CS cs(this->kernel()); + return CGAL::compare(sd(cs(s),p),sr(s)); + } +}; +} + +CGAL_KD_DEFAULT_FUNCTOR(Oriented_side_tag,(CartesianDKernelFunctors::Oriented_side),(Point_tag,Sphere_tag,Hyperplane_tag),(Value_at_tag,Hyperplane_translation_tag,Squared_distance_tag,Squared_radius_tag,Center_of_sphere_tag)); + +namespace CartesianDKernelFunctors { +template struct Has_on_positive_side : private Store_kernel { + CGAL_FUNCTOR_INIT_STORE(Has_on_positive_side) + typedef R_ R; + typedef typename Get_type::type result_type; + typedef typename Get_functor::type OS; + + template + result_type operator()(Obj const&o, Pt const&p)const{ + OS os(this->kernel()); + return os(o,p) == ON_POSITIVE_SIDE; + } +}; +} + +CGAL_KD_DEFAULT_FUNCTOR(Has_on_positive_side_tag,(CartesianDKernelFunctors::Has_on_positive_side),(),(Oriented_side_tag)); + +} +#include +#endif // CGAL_KERNEL_D_FUNCTION_OBJECTS_CARTESIAN_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/functor_properties.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/functor_properties.h new file mode 100644 index 00000000..c25c4e2b --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/functor_properties.h @@ -0,0 +1,40 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
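// --- Standalone sketch (not part of this patch) ---
// The sphere case of Oriented_side a few hunks above compares the squared
// distance from the query point to the center against the squared radius,
// via compare(sd(cs(s), p), sr(s)). A simplified version of that test, with
// std::array standing in for the kernel's point type and -1/0/1 standing in
// for SMALLER/EQUAL/LARGER:
#include <array>
#include <cstddef>

template <std::size_t D>
int side_of_sphere_sketch(const std::array<double, D>& center,
                          double squared_radius,
                          const std::array<double, D>& p) {
  double sq_dist = 0.0;
  for (std::size_t i = 0; i < D; ++i)
    sq_dist += (center[i] - p[i]) * (center[i] - p[i]);
  if (sq_dist < squared_radius) return -1;  // compare(...) == SMALLER (p inside)
  if (sq_dist > squared_radius) return  1;  // compare(...) == LARGER  (p outside)
  return 0;                                 // p on the sphere
}
// --- end of sketch ---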
+// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_EXACTNESS_H +#define CGAL_EXACTNESS_H +#include +#include +namespace CGAL { + +#define CGAL_STRAWBERRY(Is_pretty) \ + namespace internal { \ + BOOST_MPL_HAS_XXX_TRAIT_DEF(Is_pretty) \ + } \ + template::value> \ + struct Is_pretty : boost::false_type {}; \ + template \ + struct Is_pretty : T::Is_pretty {} + +CGAL_STRAWBERRY(Is_exact); +CGAL_STRAWBERRY(Is_fast); +CGAL_STRAWBERRY(Is_stored); +#undef CGAL_STRAWBERRY +} +#endif // CGAL_EXACTNESS_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/functor_tags.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/functor_tags.h new file mode 100644 index 00000000..b8e17886 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/functor_tags.h @@ -0,0 +1,363 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_FUNCTOR_TAGS_H +#define CGAL_FUNCTOR_TAGS_H +#include // for Null_tag +#include +#ifdef CGAL_CXX11 +#include +#include +#endif +#include +#include +#include +#include +#include +#include +#include +#include +namespace CGAL { + + // Find a better place for this later + + template struct Get_type + : K::template Type {}; + template struct Get_functor + : K::template Functor {}; +#ifdef CGAL_CXX11 + template using Type = typename Get_type::type; + template using Functor = typename Get_functor::type; +#endif + + class Null_type {~Null_type();}; // no such object should be created + + // To construct iterators + struct Begin_tag {}; + struct End_tag {}; + + // Functor category + struct Predicate_tag {}; + struct Construct_tag {}; + struct Construct_iterator_tag {}; + struct Compute_tag {}; + struct Misc_tag {}; + + struct No_filter_tag {}; + + templatestruct Construct_ttag {}; + templatestruct Convert_ttag {}; + + template struct Get_functor_category { typedef Misc_tag type; }; + template struct Typedef_tag_type; + //template struct Read_tag_type {}; + + template + struct Provides_type + : Has_type_different_from, Null_type> {}; + + template + struct Provides_functor + : Has_type_different_from, Null_functor> {}; + + template::type::value> + struct Provides_functors : boost::mpl::and_ < + Provides_functor::type>, + Provides_functors::type> > {}; + template + struct Provides_functors : boost::true_type {}; + + template::type::value> + struct Provides_types : boost::mpl::and_ < + Provides_type::type>, + Provides_types::type> > {}; + template + struct Provides_types : boost::true_type {}; + + namespace internal { BOOST_MPL_HAS_XXX_TRAIT_NAMED_DEF(has_Type,template Type,false) } + template::value /* false */> + struct Provides_type_i : boost::false_type {}; + template + struct Provides_type_i + : Has_type_different_from, Null_type> {}; + + //// This version does not like Functor + //namespace internal { BOOST_MPL_HAS_XXX_TEMPLATE_NAMED_DEF(has_Functor,Functor,false) } + // This version lets 
us use non-type template parameters, but fails with older EDG-based compilers (Intel 14). + namespace internal { BOOST_MPL_HAS_XXX_TRAIT_NAMED_DEF(has_Functor,template Functor,false) } + + template::value /* false */> + struct Provides_functor_i : boost::false_type {}; + template + struct Provides_functor_i + : Has_type_different_from, Null_functor> {}; + + // TODO: Refine this a bit. + template ::value, + //bool=Provides_functor_i::value, + bool = internal::has_Functor::value + > + struct Inherit_functor : K::template Functor {}; + template + struct Inherit_functor {}; + + template ::value> + struct Inherit_type : K::template Type {}; + template + struct Inherit_type {}; + + struct Number_tag {}; + struct Discrete_tag {}; + struct Object_tag {}; + template struct Get_type_category { + // The lazy kernel uses it too eagerly, + // so it currently needs a default. + typedef Null_tag type; + }; + +#define CGAL_DECL_OBJ_(X,Y) \ + template \ + struct Typedef_tag_type : Base { typedef Obj X; }; \ + template \ + struct Get_type_category { typedef Y##_tag type; } +#define CGAL_DECL_OBJ(X,Y) struct X##_tag {}; \ + CGAL_DECL_OBJ_(X,Y) + + //namespace has_object { BOOST_MPL_HAS_XXX_TRAIT_DEF(X) } + //template + //struct Provides_tag_type : has_object::has_##X {}; + //template + //struct Read_tag_type { typedef typename Kernel::X type; } + + // Not exactly objects, but the extras can't hurt. + CGAL_DECL_OBJ(FT, Number); + CGAL_DECL_OBJ(RT, Number); + + CGAL_DECL_OBJ(Bool, Discrete); // Boolean_tag is already taken, and is a template :-( + CGAL_DECL_OBJ(Comparison_result, Discrete); + CGAL_DECL_OBJ(Sign, Discrete); + CGAL_DECL_OBJ(Orientation, Discrete); // Note: duplicate with the functor tag! + CGAL_DECL_OBJ(Oriented_side, Discrete); + CGAL_DECL_OBJ(Bounded_side, Discrete); + CGAL_DECL_OBJ(Angle, Discrete); + CGAL_DECL_OBJ(Flat_orientation, Discrete); + + CGAL_DECL_OBJ(Vector, Object); + CGAL_DECL_OBJ(Point, Object); + CGAL_DECL_OBJ(Segment, Object); + CGAL_DECL_OBJ(Sphere, Object); + CGAL_DECL_OBJ(Line, Object); + CGAL_DECL_OBJ(Direction, Object); + CGAL_DECL_OBJ(Hyperplane, Object); + CGAL_DECL_OBJ(Ray, Object); + CGAL_DECL_OBJ(Iso_box, Object); + CGAL_DECL_OBJ(Bbox, Object); + CGAL_DECL_OBJ(Aff_transformation, Object); + CGAL_DECL_OBJ(Weighted_point, Object); +#undef CGAL_DECL_OBJ_ +#undef CGAL_DECL_OBJ + +// Intel fails with those, and they are not so useful. 
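// --- Standalone sketch (not part of this patch) ---
// The machinery declared above lets a kernel publish its types through a
// member template Type<Tag>, with Get_type<K, Tag> forwarding to it and the
// CGAL_DECL_OBJ / Typedef_tag_type chain building such a map one tag at a
// time. A much-reduced version of the same idea; Toy_kernel, FT_tag and
// Point_tag are illustrative names only:
#include <array>

namespace sketch {
struct FT_tag {};
struct Point_tag {};

struct Toy_kernel {
  template <class Tag, class Dummy = void> struct Type;              // primary left undefined
  template <class D> struct Type<FT_tag, D>    { typedef double type; };
  template <class D> struct Type<Point_tag, D> { typedef std::array<double, 3> type; };
};

// Thin wrapper, in the spirit of Get_type above.
template <class K, class Tag> struct Get_type : K::template Type<Tag> {};
}
// Usage:
//   sketch::Get_type<sketch::Toy_kernel, sketch::Point_tag>::type p = {0., 1., 2.};
// The real code above additionally falls back to Null_type / Null_functor when
// a kernel does not provide the requested type or functor.
// --- end of sketch ---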
+// CGAL_KD_DEFAULT_TYPE(RT_tag,(typename Get_type::type),(),()); +// CGAL_KD_DEFAULT_TYPE(FT_tag,(CGAL::Quotient::type>),(),()); + +#define CGAL_SMURF2(A,B) CGAL_KD_DEFAULT_TYPE(A##_tag,(typename Same_uncertainty_nt::type>::type),(RT_tag),()) +#define CGAL_SMURF1(A) CGAL_SMURF2(A,CGAL::A) + CGAL_SMURF2(Bool, bool); + CGAL_SMURF1(Sign); + CGAL_SMURF1(Comparison_result); + CGAL_SMURF1(Orientation); + CGAL_SMURF1(Oriented_side); + CGAL_SMURF1(Bounded_side); + CGAL_SMURF1(Angle); +#undef CGAL_SMURF1 +#undef CGAL_SMURF2 + + // TODO: replace with Get_type_category + template struct is_NT_tag { enum { value = false }; }; + template<> struct is_NT_tag { enum { value = true }; }; + template<> struct is_NT_tag { enum { value = true }; }; + + template struct iterator_tag_traits { + enum { is_iterator = false, has_nth_element = false }; + typedef Null_tag value_tag; + }; + +#define CGAL_DECL_COMPUTE(X) struct X##_tag {}; \ + templatestruct Get_functor_category{typedef Compute_tag type;} + CGAL_DECL_COMPUTE(Compute_point_cartesian_coordinate); + CGAL_DECL_COMPUTE(Compute_vector_cartesian_coordinate); + CGAL_DECL_COMPUTE(Compute_homogeneous_coordinate); + CGAL_DECL_COMPUTE(Squared_distance); + CGAL_DECL_COMPUTE(Squared_distance_to_origin); + CGAL_DECL_COMPUTE(Squared_length); + CGAL_DECL_COMPUTE(Squared_radius); + CGAL_DECL_COMPUTE(Squared_circumradius); + CGAL_DECL_COMPUTE(Scalar_product); + CGAL_DECL_COMPUTE(Hyperplane_translation); + CGAL_DECL_COMPUTE(Value_at); + CGAL_DECL_COMPUTE(Point_weight); + CGAL_DECL_COMPUTE(Power_distance); + CGAL_DECL_COMPUTE(Power_distance_to_point); +#undef CGAL_DECL_COMPUTE + +#define CGAL_DECL_ITER_OBJ(X,Y,Z,C) struct X##_tag {}; \ + template<>struct iterator_tag_traits { \ + enum { is_iterator = true, has_nth_element = true }; \ + typedef Y##_tag value_tag; \ + typedef Z##_tag nth_element; \ + typedef C##_tag container; \ + }; \ + template \ + struct Typedef_tag_type : Base { typedef Obj X; } + + //namespace has_object { BOOST_MPL_HAS_XXX_TRAIT_DEF(X) } + //template + //struct Provides_tag_type : has_object::has_##X {}; + //template + //struct Read_tag_type { typedef typename Kernel::X type; } + + CGAL_DECL_ITER_OBJ(Vector_cartesian_const_iterator, FT, Compute_vector_cartesian_coordinate, Vector); + CGAL_DECL_ITER_OBJ(Point_cartesian_const_iterator, FT, Compute_point_cartesian_coordinate, Point); +#undef CGAL_DECL_ITER_OBJ + + templatestruct map_result_tag{typedef Null_type type;}; + templatestruct map_result_tag >{typedef T type;}; + + templatestruct Get_functor_category,B,C> : + boost::mpl::if_c::is_iterator, + Construct_iterator_tag, + Construct_tag> {}; + + // Really? 
+ templatestruct Get_functor_category,B,C>{typedef Misc_tag type;}; + +#define CGAL_DECL_CONSTRUCT(X,Y) struct X##_tag {}; \ + template<>struct map_result_tag{typedef Y##_tag type;}; \ + templatestruct Get_functor_category{typedef Construct_tag type;} + CGAL_DECL_CONSTRUCT(Midpoint,Point); + CGAL_DECL_CONSTRUCT(Center_of_sphere,Point); + CGAL_DECL_CONSTRUCT(Point_of_sphere,Point); + CGAL_DECL_CONSTRUCT(Segment_extremity,Point); + CGAL_DECL_CONSTRUCT(Sum_of_vectors,Vector); + CGAL_DECL_CONSTRUCT(Difference_of_vectors,Vector); + CGAL_DECL_CONSTRUCT(Opposite_vector,Vector); + CGAL_DECL_CONSTRUCT(Scaled_vector,Vector); + CGAL_DECL_CONSTRUCT(Orthogonal_vector,Vector); + CGAL_DECL_CONSTRUCT(Difference_of_points,Vector); + CGAL_DECL_CONSTRUCT(Translated_point,Point); + CGAL_DECL_CONSTRUCT(Point_to_vector,Vector); + CGAL_DECL_CONSTRUCT(Vector_to_point,Point); + CGAL_DECL_CONSTRUCT(Construct_min_vertex,Point); + CGAL_DECL_CONSTRUCT(Construct_max_vertex,Point); + CGAL_DECL_CONSTRUCT(Construct_circumcenter,Point); + CGAL_DECL_CONSTRUCT(Point_drop_weight,Point); + CGAL_DECL_CONSTRUCT(Power_center,Weighted_point); +#undef CGAL_DECL_CONSTRUCT +#if 0 +#define CGAL_DECL_ITER_CONSTRUCT(X,Y) struct X##_tag {}; \ + template<>struct map_result_tag{typedef Y##_tag type;}; \ + template<>struct map_functor_type{typedef Construct_iterator_tag type;} + CGAL_DECL_ITER_CONSTRUCT(Construct_point_cartesian_const_iterator,Point_cartesian_const_iterator); + CGAL_DECL_ITER_CONSTRUCT(Construct_vector_cartesian_const_iterator,Vector_cartesian_const_iterator); +#undef CGAL_DECL_ITER_CONSTRUCT +#endif + + //FIXME: choose a convention: prefix with Predicate_ ? +#define CGAL_DECL_PREDICATE_(X) \ + templatestruct Get_functor_category{typedef Predicate_tag type;} +#define CGAL_DECL_PREDICATE(X) struct X##_tag {}; \ + CGAL_DECL_PREDICATE_(X) + CGAL_DECL_PREDICATE(Less_point_cartesian_coordinate); + CGAL_DECL_PREDICATE(Compare_point_cartesian_coordinate); + CGAL_DECL_PREDICATE(Compare_distance); + CGAL_DECL_PREDICATE(Compare_lexicographically); + CGAL_DECL_PREDICATE(Less_lexicographically); + CGAL_DECL_PREDICATE(Less_or_equal_lexicographically); + CGAL_DECL_PREDICATE(Equal_points); + CGAL_DECL_PREDICATE(Has_on_positive_side); + CGAL_DECL_PREDICATE_(Orientation); // duplicate with the type + CGAL_DECL_PREDICATE_(Oriented_side); // duplicate with the type + CGAL_DECL_PREDICATE(Orientation_of_points); + CGAL_DECL_PREDICATE(Orientation_of_vectors); + CGAL_DECL_PREDICATE(Side_of_oriented_sphere); + CGAL_DECL_PREDICATE(Side_of_bounded_sphere); + CGAL_DECL_PREDICATE(Side_of_bounded_circumsphere); + CGAL_DECL_PREDICATE(Contained_in_affine_hull); + CGAL_DECL_PREDICATE(In_flat_orientation); + CGAL_DECL_PREDICATE(In_flat_side_of_oriented_sphere); + CGAL_DECL_PREDICATE(Construct_flat_orientation); // Making it a predicate is a questionable choice, it should be possible to let it be a construction for some implementations. Not sure how to do that... 
TODO + CGAL_DECL_PREDICATE(Linear_rank); + CGAL_DECL_PREDICATE(Affine_rank); + CGAL_DECL_PREDICATE(Linearly_independent); + CGAL_DECL_PREDICATE(Affinely_independent); + CGAL_DECL_PREDICATE(Contained_in_linear_hull); + CGAL_DECL_PREDICATE(Contained_in_simplex); + CGAL_DECL_PREDICATE(Power_side_of_power_sphere_raw); + CGAL_DECL_PREDICATE(Power_side_of_power_sphere); + CGAL_DECL_PREDICATE(In_flat_power_side_of_power_sphere_raw); + CGAL_DECL_PREDICATE(In_flat_power_side_of_power_sphere); +#undef CGAL_DECL_PREDICATE + +#define CGAL_DECL_MISC(X) struct X##_tag {}; \ + templatestruct Get_functor_category{typedef Misc_tag type;} + //TODO: split into _begin and _end ? + //CGAL_DECL_MISC(Construct_point_cartesian_const_iterator); + //CGAL_DECL_MISC(Construct_vector_cartesian_const_iterator); + CGAL_DECL_MISC(Point_dimension); + CGAL_DECL_MISC(Vector_dimension); + CGAL_DECL_MISC(Linear_base); // Find a more appropriate category? +#undef CGAL_DECL_MISC + + + // Properties for LA + struct Has_extra_dimension_tag {}; + struct Has_vector_plus_minus_tag {}; + struct Has_vector_scalar_ops_tag {}; + struct Has_dot_product_tag {}; + struct Has_determinant_of_vectors_tag {}; + struct Has_determinant_of_points_tag {}; + struct Has_determinant_of_iterator_to_vectors_tag {}; + struct Has_determinant_of_iterator_to_points_tag {}; + struct Has_determinant_of_vectors_omit_last_tag {}; + struct Stores_squared_norm_tag {}; + + template struct Preserved_by_non_linear_extra_coordinate + : boost::false_type {}; + template<> struct Preserved_by_non_linear_extra_coordinate + : boost::true_type {}; + template<> struct Preserved_by_non_linear_extra_coordinate + : boost::true_type {}; + template<> struct Preserved_by_non_linear_extra_coordinate + : boost::true_type {}; + template<> struct Preserved_by_non_linear_extra_coordinate + : boost::true_type {}; + template<> struct Preserved_by_non_linear_extra_coordinate + : boost::true_type {}; + template<> struct Preserved_by_non_linear_extra_coordinate + : boost::true_type {}; + + // Kernel properties + struct Point_stores_squared_distance_to_origin_tag {}; + +} +#endif // CGAL_FUNCTOR_TAGS_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/static_int.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/static_int.h new file mode 100644 index 00000000..21858804 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/static_int.h @@ -0,0 +1,61 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
+// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_STATIC_INT_H +#define CGAL_STATIC_INT_H +#include + +namespace CGAL { +template struct static_zero { + operator NT() const { return constant(); } +}; +template struct static_one { + operator NT() const { return constant(); } +}; + +template static_zero operator-(static_zero) { return static_zero(); } + +template NT operator+(NT const& x, static_zero) { return x; } +template NT operator+(static_zero, NT const& x) { return x; } +template static_zero operator+(static_zero, static_zero) { return static_zero(); } +template static_one operator+(static_zero, static_one) { return static_one(); } +template static_one operator+(static_one, static_zero) { return static_one(); } + +template NT operator-(NT const& x, static_zero) { return x; } +template NT operator-(static_zero, NT const& x) { return -x; } +template static_zero operator-(static_zero, static_zero) { return static_zero(); } +template static_zero operator-(static_one, static_one) { return static_zero(); } +template static_one operator-(static_one, static_zero) { return static_one(); } + +template NT operator*(NT const& x, static_one) { return x; } +template NT operator*(static_one, NT const& x) { return x; } +template static_zero operator*(NT const&, static_zero) { return static_zero(); } +template static_zero operator*(static_zero, NT const&) { return static_zero(); } +template static_zero operator*(static_zero, static_zero) { return static_zero(); } +template static_one operator*(static_one, static_one) { return static_one(); } +template static_zero operator*(static_zero, static_one) { return static_zero(); } +template static_zero operator*(static_one, static_zero) { return static_zero(); } + +template NT operator/(NT const& x, static_one) { return x; } +template static_zero operator/(static_zero, NT const&) { return static_zero(); } +template static_zero operator/(static_zero, static_one) { return static_zero(); } +template static_one operator/(static_one, static_one) { return static_one(); } + +} +#endif // CGAL_STATIC_INT_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/store_kernel.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/store_kernel.h new file mode 100644 index 00000000..253e1282 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/store_kernel.h @@ -0,0 +1,104 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
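// --- Standalone sketch (not part of this patch) ---
// static_int.h above introduces empty types that stand for the constants 0
// and 1, with operator overloads chosen so that arithmetic against them keeps
// the "known at compile time" information instead of collapsing to a runtime
// NT value. Only a few of the header's overloads are shown, in simplified form:
template <class NT> struct static_zero_sketch {
  operator NT() const { return NT(0); }
};
template <class NT> struct static_one_sketch {
  operator NT() const { return NT(1); }
};
// x + 0 and x * 1 stay x; x * 0 stays a compile-time zero.
template <class NT> NT operator+(NT const& x, static_zero_sketch<NT>) { return x; }
template <class NT> NT operator*(NT const& x, static_one_sketch<NT>)  { return x; }
template <class NT> static_zero_sketch<NT> operator*(NT const&, static_zero_sketch<NT>) {
  return static_zero_sketch<NT>();
}
// An expression such as y + x * static_zero_sketch<double>() therefore
// resolves to y through the overloads above: the multiplication never reads x
// and the addition never happens at run time.
// --- end of sketch ---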
+// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_STORE_KERNEL_H +#define CGAL_STORE_KERNEL_H + +#include +#include + +namespace CGAL { +namespace internal { +BOOST_MPL_HAS_XXX_TRAIT_DEF(Do_not_store_kernel) +template::value,bool=has_Do_not_store_kernel::value> struct Do_not_store_kernel { + enum { value=false }; + typedef Tag_false type; +}; +template struct Do_not_store_kernel { + enum { value=true }; + typedef Tag_true type; +}; +template struct Do_not_store_kernel { + typedef typename T::Do_not_store_kernel type; + enum { value=type::value }; +}; +} + +template::value> +struct Store_kernel { + Store_kernel(){} + Store_kernel(R_ const&){} + enum { kernel_is_stored = false }; + R_ kernel()const{return R_();} + typedef R_ reference_type; + void set_kernel(R_ const&){} +}; +template +struct Store_kernel { + Store_kernel():rp(0){ + CGAL_warning_msg(true,"I should know my kernel"); + } + Store_kernel(R_ const& r):rp(&r){} + enum { kernel_is_stored = true }; + R_ const& kernel()const{ + CGAL_warning_msg(rp!=0,"I should know my kernel"); + return *rp; + } + typedef R_ const& reference_type; + void set_kernel(R_ const&r){rp=&r;} + private: + R_ const* rp; +}; + +//For a second kernel. TODO: find something more elegant +template::value> +struct Store_kernel2 { + Store_kernel2(){} + Store_kernel2(R_ const&){} + enum { kernel2_is_stored = false }; + R_ kernel2()const{return R_();} + typedef R_ reference2_type; + void set_kernel2(R_ const&){} +}; +template +struct Store_kernel2 { + Store_kernel2(){ + //CGAL_warning_msg(true,"I should know my kernel"); + } + Store_kernel2(R_ const& r):rp(&r){} + enum { kernel2_is_stored = true }; + R_ const& kernel2()const{ + CGAL_warning_msg(rp==0,"I should know my kernel"); + return *rp; + } + typedef R_ const& reference2_type; + void set_kernel2(R_ const&r){rp=&r;} + private: + R_ const* rp; +}; +} +#define CGAL_BASE_INIT(X,Y) \ + X():Y(){} \ + X(R_ const&r):Y(r){} +#define CGAL_FUNCTOR_INIT_STORE(X) CGAL_BASE_INIT(X,Store_kernel) +#define CGAL_FUNCTOR_INIT_IGNORE(X) \ + X(){} \ + X(R_ const&){} + +#endif // CGAL_STORE_KERNEL_H diff --git a/src/common/include/gudhi_patches/CGAL/NewKernel_d/utils.h b/src/common/include/gudhi_patches/CGAL/NewKernel_d/utils.h new file mode 100644 index 00000000..238a2230 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/NewKernel_d/utils.h @@ -0,0 +1,306 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
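// --- Standalone sketch (not part of this patch) ---
// Store_kernel above gives functors access to their kernel while storing
// nothing when the kernel is a stateless, default-constructible class, and a
// pointer otherwise. The real class picks between the two automatically via
// the Do_not_store_kernel trait; in this reduced version the choice is a
// plain boolean parameter:
template <class K, bool store_it = false>
struct Store_kernel_sketch {            // stateless kernel: rebuild it on demand
  Store_kernel_sketch() {}
  Store_kernel_sketch(K const&) {}
  K kernel() const { return K(); }
};

template <class K>
struct Store_kernel_sketch<K, true> {   // stateful kernel: keep a pointer to it
  explicit Store_kernel_sketch(K const& k) : kp(&k) {}
  K const& kernel() const { return *kp; }
 private:
  K const* kp;
};
// A functor derives from this base and calls this->kernel() inside its
// operator(), which is exactly how the CartesianDKernelFunctors earlier in
// this patch use CGAL_FUNCTOR_INIT_STORE and Store_kernel.
// --- end of sketch ---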
+// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_MARCUTILS +#define CGAL_MARCUTILS + +#include + +#if defined(BOOST_MSVC) +# pragma warning(push) +# pragma warning(disable:4003) // not enough actual parameters for macro 'BOOST_PP_EXPAND_I' + // http://lists.boost.org/boost-users/2014/11/83291.php +#endif + +#ifdef CGAL_CXX11 +#include +#include +#define CGAL_FORWARDABLE(T) T&& +#define CGAL_FORWARD(T,t) std::forward(t) +#define CGAL_MOVE(t) std::move(t) +#define CGAL_CONSTEXPR constexpr +#else +#define CGAL_FORWARDABLE(T) T const& +#define CGAL_FORWARD(T,t) t +#define CGAL_MOVE(t) t +#define CGAL_CONSTEXPR +#endif +#include +#include +#include +#include +#include +#include +#include + +#ifdef CGAL_CXX11 +#define CGAL_BOOSTD std:: +#else +#define CGAL_BOOSTD boost:: +#endif + +namespace CGAL { +namespace internal { + BOOST_MPL_HAS_XXX_TRAIT_DEF(type) +} + +template ::value /*false*/> +struct Has_type_different_from : boost::false_type {}; +template +struct Has_type_different_from +: boost::mpl::not_ > {}; + + + template struct Wrap_type { typedef T type; }; + + // tell a function f(a,b,c) that its real argument is a(b,c) + struct Eval_functor {}; + + // forget the first argument. Useful to make something dependant + // (and thus usable in SFINAE), although that's not a great design. + template struct Second_arg { + typedef B type; + }; + + // like std::forward, except for basic types where it does a cast, to + // avoid issues with narrowing conversions +#ifdef CGAL_CXX11 + template inline + typename std::conditional::value&&std::is_arithmetic::type>::value,T,U&&>::type + forward_safe(V&& u) { return std::forward(u); } +#else + template inline U const& forward_safe(U const& u) { + return u; + } +#endif + +#ifdef CGAL_CXX11 + template struct Constructible_from_each; + template struct Constructible_from_each{ + enum { value=std::is_convertible::value&&Constructible_from_each::value }; + }; + template struct Constructible_from_each{ + enum { value=true }; + }; +#else +// currently only used in C++0X code +#endif + + template struct Scale { +#ifndef CGAL_CXX11 + template struct result; + template struct result { + typedef FT type; + }; +#endif + T const& scale; + Scale(T const& t):scale(t){} + template +#ifdef CGAL_CXX11 + auto operator()(FT&& x)const->decltype(scale*std::forward(x)) +#else + FT operator()(FT const& x)const +#endif + { + return scale*CGAL_FORWARD(FT,x); + } + }; + template struct Divide { +#if !defined(CGAL_CXX11) || !defined(BOOST_RESULT_OF_USE_DECLTYPE) + // requires boost > 1.44 + // shouldn't be needed with C++0X + //template struct result; + //template struct result { + // typedef FT type; + //}; + typedef NT result_type; +#endif + T const& scale; + Divide(T const& t):scale(t){} + template +#ifdef CGAL_CXX11 + //FIXME: gcc complains for Gmpq + //auto operator()(FT&& x)const->decltype(Rational_traits().make_rational(std::forward(x),scale)) + NT operator()(FT&& x)const +#else + NT operator()(FT const& x)const +#endif + { + return Rational_traits(). 
+ make_rational(CGAL_FORWARD(FT,x),scale); + } + }; + + template struct has_cheap_constructor : boost::is_arithmetic{}; + template struct has_cheap_constructor > { + enum { value=true }; + }; + + // like std::multiplies but allows mixing types + // in C++11 in doesn't need to be a template + template < class Ret > + struct multiplies { + template +#ifdef CGAL_CXX11 + auto operator()(A&&a,B&&b)const->decltype(std::forward(a)*std::forward(b)) +#else + Ret operator()(A const& a, B const& b)const +#endif + { + return CGAL_FORWARD(A,a)*CGAL_FORWARD(B,b); + } + }; + template < class Ret > + struct division { + template +#ifdef CGAL_CXX11 + auto operator()(A&&a,B&&b)const->decltype(std::forward(a)/std::forward(b)) +#else + Ret operator()(A const& a, B const& b)const +#endif + { + return CGAL_FORWARD(A,a)/CGAL_FORWARD(B,b); + } + }; + +#ifdef CGAL_CXX11 + using std::decay; +#else + template struct decay : boost::remove_cv::type> {}; +#endif + + template struct Type_copy_ref { typedef U type; }; + template struct Type_copy_ref { typedef U& type; }; +#ifdef CGAL_CXX11 + template struct Type_copy_ref { typedef U&& type; }; +#endif + template struct Type_copy_cv { typedef U type; }; + template struct Type_copy_cv { typedef U const type; }; + template struct Type_copy_cv { typedef U volatile type; }; + template struct Type_copy_cv { typedef U const volatile type; }; + + template struct Type_copy_cvref : + Type_copy_ref::type,U>::type> {}; + + struct Dereference_functor { + template struct result{}; + template struct result { + typedef typename std::iterator_traits::reference type; + }; + template typename result::type + operator()(It const&i)const{ + return *i; + } + }; + +#ifdef CGAL_CXX11 + template struct Indices{}; + template struct Next_increasing_indices; + template struct Next_increasing_indices > { + typedef Indices type; + }; + template struct N_increasing_indices { + typedef typename Next_increasing_indices::type>::type type; + }; + template<> struct N_increasing_indices<0> { typedef Indices<> type; }; + namespace internal { + template inline typename std::result_of::type + do_call_on_tuple_elements(F&&f, std::tuple&&t, Indices&&) { + return f(std::get(std::move(t))...); + } + } // internal + template + inline typename std::result_of::type + call_on_tuple_elements(F&&f, std::tuple&&t) { + return internal::do_call_on_tuple_elements(std::forward(f),std::move(t), + typename N_increasing_indices::type()); + } +#else +#define CGAL_VAR(Z,N,_) cpp0x::get(t) +#define CGAL_CODE(Z,N,_) template \ + inline Res call_on_tuple_elements(F const&f, \ + cpp0x::tuple const&t) { \ + return f(BOOST_PP_ENUM(N,CGAL_VAR,)); \ + } + template + inline Res call_on_tuple_elements(F const&f, cpp0x::tuple<>) { + return f(); + } +BOOST_PP_REPEAT_FROM_TO(1, 8, CGAL_CODE, _ ) +#undef CGAL_CODE +#undef CGAL_VAR +#endif + + template struct Factory { + typedef A result_type; +#ifdef CGAL_CXX11 + template result_type operator()(U&&...u)const{ + return A(std::forward(u)...); + } +#else + result_type operator()()const{ + return A(); + } +#define CGAL_CODE(Z,N,_) template \ + result_type operator()(BOOST_PP_ENUM_BINARY_PARAMS(N,U,const&u))const{ \ + return A(BOOST_PP_ENUM_PARAMS(N,u)); \ + } +BOOST_PP_REPEAT_FROM_TO(1, 8, CGAL_CODE, _ ) +#undef CGAL_CODE +#endif + }; +} + +// TODO: make a Cartesian-only variant +// WARNING: do not use the Req* parameters too much, they can cause circular instanciations and are only useful for dispatching. +#define CGAL_STRIP_PAREN_(...) __VA_ARGS__ +#define CGAL_STRIP_PAREN(...) 
CGAL_STRIP_PAREN_ __VA_ARGS__ +// What to do with O? pass it down to other functors or drop it? +#define CGAL_KD_DEFAULT_FUNCTOR(Tg,Name,ReqTyp,ReqFun) \ + template \ + struct Get_functor::value \ + || !Provides_types >::value \ + || !Provides_functors >::value \ + , int, void>::type> \ + { \ + typedef CGAL_STRIP_PAREN_ Name type; \ + typedef K Bound_kernel; \ + } + +// Not used yet, may need some changes. +#define CGAL_KD_DEFAULT_TYPE(Tg,Name,ReqTyp,ReqFun) \ + template \ + struct Get_type::value \ + || !Provides_types >::value \ + || !Provides_functors >::value \ + , int, void>::type> \ + { \ + typedef CGAL_STRIP_PAREN_ Name type; \ + typedef K Bound_kernel; \ + } + +#if defined(BOOST_MSVC) +# pragma warning(pop) +#endif + +#endif diff --git a/src/common/include/gudhi_patches/CGAL/Regular_triangulation.h b/src/common/include/gudhi_patches/CGAL/Regular_triangulation.h new file mode 100644 index 00000000..111c6ac9 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/Regular_triangulation.h @@ -0,0 +1,1169 @@ +// Copyright (c) 2014 INRIA Sophia-Antipolis (France). +// All rights reserved. +// +// This file is part of CGAL (www.cgal.org). +// You can redistribute it and/or modify it under the terms of the GNU +// General Public License as published by the Free Software Foundation, +// either version 3 of the License, or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Clement Jamin + +#ifndef CGAL_REGULAR_TRIANGULATION_H +#define CGAL_REGULAR_TRIANGULATION_H + +#include +#include +#include +#include +#include + +#include + +namespace CGAL { + +template< typename Traits_, typename TDS_ = Default > +class Regular_triangulation +: public Triangulation< + Regular_triangulation_traits_adapter, + typename Default::Get< + TDS_, + Triangulation_data_structure< + typename Regular_triangulation_traits_adapter::Dimension, + Triangulation_vertex >, + Triangulation_full_cell > + > + >::type> +{ + typedef Regular_triangulation_traits_adapter RTTraits; + typedef typename RTTraits::Dimension Maximal_dimension_; + typedef typename Default::Get< + TDS_, + Triangulation_data_structure< + Maximal_dimension_, + Triangulation_vertex, + Triangulation_full_cell + > >::type TDS; + typedef Triangulation Base; + typedef Regular_triangulation Self; + + typedef typename RTTraits::Orientation_d Orientation_d; + typedef typename RTTraits::Power_side_of_power_sphere_d Power_side_of_power_sphere_d; + typedef typename RTTraits::In_flat_power_side_of_power_sphere_d + In_flat_power_side_of_power_sphere_d; + typedef typename RTTraits::Flat_orientation_d Flat_orientation_d; + typedef typename RTTraits::Construct_flat_orientation_d Construct_flat_orientation_d; + +public: // PUBLIC NESTED TYPES + + typedef RTTraits Geom_traits; + typedef typename Base::Triangulation_ds Triangulation_ds; + + typedef typename Base::Vertex Vertex; + typedef typename Base::Full_cell Full_cell; + typedef typename Base::Facet Facet; + typedef typename Base::Face Face; + + typedef Maximal_dimension_ Maximal_dimension; + typedef typename RTTraits::Bare_point_d Bare_point; + typedef typename RTTraits::Weighted_point_d Weighted_point; + + typedef typename Base::Point_const_iterator Point_const_iterator; + typedef 
typename Base::Vertex_handle Vertex_handle; + typedef typename Base::Vertex_iterator Vertex_iterator; + typedef typename Base::Vertex_const_handle Vertex_const_handle; + typedef typename Base::Vertex_const_iterator Vertex_const_iterator; + + typedef typename Base::Full_cell_handle Full_cell_handle; + typedef typename Base::Full_cell_iterator Full_cell_iterator; + typedef typename Base::Full_cell_const_handle Full_cell_const_handle; + typedef typename Base::Full_cell_const_iterator Full_cell_const_iterator; + typedef typename Base::Finite_full_cell_const_iterator + Finite_full_cell_const_iterator; + + typedef typename Base::size_type size_type; + typedef typename Base::difference_type difference_type; + + typedef typename Base::Locate_type Locate_type; + + //Tag to distinguish Delaunay from Regular triangulations + typedef Tag_true Weighted_tag; + +protected: // DATA MEMBERS + + +public: + + using typename Base::Rotor; + using Base::maximal_dimension; + using Base::are_incident_full_cells_valid; + using Base::coaffine_orientation_predicate; + using Base::reset_flat_orientation; + using Base::current_dimension; + using Base::geom_traits; + using Base::index_of_covertex; + //using Base::index_of_second_covertex; + using Base::rotate_rotor; + using Base::infinite_vertex; + using Base::insert_in_hole; + using Base::is_infinite; + using Base::locate; + using Base::points_begin; + using Base::set_neighbors; + using Base::new_full_cell; + using Base::number_of_vertices; + using Base::orientation; + using Base::tds; + using Base::reorient_full_cells; + using Base::full_cell; + using Base::full_cells_begin; + using Base::full_cells_end; + using Base::finite_full_cells_begin; + using Base::finite_full_cells_end; + using Base::vertices_begin; + using Base::vertices_end; + +private: + + // Wrapper + struct Power_side_of_power_sphere_for_non_maximal_dim_d + { + boost::optional* fop; + Construct_flat_orientation_d cfo; + In_flat_power_side_of_power_sphere_d ifpt; + + Power_side_of_power_sphere_for_non_maximal_dim_d( + boost::optional& x, + Construct_flat_orientation_d const&y, + In_flat_power_side_of_power_sphere_d const&z) + : fop(&x), cfo(y), ifpt(z) {} + + template + CGAL::Orientation operator()(Iter a, Iter b, const Weighted_point & p)const + { + if(!*fop) + *fop=cfo(a,b); + return ifpt(fop->get(),a,b,p); + } + }; + +public: + +// - - - - - - - - - - - - - - - - - - - - - - - - - - CREATION / CONSTRUCTORS + + Regular_triangulation(int dim, const Geom_traits &k = Geom_traits()) + : Base(dim, k) + { + } + + // With this constructor, + // the user can specify a Flat_orientation_d object to be used for + // orienting simplices of a specific dimension + // (= preset_flat_orientation_.first) + // It it used by the dark triangulations created by DT::remove + Regular_triangulation( + int dim, + const std::pair &preset_flat_orientation, + const Geom_traits &k = Geom_traits()) + : Base(dim, preset_flat_orientation, k) + { + } + + ~Regular_triangulation() {} + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ACCESS + + // Not Documented + Power_side_of_power_sphere_for_non_maximal_dim_d power_side_of_power_sphere_for_non_maximal_dim_predicate() const + { + return Power_side_of_power_sphere_for_non_maximal_dim_d ( + flat_orientation_, + geom_traits().construct_flat_orientation_d_object(), + geom_traits().in_flat_power_side_of_power_sphere_d_object() + ); + } + + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - REMOVALS + + // Warning: these functions are not 
correct since they do not restore hidden + // vertices + + Full_cell_handle remove(Vertex_handle); + Full_cell_handle remove(const Weighted_point & p, Full_cell_handle hint = Full_cell_handle()) + { + Locate_type lt; + Face f(maximal_dimension()); + Facet ft; + Full_cell_handle s = locate(p, lt, f, ft, hint); + if( Base::ON_VERTEX == lt ) + { + return remove(s->vertex(f.index(0))); + } + return Full_cell_handle(); + } + + template< typename ForwardIterator > + void remove(ForwardIterator start, ForwardIterator end) + { + while( start != end ) + remove(*start++); + } + + // Not documented + void remove_decrease_dimension(Vertex_handle); + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - INSERTIONS + + template< typename ForwardIterator > + std::ptrdiff_t insert(ForwardIterator start, ForwardIterator end) + { + size_type n = number_of_vertices(); + typedef std::vector WP_vec; + WP_vec points(start, end); + + spatial_sort(points.begin(), points.end(), geom_traits()); + + Full_cell_handle hint; + for(typename WP_vec::const_iterator p = points.begin(); p != points.end(); ++p ) + { + Locate_type lt; + Face f(maximal_dimension()); + Facet ft; + Full_cell_handle c = locate (*p, lt, f, ft, hint); + Vertex_handle v = insert (*p, lt, f, ft, c); + + hint = v == Vertex_handle() ? c : v->full_cell(); + } + return number_of_vertices() - n; + } + + Vertex_handle insert(const Weighted_point &, + Locate_type, + const Face &, + const Facet &, + Full_cell_handle); + + Vertex_handle insert(const Weighted_point & p, + Full_cell_handle start = Full_cell_handle()) + { + Locate_type lt; + Face f(maximal_dimension()); + Facet ft; + Full_cell_handle s = locate(p, lt, f, ft, start); + return insert(p, lt, f, ft, s); + } + + Vertex_handle insert(const Weighted_point & p, Vertex_handle hint) + { + CGAL_assertion( Vertex_handle() != hint ); + return insert(p, hint->full_cell()); + } + + Vertex_handle insert_outside_affine_hull(const Weighted_point &); + Vertex_handle insert_in_conflicting_cell( + const Weighted_point &, Full_cell_handle, + Vertex_handle only_if_this_vertex_is_in_the_cz = Vertex_handle()); + + Vertex_handle insert_if_in_star(const Weighted_point &, + Vertex_handle, + Locate_type, + const Face &, + const Facet &, + Full_cell_handle); + + Vertex_handle insert_if_in_star( + const Weighted_point & p, Vertex_handle star_center, + Full_cell_handle start = Full_cell_handle()) + { + Locate_type lt; + Face f(maximal_dimension()); + Facet ft; + Full_cell_handle s = locate(p, lt, f, ft, start); + return insert_if_in_star(p, star_center, lt, f, ft, s); + } + + Vertex_handle insert_if_in_star( + const Weighted_point & p, Vertex_handle star_center, + Vertex_handle hint) + { + CGAL_assertion( Vertex_handle() != hint ); + return insert_if_in_star(p, star_center, hint->full_cell()); + } + +// - - - - - - - - - - - - - - - - - - - - - - - - - GATHERING CONFLICTING SIMPLICES + + bool is_in_conflict(const Weighted_point &, Full_cell_const_handle) const; + + template< class OrientationPredicate > + Oriented_side perturbed_power_side_of_power_sphere(const Weighted_point &, + Full_cell_const_handle, const OrientationPredicate &) const; + + template< typename OutputIterator > + Facet compute_conflict_zone(const Weighted_point &, Full_cell_handle, OutputIterator) const; + + template < typename OrientationPredicate, typename PowerTestPredicate > + class Conflict_predicate + { + const Self & rt_; + const Weighted_point & p_; + OrientationPredicate ori_; + PowerTestPredicate power_side_of_power_sphere_; + 
int cur_dim_; + public: + Conflict_predicate( + const Self & rt, + const Weighted_point & p, + const OrientationPredicate & ori, + const PowerTestPredicate & power_side_of_power_sphere) + : rt_(rt), p_(p), ori_(ori), power_side_of_power_sphere_(power_side_of_power_sphere), cur_dim_(rt.current_dimension()) {} + + inline + bool operator()(Full_cell_const_handle s) const + { + bool ok; + if( ! rt_.is_infinite(s) ) + { + Oriented_side power_side_of_power_sphere = power_side_of_power_sphere_(rt_.points_begin(s), rt_.points_begin(s) + cur_dim_ + 1, p_); + if( ON_POSITIVE_SIDE == power_side_of_power_sphere ) + ok = true; + else if( ON_NEGATIVE_SIDE == power_side_of_power_sphere ) + ok = false; + else + ok = ON_POSITIVE_SIDE == rt_.perturbed_power_side_of_power_sphere(p_, s, ori_); + } + else + { + typedef typename Full_cell::Vertex_handle_const_iterator VHCI; + typedef Substitute_point_in_vertex_iterator F; + F spivi(rt_.infinite_vertex(), &p_); + + Orientation o = ori_( + boost::make_transform_iterator(s->vertices_begin(), spivi), + boost::make_transform_iterator(s->vertices_begin() + cur_dim_ + 1, + spivi)); + + if( POSITIVE == o ) + ok = true; + else if( o == NEGATIVE ) + ok = false; + else + ok = (*this)(s->neighbor( s->index( rt_.infinite_vertex() ) )); + } + return ok; + } + }; + + template < typename ConflictPredicate > + class Conflict_traversal_predicate + { + const Self & rt_; + const ConflictPredicate & pred_; + public: + Conflict_traversal_predicate(const Self & rt, const ConflictPredicate & pred) + : rt_(rt), pred_(pred) + {} + inline + bool operator()(const Facet & f) const + { + return pred_(rt_.full_cell(f)->neighbor(rt_.index_of_covertex(f))); + } + }; + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - VALIDITY + + bool is_valid(bool verbose = false, int level = 0) const; + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - MISC + + std::size_t number_of_hidden_vertices() const + { + return m_hidden_points.size(); + } + +private: + + template + bool + does_cell_range_contain_vertex(InputIterator cz_begin, InputIterator cz_end, + Vertex_handle vh) const + { + // Check all vertices + while(cz_begin != cz_end) + { + Full_cell_handle fch = *cz_begin; + for (int i = 0 ; i <= current_dimension() ; ++i) + { + if (fch->vertex(i) == vh) + return true; + } + ++cz_begin; + } + return false; + } + + template + void + process_conflict_zone(InputIterator cz_begin, InputIterator cz_end, + OutputIterator vertices_out) const + { + // Get all vertices + while(cz_begin != cz_end) + { + Full_cell_handle fch = *cz_begin; + for (int i = 0 ; i <= current_dimension() ; ++i) + { + Vertex_handle vh = fch->vertex(i); + if (vh->full_cell() != Full_cell_handle()) + { + (*vertices_out++) = vh; + vh->set_full_cell(Full_cell_handle()); + } + } + ++cz_begin; + } + } + + + template + void + process_cz_vertices_after_insertion(InputIterator vertices_begin, + InputIterator vertices_end) + { + // Get all vertices + while(vertices_begin != vertices_end) + { + Vertex_handle vh = *vertices_begin; + if (vh->full_cell() == Full_cell_handle()) + { + m_hidden_points.push_back(vh->point()); + tds().delete_vertex(vh); + } + ++vertices_begin; + } + } + +private: + // Some internal types to shorten notation + using typename Base::Coaffine_orientation_d; + using Base::flat_orientation_; + typedef Conflict_predicate + Conflict_pred_in_subspace; + typedef Conflict_predicate + Conflict_pred_in_fullspace; + typedef Conflict_traversal_predicate + Conflict_traversal_pred_in_subspace; + typedef 
Conflict_traversal_predicate + Conflict_traversal_pred_in_fullspace; + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - MEMBER VARIABLES + std::vector m_hidden_points; + +}; // class Regular_triangulation + + +// = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = +// FUNCTIONS THAT ARE MEMBER METHODS: + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - REMOVALS + + +// Warning: this function is not correct since it does not restore hidden +// vertices +template< typename Traits, typename TDS > +typename Regular_triangulation::Full_cell_handle +Regular_triangulation +::remove( Vertex_handle v ) +{ + CGAL_precondition( ! is_infinite(v) ); + CGAL_expensive_precondition( is_vertex(v) ); + + // THE CASE cur_dim == 0 + if( 0 == current_dimension() ) + { + remove_decrease_dimension(v); + return Full_cell_handle(); + } + else if( 1 == current_dimension() ) + { // THE CASE cur_dim == 1 + if( 2 == number_of_vertices() ) + { + remove_decrease_dimension(v); + return Full_cell_handle(); + } + Full_cell_handle left = v->full_cell(); + if( 0 == left->index(v) ) + left = left->neighbor(1); + CGAL_assertion( 1 == left->index(v) ); + Full_cell_handle right = left->neighbor(0); + tds().associate_vertex_with_full_cell(left, 1, right->vertex(1)); + set_neighbors(left, 0, right->neighbor(0), right->mirror_index(0)); + tds().delete_vertex(v); + tds().delete_full_cell(right); + return left; + } + + // THE CASE cur_dim >= 2 + // Gather the finite vertices sharing an edge with |v| + typedef typename Base::template Full_cell_set Simplices; + Simplices simps; + std::back_insert_iterator out(simps); + tds().incident_full_cells(v, out); + typedef std::set Vertex_set; + Vertex_set verts; + Vertex_handle vh; + for( typename Simplices::iterator it = simps.begin(); it != simps.end(); ++it ) + for( int i = 0; i <= current_dimension(); ++i ) + { + vh = (*it)->vertex(i); + if( is_infinite(vh) ) + continue; + if( vh == v ) + continue; + verts.insert(vh); + } + + // After gathering finite neighboring vertices, create their Dark Delaunay triangulation + typedef Triangulation_vertex Dark_vertex_base; + typedef Triangulation_full_cell< + Geom_traits, + internal::Triangulation::Dark_full_cell_data > Dark_full_cell_base; + typedef Triangulation_data_structure Dark_tds; + typedef Regular_triangulation Dark_triangulation; + typedef typename Dark_triangulation::Face Dark_face; + typedef typename Dark_triangulation::Facet Dark_facet; + typedef typename Dark_triangulation::Vertex_handle Dark_v_handle; + typedef typename Dark_triangulation::Full_cell_handle Dark_s_handle; + + // If flat_orientation_ is defined, we give it the Dark triangulation + // so that the orientation it uses for "current_dimension()"-simplices is + // coherent with the global triangulation + Dark_triangulation dark_side( + maximal_dimension(), + flat_orientation_ ? 
+ std::pair(current_dimension(), flat_orientation_.get_ptr()) + : std::pair(std::numeric_limits::max(), NULL) ); + + Dark_s_handle dark_s; + Dark_v_handle dark_v; + typedef std::map Vertex_map; + Vertex_map light_to_dark; + typename Vertex_set::iterator vit = verts.begin(); + while( vit != verts.end() ) + { + dark_v = dark_side.insert((*vit)->point(), dark_s); + dark_s = dark_v->full_cell(); + dark_v->data() = *vit; + light_to_dark[*vit] = dark_v; + ++vit; + } + + if( dark_side.current_dimension() != current_dimension() ) + { + CGAL_assertion( dark_side.current_dimension() + 1 == current_dimension() ); + // Here, the finite neighbors of |v| span a affine subspace of + // dimension one less than the current dimension. Two cases are possible: + if( (size_type)(verts.size() + 1) == number_of_vertices() ) + { + remove_decrease_dimension(v); + return Full_cell_handle(); + } + else + { // |v| is strictly outside the convex hull of the rest of the points. This is an + // easy case: first, modify the finite full_cells, then, delete the infinite ones. + // We don't even need the Dark triangulation. + Simplices infinite_simps; + { + Simplices finite_simps; + for( typename Simplices::iterator it = simps.begin(); it != simps.end(); ++it ) + if( is_infinite(*it) ) + infinite_simps.push_back(*it); + else + finite_simps.push_back(*it); + simps.swap(finite_simps); + } // now, simps only contains finite simplices + // First, modify the finite full_cells: + for( typename Simplices::iterator it = simps.begin(); it != simps.end(); ++it ) + { + int v_idx = (*it)->index(v); + tds().associate_vertex_with_full_cell(*it, v_idx, infinite_vertex()); + } + // Make the handles to infinite full cells searchable + infinite_simps.make_searchable(); + // Then, modify the neighboring relation + for( typename Simplices::iterator it = simps.begin(); it != simps.end(); ++it ) + { + for( int i = 0 ; i <= current_dimension(); ++i ) + { + if (is_infinite((*it)->vertex(i))) + continue; + (*it)->vertex(i)->set_full_cell(*it); + Full_cell_handle n = (*it)->neighbor(i); + // Was |n| a finite full cell prior to removing |v| ? + if( ! infinite_simps.contains(n) ) + continue; + int n_idx = n->index(v); + set_neighbors(*it, i, n->neighbor(n_idx), n->neighbor(n_idx)->index(n)); + } + } + Full_cell_handle ret_s; + // Then, we delete the infinite full_cells + for( typename Simplices::iterator it = infinite_simps.begin(); it != infinite_simps.end(); ++it ) + tds().delete_full_cell(*it); + tds().delete_vertex(v); + return simps.front(); + } + } + else // From here on, dark_side.current_dimension() == current_dimension() + { + dark_side.infinite_vertex()->data() = infinite_vertex(); + light_to_dark[infinite_vertex()] = dark_side.infinite_vertex(); + } + + // Now, compute the conflict zone of v->point() in + // the dark side. This is precisely the set of full_cells + // that we have to glue back into the light side. 
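The comment above closes the set-up of the "dark" triangulation; the next step gathers the conflict zone of v->point() inside it. As a minimal, self-contained sketch of that gathering step (not the patched code: the Cell type, the predicate and the function name below are illustrative only), the conflict zone is just the connected set of cells around a starting cell on which the conflict predicate holds, collected by walking across facets:

#include <functional>
#include <iostream>
#include <set>
#include <stack>
#include <vector>

struct Cell { std::vector<Cell*> neighbors; };

// Collect the connected set of cells, starting at `start`, whose members all
// satisfy `in_conflict`. `start` is assumed to already be in conflict.
std::set<Cell*> gather_conflict_zone(Cell* start,
                                     const std::function<bool(Cell*)>& in_conflict) {
  std::set<Cell*> zone;
  std::stack<Cell*> todo;
  todo.push(start);
  while (!todo.empty()) {
    Cell* c = todo.top();
    todo.pop();
    if (!zone.insert(c).second) continue;   // already collected
    for (Cell* n : c->neighbors) {
      if (zone.count(n) == 0 && in_conflict(n))
        todo.push(n);                        // the zone grows across this facet
      // otherwise this facet lies on the boundary of the future hole
    }
  }
  return zone;
}

int main() {
  // Three cells in a row; only the first two are "in conflict".
  Cell a, b, c;
  a.neighbors = { &b };
  b.neighbors = { &a, &c };
  c.neighbors = { &b };
  std::set<Cell*> in = { &a, &b };
  std::set<Cell*> zone =
      gather_conflict_zone(&a, [&in](Cell* x) { return in.count(x) != 0; });
  std::cout << zone.size() << '\n';  // expected: 2
}

Facets at which the walk stops because the predicate fails bound the hole; the patched compute_conflict_zone returns one such boundary facet, which is later handed to insert_in_hole.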
+ Dark_face dark_f(dark_side.maximal_dimension()); + Dark_facet dark_ft; + typename Dark_triangulation::Locate_type lt; + dark_s = dark_side.locate(v->point(), lt, dark_f, dark_ft); + CGAL_assertion( lt != Dark_triangulation::ON_VERTEX + && lt != Dark_triangulation::OUTSIDE_AFFINE_HULL ); + + // |ret_s| is the full_cell that we return + Dark_s_handle dark_ret_s = dark_s; + Full_cell_handle ret_s; + + typedef typename Base::template Full_cell_set Dark_full_cells; + Dark_full_cells conflict_zone; + std::back_insert_iterator dark_out(conflict_zone); + + dark_ft = dark_side.compute_conflict_zone(v->point(), dark_s, dark_out); + // Make the dark simplices in the conflict zone searchable + conflict_zone.make_searchable(); + + // THE FOLLOWING SHOULD MAYBE GO IN TDS. + // Here is the plan: + // 1. Pick any Facet from boundary of the light zone + // 2. Find corresponding Facet on boundary of dark zone + // 3. stitch. + + // 1. Build a facet on the boudary of the light zone: + Full_cell_handle light_s = *simps.begin(); + Facet light_ft(light_s, light_s->index(v)); + + // 2. Find corresponding Dark_facet on boundary of the dark zone + Dark_full_cells dark_incident_s; + for( int i = 0; i <= current_dimension(); ++i ) + { + if( index_of_covertex(light_ft) == i ) + continue; + Dark_v_handle dark_v = light_to_dark[full_cell(light_ft)->vertex(i)]; + dark_incident_s.clear(); + dark_out = std::back_inserter(dark_incident_s); + dark_side.tds().incident_full_cells(dark_v, dark_out); + for(typename Dark_full_cells::iterator it = dark_incident_s.begin(); + it != dark_incident_s.end(); + ++it) + { + (*it)->data().count_ += 1; + } + } + + for( typename Dark_full_cells::iterator it = dark_incident_s.begin(); it != dark_incident_s.end(); ++it ) + { + if( current_dimension() != (*it)->data().count_ ) + continue; + if( ! conflict_zone.contains(*it) ) + continue; + // We found a full_cell incident to the dark facet corresponding to the light facet |light_ft| + int ft_idx = 0; + while( light_s->has_vertex( (*it)->vertex(ft_idx)->data() ) ) + ++ft_idx; + dark_ft = Dark_facet(*it, ft_idx); + break; + } + // Pre-3. Now, we are ready to traverse both boundary and do the stiching. + + // But first, we create the new full_cells in the light triangulation, + // with as much adjacency information as possible. + + // Create new full_cells with vertices + for( typename Dark_full_cells::iterator it = conflict_zone.begin(); it != conflict_zone.end(); ++it ) + { + Full_cell_handle new_s = new_full_cell(); + (*it)->data().light_copy_ = new_s; + for( int i = 0; i <= current_dimension(); ++i ) + tds().associate_vertex_with_full_cell(new_s, i, (*it)->vertex(i)->data()); + if( dark_ret_s == *it ) + ret_s = new_s; + } + + // Setup adjacencies inside the hole + for( typename Dark_full_cells::iterator it = conflict_zone.begin(); it != conflict_zone.end(); ++it ) + { + Full_cell_handle new_s = (*it)->data().light_copy_; + for( int i = 0; i <= current_dimension(); ++i ) + if( conflict_zone.contains((*it)->neighbor(i)) ) + tds().set_neighbors(new_s, i, (*it)->neighbor(i)->data().light_copy_, (*it)->mirror_index(i)); + } + + // 3. Stitch + simps.make_searchable(); + typedef std::queue > Queue; + Queue q; + q.push(std::make_pair(light_ft, dark_ft)); + dark_s = dark_side.full_cell(dark_ft); + int dark_i = dark_side.index_of_covertex(dark_ft); + // mark dark_ft as visited: + // TODO try by marking with Dark_v_handle (vertex) + dark_s->neighbor(dark_i)->set_neighbor(dark_s->mirror_index(dark_i), Dark_s_handle()); + while( ! 
q.empty() ) + { + std::pair p = q.front(); + q.pop(); + light_ft = p.first; + dark_ft = p.second; + light_s = full_cell(light_ft); + int light_i = index_of_covertex(light_ft); + dark_s = dark_side.full_cell(dark_ft); + int dark_i = dark_side.index_of_covertex(dark_ft); + Full_cell_handle light_n = light_s->neighbor(light_i); + set_neighbors(dark_s->data().light_copy_, dark_i, light_n, light_s->mirror_index(light_i)); + for( int di = 0; di <= current_dimension(); ++di ) + { + if( di == dark_i ) + continue; + int li = light_s->index(dark_s->vertex(di)->data()); + Rotor light_r(light_s, li, light_i); + typename Dark_triangulation::Rotor dark_r(dark_s, di, dark_i); + + while( simps.contains(cpp11::get<0>(light_r)->neighbor(cpp11::get<1>(light_r))) ) + light_r = rotate_rotor(light_r); + + while( conflict_zone.contains(cpp11::get<0>(dark_r)->neighbor(cpp11::get<1>(dark_r))) ) + dark_r = dark_side.rotate_rotor(dark_r); + + Dark_s_handle dark_ns = cpp11::get<0>(dark_r); + int dark_ni = cpp11::get<1>(dark_r); + Full_cell_handle light_ns = cpp11::get<0>(light_r); + int light_ni = cpp11::get<1>(light_r); + // mark dark_r as visited: + // TODO try by marking with Dark_v_handle (vertex) + Dark_s_handle outside = dark_ns->neighbor(dark_ni); + Dark_v_handle mirror = dark_ns->mirror_vertex(dark_ni, current_dimension()); + int dn = outside->index(mirror); + if( Dark_s_handle() == outside->neighbor(dn) ) + continue; + outside->set_neighbor(dn, Dark_s_handle()); + q.push(std::make_pair(Facet(light_ns, light_ni), Dark_facet(dark_ns, dark_ni))); + } + } + tds().delete_full_cells(simps.begin(), simps.end()); + tds().delete_vertex(v); + return ret_s; +} + +template< typename Traits, typename TDS > +void +Regular_triangulation +::remove_decrease_dimension(Vertex_handle v) +{ + CGAL_precondition( current_dimension() >= 0 ); + tds().remove_decrease_dimension(v, infinite_vertex()); + // reset the predicates: + reset_flat_orientation(); + if( 1 <= current_dimension() ) + { + Full_cell_handle inf_v_cell = infinite_vertex()->full_cell(); + int inf_v_index = inf_v_cell->index(infinite_vertex()); + Full_cell_handle s = inf_v_cell->neighbor(inf_v_index); + Orientation o = orientation(s); + CGAL_assertion( ZERO != o ); + if( NEGATIVE == o ) + reorient_full_cells(); + } +} + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - INSERTIONS + +template< typename Traits, typename TDS > +typename Regular_triangulation::Vertex_handle +Regular_triangulation +::insert(const Weighted_point & p, Locate_type lt, const Face & f, const Facet & ft, Full_cell_handle s) +{ + switch( lt ) + { + case Base::OUTSIDE_AFFINE_HULL: + return insert_outside_affine_hull(p); + break; + case Base::ON_VERTEX: + { + Vertex_handle v = s->vertex(f.index(0)); + typename RTTraits::Compute_weight_d pw = + geom_traits().compute_weight_d_object(); + + if (pw(p) == pw(v->point())) + return v; + // If dim == 0 and the new point has a bigger weight, + // we just replace the point, and the former point gets hidden + else if (current_dimension() == 0) + { + if (pw(p) > pw(v->point())) + { + m_hidden_points.push_back(v->point()); + v->set_point(p); + return v; + } + // Otherwise, the new point is hidden + else + { + m_hidden_points.push_back(p); + return Vertex_handle(); + } + } + // Otherwise, we apply the "normal" algorithm + + // !NO break here! + } + default: + return insert_in_conflicting_cell(p, s); + } +} + +/* +Inserts the point `p` in the regular triangulation. Returns a handle to the +newly created vertex at that position. 
+\pre The point `p` +must lie outside the affine hull of the regular triangulation. This implies that +`rt`.`current_dimension()` must be smaller than `rt`.`maximal_dimension()`. +*/ +template< typename Traits, typename TDS > +typename Regular_triangulation::Vertex_handle +Regular_triangulation +::insert_outside_affine_hull(const Weighted_point & p) +{ + // we don't use Base::insert_outside_affine_hull(...) because here, we + // also need to reset the side_of_oriented_subsphere functor. + CGAL_precondition( current_dimension() < maximal_dimension() ); + Vertex_handle v = tds().insert_increase_dimension(infinite_vertex()); + // reset the predicates: + reset_flat_orientation(); + v->set_point(p); + if( current_dimension() >= 1 ) + { + Full_cell_handle inf_v_cell = infinite_vertex()->full_cell(); + int inf_v_index = inf_v_cell->index(infinite_vertex()); + Full_cell_handle s = inf_v_cell->neighbor(inf_v_index); + Orientation o = orientation(s); + CGAL_assertion( ZERO != o ); + if( NEGATIVE == o ) + reorient_full_cells(); + + // We just inserted the second finite point and the right infinite + // cell is like : (inf_v, v), but we want it to be (v, inf_v) to be + // consistent with the rest of the cells + if (current_dimension() == 1) + { + // Is "inf_v_cell" the right infinite cell? Then inf_v_index should be 1 + if (inf_v_cell->neighbor(inf_v_index)->index(inf_v_cell) == 0 + && inf_v_index == 0) + { + inf_v_cell->swap_vertices(current_dimension() - 1, current_dimension()); + } + else + { + inf_v_cell = inf_v_cell->neighbor((inf_v_index + 1) % 2); + inf_v_index = inf_v_cell->index(infinite_vertex()); + // Is "inf_v_cell" the right infinite cell? Then inf_v_index should be 1 + if (inf_v_cell->neighbor(inf_v_index)->index(inf_v_cell) == 0 + && inf_v_index == 0) + { + inf_v_cell->swap_vertices(current_dimension() - 1, current_dimension()); + } + } + } + } + return v; +} + +template< typename Traits, typename TDS > +typename Regular_triangulation::Vertex_handle +Regular_triangulation +::insert_if_in_star(const Weighted_point & p, + Vertex_handle star_center, + Locate_type lt, + const Face & f, + const Facet & ft, + Full_cell_handle s) +{ + switch( lt ) + { + case Base::OUTSIDE_AFFINE_HULL: + return insert_outside_affine_hull(p); + break; + case Base::ON_VERTEX: + { + Vertex_handle v = s->vertex(f.index(0)); + typename RTTraits::Compute_weight_d pw = + geom_traits().compute_weight_d_object(); + if (pw(p) == pw(v->point())) + return v; + // If dim == 0 and the new point has a bigger weight, + // we replace the point + else if (current_dimension() == 0) + { + if (pw(p) > pw(v->point())) + v->set_point(p); + else + return v; + } + // Otherwise, we apply the "normal" algorithm + + // !NO break here! + } + default: + return insert_in_conflicting_cell(p, s, star_center); + } + + return Vertex_handle(); +} + +/* +[Undocumented function] + +Inserts the point `p` in the regular triangulation. `p` must be +in conflict with the second parameter `c`, which is used as a +starting point for `compute_conflict_zone`. +The function is faster than the standard `insert` function since +it does not need to call `locate`. + +If this insertion creates a vertex, this vertex is returned. + +If `p` coincides with an existing vertex and has a greater weight, +then the existing weighted point becomes hidden and `p` replaces it as vertex +of the triangulation. + +If `p` coincides with an already existing vertex (both point and +weights being equal), then this vertex is returned and the triangulation +remains unchanged. 
+ +Otherwise if `p` does not appear as a vertex of the triangulation, +then it is stored as a hidden point and this method returns the default +constructed handle. + +\pre The point `p` must be in conflict with the full cell `c`. +*/ + +template< typename Traits, typename TDS > +typename Regular_triangulation::Vertex_handle +Regular_triangulation +::insert_in_conflicting_cell(const Weighted_point & p, + Full_cell_handle s, + Vertex_handle only_if_this_vertex_is_in_the_cz) +{ + typedef std::vector Full_cell_h_vector; + + bool in_conflict = is_in_conflict(p, s); + + // If p is not in conflict with s, then p is hidden + // => we don't insert it + if (!in_conflict) + { + m_hidden_points.push_back(p); + return Vertex_handle(); + } + else + { + Full_cell_h_vector cs; // for storing conflicting full_cells. + cs.reserve(64); + std::back_insert_iterator out(cs); + Facet ft = compute_conflict_zone(p, s, out); + + // Check if the CZ contains "only_if_this_vertex_is_in_the_cz" + if (only_if_this_vertex_is_in_the_cz != Vertex_handle() + && !does_cell_range_contain_vertex(cs.begin(), cs.end(), + only_if_this_vertex_is_in_the_cz)) + { + return Vertex_handle(); + } + + // Otherwise, proceed with the insertion + std::vector cz_vertices; + cz_vertices.reserve(64); + process_conflict_zone(cs.begin(), cs.end(), + std::back_inserter(cz_vertices)); + + Vertex_handle ret = insert_in_hole(p, cs.begin(), cs.end(), ft); + + process_cz_vertices_after_insertion(cz_vertices.begin(), cz_vertices.end()); + + return ret; + } +} + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - GATHERING CONFLICTING SIMPLICES + +// NOT DOCUMENTED +template< typename Traits, typename TDS > +template< typename OrientationPred > +Oriented_side +Regular_triangulation +::perturbed_power_side_of_power_sphere(const Weighted_point & p, Full_cell_const_handle s, + const OrientationPred & ori) const +{ + CGAL_precondition_msg( ! is_infinite(s), "full cell must be finite"); + CGAL_expensive_precondition( POSITIVE == orientation(s) ); + typedef std::vector Points; + Points points(current_dimension() + 2); + int i(0); + for( ; i <= current_dimension(); ++i ) + points[i] = &(s->vertex(i)->point()); + points[i] = &p; + std::sort(points.begin(), points.end(), + internal::Triangulation::Compare_points_for_perturbation(*this)); + typename Points::const_reverse_iterator cut_pt = points.rbegin(); + Points test_points; + while( cut_pt != points.rend() ) + { + if( &p == *cut_pt ) + // because the full_cell "s" is assumed to be positively oriented + return ON_NEGATIVE_SIDE; // we consider |p| to lie outside the sphere + test_points.clear(); + Point_const_iterator spit = points_begin(s); + int adjust_sign = -1; + for( i = 0; i < current_dimension(); ++i ) + { + if( &(*spit) == *cut_pt ) + { + ++spit; + adjust_sign = (((current_dimension() + i) % 2) == 0) ? 
-1 : +1; + } + test_points.push_back(&(*spit)); + ++spit; + } + test_points.push_back(&p); + + typedef typename CGAL::Iterator_project< + typename Points::iterator, + internal::Triangulation::Point_from_pointer, + const Weighted_point &, const Weighted_point * + > Point_pointer_iterator; + + Orientation ori_value = ori( + Point_pointer_iterator(test_points.begin()), + Point_pointer_iterator(test_points.end())); + + if( ZERO != ori_value ) + return Oriented_side( - adjust_sign * ori_value ); + + ++cut_pt; + } + CGAL_assertion(false); // we should never reach here + return ON_NEGATIVE_SIDE; +} + +template< typename Traits, typename TDS > +bool +Regular_triangulation +::is_in_conflict(const Weighted_point & p, Full_cell_const_handle s) const +{ + CGAL_precondition( 1 <= current_dimension() ); + if( current_dimension() < maximal_dimension() ) + { + Conflict_pred_in_subspace c( + *this, p, + coaffine_orientation_predicate(), + power_side_of_power_sphere_for_non_maximal_dim_predicate()); + return c(s); + } + else + { + Orientation_d ori = geom_traits().orientation_d_object(); + Power_side_of_power_sphere_d side = geom_traits().power_side_of_power_sphere_d_object(); + Conflict_pred_in_fullspace c(*this, p, ori, side); + return c(s); + } +} + +template< typename Traits, typename TDS > +template< typename OutputIterator > +typename Regular_triangulation::Facet +Regular_triangulation +::compute_conflict_zone(const Weighted_point & p, Full_cell_handle s, OutputIterator out) const +{ + CGAL_precondition( 1 <= current_dimension() ); + if( current_dimension() < maximal_dimension() ) + { + Conflict_pred_in_subspace c( + *this, p, + coaffine_orientation_predicate(), + power_side_of_power_sphere_for_non_maximal_dim_predicate()); + Conflict_traversal_pred_in_subspace tp(*this, c); + return tds().gather_full_cells(s, tp, out); + } + else + { + Orientation_d ori = geom_traits().orientation_d_object(); + Power_side_of_power_sphere_d side = geom_traits().power_side_of_power_sphere_d_object(); + Conflict_pred_in_fullspace c(*this, p, ori, side); + Conflict_traversal_pred_in_fullspace tp(*this, c); + return tds().gather_full_cells(s, tp, out); + } +} + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - VALIDITY + +template< typename Traits, typename TDS > +bool +Regular_triangulation +::is_valid(bool verbose, int level) const +{ + if (!Base::is_valid(verbose, level)) + return false; + + int dim = current_dimension(); + if (dim == maximal_dimension()) + { + for (Finite_full_cell_const_iterator cit = finite_full_cells_begin() ; + cit != finite_full_cells_end() ; ++cit ) + { + Full_cell_const_handle ch = cit.base(); + for(int i = 0; i < dim+1 ; ++i ) + { + // If the i-th neighbor is not an infinite cell + Vertex_handle opposite_vh = + ch->neighbor(i)->vertex(ch->neighbor(i)->index(ch)); + if (!is_infinite(opposite_vh)) + { + Power_side_of_power_sphere_d side = + geom_traits().power_side_of_power_sphere_d_object(); + if (side(Point_const_iterator(ch->vertices_begin()), + Point_const_iterator(ch->vertices_end()), + opposite_vh->point()) == ON_POSITIVE_SIDE) + { + if (verbose) + CGAL_warning_msg(false, "Non-empty sphere"); + return false; + } + } + } + } + } + return true; +} + +} //namespace CGAL + +#endif //CGAL_REGULAR_TRIANGULATION_H diff --git a/src/common/include/gudhi_patches/CGAL/Regular_triangulation_traits_adapter.h b/src/common/include/gudhi_patches/CGAL/Regular_triangulation_traits_adapter.h new file mode 100644 index 00000000..78bb95a6 --- /dev/null +++ 
b/src/common/include/gudhi_patches/CGAL/Regular_triangulation_traits_adapter.h @@ -0,0 +1,288 @@ +// Copyright (c) 2014 INRIA Sophia-Antipolis (France). +// All rights reserved. +// +// This file is part of CGAL (www.cgal.org). +// You can redistribute it and/or modify it under the terms of the GNU +// General Public License as published by the Free Software Foundation, +// either version 3 of the License, or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Clement Jamin + +#ifndef CGAL_REGULAR_TRIANGULATION_TRAITS_ADAPTER_H +#define CGAL_REGULAR_TRIANGULATION_TRAITS_ADAPTER_H + +#include + +#include + +namespace CGAL { + +// Wrapper class to make a model of `RegularTriangulationTraits` easily usable +// by the `Regular_triangulation` class. By using this class: +// - Point_d (used by `Triangulation` and the TDS) becomes a weighted point +// - Predicates and functors such as Less_coordinate_d or Orientation_d +// can be called using weighted points instead of bare points (this is +// needed because `Weighted_point_d` is not convertible to `Point_d`) +// This way, `Triangulation` works perfectly well with weighted points. + +template +class Regular_triangulation_traits_adapter + : public K +{ +public: + typedef K Base; + + // Required by TriangulationTraits + typedef typename K::Dimension Dimension; + typedef typename K::FT FT; + typedef typename K::Flat_orientation_d Flat_orientation_d; + typedef typename K::Weighted_point_d Point_d; + + // Required by RegularTriangulationTraits + typedef typename K::Point_d Bare_point_d; + typedef typename K::Weighted_point_d Weighted_point_d; + typedef typename K::Construct_point_d Construct_point_d; + typedef typename K::Compute_weight_d Compute_weight_d; + typedef typename K::Power_side_of_power_sphere_d Power_side_of_power_sphere_d; + typedef typename K::In_flat_power_side_of_power_sphere_d + In_flat_power_side_of_power_sphere_d; + + //=========================================================================== + // Custom types + //=========================================================================== + + // Required by SpatialSortingTraits_d + class Less_coordinate_d + { + const K &m_kernel; + + public: + typedef bool result_type; + + Less_coordinate_d(const K &kernel) + : m_kernel(kernel) {} + + result_type operator()( + Weighted_point_d const& p, Weighted_point_d const& q, int i) const + { + Construct_point_d cp = m_kernel.construct_point_d_object(); + return m_kernel.less_coordinate_d_object() (cp(p), cp(q), i); + } + }; + + //=========================================================================== + + // Required by TriangulationTraits + class Orientation_d + { + const K &m_kernel; + + public: + typedef Orientation result_type; + + Orientation_d(const K &kernel) + : m_kernel(kernel) {} + + template + result_type operator()(ForwardIterator start, ForwardIterator end) const + { + Construct_point_d cp = m_kernel.construct_point_d_object(); + return m_kernel.orientation_d_object() ( + boost::make_transform_iterator(start, cp), + boost::make_transform_iterator(end, cp) + ); + } + }; + + //=========================================================================== + + // Required by TriangulationTraits + 
class Construct_flat_orientation_d + { + const K &m_kernel; + + public: + typedef Flat_orientation_d result_type; + + Construct_flat_orientation_d(const K &kernel) + : m_kernel(kernel) {} + + template + result_type operator()(ForwardIterator start, ForwardIterator end) const + { + Construct_point_d cp = m_kernel.construct_point_d_object(); + return m_kernel.construct_flat_orientation_d_object() ( + boost::make_transform_iterator(start, cp), + boost::make_transform_iterator(end, cp) + ); + } + }; + + + //=========================================================================== + + // Required by TriangulationTraits + class In_flat_orientation_d + { + const K &m_kernel; + + public: + typedef Orientation result_type; + + In_flat_orientation_d(const K &kernel) + : m_kernel(kernel) {} + + template + result_type operator()(Flat_orientation_d orient, + ForwardIterator start, ForwardIterator end) const + { + Construct_point_d cp = m_kernel.construct_point_d_object(); + return m_kernel.in_flat_orientation_d_object() ( + orient, + boost::make_transform_iterator(start, cp), + boost::make_transform_iterator(end, cp) + ); + } + }; + + //=========================================================================== + + // Required by TriangulationTraits + class Contained_in_affine_hull_d + { + const K &m_kernel; + + public: + typedef bool result_type; + + Contained_in_affine_hull_d(const K &kernel) + : m_kernel(kernel) {} + + template + result_type operator()(ForwardIterator start, ForwardIterator end, + const Weighted_point_d & p) const + { + Construct_point_d cp = m_kernel.construct_point_d_object(); + return m_kernel.contained_in_affine_hull_d_object() ( + boost::make_transform_iterator(start, cp), + boost::make_transform_iterator(end, cp), + cp(p) + ); + } + }; + + //=========================================================================== + + // Required by TriangulationTraits + class Compare_lexicographically_d + { + const K &m_kernel; + + public: + typedef Comparison_result result_type; + + Compare_lexicographically_d(const K &kernel) + : m_kernel(kernel) {} + + result_type operator()( + const Weighted_point_d & p, const Weighted_point_d & q) const + { + Construct_point_d cp = m_kernel.construct_point_d_object(); + return m_kernel.compare_lexicographically_d_object()(cp(p), cp(q)); + } + }; + + //=========================================================================== + + // Only for Triangulation_off_ostream.h (undocumented) + class Compute_coordinate_d + { + const K &m_kernel; + + public: + typedef FT result_type; + + Compute_coordinate_d(const K &kernel) + : m_kernel(kernel) {} + + result_type operator()( + const Weighted_point_d & p, const int i) const + { + Construct_point_d cp = m_kernel.construct_point_d_object(); + return m_kernel.compute_coordinate_d_object()(cp(p), i); + } + }; + + //=========================================================================== + + // To satisfy SpatialSortingTraits_d + // and also for Triangulation_off_ostream.h (undocumented) + class Point_dimension_d + { + const K &m_kernel; + + public: + typedef int result_type; + + Point_dimension_d(const K &kernel) + : m_kernel(kernel) {} + + result_type operator()( + const Weighted_point_d & p) const + { + Construct_point_d cp = m_kernel.construct_point_d_object(); + return m_kernel.point_dimension_d_object()(cp(p)); + } + }; + + //=========================================================================== + // Object creation + //=========================================================================== + + 
Less_coordinate_d less_coordinate_d_object() const + { + return Less_coordinate_d(*this); + } + Contained_in_affine_hull_d contained_in_affine_hull_d_object() const + { + return Contained_in_affine_hull_d(*this); + } + Orientation_d orientation_d_object() const + { + return Orientation_d(*this); + } + Construct_flat_orientation_d construct_flat_orientation_d_object() const + { + return Construct_flat_orientation_d(*this); + } + In_flat_orientation_d in_flat_orientation_d_object() const + { + return In_flat_orientation_d(*this); + } + Compare_lexicographically_d compare_lexicographically_d_object() const + { + return Compare_lexicographically_d(*this); + } + Compute_coordinate_d compute_coordinate_d_object() const + { + return Compute_coordinate_d(*this); + } + Point_dimension_d point_dimension_d_object() const + { + return Point_dimension_d(*this); + } +}; + + +} //namespace CGAL + +#endif // CGAL_REGULAR_TRIANGULATION_TRAITS_ADAPTER_H diff --git a/src/common/include/gudhi_patches/CGAL/TDS_full_cell_default_storage_policy.h b/src/common/include/gudhi_patches/CGAL/TDS_full_cell_default_storage_policy.h new file mode 100644 index 00000000..9a6030e5 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/TDS_full_cell_default_storage_policy.h @@ -0,0 +1,99 @@ +// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France). +// All rights reserved. +// +// This file is part of CGAL (www.cgal.org). +// You can redistribute it and/or modify it under the terms of the GNU +// General Public License as published by the Free Software Foundation, +// either version 3 of the License, or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Samuel Hornus + +#ifndef CGAL_TDS_FULL_CELL_DEFAULT_STORAGE_POLICY_H +#define CGAL_TDS_FULL_CELL_DEFAULT_STORAGE_POLICY_H + +#include +#include +#include + +#include + +namespace CGAL { + +// POLICY TAG + +struct TDS_full_cell_default_storage_policy {}; // stores no additional data. Uses XOR trick. 
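The storage-policy tag above is documented as using an "XOR trick": instead of storing, for each facet, which vertex of the neighbouring cell is the mirror vertex, the default policy recovers it from the vertex addresses (see the mirror_vertex member that follows). A small standalone illustration of the identity it relies on, with made-up names and not the patched types:

#include <cstdint>
#include <iostream>

int main() {
  int a, b, c, v_only_in_cell1, v_only_in_cell2;          // stand-ins for vertices
  const int* cell1[] = { &a, &b, &c, &v_only_in_cell1 };  // vertices of cell 1
  const int* cell2[] = { &a, &b, &c, &v_only_in_cell2 };  // vertices of its neighbour

  std::uintptr_t x = 0;
  for (const int* p : cell1) x ^= reinterpret_cast<std::uintptr_t>(p);
  for (const int* p : cell2) x ^= reinterpret_cast<std::uintptr_t>(p);
  x ^= reinterpret_cast<std::uintptr_t>(&v_only_in_cell1);

  // x now holds the address of the neighbour's vertex opposite the shared facet.
  std::cout << (reinterpret_cast<const int*>(x) == &v_only_in_cell2) << '\n'; // prints 1
  return 0;
}

Two adjacent full cells share all vertices except one on each side, so XOR-ing the vertex addresses of both cells cancels the shared ones; one more XOR with the cell's own non-shared vertex leaves exactly the neighbour's opposite vertex, which is what mirror_vertex reconstructs without any extra storage.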
+ +template< typename V, typename S, typename D, typename StoragePolicy > +struct TFC_data; // TFC = Triangulation Full Cell + +template< typename Vertex_handle, typename Full_cell_handle, typename Dimen > +struct TFC_data< Vertex_handle, Full_cell_handle, Dimen, TDS_full_cell_default_storage_policy > +{ + typedef typename internal::Dimen_plus_one::type Dimen_plus; + typedef typename internal::S_or_D_array< Vertex_handle, Dimen_plus, true > Vertex_handle_array; + typedef typename internal::S_or_D_array< Full_cell_handle, Dimen_plus > Full_cell_handle_array; + + Vertex_handle_array vertices_; + Full_cell_handle_array neighbors_; + + TFC_data(const int dmax) + : vertices_(dmax+1), neighbors_(dmax+1) + {} + void* for_compact_container() const { return vertices_.for_compact_container(); } + void* & for_compact_container() { return vertices_.for_compact_container(); } + int dimension() const { return ( vertices_.size() - 1 ); } + void set_mirror_index(const int, const int) {} +#ifdef BOOST_NO_INT64_T + typedef std::ptrdiff_t Xor_type; +#else + typedef boost::int_least64_t Xor_type; +#endif + Xor_type xor_of_vertices(const int cur_dim) const + { + Xor_type result(0); + for( int i = 0; i <= cur_dim; ++i ) + result ^= reinterpret_cast(&(*vertices_[i])); + return result; + } + // ASSUMES |*this| is indeed a neighbor of neighbor(i): + // NOT correct when the hole (in insert_in_hole) is doubly covered. + int mirror_index(const int i) const + { + int index = 0; + Full_cell_handle n = neighbors_[i]; + Full_cell_handle o = n->neighbor(index); + while( &(o->combinatorics_) != this ) + o = n->neighbor(++index); + return index; + } + Vertex_handle mirror_vertex(const int i, const int cur_dim) const + { + Xor_type opp_vertex = xor_of_vertices(cur_dim) + ^ neighbors_[i]->xor_of_vertices(cur_dim) + ^ reinterpret_cast(&(*vertices_[i])); + Vertex_handle mirror; + typedef typename Vertex_handle::pointer pointer; + // mirror.set_pointer(reinterpret_cast(opp_vertex)); + mirror = Compact_container + ::s_iterator_to(*(reinterpret_cast(opp_vertex))); + return mirror; + } + void swap_vertices(const int d1, const int d2) + { + std::swap(vertices_[d1], vertices_[d2]); + std::swap(neighbors_[d1], neighbors_[d2]); + } +}; + +} //namespace CGAL + +#endif // CGAL_TDS_FULL_CELL_DEFAULT_STORAGE_POLICY_H diff --git a/src/common/include/gudhi_patches/CGAL/TDS_full_cell_mirror_storage_policy.h b/src/common/include/gudhi_patches/CGAL/TDS_full_cell_mirror_storage_policy.h new file mode 100644 index 00000000..095dfe68 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/TDS_full_cell_mirror_storage_policy.h @@ -0,0 +1,71 @@ +// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France). +// All rights reserved. +// +// This file is part of CGAL (www.cgal.org). +// You can redistribute it and/or modify it under the terms of the GNU +// General Public License as published by the Free Software Foundation, +// either version 3 of the License, or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
+// +// $URL$ +// $Id$ +// +// Author(s) : Samuel Hornus + +#ifndef CGAL_TDS_FULL_CELL_MIRROR_STORAGE_POLICY_H +#define CGAL_TDS_FULL_CELL_MIRROR_STORAGE_POLICY_H + +#include + +namespace CGAL { + +// POLICY TAGS + +struct TDS_full_cell_mirror_storage_policy {}; // Stores the mirror index of all vertices. + +template< typename Vertex_handle, typename Full_cell_handle, typename Maximal_dimension > +struct TFC_data< Vertex_handle, Full_cell_handle, Maximal_dimension, TDS_full_cell_mirror_storage_policy > +: public TFC_data< Vertex_handle, Full_cell_handle, Maximal_dimension, TDS_full_cell_default_storage_policy > +{ + typedef TFC_data< Vertex_handle, Full_cell_handle, Maximal_dimension, TDS_full_cell_default_storage_policy > Base; + typedef typename Base::Vertex_handle_array Vertex_handle_array; + typedef typename Base::Full_cell_handle_array Full_cell_handle_array; + typedef typename internal::S_or_D_array< int, typename Base::Dimen_plus > Int_array; + +private: + Int_array mirror_vertices_; + +public: + TFC_data(const int dmax) + : Base(dmax), mirror_vertices_(dmax+1) + {} + + void set_mirror_index(const int i, const int index) + { + mirror_vertices_[i] = index; + } + int mirror_index(const int i) const + { + return mirror_vertices_[i]; + } + Vertex_handle mirror_vertex(const int i, const int) const + { + return Base::neighbors_[i]->vertex(mirror_index(i)); + } + void swap_vertices(const int d1, const int d2) + { + Base::swap_vertices(d1, d2); + std::swap(mirror_vertices_[d1], mirror_vertices_[d2]); + Base::neighbors_[d1]->set_mirror_index(mirror_vertices_[d1], d1); + Base::neighbors_[d2]->set_mirror_index(mirror_vertices_[d2], d2); + } +}; + +} //namespace CGAL + +#endif // CGAL_TDS_FULL_CELL_MIRROR_STORAGE_POLICY_H diff --git a/src/common/include/gudhi_patches/CGAL/Triangulation.h b/src/common/include/gudhi_patches/CGAL/Triangulation.h new file mode 100644 index 00000000..906df92e --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/Triangulation.h @@ -0,0 +1,1424 @@ +// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France). +// All rights reserved. +// +// This file is part of CGAL (www.cgal.org). +// You can redistribute it and/or modify it under the terms of the GNU +// General Public License as published by the Free Software Foundation, +// either version 3 of the License, or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Samuel Hornus + +#ifndef CGAL_TRIANGULATION_H +#define CGAL_TRIANGULATION_H + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +namespace CGAL { + +// Iterator which iterates over vertex_handle's, but returns a point when +// dereferenced. 
If the current +// vertex_handle vh == vh_where_point_should_be_substituted, it returns +// "subtitute_point", otherwise, it returns vh->point() +template +class Substitute_point_in_vertex_iterator +{ + typedef typename std::iterator_traits::value_type Vertex_handle; + typedef typename Vertex_handle::value_type Vertex; + typedef typename Vertex::Point Point; + +public: + typedef Point const& result_type; // For result_of + + Substitute_point_in_vertex_iterator( + Vertex_handle vh_where_point_should_be_substituted, + Point const *subtitute_point) + : vh_where_point_should_be_substituted_(vh_where_point_should_be_substituted) + , subtitute_point_(subtitute_point) + {} + + result_type operator()(Vertex_handle vh) const + { + if (vh == vh_where_point_should_be_substituted_) + return *subtitute_point_; + else + return vh->point(); + } + +private: + Vertex_handle vh_where_point_should_be_substituted_; + Point const *subtitute_point_; + +}; + + +template < class TriangulationTraits, class TDS_ = Default > +class Triangulation +{ + typedef typename TriangulationTraits::Dimension Maximal_dimension_; + typedef typename Default::Get, + Triangulation_full_cell > + >::type TDS; + typedef Triangulation Self; + +protected: + typedef typename TriangulationTraits::Flat_orientation_d Flat_orientation_d; + typedef typename TriangulationTraits::Construct_flat_orientation_d Construct_flat_orientation_d; + typedef typename TriangulationTraits::In_flat_orientation_d In_flat_orientation_d; + + // Wrapper + struct Coaffine_orientation_d + { + boost::optional* fop; + Construct_flat_orientation_d cfo; + In_flat_orientation_d ifo; + + Coaffine_orientation_d( + boost::optional& x, + Construct_flat_orientation_d const&y, + In_flat_orientation_d const&z) + : fop(&x), cfo(y), ifo(z) {} + + template + CGAL::Orientation operator()(Iter a, Iter b) const + { + if (*fop) + return ifo(fop->get(),a,b); + *fop = cfo(a,b); + CGAL_assertion(ifo(fop->get(),a,b) == CGAL::POSITIVE); + return CGAL::POSITIVE; + } + }; + + void reset_flat_orientation() + { + if (current_dimension() == preset_flat_orientation_.first) + { + CGAL_assertion(preset_flat_orientation_.second != NULL); + flat_orientation_ = *preset_flat_orientation_.second; + } + else + flat_orientation_ = boost::none; + } + + typedef typename TriangulationTraits::Orientation_d + Orientation_d; + +public: + + typedef TriangulationTraits Geom_traits; + typedef TDS Triangulation_ds; + + typedef typename TDS::Vertex Vertex; + typedef typename TDS::Full_cell Full_cell; + typedef typename TDS::Facet Facet; + typedef typename TDS::Face Face; + + typedef Maximal_dimension_ Maximal_dimension; + typedef typename Geom_traits::Point_d Point; + + typedef typename TDS::Vertex_handle Vertex_handle; + typedef typename TDS::Vertex_iterator Vertex_iterator; + typedef typename TDS::Vertex_const_handle Vertex_const_handle; + typedef typename TDS::Vertex_const_iterator Vertex_const_iterator; + + typedef typename TDS::Full_cell_handle Full_cell_handle; + typedef typename TDS::Full_cell_iterator Full_cell_iterator; + typedef typename TDS::Full_cell_const_handle Full_cell_const_handle; + typedef typename TDS::Full_cell_const_iterator Full_cell_const_iterator; + + typedef typename TDS::Facet_iterator Facet_iterator; + + typedef typename TDS::size_type size_type; + typedef typename TDS::difference_type difference_type; + + /// The type of location a new point is found lying on + enum Locate_type + { + ON_VERTEX = 0 // simplex of dimension 0 + , IN_FACE = 1 // simplex of dimension in [ 1, 
|current_dimension()| - 2 ] + , IN_FACET = 2 // simplex of dimension |current_dimension()| - 1 + , IN_FULL_CELL = 3 /// simplex of dimension |current_dimension()| + , OUTSIDE_CONVEX_HULL = 4 + , OUTSIDE_AFFINE_HULL = 5 + }; + + // Finite elements iterators + + class Finiteness_predicate; + + typedef boost::filter_iterator + Finite_vertex_iterator; + typedef boost::filter_iterator + Finite_vertex_const_iterator; + typedef boost::filter_iterator + Finite_full_cell_iterator; + typedef boost::filter_iterator + Finite_full_cell_const_iterator; + typedef boost::filter_iterator + Finite_facet_iterator; + +protected: // DATA MEMBERS + + Triangulation_ds tds_; + const Geom_traits kernel_; + Vertex_handle infinity_; + mutable std::vector orientations_; + mutable boost::optional flat_orientation_; + // The user can specify a Flat_orientation_d object to be used for + // orienting simplices of a specific dimension + // (= preset_flat_orientation_.first) + // preset_flat_orientation_.first = numeric_limits::max() otherwise) + std::pair preset_flat_orientation_; + // for stochastic walk in the locate() function: + mutable Random rng_; +#ifdef CGAL_TRIANGULATION_STATISTICS + mutable unsigned long walk_size_; +#endif + +protected: // HELPER FUNCTIONS + + typedef CGAL::Iterator_project< + typename Full_cell::Vertex_handle_const_iterator, + internal::Triangulation::Point_from_vertex_handle + > Point_const_iterator; + + Point_const_iterator points_begin(Full_cell_const_handle c) const + { return Point_const_iterator(c->vertices_begin()); } + Point_const_iterator points_end(Full_cell_const_handle c) const + { return Point_const_iterator(c->vertices_end()); } + Point_const_iterator points_begin(Full_cell_handle c) const + { return Point_const_iterator(c->vertices_begin()); } + Point_const_iterator points_end(Full_cell_handle c) const + { return Point_const_iterator(c->vertices_end()); } + +public: + + // FACETS OPERATIONS + + Full_cell_handle full_cell(const Facet & f) const + { + return tds().full_cell(f); + } + + int index_of_covertex(const Facet & f) const + { + return tds().index_of_covertex(f); + } + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - UTILITIES + + // A co-dimension 2 sub-simplex. called a Rotor because we can rotate + // the two "covertices" around the sub-simplex. Useful for traversing the + // boundary of a hole. NOT DOCUMENTED + typedef cpp11::tuple Rotor; + + // Commented out because it was causing "internal compiler error" in MSVC + /*Full_cell_handle full_cell(const Rotor & r) const // NOT DOCUMENTED + { + return cpp11::get<0>(r); + } + int index_of_covertex(const Rotor & r) const // NOT DOCUMENTED + { + return cpp11::get<1>(r); + } + int index_of_second_covertex(const Rotor & r) const // NOT DOCUMENTED + { + return cpp11::get<2>(r); + }*/ + Rotor rotate_rotor(Rotor & r) // NOT DOCUMENTED... 
+ { + int opposite = cpp11::get<0>(r)->mirror_index(cpp11::get<1>(r)); + Full_cell_handle s = cpp11::get<0>(r)->neighbor(cpp11::get<1>(r)); + int new_second = s->index(cpp11::get<0>(r)->vertex(cpp11::get<2>(r))); + return Rotor(s, new_second, opposite); + } + + // - - - - - - - - - - - - - - - - - - - - - - - - CREATION / CONSTRUCTORS + + Triangulation(int dim, const Geom_traits &k = Geom_traits()) + : tds_(dim) + , kernel_(k) + , infinity_() + , preset_flat_orientation_((std::numeric_limits::max)(), + (Flat_orientation_d*) NULL) + , rng_((long)0) +#ifdef CGAL_TRIANGULATION_STATISTICS + ,walk_size_(0) +#endif + { + clear(); + } + + // With this constructor, + // the user can specify a Flat_orientation_d object to be used for + // orienting simplices of a specific dimension + // (= preset_flat_orientation_.first) + // It it used for by dark triangulations created by DT::remove + Triangulation( + int dim, + const std::pair &preset_flat_orientation, + const Geom_traits k = Geom_traits()) + : tds_(dim) + , kernel_(k) + , infinity_() + , preset_flat_orientation_(preset_flat_orientation) + , rng_((long)0) +#ifdef CGAL_TRIANGULATION_STATISTICS + ,walk_size_(0) +#endif + { + clear(); + } + + Triangulation(const Triangulation & t2) + : tds_(t2.tds_) + , kernel_(t2.kernel_) + , infinity_() + , preset_flat_orientation_((std::numeric_limits::max)(), + (Flat_orientation_d*) NULL) + , rng_(t2.rng_) +#ifdef CGAL_TRIANGULATION_STATISTICS + ,walk_size_(t2.walk_size_) +#endif + { + // We find the vertex at infinity by scanning the vertices of both + // triangulations. This works because Compact_container garantees that + // the vertices in the copy (*this) are stored in the same order as in + // the original triangulation (t2) + infinity_ = vertices_begin(); + Vertex_const_iterator inf2 = t2.vertices_begin(); + while( inf2 != t2.infinite_vertex() ) + { + ++infinity_; + ++inf2; + } + // A full_cell has at most 1 + maximal_dimension() facets: + orientations_.resize(1 + maximal_dimension()); + // Our coaffine orientation predicates HAS state member variables + reset_flat_orientation(); + } + + ~Triangulation() {} + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ACCESS FUNCTIONS + + /* These three function are no longer needed since we do not use them anymore + in the Delaunay_triangulation::remove. *But*, they may reappear in the future + if we manage to passe the information that flags/TDS_data is available or not + for marking simplices in Delaunay_triangulation::remove. This would be useful + to make it a little faster, instead of binary searching if a simplex is marked + or not... 
+ // NOT DOCUMENTED -- + bool get_visited(Full_cell_handle s) const + { + return tds().get_visited(s); + } + // NOT DOCUMENTED -- + bool get_visited(Full_cell_const_handle s) const + { + return tds().get_visited(s); + } + + // NOT DOCUMENTED -- + void set_visited(Full_cell_handle s, bool b) const + { + tds().set_visited(s, b); + } */ + + Coaffine_orientation_d coaffine_orientation_predicate() const + { + return Coaffine_orientation_d ( + flat_orientation_, + geom_traits().construct_flat_orientation_d_object(), + geom_traits().in_flat_orientation_d_object() + ); + } + + const Triangulation_ds & tds() const + { + return tds_; + } + + Triangulation_ds & tds() + { + return tds_; + } + + const Geom_traits & geom_traits() const + { + return kernel_; + } + + int maximal_dimension() const { return tds().maximal_dimension(); } + int current_dimension() const { return tds().current_dimension(); } + + bool empty() const + { + return current_dimension() == -1; + } + + size_type number_of_vertices() const + { + return tds().number_of_vertices() - 1; + } + + size_type number_of_full_cells() const + { + return tds().number_of_full_cells(); + } + + Vertex_handle infinite_vertex() const + { + return infinity_; + } + + Full_cell_handle infinite_full_cell() const + { + CGAL_assertion(infinite_vertex()->full_cell()->has_vertex(infinite_vertex())); + return infinite_vertex()->full_cell(); + } + +// - - - - - - - - - - - - - - - - - - - - - - - - - NON CONSTANT-TIME ACCESS FUNCTIONS + + size_type number_of_finite_full_cells() const + { + Full_cell_const_iterator s = full_cells_begin(); + size_type result = number_of_full_cells(); + for( ; s != full_cells_end(); ++s ) + { + if( is_infinite(s) ) + --result; + } + return result; + } + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - TRAVERSAL + + Vertex_iterator vertices_begin() { return tds().vertices_begin(); } + Vertex_iterator vertices_end() { return tds().vertices_end(); } + + Vertex_const_iterator vertices_begin() const { return tds().vertices_begin(); } + Vertex_const_iterator vertices_end() const { return tds().vertices_end(); } + + Finite_vertex_iterator finite_vertices_begin() + { return Finite_vertex_iterator(Finiteness_predicate(*this), vertices_begin(), vertices_end()); } + Finite_vertex_iterator finite_vertices_end() + { return Finite_vertex_iterator(Finiteness_predicate(*this), vertices_end(), vertices_end()); } + Finite_vertex_const_iterator finite_vertices_begin() const + { return Finite_vertex_const_iterator(Finiteness_predicate(*this), vertices_begin(), vertices_end()); } + Finite_vertex_const_iterator finite_vertices_end() const + { return Finite_vertex_const_iterator(Finiteness_predicate(*this), vertices_end(), vertices_end()); } + + Full_cell_iterator full_cells_begin() { return tds().full_cells_begin(); } + Full_cell_iterator full_cells_end() { return tds().full_cells_end(); } + + Full_cell_const_iterator full_cells_begin() const { return tds().full_cells_begin(); } + Full_cell_const_iterator full_cells_end() const { return tds().full_cells_end(); } + + Finite_full_cell_iterator finite_full_cells_begin() + { return Finite_full_cell_iterator(Finiteness_predicate(*this), full_cells_begin(), full_cells_end()); } + Finite_full_cell_iterator finite_full_cells_end() + { return Finite_full_cell_iterator(Finiteness_predicate(*this), full_cells_end(), full_cells_end()); } + Finite_full_cell_const_iterator finite_full_cells_begin() const + { return Finite_full_cell_const_iterator(Finiteness_predicate(*this), full_cells_begin(), 
full_cells_end()); } + Finite_full_cell_const_iterator finite_full_cells_end() const + { return Finite_full_cell_const_iterator(Finiteness_predicate(*this), full_cells_end(), full_cells_end()); } + + Facet_iterator facets_begin() { return tds().facets_begin(); } + Facet_iterator facets_end() { return tds().facets_end(); } + Facet_iterator finite_facets_begin() + { return Finite_facet_iterator(Finiteness_predicate(*this), facets_begin(), facets_end()); } + Facet_iterator finite_facets_end() + { return Finite_facet_iterator(Finiteness_predicate(*this), facets_end(), facets_end()); } + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - SOME PREDICATE FUNCTORS + + class Finiteness_predicate + { + const Self & t_; + public: + Finiteness_predicate(const Self & t) : t_(t) {} + template < class T > + bool operator()(const T & t) const + { + return ! t_.is_infinite(t); + } + }; + + class Point_equality_predicate + { + const Point & o_; + public: + Point_equality_predicate(const Point & o) : o_(o) {} + bool operator()(const Point & o) const { return (o == o_ );} + }; + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - SIMPLE QUERIES +/* + bool is_vertex(const Point & p, Vertex_handle & v, Full_cell_handle hint = Full_cell_handle()) const + { + Locate_type lt; + Face f(maximal_dimension()); + Facet ft; + Full_cell_handle s = locate(p, lt, f, ft, hint); + if( ON_VERTEX == lt ) + { + v = s->vertex(f.index(0)); + return true; + } + return false; + } + + bool is_vertex(Vertex_const_handle v) const + { + return tds().is_vertex(v); + } + + bool is_full_cell(Full_cell_const_handle s) const + { + return tds().is_full_cell(s); + } +*/ + + bool is_infinite(Vertex_const_handle v) const + { + CGAL_precondition(Vertex_const_handle() != v); + return (infinite_vertex() == v); + } + + bool is_infinite(const Vertex & v) const /* internal use, not documented */ + { + return (&(*infinite_vertex()) == &v); + } + + bool is_infinite(Full_cell_const_handle s) const + { + CGAL_precondition(Full_cell_const_handle() != s); + return is_infinite(*s); + } + bool is_infinite(const Full_cell & s) const /* internal use, not documented */ + { + for(int i = 0; i <= current_dimension(); ++i) + if( is_infinite(s.vertex(i)) ) + return true; + return false; + } + bool is_infinite(const Facet & ft) const + { + Full_cell_const_handle s = full_cell(ft); + CGAL_precondition(s != Full_cell_const_handle()); + if( is_infinite(s) ) + return (s->vertex(index_of_covertex(ft)) != infinite_vertex()); + return false; + } + + bool is_infinite(const Face & f) const + { + Full_cell_const_handle s = f.full_cell(); + CGAL_precondition(s != Full_cell_const_handle()); + if( is_infinite(s) ) + { + Vertex_handle v; + for( int i(0); i<= f.face_dimension(); ++i) + if ( is_infinite( f.vertex(i) )) return true; + } + return false; + } + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ELEMENT GATHERING + + + template< typename OutputIterator > + OutputIterator incident_full_cells(const Face & f, OutputIterator out) const + { + return tds().incident_full_cells(f, out); + } + template< typename OutputIterator > + OutputIterator incident_full_cells(Vertex_const_handle v, OutputIterator out) const + { + return tds().incident_full_cells(v, out); + } + template< typename OutputIterator > + OutputIterator star(const Face & f, OutputIterator out) const + { + return tds().star(f, out); + } + + template< typename OutputIterator > + OutputIterator incident_faces(Vertex_const_handle v, int d, OutputIterator out) const + { + return 
tds().incident_faces(v, d, out); + } + /* + template< typename OutputIterator, class Comparator > + OutputIterator incident_upper_faces( Vertex_const_handle v, int d, + OutputIterator out, Comparator cmp = Comparator()) + { + return tds().incident_upper_faces(v, d, out, cmp); + } + template< typename OutputIterator > + OutputIterator incident_upper_faces( Vertex_const_handle v, int d, + OutputIterator out) + { // FIXME: uncomment this function, since it uses a comparator specific to + // *geometric* triangulation (taking infinite vertex into account) + internal::Triangulation::Compare_vertices_for_upper_face cmp(*this); + return tds().incident_upper_faces(v, d, out, cmp); + } + */ + Orientation orientation(Full_cell_const_handle s, bool in_is_valid = false) const + { + if( ! in_is_valid ) + CGAL_assertion( ! is_infinite(s) ); + if( 0 == current_dimension() ) + return POSITIVE; + if( current_dimension() == maximal_dimension() ) + { + Orientation_d ori = geom_traits().orientation_d_object(); + return ori(points_begin(s), points_begin(s) + 1 + current_dimension()); + } + else + { + return coaffine_orientation_predicate()(points_begin(s), points_begin(s) + 1 + current_dimension()); + } + } + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - UPDATE OPERATIONS + + void clear() + { + tds_.clear(); + infinity_ = tds().insert_increase_dimension(); + // A full_cell has at most 1 + maximal_dimension() facets: + orientations_.resize(1 + maximal_dimension()); + // Our coaffine orientation predicates HAS state member variables + reset_flat_orientation(); +#ifdef CGAL_TRIANGULATION_STATISTICS + walk_size_ = 0; +#endif + } + + void set_current_dimension(int d) + { + tds().set_current_dimension(d); + } + + Full_cell_handle new_full_cell() + { + return tds().new_full_cell(); + } + + Vertex_handle new_vertex() + { + return tds().new_vertex(); + } + + Vertex_handle new_vertex(const Point & p) + { + return tds().new_vertex(p); + } + + void set_neighbors(Full_cell_handle s, int i, Full_cell_handle s1, int j) + { + tds().set_neighbors(s, i, s1, j); + } + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - VALIDITY + + bool is_valid(bool = false, int = 0) const; + bool are_incident_full_cells_valid(Vertex_const_handle, bool = false, int = 0) const; + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - POINT LOCATION + +protected: + template< typename OrientationPredicate > + Full_cell_handle do_locate(const Point &, Locate_type &, Face &, Facet &, + Full_cell_handle start, + const OrientationPredicate & o) const; +public: + Full_cell_handle locate(const Point &, Locate_type &, Face &, Facet &, + Full_cell_handle start = Full_cell_handle()) const; + Full_cell_handle locate(const Point &, Locate_type &, Face &, Facet &, + Vertex_handle) const; + Full_cell_handle locate(const Point & p, Full_cell_handle s = Full_cell_handle()) const; + Full_cell_handle locate(const Point & p, Vertex_handle v) const; + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - REMOVALS + + Vertex_handle contract_face(const Point &, const Face &); + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - POINT INSERTION + + template< typename ForwardIterator > + size_type insert(ForwardIterator start, ForwardIterator end) + { + size_type n = number_of_vertices(); + std::vector points(start, end); + spatial_sort(points.begin(), points.end(), geom_traits()); + Full_cell_handle hint = Full_cell_handle(); + typename std::vector::const_iterator s = points.begin(); + while( 
s != points.end() ) + { + hint = insert(*s++, hint)->full_cell(); + } + return number_of_vertices() - n; + } + Vertex_handle insert(const Point &, Locate_type, const Face &, const Facet &, Full_cell_handle); + Vertex_handle insert(const Point &, Full_cell_handle start = Full_cell_handle()); + Vertex_handle insert(const Point &, Vertex_handle); + template< typename ForwardIterator > + Vertex_handle insert_in_hole(const Point & p, ForwardIterator start, ForwardIterator end, const Facet & ft) + { + Emptyset_iterator out; + return insert_in_hole(p, start, end, ft, out); + } + template< typename ForwardIterator, typename OutputIterator > + Vertex_handle insert_in_hole(const Point & p, ForwardIterator start, ForwardIterator end, const Facet & ft, + OutputIterator out) + { + Vertex_handle v = tds().insert_in_hole(start, end, ft, out); + v->set_point(p); + return v; + } + Vertex_handle insert_in_face(const Point &, const Face &); + Vertex_handle insert_in_facet(const Point &, const Facet &); + Vertex_handle insert_in_full_cell(const Point &, Full_cell_handle); + Vertex_handle insert_outside_convex_hull_1(const Point &, Full_cell_handle); + Vertex_handle insert_outside_convex_hull(const Point &, Full_cell_handle); + Vertex_handle insert_outside_affine_hull(const Point &); + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - FACET-TRAVERSAL PREDICATES + + template< typename OrientationPredicate > + class Outside_convex_hull_traversal_predicate + { + Triangulation & t_; + const Point & p_; + OrientationPredicate const& ori_; + int cur_dim_; + public: + Outside_convex_hull_traversal_predicate(Triangulation & t, const Point & p, + OrientationPredicate const& ori) + : t_(t), p_(p), ori_(ori), cur_dim_(t.current_dimension()) {} + // FUTURE change parameter to const reference + bool operator()(Facet f) const + { + Full_cell_handle s = t_.full_cell(f); + const int i = t_.index_of_covertex(f); + Full_cell_handle n = s->neighbor(i); + if( ! t_.is_infinite(n) ) + return false; + int inf_v_index = n->index(t_.infinite_vertex()); + n->vertex(inf_v_index)->set_point(p_); + bool ok = (POSITIVE == ori_(t_.points_begin(n), t_.points_begin(n) + cur_dim_ + 1)); + return ok; + } + }; + + // make sure all full_cells have positive orientation + void reorient_full_cells(); + +protected: + // This is used in the |remove(v)| member function to manage sets of Full_cell_handles + template< typename FCH > + struct Full_cell_set : public std::vector + { + typedef std::vector Base_set; + using Base_set::begin; + using Base_set::end; + void make_searchable() + { // sort the full cell handles + std::sort(begin(), end()); + } + bool contains(const FCH & fch) const + { + return std::binary_search(begin(), end(), fch); + } + bool contains_1st_and_not_2nd(const FCH & fst, const FCH & snd) const + { + return ( ! contains(snd) ) && ( contains(fst) ); + } + }; + + void display_all_full_cells__debugging() const + { + std::cerr << "ALL FULL CELLS:" << std::endl; + for (Full_cell_const_iterator cit = full_cells_begin() ; + cit != full_cells_end() ; ++cit ) + { + std::cerr << std::hex << &*cit << ": "; + for (int jj = 0 ; jj <= current_dimension() ; ++jj) + std::cerr << (is_infinite(cit->vertex(jj)) ? 
0xFFFFFFFF : (unsigned int)&*cit->vertex(jj)) << " - "; + std::cerr << std::dec << std::endl; + } + std::cerr << std::endl; + } + + +}; // Triangulation<...> + +// = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = + +// CLASS MEMBER FUNCTIONS + +template < class TT, class TDS > +void +Triangulation +::reorient_full_cells() +{ + if( current_dimension() < 1 ) + return; + + Full_cell_iterator sit = full_cells_begin(); + Full_cell_iterator send = full_cells_end(); + for ( ; sit != send ; ++sit) + { + if( ! (is_infinite(sit) && (1 == current_dimension())) ) + { + sit->swap_vertices(current_dimension() - 1, current_dimension()); + } + } +} + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +// - - - - - - - - - - - - - - - - - - - - - - - - THE REMOVAL METHODS + +template < class TT, class TDS > +typename Triangulation::Vertex_handle +Triangulation +::contract_face(const Point & p, const Face & f) +{ + CGAL_precondition( ! is_infinite(f) ); + Vertex_handle v = tds().contract_face(f); + v->set_point(p); + CGAL_expensive_postcondition_msg(are_incident_full_cells_valid(v), "new point is not where it should be"); + return v; +} + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +// - - - - - - - - - - - - - - - - - - - - - - - - THE INSERTION METHODS + +template < class TT, class TDS > +typename Triangulation::Vertex_handle +Triangulation +::insert(const Point & p, Locate_type lt, const Face & f, const Facet & ft, Full_cell_handle s) +{ + switch( lt ) + { + case IN_FULL_CELL: + return insert_in_full_cell(p, s); + break; + case OUTSIDE_CONVEX_HULL: + return insert_outside_convex_hull(p, s); + break; + case OUTSIDE_AFFINE_HULL: + return insert_outside_affine_hull(p); + break; + case IN_FACET: + { + return insert_in_facet(p, ft); + break; + } + case IN_FACE: + return insert_in_face(p, f); + break; + case ON_VERTEX: + s->vertex(f.index(0))->set_point(p); + return s->vertex(f.index(0)); + break; + } + CGAL_assertion(false); + return Vertex_handle(); +} + +template < class TT, class TDS > +typename Triangulation::Vertex_handle +Triangulation +::insert(const Point & p, Full_cell_handle start) +{ + Locate_type lt; + Face f(maximal_dimension()); + Facet ft; + Full_cell_handle s = locate(p, lt, f, ft, start); + return insert(p, lt, f, ft, s); +} + +template < class TT, class TDS > +typename Triangulation::Vertex_handle +Triangulation +::insert(const Point & p, Vertex_handle v) +{ + if( Vertex_handle() == v ) + v = infinite_vertex(); + return insert(p, v->full_cell()); +} + +template < class TT, class TDS > +typename Triangulation::Vertex_handle +Triangulation +::insert_in_face(const Point & p, const Face & f) +{ + CGAL_precondition( ! is_infinite(f) ); + Vertex_handle v = tds().insert_in_face(f); + v->set_point(p); + return v; +} + +template < class TT, class TDS > +typename Triangulation::Vertex_handle +Triangulation +::insert_in_facet(const Point & p, const Facet & ft) +{ + CGAL_precondition( ! is_infinite(ft) ); + Vertex_handle v = tds().insert_in_facet(ft); + v->set_point(p); + return v; +} + +template < class TT, class TDS > +typename Triangulation::Vertex_handle +Triangulation +::insert_in_full_cell(const Point & p, Full_cell_handle s) +{ + CGAL_precondition( ! is_infinite(s) ); + Vertex_handle v = tds().insert_in_full_cell(s); + v->set_point(p); + return v; +} + +// NOT DOCUMENTED... 
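// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
// Editor's sketch (illustrative only, not part of this patch): how the
// insertion and point-location interface declared above is typically used.
// It assumes the dD kernel CGAL::Epick_d and the default TDS; the names
// K, T, tri and gen, and the choice of 100 random points in a cube, are
// assumptions made for the example.
//
//     #include <CGAL/Epick_d.h>
//     #include <CGAL/Triangulation.h>
//     #include <CGAL/point_generators_d.h>
//     #include <vector>
//
//     typedef CGAL::Epick_d< CGAL::Dimension_tag<3> > K;
//     typedef CGAL::Triangulation<K>                  T;
//
//     T tri(3);                                  // maximal dimension 3
//     std::vector<T::Point> points;
//     CGAL::Random_points_in_cube_d<T::Point> gen(3, 1.0);
//     for (int i = 0; i < 100; ++i) points.push_back(*gen++);
//     tri.insert(points.begin(), points.end());  // spatial sort, then insert one by one
//
//     T::Locate_type lt;
//     T::Face f(tri.maximal_dimension());
//     T::Facet ft;
//     T::Full_cell_handle c = tri.locate(points[0], lt, f, ft);
//     // lt should be T::ON_VERTEX here, since points[0] is already a vertex;
//     // other possible values are IN_FACET, IN_FACE, IN_FULL_CELL,
//     // OUTSIDE_CONVEX_HULL and OUTSIDE_AFFINE_HULL.
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -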
+template < class TT, class TDS > +typename Triangulation::Vertex_handle +Triangulation +::insert_outside_convex_hull_1(const Point & p, Full_cell_handle s) +{ + // This is a special case for dimension 1, because in that case, the right + // infinite full_cell is not correctly oriented... (sice its first vertex is the + // infinite one... + CGAL_precondition( is_infinite(s) ); + CGAL_precondition( 1 == current_dimension() ); + Vertex_handle v = tds().insert_in_full_cell(s); + v->set_point(p); + return v; +} + +template < class TT, class TDS > +typename Triangulation::Vertex_handle +Triangulation +::insert_outside_convex_hull(const Point & p, Full_cell_handle s) +{ + if( 1 == current_dimension() ) + { + return insert_outside_convex_hull_1(p, s); + } + CGAL_precondition( is_infinite(s) ); + CGAL_assertion( current_dimension() >= 2 ); + std::vector simps; + simps.reserve(64); + std::back_insert_iterator > out(simps); + if( current_dimension() < maximal_dimension() ) + { + Coaffine_orientation_d ori = coaffine_orientation_predicate(); + Outside_convex_hull_traversal_predicate + ochtp(*this, p, ori); + tds().gather_full_cells(s, ochtp, out); + } + else + { + Orientation_d ori = geom_traits().orientation_d_object(); + Outside_convex_hull_traversal_predicate + ochtp(*this, p, ori); + tds().gather_full_cells(s, ochtp, out); + } + int inf_v_index = s->index(infinite_vertex()); + Vertex_handle v = insert_in_hole( + p, simps.begin(), simps.end(), Facet(s, inf_v_index)); + return v; +} + +template < class TT, class TDS > +typename Triangulation::Vertex_handle +Triangulation +::insert_outside_affine_hull(const Point & p) +{ + CGAL_precondition( current_dimension() < maximal_dimension() ); + Vertex_handle v = tds().insert_increase_dimension(infinite_vertex()); + // reset the orientation predicate: + reset_flat_orientation(); + v->set_point(p); + if( current_dimension() >= 1 ) + { + Full_cell_handle inf_v_cell = infinite_vertex()->full_cell(); + int inf_v_index = inf_v_cell->index(infinite_vertex()); + Full_cell_handle s = inf_v_cell->neighbor(inf_v_index); + Orientation o = orientation(s); + CGAL_assertion( COPLANAR != o ); + if( NEGATIVE == o ) + reorient_full_cells(); + + + // We just inserted the second finite point and the right infinite + // cell is like : (inf_v, v), but we want it to be (v, inf_v) to be + // consistent with the rest of the cells + if (current_dimension() == 1) + { + // Is "inf_v_cell" the right infinite cell? + // Then inf_v_index should be 1 + if (inf_v_cell->neighbor(inf_v_index)->index(inf_v_cell) == 0 + && inf_v_index == 0) + { + inf_v_cell->swap_vertices( + current_dimension() - 1, current_dimension()); + } + // Otherwise, let's find the right infinite cell + else + { + inf_v_cell = inf_v_cell->neighbor((inf_v_index + 1) % 2); + inf_v_index = inf_v_cell->index(infinite_vertex()); + // Is "inf_v_cell" the right infinite cell? + // Then inf_v_index should be 1 + if (inf_v_cell->neighbor(inf_v_index)->index(inf_v_cell) == 0 + && inf_v_index == 0) + { + inf_v_cell->swap_vertices( + current_dimension() - 1, current_dimension()); + } + } + } + } + return v; +} + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +// - - - - - - - - - - - - - - - - - - - - THE MAIN LOCATE(...) 
FUNCTION + +template < class TT, class TDS > +template< typename OrientationPredicate > +typename Triangulation::Full_cell_handle +Triangulation +::do_locate(const Point & p, // query point + Locate_type & loc_type,// type of result (full_cell, face, vertex) + Face & face,// the face containing the query in its interior (when appropriate) + Facet & facet,// the facet containing the query in its interior (when appropriate) + Full_cell_handle start, // starting full_cell for the walk + OrientationPredicate const& orientation_pred + ) const +{ + const int cur_dim = current_dimension(); + + if( cur_dim == -1 ) + { + loc_type = OUTSIDE_AFFINE_HULL; + return Full_cell_handle(); + } + else if( cur_dim == 0 ) + { + Vertex_handle vit = infinite_full_cell()->neighbor(0)->vertex(0); + if( EQUAL != geom_traits().compare_lexicographically_d_object()(p, vit->point()) ) + { + loc_type = OUTSIDE_AFFINE_HULL; + return Full_cell_handle(); + } + else + { + loc_type = ON_VERTEX; + face.set_full_cell(vit->full_cell()); + face.set_index(0, 0); + return vit->full_cell(); + } + } + + Full_cell_handle s; + + // if we don't know where to start, we start from any bounded full_cell + if( Full_cell_handle() == start ) + { + // THE HACK THAT NOBODY SHOULD DO... BUT DIFFICULT TO WORK AROUND + // THIS... TODO: WORK AROUND IT + Full_cell_handle inf_c = const_cast(this)->infinite_full_cell(); + int inf_v_index = inf_c->index(infinite_vertex()); + s = inf_c->neighbor(inf_v_index); + } + else + { + s = start; + if( is_infinite(s) ) + { + int inf_v_index = s->index(infinite_vertex()); + s = s->neighbor(inf_v_index); + } + } + + // Check if query |p| is outside the affine hull + if( cur_dim < maximal_dimension() ) + { + if( ! geom_traits().contained_in_affine_hull_d_object()( + points_begin(s), + points_begin(s) + current_dimension() + 1, + p) ) + { + loc_type = OUTSIDE_AFFINE_HULL; + return Full_cell_handle(); + } + } + + // we remember the |previous|ly visited full_cell to avoid the evaluation + // of one |orientation| predicate + Full_cell_handle previous = Full_cell_handle(); + bool full_cell_not_found = true; + while(full_cell_not_found) // we walk until we locate the query point |p| + { + #ifdef CGAL_TRIANGULATION_STATISTICS + ++walk_size_; + #endif + // For the remembering stochastic walk, we need to start trying + // with a random index: + int j, i = rng_.get_int(0, cur_dim); + // we check |p| against all the full_cell's hyperplanes in turn + + for(j = 0; j <= cur_dim; ++j, i = (i + 1) % (cur_dim + 1) ) + { + Full_cell_handle next = s->neighbor(i); + if( previous == next ) + { // no need to compute the orientation, we already know it + orientations_[i] = POSITIVE; + continue; // go to next full_cell's facet + } + + Substitute_point_in_vertex_iterator< + typename Full_cell::Vertex_handle_const_iterator> + spivi(s->vertex(i), &p); + + orientations_[i] = orientation_pred( + boost::make_transform_iterator(s->vertices_begin(), spivi), + boost::make_transform_iterator(s->vertices_begin() + cur_dim + 1, + spivi)); + + if( orientations_[i] != NEGATIVE ) + { + // from this facet's point of view, we are inside the + // full_cell or on its boundary, so we continue to next facet + continue; + } + + // At this point, we know that we have to jump to the |next| + // full_cell because orientation_[i] == NEGATIVE + previous = s; + s = next; + if( is_infinite(next) ) + { // we have arrived OUTSIDE the convex hull of the triangulation, + // so we stop the search + full_cell_not_found = false; + loc_type = OUTSIDE_CONVEX_HULL; + 
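        // (|s| is now the infinite full_cell we just stepped into; it is
        //  recorded in |face| below and returned to the caller, so that an
        //  insertion can start directly from it.)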
face.set_full_cell(s); + } + break; + } // end of the 'for' loop + if( ( cur_dim + 1 ) == j ) // we found the full_cell containing |p| + full_cell_not_found = false; + } + // Here, we know in which full_cell |p| is in. + // We now check more precisely where |p| landed: + // vertex, facet, face or full_cell. + if( ! is_infinite(s) ) + { + face.set_full_cell(s); + int num(0); + int verts(0); + for(int i = 0; i < cur_dim; ++i) + { + if( orientations_[i] == COPLANAR ) + { + ++num; + facet = Facet(s, i); + } + else + face.set_index(verts++, i); + } + //-- We could put the if{}else{} below in the loop above, but then we would + // need to test if (verts < cur_dim) many times... we do it only once + // here: + if( orientations_[cur_dim] == COPLANAR ) + { + ++num; + facet = Facet(s, cur_dim); + } + else if( verts < cur_dim ) + face.set_index(verts, cur_dim); + //-- end of remark above // + if( 0 == num ) + { + loc_type = IN_FULL_CELL; + face.clear(); + } + else if( cur_dim == num ) + loc_type = ON_VERTEX; + else if( 1 == num ) + loc_type = IN_FACET; + else + loc_type = IN_FACE; + } + return s; +} + +template < class TT, class TDS > +typename Triangulation::Full_cell_handle +Triangulation +::locate( const Point & p, // query point + Locate_type & loc_type,// type of result (full_cell, face, vertex) + Face & face,// the face containing the query in its interior (when appropriate) + Facet & facet,// the facet containing the query in its interior (when appropriate) + Full_cell_handle start// starting full_cell for the walk + ) const +{ + if( current_dimension() == maximal_dimension() ) + { + Orientation_d ori = geom_traits().orientation_d_object(); + return do_locate(p, loc_type, face, facet, start, ori); + } + else + return do_locate(p, loc_type, face, facet, start, coaffine_orientation_predicate()); +} + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +// - - - - - - - - - - - - - - - - - - - - the locate(...) variants + +template < class TT, class TDS > +typename Triangulation::Full_cell_handle +Triangulation +::locate( const Point & p, + Locate_type & loc_type, + Face & face, + Facet & facet, + Vertex_handle start) const +{ + if( Vertex_handle() == start ) + start = infinite_vertex(); + return locate(p, loc_type, face, facet, start->full_cell()); +} + +template < class TT, class TDS > +typename Triangulation::Full_cell_handle +Triangulation +::locate(const Point & p, Full_cell_handle s) const +{ + Locate_type lt; + Face face(maximal_dimension()); + Facet facet; + return locate(p, lt, face, facet, s); +} + +template < class TT, class TDS > +typename Triangulation::Full_cell_handle +Triangulation +::locate(const Point & p, Vertex_handle v) const +{ + if( Vertex_handle() != v ) + v = infinite_vertex(); + return this->locate(p, v->full_cell()); +} + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - VALIDITY + +template < class TT, class TDS > +bool +Triangulation +::is_valid(bool verbose, int level) const +{ + if( ! 
tds().is_valid(verbose, level) ) + return false; + + Full_cell_const_iterator c; + if( current_dimension() < 0 ) + return true; + Orientation o; + for( c = full_cells_begin(); c != full_cells_end(); ++c ) + { + if( is_infinite(c) ) + { + if( current_dimension() > 1 ) + { + int i = c->index( infinite_vertex() ); + Full_cell_handle n = c->neighbor(i); + infinite_vertex()->set_point(n->vertex(c->mirror_index(i))->point()); + o = - orientation(c, true); + } + else + o = POSITIVE; + } + else + o = orientation(c, true); + if( NEGATIVE == o ) + { + if( verbose ) CGAL_warning_msg(false, "full_cell is not correctly oriented"); + return false; + } + if( COPLANAR == o ) + { + if( verbose ) CGAL_warning_msg(false, "full_cell is flat"); + return false; + } + } + return true; +} + +template < class TT, class TDS > +bool Triangulation::are_incident_full_cells_valid(Vertex_const_handle v, bool verbose, int) const +{ + if( current_dimension() <= 0 ) + return true; + typedef std::vector Simps; + Simps simps; + simps.reserve(64); + std::back_insert_iterator out(simps); + incident_full_cells(v, out); + typename Simps::const_iterator sit = simps.begin(); + for( ; sit != simps.end(); ++sit ) + { + if( is_infinite(*sit) ) + continue; + Orientation o = orientation(*sit); + if( NEGATIVE == o ) + { + if( verbose ) CGAL_warning_msg(false, "full_cell is not correctly oriented"); + return false; + } + if( COPLANAR == o ) + { + if( verbose ) CGAL_warning_msg(false, "full_cell is flat"); + return false; + } + } + return true; +} + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + +// FUNCTIONS THAT ARE NOT MEMBER FUNCTIONS: + +template < class TT, class TDS > +std::istream & +operator>>(std::istream & is, Triangulation & tr) + // reads : + // - the dimensions (maximal and current) + // - the number of finite vertices + // - the non combinatorial information on vertices (point, etc) + // - the number of full_cells + // - the full_cells by the indices of their vertices in the preceding list + // of vertices, plus the non combinatorial information on each full_cell + // - the neighbors of each full_cell by their index in the preceding list +{ + typedef Triangulation T; + typedef typename T::Vertex_handle Vertex_handle; + + // read current dimension and number of vertices + size_t n; + int cd; + if( is_ascii(is) ) + is >> cd >> n; + else + { + read(is, cd); + read(is, n, io_Read_write()); + } + + CGAL_assertion_msg( cd <= tr.maximal_dimension(), "input Triangulation has too high dimension"); + + tr.clear(); + tr.set_current_dimension(cd); + + if( n == 0 ) + return is; + + std::vector vertices; + vertices.resize(n+1); + vertices[0] = tr.infinite_vertex(); + is >> (*vertices[0]); + + // read the vertices: + size_t i(1); + while( i <= n ) + { + vertices[i] = tr.new_vertex(); + is >> (*vertices[i]); // read a vertex + ++i; + } + + // now, read the combinatorial information + return tr.tds().read_full_cells(is, vertices); +} + +template < class TT, class TDS > +std::ostream & +operator<<(std::ostream & os, const Triangulation & tr) + // writes : + // - the dimensions (maximal and current) + // - the number of finite vertices + // - the non combinatorial information on vertices (point, etc) + // - the number of full_cells + // - the full_cells by the indices of their vertices in the preceding list + // of vertices, plus the non combinatorial information on each full_cell + // - the neighbors of each full_cell by their index in the preceding list +{ + typedef Triangulation T; + typedef typename 
T::Vertex_const_handle Vertex_handle; + typedef typename T::Vertex_const_iterator Vertex_iterator; + + // outputs dimensions and number of vertices + size_t n = tr.number_of_vertices(); + if( is_ascii(os) ) + os << tr.current_dimension() << std::endl << n << std::endl; + else + { + write(os, tr.current_dimension()); + write(os, n, io_Read_write()); + } + + if( n == 0 ) + return os; + + size_t i(0); + // write the vertices + std::map index_of_vertex; + + // infinite vertex has index 0 (among all the vertices) + index_of_vertex[tr.infinite_vertex()] = i++; + os << *tr.infinite_vertex(); + for( Vertex_iterator it = tr.vertices_begin(); it != tr.vertices_end(); ++it ) + { + if( tr.is_infinite(it) ) + continue; + os << *it; // write the vertex + index_of_vertex[it] = i++; + } + CGAL_assertion( i == n+1 ); + + // output the combinatorial information + return tr.tds().write_full_cells(os, index_of_vertex); +} + +} //namespace CGAL + +#endif // CGAL_TRIANGULATION_H diff --git a/src/common/include/gudhi_patches/CGAL/Triangulation_data_structure.h b/src/common/include/gudhi_patches/CGAL/Triangulation_data_structure.h new file mode 100644 index 00000000..2493c712 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/Triangulation_data_structure.h @@ -0,0 +1,1603 @@ +// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France). +// All rights reserved. +// +// This file is part of CGAL (www.cgal.org). +// You can redistribute it and/or modify it under the terms of the GNU +// General Public License as published by the Free Software Foundation, +// either version 3 of the License, or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Samuel Hornus + +#ifndef CGAL_TRIANGULATION_DATA_STRUCTURE_H +#define CGAL_TRIANGULATION_DATA_STRUCTURE_H + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +namespace CGAL { + +template< class Dimen, + class Vb = Default, + class Fcb = Default > +class Triangulation_data_structure +{ + typedef Triangulation_data_structure Self; + typedef typename Default::Get >::type V_base; + typedef typename Default::Get >::type FC_base; + +public: + typedef typename V_base::template Rebind_TDS::Other Vertex; /* Concept */ + typedef typename FC_base::template Rebind_TDS::Other Full_cell; /* Concept */ + + // Tools to change the Vertex and Cell types of the TDS. + template < typename Vb2 > + struct Rebind_vertex { + typedef Triangulation_data_structure Other; + }; + + template < typename Fcb2 > + struct Rebind_full_cell { + typedef Triangulation_data_structure Other; + }; + + + + // we want to store an object of this class in every Full_cell: + class Full_cell_data + { + unsigned char bits_; + public: + Full_cell_data() : bits_(0) {} + Full_cell_data(const Full_cell_data & fcd) : bits_(fcd.bits_) {} + + void clear() { bits_ = 0; } + void mark_visited() { bits_ = 1; } + void clear_visited() { bits_ = 0; } + + bool is_clear() const { return bits_ == 0; } + bool is_visited() const { return bits_ == 1; } + // WARNING: if we use more bits and several bits can be set at once, + // then make sure to use bitwise operation above, instead of direct + // affectation. 
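    // These flag bits back the get_visited()/set_visited() helpers defined
    // further down; they are used to tag full cells during
    // gather_full_cells(), insert_in_tagged_hole() and clear_visited_marks().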
+ }; + +protected: + typedef Compact_container Vertex_container; + typedef Compact_container Full_cell_container; + +public: + typedef Dimen Maximal_dimension; + + typedef typename Vertex_container::size_type size_type; /* Concept */ + typedef typename Vertex_container::difference_type difference_type; /* Concept */ + + typedef typename Vertex_container::iterator Vertex_handle; /* Concept */ + typedef typename Vertex_container::iterator Vertex_iterator; /* Concept */ + typedef typename Vertex_container::const_iterator Vertex_const_handle; + typedef typename Vertex_container::const_iterator Vertex_const_iterator; + + typedef typename Full_cell_container::iterator Full_cell_handle; /* Concept */ + typedef typename Full_cell_container::iterator Full_cell_iterator; /* Concept */ + typedef typename Full_cell_container::const_iterator Full_cell_const_handle; + typedef typename Full_cell_container::const_iterator Full_cell_const_iterator; + + typedef internal::Triangulation:: + Triangulation_ds_facet_iterator Facet_iterator; /* Concept */ + + /* The 2 types defined below, |Facet| and |Rotor| are used when traversing + the boundary `B' of the union of a set of full cells. |Rotor| makes it + easy to rotate around itself, in the search of neighbors in `B' (see + |rotate_rotor| and |insert_in_tagged_hole|) */ + + // A co-dimension 1 sub-simplex. + class Facet /* Concept */ + { + Full_cell_handle full_cell_; + int index_of_covertex_; + public: + Facet() : full_cell_(), index_of_covertex_(0) {} + Facet(Full_cell_handle f, int i) : full_cell_(f), index_of_covertex_(i) {} + Full_cell_handle full_cell() const { return full_cell_; } + int index_of_covertex() const { return index_of_covertex_; } + }; + + // A co-dimension 2 sub-simplex. called a Rotor because we can rotate + // the two "covertices" around the sub-simplex. Useful for traversing the + // boundary of a hole. 
NOT DOCUMENTED + class Rotor : public Facet + { + int index_of_second_covertex_; + public: + Rotor() : Facet(), index_of_second_covertex_(0) {} + Rotor(Full_cell_handle f, int first, int second) : Facet(f, first), index_of_second_covertex_(second) {} + int index_of_second_covertex() const { return index_of_second_covertex_; } + }; + + typedef Triangulation_face Face; /* Concept */ + +protected: // DATA MEMBERS + + int dmax_, dcur_; // dimension of the current triangulation + Vertex_container vertices_; // list of all vertices + Full_cell_container full_cells_; // list of all full cells + +private: + + void clean_dynamic_memory() + { + vertices_.clear(); + full_cells_.clear(); + } + + template < class Dim_tag > + struct get_maximal_dimension + { + static int value(int D) { return D; } + }; + // specialization + template < int D > + struct get_maximal_dimension > + { + static int value(int) { return D; } + }; + +public: + Triangulation_data_structure( int dim=0) /* Concept */ + : dmax_(get_maximal_dimension::value(dim)), dcur_(-2), + vertices_(), full_cells_() + { + CGAL_assertion_msg(dmax_ > 0, "maximal dimension must be positive."); + } + + ~Triangulation_data_structure() + { + clean_dynamic_memory(); + } + + Triangulation_data_structure(const Triangulation_data_structure & tds) + : dmax_(tds.dmax_), dcur_(tds.dcur_), + vertices_(tds.vertices_), full_cells_(tds.full_cells_) + { + typedef std::map V_map; + typedef std::map C_map; + V_map vmap; + C_map cmap; + Vertex_const_iterator vfrom = tds.vertices_begin(); + Vertex_iterator vto = vertices_begin(); + Full_cell_const_iterator cfrom = tds.full_cells_begin(); + Full_cell_iterator cto = full_cells_begin(); + while( vfrom != tds.vertices_end() ) + vmap[vfrom++] = vto++; + while( cfrom != tds.full_cells_end() ) + cmap[cfrom++] = cto++; + cto = full_cells_begin(); + while( cto != full_cells_end() ) + { + for( int i = 0; i <= (std::max)(0, current_dimension()); ++i ) + { + associate_vertex_with_full_cell(cto, i, vmap[cto->vertex(i)]); + cto->set_neighbor(i, cmap[cto->neighbor(i)]); + } + ++cto; + } + } + + // QUERIES + +protected: + + bool check_range(int i) const + { + if( current_dimension() < 0 ) + { + return (0 == i); + } + return ( (0 <= i) && (i <= current_dimension()) ); + } + +public: + + /* returns the current dimension of the full cells in the triangulation. 
*/ + int maximal_dimension() const { return dmax_; } /* Concept */ + int current_dimension() const { return dcur_; } /* Concept */ + + size_type number_of_vertices() const /* Concept */ + { + return this->vertices_.size(); + } + size_type number_of_full_cells() const /* Concept */ + { + return this->full_cells_.size(); + } + + bool empty() const /* Concept */ + { + return current_dimension() == -2; + } + + Vertex_container & vertices() { return vertices_; } + const Vertex_container & vertices() const { return vertices_; } + Full_cell_container & full_cells() { return full_cells_; } + const Full_cell_container & full_cells() const { return full_cells_; } + + Vertex_handle vertex(Full_cell_handle s, int i) const /* Concept */ + { + CGAL_precondition(s != Full_cell_handle() && check_range(i)); + return s->vertex(i); + } + + Vertex_const_handle vertex(Full_cell_const_handle s, int i) const /* Concept */ + { + CGAL_precondition(s != Full_cell_handle() && check_range(i)); + return s->vertex(i); + } + + bool is_vertex(Vertex_const_handle v) const /* Concept */ + { + if( Vertex_const_handle() == v ) + return false; + Vertex_const_iterator vit = vertices_begin(); + while( vit != vertices_end() && ( v != vit ) ) + ++vit; + return v == vit; + } + + bool is_full_cell(Full_cell_const_handle s) const /* Concept */ + { + if( Full_cell_const_handle() == s ) + return false; + Full_cell_const_iterator sit = full_cells_begin(); + while( sit != full_cells_end() && ( s != sit ) ) + ++sit; + return s == sit; + } + + Full_cell_handle full_cell(Vertex_handle v) const /* Concept */ + { + CGAL_precondition(v != Vertex_handle()); + return v->full_cell(); + } + + Full_cell_const_handle full_cell(Vertex_const_handle v) const /* Concept */ + { + CGAL_precondition(Vertex_const_handle() != v); + return v->full_cell(); + } + + Full_cell_handle neighbor(Full_cell_handle s, int i) const /* Concept */ + { + CGAL_precondition(Full_cell_handle() != s && check_range(i)); + return s->neighbor(i); + } + + Full_cell_const_handle neighbor(Full_cell_const_handle s, int i) const/* Concept */ + { + CGAL_precondition(Full_cell_const_handle() != s && check_range(i)); + return s->neighbor(i); + } + + int mirror_index(Full_cell_handle s, int i) const /* Concept */ + { + CGAL_precondition(Full_cell_handle() != s && check_range(i)); + return s->mirror_index(i); + } + + int mirror_index(Full_cell_const_handle s, int i) const + { + CGAL_precondition(Full_cell_const_handle() != s && check_range(i)); /* Concept */ + return s->mirror_index(i); + } + + int mirror_vertex(Full_cell_handle s, int i) const /* Concept */ + { + CGAL_precondition(Full_cell_handle() != s && check_range(i)); + return s->mirror_vertex(i); + } + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - FACETS OPERATIONS + + // works for Face_ = Facet and Face_ = Rotor. + // NOT DOCUMENTED for the Rotor case... + template< typename Face_ > + Full_cell_handle full_cell(const Face_ & f) const /* Concept */ + { + return f.full_cell(); + } + + // works for Face_ = Facet and Face_ = Rotor. + // NOT DOCUMENTED for the Rotor case... + template< class Face_ > + int index_of_covertex(const Face_ & f) const /* Concept */ + { + return f.index_of_covertex(); + } + + // NOT DOCUMENTED + // A Rotor has two covertices + int index_of_second_covertex(const Rotor & f) const + { + return f.index_of_second_covertex(); + } + + // works for Face_ = Facet and Face_ = Rotor. + // NOT DOCUMENTED... 
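    // Editor's note (illustrative, not from the patch): the neighbor/mirror
    // relation maintained by set_neighbors() further down satisfies, for two
    // glued cells s and t after set_neighbors(s, i, t, j):
    //
    //     s->neighbor(i) == t      and   t->neighbor(j) == s
    //     s->mirror_index(i) == j  and   t->mirror_index(j) == i
    //
    // so neighbor(s, i)->neighbor(mirror_index(s, i)) == s holds for every
    // cell whose neighbors have been set.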
+ template< class Face_ > + bool is_boundary_facet(const Face_ & f) const + { + if( get_visited(neighbor(full_cell(f), index_of_covertex(f))) ) + return false; + if( ! get_visited(full_cell(f)) ) + return false; + return true; + } + + // NOT DOCUMENTED... + Rotor rotate_rotor(Rotor & f) + { + int opposite = mirror_index(full_cell(f), index_of_covertex(f)); + Full_cell_handle s = neighbor(full_cell(f), index_of_covertex(f)); + int new_second = s->index(vertex(full_cell(f), index_of_second_covertex(f))); + return Rotor(s, new_second, opposite); + } + + // NICE UPDATE OPERATIONS + +protected: + void do_insert_increase_dimension(Vertex_handle, Vertex_handle); +public: +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - REMOVALS + + Vertex_handle collapse_face(const Face &); /* Concept */ + void remove_decrease_dimension(Vertex_handle, Vertex_handle); /* Concept */ + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - INSERTIONS + + Vertex_handle insert_in_full_cell(Full_cell_handle); /* Concept */ + Vertex_handle insert_in_face(const Face &); /* Concept */ + Vertex_handle insert_in_facet(const Facet &); /* Concept */ + template< typename Forward_iterator > + Vertex_handle insert_in_hole(Forward_iterator, Forward_iterator, Facet); /* Concept */ + template< typename Forward_iterator, typename OutputIterator > + Vertex_handle insert_in_hole(Forward_iterator, Forward_iterator, Facet, OutputIterator); /* Concept */ + + template< typename OutputIterator > + Full_cell_handle insert_in_tagged_hole(Vertex_handle, Facet, OutputIterator); + + Vertex_handle insert_increase_dimension(Vertex_handle=Vertex_handle()); /* Concept */ + +private: + + // Used by insert_in_tagged_hole + struct IITH_task + { + IITH_task( + Facet boundary_facet_, + int index_of_inside_cell_in_outside_cell_, + Full_cell_handle future_neighbor_ = Full_cell_handle(), + int new_cell_index_in_future_neighbor_ = -1, + int index_of_future_neighbor_in_new_cell_ = -1) + : boundary_facet(boundary_facet_), + index_of_inside_cell_in_outside_cell(index_of_inside_cell_in_outside_cell_), + future_neighbor(future_neighbor_), + new_cell_index_in_future_neighbor(new_cell_index_in_future_neighbor_), + index_of_future_neighbor_in_new_cell(index_of_future_neighbor_in_new_cell_) + {} + + // "new_cell" is the cell about to be created + Facet boundary_facet; + int index_of_inside_cell_in_outside_cell; + Full_cell_handle future_neighbor; + int new_cell_index_in_future_neighbor; + int index_of_future_neighbor_in_new_cell; + }; + + // NOT DOCUMENTED + void clear_visited_marks(Full_cell_handle) const; + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - DANGEROUS UPDATE OPERATIONS + +private: + + // NOT DOCUMENTED + template< typename FCH > // FCH = Full_cell_[const_]handle + bool get_visited(FCH c) const + { + return c->tds_data().is_visited(); + } + + // NOT DOCUMENTED + template< typename FCH > // FCH = Full_cell_[const_]handle + void set_visited(FCH c, bool m) const + { + if( m ) + c->tds_data().mark_visited(); + else + c->tds_data().clear_visited(); + } + +public: + + void clear() /* Concept */ + { + clean_dynamic_memory(); + dcur_ = -2; + } + + void set_current_dimension(int d) /* Concept */ + { + CGAL_precondition(-2<=d && d<=maximal_dimension()); + dcur_ = d; + } + + Full_cell_handle new_full_cell(Full_cell_handle s) + { + return full_cells_.emplace(*s); + } + + Full_cell_handle new_full_cell() /* Concept */ + { + return full_cells_.emplace(dmax_); + } + + void delete_full_cell(Full_cell_handle 
s) /* Concept */ + { + CGAL_precondition(Full_cell_handle() != s); + // CGAL_expensive_precondition(is_full_cell(s)); + full_cells_.erase(s); + } + + template< typename Forward_iterator > + void delete_full_cells(Forward_iterator start, Forward_iterator end) /* Concept */ + { + Forward_iterator s = start; + while( s != end ) + full_cells_.erase(*s++); + } + + template< class T > + Vertex_handle new_vertex( const T & t ) + { + return vertices_.emplace(t); + } + + Vertex_handle new_vertex() /* Concept */ + { + return vertices_.emplace(); + } + + void delete_vertex(Vertex_handle v) /* Concept */ + { + CGAL_precondition( Vertex_handle() != v ); + vertices_.erase(v); + } + + void associate_vertex_with_full_cell(Full_cell_handle s, int i, Vertex_handle v) /* Concept */ + { + CGAL_precondition(check_range(i)); + CGAL_precondition(s != Full_cell_handle()); + CGAL_precondition(v != Vertex_handle()); + s->set_vertex(i, v); + v->set_full_cell(s); + } + + void set_neighbors(Full_cell_handle s, int i, Full_cell_handle s1, int j) /* Concept */ + { + CGAL_precondition(check_range(i)); + CGAL_precondition(check_range(j)); + CGAL_precondition(s != Full_cell_handle()); + CGAL_precondition(s1 != Full_cell_handle()); + s->set_neighbor(i, s1); + s1->set_neighbor(j, s); + s->set_mirror_index(i, j); + s1->set_mirror_index(j, i); + } + + // SANITY CHECKS + + bool is_valid(bool = true, int = 0) const; /* Concept */ + + // NOT DOCUMENTED + template< class OutStream> void write_graph(OutStream &); + + Vertex_iterator vertices_begin() { return vertices_.begin(); } /* Concept */ + Vertex_iterator vertices_end() { return vertices_.end(); } /* Concept */ + Full_cell_iterator full_cells_begin() { return full_cells_.begin(); } /* Concept */ + Full_cell_iterator full_cells_end() { return full_cells_.end(); } /* Concept */ + + Vertex_const_iterator vertices_begin() const { return vertices_.begin(); } /* Concept */ + Vertex_const_iterator vertices_end() const { return vertices_.end(); } /* Concept */ + Full_cell_const_iterator full_cells_begin() const { return full_cells_.begin(); } /* Concept */ + Full_cell_const_iterator full_cells_end() const { return full_cells_.end(); } /* Concept */ + + Facet_iterator facets_begin() /* Concept */ + { + if( current_dimension() <= 0 ) + return facets_end(); + return Facet_iterator(*this); + } + Facet_iterator facets_end() /* Concept */ + { + return Facet_iterator(*this, 0); + } + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - FULL CELL GATHERING + + // a traversal predicate for gathering full_cells incident to a given face + // ``incident'' means that the given face is a subface of the full_cell + class Incident_full_cell_traversal_predicate + { + const Face & f_; + int dim_; + const Triangulation_data_structure & tds_; + public: + Incident_full_cell_traversal_predicate(const Triangulation_data_structure & tds, + const Face & f) + : f_(f), tds_(tds) + { + dim_ = f.face_dimension(); + } + bool operator()(const Facet & facet) const + { + Vertex_handle v = tds_.full_cell(facet)->vertex(tds_.index_of_covertex(facet)); + for( int i = 0; i <= dim_; ++i ) + { + if( v == f_.vertex(i) ) + return false; + } + return true; + } + }; + + // a traversal predicate for gathering full_cells having a given face as subface + class Star_traversal_predicate + { + const Face & f_; + int dim_; + const Triangulation_data_structure & tds_; + public: + Star_traversal_predicate(const Triangulation_data_structure & tds, + const Face & f) + : f_(f), tds_(tds) + { + dim_ = f.face_dimension(); + } + bool 
operator()(const Facet & facet) const + { + Full_cell_handle s = tds_.full_cell(facet)->neighbor(tds_.index_of_covertex(facet)); + for( int j = 0; j <= tds_.current_dimension(); ++j ) + { + for( int i = 0; i <= dim_; ++i ) + if( s->vertex(j) == f_.vertex(i) ) + return true; + } + return false; + } + }; + + template< typename TraversalPredicate, typename OutputIterator > + Facet gather_full_cells(Full_cell_handle, TraversalPredicate &, OutputIterator &) const; /* Concept */ + template< typename OutputIterator > + OutputIterator incident_full_cells(const Face &, OutputIterator) const; /* Concept */ + template< typename OutputIterator > + OutputIterator incident_full_cells(Vertex_const_handle, OutputIterator) const; /* Concept */ + template< typename OutputIterator > + OutputIterator star(const Face &, OutputIterator) const; /* Concept */ +#ifndef CGAL_CFG_NO_CPP0X_DEFAULT_TEMPLATE_ARGUMENTS_FOR_FUNCTION_TEMPLATES + template< typename OutputIterator, typename Comparator = std::less > + OutputIterator incident_upper_faces(Vertex_const_handle v, int dim, OutputIterator out, Comparator cmp = Comparator()) + { + return incident_faces(v, dim, out, cmp, true); + } + template< typename OutputIterator, typename Comparator = std::less > + OutputIterator incident_faces(Vertex_const_handle, int, OutputIterator, Comparator = Comparator(), bool = false) const; +#else + template< typename OutputIterator, typename Comparator > + OutputIterator incident_upper_faces(Vertex_const_handle v, int dim, OutputIterator out, Comparator cmp = Comparator()) + { + return incident_faces(v, dim, out, cmp, true); + } + template< typename OutputIterator > + OutputIterator incident_upper_faces(Vertex_const_handle v, int dim, OutputIterator out) + { + return incident_faces(v, dim, out, std::less(), true); + } + template< typename OutputIterator, typename Comparator > + OutputIterator incident_faces(Vertex_const_handle, int, OutputIterator, Comparator = Comparator(), bool = false) const; + template< typename OutputIterator > + OutputIterator incident_faces(Vertex_const_handle, int, OutputIterator, + std::less = std::less(), bool = false) const; +#endif + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - INPUT / OUTPUT + + std::istream & read_full_cells(std::istream &, const std::vector &); + std::ostream & write_full_cells(std::ostream &, std::map &) const; + +}; // end of ``declaration/definition'' of Triangulation_data_structure<...> + +// = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = + +// FUNCTIONS THAT ARE MEMBER FUNCTIONS: + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +// - - - - - - - - - - - - - - - - - - - - - - - - THE GATHERING METHODS + +template< class Dim, class Vb, class Fcb > +template< typename OutputIterator > +OutputIterator +Triangulation_data_structure +::incident_full_cells(const Face & f, OutputIterator out) const /* Concept */ +{ + // CGAL_expensive_precondition_msg(is_full_cell(f.full_cell()), "the facet does not belong to the Triangulation"); + Incident_full_cell_traversal_predicate tp(*this, f); + gather_full_cells(f.full_cell(), tp, out); + return out; +} + +template< class Dim, class Vb, class Fcb > +template< typename OutputIterator > +OutputIterator +Triangulation_data_structure +::incident_full_cells(Vertex_const_handle v, OutputIterator out) const /* Concept */ +{ +// CGAL_expensive_precondition(is_vertex(v)); + CGAL_precondition(Vertex_handle() != v); + Face f(v->full_cell()); + f.set_index(0, 
v->full_cell()->index(v)); + return incident_full_cells(f, out); +} + +template< class Dim, class Vb, class Fcb > +template< typename OutputIterator > +OutputIterator +Triangulation_data_structure +::star(const Face & f, OutputIterator out) const /* Concept */ +{ + // CGAL_precondition_msg(is_full_cell(f.full_cell()), "the facet does not belong to the Triangulation"); + Star_traversal_predicate tp(*this, f); + gather_full_cells(f.full_cell(), tp, out); + return out; +} + +template< class Dim, class Vb, class Fcb > +template< typename TraversalPredicate, typename OutputIterator > +typename Triangulation_data_structure::Facet +Triangulation_data_structure +::gather_full_cells(Full_cell_handle start, + TraversalPredicate & tp, + OutputIterator & out) const /* Concept */ +{ + std::queue queue; + set_visited(start, true); + queue.push(start); + const int cur_dim = current_dimension(); + Facet ft; + while( ! queue.empty() ) + { + Full_cell_handle s = queue.front(); + queue.pop(); + *out = s; + ++out; + for( int i = 0; i <= cur_dim; ++i ) + { + Full_cell_handle n = s->neighbor(i); + if( ! get_visited(n) ) + { + set_visited(n, true); + if( tp(Facet(s, i)) ) + queue.push(n); + else + ft = Facet(s, i); + } + } + } + clear_visited_marks(start); + return ft; +} + +#ifdef CGAL_CFG_NO_CPP0X_DEFAULT_TEMPLATE_ARGUMENTS_FOR_FUNCTION_TEMPLATES +template< class Dim, class Vb, class Fcb > +template< typename OutputIterator > +OutputIterator +Triangulation_data_structure +::incident_faces(Vertex_const_handle v, int dim, OutputIterator out, + std::less cmp, bool upper_faces) const +{ + return incident_faces >(v, dim, out, cmp, upper_faces); +} +#endif + +template< class Dim, class Vb, class Fcb > +template< typename OutputIterator, typename Comparator > +OutputIterator +Triangulation_data_structure +::incident_faces(Vertex_const_handle v, int dim, OutputIterator out, Comparator cmp, bool upper_faces) const +{ + CGAL_precondition( 0 < dim ); + if( dim >= current_dimension() ) + return out; + typedef std::vector Simplices; + Simplices simps; + simps.reserve(64); + // gather incident full_cells + std::back_insert_iterator sout(simps); + incident_full_cells(v, sout); + // for storing the handles to the vertices of a full_cell + typedef std::vector Vertices; + typedef std::vector Indices; + Vertices vertices(1 + current_dimension()); + Indices sorted_idx(1 + current_dimension()); + // setup Face comparator and Face_set + typedef internal::Triangulation::Compare_faces_with_common_first_vertex + Upper_face_comparator; + Upper_face_comparator ufc(dim); + typedef std::set Face_set; + Face_set face_set(ufc); + for( typename Simplices::const_iterator s = simps.begin(); s != simps.end(); ++s ) + { + int v_idx(0); // the index of |v| in the sorted full_cell + // get the vertices of the full_cell and sort them + for( int i = 0; i <= current_dimension(); ++i ) + vertices[i] = (*s)->vertex(i); + if( upper_faces ) + { + std::sort(vertices.begin(), vertices.end(), cmp); + while( vertices[v_idx] != v ) + ++v_idx; + } + else + { + while( vertices[v_idx] != v ) + ++v_idx; + if( 0 != v_idx ) + std::swap(vertices[0], vertices[v_idx]); + v_idx = 0; + typename Vertices::iterator vbegin(vertices.begin()); + ++vbegin; + std::sort(vbegin, vertices.end(), cmp); + } + if( v_idx + dim > current_dimension() ) + continue; // |v| is too far to the right + // stores the index of the vertices of s in the same order + // as in |vertices|: + for( int i = 0; i <= current_dimension(); ++i ) + sorted_idx[i] = (*s)->index(vertices[i]); + // init state 
for enumerating all candidate faces: + internal::Combination_enumerator f_idx(dim, v_idx + 1, current_dimension()); + Face f(*s); + f.set_index(0, sorted_idx[v_idx]); + while( ! f_idx.end() ) + { + for( int i = 0; i < dim; ++i ) + f.set_index(1 + i, sorted_idx[f_idx[i]]); + face_set.insert(f); // checks if face has already been found + + // compute next sorted face (lexicographic enumeration) + ++f_idx; + } + } + typename Face_set::iterator fit = face_set.begin(); + while( fit != face_set.end() ) + *out++ = *fit++; + return out; +} + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +// - - - - - - - - - - - - - - - - - - - - - - - - THE REMOVAL METHODS + +template +typename Triangulation_data_structure::Vertex_handle +Triangulation_data_structure +::collapse_face(const Face & f) /* Concept */ +{ + const int fd = f.face_dimension(); + CGAL_precondition( (1 <= fd ) && (fd < current_dimension())); + std::vector simps; + // save the Face's vertices: + Full_cell s; + for( int i = 0; i <= fd; ++i ) + s.set_vertex(i, f.vertex(i)); + // compute the star of f + simps.reserve(64); + std::back_insert_iterator > out(simps); + star(f, out); + Vertex_handle v = insert_in_hole(simps.begin(), simps.end(), Facet(f.full_cell(), f.index(0))); + for( int i = 0; i <= fd; ++i ) + delete_vertex(s.vertex(i)); + return v; +} + +template +void +Triangulation_data_structure +::remove_decrease_dimension(Vertex_handle v, Vertex_handle star) /* Concept */ +{ + CGAL_assertion( current_dimension() >= -1 ); + if( -1 == current_dimension() ) + { + clear(); + return; + } + else if( 0 == current_dimension() ) + { + delete_full_cell(v->full_cell()); + delete_vertex(v); + star->full_cell()->set_neighbor(0, Full_cell_handle()); + set_current_dimension(-1); + return; + } + else if( 1 == current_dimension() ) + { + Full_cell_handle s = v->full_cell(); + int star_index; + if( s->has_vertex(star, star_index) ) + s = s->neighbor(star_index); + // Here, |star| is not a vertex of |s|, so it's the only finite + // full_cell + Full_cell_handle inf1 = s->neighbor(0); + Full_cell_handle inf2 = s->neighbor(1); + Vertex_handle v2 = s->vertex(1 - s->index(v)); + delete_vertex(v); + delete_full_cell(s); + inf1->set_vertex(1, Vertex_handle()); + inf1->set_vertex(1, Vertex_handle()); + inf2->set_neighbor(1, Full_cell_handle()); + inf2->set_neighbor(1, Full_cell_handle()); + associate_vertex_with_full_cell(inf1, 0, star); + associate_vertex_with_full_cell(inf2, 0, v2); + set_neighbors(inf1, 0, inf2, 0); + set_current_dimension(0); + return; + } + typedef std::vector Simplices; + Simplices simps; + incident_full_cells(v, std::back_inserter(simps)); + for( typename Simplices::iterator it = simps.begin(); it != simps.end(); ++it ) + { + int v_idx = (*it)->index(v); + if( ! 
(*it)->has_vertex(star) ) + { + delete_full_cell((*it)->neighbor(v_idx)); + for( int i = 0; i <= current_dimension(); ++i ) + (*it)->vertex(i)->set_full_cell(*it); + } + else + star->set_full_cell(*it); + if( v_idx != current_dimension() ) + { + (*it)->swap_vertices(v_idx, current_dimension()); + (*it)->swap_vertices(current_dimension() - 2, current_dimension() - 1); + } + (*it)->set_vertex(current_dimension(), Vertex_handle()); + (*it)->set_neighbor(current_dimension(), Full_cell_handle()); + } + set_current_dimension(current_dimension()-1); + delete_vertex(v); +} + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +// - - - - - - - - - - - - - - - - - - - - - - - - THE INSERTION METHODS + +template +typename Triangulation_data_structure::Vertex_handle +Triangulation_data_structure +::insert_in_full_cell(Full_cell_handle s) /* Concept */ +{ + CGAL_precondition(0 < current_dimension()); + CGAL_precondition(Full_cell_handle() != s); + // CGAL_expensive_precondition(is_full_cell(s)); + + const int cur_dim = current_dimension(); + Vertex_handle v = new_vertex(); + // the full_cell 'fc' is just used to store the handle to all the new full_cells. + Full_cell fc(maximal_dimension()); + for( int i = 1; i <= cur_dim; ++i ) + { + Full_cell_handle new_s = new_full_cell(s); + fc.set_neighbor(i, new_s); + associate_vertex_with_full_cell(new_s, i, v); + s->vertex(i-1)->set_full_cell(new_s); + set_neighbors(new_s, i, neighbor(s, i), mirror_index(s, i)); + } + fc.set_neighbor(0, s); + associate_vertex_with_full_cell(s, 0, v); + for( int i = 0; i <= cur_dim; ++i ) + for( int j = 0; j <= cur_dim; ++j ) + { + if( j == i ) continue; + set_neighbors(fc.neighbor(i), j, fc.neighbor(j), i); + } + return v; +} + +template +typename Triangulation_data_structure::Vertex_handle +Triangulation_data_structure +::insert_in_face(const Face & f) /* Concept */ +{ + std::vector simps; + simps.reserve(64); + std::back_insert_iterator > out(simps); + incident_full_cells(f, out); + return insert_in_hole(simps.begin(), simps.end(), Facet(f.full_cell(), f.index(0))); +} +template +typename Triangulation_data_structure::Vertex_handle +Triangulation_data_structure +::insert_in_facet(const Facet & ft) /* Concept */ +{ + Full_cell_handle s[2]; + s[0] = full_cell(ft); + int i = index_of_covertex(ft); + s[1] = s[0]->neighbor(i); + i = ( i + 1 ) % current_dimension(); + return insert_in_hole(s, s+2, Facet(s[0], i)); +} + +template +template < typename OutputIterator > +typename Triangulation_data_structure::Full_cell_handle +Triangulation_data_structure +::insert_in_tagged_hole(Vertex_handle v, Facet f, + OutputIterator new_full_cells) +{ + CGAL_assertion_msg(is_boundary_facet(f), "starting facet should be on the hole boundary"); + + const int cur_dim = current_dimension(); + Full_cell_handle new_s; + + std::queue task_queue; + task_queue.push( + IITH_task(f, mirror_index(full_cell(f), index_of_covertex(f))) ); + + while (!task_queue.empty()) + { + IITH_task task = task_queue.front(); + task_queue.pop(); + + Full_cell_handle old_s = full_cell(task.boundary_facet); + const int facet_index = index_of_covertex(task.boundary_facet); + + Full_cell_handle outside_neighbor = neighbor(old_s, facet_index); + // Here, "new_s" might actually be a new cell, but it might also be "old_s" + // if it has not been treated already in the meantime + new_s = neighbor(outside_neighbor, task.index_of_inside_cell_in_outside_cell); + // If the cell has not been treated yet + if (old_s == new_s) + { + new_s = new_full_cell(); + + int i(0); 
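      // Copy the vertices of |old_s| into |new_s|, leaving slot |facet_index|
      // free: that slot receives the new vertex |v| just below.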
+ for ( ; i < facet_index ; ++i) + associate_vertex_with_full_cell(new_s, i, old_s->vertex(i)); + ++i; // skip facet_index + for ( ; i <= cur_dim ; ++i) + associate_vertex_with_full_cell(new_s, i, old_s->vertex(i)); + associate_vertex_with_full_cell(new_s, facet_index, v); + set_neighbors(new_s, + facet_index, + outside_neighbor, + mirror_index(old_s, facet_index)); + + // add the new full_cell to the list of new full_cells + *new_full_cells++ = new_s; + + // check all of |Facet f|'s neighbors + for (i = 0 ; i <= cur_dim ; ++i) + { + if (facet_index == i) + continue; + // we define a |Rotor| because it makes it easy to rotate around + // in a self contained fashion. The corresponding potential + // boundary facet is Facet(full_cell(rot), index_of_covertex(rot)) + Rotor rot(old_s, i, facet_index); + // |rot| on line above, stands for Candidate Facet + while (!is_boundary_facet(rot)) + rot = rotate_rotor(rot); + + // we did find the |i|-th neighbor of Facet(old_s, facet_index)... + // has it already been extruded to center point |v| ? + Full_cell_handle inside = full_cell(rot); + Full_cell_handle outside = neighbor(inside, index_of_covertex(rot)); + // "m" is the vertex of outside which is not on the boundary + Vertex_handle m = inside->mirror_vertex(index_of_covertex(rot), current_dimension()); // CJTODO: use mirror_index? + // "index" is the index of m in "outside" + int index = outside->index(m); + // new_neighbor is the inside cell which is registered as the neighbor + // of the outside cell => it's either a newly created inside cell or an + // old inside cell which we are about to delete + Full_cell_handle new_neighbor = outside->neighbor(index); + + // Is new_neighbor still the old neighbor? + if (new_neighbor == inside) + { + task_queue.push(IITH_task( + Facet(inside, index_of_covertex(rot)), // boundary facet + index, // index_of_inside_cell_in_outside_cell + new_s, // future_neighbor + i, // new_cell_index_in_future_neighbor + index_of_second_covertex(rot) // index_of_future_neighbor_in_new_cell + )); + } + } + } + + // If there is some neighbor stories to fix + if (task.future_neighbor != Full_cell_handle()) + { + // now the new neighboring full_cell exists, we link both + set_neighbors(new_s, + task.index_of_future_neighbor_in_new_cell, + task.future_neighbor, + task.new_cell_index_in_future_neighbor); + } + } + + return new_s; +} + +template< class Dim, class Vb, class Fcb > +template< typename Forward_iterator, typename OutputIterator > +typename Triangulation_data_structure::Vertex_handle +Triangulation_data_structure +::insert_in_hole(Forward_iterator start, Forward_iterator end, Facet f, + OutputIterator out) /* Concept */ +{ + CGAL_expensive_precondition( + ( std::distance(start, end) == 1 ) + || ( current_dimension() > 1 ) ); + Forward_iterator sit = start; + while( end != sit ) + set_visited(*sit++, true); + Vertex_handle v = new_vertex(); + insert_in_tagged_hole(v, f, out); + delete_full_cells(start, end); + return v; +} + +template< class Dim, class Vb, class Fcb > +template< typename Forward_iterator > +typename Triangulation_data_structure::Vertex_handle +Triangulation_data_structure +::insert_in_hole(Forward_iterator start, Forward_iterator end, Facet f) /* Concept */ +{ + Emptyset_iterator out; + return insert_in_hole(start, end, f, out); +} + +template +void +Triangulation_data_structure +::clear_visited_marks(Full_cell_handle start) const // NOT DOCUMENTED +{ + CGAL_precondition(start != Full_cell_handle()); + + std::queue queue; + set_visited(start, false); + 
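    // Breadth-first traversal of the marked region: each still-marked
    // neighbor is unmarked and enqueued, until no tagged full_cell remains.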
queue.push(start); + const int cur_dim = current_dimension(); + while( ! queue.empty() ) + { + Full_cell_handle s = queue.front(); + queue.pop(); + for( int i = 0; i <= cur_dim; ++i ) + { + if( get_visited(s->neighbor(i)) ) + { + set_visited(s->neighbor(i), false); + queue.push(s->neighbor(i)); + } + } + } +} + +template +void Triangulation_data_structure +::do_insert_increase_dimension(Vertex_handle x, Vertex_handle star) +{ + Full_cell_handle start = full_cells_begin(); + Full_cell_handle swap_me; + const int cur_dim = current_dimension(); + for( Full_cell_iterator S = full_cells_begin(); S != full_cells_end(); ++S ) + { + if( Vertex_handle() != S->vertex(cur_dim) ) + continue; + set_visited(S, true); + // extends full_cell |S| to include the new vertex as the + // current_dimension()-th vertex + associate_vertex_with_full_cell(S, cur_dim, x); + if( ! S->has_vertex(star) ) + { // S is bounded, we create its unbounded "twin" full_cell + Full_cell_handle S_new = new_full_cell(); + set_neighbors(S, cur_dim, S_new, 0); + associate_vertex_with_full_cell(S_new, 0, star); + // here, we could be clever so as to get consistent orientation + for( int k = 1; k <= cur_dim; ++k ) + associate_vertex_with_full_cell(S_new, k, vertex(S, k - 1)); + } + } + // now we setup the neighbors + set_visited(start, false); + std::queue queue; + queue.push(start); + while( ! queue.empty() ) + { + Full_cell_handle S = queue.front(); + queue.pop(); + // here, the first visit above ensured that all neighbors exist now. + // Now we need to connect them with adjacency relation + int star_index; + if( S->has_vertex(star, star_index) ) + { + set_neighbors( S, cur_dim, neighbor(neighbor(S, star_index), cur_dim), + // this is tricky :-) : + mirror_index(S, star_index) + 1); + } + else + { + Full_cell_handle S_new = neighbor(S, cur_dim); + for( int k = 0 ; k < cur_dim ; ++k ) + { + Full_cell_handle S_opp = neighbor(S, k); + if( ! S_opp->has_vertex(star) ) + set_neighbors(S_new, k + 1, neighbor(S_opp, cur_dim), mirror_index(S, k) + 1); + // neighbor of S_new opposite to v is S_new' + // the vertex opposite to v remains the same but ... 
+ // remember the shifting of the vertices one step to the right + } + } + for( int k = 0 ; k < cur_dim ; ++k ) + if( get_visited(neighbor(S, k)) ) + { + set_visited(neighbor(S, k), false); + queue.push(neighbor(S, k)); + } + } + if( ( ( cur_dim % 2 ) == 0 ) && ( cur_dim > 1 ) ) + { + for( Full_cell_iterator S = full_cells_begin(); S != full_cells_end(); ++S ) + { + if( x != S->vertex(cur_dim) ) + S->swap_vertices(cur_dim - 1, cur_dim); + } + } + if( Full_cell_handle() != swap_me ) + swap_me->swap_vertices(1, 2); +} + +template +typename Triangulation_data_structure::Vertex_handle +Triangulation_data_structure +::insert_increase_dimension(Vertex_handle star) /* Concept */ +{ + const int prev_cur_dim = current_dimension(); + CGAL_precondition(prev_cur_dim < maximal_dimension()); + if( -2 != current_dimension() ) + { + CGAL_precondition( Vertex_handle() != star ); + CGAL_expensive_precondition(is_vertex(star)); + } + + set_current_dimension(prev_cur_dim + 1); + Vertex_handle v = new_vertex(); + switch( prev_cur_dim ) + { + case -2: + { // insertion of the first vertex + // ( geometrically : infinite vertex ) + Full_cell_handle s = new_full_cell(); + associate_vertex_with_full_cell(s, 0, v); + break; + } + case -1: + { // insertion of the second vertex + // ( geometrically : first finite vertex ) + //we create a triangulation of the 0-sphere, with + // vertices |star| and |v| + Full_cell_handle infinite_full_cell = star->full_cell(); + Full_cell_handle finite_full_cell = new_full_cell(); + associate_vertex_with_full_cell(finite_full_cell, 0, v); + set_neighbors(infinite_full_cell, 0, finite_full_cell, 0); + break; + } + default: + do_insert_increase_dimension(v, star); + break; + } + return v; +} + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +// - - - - - - - - - - - - - - - - - - - - - - - - VALIDITY CHECKS + +template +bool Triangulation_data_structure +::is_valid(bool verbose, int /* level */) const /* Concept */ +{ + Full_cell_const_handle s, t; + Vertex_const_handle v; + int i, j, k; + + if( current_dimension() == -2 ) + { + if( ! vertices_.empty() || ! full_cells_.empty() ) + { + if( verbose ) CGAL_warning_msg(false, "current dimension is -2 but there are vertices or full_cells"); + return false; + } + } + + if( current_dimension() == -1 ) + { + if ( (number_of_vertices() != 1) || (number_of_full_cells() != 1) ) + { + if( verbose ) CGAL_warning_msg(false, "current dimension is -1 but there isn't one vertex and one full_cell"); + return false; + } + } + + for( v = vertices_begin(); v != vertices_end(); ++v ) + { + if( ! v->is_valid(verbose) ) + return false; + } + + // FUTURE: for each vertex v, gather incident full_cells. then, check that + // any full_cell containing v is among those gathered full_cells... + + if( current_dimension() < 0 ) + return true; + + for( s = full_cells_begin(); s != full_cells_end(); ++s ) + { + if( ! 
s->is_valid(verbose) ) + return false; + // check that the full cell has no duplicate vertices + for( i = 0; i <= current_dimension(); ++i ) + for( j = i + 1; j <= current_dimension(); ++j ) + if( vertex(s,i) == vertex(s,j) ) + { + CGAL_warning_msg(false, "a full_cell has two equal vertices"); + return false; + } + } + + for( s = full_cells_begin(); s != full_cells_end(); ++s ) + { + for( i = 0; i <= current_dimension(); ++i ) + if( (t = neighbor(s,i)) != Full_cell_const_handle() ) + { + int l = mirror_index(s,i); + if( s != neighbor(t,l) || i != mirror_index(t,l) ) + { + if( verbose ) CGAL_warning_msg(false, "neighbor relation is not symmetric"); + return false; + } + for( j = 0; j <= current_dimension(); ++j ) + if( j != i ) + { + // j must also occur as a vertex of t + for( k = 0; k <= current_dimension() && ( vertex(s,j) != vertex(t,k) || k == l); ++k ) + ; + if( k > current_dimension() ) + { + if( verbose ) CGAL_warning_msg(false, "too few shared vertices between neighbors full_cells."); + return false; + } + } + } + else + { + if( verbose ) CGAL_warning_msg(false, "full_cell has a NULL neighbor"); + return false; + } + } + return true; +} + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +// - - - - - - - - - - - - - - - - - - - - - - - - INPUT / OUTPUT + +// NOT DOCUMENTED +template +template +void Triangulation_data_structure +::write_graph(OutStream & os) +{ + std::vector > edges; + os << number_of_vertices() + 1; // add the vertex at infinity + int count(1); + for( Vertex_iterator vit = vertices_begin(); vit != vertices_end(); ++vit ) + vit->idx_ = count++; + edges.resize(number_of_vertices()+1); + for( Full_cell_iterator sit = full_cells_begin(); sit != full_cells_end(); ++sit ) + { + int v1 = 0; + while( v1 < current_dimension() ) + { + int v2 = v1 + 1; + while( v2 <= current_dimension() ) + { + int i1, i2; + if( Vertex_handle() != sit-> vertex(v1) ) + i1 = sit->vertex(v1)->idx_; + else + i1 = 0; + if( Vertex_handle() != sit-> vertex(v2) ) + i2 = sit->vertex(v2)->idx_; + else + i2 = 0; + edges[i1].insert(i2); + edges[i2].insert(i1); + ++v2; + } + ++v1; + } + } + for( std::size_t i = 0; i < edges.size(); ++i ) + { + os << std::endl << edges[i].size(); + for( std::set::const_iterator nit = edges[i].begin(); + nit != edges[i].end(); ++nit ) + { + os << ' ' << (*nit); + } + } +} + +// NOT DOCUMENTED... 
+template +std::istream & +Triangulation_data_structure +::read_full_cells(std::istream & is, const std::vector & vertices) +{ + std::size_t m; // number of full_cells + int index; + const int cd = current_dimension(); + if( is_ascii(is) ) + is >> m; + else + read(is, m, io_Read_write()); + + std::vector full_cells; + full_cells.reserve(m); + // read the vertices of each full_cell + std::size_t i = 0; + while( i < m ) + { + Full_cell_handle s = new_full_cell(); + full_cells.push_back(s); + for( int j = 0; j <= cd; ++j ) + { + if( is_ascii(is) ) + is >> index; + else + read(is, index); + s->set_vertex(j, vertices[index]); + } + // read other non-combinatorial information for the full_cells + is >> (*s); + ++i; + } + + // read the neighbors of each full_cell + i = 0; + if( is_ascii(is) ) + while( i < m ) + { + for( int j = 0; j <= cd; ++j ) + { + is >> index; + full_cells[i]->set_neighbor(j, full_cells[index]); + } + ++i; + } + else + while( i < m ) + { + for( int j = 0; j <= cd; ++j ) + { + read(is, index); + full_cells[i]->set_neighbor(j, full_cells[index]); + } + ++i; + } + + // compute the mirror indices + for( i = 0; i < m; ++i ) + { + Full_cell_handle s = full_cells[i]; + for( int j = 0; j <= cd; ++j ) + { + if( -1 != s->mirror_index(j) ) + continue; + Full_cell_handle n = s->neighbor(j); + int k = 0; + Full_cell_handle nn = n->neighbor(k); + while( s != nn ) + nn = n->neighbor(++k); + s->set_mirror_index(j,k); + n->set_mirror_index(k,j); + } + } + return is; +} + +// NOT DOCUMENTED... +template +std::ostream & +Triangulation_data_structure +::write_full_cells(std::ostream & os, std::map & index_of_vertex) const +{ + std::map index_of_full_cell; + + std::size_t m = number_of_full_cells(); + + if( is_ascii(os) ) + os << std::endl << m; + else + write(os, m, io_Read_write()); + + const int cur_dim = current_dimension(); + // write the vertex indices of each full_cell + int i = 0; + for( Full_cell_const_iterator it = full_cells_begin(); it != full_cells_end(); ++it ) + { + index_of_full_cell[it] = i++; + if( is_ascii(os) ) + os << std::endl; + for( int j = 0; j <= cur_dim; ++j ) + { + if( is_ascii(os) ) + os << ' ' << index_of_vertex[it->vertex(j)]; + else + write(os, index_of_vertex[it->vertex(j)]); + } + // write other non-combinatorial information for the full_cells + os << (*it); + } + + CGAL_assertion( (std::size_t) i == m ); + + // write the neighbors of each full_cell + if( is_ascii(os) ) + for( Full_cell_const_iterator it = full_cells_begin(); it != full_cells_end(); ++it ) + { + os << std::endl; + for( int j = 0; j <= cur_dim; ++j ) + os << ' ' << index_of_full_cell[it->neighbor(j)]; + } + else + for( Full_cell_const_iterator it = full_cells_begin(); it != full_cells_end(); ++it ) + { + for( int j = 0; j <= cur_dim; ++j ) + write(os, index_of_full_cell[it->neighbor(j)]); + } + + return os; +} + +// = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = + +// FUNCTIONS THAT ARE NOT MEMBER FUNCTIONS: + +template +std::istream & +operator>>(std::istream & is, Triangulation_data_structure & tr) + // reads : + // - the dimensions (maximal and current) + // - the number of finite vertices + // - the non combinatorial information on vertices (point, etc) + // - the number of full_cells + // - the full_cells by the indices of their vertices in the preceding list + // of vertices, plus the non combinatorial information on each full_cell + // - the neighbors of each full_cell by their index in the preceding list +{ + typedef Triangulation_data_structure TDS; + 
typedef typename TDS::Vertex_handle Vertex_handle; + + // read current dimension and number of vertices + std::size_t n; + int cd; + if( is_ascii(is) ) + is >> cd >> n; + else + { + read(is, cd); + read(is, n, io_Read_write()); + } + + CGAL_assertion_msg( cd <= tr.maximal_dimension(), "input Triangulation_data_structure has too high dimension"); + + tr.clear(); + tr.set_current_dimension(cd); + + if( n == 0 ) + return is; + + std::vector vertices; + vertices.resize(n); + + // read the vertices: + std::size_t i(0); + while( i < n ) + { + vertices[i] = tr.new_vertex(); + is >> (*vertices[i]); // read a vertex + ++i; + } + + // now, read the combinatorial information + return tr.read_full_cells(is, vertices); +} + +template +std::ostream & +operator<<(std::ostream & os, const Triangulation_data_structure & tr) + // writes : + // - the dimensions (maximal and current) + // - the number of finite vertices + // - the non combinatorial information on vertices (point, etc) + // - the number of full cells + // - the full cells by the indices of their vertices in the preceding list + // of vertices, plus the non combinatorial information on each full_cell + // - the neighbors of each full_cell by their index in the preceding list +{ + typedef Triangulation_data_structure TDS; + typedef typename TDS::Vertex_const_handle Vertex_handle; + typedef typename TDS::Vertex_const_iterator Vertex_iterator; + + // outputs dimension and number of vertices + std::size_t n = tr.number_of_vertices(); + if( is_ascii(os) ) + os << tr.current_dimension() << std::endl << n; + else + { + write(os, tr.current_dimension()); + write(os, n, io_Read_write()); + } + + if( n == 0 ) + return os; + + // write the vertices + std::map index_of_vertex; + int i = 0; + for( Vertex_iterator it = tr.vertices_begin(); it != tr.vertices_end(); ++it, ++i ) + { + os << *it; // write the vertex + if (is_ascii(os)) + os << std::endl; + index_of_vertex[it] = i; + } + CGAL_assertion( (std::size_t) i == n ); + + // output the combinatorial information + return tr.write_full_cells(os, index_of_vertex); +} + +} //namespace CGAL + +#endif // CGAL_TRIANGULATION_DATA_STRUCTURE_H diff --git a/src/common/include/gudhi_patches/CGAL/Triangulation_ds_full_cell.h b/src/common/include/gudhi_patches/CGAL/Triangulation_ds_full_cell.h new file mode 100644 index 00000000..541a6a85 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/Triangulation_ds_full_cell.h @@ -0,0 +1,311 @@ +// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France). +// All rights reserved. +// +// This file is part of CGAL (www.cgal.org). +// You can redistribute it and/or modify it under the terms of the GNU +// General Public License as published by the Free Software Foundation, +// either version 3 of the License, or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
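Usage sketch (illustrative only, not part of the patch): the stream operators defined above write the current dimension, the vertices, then each full cell by its vertex indices followed by its neighbors, and read_full_cells recomputes the mirror indices on input. The minimal program below, assuming these patched headers (or a regular CGAL installation) are on the include path, builds a small triangulation data structure with insert_increase_dimension and round-trips it through a string stream; the type alias and variable names are made up for the example.

#include <CGAL/Triangulation_data_structure.h>
#include <CGAL/Dimension.h>
#include <sstream>
#include <cassert>

int main() {
  typedef CGAL::Triangulation_data_structure<CGAL::Dimension_tag<3> > TDS;
  TDS tds(3);
  // The first call creates the single vertex of the (-1)-dimensional
  // triangulation (geometrically the vertex "at infinity"); the later calls
  // star that vertex, as in insert_increase_dimension above.
  TDS::Vertex_handle inf = tds.insert_increase_dimension(TDS::Vertex_handle());
  while (tds.current_dimension() < tds.maximal_dimension())
    tds.insert_increase_dimension(inf);
  // is_valid checks, among other things, that the neighbor relation is
  // symmetric: s == neighbor(neighbor(s, i), mirror_index(s, i)).
  assert(tds.is_valid());

  std::stringstream ss;
  ss << tds;        // dimension, vertices, full cells, neighbors
  TDS reread(3);
  ss >> reread;     // rebuilt via read_full_cells
  assert(reread.current_dimension() == tds.current_dimension());
  assert(reread.number_of_vertices() == tds.number_of_vertices());
  assert(reread.number_of_full_cells() == tds.number_of_full_cells());
  return 0;
}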
+// +// $URL$ +// $Id$ +// +// Author(s) : Samuel Hornus + +#ifndef CGAL_TRIANGULATION_DS_FULL_CELL_H +#define CGAL_TRIANGULATION_DS_FULL_CELL_H + +#include +#include +#include +#include +#include +#include + +namespace CGAL { + +template< class TDS = void, typename FullCellStoragePolicy = Default > +class Triangulation_ds_full_cell +{ + typedef typename Default::Get::type + Storage_policy; + typedef Triangulation_ds_full_cell Self; + typedef typename TDS::Maximal_dimension Maximal_dimension; + +public: + typedef TDS Triangulation_data_structure; + typedef typename TDS::Face Face; + typedef typename TDS::Vertex_handle Vertex_handle; /* Concept */ + typedef typename TDS::Vertex_const_handle Vertex_const_handle; + typedef typename TDS::Full_cell_handle Full_cell_handle; /* Concept */ + typedef typename TDS::Full_cell_const_handle Full_cell_const_handle; + typedef typename TDS::Full_cell_data TDS_data; /* data that the TDS wants to be stored here */ + template< typename TDS2 > + struct Rebind_TDS /* Concept */ + { + typedef Triangulation_ds_full_cell Other; + }; + +private: // STORAGE + typedef TFC_data< Vertex_handle, Full_cell_handle, + Maximal_dimension, Storage_policy > Combinatorics; + friend struct TFC_data< Vertex_handle, Full_cell_handle, + Maximal_dimension, Storage_policy >; + // array of vertices + typedef typename Combinatorics::Vertex_handle_array Vertex_handle_array; + // neighbor simplices + typedef typename Combinatorics::Full_cell_handle_array Full_cell_handle_array; + + // NOT DOCUMENTED... + typename Combinatorics::Xor_type xor_of_vertices(const int cur_dim) const + { + return combinatorics_.xor_of_vertices(cur_dim); + } + +public: + typedef typename Vertex_handle_array::const_iterator Vertex_handle_const_iterator; + typedef Vertex_handle_const_iterator Vertex_handle_iterator; /* Concept */ + + Triangulation_ds_full_cell(const int dmax) /* Concept */ + : combinatorics_(dmax), tds_data_() + { + CGAL_assertion( dmax > 0 ); + for( int i = 0; i <= dmax; ++i ) + { + set_neighbor(i, Full_cell_handle()); + set_vertex(i, Vertex_handle()); + set_mirror_index(i, -1); + } + } + + Triangulation_ds_full_cell(const Triangulation_ds_full_cell & s) /* Concept */ + : combinatorics_(s.combinatorics_), tds_data_(s.tds_data_) + {} + + ~Triangulation_ds_full_cell() {} + + int maximal_dimension() const /* Concept */ + { + return static_cast(vertices().size() - 1); + } + + Vertex_handle_const_iterator vertices_begin() const /* Concept */ + { + return vertices().begin(); + } + + Vertex_handle_const_iterator vertices_end() const /* Concept */ + { + return vertices().end(); + } + + Vertex_handle vertex(const int i) const /* Concept */ + { + CGAL_precondition(0<=i && i<=maximal_dimension()); + return vertices()[i]; + } + + Full_cell_handle neighbor(const int i) const /* Concept */ + { + CGAL_precondition(0<=i && i<=maximal_dimension()); + return neighbors()[i]; + } + + int mirror_index(const int i) const /* Concept */ + { + CGAL_precondition(0<=i && i<=maximal_dimension()); + return combinatorics_.mirror_index(i); + } + + // Advanced... + Vertex_handle mirror_vertex(const int i, const int cur_dim) const /* Concept */ + { + CGAL_precondition(0<=i && i<=maximal_dimension()); + return combinatorics_.mirror_vertex(i, cur_dim); + } + + int index(Full_cell_const_handle s) const /* Concept */ + { + // WE ASSUME THE FULL CELL WE ARE LOOKING FOR INDEED EXISTS ! 
+ CGAL_precondition(has_neighbor(s)); + int index(0); + while( neighbor(index) != s ) + ++index; + return index; + } + + int index(Vertex_const_handle v) const /* Concept */ + { + // WE ASSUME THE VERTEX WE ARE LOOKING FOR INDEED EXISTS ! + CGAL_precondition(has_vertex(v)); + int index(0); + while( vertex(index) != v ) + ++index; + return index; + } + + void set_vertex(const int i, Vertex_handle v) /* Concept */ + { + CGAL_precondition(0<=i && i<=maximal_dimension()); + vertices()[i] = v; + } + + void set_neighbor(const int i, Full_cell_handle s) /* Concept */ + { + CGAL_precondition(0<=i && i<=maximal_dimension()); + neighbors()[i] = s; + } + + void set_mirror_index(const int i, const int index) /* Concept */ + { + CGAL_precondition(0<=i && i<=maximal_dimension()); + combinatorics_.set_mirror_index(i, index); + } + + bool has_vertex(Vertex_const_handle v) const /* Concept */ + { + int index; + return has_vertex(v, index); + } + + bool has_vertex(Vertex_const_handle v, int & index) const /* Concept */ + { + const int d = maximal_dimension(); + index = 0; + while( (index <= d) && (vertex(index) != v) ) + ++index; + return (index <= d); + } + + bool has_neighbor(Full_cell_const_handle s) const /* Concept */ + { + int index; + return has_neighbor(s, index); + } + + bool has_neighbor(Full_cell_const_handle s, int & index) const /* Concept */ + { + const int d = maximal_dimension(); + index = 0; + while( (index <= d) && (neighbor(index) != s) ) + ++index; + return (index <= d); + } + + void swap_vertices(const int d1, const int d2) /* Concept */ + { + CGAL_precondition(0 <= d1 && d1<=maximal_dimension()); + CGAL_precondition(0 <= d2 && d2<=maximal_dimension()); + combinatorics_.swap_vertices(d1, d2); + } + + const TDS_data & tds_data() const { return tds_data_; } /* Concept */ + TDS_data & tds_data() { return tds_data_; } /* Concept */ + + void* for_compact_container() const { return combinatorics_.for_compact_container(); } + void* & for_compact_container() { return combinatorics_.for_compact_container(); } + + bool is_valid(bool verbose = false, int = 0) const /* Concept */ + { + const int d = maximal_dimension(); + int i(0); + // test that the non-null Vertex_handles come first, before all null ones + while( i <= d && vertex(i) != Vertex_handle() ) ++i; + while( i <= d && vertex(i) == Vertex_handle() ) ++i; + if( i <= d ) + { + if( verbose ) CGAL_warning_msg(false, "full cell has garbage handles to vertices."); + return false; + } + for( i = 0; i <= d; ++i ) + { + if( Vertex_handle() == vertex(i) ) + break; // there are no more vertices + Full_cell_handle n(neighbor(i)); + if( Full_cell_handle() != n ) + { + int mirror_idx(mirror_index(i)); + if( n->neighbor(mirror_idx) == Full_cell_handle() ) + { + if( verbose ) CGAL_warning_msg(false, "neighbor has no back-neighbor."); + return false; + } + if( &(*(n->neighbor(mirror_idx))) != this ) + { + if( verbose ) CGAL_warning_msg(false, "neighbor does not point back to correct full cell."); + return false; + } + } + } + return true; + } + +private: + // access to data members: + Full_cell_handle_array & neighbors() {return combinatorics_.neighbors_; } + const Full_cell_handle_array & neighbors() const {return combinatorics_.neighbors_; } + Vertex_handle_array & vertices() {return combinatorics_.vertices_; } + const Vertex_handle_array & vertices() const {return combinatorics_.vertices_; } + + // DATA MEMBERS + Combinatorics combinatorics_; + mutable TDS_data tds_data_; +}; + +// FUNCTIONS THAT ARE NOT MEMBER FUNCTIONS: + +template < typename TDS, 
typename SSP > +std::ostream & +operator<<(std::ostream & O, const Triangulation_ds_full_cell &) /* Concept */ +{ + /*if( is_ascii(O) ) + { + // os << '\n'; + } + else {}*/ + return O; +} + +template < typename TDS, typename SSP > +std::istream & +operator>>(std::istream & I, Triangulation_ds_full_cell &) /* Concept */ +{ + /*if( is_ascii(I) ) + {} + else {}*/ + return I; +} + +// Special case: specialization when template parameter is void. + +// we must declare it for each possible full_cell storage policy because : +// (GCC error:) default template arguments may not be used in partial specializations +template< typename StoragePolicy > +class Triangulation_ds_full_cell +{ +public: + typedef internal::Triangulation::Dummy_TDS TDS; + typedef TDS Triangulation_data_structure; + typedef TDS::Vertex_handle Vertex_handle; + typedef TDS::Vertex_const_handle Vertex_const_handle; + typedef TDS::Full_cell_handle Full_cell_handle; + typedef TDS::Full_cell_const_handle Full_cell_const_handle; + typedef TDS::Vertex_handle_const_iterator Vertex_handle_const_iterator; + typedef TDS::Full_cell_data TDS_data; + template + struct Rebind_TDS + { + typedef Triangulation_ds_full_cell Other; + }; + Vertex_handle_const_iterator vertices_begin(); + Vertex_handle_const_iterator vertices_end(); +}; + +} //namespace CGAL + +#endif // CGAL_TRIANGULATION_DS_FULL_CELL_H diff --git a/src/common/include/gudhi_patches/CGAL/Triangulation_ds_vertex.h b/src/common/include/gudhi_patches/CGAL/Triangulation_ds_vertex.h new file mode 100644 index 00000000..381b97e1 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/Triangulation_ds_vertex.h @@ -0,0 +1,154 @@ +// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France). +// All rights reserved. +// +// This file is part of CGAL (www.cgal.org). +// You can redistribute it and/or modify it under the terms of the GNU +// General Public License as published by the Free Software Foundation, +// either version 3 of the License, or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
+// +// $URL$ +// $Id$ +// +// Author(s) : Samuel Hornus + +#ifndef CGAL_TRIANGULATION_DS_VERTEX_H +#define CGAL_TRIANGULATION_DS_VERTEX_H + +#include +#include + +namespace CGAL { + +/* The template parameter TDS must be a model of the concept + * 'TriangulationDataStructure' that stores vertices of type + * 'Triangulation_ds_vertex' + */ +template< class TDS = void > +class Triangulation_ds_vertex +{ + typedef Triangulation_ds_vertex Self; + +public: + typedef TDS Triangulation_data_structure; + typedef typename TDS::Full_cell_handle Full_cell_handle; /* Concept */ + + template + struct Rebind_TDS /* Concept */ + { + typedef Triangulation_ds_vertex Other; + }; + +protected: // DATA MEMBERS + Full_cell_handle full_cell_; // A handle to an incident full_cell + +public: + // Constructs a vertex with incident full_cell 's' + Triangulation_ds_vertex(Full_cell_handle s) : full_cell_(s) /* Concept */ + { + CGAL_assertion( Full_cell_handle() != s ); + } + // Constructs a vertex with no incident full_cell + Triangulation_ds_vertex() : full_cell_() {} /* Concept */ + + ~Triangulation_ds_vertex() {} + + /// Set 's' as an incident full_cell + void set_full_cell(Full_cell_handle s) /* Concept */ + { + full_cell_ = s; + } + + /// Returns a full_cell incident to the vertex + Full_cell_handle full_cell() const /* Concept */ + { + return full_cell_; + } + + bool is_valid(bool verbose = false, int /* level */ = 0) const /* Concept */ + { + if( Full_cell_handle() == full_cell() ) + { + if( verbose ) + CGAL_warning_msg(false, "vertex has no incident full cell."); + return false; + } + bool found(false); + // These two typename below are OK because TDS fullfils the + // TriangulationDataStructure concept. + typename TDS::Full_cell::Vertex_handle_iterator vit(full_cell()->vertices_begin()); + typedef typename TDS::Vertex_handle Vertex_handle; + while( vit != full_cell()->vertices_end() ) + { + if( Vertex_handle() == *vit ) + break; // The full cell has no more vertices + if( this == &(**vit) ) + { + found = true; + break; + } + ++vit; + } + if( ! found ) + { + if( verbose ) + CGAL_warning_msg(false, "vertex's adjacent full cell does not contain that vertex."); + return false; + } + return true; + } + +public: // FOR MEMORY MANAGEMENT + + void* for_compact_container() const { return full_cell_.for_compact_container(); } + void* & for_compact_container() { return full_cell_.for_compact_container(); } + +}; // end of Triangulation_ds_vertex + +// FUNCTIONS THAT ARE NOT MEMBER FUNCTIONS: + +template < class TDS > +std::istream & +operator>>(std::istream & is, Triangulation_ds_vertex &) /* Concept */ +{ + /*if( is_ascii(is) ) + {} + else {}*/ + return is; +} + +template< class TDS > +std::ostream & +operator<<(std::ostream & os, const Triangulation_ds_vertex &) /* Concept */ +{ + /*if( is_ascii(os) ) + { + os << '\n'; + } + else {}*/ + return os; +} + +// Special case: specialization when template parameter is void. 
+ +template<> +class Triangulation_ds_vertex +{ +public: + typedef internal::Triangulation::Dummy_TDS Triangulation_data_structure; + typedef Triangulation_data_structure::Full_cell_handle Full_cell_handle; /* Concept */ + template + struct Rebind_TDS /* Concept */ + { + typedef Triangulation_ds_vertex Other; + }; +}; + +} //namespace CGAL + +#endif // CGAL_TRIANGULATION_DS_VERTEX_H diff --git a/src/common/include/gudhi_patches/CGAL/Triangulation_face.h b/src/common/include/gudhi_patches/CGAL/Triangulation_face.h new file mode 100644 index 00000000..bc9c1781 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/Triangulation_face.h @@ -0,0 +1,111 @@ +// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France). +// All rights reserved. +// +// This file is part of CGAL (www.cgal.org). +// You can redistribute it and/or modify it under the terms of the GNU +// General Public License as published by the Free Software Foundation, +// either version 3 of the License, or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Samuel Hornus + +#ifndef CGAL_TRIANGULATION_FACE_H +#define CGAL_TRIANGULATION_FACE_H + +#include +#include + +namespace CGAL { + +template< typename TDS > +class Triangulation_face +{ + typedef typename internal::Dimen_plus_one::type Dimen_plus; +public: + typedef TDS Triangulation_data_structure; + typedef typename TDS::Full_cell_handle Full_cell_handle; /* Concept */ + typedef typename TDS::Vertex_handle Vertex_handle; /* Concept */ + typedef internal::S_or_D_array Indices; + +protected: + Full_cell_handle full_cell_; + Indices indices_; + +public: + explicit Triangulation_face(Full_cell_handle s) /* Concept */ + : full_cell_(s), indices_(s->maximal_dimension()+2) + { + CGAL_assertion( Full_cell_handle() != s ); + clear(); + } + + explicit Triangulation_face(const int maximal_dim) /* Concept */ + : full_cell_(), indices_(maximal_dim+2) + { + clear(); + } + + Triangulation_face(const Triangulation_face & f) /* Concept */ + : full_cell_(f.full_cell_), indices_(f.indices_) + {} + + int face_dimension() const /* Concept */ + { + int i(0); + while( -1 != indices_[i] ) ++i; + return (i-1); + } + + Full_cell_handle full_cell() const /* Concept */ + { + return full_cell_; + } + + int index(const int i) const /* Concept */ + { + CGAL_precondition( (0 <= i) && (i <= face_dimension()) ); + return indices_[i]; + } + + Vertex_handle vertex(const int i) const /* Concept */ + { + int j = index(i); + if( j == -1 ) + return Vertex_handle(); + return full_cell()->vertex(j); + } + +// - - - - - - - - - - - - - - - - - - UPDATE FUNCTIONS + + void clear() /* Concept */ + { + const std::size_t d = indices_.size(); + for(std::size_t i = 0; i < d; ++i ) + indices_[i] = -1; + } + + void set_full_cell(Full_cell_handle s) /* Concept */ + { + CGAL_precondition( Full_cell_handle() != s ); + full_cell_ = s; + } + + void set_index(const int i, const int idx) /* Concept */ + { + CGAL_precondition( (0 <= i) && ((size_t)i+1 < indices_.size()) ); + CGAL_precondition( (0 <= idx) && ((size_t)idx < indices_.size()) ); + indices_[i] = idx; + } +}; + +} //namespace CGAL + +#endif // CGAL_TRIANGULATION_FACE_H diff --git 
a/src/common/include/gudhi_patches/CGAL/Triangulation_full_cell.h b/src/common/include/gudhi_patches/CGAL/Triangulation_full_cell.h new file mode 100644 index 00000000..a0c5246f --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/Triangulation_full_cell.h @@ -0,0 +1,148 @@ +// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France). +// All rights reserved. +// +// This file is part of CGAL (www.cgal.org). +// You can redistribute it and/or modify it under the terms of the GNU +// General Public License as published by the Free Software Foundation, +// either version 3 of the License, or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Samuel Hornus + +#ifndef CGAL_TRIANGULATION_SIMPLEX_H +#define CGAL_TRIANGULATION_SIMPLEX_H + +#include +#include +#include +#include + +namespace CGAL { + +struct No_full_cell_data {}; + +template< class TriangulationTraits, typename Data_ = No_full_cell_data, class TDSFullCell = Default > +class Triangulation_full_cell : public Default::Get >::type +{ + // The default type for TDSFullCell is Triangulation_ds_full_cell<> : + typedef typename Default::Get >::type + Base; + typedef Triangulation_full_cell Self; +public: + typedef Data_ Data; + typedef typename Base::Vertex_handle Vertex_handle; + typedef typename Base::Vertex_const_handle Vertex_const_handle; + typedef typename Base::Vertex_handle_const_iterator Vertex_handle_const_iterator; + typedef typename Base::Full_cell_const_handle Full_cell_const_handle; + typedef typename TriangulationTraits::Point_d Point; + typedef typename TriangulationTraits::Point_d Point_d; + +private: // DATA MEMBERS + Data data_; + +public: + + using Base::vertices_begin; + using Base::vertices_end; + + template< class TDS2 > + struct Rebind_TDS + { + typedef typename Base::template Rebind_TDS::Other TDSFullCell2; + typedef Triangulation_full_cell Other; + }; + + Triangulation_full_cell(const int d) + : Base(d), data_() {} + + Triangulation_full_cell(const Self & s) + : Base(s), data_(s.data_) {} + + const Data & data() const + { + return data_; + } + + Data & data() + { + return data_; + } + + struct Point_from_vertex_handle + { + typedef Vertex_handle argument_type; + typedef Point result_type; + result_type & operator()(argument_type & x) const + { + return x->point(); + } + const result_type & operator()(const argument_type & x) const + { + return x->point(); + } + }; + +protected: + + typedef CGAL::Iterator_project< + Vertex_handle_const_iterator, + internal::Triangulation::Point_from_vertex_handle + > Point_const_iterator; + + Point_const_iterator points_begin() const + { return Point_const_iterator(Base::vertices_begin()); } + Point_const_iterator points_end() const + { return Point_const_iterator(Base::vertices_end()); } +}; + +// FUNCTIONS THAT ARE NOT MEMBER FUNCTIONS: + +inline +std::istream & +operator>>(std::istream & is, No_full_cell_data &) +{ + return is; +} + +inline +std::ostream & +operator<<(std::ostream & os, const No_full_cell_data &) +{ + return os; +} + +template < typename TDS, typename Data, typename SSP > +std::ostream & +operator<<(std::ostream & O, const Triangulation_full_cell & s) +{ + /*if( is_ascii(O) ) + { + // os << '\n'; + } + else {}*/ + O << 
s.data(); + return O; +} + +template < typename TDS, typename Data, typename SSP > +std::istream & +operator>>(std::istream & I, Triangulation_full_cell & s) +{ + /*if( is_ascii(I) ) + {} + else {}*/ + I >> s.data(); + return I; +} + +} //namespace CGAL + +#endif // CGAL_TRIANGULATION_SIMPLEX_H diff --git a/src/common/include/gudhi_patches/CGAL/Triangulation_vertex.h b/src/common/include/gudhi_patches/CGAL/Triangulation_vertex.h new file mode 100644 index 00000000..f364717f --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/Triangulation_vertex.h @@ -0,0 +1,128 @@ +// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France). +// All rights reserved. +// +// This file is part of CGAL (www.cgal.org). +// You can redistribute it and/or modify it under the terms of the GNU +// General Public License as published by the Free Software Foundation, +// either version 3 of the License, or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Samuel Hornus + +#ifndef CGAL_TRIANGULATION_VERTEX_H +#define CGAL_TRIANGULATION_VERTEX_H + +#include +#include + +namespace CGAL { + +struct No_vertex_data {}; + +template< class TriangulationTraits, typename Data_ = No_vertex_data, class TDSVertex = Default > +class Triangulation_vertex : public Default::Get >::type +{ + // The default type for TDSVertex is Triangulation_ds_vertex<> : + typedef typename Default::Get >::type + Base; + typedef Triangulation_vertex Self; +public: + typedef Data_ Data; + typedef typename TriangulationTraits::Point_d Point; + typedef typename TriangulationTraits::Point_d Point_d; + typedef typename Base::Full_cell_handle Full_cell_handle; + + template + struct Rebind_TDS + { + typedef typename Base::template Rebind_TDS::Other TDSVertex2; + typedef Triangulation_vertex Other; + }; + +private: // DATA MEMBERS + Point point_; + Data data_; + +public: + template< typename T > + Triangulation_vertex(Full_cell_handle s, const Point & p, const T & t) + : Base(s), point_(p), data_(t) {} + Triangulation_vertex(Full_cell_handle s, const Point & p) + : Base(s), point_(p), data_() {} + template< typename T > + Triangulation_vertex(const Point & p, const T & t) + : Base(), point_(p), data_(t) {} + Triangulation_vertex(const Point & p) + : Base(), point_(p), data_() {} + Triangulation_vertex() : Base(), point_(), data_() {} + + ~Triangulation_vertex() {} + + /// Set the position in space of the vertex to 'p' + void set_point(const Point & p) + { + point_ = p; + } + + /// Returns the position in space of the vertex + const Point & point() const + { + return point_; + } + + const Data & data() const + { + return data_; + } + + Data & data() + { + return data_; + } + +}; // end of Triangulation_vertex + +// NON CLASS-MEMBER FUNCTIONS + +inline +std::istream & +operator>>(std::istream & is, No_vertex_data &) +{ + return is; +} + +inline +std::ostream & +operator<<(std::ostream & os, const No_vertex_data &) +{ + return os; +} + +template < class A, typename Data, class B > +std::istream & +operator>>(std::istream & is, Triangulation_vertex & v) +{ + is >> v.point(); + return (is >> v.data()); +} + +template< class A, typename Data, class B > +std::ostream & +operator<<(std::ostream & os, const 
Triangulation_vertex & v) +{ + os << v.point(); + os << v.data(); + return os; +} + +} //namespace CGAL + +#endif // CGAL_TRIANGULATION_VERTEX_H diff --git a/src/common/include/gudhi_patches/CGAL/argument_swaps.h b/src/common/include/gudhi_patches/CGAL/argument_swaps.h new file mode 100644 index 00000000..aa16f29b --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/argument_swaps.h @@ -0,0 +1,88 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_ARGUMENT_SWAPS_H +#define CGAL_ARGUMENT_SWAPS_H + +#include +#include + +#ifndef CGAL_CXX11 +#include +#include +#endif + +namespace CGAL { + +#ifdef CGAL_CXX11 + +namespace internal { + +template struct Apply_to_last_then_rest_; + +template +struct Apply_to_last_then_rest_ { + typedef typename Apply_to_last_then_rest_::result_type result_type; + inline result_type operator()(F&&f,T&&t,U&&...u)const{ + return Apply_to_last_then_rest_()( + std::forward(f), + std::forward(u)..., + std::forward(t)); + } +}; + +template +struct Apply_to_last_then_rest_<0,F,T,U...> { + typedef decltype(std::declval()(std::declval(), std::declval()...)) result_type; + inline result_type operator()(F&&f,T&&t,U&&...u)const{ + return std::forward(f)(std::forward(t), std::forward(u)...); + } +}; + +} // namespace internal + + +struct Apply_to_last_then_rest { + template inline + typename internal::Apply_to_last_then_rest_::result_type + operator()(F&&f,T&&t,U&&...u)const{ + return internal::Apply_to_last_then_rest_()( + std::forward(f), + std::forward(t), + std::forward(u)...); + } +}; + +#else // CGAL_CXX11 + +struct Apply_to_last_then_rest { +#define CGAL_CODE(Z,N,_) template \ + typename boost::result_of::type \ + operator()(F const&f, BOOST_PP_ENUM_BINARY_PARAMS(N,T,const&t), T const&t) const { \ + return f(t,BOOST_PP_ENUM_PARAMS(N,t)); \ + } + BOOST_PP_REPEAT_FROM_TO(1,11,CGAL_CODE,_) +#undef CGAL_CODE +}; + +#endif // CGAL_CXX11 + +} // namespace CGAL + +#endif // CGAL_ARGUMENT_SWAPS_H diff --git a/src/common/include/gudhi_patches/CGAL/determinant_of_vectors.h b/src/common/include/gudhi_patches/CGAL/determinant_of_vectors.h new file mode 100644 index 00000000..e1bad64e --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/determinant_of_vectors.h @@ -0,0 +1,117 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. 
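Usage sketch (illustrative only, not part of the patch) for the Apply_to_last_then_rest helper defined in argument_swaps.h above: it invokes a functor with the last argument moved to the front and the remaining arguments kept in order. The functor name below is made up for the example; result_type is provided so that the non-C++11 (boost::result_of) code path also compiles, and both paths give the same result.

#include <CGAL/argument_swaps.h>
#include <cassert>

struct Scale_sum {
  typedef int result_type;  // needed by the non-C++11 boost::result_of path
  // f(scale, x, y) = scale * (x + y)
  int operator()(int scale, int x, int y) const { return scale * (x + y); }
};

int main() {
  // The last argument (10) is passed first, so this evaluates
  // Scale_sum()(10, 1, 2) == 30.
  int r = CGAL::Apply_to_last_then_rest()(Scale_sum(), 1, 2, 10);
  assert(r == 30);
  return 0;
}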
+// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_DETVEC_H +#define CGAL_DETVEC_H +#include +#include + +namespace CGAL { + // TODO: determine whether it is better to pass them by lines or columns. + + template inline + NT determinant_of_vectors(Vector const&a, Vector const&b){ + return determinant(a[0],a[1],b[0],b[1]); + } + template inline + typename Sgn::result_type + sign_of_determinant_of_vectors(Vector const&a, Vector const&b){ + return sign_of_determinant(a[0],a[1],b[0],b[1]); + } + + template + NT determinant_of_vectors(Vector const&a, Vector const&b, + Vector const&c){ + return determinant(a[0],a[1],a[2],b[0],b[1],b[2],c[0],c[1],c[2]); + } + template + typename Sgn::result_type + sign_of_determinant_of_vectors(Vector const&a, Vector const&b, + Vector const&c){ + return sign_of_determinant(a[0],a[1],a[2],b[0],b[1],b[2],c[0],c[1],c[2]); + } + + template + NT determinant_of_vectors(Vector const&a, Vector const&b, + Vector const&c, Vector const&d){ + return determinant( + a[0],a[1],a[2],a[3], + b[0],b[1],b[2],b[3], + c[0],c[1],c[2],c[3], + d[0],d[1],d[2],d[3]); + } + template + typename Sgn::result_type + sign_of_determinant_of_vectors(Vector const&a, Vector const&b, + Vector const&c, Vector const&d){ + return sign_of_determinant( + a[0],a[1],a[2],a[3], + b[0],b[1],b[2],b[3], + c[0],c[1],c[2],c[3], + d[0],d[1],d[2],d[3]); + } + + template + NT determinant_of_vectors(Vector const&a, Vector const&b, + Vector const&c, Vector const&d, Vector const&e){ + return determinant( + a[0],a[1],a[2],a[3],a[4], + b[0],b[1],b[2],b[3],b[4], + c[0],c[1],c[2],c[3],c[4], + d[0],d[1],d[2],d[3],d[4], + e[0],e[1],e[2],e[3],e[4]); + } + template + typename Sgn::result_type + sign_of_determinant_of_vectors(Vector const&a, Vector const&b, + Vector const&c, Vector const&d, Vector const&e){ + return sign_of_determinant( + a[0],a[1],a[2],a[3],a[4], + b[0],b[1],b[2],b[3],b[4], + c[0],c[1],c[2],c[3],c[4], + d[0],d[1],d[2],d[3],d[4], + e[0],e[1],e[2],e[3],e[4]); + } + + template + NT determinant_of_vectors(Vector const&a, Vector const&b, + Vector const&c, Vector const&d, Vector const&e, Vector const&f){ + return determinant( + a[0],a[1],a[2],a[3],a[4],a[5], + b[0],b[1],b[2],b[3],b[4],b[5], + c[0],c[1],c[2],c[3],c[4],c[5], + d[0],d[1],d[2],d[3],d[4],d[5], + e[0],e[1],e[2],e[3],e[4],e[5], + f[0],f[1],f[2],f[3],f[4],f[5]); + } + template + typename Sgn::result_type + sign_of_determinant_of_vectors(Vector const&a, Vector const&b, + Vector const&c, Vector const&d, Vector const&e, Vector const&f){ + return sign_of_determinant( + a[0],a[1],a[2],a[3],a[4],a[5], + b[0],b[1],b[2],b[3],b[4],b[5], + c[0],c[1],c[2],c[3],c[4],c[5], + d[0],d[1],d[2],d[3],d[4],d[5], + e[0],e[1],e[2],e[3],e[4],e[5], + f[0],f[1],f[2],f[3],f[4],f[5]); + } + +} +#endif diff --git a/src/common/include/gudhi_patches/CGAL/internal/Combination_enumerator.h b/src/common/include/gudhi_patches/CGAL/internal/Combination_enumerator.h new file mode 100644 index 00000000..f411e827 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/internal/Combination_enumerator.h @@ -0,0 +1,148 @@ +// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France). +// All rights reserved. +// +// This file is part of CGAL (www.cgal.org). 
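Usage sketch (illustrative only, not part of the patch) for the determinant helpers just above: they take 2 to 6 vectors indexable by operator[] and forward the coordinates to CGAL's determinant and sign-of-determinant routines. Assuming a regular CGAL installation for CGAL::POSITIVE and the underlying determinant code:

#include <CGAL/determinant_of_vectors.h>
#include <CGAL/enum.h>
#include <cassert>

int main() {
  // Rows of the 3x3 identity matrix.
  double a[3] = {1., 0., 0.};
  double b[3] = {0., 1., 0.};
  double c[3] = {0., 0., 1.};
  assert(CGAL::determinant_of_vectors<double>(a, b, c) == 1.0);
  assert(CGAL::sign_of_determinant_of_vectors<double>(a, b, c) == CGAL::POSITIVE);
  return 0;
}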
+// You can redistribute it and/or modify it under the terms of the GNU +// General Public License as published by the Free Software Foundation, +// either version 3 of the License, or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Samuel Hornus + +#ifndef CGAL_INTERNAL_COMBINATION_ENUMERATOR_H +#define CGAL_INTERNAL_COMBINATION_ENUMERATOR_H + +#include +#include + +namespace CGAL { + +namespace internal { + +class Combination_enumerator +{ + // types and member data + typedef std::vector Combination; + Combination combi_; + const int k_; + const int min_; + const int max_; + const int max_at_pos_0_; + +public: + + // For generating all the combinations of |k| distinct elements in the + // interval [min, max] (both included) + Combination_enumerator(const int k, const int min, const int max) + : combi_(k), k_(k), min_(min), max_(max), max_at_pos_0_(max + 1 - k) + { + CGAL_assertion_msg( min <= max, "min is larger than max"); + CGAL_assertion_msg( 1 <= k && k <= ( max - min + 1 ), "wrong value of k"); + init(); + } + + Combination_enumerator(const Combination_enumerator & c) + : combi_(c.combi_), k_(c.k_), min_(c.min_), max_(c.max_), max_at_pos_0_(c.max_at_pos_0_) + {} + + int number_of_elements() + { + return k_; + } + + void init() + { + combi_.resize(k_); + for( int i = 0; i < k_; ++i ) + element(i) = min_ + i; + } + + bool end() const + { + return ( element(0) > max_at_pos_0_ ); + } + + int element(const int i) const + { + CGAL_assertion( 0 <= i && i < k_ ); + return combi_[i]; + } + + int & element(const int i) + { + CGAL_assertion( 0 <= i && i < k_ ); + return combi_[i]; + } + + int operator[](const int i) const + { + return element(i); + } + + int & operator[](const int i) + { + return element(i); + } + + void operator++() + { + int i = k_ - 1; + int max_at_pos_i(max_); + while( ( i >= 0 ) && ( element(i) >= max_at_pos_i ) ) + { + --i; + --max_at_pos_i; + } + if( -1 == i ) + { + if( element(0) == max_at_pos_0_ ) + ++element(0); // mark then end of the enumeration with an impossible value + // Note than when we have arrived at the end of the enumeration, applying + // operator++() again does not change anything, so it is safe to + // apply it too many times. + } + else + { + ++element(i); + for( int j = i + 1; j < k_; ++j ) + element(j) = element(i) + j - i; + } + } + + Combination_enumerator operator++(int) + { + Combination_enumerator tmp(*this); + ++(*this); + return tmp; + } + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - TESTING +#if 0 + void test() + { + std::cerr << '\n'; + while( ! end() ) + { + std::cerr << '\n'; + for( int i = 0; i < k_; ++i ) + std::cerr << element(i) << ' '; + ++(*this); + } + init(); + } +#endif +}; + +} // end of namespace internal + +} // end of namespace CGAL + +#endif // CGAL_INTERNAL_COMBINATION_ENUMERATOR_H diff --git a/src/common/include/gudhi_patches/CGAL/internal/Static_or_dynamic_array.h b/src/common/include/gudhi_patches/CGAL/internal/Static_or_dynamic_array.h new file mode 100644 index 00000000..ee6195d9 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/internal/Static_or_dynamic_array.h @@ -0,0 +1,116 @@ +// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France). 
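Usage sketch (illustrative only, not part of the patch) for the Combination_enumerator above, which enumerates the k-element subsets of an integer interval in lexicographic order; the include path follows the location this patch gives the header.

#include <CGAL/internal/Combination_enumerator.h>
#include <iostream>

int main() {
  // All 2-element subsets of {0, 1, 2, 3}, produced in the order
  // {0,1} {0,2} {0,3} {1,2} {1,3} {2,3}.
  CGAL::internal::Combination_enumerator combi(2, 0, 3);
  while (!combi.end()) {
    for (int i = 0; i < combi.number_of_elements(); ++i)
      std::cout << combi[i] << ' ';
    std::cout << '\n';
    ++combi;
  }
  return 0;
}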
+// All rights reserved. +// +// This file is part of CGAL (www.cgal.org). +// You can redistribute it and/or modify it under the terms of the GNU +// General Public License as published by the Free Software Foundation, +// either version 3 of the License, or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Samuel Hornus + +#ifndef CGAL_INTERNAL_STATIC_OR_DYNAMIC_ARRAY_H +#define CGAL_INTERNAL_STATIC_OR_DYNAMIC_ARRAY_H + +#include +#include +#include +#include + +namespace CGAL { + +namespace internal { + +// Utility for adding one to an Dimension_tag: + +template +struct Dimen_plus_one; + +template<> +struct Dimen_plus_one +{ + typedef Dynamic_dimension_tag type; +}; + +template +struct Dimen_plus_one > +{ + typedef Dimension_tag type; +}; + +// A SMALL CONTAINER UTILITY FOR DYNAMIC/STATIC MEMORY MANAGEMENT + +// stores an array of static or dynamic size, depending on template parameter . + +template< typename Containee, typename D, bool WithCompactContainerHelper = false> + struct S_or_D_array; // S = static, D = dynamic + +// The case of static size: +template< typename Containee, int D, bool WithCompactContainerHelper > +struct S_or_D_array< Containee, Dimension_tag< D >, WithCompactContainerHelper > +: public array +{ + typedef array Base; + S_or_D_array(const int) + : Base() + {} + S_or_D_array(const int, const Containee & c) + : Base() + { + assign(c); + } + void* for_compact_container() const + { + return (*this)[0].for_compact_container(); + } + void* & for_compact_container() + { + return (*this)[0].for_compact_container(); + } +}; + +// The case of dynamic size +template< typename Containee > +struct S_or_D_array< Containee, Dynamic_dimension_tag, false > +: public std::vector +{ + typedef std::vector Base; + // TODO: maybe we should use some "small-vector-optimized" class. + S_or_D_array(const int d) + : Base(d) + {} + S_or_D_array(const int d, const Containee & c) + : Base(d, c) + {} +}; + +// The case of dynamic size with for_compact_container +template< typename Containee > +struct S_or_D_array< Containee, Dynamic_dimension_tag, true > +: public std::vector +{ + typedef std::vector Base; + S_or_D_array(const int d) + : Base(d), fcc_(NULL) + {} + S_or_D_array(const int d, const Containee & c) + : Base(d, c), fcc_(NULL) + {} + void* fcc_; + void* for_compact_container() const { return fcc_; } + void* & for_compact_container() { return fcc_; } +}; + +} // end of namespace internal + +} // end of namespace CGAL + +#endif // CGAL_INTERNAL_STATIC_OR_DYNAMIC_ARRAY_H diff --git a/src/common/include/gudhi_patches/CGAL/internal/Triangulation/Dummy_TDS.h b/src/common/include/gudhi_patches/CGAL/internal/Triangulation/Dummy_TDS.h new file mode 100644 index 00000000..b3a0ec98 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/internal/Triangulation/Dummy_TDS.h @@ -0,0 +1,49 @@ +// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France). +// All rights reserved. +// +// This file is part of CGAL (www.cgal.org). +// You can redistribute it and/or modify it under the terms of the GNU +// General Public License as published by the Free Software Foundation, +// either version 3 of the License, or (at your option) any later version. 
+// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Samuel Hornus + +#ifndef CGAL_INTERNAL_TRIANGULATION_DUMMY_TDS_H +#define CGAL_INTERNAL_TRIANGULATION_DUMMY_TDS_H + +namespace CGAL { + +namespace internal { +namespace Triangulation { + +struct Dummy_TDS +{ + struct Vertex {}; + struct Vertex_handle {}; + struct Vertex_iterator {}; + struct Vertex_const_handle {}; + struct Vertex_const_iterator {}; + struct Full_cell {}; + struct Full_cell_handle {}; + struct Full_cell_iterator {}; + struct Full_cell_const_handle {}; + struct Full_cell_const_iterator {}; + struct Vertex_handle_const_iterator {}; + struct Full_cell_data {}; +}; + +} // namespace Triangulation +} // namespace internal + +} //namespace CGAL + +#endif // CGAL_INTERNAL_TRIANGULATION_DUMMY_TDS_H diff --git a/src/common/include/gudhi_patches/CGAL/internal/Triangulation/Triangulation_ds_iterators.h b/src/common/include/gudhi_patches/CGAL/internal/Triangulation/Triangulation_ds_iterators.h new file mode 100644 index 00000000..7e360026 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/internal/Triangulation/Triangulation_ds_iterators.h @@ -0,0 +1,154 @@ +// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France). +// All rights reserved. +// +// This file is part of CGAL (www.cgal.org). +// You can redistribute it and/or modify it under the terms of the GNU +// General Public License as published by the Free Software Foundation, +// either version 3 of the License, or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Samuel Hornus (Well... `copy, paste and hack' of Monique Teillaud's work) + +#ifndef CGAL_INTERNAL_TRIANGULATION_TRIANGULATION_DS_ITERATORS_H +#define CGAL_INTERNAL_TRIANGULATION_TRIANGULATION_DS_ITERATORS_H + +namespace CGAL { + +namespace internal { +namespace Triangulation { + +template< typename TDS > +class Triangulation_ds_facet_iterator +{ + typedef typename TDS::Full_cell_handle Full_cell_handle; + typedef typename TDS::Facet Facet; + + typedef Facet value_type; + typedef const Facet * pointer; + typedef const Facet & reference; + typedef std::size_t size_type; + typedef std::ptrdiff_t difference_type; + typedef std::bidirectional_iterator_tag iterator_category; + + typedef Triangulation_ds_facet_iterator Facet_iterator; + + TDS & tds_; + Facet ft_; + const int cur_dim_; + +public: + Triangulation_ds_facet_iterator(TDS & tds) + : tds_(tds), ft_(tds.full_cells_begin(), 0), cur_dim_(tds.current_dimension()) + { + CGAL_assertion( cur_dim_ > 0 ); + while( ! 
canonical() ) + raw_increment(); + } + + Triangulation_ds_facet_iterator(TDS & tds, int) + : tds_(tds), ft_(tds.full_cells_end(), 0), cur_dim_(tds.current_dimension()) + { + CGAL_assertion( cur_dim_ > 0 ); + CGAL_assertion( canonical() ); + } + + Facet_iterator & operator++() + { + increment(); + return (*this); + } + + Facet_iterator operator++(int) + { + Facet_iterator tmp(*this); + increment(); + return tmp; + } + + Facet_iterator & operator--() + { + decrement(); + return (*this); + } + + Facet_iterator operator--(int) + { + Facet_iterator tmp(*this); + decrement(); + return tmp; + } + + bool operator==(const Facet_iterator & fi) const + { + return (&tds_ == &fi.tds_) && + (tds_.index_of_covertex(ft_) == fi.tds_.index_of_covertex(fi.ft_)) && + (tds_.full_cell(ft_) == fi.tds_.full_cell(fi.ft_)); + } + + bool operator!=(const Facet_iterator & fi) const + { + return !(*this == fi); + } + + reference operator*() const + { + return ft_; + } + + pointer operator->() const + { + return &ft_; + } + +private: + bool canonical() + { + if( tds_.full_cells_end() == tds_.full_cell(ft_) ) + return ( 0 == tds_.index_of_covertex(ft_) ); + return ( tds_.full_cell(ft_) < + tds_.full_cell(ft_)->neighbor(tds_.index_of_covertex(ft_)) ); + } + + void raw_decrement() + { + int i = tds_.index_of_covertex(ft_); + if( i == 0 ) + ft_ = Facet(--tds_.full_cell(ft_), cur_dim_); + else + ft_ = Facet(tds_.full_cell(ft_), i - 1); + } + + void raw_increment() + { + int i = tds_.index_of_covertex(ft_); + if( i == cur_dim_ ) + ft_ = Facet(++tds_.full_cell(ft_), 0); + else + ft_ = Facet(tds_.full_cell(ft_), i + 1); + } + + void decrement() + { + do { raw_decrement(); } while( ! canonical() ); + } + + void increment() + { + do { raw_increment(); } while( ! canonical() ); + } +}; + +} // namespace Triangulation +} // namespace internal + +} //namespace CGAL + +#endif // CGAL_INTERNAL_TRIANGULATION_TRIANGULATION_DS_ITERATORS_H diff --git a/src/common/include/gudhi_patches/CGAL/internal/Triangulation/utilities.h b/src/common/include/gudhi_patches/CGAL/internal/Triangulation/utilities.h new file mode 100644 index 00000000..a1ffc775 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/internal/Triangulation/utilities.h @@ -0,0 +1,154 @@ +// Copyright (c) 2009-2014 INRIA Sophia-Antipolis (France). +// All rights reserved. +// +// This file is part of CGAL (www.cgal.org). +// You can redistribute it and/or modify it under the terms of the GNU +// General Public License as published by the Free Software Foundation, +// either version 3 of the License, or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. 
+// +// $URL$ +// $Id$ +// +// Author(s) : Samuel Hornus + +#ifndef CGAL_INTERNAL_TRIANGULATION_UTILITIES_H +#define CGAL_INTERNAL_TRIANGULATION_UTILITIES_H + +#include + +namespace CGAL { + +namespace internal { +namespace Triangulation { + +template< class TDS > +struct Dark_full_cell_data +{ + typedef typename TDS::Full_cell_handle Full_cell_handle; + Full_cell_handle light_copy_; + int count_; + Dark_full_cell_data() : light_copy_(), count_(0) {} +}; + +template< class TDS > +struct Compare_faces_with_common_first_vertex +{ + typedef typename TDS::Face Face; + + const int d_; + +public: + + Compare_faces_with_common_first_vertex(const int d) + : d_(d) + { + CGAL_assertion( 0 < d ); + } + + explicit Compare_faces_with_common_first_vertex(); + + bool operator()(const Face & left, const Face & right) const + { + CGAL_assertion( d_ == left.face_dimension() ); + CGAL_assertion( d_ == right.face_dimension() ); + for( int i = 1; i <= d_; ++i ) + { + if( left.vertex(i) < right.vertex(i) ) + return true; + if( right.vertex(i) < left.vertex(i) ) + return false; + } + return false; + } +}; + +template< class T > +struct Compare_vertices_for_upper_face +{ + typedef typename T::Vertex_const_handle VCH; + + const T & t_; + +public: + + Compare_vertices_for_upper_face(const T & t) + : t_(t) + {} + + explicit Compare_vertices_for_upper_face(); + + bool operator()(const VCH & left, const VCH & right) const + { + if( left == right ) + return false; + if( t_.is_infinite(left) ) + return true; + if( t_.is_infinite(right) ) + return false; + return left < right; + } +}; + +template< class T > +struct Compare_points_for_perturbation +{ + typedef typename T::Geom_traits::Point_d Point; + + const T & t_; + +public: + + Compare_points_for_perturbation(const T & t) + : t_(t) + {} + + explicit Compare_points_for_perturbation(); + + bool operator()(const Point * left, const Point * right) const + { + return (SMALLER == t_.geom_traits().compare_lexicographically_d_object()(*left, *right)); + } +}; + +template< class T > +struct Point_from_pointer +{ + typedef const typename T::Geom_traits::Point_d * argument_type; + typedef const typename T::Geom_traits::Point_d result_type; + result_type & operator()(argument_type & x) const + { + return (*x); + } + const result_type & operator()(const argument_type & x) const + { + return (*x); + } +}; + +template< typename Vertex_handle, typename Point > +struct Point_from_vertex_handle +{ + typedef Vertex_handle argument_type; + typedef Point result_type; + result_type & operator()(argument_type & x) const + { + return x->point(); + } + const result_type & operator()(const argument_type & x) const + { + return x->point(); + } +}; + +} // namespace Triangulation +} // namespace internal + +} //namespace CGAL + +#endif // CGAL_INTERNAL_TRIANGULATION_UTILITIES_H diff --git a/src/common/include/gudhi_patches/CGAL/iterator_from_indices.h b/src/common/include/gudhi_patches/CGAL/iterator_from_indices.h new file mode 100644 index 00000000..110bb4be --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/iterator_from_indices.h @@ -0,0 +1,75 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. 
+// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_ITERATOR_FROM_INDICES_H +#define CGAL_ITERATOR_FROM_INDICES_H +#include +#include +namespace CGAL { +template +struct Default_coordinate_access { + typedef Ref_ result_type; + template Ref_ operator()(T const& t, std::ptrdiff_t i)const{ + return t[i]; + } +}; + +//TODO: default type for Value_: typename same_cv::type::value_type>::type +template ()[0]) +#else + Value_& +#endif + , class Coord_access = Default_coordinate_access + > +class Iterator_from_indices +: public boost::iterator_facade, + Value_, std::bidirectional_iterator_tag, Ref_> +{ + friend class boost::iterator_core_access; + //FIXME: use int to save space + //TODO: use a tuple to save space when Coord_access is empty + typedef std::ptrdiff_t index_t; + Container_* cont; + index_t index; + Coord_access ca; + void increment(){ ++index; } + void decrement(){ --index; } + void advance(std::ptrdiff_t n){ index+=n; } + ptrdiff_t distance_to(Iterator_from_indices const& other)const{ + return other.index-index; + } + bool equal(Iterator_from_indices const& other)const{ + return index==other.index; + } + Ref_ dereference()const{ + //FIXME: use the functor properly + //Uh, and what did I mean by that? + return ca(*cont,index); + } + public: + Iterator_from_indices(Container_& cont_,std::size_t n) + : cont(&cont_), index(n) {} + template + Iterator_from_indices(Container_& cont_,std::size_t n,T const&t) + : cont(&cont_), index(n), ca(t) {} +}; +} +#endif // CGAL_ITERATOR_FROM_INDICES_H diff --git a/src/common/include/gudhi_patches/CGAL/transforming_iterator.h b/src/common/include/gudhi_patches/CGAL/transforming_iterator.h new file mode 100644 index 00000000..15ea19a5 --- /dev/null +++ b/src/common/include/gudhi_patches/CGAL/transforming_iterator.h @@ -0,0 +1,123 @@ +// Copyright (c) 2014 +// INRIA Saclay-Ile de France (France) +// +// This file is part of CGAL (www.cgal.org); you can redistribute it and/or +// modify it under the terms of the GNU Lesser General Public License as +// published by the Free Software Foundation; either version 3 of the License, +// or (at your option) any later version. +// +// Licensees holding a valid commercial license may use this file in +// accordance with the commercial license agreement provided with the software. +// +// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE +// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. +// +// $URL$ +// $Id$ +// +// Author(s) : Marc Glisse + +#ifndef CGAL_TRANSFORMING_ITERATOR_H +#define CGAL_TRANSFORMING_ITERATOR_H +#include +#include +#include +#include +#include +#include +#include +#include +#include + +// Inspired by the boost version, but more compact and +// without any iterator_category games. 
+
+namespace CGAL {
+namespace internal {
+
+// non-empty case
+template <class T, bool = boost::is_empty<T>::value> struct Functor_as_base {
+  Functor_as_base(){}
+  Functor_as_base(T const& t):f(t){}
+  //template <class T2> Functor_as_base(Functor_as_base<T2> const&g):f(g.functor()){}
+  T const& functor()const{return f;}
+  T & functor() {return f;}
+  private:
+  T f;
+};
+
+// empty case
+template <class T> struct Functor_as_base<T, true> : public T {
+  Functor_as_base(){}
+  Functor_as_base(T const& t):T(t){}
+  //template <class T2> Functor_as_base(Functor_as_base<T2> const&g):T(g.functor()){}
+  T const& functor()const{return *this;}
+  T & functor() {return *this;}
+};
+
+template <class Derived, class F, class Iter, class Ref, class Val>
+class transforming_iterator_helper
+{
+  typedef std::iterator_traits<Iter> Iter_traits;
+  typedef typename Iter_traits::reference Iter_ref;
+  typedef typename Default::Get<Ref,
+#ifdef CGAL_CXX11
+    decltype(std::declval<F>()(std::declval<Iter_ref>()))
+#else
+    typename boost::result_of<F(typename Iter_traits::value_type)>::type
+    // should be reference instead of value_type
+#endif
+    >::type reference_;
+
+  typedef typename Default::Get<Val, typename boost::remove_cv<typename boost::remove_reference<reference_>::type>::type>::type value_type;
+
+  // Crappy heuristic. If we have *it that returns a Weighted_point and F that returns a reference to the Point contained in the Weighted_point it takes as argument, we do NOT want the transformed iterator to return a reference to the temporary *it. On the other hand, if *it returns an int n, and F returns a reference to array[n] it is not so good to lose the reference. This probably should be done elsewhere and should at least be made optional...
+  typedef typename boost::mpl::if_<
+    boost::mpl::or_<boost::is_reference<Iter_ref>,
+    boost::is_integral<Iter_ref> >,
+    reference_, value_type>::type reference;
+
+  public:
+  typedef boost::iterator_adaptor<
+    Derived,
+    Iter,
+    value_type,
+    typename Iter_traits::iterator_category,
+    reference
+    > type;
+};
+}
+
+template <class F, class Iter, class Ref = Default, class Val = Default>
+class transforming_iterator
+: public internal::transforming_iterator_helper<transforming_iterator<F,Iter,Ref,Val>,F,Iter,Ref,Val>::type,
+private internal::Functor_as_base<F>
+{
+  friend class boost::iterator_core_access;
+  typedef typename internal::transforming_iterator_helper<transforming_iterator,F,Iter,Ref,Val>::type Base;
+  typedef internal::Functor_as_base<F> Functor_base;
+  typename Base::reference dereference()const{
+    return functor()(*this->base_reference());
+  }
+  public:
+  using Functor_base::functor;
+  transforming_iterator(){}
+  explicit transforming_iterator(Iter i,F const& f=F())
+    :Base(i),Functor_base(f){}
+  template <class F2, class I2, class R2, class V2>
+  transforming_iterator(
+      transforming_iterator<F2,I2,R2,V2> const&i,
+      typename boost::enable_if_convertible<I2, Iter>::type* = 0,
+      typename boost::enable_if_convertible<F2, F>::type* = 0)
+    : Base(i.base()),Functor_base(i.functor()) {}
+
+};
+
+template <class F, class Iter> inline
+transforming_iterator<F, Iter> make_transforming_iterator(Iter i, F const&f=F()) {
+  return transforming_iterator<F, Iter>(i,f);
+}
+
+}
+
+#endif // CGAL_TRANSFORMING_ITERATOR_H
diff --git a/src/common/include/gudhi_patches/CGAL/transforming_pair_iterator.h b/src/common/include/gudhi_patches/CGAL/transforming_pair_iterator.h
new file mode 100644
index 00000000..48dac132
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/transforming_pair_iterator.h
@@ -0,0 +1,127 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_TRANSFORMING_PAIR_ITERATOR_H
+#define CGAL_TRANSFORMING_PAIR_ITERATOR_H
+// Should be a combination of transform_iterator and zip_iterator,
+// but boost's iterator_category games are a pain.
+
+#include <CGAL/transforming_iterator.h>
+#include <boost/iterator/iterator_facade.hpp>
+#include <boost/type_traits.hpp>
+
+
+
+namespace CGAL {
+namespace internal {
+template <class Cat1, class Cat2, bool = boost::is_convertible<Cat1, Cat2>::value>
+struct Min_category {
+  CGAL_static_assertion((boost::is_convertible<Cat2, Cat1>::value));
+  typedef Cat1 type;
+};
+
+template <class Cat1, class Cat2>
+struct Min_category<Cat1, Cat2, true> {
+  typedef Cat2 type;
+};
+
+
+template <class Derived, class F, class It1, class It2, class Ref, class Val>
+class transforming_pair_iterator_helper
+{
+  typedef typename Min_category<
+    typename std::iterator_traits<It1>::iterator_category,
+    typename std::iterator_traits<It2>::iterator_category>
+    ::type iterator_category;
+
+  typedef typename Default::Get<Ref,
+#ifdef CGAL_CXX11
+    decltype(std::declval<F>()(std::declval<typename std::iterator_traits<It1>::reference>(),std::declval<typename std::iterator_traits<It2>::reference>()))
+#else
+    typename boost::result_of<F(typename std::iterator_traits<It1>::value_type,typename std::iterator_traits<It2>::value_type)>::type
+    // should be reference instead of value_type
+#endif
+    >::type reference;
+
+  typedef typename Default::Get<Val, typename boost::remove_cv<typename boost::remove_reference<reference>::type>::type>::type value_type;
+
+  public:
+  typedef boost::iterator_facade<
+    Derived,
+    value_type,
+    iterator_category,
+    reference
+    // expect ptrdiff_t is good enough for difference
+    > type;
+};
+}
+
+template <class F, class It1, class It2, class Ref = Default, class Val = Default>
+class transforming_pair_iterator
+: public internal::transforming_pair_iterator_helper<transforming_pair_iterator<F,It1,It2,Ref,Val>,F,It1,It2,Ref,Val>::type,
+private internal::Functor_as_base<F>
+{
+  It1 iter1; It2 iter2;
+  friend class boost::iterator_core_access;
+  typedef typename internal::transforming_pair_iterator_helper<transforming_pair_iterator,F,It1,It2,Ref,Val>::type Base;
+  typedef internal::Functor_as_base<F> Functor_base;
+  typename Base::reference dereference()const{
+    return functor()(*iter1,*iter2);
+  }
+  bool equal(transforming_pair_iterator const&i)const{
+    bool b=(iter1==i.iter1);
+    CGAL_assertion(b==(iter2==i.iter2));
+    //FIXME: or do we want only one driving iterator
+    return b;
+  }
+  void increment(){ ++iter1; ++iter2; }
+  void decrement(){ --iter1; --iter2; }
+  void advance(std::ptrdiff_t n){
+    std::advance(iter1,n);
+    std::advance(iter2,n);
+  }
+  std::ptrdiff_t distance_to(transforming_pair_iterator const&i)const{
+    std::ptrdiff_t dist=std::distance(iter1,i.iter1);
+    CGAL_assertion(dist==std::distance(iter2,i.iter2));
+    return dist;
+  }
+  public:
+  using Functor_base::functor;
+  transforming_pair_iterator(){}
+  explicit transforming_pair_iterator(It1 i1,It2 i2,F const& f=F())
+    :Functor_base(f),iter1(i1),iter2(i2){}
+  template <class F2, class J1, class J2, class R2, class V2>
+  transforming_pair_iterator(
+      transforming_pair_iterator<F2,J1,J2,R2,V2> const&i,
+      typename boost::enable_if_convertible<J1, It1>::type* = 0,
+      typename boost::enable_if_convertible<J2, It2>::type* = 0,
+      typename boost::enable_if_convertible<F2, F>::type* = 0)
+    : Functor_base(i.functor()),iter1(i.iter1),iter2(i.iter2) {}
+
+};
+
+template <class F, class It1, class It2> inline
+transforming_pair_iterator<F, It1, It2> make_transforming_pair_iterator(It1 i1, It2 i2, F const&f=F()) {
+  return transforming_pair_iterator<F, It1, It2>(i1,i2,f);
+}
+
+}
+
+#endif // CGAL_TRANSFORMING_PAIR_ITERATOR_H
diff --git a/src/common/include/gudhi_patches/CGAL/typeset.h b/src/common/include/gudhi_patches/CGAL/typeset.h
new file mode 100644
index 00000000..d4e24281
--- /dev/null
+++ b/src/common/include/gudhi_patches/CGAL/typeset.h
@@ -0,0 +1,117 @@
+// Copyright (c) 2014
+// INRIA Saclay-Ile de France (France)
+//
+// This file is part of CGAL (www.cgal.org); you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 3 of the License,
+// or (at your option) any later version.
+//
+// Licensees holding a valid commercial license may use this file in
+// accordance with the commercial license agreement provided with the software.
+//
+// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
+// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+//
+// $URL$
+// $Id$
+//
+// Author(s) : Marc Glisse
+
+#ifndef CGAL_TYPESET_H
+#define CGAL_TYPESET_H
+#include <CGAL/config.h>
+#ifdef CGAL_CXX11
+#include <type_traits>
+#else
+#include <boost/type_traits.hpp>
+#endif
+
+// Sometimes using tuple just to list types is overkill (takes forever to
+// instantiate).
+
+namespace CGAL {
+#ifdef CGAL_CXX11
+  template <class...> struct typeset;
+  template <class H, class... T> struct typeset<H, T...> {
+    typedef H head;
+    typedef typeset<T...> tail;
+    typedef typeset type;
+    template <class X> using contains = typename
+      std::conditional<
+        std::is_same<H, X>::value,
+        std::true_type,
+        typename tail::template contains<X>
+      >::type;
+    template <class X> using add = typename
+      std::conditional<
+        contains<X>::value,
+        typeset<H, T...>,
+        typeset<H, T..., X>
+      >::type;
+  };
+  template<> struct typeset<> {
+    typedef typeset type;
+    template <class X> using contains = std::false_type;
+    template <class X> using add = typeset<X>;
+  };
+#else
+  template <class H, class T> struct typeset;
+  template <class H = void, class T = typename boost::mpl::if_<boost::is_same<H, void>, void, typeset<void, void> >::type >
+  struct typeset {
+    typedef typeset type;
+    typedef H head;
+    typedef T tail;
+    template <class X> struct contains :
+      boost::mpl::if_<boost::is_same<H, X>,boost::true_type,typename tail::template contains<X> >::type
+    {};
+    template <class X> struct add;
+    //boost::mpl::if_<contains<X>,typeset,typeset<X, typeset> >::type
+  };
+  template<> struct typeset<> {
+    typedef typeset type;
+    template <class X> struct contains : boost::false_type {};
+    template <class X> struct add : CGAL::typeset<X> {};
+  };
+
+  template
+  template
+  struct typeset::add : typeset::type> {};
+  template
+  template
+  struct typeset::add : typeset {};
+#endif
+
+  template <class T1, class T2> struct typeset_union_ :
+    typeset_union_<typename T1::template add<typename T2::head>::type, typename T2::tail>
+  {};
+  template <class T> struct typeset_union_<T, typeset<> > : T {};
+
+  template <class T1, class T2>
+  struct typeset_intersection_ {
+    typedef typename T1::head H;
+    typedef typename typeset_intersection_<typename T1::tail, T2>::type U;
+    typedef typename
+#ifdef CGAL_CXX11
+      std::conditional<T2::template contains<H>::value,
+#else
+      boost::mpl::if_<typename T2::template contains<H>,
+#endif
+      typename U::template add<H>::type, U>::type type;
+  };
+  template <class T>
+  struct typeset_intersection_<typeset<>, T> : typeset<> {};
+
+#ifdef CGAL_CXX11
+  template <class T1, class T2>
+  using typeset_union = typename typeset_union_<T1, T2>::type;
+  template <class T1, class T2>
+  using typeset_intersection = typename typeset_intersection_<T1, T2>::type;
+#else
+  template <class T1, class T2>
+  struct typeset_union : typeset_union_<T1, T2>::type {};
+  template <class T1, class T2>
+  struct typeset_intersection : typeset_intersection_<T1, T2>::type {};
+#endif
+}
+#endif
-- cgit v1.2.3 

From 92479a53aa228c4e212067b375ab1b665116834e Mon Sep 17 00:00:00 2001
From: vrouvrea
Date: Tue, 11 Oct 2016 15:24:42 +0000
Subject: Fix doxygen warning CMake message fix CMake test tangential examples

git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/trunk@1704 636b058d-ea47-450e-bf9e-a15bfbe3eedb
Former-commit-id: efa3c76e767d6ba55367b57c10ab87844c968457
---
 src/Simplex_tree/doc/Intro_simplex_tree.h       |  4 ++--
 src/Tangential_complex/benchmark/CMakeLists.txt | 22 ++++----------------
 src/Tangential_complex/example/CMakeLists.txt   | 27 +++++++++----------------
 src/common/doc/main_page.h                      | 10 ++++-----
 4 files changed, 21 insertions(+), 42 deletions(-)

(limited to 'src/common')

diff --git a/src/Simplex_tree/doc/Intro_simplex_tree.h b/src/Simplex_tree/doc/Intro_simplex_tree.h
index be061785..940dd694 100644
--- 
a/src/Simplex_tree/doc/Intro_simplex_tree.h +++ b/src/Simplex_tree/doc/Intro_simplex_tree.h @@ -66,8 +66,8 @@ Expand the simplex tree in 3.8e-05 s. Information of the Simplex Tree: Number of vertices = 10 Number of simplices = 98 \endcode * - * \li - * Simplex_tree/simplex_tree_from_alpha_shapes_3.cpp - Simplex tree is computed and displayed from a 3D alpha + * \li + * Simplex_tree/example_alpha_shapes_3_simplex_tree_from_off_file.cpp - Simplex tree is computed and displayed from a 3D alpha * complex (Requires CGAL, GMP and GMPXX to be installed) * * diff --git a/src/Tangential_complex/benchmark/CMakeLists.txt b/src/Tangential_complex/benchmark/CMakeLists.txt index 12488201..a217d6e6 100644 --- a/src/Tangential_complex/benchmark/CMakeLists.txt +++ b/src/Tangential_complex/benchmark/CMakeLists.txt @@ -13,28 +13,14 @@ endif() # need CGAL 4.8 if(CGAL_FOUND) if (NOT CGAL_VERSION VERSION_LESS 4.8.0) - message(STATUS "CGAL version: ${CGAL_VERSION}.") - - find_package(Eigen3 3.1.0) if (EIGEN3_FOUND) - message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") - include( ${EIGEN3_USE_FILE} ) - add_executable(Tangential_complex_benchmark benchmark_tc.cpp) target_link_libraries(Tangential_complex_benchmark ${Boost_DATE_TIME_LIBRARY} ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) if (TBB_FOUND) target_link_libraries(Tangential_complex_benchmark ${TBB_LIBRARIES}) - endif() - - # Do not forget to copy test files in current binary dir - #file(COPY "${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) - - else() - message(WARNING "Eigen3 not found. Version 3.1.0 is required for Tangential complex feature.") - endif() - else() - message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile Tangential complex feature. Version 4.8.0 is required.") - endif () -endif() + endif(TBB_FOUND) + endif(EIGEN3_FOUND) + endif (NOT CGAL_VERSION VERSION_LESS 4.8.0) +endif(CGAL_FOUND) diff --git a/src/Tangential_complex/example/CMakeLists.txt b/src/Tangential_complex/example/CMakeLists.txt index 7ba043f0..a75ccd5b 100644 --- a/src/Tangential_complex/example/CMakeLists.txt +++ b/src/Tangential_complex/example/CMakeLists.txt @@ -3,14 +3,7 @@ project(Tangential_complex_examples) if(CGAL_FOUND) if (NOT CGAL_VERSION VERSION_LESS 4.8.0) - message(STATUS "CGAL version: ${CGAL_VERSION}.") - - find_package(Eigen3 3.1.0) if (EIGEN3_FOUND) - message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") - include( ${EIGEN3_USE_FILE} ) - include_directories (BEFORE "../../include") - add_executable( Tangential_complex_example_basic example_basic.cpp ) target_link_libraries(Tangential_complex_example_basic ${CGAL_LIBRARY} ${Boost_DATE_TIME_LIBRARY}) add_executable( Tangential_complex_example_with_perturb example_with_perturb.cpp ) @@ -18,13 +11,13 @@ if(CGAL_FOUND) if (TBB_FOUND) target_link_libraries(Tangential_complex_example_basic ${TBB_LIBRARIES}) target_link_libraries(Tangential_complex_example_with_perturb ${TBB_LIBRARIES}) - endif() - else() - message(WARNING "Eigen3 not found. Version 3.1.0 is required for the Tangential_complex examples.") - endif() - else() - message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile Tangential_complex examples. Version 4.8.0 is required.") - endif () -else() - message(WARNING "CGAL not found. 
It is required for the Tangential_complex examples.") -endif() + endif(TBB_FOUND) + + add_test(Tangential_complex_example_basic + ${CMAKE_CURRENT_BINARY_DIR}/Tangential_complex_example_basic) + + add_test(Tangential_complex_example_with_perturb + ${CMAKE_CURRENT_BINARY_DIR}/Tangential_complex_example_with_perturb) + endif(EIGEN3_FOUND) + endif(NOT CGAL_VERSION VERSION_LESS 4.8.0) +endif(CGAL_FOUND) diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h index fe23c4e7..1a2cb6ba 100644 --- a/src/common/doc/main_page.h +++ b/src/common/doc/main_page.h @@ -220,8 +220,8 @@ make \endverbatim * Library (CGAL \cite cgal:eb-15b) and will not be built if CGAL is not installed: * \li * Persistent_cohomology/alpha_complex_3d_persistence.cpp - * \li - * Simplex_tree/simplex_tree_from_alpha_shapes_3.cpp + * \li + * Simplex_tree/example_alpha_shapes_3_simplex_tree_from_off_file.cpp * * The following example requires CGAL version ≥ 4.6: * \li @@ -281,8 +281,8 @@ make \endverbatim * Persistent_cohomology/alpha_complex_persistence.cpp * \li * Simplex_tree/simple_simplex_tree.cpp - * \li - * Simplex_tree/simplex_tree_from_alpha_shapes_3.cpp + * \li + * Simplex_tree/example_alpha_shapes_3_simplex_tree_from_off_file.cpp * \li * Simplex_tree/simplex_tree_from_cliques_of_graph.cpp * \li @@ -351,7 +351,7 @@ make \endverbatim * @example Persistent_cohomology/custom_persistence_sort.cpp * @example Simplex_tree/mini_simplex_tree.cpp * @example Simplex_tree/simple_simplex_tree.cpp - * @example Simplex_tree/simplex_tree_from_alpha_shapes_3.cpp + * @example Simplex_tree/example_alpha_shapes_3_simplex_tree_from_off_file.cpp * @example Simplex_tree/simplex_tree_from_cliques_of_graph.cpp * @example Skeleton_blocker/Skeleton_blocker_from_simplices.cpp * @example Skeleton_blocker/Skeleton_blocker_iteration.cpp -- cgit v1.2.3
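The transforming_iterator header added under src/common/include/gudhi_patches above is a compact substitute for boost::transform_iterator: it stores a functor (through the empty-base trick of Functor_as_base) and applies it on every dereference via make_transforming_iterator. The sketch below is illustrative only and is not part of the patch; the Square functor and the input vector are invented, and it assumes CGAL, Boost and the gudhi_patches directory are on the compiler's include path.

    // Minimal usage sketch for CGAL::transforming_iterator (hypothetical example).
    #include <CGAL/transforming_iterator.h>
    #include <iostream>
    #include <vector>

    struct Square {
      typedef int result_type;  // used by the boost::result_of fallback when CGAL_CXX11 is not defined
      int operator()(int x) const { return x * x; }
    };

    int main() {
      std::vector<int> values = {1, 2, 3, 4};
      // Each dereference applies Square to the element of the wrapped iterator.
      auto first = CGAL::make_transforming_iterator(values.begin(), Square());
      auto last  = CGAL::make_transforming_iterator(values.end(), Square());
      for (auto it = first; it != last; ++it)
        std::cout << *it << ' ';  // prints: 1 4 9 16
      std::cout << std::endl;
      return 0;
    }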
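transforming_pair_iterator plays the same role for two ranges walked in lockstep: dereferencing applies a binary functor to the pair of current elements, and the assertions in equal() and distance_to() check that both ranges stay synchronised. Another hypothetical sketch under the same include-path assumptions; Add and the two vectors are invented for illustration.

    // Minimal usage sketch for CGAL::transforming_pair_iterator (hypothetical example).
    #include <CGAL/transforming_pair_iterator.h>
    #include <iostream>
    #include <vector>

    struct Add {
      typedef double result_type;  // used by the boost::result_of fallback when CGAL_CXX11 is not defined
      double operator()(double a, double b) const { return a + b; }
    };

    int main() {
      std::vector<double> xs = {1.0, 2.0, 3.0};
      std::vector<double> ys = {0.5, 0.5, 0.5};
      // Both ranges are traversed together; each dereference returns xs[i] + ys[i].
      auto first = CGAL::make_transforming_pair_iterator(xs.begin(), ys.begin(), Add());
      auto last  = CGAL::make_transforming_pair_iterator(xs.end(), ys.end(), Add());
      for (auto it = first; it != last; ++it)
        std::cout << *it << ' ';  // prints: 1.5 2.5 3.5
      std::cout << std::endl;
      return 0;
    }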
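typeset.h provides a small compile-time set of types (contains, add, union, intersection) that is cheaper to instantiate than a std::tuple used only as a type list. The compile-time checks below are a sketch, assuming a C++11 compiler so that the CGAL_CXX11 branch of the header is taken; the Set alias and the asserted facts are purely illustrative.

    // Compile-time sketch for CGAL::typeset (hypothetical example).
    #include <CGAL/typeset.h>
    #include <type_traits>

    typedef CGAL::typeset<int, double> Set;

    static_assert(Set::contains<int>::value, "int belongs to the set");
    static_assert(!Set::contains<char>::value, "char does not belong to the set");
    // Adding an element that is already present leaves the set unchanged.
    static_assert(std::is_same<Set::add<int>, Set>::value, "no duplicate is introduced");

    int main() { return 0; }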